From fb49328ce9c2071030e2ed618cdf2b3d9b2ebfac Mon Sep 17 00:00:00 2001 From: Benjamin-eecs Date: Mon, 2 May 2022 22:02:24 +0800 Subject: [PATCH 01/19] feat(doc): update readthedocs --- .clang-format | 2 + .gitignore | 7 + Makefile | 83 ++++++ TorchOpt/__init__.py | 17 +- TorchOpt/_lib/adam_op.py | 22 +- TorchOpt/_src/MetaOptimizer.py | 71 ++--- TorchOpt/_src/Optimizer.py | 59 ++-- .../_src/accelerated_op/adam_op/AdamOp.py | 18 +- TorchOpt/_src/alias.py | 94 ++++--- TorchOpt/_src/base.py | 15 +- TorchOpt/_src/clip.py | 30 +- TorchOpt/_src/combine.py | 9 +- TorchOpt/_src/hook.py | 8 +- TorchOpt/_src/pytypes.py | 3 +- TorchOpt/_src/schedule.py | 48 ++-- TorchOpt/_src/transform.py | 265 ++++++++++-------- TorchOpt/_src/update.py | 7 +- TorchOpt/_src/utils.py | 18 +- TorchOpt/_src/visual.py | 30 +- docs/Makefile | 20 ++ docs/_static/css/style.css | 138 +++++++++ docs/_static/images/logo-torchopt.pdf | Bin 0 -> 54623 bytes docs/_static/images/logod-05.png | Bin 0 -> 7297 bytes docs/_static/images/logod-07.png | Bin 0 -> 158465 bytes docs/_static/js/copybutton.js | 64 +++++ docs/conf.py | 91 ++++++ docs/index.rst | 46 +++ examples/L2R/helper/argument.py | 34 ++- examples/L2R/helper/model.py | 30 +- examples/L2R/helper/utils.py | 69 +++-- examples/L2R/train_l2r.py | 116 ++++---- examples/LOLA/helper/agent.py | 13 +- examples/LOLA/helper/argument.py | 45 ++- examples/LOLA/helper/env.py | 26 +- examples/LOLA/helper/utils.py | 31 +- examples/LOLA/lola_dice.py | 69 +++-- examples/LOLA/visualise.py | 10 +- examples/MAML-RL/helpers/Tabular_mdp.py | 42 ++- examples/MAML-RL/helpers/__init__.py | 18 +- examples/MAML-RL/helpers/policy.py | 19 +- examples/MAML-RL/run_MAML.py | 55 ++-- examples/MGRL/toy.py | 5 +- examples/few-shot/maml-omniglot.py | 77 +++-- examples/few-shot/support/omniglot_loaders.py | 121 +++++--- examples/visualize.py | 21 +- include/adam_op/adam_op.h | 36 +-- include/adam_op/adam_op_impl.h | 36 +-- include/utils.h | 2 +- setup.py | 22 +- src/adam_op/adam_op.cpp | 39 +-- src/adam_op/adam_op_impl.cpp | 85 +++--- .../high_level/test_high_level_inplace.py | 62 ++-- .../unit/low_level/test_low_level_inplace.py | 35 ++- tests/unit/test_clip.py | 23 +- tests/unit/test_schedule.py | 3 +- 55 files changed, 1523 insertions(+), 786 deletions(-) create mode 100644 Makefile create mode 100644 docs/Makefile create mode 100644 docs/_static/css/style.css create mode 100644 docs/_static/images/logo-torchopt.pdf create mode 100644 docs/_static/images/logod-05.png create mode 100644 docs/_static/images/logod-07.png create mode 100644 docs/_static/js/copybutton.js create mode 100644 docs/conf.py create mode 100644 docs/index.rst diff --git a/.clang-format b/.clang-format index 3d22e0a8..7e93992a 100644 --- a/.clang-format +++ b/.clang-format @@ -1 +1,3 @@ BasedOnStyle: Google +DerivePointerAlignment: false +PointerAlignment: Left diff --git a/.gitignore b/.gitignore index 14816e4b..f74fff7e 100644 --- a/.gitignore +++ b/.gitignore @@ -6,3 +6,10 @@ TorchOpt/**/*.so TorchOpt.egg-info dist **/.ipynb_checkpoints/* + +# Sphinx documentation +docs/_build/ + + +# mkdocs documentation +/site diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..b4e42f22 --- /dev/null +++ b/Makefile @@ -0,0 +1,83 @@ +print-% : ; @echo $* = $($*) +SHELL = /bin/bash +PROJECT_NAME = TorchOpt +PYTHON_FILES = $(shell find . -type f -name "*.py") +CPP_FILES = $(shell find . 
-type f -name "*.h" -o -name "*.cpp") +COMMIT_HASH = $(shell git log -1 --format=%h) + + +# installation + +check_install = python3 -c "import $(1)" || (cd && pip3 install $(1) --upgrade && cd -) +check_install_extra = python3 -c "import $(1)" || (cd && pip3 install $(2) --upgrade && cd -) + + +flake8-install: + $(call check_install, flake8) + $(call check_install_extra, bugbear, flake8_bugbear) + +py-format-install: + $(call check_install, isort) + $(call check_install, yapf) + +mypy-install: + $(call check_install, mypy) + +cpplint-install: + $(call check_install, cpplint) + +clang-format-install: + command -v clang-format-11 || sudo apt-get install -y clang-format-11 + +clang-tidy-install: + command -v clang-tidy || sudo apt-get install -y clang-tidy + + +doc-install: + $(call check_install, pydocstyle) + $(call check_install, doc8) + $(call check_install, sphinx) + $(call check_install, sphinx_rtd_theme) + $(call check_install_extra, sphinxcontrib.spelling, sphinxcontrib.spelling pyenchant) + +# python linter + +flake8: flake8-install + flake8 $(PYTHON_FILES) --count --show-source --statistics + +py-format: py-format-install + isort --check $(PYTHON_FILES) && yapf -r -d $(PYTHON_FILES) + +mypy: mypy-install + mypy $(PROJECT_NAME) + +# c++ linter + +cpplint: cpplint-install + cpplint $(CPP_FILES) + +clang-format: clang-format-install + clang-format-11 --style=file -i $(CPP_FILES) -n --Werror + +# documentation + +docstyle: doc-install + pydocstyle $(PROJECT_NAME) && doc8 docs && cd docs && make html SPHINXOPTS="-W" + +doc: doc-install + cd docs && make html && cd _build/html && python3 -m http.server + +spelling: doc-install + cd docs && make spelling SPHINXOPTS="-W" + +doc-clean: + cd docs && make clean + +lint: flake8 py-format clang-format cpplint mypy docstyle spelling + +format: py-format-install clang-format-install + isort $(PYTHON_FILES) + yapf -ir $(PYTHON_FILES) + clang-format-11 -style=file -i $(CPP_FILES) + + diff --git a/TorchOpt/__init__.py b/TorchOpt/__init__.py index 368aee18..28e783d5 100644 --- a/TorchOpt/__init__.py +++ b/TorchOpt/__init__.py @@ -13,15 +13,12 @@ # limitations under the License. # ============================================================================== -from ._src import combine -from ._src import clip -from ._src import visual -from ._src import hook -from ._src import schedule -from ._src.MetaOptimizer import MetaOptimizer, MetaSGD, MetaAdam, MetaRMSProp -from ._src.Optimizer import Optimizer, SGD, Adam, RMSProp +from ._src import (accelerated_op_available, clip, combine, hook, schedule, + visual) +from ._src.alias import adam, rmsprop, sgd +from ._src.MetaOptimizer import MetaAdam, MetaOptimizer, MetaRMSProp, MetaSGD +from ._src.Optimizer import SGD, Adam, Optimizer, RMSProp from ._src.update import apply_updates -from ._src.alias import sgd, adam, rmsprop -from ._src.utils import stop_gradient, extract_state_dict, recover_state_dict -from ._src import accelerated_op_available +from ._src.utils import extract_state_dict, recover_state_dict, stop_gradient + __version__ = "0.4.1" diff --git a/TorchOpt/_lib/adam_op.py b/TorchOpt/_lib/adam_op.py index e19efc59..0a72e0b1 100644 --- a/TorchOpt/_lib/adam_op.py +++ b/TorchOpt/_lib/adam_op.py @@ -13,22 +13,30 @@ # limitations under the License. # ============================================================================== -def forward_(updates, mu, nu, lr, b1, b2, eps, eps_root, count): ... +def forward_(updates, mu, nu, lr, b1, b2, eps, eps_root, count): + ... -def forwardMu(updates, mu, b1): ... 
+def forwardMu(updates, mu, b1): + ... -def forwardNu(updates, nu, b2): ... +def forwardNu(updates, nu, b2): + ... -def forwardUpdates(new_mu, new_nu, lr, b1, b2, eps, eps_root, count): ... +def forwardUpdates(new_mu, new_nu, lr, b1, b2, eps, eps_root, count): + ... -def backwardMu(dmu, updates, mu, b1): ... +def backwardMu(dmu, updates, mu, b1): + ... -def backwardNu(dnu, updates, nu, b2): ... +def backwardNu(dnu, updates, nu, b2): + ... -def backwardUpdates(dupdates, updates, new_mu, new_nu, lr, b1, b2, count): ... + +def backwardUpdates(dupdates, updates, new_mu, new_nu, lr, b1, b2, count): + ... diff --git a/TorchOpt/_src/MetaOptimizer.py b/TorchOpt/_src/MetaOptimizer.py index d5134b8d..fa9c541f 100644 --- a/TorchOpt/_src/MetaOptimizer.py +++ b/TorchOpt/_src/MetaOptimizer.py @@ -13,19 +13,18 @@ # limitations under the License. # ============================================================================== +import jax import torch from torch import nn -import jax import TorchOpt -from TorchOpt._src.alias import sgd, adam, rmsprop from TorchOpt._src import base +from TorchOpt._src.alias import adam, rmsprop, sgd from TorchOpt._src.pytypes import ScalarOrSchedule class MetaOptimizer(object): """A high-level optimizer base class for meta learning.""" - def __init__(self, net: nn.Module, impl: base.GradientTransformation): """ Args: @@ -51,18 +50,23 @@ def step(self, loss: torch.Tensor): loss (torch.Tensor): the loss that is used to compute the gradients to the network parameters. """ # step parameter only - for idx, (state, param_containers) in enumerate(zip(self.state_groups, self.param_containers_groups)): + for idx, (state, param_containers) in enumerate( + zip(self.state_groups, self.param_containers_groups)): flatten_params, containers_tree = jax.tree_util.tree_flatten( param_containers) flatten_params = tuple(flatten_params) - grad = torch.autograd.grad( - loss, flatten_params, create_graph=True, allow_unused=True) + grad = torch.autograd.grad(loss, + flatten_params, + create_graph=True, + allow_unused=True) updates, state = self.impl.update(grad, state, False) self.state_groups[idx] = state - new_params = TorchOpt.apply_updates( - flatten_params, updates, inplace=False) + new_params = TorchOpt.apply_updates(flatten_params, + updates, + inplace=False) unflatten_new_params = containers_tree.unflatten(new_params) - for (container, unflatten_param) in zip(param_containers, unflatten_new_params): + for (container, unflatten_param) in zip(param_containers, + unflatten_new_params): container.update(unflatten_param) def add_param_group(self, net): @@ -89,7 +93,6 @@ def load_state_dict(self, state_dict): class MetaSGD(MetaOptimizer): """A canonical Stochastic Gradient Descent optimiser.""" - def __init__(self, net, lr: ScalarOrSchedule, @@ -102,17 +105,16 @@ def __init__(self, args: other arguments see `alias.sgd`, here we set `moment_requires_grad=True` to make tensors like momentum be differentiable. """ - super().__init__(net, - sgd(lr=lr, - momentum=momentum, - nesterov=nesterov, - moment_requires_grad=moment_requires_grad) - ) + super().__init__( + net, + sgd(lr=lr, + momentum=momentum, + nesterov=nesterov, + moment_requires_grad=moment_requires_grad)) class MetaAdam(MetaOptimizer): """The classic Adam optimiser.""" - def __init__(self, net, lr: ScalarOrSchedule, @@ -128,20 +130,19 @@ def __init__(self, args: other arguments see `alias.adam`, here we set `moment_requires_grad=True` to make tensors like momentum be differentiable. 
""" - super().__init__(net, - adam(lr=lr, - b1=b1, - b2=b2, - eps=eps, - eps_root=eps_root, - moment_requires_grad=moment_requires_grad, - use_accelerated_op=use_accelerated_op) - ) + super().__init__( + net, + adam(lr=lr, + b1=b1, + b2=b2, + eps=eps, + eps_root=eps_root, + moment_requires_grad=moment_requires_grad, + use_accelerated_op=use_accelerated_op)) class MetaRMSProp(MetaOptimizer): """The classic RMSProp optimiser.""" - def __init__(self, net, lr: ScalarOrSchedule, @@ -157,10 +158,12 @@ def __init__(self, args: other arguments see `alias.adam`, here we set `moment_requires_grad=True` to make tensors like momentum be differentiable. """ - super().__init__(net, rmsprop(lr=lr, - decay=decay, - eps=eps, - initial_scale=initial_scale, - centered=centered, - momentum=momentum, - nesterov=nesterov)) + super().__init__( + net, + rmsprop(lr=lr, + decay=decay, + eps=eps, + initial_scale=initial_scale, + centered=centered, + momentum=momentum, + nesterov=nesterov)) diff --git a/TorchOpt/_src/Optimizer.py b/TorchOpt/_src/Optimizer.py index c13d68a4..d825118f 100644 --- a/TorchOpt/_src/Optimizer.py +++ b/TorchOpt/_src/Optimizer.py @@ -13,16 +13,16 @@ # limitations under the License. # ============================================================================== -import torch import jax +import torch + +from TorchOpt._src.alias import adam, rmsprop, sgd from TorchOpt._src.pytypes import ScalarOrSchedule from TorchOpt._src.update import apply_updates -from TorchOpt._src.alias import adam, sgd, rmsprop class Optimizer(object): """A high-level base class that has the similar with `torch.optim.Optimier`""" - def __init__(self, params, impl): """ Args: @@ -52,10 +52,12 @@ def zero_grad(self, set_to_none: bool = False): """ for group in self.param_groups: if set_to_none: + def f(p): p.grad = None return None else: + def f(p): if p.grad is None: return None @@ -65,6 +67,7 @@ def f(p): p.grad.requires_grad_(False) p.grad.zero_() return None + jax.tree_map(f, group) def state_dict(self): @@ -88,7 +91,10 @@ def step(self, closure=None): loss = closure() for param, state in zip(self.param_groups, self.state_groups): - def f(p): return p.grad + + def f(p): + return p.grad + grad = jax.tree_map(f, param) updates, _ = self.impl.update(grad, state) apply_updates(param, updates) @@ -105,7 +111,6 @@ def add_param_group(self, params): class SGD(Optimizer): """The classic Adam optimiser.""" - def __init__(self, params, lr: ScalarOrSchedule, @@ -116,15 +121,16 @@ def __init__(self, params (iterable): an iterable of `torch.Tensor`s. Specifies what Tensors should be optimized. args: other arguments see `alias.adam`. """ - super().__init__(params, sgd(lr=lr, - momentum=momentum, - nesterov=nesterov, - moment_requires_grad=False)) + super().__init__( + params, + sgd(lr=lr, + momentum=momentum, + nesterov=nesterov, + moment_requires_grad=False)) class Adam(Optimizer): """A canonical Stochastic Gradient Descent optimiser.""" - def __init__(self, params, lr: ScalarOrSchedule, @@ -138,18 +144,19 @@ def __init__(self, params (iterable): an iterable of `torch.Tensor`s. Specifies what Tensors should be optimized. args: other arguments see `alias.sgd`. 
""" - super().__init__(params, adam(lr=lr, - b1=b1, - b2=b2, - eps=eps, - eps_root=eps_root, - moment_requires_grad=False, - use_accelerated_op=use_accelerated_op)) + super().__init__( + params, + adam(lr=lr, + b1=b1, + b2=b2, + eps=eps, + eps_root=eps_root, + moment_requires_grad=False, + use_accelerated_op=use_accelerated_op)) class RMSProp(Optimizer): """An RMSProp optimiser.""" - def __init__(self, params, lr: ScalarOrSchedule, @@ -164,10 +171,12 @@ def __init__(self, params (iterable): an iterable of `torch.Tensor`s. Specifies what Tensors should be optimized. args: other arguments see `alias.sgd`. """ - super().__init__(params, rmsprop(lr=lr, - decay=decay, - eps=eps, - initial_scale=initial_scale, - centered=centered, - momentum=momentum, - nesterov=nesterov)) + super().__init__( + params, + rmsprop(lr=lr, + decay=decay, + eps=eps, + initial_scale=initial_scale, + centered=centered, + momentum=momentum, + nesterov=nesterov)) diff --git a/TorchOpt/_src/accelerated_op/adam_op/AdamOp.py b/TorchOpt/_src/accelerated_op/adam_op/AdamOp.py index 40049378..92fd92d4 100644 --- a/TorchOpt/_src/accelerated_op/adam_op/AdamOp.py +++ b/TorchOpt/_src/accelerated_op/adam_op/AdamOp.py @@ -14,7 +14,9 @@ # ============================================================================== from typing import Any + import torch + from TorchOpt._lib import adam_op @@ -69,8 +71,8 @@ def jvp(ctx: Any, *grad_inputs: Any) -> Any: @staticmethod def forward(ctx, *args): new_mu, new_nu, (b1, b2, eps, eps_root, count) = args - new_updates = adam_op.forwardUpdates( - new_mu, new_nu, b1, b2, eps, eps_root, count) + new_updates = adam_op.forwardUpdates(new_mu, new_nu, b1, b2, eps, + eps_root, count) ctx.save_for_backward(new_updates, new_mu, new_nu) ctx.others = (b1, b2, eps, eps_root, count) return new_updates @@ -80,8 +82,8 @@ def backward(ctx, *args): dupdates = args[0] updates, new_mu, new_nu = ctx.saved_tensors b1, b2, eps, eps_root, count = ctx.others - result = adam_op.backwardUpdates( - dupdates, updates, new_mu, new_nu, b1, b2, count) + result = adam_op.backwardUpdates(dupdates, updates, new_mu, new_nu, + b1, b2, count) return result[0], result[1], None def __init__(self, b1=0.9, b2=0.999, eps=1e-8, eps_root=0., inplace=True): @@ -98,14 +100,14 @@ def __call__(self, mu, nu, updates, count): current_device = torch.cuda.current_device() torch.cuda.set_device(updates.device) if self.inplace: - new_updates, new_mu, new_nu = adam_op.forward_(updates, mu, nu, self.b1, - self.b2, self.eps, self.eps_root, count) + new_updates, new_mu, new_nu = adam_op.forward_( + updates, mu, nu, self.b1, self.b2, self.eps, self.eps_root, + count) else: new_mu = self.MuOp.apply(updates, mu, self.b1) new_nu = self.NuOp.apply(updates, nu, self.b2) new_updates = self.UpdatesOp.apply( - new_mu, - new_nu, + new_mu, new_nu, (self.b1, self.b2, self.eps, self.eps_root, count)) if updates.is_cuda: torch.cuda.set_device(current_device) diff --git a/TorchOpt/_src/alias.py b/TorchOpt/_src/alias.py index be69b6c9..a34ea4dc 100644 --- a/TorchOpt/_src/alias.py +++ b/TorchOpt/_src/alias.py @@ -31,33 +31,34 @@ # ============================================================================== from typing import Optional + import jax -from TorchOpt._src import base -from TorchOpt._src import combine -from TorchOpt._src import transform +from TorchOpt._src import base, combine, transform from TorchOpt._src.pytypes import ScalarOrSchedule def _scale_by_lr(lr: ScalarOrSchedule, flip_sign=True): m = -1 if flip_sign else 1 if callable(lr): + def 
schedule_wrapper(count): - def f(scaled_lr): return m * scaled_lr + def f(scaled_lr): + return m * scaled_lr + return jax.tree_map(f, lr(count)) + return transform.scale_by_schedule(schedule_wrapper) return transform.scale(m * lr) -def adam( - lr: ScalarOrSchedule, - b1: float = 0.9, - b2: float = 0.999, - eps: float = 1e-8, - eps_root: float = 0.0, - moment_requires_grad: bool = False, - use_accelerated_op: bool = False -) -> base.GradientTransformation: +def adam(lr: ScalarOrSchedule, + b1: float = 0.9, + b2: float = 0.999, + eps: float = 1e-8, + eps_root: float = 0.0, + moment_requires_grad: bool = False, + use_accelerated_op: bool = False) -> base.GradientTransformation: """The classic Adam optimiser. Adam is an SGD variant with learning rate adaptation. The `lr` @@ -85,17 +86,20 @@ def adam( """ adam_inst = transform.scale_by_accelerated_adam if use_accelerated_op else transform.scale_by_adam return combine.chain( - adam_inst(b1=b1, b2=b2, eps=eps, eps_root=eps_root, + adam_inst(b1=b1, + b2=b2, + eps=eps, + eps_root=eps_root, moment_requires_grad=moment_requires_grad), _scale_by_lr(lr), ) def sgd( - lr: ScalarOrSchedule, - momentum: Optional[float] = None, - nesterov: bool = False, - moment_requires_grad: bool = False, + lr: ScalarOrSchedule, + momentum: Optional[float] = None, + nesterov: bool = False, + moment_requires_grad: bool = False, ) -> base.GradientTransformation: """A canonical Stochastic Gradient Descent optimiser. @@ -118,24 +122,21 @@ def sgd( A `GradientTransformation`. """ return combine.chain( - (transform.trace(decay=momentum, nesterov=nesterov, + (transform.trace(decay=momentum, + nesterov=nesterov, moment_requires_grad=moment_requires_grad) - if momentum is not None else base.identity()), - _scale_by_lr(lr) - ) - - -def rmsprop( - lr: ScalarOrSchedule, - decay: float = 0.9, - eps: float = 1e-8, - initial_scale: float = 0., - centered: bool = False, - momentum: Optional[float] = None, - nesterov: bool = False -) -> base.GradientTransformation: - # pylint: disable=line-too-long - """A flexible RMSProp optimiser. + if momentum is not None else base.identity()), _scale_by_lr(lr)) + + +def rmsprop(lr: ScalarOrSchedule, + decay: float = 0.9, + eps: float = 1e-8, + initial_scale: float = 0., + centered: bool = False, + momentum: Optional[float] = None, + nesterov: bool = False) -> base.GradientTransformation: + # pylint: disable=line-too-long + """A flexible RMSProp optimiser. RMSProp is an SGD variant with learning rate adaptation. The `learning_rate` used for each weight is scaled by a suitable estimate of the magnitude of the gradients on previous steps. Several variants of RMSProp can be found @@ -159,13 +160,18 @@ def rmsprop( Returns: the corresponding `GradientTransformation`. 
""" - # pylint: enable=line-too-long - if centered: + # pylint: enable=line-too-long + if centered: + return combine.chain( + transform.scale_by_stddev(decay=decay, + eps=eps, + initial_scale=initial_scale), + _scale_by_lr(lr), + (transform.trace(decay=momentum, nesterov=nesterov) + if momentum is not None else base.identity())) return combine.chain( - transform.scale_by_stddev(decay=decay, eps=eps, initial_scale=initial_scale), - _scale_by_lr(lr), - (transform.trace(decay=momentum, nesterov=nesterov) if momentum is not None else base.identity())) - return combine.chain(transform.scale_by_rms(decay=decay, eps=eps, initial_scale=initial_scale), - _scale_by_lr(lr), - (transform.trace(decay=momentum, nesterov=nesterov) if momentum is not None else base.identity()) - ) + transform.scale_by_rms(decay=decay, + eps=eps, + initial_scale=initial_scale), _scale_by_lr(lr), + (transform.trace(decay=momentum, nesterov=nesterov) + if momentum is not None else base.identity())) diff --git a/TorchOpt/_src/base.py b/TorchOpt/_src/base.py index c49b1861..5b2ad532 100644 --- a/TorchOpt/_src/base.py +++ b/TorchOpt/_src/base.py @@ -30,7 +30,7 @@ # limitations under the License. # ============================================================================== -from typing import NamedTuple, Tuple, Callable +from typing import Callable, NamedTuple, Tuple import typing_extensions @@ -55,7 +55,6 @@ class TransformInitFn(typing_extensions.Protocol): arbitrary structured initial `state` for the gradient transformation. This may hold statistics of the past updates or any other non static information. """ - def __call__(self, params: Params) -> OptState: """The `init` function. @@ -77,13 +76,10 @@ class TransformUpdateFn(typing_extensions.Protocol): optional, it must however be provided when using transformations that require access to the current values of the parameters. """ - - def __call__( - self, - updates: Updates, - state: OptState, - inplace: bool = True - ) -> Tuple[Updates, OptState]: + def __call__(self, + updates: Updates, + state: OptState, + inplace: bool = True) -> Tuple[Updates, OptState]: """The `update` function. Args: @@ -136,7 +132,6 @@ def identity() -> GradientTransformation: Returns: An (init_fn, update_fn) tuple. """ - def init_fn(_): return EmptyState() diff --git a/TorchOpt/_src/clip.py b/TorchOpt/_src/clip.py index ef817047..3b50c40c 100644 --- a/TorchOpt/_src/clip.py +++ b/TorchOpt/_src/clip.py @@ -17,24 +17,24 @@ # ============================================================================== import jax - import torch from torch._six import inf -from TorchOpt._src import base +from TorchOpt._src import base ClipState = base.EmptyState -def clip_grad_norm(max_norm: float, norm_type: float = 2., - error_if_nonfinite: bool = False) -> base.GradientTransformation: +def clip_grad_norm( + max_norm: float, + norm_type: float = 2., + error_if_nonfinite: bool = False) -> base.GradientTransformation: """Clips gradient norm of an iterable of parameters. Args: max_delta: The maximum absolute value for each element in the update. Returns: An (init_fn, update_fn) tuple. 
""" - def init_fn(params): del params return ClipState() @@ -49,14 +49,17 @@ def update_fn(updates, state, inplace=True): device = available_updates[0].device with torch.no_grad(): if norm_type == inf: - norms = [p.abs().max().to(device) - for p in available_updates] - total_norm = norms[0] if len( - norms) == 1 else torch.max(torch.stack(norms)) + norms = [p.abs().max().to(device) for p in available_updates] + total_norm = norms[0] if len(norms) == 1 else torch.max( + torch.stack(norms)) else: - total_norm = torch.norm(torch.stack( - [torch.norm(p, norm_type).to(device) for p in available_updates]), norm_type) - if error_if_nonfinite and torch.logical_or(total_norm.isnan(), total_norm.isinf()): + total_norm = torch.norm( + torch.stack([ + torch.norm(p, norm_type).to(device) + for p in available_updates + ]), norm_type) + if error_if_nonfinite and torch.logical_or(total_norm.isnan(), + total_norm.isinf()): raise RuntimeError( f'The total norm of order {norm_type} for gradients from ' '`parameters` is non-finite, so it cannot be clipped. To disable ' @@ -68,11 +71,14 @@ def update_fn(updates, state, inplace=True): # when the gradients do not reside in CPU memory. clip_coef_clamped = min(clip_coef, 1.) if inplace: + def f(g): return g.mul_(clip_coef_clamped) if g is not None else None else: + def f(g): return g.mul(clip_coef_clamped) if g is not None else None + new_updates = jax.tree_map(f, updates) return new_updates, state diff --git a/TorchOpt/_src/combine.py b/TorchOpt/_src/combine.py index a1ec43bb..6a1b241c 100644 --- a/TorchOpt/_src/combine.py +++ b/TorchOpt/_src/combine.py @@ -33,9 +33,7 @@ from TorchOpt._src import base -def chain( - *args: base.GradientTransformation -) -> base.GradientTransformation: +def chain(*args: base.GradientTransformation) -> base.GradientTransformation: """Applies a list of chainable update transformations. Given a sequence of chainable transforms, `chain` returns an `init_fn` @@ -57,8 +55,9 @@ def init_fn(params): def update_fn(updates, state, inplace=True): if len(update_fns) != len(state): - raise ValueError('The number of updates and states has to be the same in ' - 'chain! Make sure you have called init first!') + raise ValueError( + 'The number of updates and states has to be the same in ' + 'chain! Make sure you have called init first!') new_state = [] for s, fn in zip(state, update_fns): updates, new_s = fn(updates, s, inplace) diff --git a/TorchOpt/_src/hook.py b/TorchOpt/_src/hook.py index ef5c31cf..95c6ba63 100644 --- a/TorchOpt/_src/hook.py +++ b/TorchOpt/_src/hook.py @@ -15,7 +15,8 @@ import jax import torch -from .base import GradientTransformation, EmptyState + +from .base import EmptyState, GradientTransformation def zero_nan_hook(g: torch.Tensor) -> torch.Tensor: @@ -30,12 +31,13 @@ def register_hook(hook) -> GradientTransformation: Returns: An (init_fn, update_fn) tuple. 
""" - def init_fn(_): return EmptyState() def update_fn(updates, state, inplace=False): - def f(g): return g.register_hook(hook) if g is not None else None + def f(g): + return g.register_hook(hook) if g is not None else None + jax.tree_map(f, updates) return updates, state diff --git a/TorchOpt/_src/pytypes.py b/TorchOpt/_src/pytypes.py index 0d116c03..ca14c319 100644 --- a/TorchOpt/_src/pytypes.py +++ b/TorchOpt/_src/pytypes.py @@ -1,4 +1,4 @@ -from typing import Any, Iterable, Mapping, Union, Callable +from typing import Any, Callable, Iterable, Mapping, Union from torch import Tensor @@ -6,6 +6,5 @@ Numeric = Union[Tensor, Scalar] TensorTree = Union[Tensor, Iterable['TensorTree'], Mapping[Any, 'TensorTree']] - Schedule = Callable[[Numeric], Numeric] ScalarOrSchedule = Union[float, Schedule] diff --git a/TorchOpt/_src/schedule.py b/TorchOpt/_src/schedule.py index 08c7e637..192cca3c 100644 --- a/TorchOpt/_src/schedule.py +++ b/TorchOpt/_src/schedule.py @@ -30,20 +30,18 @@ # limitations under the License. # ============================================================================== -from absl import logging -import numpy as np import jax -from TorchOpt._src import base -from TorchOpt._src import pytypes +import numpy as np +from absl import logging +from TorchOpt._src import base, pytypes -def polynomial_schedule( - init_value: pytypes.Scalar, - end_value: pytypes.Scalar, - power: pytypes.Scalar, - transition_steps: int, - transition_begin: int = 0 -) -> base.Schedule: + +def polynomial_schedule(init_value: pytypes.Scalar, + end_value: pytypes.Scalar, + power: pytypes.Scalar, + transition_steps: int, + transition_begin: int = 0) -> base.Schedule: """Constructs a schedule with polynomial transition from init to end value. Args: init_value: initial value for the scalar to be annealed. @@ -62,31 +60,35 @@ def polynomial_schedule( if transition_steps <= 0: logging.info( 'A polynomial schedule was set with a non-positive `transition_steps` ' - 'value; this results in a constant schedule with value `init_value`.') + 'value; this results in a constant schedule with value `init_value`.' + ) return lambda count: init_value if transition_begin < 0: logging.info( 'An exponential schedule was set with a negative `transition_begin` ' - 'value; this will result in `transition_begin` falling back to `0`.') + 'value; this will result in `transition_begin` falling back to `0`.' + ) transition_begin = 0 def schedule(count): def impl(count): count = np.clip(count - transition_begin, 0, transition_steps) frac = 1 - count / transition_steps - return (init_value - end_value) * (frac ** power) + end_value + return (init_value - end_value) * (frac**power) + end_value + return jax.tree_map(impl, count) + return schedule # Alias polynomial schedule to linear schedule for convenience. 
-def linear_schedule( - init_value: pytypes.Scalar, - end_value: pytypes.Scalar, - transition_steps: int, - transition_begin: int = 0 -) -> base.Schedule: - return polynomial_schedule( - init_value=init_value, end_value=end_value, power=1, - transition_steps=transition_steps, transition_begin=transition_begin) +def linear_schedule(init_value: pytypes.Scalar, + end_value: pytypes.Scalar, + transition_steps: int, + transition_begin: int = 0) -> base.Schedule: + return polynomial_schedule(init_value=init_value, + end_value=end_value, + power=1, + transition_steps=transition_steps, + transition_begin=transition_begin) diff --git a/TorchOpt/_src/transform.py b/TorchOpt/_src/transform.py index 7b148c07..6c293684 100644 --- a/TorchOpt/_src/transform.py +++ b/TorchOpt/_src/transform.py @@ -30,14 +30,13 @@ # limitations under the License. # ============================================================================== -from typing import NamedTuple, List +from typing import List, NamedTuple import jax import torch from TorchOpt._src import base -from TorchOpt._src.pytypes import Schedule, ScalarOrSchedule - +from TorchOpt._src.pytypes import ScalarOrSchedule, Schedule ScaleState = base.EmptyState @@ -45,12 +44,11 @@ def inc_count(updates, count: List[int]) -> List[int]: def f(c, g): return c + 1 if g is not None else c + return jax.tree_map(f, count, updates) -def scale( - step_size: float -) -> base.GradientTransformation: +def scale(step_size: float) -> base.GradientTransformation: """Scale updates by some fixed scalar `step_size`. Args: @@ -59,18 +57,20 @@ def scale( Returns: An (init_fn, update_fn) tuple. """ - def init_fn(params): del params return ScaleState() def update_fn(updates, state, inplace=True): if inplace: + def f(g): return g.mul_(step_size) if g is not None else None else: + def f(g): return g.mul(step_size) if g is not None else None + updates = jax.tree_map(f, updates) return updates, state @@ -82,9 +82,7 @@ class ScaleByScheduleState(NamedTuple): count: List[int] -def scale_by_schedule( - step_size_fn: Schedule -) -> base.GradientTransformation: +def scale_by_schedule(step_size_fn: Schedule) -> base.GradientTransformation: """Scale updates using a custom schedule for the `step_size`. Args: @@ -94,19 +92,19 @@ def scale_by_schedule( Returns: An (init_fn, update_fn) tuple. 
""" - def init_fn(params): return ScaleByScheduleState(count=tuple(0 for _ in range(len(params)))) def update_fn(updates, state, inplace=True): step_size = step_size_fn(state.count) if inplace: - updates = jax.tree_map( - lambda g, step_size: g.mul_(step_size), updates, step_size) + updates = jax.tree_map(lambda g, step_size: g.mul_(step_size), + updates, step_size) else: - updates = jax.tree_map( - lambda g, step_size: g.mul(step_size), updates, step_size) - return updates, ScaleByScheduleState(count=inc_count(updates, state.count)) + updates = jax.tree_map(lambda g, step_size: g.mul(step_size), + updates, step_size) + return updates, ScaleByScheduleState( + count=inc_count(updates, state.count)) return base.GradientTransformation(init_fn, update_fn) @@ -120,11 +118,15 @@ class ScaleByRStdDevState(NamedTuple): def _update_moment(updates, moments, decay, order, inplace=True): """Compute the exponential moving average of the `order`-th moment.""" if inplace: + def f(g, t): - return t.mul_(decay).add_(g ** order, alpha=1 - decay) if g is not None else t + return t.mul_(decay).add_(g**order, alpha=1 - + decay) if g is not None else t else: + def f(g, t): - return t.mul(decay).add(g ** order, alpha=1 - decay) if g is not None else t + return t.mul(decay).add(g**order, alpha=1 - + decay) if g is not None else t return jax.tree_map(f, updates, moments) @@ -133,11 +135,15 @@ def _update_moment_per_elem_norm(updates, moments, decay, order, inplace=True): """Compute the EMA of the `order`-th moment of the element-wise norm.""" if inplace: + def f(g, t): - return t.mul_(decay).add_(g ** order, alpha=1 - decay) if g is not None else t + return t.mul_(decay).add_(g**order, alpha=1 - + decay) if g is not None else t else: + def f(g, t): - return t.mul(decay).add(g ** order, alpha=1 - decay) if g is not None else t + return t.mul(decay).add(g**order, alpha=1 - + decay) if g is not None else t return jax.tree_map(f, updates, moments) @@ -152,19 +158,23 @@ class ScaleByAdamState(NamedTuple): def _bias_correction(moment, decay, count, inplace=True): """Perform bias correction. This becomes a no-op as count goes to infinity.""" if inplace: - def f(t, c): return t.div_(1 - decay ** c) + + def f(t, c): + return t.div_(1 - decay**c) else: - def f(t, c): return t.div(1 - decay ** c) + + def f(t, c): + return t.div(1 - decay**c) return jax.tree_map(f, moment, count) def scale_by_adam( - b1: float = 0.9, - b2: float = 0.999, - eps: float = 1e-8, - eps_root: float = 0.0, - moment_requires_grad: bool = False, + b1: float = 0.9, + b2: float = 0.999, + eps: float = 1e-8, + eps_root: float = 0.0, + moment_requires_grad: bool = False, ) -> base.GradientTransformation: """Rescale updates according to the Adam algorithm. @@ -182,27 +192,33 @@ def scale_by_adam( Returns: An (init_fn, update_fn) tuple. 
""" - def init_fn(params): mu = jax.tree_map( # First moment - lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), params) + lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), + params) nu = jax.tree_map( # Second moment - lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), params) - return ScaleByAdamState(count=tuple(0 for _ in range(len(mu))), mu=tuple(mu), nu=tuple(nu)) + lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), + params) + return ScaleByAdamState(count=tuple(0 for _ in range(len(mu))), + mu=tuple(mu), + nu=tuple(nu)) def update_fn(updates, state, inplace=True): mu = _update_moment(updates, state.mu, b1, 1, inplace) - nu = _update_moment_per_elem_norm( - updates, state.nu, b2, 2, inplace) + nu = _update_moment_per_elem_norm(updates, state.nu, b2, 2, inplace) count_inc = inc_count(updates, state.count) mu_hat = _bias_correction(mu, b1, count_inc, False) nu_hat = _bias_correction(nu, b2, count_inc, False) if inplace: + def f(g, m, v): - return m.div_(torch.sqrt_(v.add_(eps_root)).add_(eps)) if g is not None else None + return m.div_(torch.sqrt_( + v.add_(eps_root)).add_(eps)) if g is not None else None else: + def f(g, m, v): - return m.div(torch.sqrt(v.add(eps_root)).add(eps)) if g is not None else None + return m.div(torch.sqrt( + v.add(eps_root)).add(eps)) if g is not None else None updates = jax.tree_map(f, updates, mu_hat, nu_hat) return updates, ScaleByAdamState(count=count_inc, mu=mu, nu=nu) @@ -211,11 +227,11 @@ def f(g, m, v): def scale_by_accelerated_adam( - b1: float = 0.9, - b2: float = 0.999, - eps: float = 1e-8, - eps_root: float = 0.0, - moment_requires_grad: bool = False, + b1: float = 0.9, + b2: float = 0.999, + eps: float = 1e-8, + eps_root: float = 0.0, + moment_requires_grad: bool = False, ) -> base.GradientTransformation: """Rescale updates according to the Adam algorithm. @@ -239,10 +255,14 @@ def scale_by_accelerated_adam( def init_fn(params): mu = jax.tree_map( # First moment - lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), params) + lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), + params) nu = jax.tree_map( # Second moment - lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), params) - return ScaleByAdamState(count=tuple(0 for _ in range(len(params))), mu=mu, nu=nu) + lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), + params) + return ScaleByAdamState(count=tuple(0 for _ in range(len(params))), + mu=mu, + nu=nu) def update_fn(updates, state, inplace=True): count_inc = inc_count(updates, state.count) @@ -253,7 +273,9 @@ def update_fn(updates, state, inplace=True): new_mus.append(new_mu) new_nus.append(new_nu) new_updates.append(new_update) - return tuple(new_updates), ScaleByAdamState(count=count_inc, mu=tuple(new_mus), nu=tuple(new_nus)) + return tuple(new_updates), ScaleByAdamState(count=count_inc, + mu=tuple(new_mus), + nu=tuple(new_nus)) return base.GradientTransformation(init_fn, update_fn) @@ -264,9 +286,9 @@ class TraceState(NamedTuple): def trace( - decay: float, - nesterov: bool = False, - moment_requires_grad: bool = False, + decay: float, + nesterov: bool = False, + moment_requires_grad: bool = False, ) -> base.GradientTransformation: """Compute a trace of past updates. @@ -282,34 +304,47 @@ def trace( Returns: An (init_fn, update_fn) tuple. 
""" - def init_fn(params): if decay == 0.: return TraceState(trace=()) else: - return TraceState( - trace=jax.tree_map( - lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), params)) + return TraceState(trace=jax.tree_map( + lambda t: torch.zeros_like( + t, requires_grad=moment_requires_grad), params)) def update_fn(updates, state, inplace=True): if nesterov: if inplace: - def f1(g, t): return t.copy_(g.add(t, alpha=decay)) - def f2(g, t): return g.add_(t, alpha=decay) + + def f1(g, t): + return t.copy_(g.add(t, alpha=decay)) + + def f2(g, t): + return g.add_(t, alpha=decay) + new_trace = jax.tree_map(f1, updates, state.trace) updates = jax.tree_map(f2, updates, new_trace) else: - def f(g, t): return g.add(t, alpha=decay) + + def f(g, t): + return g.add(t, alpha=decay) + new_trace = jax.tree_map(f, updates, state.trace) updates = jax.tree_map(f, updates, new_trace) else: if inplace: - def f(g, t): return g.add_(t, alpha=decay) + + def f(g, t): + return g.add_(t, alpha=decay) + updates = jax.tree_map(f, updates, state.trace) state.trace.copy_(updates) new_trace = state.trace else: - def f(g, t): return g.add(t, alpha=decay) + + def f(g, t): + return g.add(t, alpha=decay) + updates = jax.tree_map(f, updates, state.trace) new_trace = updates @@ -319,16 +354,14 @@ def f(g, t): return g.add(t, alpha=decay) class ScaleByRmsState(NamedTuple): - """State for exponential root mean-squared (RMS)-normalized updates.""" - nu: base.Updates + """State for exponential root mean-squared (RMS)-normalized updates.""" + nu: base.Updates -def scale_by_rms( - decay: float = 0.9, - eps: float = 1e-8, - initial_scale: float = 0. -) -> base.GradientTransformation: - """Rescale updates by the root of the exp. moving avg of the square. +def scale_by_rms(decay: float = 0.9, + eps: float = 1e-8, + initial_scale: float = 0.) -> base.GradientTransformation: + """Rescale updates by the root of the exp. moving avg of the square. References: [Hinton](www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf) @@ -341,41 +374,44 @@ def scale_by_rms( Returns: An (init_fn, update_fn) tuple. 
""" + def init_fn(params): + nu = jax.tree_map(lambda n: torch.full_like(n, initial_scale), + params) # second moment + return ScaleByRmsState(nu=nu) + + def update_fn(updates, state, params=None, inplace=True): + del params + nu = _update_moment_per_elem_norm(updates, state.nu, decay, 2, inplace) + if inplace: - def init_fn(params): - nu = jax.tree_map(lambda n: torch.full_like(n, initial_scale), params) # second moment - return ScaleByRmsState(nu=nu) + def f(g, n): + return g.mul_(torch.rsqrt(n.add(eps))) + else: - def update_fn(updates, state, params=None, inplace=True): - del params - nu = _update_moment_per_elem_norm(updates, state.nu, decay, 2, inplace) - if inplace: - def f(g, n): return g.mul_(torch.rsqrt(n.add(eps))) - else: - def f(g, n): return g.mul(torch.rsqrt(n.add(eps))) - # """The followings are pytorch style""" - # if inplace: - # def f(g, n): return g.div_(torch.sqrt_(n).add_(eps)) - # else: - # def f(g, n): return g.div(torch.sqrt(n).add(eps)) - updates = jax.tree_map(f, updates, nu) - return updates, ScaleByRmsState(nu=nu) + def f(g, n): + return g.mul(torch.rsqrt(n.add(eps))) - return base.GradientTransformation(init_fn, update_fn) + # """The followings are pytorch style""" + # if inplace: + # def f(g, n): return g.div_(torch.sqrt_(n).add_(eps)) + # else: + # def f(g, n): return g.div(torch.sqrt(n).add(eps)) + updates = jax.tree_map(f, updates, nu) + return updates, ScaleByRmsState(nu=nu) + + return base.GradientTransformation(init_fn, update_fn) class ScaleByRStdDevState(NamedTuple): - """State for centered exponential moving average of squares of updates.""" - mu: base.Updates - nu: base.Updates + """State for centered exponential moving average of squares of updates.""" + mu: base.Updates + nu: base.Updates -def scale_by_stddev( - decay: float = 0.9, - eps: float = 1e-8, - initial_scale: float = 0. -) -> base.GradientTransformation: - """Rescale updates by the root of the centered exp. moving average of squares. +def scale_by_stddev(decay: float = 0.9, + eps: float = 1e-8, + initial_scale: float = 0.) -> base.GradientTransformation: + """Rescale updates by the root of the centered exp. moving average of squares. References: [Hinton](www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf) @@ -388,26 +424,31 @@ def scale_by_stddev( Returns: An (init_fn, update_fn) tuple. 
""" + def init_fn(params): + mu = jax.tree_map(torch.zeros_like, params) # First moment + nu = jax.tree_map(lambda n: torch.full_like(n, initial_scale), + params) # second moment + return ScaleByRStdDevState(mu=mu, nu=nu) + + def update_fn(updates, state, params=None, inplace=True): + del params + mu = _update_moment(updates, state.mu, decay, 1, inplace) + nu = _update_moment_per_elem_norm(updates, state.nu, decay, 2, inplace) + if inplace: - def init_fn(params): - mu = jax.tree_map(torch.zeros_like, params) # First moment - nu = jax.tree_map(lambda n: torch.full_like(n, initial_scale), params) # second moment - return ScaleByRStdDevState(mu=mu, nu=nu) + def f(g, m, n): + return g.mul_(torch.rsqrt(n.sub(m**2).add(eps))) + else: - def update_fn(updates, state, params=None, inplace=True): - del params - mu = _update_moment(updates, state.mu, decay, 1, inplace) - nu = _update_moment_per_elem_norm(updates, state.nu, decay, 2, inplace) - if inplace: - def f(g, m, n): return g.mul_(torch.rsqrt(n.sub(m ** 2).add(eps))) - else: - def f(g, m, n): return g.mul(torch.rsqrt(n.sub(m ** 2).add(eps))) - # """The followings are pytorch style""" - # if inplace: - # def f(g, m, n): return g.div_(torch.sqrt_(n.sub_(m ** 2)).add(eps)) - # else: - # def f(g, m, n): return g.div(torch.sqrt(n.sub(m ** 2)).add(eps)) - updates = jax.tree_map(f, updates, mu, nu) - return updates, ScaleByRStdDevState(mu=mu, nu=nu) - - return base.GradientTransformation(init_fn, update_fn) + def f(g, m, n): + return g.mul(torch.rsqrt(n.sub(m**2).add(eps))) + + # """The followings are pytorch style""" + # if inplace: + # def f(g, m, n): return g.div_(torch.sqrt_(n.sub_(m ** 2)).add(eps)) + # else: + # def f(g, m, n): return g.div(torch.sqrt(n.sub(m ** 2)).add(eps)) + updates = jax.tree_map(f, updates, mu, nu) + return updates, ScaleByRStdDevState(mu=mu, nu=nu) + + return base.GradientTransformation(init_fn, update_fn) diff --git a/TorchOpt/_src/update.py b/TorchOpt/_src/update.py index ec0f8361..885ca71a 100644 --- a/TorchOpt/_src/update.py +++ b/TorchOpt/_src/update.py @@ -35,7 +35,9 @@ from TorchOpt._src import base -def apply_updates(params: base.Params, updates: base.Updates, inplace: bool = True) -> base.Params: +def apply_updates(params: base.Params, + updates: base.Updates, + inplace: bool = True) -> base.Params: """Applies an update to the corresponding parameters. This is a utility functions that applies an update to a set of parameters, and @@ -55,11 +57,14 @@ def apply_updates(params: base.Params, updates: base.Updates, inplace: bool = Tr Updated parameters, with same structure, shape and type as `params`. """ if inplace: + def f(p, u): if u is not None: p.data.add_(u) return p else: + def f(p, u): return p.add(u) if u is not None else p + return jax.tree_map(f, params, updates) diff --git a/TorchOpt/_src/utils.py b/TorchOpt/_src/utils.py index fd0e2e93..ad30373b 100644 --- a/TorchOpt/_src/utils.py +++ b/TorchOpt/_src/utils.py @@ -13,12 +13,12 @@ # limitations under the License. 
# ============================================================================== -import jax +from typing import Dict, List, NamedTuple, Union +import jax import torch from torch import nn -from typing import List, NamedTuple, Union, Dict from TorchOpt._src.MetaOptimizer import MetaOptimizer @@ -64,7 +64,12 @@ def f(obj): jax.tree_map(f, true_target) -def extract_state_dict(mod, copy=False, *, with_buffer=True, enable_visual=False, visual_prefix=''): +def extract_state_dict(mod, + copy=False, + *, + with_buffer=True, + enable_visual=False, + visual_prefix=''): """Extract target state. Since a tensor use `grad_fn` to connect itself with the previous computation @@ -110,8 +115,7 @@ def get_v(v): def _update(term): if len(term) != 0: - params.append( - {k: get_v(v) for k, v in term.items()}) + params.append({k: get_v(v) for k, v in term.items()}) _update(mod._parameters) if with_buffer: @@ -122,7 +126,8 @@ def _update(term): _update(module._parameters) if with_buffer: _update(module._buffers) - return _ModuleState(params=tuple(params), visual_contents=visual_contents) + return _ModuleState(params=tuple(params), + visual_contents=visual_contents) elif isinstance(mod, MetaOptimizer): state = mod.state_dict() if copy: @@ -133,6 +138,7 @@ def get_v(v): return v requires_grad = v.requires_grad return v.clone().detach_().requires_grad_(requires_grad) + flatten_state = jax.tree_map(get_v, flatten_state) return state_tree.unflatten(flatten_state) else: diff --git a/TorchOpt/_src/visual.py b/TorchOpt/_src/visual.py index 54c0e58e..e71c5ebc 100644 --- a/TorchOpt/_src/visual.py +++ b/TorchOpt/_src/visual.py @@ -16,12 +16,13 @@ # https://github.com/szagoruyko/pytorchviz/blob/master/torchviz/dot.py # ============================================================================== +import warnings from collections import namedtuple from distutils.version import LooseVersion from typing import Dict, Generator -from graphviz import Digraph + import torch -import warnings +from graphviz import Digraph Node = namedtuple('Node', ('name', 'inputs', 'attr', 'op')) @@ -52,14 +53,20 @@ def get_fn_name(fn, show_attrs, max_attr_chars): col2width = min(max(len(str(v)) for v in attrs.values()), max_attr_chars) sep = "-" * max(col1width + col2width + 2, len(name)) attrstr = '%-' + str(col1width) + 's: %' + str(col2width) + 's' - def truncate(s): return s[:col2width - 3] + \ - "..." if len(s) > col2width else s + + def truncate(s): return s[:col2width - 3] + \ +"..." if len(s) > col2width else s + params = '\n'.join(attrstr % (k, truncate(str(v))) for (k, v) in attrs.items()) return name + '\n' + sep + '\n' + params -def make_dot(var, params=None, show_attrs=False, show_saved=False, max_attr_chars=50): +def make_dot(var, + params=None, + show_attrs=False, + show_saved=False, + max_attr_chars=50): """ Produces Graphviz representation of PyTorch autograd graph. If a node represents a backward function, it is gray. 
Otherwise, the node @@ -129,7 +136,8 @@ def get_var_name(var, name=None): def get_var_name_with_flag(var): if var in param_map: - return '%s\n %s' % (param_map[var][0], size_to_str(param_map[var][1].size())) + return '%s\n %s' % (param_map[var][0], + size_to_str(param_map[var][1].size())) else: return None @@ -148,15 +156,17 @@ def add_nodes(fn): attr = attr[len(SAVED_PREFIX):] if torch.is_tensor(val): dot.edge(str(id(fn)), str(id(val)), dir="none") - dot.node(str(id(val)), get_var_name( - val, attr), fillcolor='orange') + dot.node(str(id(val)), + get_var_name(val, attr), + fillcolor='orange') if isinstance(val, tuple): for i, t in enumerate(val): if torch.is_tensor(t): name = attr + '[%s]' % str(i) dot.edge(str(id(fn)), str(id(t)), dir="none") - dot.node(str(id(t)), get_var_name( - t, name), fillcolor='orange') + dot.node(str(id(t)), + get_var_name(t, name), + fillcolor='orange') if hasattr(fn, 'variable'): # if grad_accumulator, add the node for `.variable` diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 00000000..d4bb2cbb --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/_static/css/style.css b/docs/_static/css/style.css new file mode 100644 index 00000000..b37cead2 --- /dev/null +++ b/docs/_static/css/style.css @@ -0,0 +1,138 @@ +body { + font-family: "Lato","proxima-nova","Helvetica Neue",Arial,sans-serif; +} + +/* Default header fonts are ugly */ +h1, h2, .rst-content .toctree-wrapper p.caption, h3, h4, h5, h6, legend, p.caption { + font-family: "Lato","proxima-nova","Helvetica Neue",Arial,sans-serif; +} + +/* Use white for docs background */ +.wy-side-nav-search { + background-color: #fff; +} + +.wy-nav-content { + max-width: 1200px !important; +} + +.wy-nav-content-wrap, .wy-menu li.current > a { + background-color: #fff; +} + +.wy-side-nav-search>a img.logo { + width: 100%; + margin-top: 10px; +} + +@media screen and (min-width: 1400px) { + .wy-nav-content-wrap { + background-color: #fff; + } + + .wy-nav-content { + background-color: #fff; + } +} + +/* Fixes for mobile */ +.wy-nav-top { + background-color: #fff; + background-repeat: no-repeat; + background-position: center; + padding: 0; + margin: 0.4045em 0.809em; + color: #333; +} + +.wy-nav-top > a { + display: none; +} + +@media screen and (max-width: 768px) { + .wy-side-nav-search>a img.logo { + height: 60px; + } +} + +/* This is needed to ensure that logo above search scales properly */ +.wy-side-nav-search a { + display: block; +} + +/* This ensures that multiple constructors will remain in separate lines. 
*/ +.rst-content dl:not(.docutils) dt { + display: table; +} + +/* Use our red for literals (it's very similar to the original color) */ +.rst-content tt.literal, .rst-content tt.literal, .rst-content code.literal { + color: #4692BC; +} + +.rst-content tt.xref, a .rst-content tt, .rst-content tt.xref, +.rst-content code.xref, a .rst-content tt, a .rst-content code { + color: #404040; +} + +/* Change link colors (except for the menu) */ + +a { + color: #4692BC; +} + +a:hover { + color: #4692BC; +} + + +a:visited { + color: #4692BC; +} + +.wy-menu a { + color: #b3b3b3; +} + +.wy-menu a:hover { + color: #b3b3b3; +} + +/* Default footer text is quite big */ +footer { + font-size: 80%; +} + +footer .rst-footer-buttons { + font-size: 125%; /* revert footer settings - 1/80% = 125% */ +} + +footer p { + font-size: 100%; +} + +.ethical-rtd { + display: none; +} + +.ethical-fixedfooter { + display: none; +} + +.ethical-content { + display: none; +} + +/* For hidden headers that appear in TOC tree */ +/* see http://stackoverflow.com/a/32363545/3343043 */ +.rst-content .hidden-section { + display: none; +} + +nav .hidden-section { + display: inherit; +} + +.wy-side-nav-search>div.version { + color: #000; +} diff --git a/docs/_static/images/logo-torchopt.pdf b/docs/_static/images/logo-torchopt.pdf new file mode 100644 index 0000000000000000000000000000000000000000..5e1cbdabe24214402166932721e258000ffa5dd3 GIT binary patch literal 54623 zcmeFa2b|kf);=6qNGPFqHozpIBx@2&vSiCcG9X!!d$-(ZktNxdWx2?5-37wZ33Zna zq4y<(P9T(}_uc}8zJw4;7Dx!4@7f-FCdnk=?EC)yllNQiGLfZwj_y6rIp>~xZt;FT zikSvaH*T}vhabK0$~Ie>`k=miBD2km8G4tL6FF6Ch(aH%clBYY5i#h!eP%t~hZ#($ z-UDujz-@y8*W3CI?L$#hA8f!59~jt1%n3~C3s?bM+&P?|t#nu0APL!t%>#UaXKg3K1m zV!==Xh6ZLJ2t>fJ(F_WJ5+(*WQnZ0EASi~Th}ldJ9Im(L1hIA~s5{iWPUicJpn3tl zOU$LTbRPyc>r!d8)2t8V^I9L?dL9Il>4Q!BP`x1PqX99gj~oj3A$>3qKuJ?YPT6Lw z^t*|Ku>GuAiCM@Wb{ua!vw*@rj z&pV8Jy(zmv@md992s&V@-VKzITQ47=!Qj1v4GoB;d|BniL6y#{ z!2-vNP5lmnR-sK{0G+05nl-2ag$56-%?B+h4S)|&v*!IiNFP!qWxelh$+RitA%Vg` zdL1-I&;yBRb_Cx5V zr;dH>twl=@YK+xgz2B0ZuUmG|_sf3#c(;%DU$XR|gZ{MpvOSjmIPMVfr$78o) z+iMrcsgB^vTwjsfIbvuUPcmgAbl@@GB2K zkw5Z*b65OyuH#bW!(+bq>#r}*xaLT2_SZAMIOgcR4&G_+kGz4qKOczSIP-1D`NBTq zO#eRk&?|m8koWl;=`WAE`XoV~Hgm>q`+QnI2|sPfESMXaKdI;oSJ#DanIMrzUnI8eEwnI+;aaem%jJ)_Sf)7?0e@7 z_ZH9Ius3|{Vf(Cb%sTRt{e9tAp1oaf8XvgylM2yyYT7d)vd1+~%{t+ji*zqMwgbLu zqT!$k$N%bc&DrDfSpATvu5tbpp7)n4Uc2Rn)6X(p9~}G64dWh=f3iM#;me#dJ+#oEoxtlKv)?0e<&PtNeWF@wC{nNMSTeR%PScRupu zCl7tT!|a1zJ>i<@$G3fO+27Q0W6VoV{^N@4AA9qgWckG(p1E78|M2a{XI?yci!))* zYqy7|G|ZD+di1)x<#Tk_$Gy(X?WZp`twe!JzRw|)G;)n6`u;_m9%^j+`B z-Ysr1d~yEFEq2*-ujQ|N|K?}$J(1ma*l(%Lx%1u+4-l_CovytttLaO|d`P(IqvqfF zP>9hcPT1t$x!2uXINC~XhSEyt(I3Z2lMkGK!b8iDoqw2k+Z%U%##zof^QPMmyXK{5 zAG-H)k9mhP-+0}-=#9OnXq&9qeE((9Ep9zCwafAEB=5WZtM8{iyw!7e60>HbQ(iu( zItRXMZ1Ms4@^c$U`1+r6?SEkO;oDBl(t+bXn*IfOQRI_;(^v9e?iPL?Kl9WtsJ_D{ z&vT!4dGXD}JDYvUreDM{O+rPSI1tr=%yE6IA`YjTVd}W z%b&3FS(PH-Gm*>w3c>J=YPS!UPOu6=Ru7k9dD(n9)?OK-UPhT~Sf>vjuQkS`RjICA$(Ufx@|{Bv-{NU-sB9RL}IOOo%{&>t%!n*00`|p}f|6$v)czod5;^FiB-kl?toO$#YliYK+ zotl4n^P4|BbbEBFKRWP5_~qrnB@3SKk8k?JS(&q|ufgXYb#CMG*%Pxxc7=G@dxz~d z=eakpI`F>F|J?WGD!sAFpPw=LtVM^V{=qDH=Cli@yhqW?t~~6Uo#<M3wyrCm@`qFXcFGky{H(sPo`#&*Z_r;$)wQRv| zM~t_Q36DEt=4t!xN8S6V>&1`0zJAV;7jOse`Dk|Hp0U(E3pRP}9{Q=N`_A3@-LpPl zKJn1qZ@g>lm3!~@-N{QfyZ&j9uu<={Ga$fx(51Q^UoN@M?JCJ3kELt(=u#302$ok@q-;Moj zlDKdp^6e4NQccY5cS{jdN0wGXeY9#PC(@Q;5U_S596EpOjG^|Y5Je6+cq z`%7l$U$=P1`R}`g(_RYzU?n~W%)e|6Arq6`tdiuIeG67_Wa|856pk!SK#}-_*{Mj z-;b=fOB?k42-b>94a5EaCFj*c!RoG09Fu?Q(zDLJ|HLucPI_AD&RoxRs){w?fV 
[... 54623-byte base85 payload for docs/_static/images/logo-torchopt.pdf omitted ...]
z5^GimpgIiCL46^(rSs=4Iq#&Uvyc6F!O5Robo|Hj&L7U{_p8;%3l=QB?)cSdf1g%` z)N<>vmEp7-q*OqAT7!=N7itohLXx%2 zYTsSG!d?G9Vk0{e2v#|bPg|R#`yOS`Q;MPaI%|I`iO@1Ts(sQ^U;Fiat@pPoJiO3I zwmHP7rTjpX*SNDzBJhJAU^KW^XEx-T`n2jm7Fu8;-k!Cwy?unhPQEHXXpsD7q@BJun=XGj<{RcVNL9su5)8D zyskC$0v?QZvSK~;dL4H(8V%2YZG27>?buLH*qJ&u)Dw1HUpv~sYsM?xXr-fJqd!a8 zIo7|YoEMk`->^Z#Ak}EY4rzuf8>81mI*~^&mWOn+k3CslN2f;{c%Zx6D8Yl#jsxrk zJlNcca(V*K&{34phMuY8H#f3?1=}F-jGdujPvBvOQR+9r;I27H?}nQj&mg_y%z83C zv|~iQq1WjvN1N$^@wt1Zp%t?<2YTHM-lz{+HV8augZ$>G4L7@f6I^UW(Vpx;C{D)ctU@QfX*p1dYJYCAns3%uqqeWS{-bmiHTsm?(fBQ8amDE$#Zr`_oFRjX~moE`Phxha}EW8J%RUs%=lV4vW*eF z4Vqd&I=PviObvICKxcF z8y9-r)KxF&!T5%SEm&t(Vzj{rKy%LoDKaqLYr@xws7L*P0XBAq&^^It*uc1@)5nfB z_*zjX_!FCHY{J*=)<-{n-PM>q;RkJy`Qf8A?8KVR6CelRYet=DCyzVo;WKyqSuf~q z*g`;K2lCNod##=i_|HM`HX7^@s1peG0v_C*j_)bx97+z1Hu%7_th?Fi#b&nV_cor1 zb-25%uP6LAEbugSy4g_&U)OblslJV>=j*1Odx6i~iOhPkKCCmk9Bt^e!cNfV4Q@T@ zW^zY;#|L+}^Ynt=hV^{i9kinoJupezSp$!4G<_cobu6M6_`0LFo{SIfj5S6Ze62x4 z3%-`?G;K8Kbwj~k!0S#e^#tDk%=G$5ht>=ozR{k1T=$_Z>pc8pL(cqI^LQGo%GFEV z`A={*&N|PE8k`~pMOx?4bb`m$NjI%KBWyZuzir)3YYSnWXABR?XakOn93ozGGpePv zd8g^Onjr(2tQlynLT@VYbn zqu*_sJ9At;Szec_H}K4z*l09^hio*VCelq6^#a}yRZrjzk&R~Xx>41rx4fZ)4toNx zo80XUye`>j2Cp+$G5WFFpnU^Km#R1Hx@7+mcB43_NP-_ZOl$xD%qq5*emDPaOiLIy zXf~@8{8|5i#EtN`fDWokeIe!4B!3Mbi^_9gQAshPUR?7M*&I`lz8((O$5FyT=^#gkAS|-_ZtM zv!fe4e{!Sh@w(}lUcl>~;MEg$u%R;~A8p`)6{i~`c5rt_vnTL6-Q@oZ@cuV~^^thC zrmC7h0~8$L$8OpLlvv_bsnGgi?dFVq`)+_@2DSp!{a~sK{FWNG-V?;TVbp(mF~W+0 z|CHt0^#6Uy+A>+so&Sn!J;GW0(SOCYwoKM@=fC1wk8sw0^j~qUEtB=!`LDRvBb>D# ztxsHA%{aJL0KdW~YJFl(m^rCB>EMBFz*R|8MNVm6gJCvJ1Mm5)yu~1d+l|*zWwGVo9#a4_3GBM*RG18m#Tmh zLDyFo*R!{7^)R&k><=RM%-fRu^F53D0$U|^n6TAe6Bb_We({i{#6{aaaR13uZa(g@@&PqW`%dC-nKUh%qZPj+6VHf8&*zrP1R|940<03k0oBqt9>vsFfbB8cIwa> zC{3U=O~DqDAyET`;t*vqL1qhOv0x|xLjyAq1R`MAXa)s92@``GDcV37K>8a;5wn>d zI9zYfCG&koP^CZzY=cQ3$mg{_ymhnnQ^BS-^ilAG*80f*w`fBJVB(2IP60_t2?+^F zB0rBG+3?;(m^s;8V1Ihjm7C{}95}!skig(}kyQjU9Ew2~L{}80rtt!u$g8UA>hH!2 E0M0}%^Z)<= literal 0 HcmV?d00001 diff --git a/docs/_static/images/logod-05.png b/docs/_static/images/logod-05.png new file mode 100644 index 0000000000000000000000000000000000000000..098b8a17738ea62e68c958a382ff290983da08d6 GIT binary patch literal 7297 zcmbVxbwHEf7dN1UBc+ubARs+OI%G(x#ORhWQYA)7Be2m7kd~OVv`9CO7$sAb5*S_5 zAsz3&zxV(5kN1yz?!C{AbIyIv^W1aJCt6QOgBoxjKtx1Dt)=-)pNNPUOKAI3kP+zM zTt+3Ky5p{C=1D{(#`|9(_AOHOAtGXbto2OA&^LQKN8W=?t0AJ+wf_@&yfo*O6g60w zdwuUCrsC;u0Kz)7pGmTgBvja`zlhhJRGYO*GBB z(6Ygb!!^I<0o%suJ75YRqU}IAkQqS-?yuHkzXL?jQOEESXub-ZkMK}Z5JjL-WW7mr zqw=}py1FuDd}M?Pg(eEGa#2V+T6QD)TmKERamNV*d)Chmyf8@*^4b=XdHJZ-fcp2A zN@jHw;HH;)$h#q@4Z;&Tf5>~261@U-kF=!0QEyPO^0@R8w-bU`Z?FMgn0R|})QevEz<+@X+~e>%2@#t-#zAFP)bv}!P))FJXG8%Yg&_hhiC z5Q6ZPz)%VNDaF*`%EoV=xCf(@W?HxpoOYhv3g$6TQLCi?L^Mu(%IcP|!x?1pw8b}* zm(_k1lvQvnRl*N%8=Q#?To|2i6+GU*NsgOHI;_0|ocwglzzUfARvc8sDzHbOSw!-fMtJB98*2>bjqAY%PmiWQh%J1CDOPZ&$xg+ z(ktj9LHrHoYL+D?y$JE@EechT~#O8^j zSe63sQP-)Lk0s6bR_5HHmbV2G^tNG(>(S#AZd(TKhKv_CW(5b@_p6L1NExVki=sL0?6}2Tl($K{bcnudT~#Pp8~UYY z1GnBp-W6T10?n-wTls5J=E=wrsfduy2Q6cB793v@=`miUhY&BNstLy0x-G-XBX-9Y zMZF|ij^CbJ;4cJ#J!%_9ecTBpD&0|x)KR#7162G|&ey4)fiw`ti-1-@c7XU{rQorj zq5R`Yx9ETOy16a9nc(Wg{A(nGI}ZjGKY_u{iIh{XlcCH`vi zURwG4XY`b6@-(Q`NdKPWK*HnOO9#4|EP0R4OsFxRd9iD|`E(W1wo1~XO@@@F^VGo8{N7ZzQ*_JSrO6Zw+?@0Is<+GwNB12LC3 zW{kGEgX-V#MtIL2YzS>&u6#@ZSzK>&_v?4z`n$?^c`jj%49IQr&O->rJ}Cx*q}slc zkz9DQ+BVE}UvLO-FMHX!N7dJwBlE5OCdtO~IplKdi|Iq`DjDty++6i47(ut2Qq!!0 zhX>3b7D3JcNy9%q53fQcPR*O8m)g36f_gl^i0Z5FMWyfcB@+|h*_Rj$Lb{CfinFss zy-xw>AuF=$D+PzJh3(e_z01jcGgV~2 zLl5W;QcTIQl1Ok!Yv%7Yt-=f3i7f~G;ez1cZo#R0S4RqYM&P;VrtQF|K7 
z-8E?To!!Rh&bsj)vA?2TFhFW*xh<09{_qCmeNcC5oL;Nu->YyH?er(>eiDDBgKX5V zC=X`$*Ln^TLTjY$v9z6r2=zGM+ST+&8YtlSCU~%>9b2Qf`c4BuzA0JvW0)HD=vQiU zk*gH;xfGt;n9)aHvmJlt(#VYj9$eOrQ}XTwDWUfiJPtjqo}U*~^;+Bu)qE842Q%M$ zx49tofnj5WppFOgfM$5(^mg?UZS2B*;5gUEwb5$D zZY}X5&2MsNkOm&ak93-ezJ)o3dA-*W3Z>fTF6P*H#_&~UUps*vu*voNZ*fn&{QgajiS53jWtjS7; z8g^Ldm|rM+g$&P2=d!}&zD28K?GcEOVgut2;Vah9_?n~hiK1Yj3C~+M`Fn$+6WRAD zwa#sU$q_ea6AA)REJsF3bnFHks!83@Tvin~`3RQ%8gS7a`8Z=;1o|8Plt!ODm4`Ox z36=ZO0*5aZL*+e>feA8xd^#C&pDb^i|HZ{W(r9N(+o`qL*C=BJ)*bDyd(_n!`GY%C zb6m5Ed%qi`V|I_2LbWf7CLX^NtL964DaRE2D_A!RxtMcVR6`d?^YMWN$M-F)Y(}dfZ!y`?aPjsATpU_T{=>sZWf6{15|FoAbc}l+1 zIPC4B4eU+%)H0F+ivuj#2J#R7+Z^k+c+gpkQOr7@{g?|MOusIPl@Mw-(yarXl^%st zw}>?psR$b7z|hMMZs~0TLXU#l`Zm*c?G>W7-Y;ds6q7Af$b4>(%QpCVRn2XE>bWc4 znfnEQ4&aU3sdwzuulG);8aPW^(X}Bmx^-lY{{;*e?DmQXidYib)tJg!w@fXIN7X7;%1%saN>P ze-zYE)6Wt5{ClDY>T$OXm*n+>N!2nZIeB!AZS)Qu_5~T-{F{cCYxOVYb!AlTJInTTuIdwZ-Jj zx0N_mc1Pq8Cr!t`T>Rv@j;NDUwm*93b;g$5kGyj}*K~QOw_)=TM%va(!@TJZo7)?b z3v_Z;m~K!|nxqY_7*dX++WJjj?Ga}!8z;r6DzfCu*zJD9jfW!G_h)efKxmt{U@&0g zUDnu}p1*(iw{7KZ!w=Y=TPqyHD2n#r}*vfcN68A?aaoAQ|ba#AI}M-07;NET|#s(*9k>Y}Jw+derc#hc?j zm688^o{pJ~Z8d$jf$EA=qhVW9#v4=r+he2V%=6yBQSw9RnljpjHVvrVS3@Q=R9A!JDWd^7(bSYNb%m}10H6-jy z`OR6_f1|fzDk0eHp_r^9TCMlm!ZELLxSF;~gVKx{CFO>7q;+$n9YC&UF&jwzby|cc ze}$8;B1;9TX@6BolCg%#{1r}($68Am$VeHLuo_HPWtN36d%hh6E|kd`eoT5!6PGql@U_RvM{CZ9a#pl0Rwpur=8L?j=Jt~8g`vs6 zr#g%)N3))i)M4fCphB!#{yxyKnLIJn_>3M^np$6Dm#auiDBj;U@NxJTIXA4cFkO&h zbx_uP$I+Vq7W{Rq63*pt@ZX{T98=$+T~ao5Y&x55N0d^WYhAYCeZ~lXvHM%pmD#su z)mWL84Bz5Z+x5Q?E(Wg9C-GWb4P+U8B5LIKl52dP40bZDmB6~W+yI4G20#4sKFY7EMdsMq;=Oft(xcm@38|YtC}0kI(uZQSMb6pD z|J|TtaMqu7F{90t0@R%MTli9Sq9$JRcoV&1r};(ib9?7QAhF~&aN^+zXmRqq9Tmi^ zakX&q^8W0_pxlD3hw77xJy>o5DpXW%96fW!!B19R z5vH4k_FZlRKP-3iSg5ZI4bM6RtiJPU@RR zLMYl!hwZOqWyF&Gq%Ok&G(x64*%s%@W)yTnZz}7}KfV|Q$4OVaNtMVnAPCVvd4dB` zzQ&q++Ubn0F|@*O*Jc=R6ax#tJl<2(DL>t2^qTl}cI&6=qd5g7_0AG0lW)D*ilyjT z-;p1$%lBk04pm=yBoMei#=#an0^+?p82&S`wl{9QKk3cA?!WYP0hJueho|Q5jGebV z^)GszC6J@Uue4_TF;{C&AyI58V9#rfwoKi3(vimXHAU(8Xfy@W<*w=tp7+r>Yf!cE zm}jICgGPv-vfY8;GA;<|YuWP)^fZ<1CUJgNwmR4lqZ9{XA8x_Q#j_c1wYUzmN*3 zjr&1hNt1}cFI3eIt?wR2Nl@r2;!Dy2ueue&p88-eG=NFFmdVY1a;p80z-jimws6+N zsY;Y5BBOUYp8>qmeF!cVyr1ZgH z0o`Q>4TK$u`|HyMXDQ5^*Vc8Q2<}MVV z-eT-Cz8>bzRff82i%*EQ8K48szQ2jf@Tw1gGlsqa9r%w3k~rUI2~9UsIthsZZ!OC( z_To+Z(E?{~bltmjh7;-NMPH)q@dY+(ri^J}3jG32@nb@+F;tcj!-N^u=5ZOB`=W~H zO!@jb;U8;b5)usZ8*DBok4JW0zas~t(guG$=7PmpP8DL>mBT}?*a-4mlJ3`4$`oPO z-`<|D2^_ul`&E=g}Hf)rHvgy%qvp9YXu2EF3^W>Ek&6){k%{R24`!rf(Ch=Og62(vALZj!uwFTkh4%? zcqj@0_;DRGQP zC^s6MYiOH;1l^y9cwV%tv{d1K?(urc4dV>ojIBfY5e`EL0yE9DZPzAgbhs zL=LK^O5_2KZ&PMDFS^JcnDdJYb~!V2^hQKVY}4ZU=6mk;O&)znBaw}N$u2+)m7ng& zH#2E@*+hifD##Q1^7a@Xf$R!bVUH%K>t`4W6Gz;|go#6c5dMXspJUExO7;NdrUj2u zlMp83rCD5EI&U8#gF{sQ%mJB}+df6kG9F)4e8giT4xF=y&!Q}?s=E|30XZYn68vP< zhZ)qkt58SjJN*+E?iGd)CV+7afFqJ*q&>QAW=6Ss`ZN#WZv;`A zeE}6w$xC*2o2eOf&wHo>P;!mEjA862+=kBcO8ruBN~j%bKXp0|V_K{km^DofGKDi% zllVbP!_E;wDpMCcV8)^6vcI0ci>>5p$H9Rb(9J-Dow1fG1HOH$VkTi>W8N;M{@C}% zp4z-~SmhO;cxC|1iYbTx_@d#BX6W!=83vCZi-OiBJPqHkF4{rs74$RJzsRJ}Y57hs zB&&lOgS|LJNbqKHs}x0RH%NiZ-T(vtiZs!cml}*eJ>zaOl`P#3h};A-JH!)m;nRP7 zm-rT}Uzv8ZNy$15UnHOas;KC2dFfWhS2xtMk%hPF}CM z`}0BHzu?@hRdXc}SEaH9XQL8@Pr-ynO?c<;NJ$-wE-W)Q$IU^XJ*=qsN(++YK~*Y@ zecGBN(S<@FcfBKYC`kh5c78HWI3&&qsS_6AVM}jE%BIHAVh~PU(%nB^*Y*Q1{5pL( z(x3FL42=4h9i8itl+8(fW>u!Nuw&7!^~#iA#7hZLMx z+r!fw8vnMgq}G-zjkUngsKR3y9nplVp+|`8>Vf`xEP9`jV0|iB=z2GSNIk2rLsr;J z2UNbnwa)Wf!dOQTV#Y0*`?uV;jvlj|fN93TG{@K~<{3e&Z&mv{FL}3qlO<)^Pq%n1 z_@~|3+2_m`-`yAJ{YgSKPeQ9;L{80Muj!62X^)lF>$3|3!S85ezkWLvkx}|bqO0E! 
zm-VnTiZND?ZKbv8t=X?Vr@?m`Z9Z}OY&?^uz`yhj)n-*WIe&qPtx#vSIo(4oYFbL?imq zH8C{JEg1m|eueUWE8k*h!{aMbS%~o~BMZtwqrty=T)Om; zQeq881#VRr$PTi%V-FI z7?|QG!Fea(Yxn;f20ja4)FfbSPXBL+?f-X?Z#OxCN2P7=f)gvS=jK*~|N1kRSj}_- Qrbwivrt_={Y!mi>054Y?g8%>k literal 0 HcmV?d00001 diff --git a/docs/_static/images/logod-07.png b/docs/_static/images/logod-07.png new file mode 100644 index 0000000000000000000000000000000000000000..81c753be8214e548254e0606b979de28ea13cb3c GIT binary patch literal 158465 zcmaex2RzjM|KGFAjv_mXM2_rD8AXW7sBn}O4SRD^MpnbhZb(H@5}9XLHd&cbwww{l z`hU*d@jTDh>-WFc>v7lj-rx7K2CYO6wwmE zaBYfiYEN%~=`Hyjt()~BztD8?XE~d@t+6H?&9@HT`mtw4TzsHJERcRi_i*y4Wq!{g zu*$z*x|83mV+;yE7MJ$K?;?C9v)IV?Zz%Au48ip7CKs_iL8|FflKGf%4L<+L)$m+Kr%X~@WY=ZgTJ1GGaEmu ziO6>%>Rvyqe>YIybUP916r8z`v83g?0EmVFk|O{Y_3dNFSJU%&!Z(gt93K zw^=g5?c#2}+Uif#@NWW$io*^Bx9kfqJGkWqDsWXF?x+AR^JS7?$mp&e44K!8!GHW3 zbuv-9tf4z_m2`RtS#q92n#d>hwh)Mypt}$Bfi7dpX$e;_w{}>~^2)wV3w>C=p9pq* z2Qc0rtp2jbR*vKZpd6|#Hn-N}@!nqxT#)8E@yb44xM5s5I=@U!0ANUhb9V3b@gd~@ zhvVfvU^@R`Y$uARn3?^@O|5PceLaU^2a1L6RGTjrelW4QNenqT=p_xJ82~bQ9)yLx zKP9;K{V6GO*)?A~UxNf(jW-XSjSEzQoKvfb;Tn_mFR6uqi2e6RCMnW*KNYwEClSdc)YuQai=xUd1^V(YD`MXQe zTLLCKw6!>XHSkw>h{69)?t|atD8eP4!*Cg)++wQDy|~2-h{4x*8|fA%(ahb$NN>}g zEO8m4yyG8jt-TGFcwrbxxy2X5y&XWpz6>L`gQVpScFoViv;JUBij+jIo09+iNp3ut@*!f zP?T>6N|;;zCAx69=x$2QvJfcaIReKpC_IE5=(2VpdY>ri4loT><^&Yzu`LmAv5S^w z^!IF)t4t)q?AnFd`jYq|FtjXbJ3p0g*CIp!C@`dq_}OO%vn) z>alk=&gjn>OauWDCS(U{SwgqgQP9}|N9=N)sA-RbRv}SrGEqB7^}%voV%F|rVtDk} zK{=mwLUnqTm<0wE;4Z-syN#0P)IBd;Z|E<2QhhHIS+U!Jm2|q~#q5O6r0>C{Y&{=2rb*DZKMkKVN%>#BC>wD`82 zGUNn2BWN64$JMmB7sC#26Rx&;hmXDn4*;QA;V}?JHiYCTDlRRO#Fe_Z*G^Q}?Ah|p z5N$XWjXc{_B*JxEe1Dl*VAY-&KX>AzUuE@CGB8tO=GxW&tL7;JBaHM z#60V`gW2v1Uar&88h;wWI#_N@Ac-LuE;)tohu58~xGA;@X|&1?RLKt^lZNO1{^~&g z4^$l6u7KL1|C3?xw;aJKphVHruKj{6Csp)z39gyHw5_zF{68dwGQ}y0$<1#Eat?9aEfZy|r6bzJrENPTN?9d)g7?M{a^`it8i(vb z!quBGa4DA94I{aQ>(wq@v2vY?Cjes3!);^X`0AZqa!%(6AcH#$mgGPzR^yZxCNB37 zzFaae)rv?CVq00TJAha9g749Pl@gM)y%V27fUo`1cf{?axPOYm$os0uRYv z$8)-EkR3{RcN_>Va!f4~`S;xljzspMy zAfhEnq0HQmQN-I6cBo-GL&{V*5{M8UJIrPM_V@=Z$Hr_MJQf9C?h*jf-YPgTPq!sX zL=DPr|LqXN1y>geG`)ivNE4IfN5r9#M|ge*v1K6QiAT<|6pUa9O2u$IW6lgQ)~p%= zS;*bR176mk9|6+rb#vdzNfDe}R9&$89c}5VheMw7h!JJ*=r&nfi+~Zyaxm#b!oSZx zFmQN7X8wVbwzK0vHNZ^1JDiqC3<_Oakj9?sB5^cu-Mh!RjMJHVam@7AOul9sFP#_iI5G1XL!1Wo92r zH~#Q8j=Lq!JrXgzgJATBkTuJJw+Nx#o-+--}AK#YiHVXI@K9xQqkhKDgS!%gep`V>*hhT*X z0Wx~Jsz`PRBp3Wy03o1`hXi#pt8; zzkv%rJbam2;afgGbXlt4qUn#iMbaieg%1>R-MTBw4E~xHx_m>+wI{H6M-C~wh0J81 z@h=vqX{Lm0oe^kVpqnKS?f)vwr^OE(g%~W43+}qk%{`o*I&H7UktY`au-t!XlPVPf zgS&6ZW&%Rt@t<|qEQ`m3wyZ2V9ZG+@r^v*`S-* zU~qRtF{&YEtWJKEBnaN5qASJ823UF};fG}$#%ixTYg)<2p*dR}$&PzwZ^>1lP|LiXq) z!Xsd%3rCxg&@dTeyllhH;G4TQ7F%oA$mJE7-laHj;*{=x22 zP{BE9JbxoHH$vJ<=PPsJMuO>vt$Ca~aEnJ{O?dMwIVrgB+SpIc1*^JNOf)WtrhLOW^+8eW@9_bhhTKxe=` z+`~fYE4N&pz3%hpB6Iv#7d~5PTFCQ8PV%w@Y|?;Y-b~ST{Ji}iIWY4(2pH@qjD7V7 zTCy~F{!C=9GGux1#Zz9A%{m5yN@osZFONIgNivoKw|y{k4$$>&_%eZcb~<#q?nNkO zQ#hjd%uz(6vH=eyW@sR|30__|oiEWLWv6dZ(QMw6%?t7%$ZE6wdm%IUQYzqxRGPCl zuE2_U352A#YIQ7u>NL}QcfBvZANxymj@XX~Gx)SXB?^ZCSK~_vooy-AUHzW+aIREj zE-))=afG1#KHwG7z|o(Rv%+U;=oC>LJJGDEY6Pcfaew$ zP{4T2dK(&A?mzwcKK=E?;*ru|V0G1*o3|H$@}wV>C;NAyis2HM`|9(l2nhX`y`kZX zwbc>O5~eiC)5%fyFt+r1-}rT66UWW{zTKC0l{zGnp?0X`i_BOS5^g!FeOVcqGc7HpiuC-*v$7d5kQ=;>Q z`76$)Pt+!*SB@<0_EX%ny7&*#mY#=}-z~0-VSM1iPyoA)3nNQeS&&5ad@wXr5QptCxS|SIx+60tvOOjZY;|;D{NI%DDxo^f%Rrg zljn!FD%VX3;r?J-+e8D7bmj(cLw}7b?)5BgbWw30 z9T5lN9WxBVL8$P1b4AbPF4IjLvXY~l4eyZ<17fYkS{#&6@cptAdYl^n2`8r*Ejcbzj%B$ zo0#V8ktT})?^G6a9DG7^lM;lxm}TmR=UM(-S9AC%>fUANpGx2^e7i3tN(_SpvYuH4 
zaYIa;DQ_}I0h4kc{~c--?rPT$tdo?i92LquAug^CY8yVwT1QVb$m7%}@JnC{;1ReN zw3j6-!eEdwEh|d@b=B^kgg_i~*IBjdtUsMJDigJ3qI}y!b|US~pseXn z216oO*ZVh$Be5rECV9=-2s82;`2G?o@ysgh)Uu8?&^T~B62MOK;W35B!$QBPH}Hd^ zx=mw#ZcRak^4Q(Pj)!K2A8|!>;D}~nd@HnG!2vve(W`=~BsH70*q2}$uoy{asRhWPpeJqt(7Kcls3)sy#F8sJUtWG&=5NN64T;Jb6D z!EmXV3d~?$i#WB=6~23Skf1^ML`T#FG>N=b<~){mq;hj_M4Xxg2>RO(7uo~7{$|ZV zS9~fbP{dP-povfL@cfVuXu^I7ISB%^4sBeAb=nD`%Xm>>3U&bJ#aTb2zpR+#>U#-t z&N&}~*zSAER|Xz6xceI_rANP%*;rRxO;<=cnrZAE_1J8I4g z`w0zfja4KfegtW|W&;gSV$S(oTX#yaZw&|x=@KJ|g>XCe957}ub1eY%yLM6Y+dHaw z?ogNd3y3310w^TvhBN%O5aYXLnAc>ev4nvp{8yMAMDdh=OB9VdwD}j}2}2fCKP+^h zTX4?m3koXl*QeDwCwP~NiRR-uX-r(5@Qz^xxnu@R9P;d~(ME4Y3EgRUEqgxxrfv8d zaJ0c|3Cx`cm(anU_5?+50ijOg)LRCId$!gdIe9O~%3ELHzZB(y^=}wkT=D@em$XnO zpEzl;9MM`>8Ht~^5iocIx5s1U?88d0(Zg8zVi%;XVB?f&K$$5I95Q^*bBXcBk>>CCJ?H?L5pqAjT!Y3h2~M z^bjSd0H5%Po^-v>eHWB}Am_>nWiqu9VO@qJIEic#mPbFvjnX+NFB3mD zx{Fe?KD>P&P?_y?teM8`p3i0ad9ILG(;a`!^L6)e!{)E&9v&026>ly; zk<*GdEk`D0zP~wJEA`>+vr$>i;B&67dq48>nAXiSwG#PtXk@o19*rcDPz+8~8W)9y zfSqlkD7uRCJ6MU|6W%>^hLAaKWk1zDEln)!uM11E4^9$Tz3?OAA#tOFcvta0fHkux zWI1k`+M%!NK`x^74gR|{^?o@MA{gNvV-Fgsyu$F1%HqS>!a~HWZY6K~{|os6^y?RV zqe32UoioBrdf4|7Nhg4#uK{<=`sNL|EaG|SJfTX<2Zc*7&vMG*Mx zojDnUJ@s{k1&K6FdR<X)-Vp3DK3ZM`$=dTPFHa}NUvQ>A*?t}2tLlMN;KXY-q{|2g}*^{ zFp#PCs|%Y0bH)pL1%a*%dkAXV=Qa84ztm>}lVkZ95prbJ>H7Z1OrWNEsVzp>*|N=$ zZY0!KMAD(0-GKJ{lOe1iGPysyS(|?L3v~z;9;=EryhPFl(UavR2_q%WKKWcC(Uiyo zy9rVBD7;e=RtGi84{tS^`6tD1!<$c$aAi$MA+-<(@C4ZIP+M8xN(!8dgJP^NhJ-6$ zS4@b3p2#x1AKV9WKXOhM`GrZFs)Xl9`4OK!JaUyVIQdOLJTZM5RmyWu6moAjvmfUY zo&@xmOXF{8wuGmuhgIe%t2jo-dn`Yz_8j+rcCYsJ*~UG@^G#_#;Tc8~M29cnsT@({ z7a>5?!LjmVjah`~{2qSf3JZoO#@$QBhw0~lkZw0$%+OV&E^G{Qa(IiUDyjl<*I5)CVP#)0iVzlX1_<}jZM1P zd5OS3Eu4Qn!S!R2A&QwK)R`2^8{$Z<6RQEKtxLBr3RN&Q;?$}_xEFrN6$ARatRHf` zL7w`NN4c@+vtboXy25mLco+B?N)QP$KZb_{79SSqp!ZPQi~g7=qcG@g7HzbTU}A+^ zg*F}u`0J&+N^S_o`XKJy5a!0lFDMRuC7DGt5&Olyt?p*)7DZ4}3H?c7NnfVUd2s}C zw$HPD;TL(ygaHpRMhEl4F+rfC&oiF-TqbfTeZhUMSIbYnVr_m~jlTnZ9Tq{+TsTwn zX$)&^_Cwiu>Zhnx<;6U`i%36ASf%*@OxDd{CoSiF53|2)t_y~m!=$*sNLP#pzeL() zm^RZifoa;EU$r{CPEvf%EV{jEKw0=ob|^$}Urm_(tLTN4VOl4a>#AB2_D)Hq`OV5@ z8B|1qRN(}X;t0Y;wsn%Mvp^4mBqz!)X;y_g6O`EZTZ;EO?@)y-g>m9q$TCVcz~<8P zNl`q>guk1)=fZEZW(%|Ee)`>6re@JLZ7qYMyh<1P`eo+C2_d|2db-c_^eal;BpdYa zPw%>yzA7ITz=jVO6?F&B{R?3Z*jLMWKhy55N;^R*HFelhYGpL}&aYzN3KHf7NV#7T zOvI{4O_)8pyCfp=PI>0c`JpLLD>h8UviJASk20hez;rq zL*27&=??xjbSZ3q8%+mtX}Yg2f&y)HpX3~iVcA&_PM*z#5jgRMA7|tQsS3nqgj=v`|as0P~l||D=`MR5i0?6iI!V;y4;PHdRcnor@Dzc zs#M-lepGH#NQ7L3=7y67axY472&pm&(h)ObYVzk~F#N}*zQ3ger@?H%sniBaHz);4 zH@(mD(?o5RkM~XTSEJt-&7%E25H#oyuM4IfkF1kWgQ)cLK4MKJ?uWW;Xh>kla%E^= zA>hx(ELuQu#h1{dz#l?H?~9M5umKyeA8*+-3?XB8jXgQy5X17*Ld{24e~(o5?q9`C zAfvp-hX!;ljA~3I(AOPs1is`RiH^paojCa6>jUOl-*FH;Lo z;q`bM7P72q7G3M9-~WDfiEyvOeb@wP3YR*K_kwtNNhS6C+Xo<}@eXb94izDT(oHq1 z%I0wOuB=`6C>>tzQxGh&cZmV(5S?W~2Z5>|1jn;J$$KP8_T;<`UJ&Qb@t{pF&f$s8HNecM^?y2i)bxYkR6{Q%FZX5bjO@DC|oI?ys)6afJl7w&4?#W-o{GG}tf zZJlXGp4=@x7m&zYgf}Uu3;sxg$04vHC|gs`SDOb3o`WzHz&=tc1lxaq&S4Dn%paaK zT?=Q;Xj;3pKu7`}M`6Z=-EFY?Gchk%0NAF3iCOe8^Lg2nNQ#-vnMy&5gxYZo@hreu z-`<{pdB^)oWsZL@5r{HNhS`h*IDsV$PM{3N0H@x|Xfqz7!SGy;j(4asNV#${B>ve6 zEPOiOrz*;yZxNnDYcS^a^jQIXF=JXcwSp=<2OyrS;W1qw3YB~w)H&eh- z93r6D2U1AVt)t4Iv02k#e(tHpH#awb(t_~#8@dB-az{bScL#)27ZtN;>3MxZM}-#g z9-boj2vPYMYY-`q6QeE!*&c!$KF)VSh+z|gUc;+*cASZCA4N}Vheb#fE{z=6I`;&< zhwr*&5#&uE4Xq1LcajN_6qKOs7v7prKyvWsCpr#Ro7Z$OrEEV%p^^a)V{8&bNbe3N zE^yd`u%gN+9E$qi;u+h1mYgU-5Ab}}KgE*_7mpCA@NnuNyqfLcx+eJ#68L;w`#(si z;Yi5-jidx#&{F>gi4YtKdfXOk23+wUa)Cb!h!F?=yc!-QAadlu<$unzrs;>QZWh zcjJif#D`y!+%Z;5>v26Gq?iv#_^r*GofF+`V+w}@Mz4;fzVw{(Ojx6(k@lZ>^r%dc 
zMSQ7A_t!6LZ*Um#BA7$tf1zI)}u6|9bSjtW05U zsjPk@;^TqR`4h+Z?c#6L(^iWzfMNlqDu{hI5K_qDV2l0!VXC3e-7jflM(UR&lKp;s zDmS?$lvcvE>s1-W%2zPmB#E$J84;Ikm|QdU{V?*<#+u99Y$T(m-)esUBk!iFXC?Jl zxt-_6%BUa|Zg>N-CQ-KW_<9EOE#2ay7>ldjdP#=mVd3k3Cu^PtigB@yT%@0)tpsfW zy3~$Fn?vfCX%82-X|NZ%rDUbfMb9(w7x$$1r>lc4?R|YEqJmJ)laDB``swUWuDc)K z^PiJuk-3o&mE1YP=VIi%k)(JHg5(a=anTrb-jB246I5*R(HJS0wCi=g^epP(pTRuQ zLr_kdGZ<{dZX1gN{zoFv=@LOdI=CkfxV+wftkb!{dBM?7kfsvwQTR+u#<44;xWL`C z!Hg}-nBHi0k8(%B{-8(C4%e^dxItqv8V42G92erL3%bs3r2gshobFcbbk=_)hvsr- zT<|X27lD8VB_$S*B&tsM2E;|Y=v=sApZM#52>boX#G2CQU6K$3NQm{4YzmWLFP$hm z1^n|DXzvtO8OM7;C9U*fbd~WE!IrE^z^IC2U6kdMs7nRTh1!V@0C`vIr<$7QgY{B0Mrfk?~7yllbQr2f(x=TR<)+U#pTZCR# zWrd`U(pvZ5AZwY7;ucnFvJJ9SdLCsyv7!BKrPN`{cVpGVeMy{|6jBM3OewRYFz?k~ z(00Dz?52FrO8N4A*M-8n6_b>x*SYvmw_ zmt*6w3SE-@*;Pf)CNP$o%omMRZS-peO&9ZJyn5-Mbvnj(V`D+1{hJ=do@NiajL6ci#2<$i*_tq4o60UV zky|LNkU;X5a%>zRHRR$_Lx-&#+t-zr;5vnwW1O@JE8(L6`{t_%zbT9gZm2d33hHVR z_jb039}z9~Vn`_mB}=n`i4=RUUzZ18=8Eaat_o|BgKWg1~v0#PJELu%NH0-tV_FsZZv;%+IRQ}8rzCOu%`rD`r`0bW#lpNbR)v)Xi zt`rwgW&FY# zf$?sU0Iieq>0rb!VQmJAv_J#qH~lQxOP@^zFj3!+&Tw_nvjia^<6DE&^0r8Fp3LF8 zUS}j=I)_|veXij`%99FXS&Cq$15Z!S*(ebY&=K@9(@bdC2uceyg&@*J@hNGS^^jp)}`Kzbk8( z8Yyk!>6W`ZM$!x4DC>3p9$l10pj<)R!yW#_RUbJ|js}~LoZo0r&UmW2H%_TBZ0?IE zejx9`iO;$0_pD}1#f&YNwdkiwmeyK1j>$?>c{sVVa5+!{dtmUjpj6y<`(GXW*DC(9R3`G!;~?YXXIPR7ch6V$w!8hRu}YueF04%@`6{II~fZ1-l2jezhLLbfOX}g zI!pT*I|59}LnF3eRTL!3lLeXHHc(UE7><-J`j`mur+ef;D_i z2U-@p=c&BrTjervG4Yj#qe?C7^o4Fc(-qw%EIwo@i*v zC?Vr|7YUEl#Xdi~&kxQoo!9`Q@KmrV)U&0vdA!!R?px&|+<%`5{h)A<+Ci7|_DhPs zzUR{9sq9XakV;%S0;@T`-i3AltHD4qc4uMU&lbsHj0v`8IA+SN!`7)uK}G^;+pC#J zR%KYcexji8dJd#LQG4xBf zMBBMpCQ07kAEeIFm}%9YiRqkOIJLW?3rPua9sEmjQGkL}g*&;h<2J4>l#p89xA0`JD4pL6Wtli* z`tl`&hlOeD3f^@=&{TqOBJCvwDHlel@UhrA&-lnUG-=Sc@i>=yN{d&D&XwedwBdKc zi*0M4Exc$syNivdKAe7H%v{g~;pqVSJ52$x zwiFiE&(c`CbxBjAe|-X_)HrJ~?XT;Kg;GSF^c}aVnSQn@R)w=>K>?9BSqrSNeIF)66|! z@tbz`0Uxc-v5fJ*t6yZvQE}%_gz*g}J|xq_{TzTt4cH7lnQgLUk$RB~zn6j{?Kj3f zftM!qYD_z}+A+5JAn*F!kZg(pgnU+H`RXGp$A?yqu~xYO$&kZ9RRXE0k*U3P_Z00* z|GQ<7%E}cG=Tb-8xeO$4{#IVZ1kicK{W5x`n!NGaY`*TD%&7y~5Q;M0JLa!R^2W1H zOtpEMe1G~{aM?TIM#WAN%o{Dzgr;O3d=$pDosQoof3+#Oa|Uz|1T^8hz%-_Qxd=d_ zO5*vJxR7r4x~SI5=g_tHaY5gFzq~8ZxuK?1RFp_NMggH?K$+wB=OC>eDRkl4+YQq@ z^h$E>D)gWTi+k^`q`NW-_7O~Is3DI-g3WFRrD#hGrgePkf$mf6PBrBfh)@o;ryr=k zeBii8>kQ6*^nf+lDE804;SMc8HI8~qtseaT`WVSEFS1vI=3<^J-#ybSdtX|yY3yUp|U;SP}#UA z=Srl?>)jITk5cSpbsrxF$-{YN9Q*9Zy3>_Va1uuf%miD-^M48XEnCU4B~4cd;7grm z>&hPZmP1WX-oz=r0|&0_hV>? 
zzh;%diD-o3hq%(Hb@{rXH(uAoN@{FD;bIuQ$Psn+#az=u?!G~>83Ue)Bh&81`38`~ z1vahKuscDTxLBlM0V={yt;kR^o_$e7W4!=X^rmMItLi^_R6C`)?A+)#Bv-15L zuPM7txFGa8qaOmjXiL#F`UL6cN18QIyTCT=&#vl9`T~JMv^Uk)h|8PbITCQF-LVE( zagE8o6Vy}~{?nR)m+u3!0VixA%y6q>wRF?8!o=ANgmJkcw&MxTbc%RrOT_S;)CCU6%mEbW>1$kr7IEWwf(Yyp=C13S+n5jTI(U4lrZ@(s4T(_>i794C<0~rQvU7n3#Vbb^Qhv+DvODLes8n zHaVLFb>h_wa^Rh?ZfmY^Mv4^zb-t}7jScH|74coaeZCU-lIaZuR1d=C%DANEfrV;c z;Ajtv50^ZrgD{FFQq=FJq}Tgb&5eShvc8oU_OrxUQExoWk1CE@KEj!o_|gCR&~MQG zLx%WJ>R=k%VD>^;ixf+fMQ&W?71Gx!95!Qz?(32xRro?0k7=xX*X_Fysu~XEB(k-z ztn@?EXZkL#UEs77g%)&Tt5=wPWt}!i9Q0jj*VF&5KeWm%Uu*xp6vG5zo`BNOl~dl0 zmO5%PgsK&$%IMAHTgna}sd>bbUZMbj#+1x-@od_m5kJnOQG&k|=l&?Kcwbese#8DY z{?xGV=ifBJLfHXe4s!-yY=7Xbvqt{nPDktsYA11M%9uDwI@L(i4Cb?O5~tU{mPkNT zDGM6;d?y|vE@bZy{CL`Xd~~rDF_z#$@0?_Ig2Lmdw;0a)H~rW; z9FSaGZbG=;C+CP6FaICj_QGU6P14Rp2s#>LC3ZU1bM5VZZ^l!XdW&jSRuaC)BYz_e zghxIwhCF(<@v>0hOF-!Evp#N* zs+nGdxTg`1;+uel-NFKrF=hb;B;$M0LJUScZ&Z?wF&Z_?pjIn(n`7Rl;Dgr@) z59jk$;v9Vj_WgQiNPj_6PH%Wc_~3|^O1-*Ivbs!oDQIsYQr^vno{S5UY^tI3KYb|_ z9#FExjWowc26A7E=}?9|yJ&h0&HajOkN9^T!BRjS!i&Z3mID_nub{o&SEBkgFQ;8z z83#GKap0=E!~#^P3&@gA(ADTqt!xwbR{f-^IsBd1@I=4iDrTrn%-TP9~^Qx%0`fDV7k8a+H# z&^;gV&>8H}0?GNCggV787u0EN)#x~QER$Ko z1At@L@EY{fmdNNkPem>2F<;~AEt_#ULISSiwcsrJ*`63RXEl&|bScWgd-pCS$O1e3wZfY__o`hdh1GG5X8J zwK1#U1YZ8W-yHL4aev%b`m$PGAB%F228jOjZaZiZ38bStm-VV@W_T+&R7wXHSZ!6zH!UkD(bWh6JRCK=NF6uVs zj!vCu>V^VcStCB9xi<~y$zEPN&bJq$G9I>kulfAPOs7l&!km|FI&StY0GXma{-ky~ zmq~x%8{O@XqV|B8u-_Zu0^t4T!=yHAyVHsDQ&!XL4Or;al0^E#fl(JWojVXzke<{g zGisv0AoLvx<#@?vyl&-?7&WLAFA)1_NqhdV)W? zCFC%Wk5nk;m|DSv#je(~pXi~?IOlJIlv7{IpUFHs7($Ca@68Zgw85A|hcTt{LlhW# zHu_0sn~JGHYtqT#HTwB&CX&-7rGyH z2U^t0zFxg)A?OrYnU3@+hET4&=xH9dgJof)ZcsD%Jo7779X(T~J>&@{K3g*-o$j0v5?(zB2 zCwEC37wz{}n9GgpYmMhDszT6T!AA6Sj5A6$qlv_P@Iutk%uwo_`<|Jf?!Th}MFo#G zaptfjW?^x42K?|iBrjIG7yI3dFLYYj#!C%all#e$e#va=^9$tLk z+jg2qrR2%WPsNd%e2nNK!KO|d*$mPKYshO@lr0!BuoB&xML`asMU%tb?v%N7MD zXkDQh24`J)N4j<1^o4=w)=U}75m_d`Y1QI5ec>)hepdnCdk z)FZ9$jsE22kfM{tJM@aJq>WGdAU70?q#NL@Dld{=EJU!PosiyVe_dm|{@g@T<&dSX zGqa9*)-~u?c5y~`6i+ZozL0gHkq|_tK9Dwmx0?BwdXJ zB~mDv$)&{F&3ep&F_z`uEZ- z$e>IfDvB{`geWFC<^+@;8lNWx37d0Z-*koUtAjVS0uSi4&p)gCg%OL1d%WnV2Jz6qNEiT9ABH1D1qCF~%s?6#9 z!)0%|r+Zn1jXX6sfcOrKF3A4Xo?>+d9gYSG3Mey;^jbi#K;4nNMM%0np@G90M} z1NKv(e}8ylz%A`LXawB9NoWuO(D2hN%1a2>McZG0QImPgO1_PA_!R&Ain z(yT5kI8Aou>4x|uph)ymi|~+&J_*EWu-i4|XHtKx_qn!uk}qH)CeYCvoFGnuajHI$ zT4MWk7bNM$K-ps6b&+zcY|r)mc?umeP^QuGd^cxZZyi1t2-Of-Vjs-yb4qZU3MA&` zPM2eHcb!iz)a8y}aVd0k^1ISMxag$YLlXU>OTX>l60P6GM!Vdpm(`tJrQ$9ffre3` z$k~*!E^lvCpHs@~@((&{=j)|=8{HbmzZ#dXZKfkMV$Ujx3Oy(S3Pr2mFvu7(z5NrK^kzkGe3ZyF>OZ663k|gUi_mmRRaN-DCzU z5Mb0E`A(X_n{4qTVsP|J|6hM+T}{TrkcwtQr|i$d*ZQ0)843|zF^X*@IdZ@UI-SK< zRS1EC;;{sk0?6i#cgv?`JJlLKG$@rBG`jp$puW3o{`HZ35BZujec+#rqywL2IbVf> zX+zNQP1mGRAuCII9<5{>La9O@GnBN}dfF6}EY0dI&t^r_7erD_AvF0_Nwv}tMUs_X z=%zfdNIBu-;#yl2e3FqOt-G4!ECP)l6!woUp@S%k|*>xWjY&TyGfR!mFk zypwun-N@|xn#xx&&oOoQbX+qF>l>$_AL?JLV%?XEymW!2eDTTKYf1}SciDNDN>g4S z%1wtKgzy&Zq{QwJ1e*Lv#6&h(8Px?+bP*#NY4ZX7rn4Qg*;&qz*JFt%s{)HJ!CuAq zm2+$^oSsTS+zJCSvdU7zpjzytZTmcSZnzhzVnOQu3y?9cq7!0X38kFygeV?=edY5>mNRy; zNRYgs3}oF=+5LVzV1JG0eTDgL4Z6<2XlvJQ5D7lZPFMYTy-~eir0F(B=S$J*+KUK9 zkxqI(U?VR0tTGm0_%uXZXBK+VJZ$^MF-xBl%5jj*m^SveS0RO{c>(Jk8i4f&Ka*8h>JVsyZm2%c?ul;%nA>5lh0LWZ)o9MU{ag9 z)_C&#MGR;3WKEG1XXs?;3fSmumc>WIVJBU0!G^mmT2DhLN2t+v6(clUgAZ{uPy&Z} zZydPs(7?;~CSq`%3=HXPfX^G}7|(EIL@Raak+Ehu`EHzQz)>k04$)M1_DYJ#;qN6i9~+OC7juWUnOhIyW@x3jV!rGE1AYj9E?-N>++ zD>%=a36CgH`+>)-$)VEuC%2!f2fCrv_-*35x`btYY!0CqR&4Ng!Hu^*bC}#EiZ;sJI*Uwe7n8iFP+mm1& 
z51}s>d=NB}m~72rg!b_qZ>JmTKLUs?)CT(DRMTlPAv>ik8U&peY&f|QT7rc9EIIT6XLg6Y1#XVH6)%q$N4NK(>-V%Wg5 zM5v>9Vl>q*7aJ5T>uHU@^+cdA3aa~aYk4G)a6p6ZKa<>s8D0jGxs+$LF8qu&kF%y3 zdtivIQ|D-HOG7|^RRL8;s}e>n-E0h|2;9I z?ej?$UWn%OSHr0hg-n4Bo`W35xs;9{tjzd|t{n#&I{#PsDJ^i8NtTG&MdmjZ$soZb zoBlBu-k2X1+W0juxzXpIU~Q;U${?FDi(H?HCJAT|+PE&hzSh3cn1}G2c4QtbJ}t;8 z)j;0~p?hvlqwlIwO^Q1=-pcEIF`Z|(lBz7-(A1gX?rL?kaId-1h6z?Q=geY00>$v< z@3Kkb_%*HZG&}2HEMoc9R6fV9`_F4d_;Or-UA?*L2px31f>tK&l+}t(TP?FJ+aIG``t=>TKv@~0Upd(sQNhQ z_r=rAF#YR;8LtjfxvOS;D`Ho#uyh!K8TTOnro*5jo(N{}Gh(yd+Ctp|L&G}C%r89c zRWGd8-jLr*e#vg6=lZRd>mx!EK?&1w$5nNt(DDYg$igCi2G5 z^|9*Q40Bq3N8GR>&v>lwk--m6DV(kHH~Y|LAHOYob}L*24g4*{a!aY$ ztxJ^)I@Z3JfBoHim0o;tE`QiRsk7?+Msk{c$Lp`wC7~sYinO1hQhgTsjXhv~!*Idk zeq)i?5|0v7j|Le!$G0Fz; zH=ZhlfuB`rWO8sx{krE#+TIJ3b=)1Xjtn;a=NyG?nQfUI=!E9q{@xgymwcSg(s}3O zsxD7&q3JG#RQurMCeyWcACDs&lG{kn<>RrDkzE1ilukTl3qy;u|++9*isJK$bj;I8zNsd5T-`` zVKKQqZ;Vu;6vNG!t&SR$M%urq23#BBOX#dKx5c6@gM5b@F_%6hH25y1ANczBl3kGF z<+I_24@)u!_L{Z0{}f-~mM6Vf>5PwUyLNZweDNW1U82vYPPhKQ;}i-+zHZ7c=MU(B7S- z{yKso^y1Zsc31C&`QH_8li+uUV!vB_%~f`5iP~!E;$dEqB51hY+2436EHkMgum>cSu^T|d>;BR*Ol^Gjazig@MGah=W#U}-)eI*0lFUJo`djno*OGvixH}`s%PKzvt~|SzzfBkw!$Mq-04cK}119P&!um&>|@vi13XYu=c{}iulcXpncd+xbs&Uw6Kds73}9p1vt^3B-Xq>vDN6MNpoP4J6H z+0V}h{0$v)Wi0p-#84#M&(HN0iNm&d;CdnVm@UoI?STYbGTT(Z&6Y{VZj7|4-?S-> zxV|i42rk#Niizvr)h@kSZY~}ed5Rm-WtJ41P%Aj6&?&p>%?BHNB*((xut+uazw)&B zm6NS%OYFC<7ZtPekdz}ATEu?n^*Z?}so*8TRZm9DXDp@kCTJVgs6D=0*N)z__nybg zHs%iTuREwl6*h^D)9m(h$L99k5~!9H11ldPTNcxlxZ_{)<0FctRazt>RngwDbNRJPXZYSkY=^|14r{dM&9IqZI6UF>zK<7H65 zQGl`ldB*u4W|IG$-p$6r<$le||KerUva@LKKf26pQ#-_HWy&KzY-ivR2SkrB>0&Sd zs^1kD=4#Jeh1U3XKE)y>tkH@TJ`Z0%8AIrta-t_=(8>fpXx?gO#dP2U0i{Xb#5)U(q-C6yKACaE5>lq!0O6%T$N_5BHx; zfg#Y{4N6=kNJa?q`p1%)_$k{p|ELo7t(g-RXDdcB=+x@OK* zCNS~MdRIi=(sd9jRHO1}U(UUs^=_>u4_{T-?A{ZVVIphMkry6`Nw?(|`@M(GmMt$K zks$E>X7EE|Dv2gqL-vlMqB+l3#8MT-PhUWr0;&##0I7dLa#1#)12Jk8qVpl#GoR+g zfOI2S7)!fXx46+KhKWz-x}}Ppy2$RU^>HXm2fPUP-NauQaYU2QIcUcQIFQt*Q}||! 
znsnzhjz>f-|M`SsCRGN{P}OC0ERu1rS*LYjb}v;qIb-hxPsUKGvq_h11t+3v0)|Dt z5_-U&b?O6&CsBsM-ffhr3 z7CXzr?Vw0QR_(UQPpW<%KiUoG4pJ_9g!Aw4=#-F$kG@n+b`@g3WhWN^LmnP)1Hkx5 z3Fyg$MwJYPdj=F|qv!oeKefn%U$7d3M+o6)&qx|h;g`J7V;S8E+#7#;$wRrJ!59y=7F-w3=J7}9ZZFx>`A#b)Gdu)zwO-3}W z+=?z@y$pB_+gb3qAOPhR;IPm8^32?V+`kVZe?r$voBx57WmSwhSZu;@|ER9hXV}}U zh5B7@!Gb@!a*wfhw>`5FO5|Di$Pzp+N+r(mXG!CGB%Mb!c9?Ttx=g3x2Zc`1fAUMO z`d92FSTo?bnyzR0L%&M3wImLy5=-j6!>3+|_O3tmF)3EVl1ClWVVr^K!G}x)aI^#M z^0dfg&bk+pl!f`K7|sJ7CE1FN_*R7mS;xgI!=u)0#pX zbKf=1s%!1ST|BiecEwydnNYQ_cBQA5j-kE`;hN3cvZr_)AECh=MSc-GmBzW9-y5$- zmnP}Q!6Jvo_)f1Zj4JwEXKaH#8UCht9V*=6T~h+7(lp?qvCMLEapB{6Ns-#r--fgI zgoxWJuHh*OO@`=dny}#R9k>I8=3+0n^XLviDkk_IynWIIaSx10(-m+X@so@??%E}t z+lC*&b-ZV}j;IU+)j6`xN`v`IK%1!s$vRf=id2VEL<{` zAr$wd`eJ0b4W!A${h45Uh|Xqd64)f%A$6pCs5!-&{VQF{^r6_M3v;x$(RoK@$Y92F zG4fdb;PP;{HSK~f^W6vYk^(`}-6O1g=wc8aY4_A=C56@%&sgzo*cs3~Nc!@I17xyO z@62G%;K8rQHNl5{7f4X67V)XE1ZZ*q%n74+2JO>Kg#Hl=r+!r((rGXq*~UC|;!3@qI34E)hcqv5h25Yh)yEoII}IZ%d`K7H!w+xW*FKYH z=ot%M3wQ2_4Zrof#R>&#AIktC=eY^GJW=3SKwAr7&7j(upO0>`V??^|aIx_PwjA1} zKP@T{_Ru*p=WI3nJY3a+LUvBuYGilUrVlLZ5f>P9Pz|RPkQlv-okN$jMM8yfn~$?* zb$O%`?@m)v-5luJQE6C>_^2{iwVGC;JRAyLO~qe$@4`!E12i1Zl|!oj3ZLoN049th zh-jl0WkEhd1L%uz?IBHEZT&~tb`>Gi_fOsHKh$vO1qt+;z%~aU&Z?gP5ykogB|%%w~5 zkl9?N3-{ZTXbzWCtLK~03fs=~=sbMaD|=0J@G23yb3}qGv}vDln?J)Zkn}egv2{%1 z!+d=OcX*pEC(>Y$2@pUF^bbnJABJ^Q>?fjbIp~@yNPR5mJq@^b>cI$uLmpF|Zfos6 zREwnBr4a7gaaJD?K+x-IadkXtn@7!cqd#7^42!Y%rPm-e6r%DxH^W(2^W7c@gdc%Q zJBqBp$)FYe?04ROD*T#7a&Rpu(;$?p)P$(p_?Zu8Tz*6C9ov1u%+=Ug&5KlCu1f)e z(C(Ii!{?Zl>&=E*o7*f3JE0opotd_qP5hhQCaF(-Vo2qFeyv3aULiac8}rxkJ$91EK?&Fx{10G+WqV(qvfrP*vKh|Mp8BI{Lv1!WcRxF{TD_}r zf-iYID@baB4JG#8k^Rco_fN1s@eni9ba^NZG8$eF6#_*oWDpN13bmujAg>MV`GZFRMACL6pS~#eW!5C#@TTJy^P8-PmI?k=m@Bx0(IYT{ zCty_2=8#|%mv8+62)f00uBq;}Dk_}~!s^aVNZ*F)A2@uNnv=iYSXQVGyi7H4Dp3Lo z?nWh^d%-v^$LnQIbxvr*QHI_@u6@7fz*|*Ooos-}ltF?dj98hqlKB2{{=6`C0c-`3 zY_@q|a9yi&Wp<<_E*G$0_k0H43R8cD3;^D~no?pFbfHZuYk#m)b25OmXZM>apICIb`)E`w{9U-Wdn+hG zZdlsk9{Ont03`Zrq?Lr`)ujS%)xyfrntDFG%a^g*pvRuCm>zf_JUgA_z&V|o2)yF= z)>&c?b;?HP;%Dqvt(v4&EYVpEq%D&3(b6%RrRgvt-?pced*-}*b9az0Mz#>B){aci z9-4Jm+s)&3C7v%%U@oG_riY@>S@Q6y`ZRzPXy-gwCdVNX>;QOD$5p>fzUBLLAhNOS|_yRfG|7fTsY)o$9RdAsV? zW}w4H)oThUtc>*{aP_MT=^{C&=Yi%=cLtyDOnR0sVz+K&!-3(oH4LPi^4P6Ol)(q$ zjtl`Hj+4IUILZkY#l1|x_1Wzh?R?~72?FT#z3M>6@ziwo8jAN3Z9qw=cEYa9AH^O; zUxi6Z7Wgz@N8qd_y|xcV8^KhvGMAK^=*=X0L(`JVU9|qIvA!niDS;sjHr%Hirrp;JUR2?dlW#*d-hJimH?qYbCzgmnUp2AHF!+lJ8` zD#9MrG9D0J)f;%5n|B9`K2qwtzZnNB?ytkOH8CxgXu2adIGRKuN2VLO);fn_pQo8L zBu1+GZp$VezyDsfwI1q}0x9wI)*i`OplqBgv!JWFVaVQU%K(uU48~L)WXp?n zLPOq(dNp5155OHI;MFIf8-a#iB$;8q&%7Qj>92eJB}*&rQ)mO!shetlOTT-yzZyu? 
zdYe&#;2CL4PN6N{ukO1{1ZTk(^K_nBLc@hc0`A%oymm+;^_eS4WxM7omQra~Rvq3_ zd6<9JZrye$tgDS&PDX7%bQL8+tzA>$)A-PnX~JQY_0CqHcUtEXlzHN9qQW$k4QYD5 zO}UtGU`hxZ(N!Z+Joj9Mx19>aoGMf~dyowc78H3+s%5$wk}#s@b^YF@tt2+&0y#rhTlz3ZMvB?G1;I^ z*nUy@-)Ou4L)Pl1xdhy%Lbj-~WrCNND@Pil@Kl&8Z&yTe7jdzfH}gv>HdNbRiXTsH zecDKjt8y@2j8IoWp{+q?UZci;*~YlyzC?gC9cg9NlZ?4ZW*cRq0@SLPtEn2D+0~@JUALU}EtMTgQ`k3k*y~y5P`} zSrH5It$sE2Kl`EF^pI-zCx@dSnj1kq6()sV4(+h&oUH=ygw3K`n2tk0j<*udR54P~ zh0&Q`12S738v!pu7Qvd2Cot^jM&IV~D-~1eV zNTnR~rK0L$@(CRg{OZ^ZKAZ4Yq2_33b!+ks5R!ehA=9NKMEOthJ;BB|vrR;qUJY}D9E^>7O?AdU2Phb*$ zrbea@RPO|%V+w_p@si}f4N|KUpdmQyJ+usjql*Vh&0XZ1_@u@aKe+!8g<)1i6QOYY z=PkDc4iF8DAz+wGNv@WY<-KIbd}MisoMgc;CLnr8lMLu{6FS01^?F(JolC=?AUsw9<0gxq|-+g9nfoNa6Ypp%<7l~1ojEk#{Qmt>+rD{xQ z#`?|MrETA!Dn`HGt76F`J3}MdXck;)Wr!t`G9vgK@c<>r!9y8B7CgKEwl0pv8Eg;H z!XXz;dUpFC$|DMZ$s=sY(IP^&FFI3x#`o8HH^L#WZRHYho!R0)b%08vry`HYnNgm~ zKvwz07FjR^(K6S$WkfEUlRxu$qvz{9j94hR!j8XV3oaXeH$tEHUVsOeT1m4}U!Gp|E36&Dsym4vOR_$g2C-cLzd@sI}9b$5ioaY;iHu{48oY60frda zS&h+3%}VKO`g~WQ{;t4rKKn&y*6!+HIY})!r&QyaR<1tw`z$qPMqbOe#=3vm8q?OJmZ5lr;~&$uj5LPT5>T9%Z)tAncL@phX;Y8IO5dp zQlRFeI0|;JAn1WC*mLZ$hLsw~_U@og>Az3|;&a%e3oV?jF$0q;36}M$h(u}$mSg# zgc6%^{;P5+WN6>Al0e`D=h8ds7n(V* zLLB$j)GxRO0S_D8{bwL8=aJVC`Y7BmeU-;daWAJJlq?^A)MwEurbDWX`VTumj9>#4YZ(o&Mc5=}G ziv(beXR;eoHjF>)k4PU1he5da|G21h&i?*&HQIV9%0T;6VM>^e{oniXvPxQwl5&n7cRH2#i;U?(TZR zCA>tp49eQ!`n!2u{d%O$YJT~oNc2LiBNrLE_<3b&{mMhZL%tHtxPCH=0VEYhkN)Jr z+P)65xFJzeP`S$d7RIAsMpnWQ_dWq%;70utrM4}&EKVEz=Lz#~54*_%W%3|b8;~y5 z9da;qSf|gYg#(evMsQz4{NZp!x~&43_oI2wdT?c1wU)X_J5TM#kDxBLz!#YbP83UZ zT^UrQ!!_Ig)fb39@m*Rsa;98|Ej|a$YiQ&5PR_*6@6rO-xgVS}IWH@*Dxq`zXzH{M2sP_tQseBKQKeS4E8go41pj5sAF zwtmbm{r8KV38K6=S@ui5kS*xpP1s`DarzJqkb|%N3(GhY>P^n%Yzz7$B0SGCly9Lt zSAFCHFNERD($ff%(vm|iKtEn@uGAd0S?plq{nj?^V*D?9P@~gs(%l`+KUUrR?ICcL z8WU%s{%$UK308CY_XAI!`+xd>6`7F-%M5)b`3+Qnj;Wf38{uann41snS334KRu*(j zr>v9LHeXcM2YEJyHC!wl+TBKCJ%;DNN*lEGZ{tJ`a29R}>dgbS4NB!$Q2h()@0e%R zeUp{_NY$D9O#;u0y=P_Yo|SSQ{e-`p-aXSi$ujv(Q}VjIvvl;f6jWvSOvj|V(DCUC z4;(LG9k8iB@Kzx^Q8`KN!;EJg;$dYdPFaCw=$C{GU|Ue)A8Oe5-tFhf zy32Ha`B!aZy6x`kpp_~jF)`X)Pt?d4A_({BpZoN4UhFp*FGY8{*=c~;j2Li=K+m2k zAZqfUNI8CmX>$JuTYLsyfYgX|AYM!aTmtJsuf?#`SVhzWGkW#mC8A$}5yK{8x|~Mo zPPBDn@coJ!<|p=!X@NRD6^x2dc-AE{Q!GhWzzh`}_M*Ruqg&D8%T1kH9uV!JQC5$y zsO^K#C4iI07>(-DwDcib@Nh5vv+i8Z#!MG?#bh#~@v)?qn;iuTr6GYapzBiK9QV=D zbXNe#s}N0n(L-DIYAu-GejkK9)O#vy;<()X-IDHUhAnctgaZ|i9n(rA(Zv9>;?(v8s z9S|R!fe>@z_D}^@!>!WQnYk#W-|x3NePP!GVk#I$o8N##%iD_2-qLo#@m^YSiqhRz z(Vqtk%u?|Q0^DM2JYr&mEm0>$I6*%{ZO25*!vA=Kfnf4oD3Eu-SonKNfB_`9Sv_&O zRRr?)@N5|rXF?vd6wPq-_ZrrCqx0cv2uwFE?@m5F@upcwv^HP_(6Qn0RIjG0uRA%O z12Dkv?SVEXzK#v;!h0lW>{rA4)g9zXgIg7Dxe=}-_D9)PSwlTYyNQQYtqp7 z-qAw==9ge8Amt5;kMwNK*EgNji}aV+a1Z1$FxK1Nc0G>v{0R8ws))Xd=0mXcUyV4! 
zzap)=McD^?c%M1CZJ>#T5UMd36*?8LO@~zDf$^k1nS?bT42G0xzDrk+9_IW*cmAPS z=4Vj4(IVe`n4bGy`BGOkHZzY%y#z14Ir|b}Siy%L@-v5 z;Q%7Nc+Z$x<@3B>&g>$~A7cfuN*&w|V08>^l%mg}6Doy;x z_2LeUHa|?YlUbfuB*Amb_R{zSmB#+iGz_r}O@o&{u9UiS^2_`hPNUn=hrI~#9xP7E zOj-vbmxWy8-N%ZjcSD;%Z^8R^CxyiZ3}Oi=SZk$^L$7G@@!0Y&mg`r*3lrSOx_ox3 zY=h22CzY1XeH{V)$si`z&nEJ?j)V)>=}%Oo&GX_jky@;d!*HCI#_V8Tie&S#YFu=W z@44=muqU(G_nuYya5%a|FTvz4@03}l#94qfQ=G<}yyGqN<;j;n&~xVk`RHrkIgXl501!1ZhyD(Bhlf?u-H3p7%4af81$;rad)6vhB+MTvqwAm}vM1#I=WxafDK= zt^y>N!+!g#Y{qE&8n%k}odH-rbJTU;yaVExayygo<{ z!w?nHcmGzX>%nmtqYCdUn}M&medK_8Yr}|nBHP=eo;1lC>%c_`?Y(fzs(X${f>|8X zKl2!XdG`nEdVD_^6GOZAnp-rU8)+=R9w?E?q zeFE>tI~+2qJmB|Y{EDtKvu4?&*G5f-bmH9wv&XnDv2#ItpWPbeyFZWB*hA2sFesUT zG}JJh=vhp^jpt-r?b&ArOJT@J2t^NsD7%k+cX)~CWimU_ruaGrqZ}Rj=HGH)ozxY| zUa?X_iq`+J01jMc@++=vKq#%@dLgm<>sGXVl}yvD*x@qb|FH2VQi__}NHAx@^Z2DQ zUK8VGbT5vt)d>r_kBK}wG>A}cnPKR9@oi|swBzur7kv(BW{yJ7 z*jJ+9f^o2#ed_Ml+g=^k8cQXh&5Qm6)*9WMU)*BT`I)IIEic?_4H~92ARKHEB^Dx| zuv0!H0Tsnot8daiYM2fC;!9llE-Hu(O~Y&`Pp(1^F|BJ3ZmxV+CElw9Av}Me@6@5E zFi=fW;|!W!-{S!NR&;!2pG;~bxRXW!F&`Q%3zp`4Oknpf6Kz-~yF=9#()4-2#tG#! zvaNZpJES+X(W9p`9%%7)U*R~ne+@44M5oNF&I1Jo&?$vF!mo~)Mzg_c_&_&)O|ssF zrAd_c?kXW}F%q7FnH0NL=vKj*K2xxq8mKeS<#4}>2bny_A8qPD(-^9VH+#WNXt40E zVD~M5=Mi%HKvgPc`KX&mo^?8=W4j`hG(X;gwFN8NBkM%HH$u zXkXf}1+~_{*iMbW$)rO{UiSX)LMwm%?Mjp8q~5+jyqIf5nV_)Jex-Dv;pRY()2<#= z!#^20a-e-w4OfEW==?vn{$|ROeZ3je%FZrnFFWSb!hO84^%s*vx0K;3~Rrn)%`?!Jrf zkBjfelA9Fjee`{wOiETvQYy2#ktAV7f#)cTScNRB0;1|y&HcuqEoA^RNzD|{+T68a z1w=SKO7`b*m&{J8QBl5ZZN!ZWUUmH;E>LmP~CYhNX!6DXr z*J&UjtTSl&KK^4c+#nZj*^3>P+xLIEQUQ6G1C=rFj_KT_b@{M{IJH=n{bdxl2LE#6 zQ(5Jzk2|Ft)604N(0)lCJ%&_S32ao12i{-u;P9HZg%%5Ud82;RAW-`n)h?rsg4A== zyUnW$W=QO4VMkkAF`qrp zE3YNt10DQl_UCbiC&8Yy@*N+LF71=tI0wn@qK56y4=t;6$=ZmAQ9v968t9gJK^fkN z1;Vt0bCgPcc~Y}RKlKp}5Mnvv-Q_d{>cylpwk+PsLJFVsmwpA%kCASS z&mV@Xhtr@GezG22b`Um>(z9{{QV%k-lquqm7g>O(@vugp7(0aK90wc!F>)7~Uq*m^ zVJ$Ytxw}LY<~Ua{UBoM~ttUOd#0013jR4N*&yD|B&I|RbH+#DED%B5#0 zF{vR$nR5S{h{k3W1|bAiWL7pBH`XpN*)HQ4KnFP8fF9 z{0*Fx(i%H|LWJkwJ>pG`Q<4(S(A%lB1b=)Po(ADHW%Dz>tAFS#(-FpfICh$%!c%^rsKO0>My8EXLkiIBP%uAXwXla?2R_vy1 zZ>GIQ^;e#A69U$je%FT&AiUS?OIzLNY`7BhM6YzMTybgT3uo(6d#j|`_f#0qV;9&=2aSmu0c5B5NF zMlM&=CAWHGKM^w#$c+~_Ew!+Mq^Y!1dU}vlFa&LX2Z-=|vrLSbaBf!zEUg%280>ZO zw9r8Hyo9IM^sn#JArCT68zBhWeK7R0+Gey9LdC#=lc?fB-#S(0y*(NwupEQ2x^y%b z?XwiRD}WUao`X_24+F4*(j`Q&nBBiebPw5k?xfrVi`l+2@U}iR&>pIe%)4`eSL_3EJ_|0v z2XhYrC4hwkyg0!3+!g!NA*6l2{hN7ni0Vi6PZq^hKvD6$){QRXa%3o@OM{L@;!E`k zckk#TuXYfY9~*@hbL&BpXM-e97z2ygx9_Y9hY;G~Au|c6)RSP)t22X){uz`w@8kGK z5&LlnnYsYFr03Hg`B+_w?h3q8I8Ou@<4<)un(zbZ`Qy#zV-CC1gVPEx={rumFUHUk z&Rfb7{)UiO5p0VB!{(D-F0`k z;iBNCkIj=JE34a@0nAZ+M74mKJrT?NwGhR1ph)8p-fxBW6$pZ%pXhbQ%>ze33ZJNS{lvu;2H$-uKx5dMyqo$rX_GjQ+{rCz+6#WJ?qwJ&&s;RPOK2HY3~Vn2(1|OwNX-kD zLp@#+fvkj1^1#9IhIkh;wo(Z7LhrrQz-i~qtIDsuVlP)Psv5}x7!drf?30D&4ecjh z8Z%#ac_4+#so58XN2FUn5{?z1Du;T+!HC+T-e zP`+Z&d5aZvyYuHxC@#D9(%{)?)`!+i8``x6(zpHD!TpmBOt=YtdGJzDksoi&iM3vbYGAd&q%hY?CvH8uYBt04ld9*rxcD`_%2u}wL5DXGMxz5{a2W^=~ zJs92d)Ir>UvEywmJ4~_b5xdI8q2_MLPTK zj&V?g)OdPKIJkNZh4ozJLZB_;%)xMc`7BNsKkJ>it-bng0!%U6nf4+qx)T4;#0L@Y^@qDY4 zDp|YmDF;e8FZZEHPJr`A`xH^$uVCY3{&nx38H<2Pj|sTc7UwXxLeZNA?qv}sA)Gl> z03q>Vk}MoTAH2OEM!GJ0x!f%)0f)aaa^<}5Z1mp?=y}}}y)<^zz6ka`W4EsqbY$IF zGK-<2SuCLSiH-e+m^RcvS*$ok)r))&^&x+Sbt=|CgQIj^AVZG$dr=?C@HS}!efUkV z!hjy~6{;nI(4Roig0*IS;5zR64~07{4VJHjZuBz9(L#fESDw$N`4|XCGmu_I)p2XiVSQ+C`uvwj?p%P2}+EpD&wCOF^`HM4ONDtxvDH#4g!Y z&+U^5$2HRKYMxo*B$jCwaS@j=ym|Mv|WmfE7kJ1%m$+*^BSQ;ns=U`maoB=mKY4ZE0{<`1pO^11XOL1srm8 zWwQD%J_uY9eC7&HdQ6p@PuxC$Yb3=^)N?R^K}w3M+72CUI5fnn(;#OtN7^t0lP2Nt14jN~w_X3Hx=I 
zUK{9h>2zl>xtx_k_k;?<^#jD<((tsGB>|NaS}wa+E))+U1Mf|N0{`~C)aC$B33*r; zr#kBusse6^`&Bv~-)uoP6S-vi!YP~MBe>geF^_#^ah(kVi*GD4rE2l?e!>r;YwK!>hsqB zdm^%t;&s<1AqbiB&nM5te??oZcW-z2f6M+OD_{8q(*3_(fQH;gE@(?dV@NViIcDf< zHb-+9%ezmRZg(yI|AC2PTmFvM+2=hG37mK=3~5zpT~!{FDLG9MqOR z1}c$tLQNCkdQ^{)Kfm6tI6srpTk8<~Dhz`2&?%g*XBAkB`F55z9%fMwMMF16ZF|t+*YPAuO!y=4N=C|~0{H=A z2(9ikzDWfs$Nj_5Qs%;G3tB+~9)!w~rrlMbl>#RZWHKZt?3qze1kd|gKk;3zLp1Is zFc_k3_~%4lc8>5ME*(i=FTSFLGV^Vqk<;yzdWI(&hhpFYVCw5sbtGt0j*2wowIyVZ#4?rnMWm3yVrRb^3thn zP(vQ~$pgm(rw*@<7z+pzqo3BS`gn^fTY+a$$d3HXGrAIpoIU>VD(bZ&s0uqMv*hnp%YhZ|`@SfF;nC!xK0< zY`YSgZ{CLEZx~t!Y>fRPDy5M))Otd-H}T1es$JOULn-6`YHOaKAyG99LOBtNnrtVf zlT|bj=Rm8S2s&a2&wQ3AHA`2y!jV;)yS%)?We}ck-%klKKfer}YrLO^Eu|Y%M#DZ% zT~{lHwj;$?Lg}>cws!mor#ne_Q$`9hbU86PlV+O0ba2G;_7KjYT8kS-H$D ziDfzJF}`&t%6|SrP2&K-5spG1`v7<_$^oz{4a~Bl@@wOO034lF7IE?L0lMa5AM+Yo z0s8|4xsYbUp* z(HN8!Wc0Fu6gd@-c0h~hg74>vFSqg2ny+HZ?PvFWIOTrM1l36V1Tb8}6EL2Hd|g`MLbjeMX~mgZA7l|5UHnUAUjJBa3LrY+~> zg6H=5>)@=`;~seW{n`Uv9FKB+NBF)3w~bJeW*6=Clj}q{lP}DleCrh5&WaipfN{I{ z@{yamOGnzFWq5e|m6Y&9F=ib`5}++Js#DO}aqpg7?W}wa($jX7iaUMW3;5;7Kb%mG zddNr2%d6>W+I*4Sj0g&RqbJilLau>I{Qc2vfCyL4{y87<&Ty1J$U>^8f>omnhT7lp zmCswC_NInJpJpGCpsxs$^jx@NE0V_h{k)t#;FrFhDlO(zZRc))eVz_Zdm3r>^b(8Y`}I;pO?ezP#TPsFW65Us!nxew1t^bM1j z(SSa@hc{9xn?4ozwHQYQ$BmV9X1`+xVF40-(_PWI4R; zJ$48p(={iT(GmY&)w4=)P$;L#`^|1nz%|*AQE#jOW*)GASsc9z`qO)7aT(|{`!mnS zEY-i-fSDNDc*{^;%ad+p?&|?UHk3jcqV$Cq*ZWE-&E&2{`Hf z!tCVITW0hvp=F;4UbC$HQB{?NyXq|ghbQ*cElu8TPHzCIduO%aWtTEC(5L$ioW4{J zm~6MRlr&A6 zM?OQ(M#yziW5RuYJRG^*rz=hI5;!3Qc*EX2EKxQ&CfxK2Kv0Yn8+WLLADEmQNuMQ$ zFr(FZ`OVP@;b<{3Xj;ui1J%I%4J#8ZzTz>xi#8~K5uJE&zquTGcSk_@)b7ae64*N3 zFNUKNmL0{n6I32xuq(STOr;)0J9ntF&BmQH3i3s|kz(ZN3`&0gVfa5A-i$mbzRM*1 zNDG}*ng#f-g~+;s3z%u3EC12oYvSR;w2X$ ziVJZdQGyM=W0VmTwv1*sCj zX-S=PqMTjejx{W?jr}Kg^1JGR7a)TLDNyKbnIu4-fpE zeFz}2!M+~f2ySt){?Y4RoIj^9(5@BqdV_fVVd`%Ou}p8Eqv7R1WL!&PNtHEuq-swO z;*$rY#|agL{$G5G@bQwRCFz6lP*D_!3?B|FncrX6iEs|=d5p!;!paKqi6w*i{>}wp zL^zyRlHFM)Ag~EH;a%UA6}N^w)HJvw62KJ&Ivw0s{iuVYT}qbh_B2M%QD2#+fY8H& z*5QUWuM%CPz#=M!MX?=(B@p`y|Ai&5#>RH=z6k{&Be)LGU8csW$lIYQ!)1yXODC2&^)QYIjnCpP$MfvI5 zuNA4cfmC6Z$_`kyb^*6j8DKH>mLL~WXNp$1z!|}^kA6T@UR7pMtV{gR^x>aR#OpAQLTn}@R=hfFbW5=TJ zy%YNID?sxcz{R+85};O$C+M}%UU`#PdJ6y|o+?eYvDpk%ON}!?WGyB(r!@EZ%${DA zq*wj;5U3d+BZ1eym%boHv+Q#QO`)MdzP9(F%Q(iXLqAwQ`Efx;jfzuNH9OFlG6=bh zB)z?RL>HaAcXDIcj>e{?0aqvuMcjy+px_{dFjj7#XQJ<5Nj&p&XEI*^LTeZLgSk)= zxZH^RED5{4EQm^s(?LpO3_vY-R26y`jteAjBEpc08|+H2M<45%8@G_6O$aQd<5&_; z1gg+tbh)CP<(FBhRgP@yn7ho>9`ejO#ae&_n2R&(1oF$iRjV`f5!u}Uzf;cHYGqZ? 
zjj&C4M$tkN68D_kw=RK?LEq8^z56;~4WM_&EFER^{l}&EzQYan?gl7UfYg2$O#p_y zyZ5*6x$c9;D$^Z8;#)y)Ck`;zgFxl| zB9&5%Os2uj;ZIWdGj9;Sy=d4zswsdKBv$?z4rXgyKQi6i1jeAl=2U-v{%?AOi-)3) zfQT~Ze!=jj7X-qr255C-%TvtBN8AF09KS2oY-z%jJ6CqSo6W{yT2HH{)Cndf>?Tem zA0y@%dUL}Yq>|?XGzKOT*FihE_MC+DK|M9_hsmwfm|l|OWhb&lpN3rs9)3XqdPzlTQry4iaBgTu?G5a`KH zzi*FTlV zp~=sE*r4v7$4JQ7Po12^3w0oTSkJCTV6Qsb#VGvU-6-DAK|Eor<18zY zp~o2H(-wJw5t=B3hvr!BTb8(`xreWd#YT(Hml!=gW!vECnNicLm~<3%d@T3*C!U> zgM3M%a&(RN_x-eROc1igB6eY>2v!0^B_1n68>xBW&%LmRE+A?R=+HPP!5 zN~SdLlsbPH9W!>AQ(P9Y(RlbiEB<{sGo}gj7vGIfp8WAa;O(bEf58XyLn#}iNi;XF z@J4)sVQ%ak%CQK5K8>|CkPGrQnl$tmDc!ok7tm0ZTefrD-7m%$^5>>86S!*Obx9C- z*dil)cEk3i-GJE=+)3D9RknH@U*5x^50>&Yk7Mb<=8Ro9b75Fea9-p&QW$ld(uiBx zgQ%IyZ7!Tvx4A==RLLwgT^1eZL^Uf-DK7t=-%?T`JAEN;)^3!*!`{xk^N6ZMZtGCR z76=)1%@-tUUe#&=)o(zHyb=J9b@$lz&R>3@-OdB%mF`bqUVy^WLh?EO>YQx3i29zx zpR5OCi=sgl$Zta^`9GP&;9VM|vv(8=1FYxmt9i}=1MxEBvV7k@0&z$0pGPsof3p%H z$4nh&;=!vyF|Euyntfl*M>}`vniuGr$Mv+E51bNnGIk}fVo9KN(d((O(#D9#u!|TQ z(QLXb!V)#@*|lD+Z|~|;D0PwI4iY4nB^-w}T?;`WOyyG=`TeJ|$A_R1^Y^xS0&0S8 z$KEH2G-J*~2uNYXK~P$ZR*`GD+Fb#lAU@K9{ILuiY@O(RS7XN;c$j)Jv3?!^rr1AR zenrdmBrP(GgCYdI{ZjmK0?});%iwT^nhNl2-}CY$o6ad|1U7xaFKR|TYUXMZAAe}@ zLA=YM0fs6l^E1Zq+XA{-WWMJF@M%k3>JN? zZV9D1AHsf%|3*O1xFeAA7QeW{0jb0&@)}L4oc^373k@3*AbH?7XFIOD z+h)#v_vG7ph(8(ff>Vaj#IvE!8;0TrS+yf`>#z5n+L^WWDAe1iFhg#8xxH@a)c9@8 zZrf=q;82wUj+zypI(h>FkEcWip6>hHK8ZS#+&VT9Ct23b900_xv=5l z^VcGDHvauHWAJtern4u%Eq8vUUTjXZ^grGe!HD|Rse~Y9a%I1?phra~ho%CdvgbPB zh2r`|?rSE0M*rFlr6|)HD7#1p+}qtYr|DGk$RO8^Wp0US{)+`I>{TsLA<2do-1$%a zpY}FNSAKnzEjCt8qpim zpY)?ypAm;=oFbilaSZOv)cy4++N?Ai9GSUZ?gbtPrff3lh+^T(|F>T9+v=JWmB>oS z-wQ@D3@e`vX)>R%8O(3g9QntTQ7p;|TZ@g5(kAjbL-&X(N-NA9uEa$Sf=TDhh zvG8E$1Y5Wy=!YQ9SL^njC$q6_o6@yDA6f%wpYq>m*1Bq+7xW|nG)1_2x285PRaI3d z9rczBputXYgS?ZNJ9^1+$OT?vL38GRN&YZOKzmdg-rnc1AYr<%81i?fApoK6lfBAt zW#b+T)m2M@;BQ$YmelJNkK=oDkD2Oipp%*@!yO9UtaEcX(Yzy)JuQLn9gcrZ=lD(! z3;i^-jEYD99DfkYV{D!1iEl~oJ<->FT$Rw+Dc7m9>@hqr#rDopC(kjmavmt>PYBgCaEWY4&Eu6v#D4zJGXJf4oPjoca;8_<0)!VK~ZR_ChBX0KVme@vZU)#9gAi5H72Q`yPa zl_>>tBemI~Fsmp#q@|EuuO<7NCpMAZu@|&4(*-8>Jb77<_uoUT`;o{Qjc1ToQTjQHX@($ z(U_6c`citaQJN|qVH2N?dMFs^R@y7!lv=MwD>hoHSgfPuzK9uB)WW*^+7ypCO`o38 zNFEy8muO#{2Xt}(*RRwS7mgv7FZ}~HGg6liXp8fj2j{Rcf~AOt0|)q|zWZmHUE5LC z*z>A))L{%;pkvy$)kSkgGPwA+8xF^AbEx;)Qtn4I8Q4dp{ZWw3pqmI)rUA8#Un=#g?K!)34Pp8<#74=!X9uoo-Ok7qPE;n$W-tJ~m7}M}9TgXm< zSB5<3%8cGJKZ8!sO_o}gp~%L|>hNj9URpwCYT=nA$jZf4l8)c&>CONzoA96M9>@M! 
zDk8BVxEQrtT7`%{A>u!7CI6tTwS%2ej-AhUQBt^kix`!nCk@EjQl917#|nOtfo;40 zSj0wF2KKp?V|SQW>KN&orSuOzx@3vypDoHP>R)p1zc&`It%NwXoHAQV znau-)iJkNDQO}Z`aV4G{t)rD6Qf%uR%})yX@=a^alblD8Y1S<3Yy)>}g?n1|U(arI z8JA3EOKtf4HY*Ks4A+U+{;>}#EjcTZ>`d(k9UU-KV^0%fXeHCOM67M zQM0{<15{PE2&?}+T&xT~#UrWM%e3E~s%*`48~ApA|)r-#V?N5 z^(VVsWy0w+n~3SYl(R2Vd&0<6@A`Aem)%BMqwvBgX|7DE-dEy62q<&@a373zg;@1k zlBcY9VOca`xnDwn8wuV2CBnoM$eb~%Ufp2uG=F3uF> z2kYYS*islppX;YYt$)>v`^3YJW*M~ZL7WZLx<#Hk+cScKeG?MjhJ~ZR`?yaWLM=L~ z-R;AxG40i4QbI-+nEURgjS%OL_$WjMG`RkFT(Z~M&u{uj5--6kB{o|8nkMh++uoD6 z)o*jCkk1hy>?iLMiMO2}cJUX>`2^RCDh)IH- zZOLAiNrglaIiD#SUB%Fz!(;HB+@@jiTI7~I$Vxwnc=&5l!KMWAI{NHu6YsRE0237F z(vMsotPy6Q%jM(|H>4c?o7k%KX3^d|K>k`;QMYkPK`=dcFb#K8O^ddwYvRCw>F~h= z%6Uc6Xe3L=quBX!IX^pP^xZFIVcR1$p+5cv_$b?+Ao5-b-Ih~=4&}V_IPWJs;;JN) zGaeN{uOB<#&^H=VzeGGzym6y~_pcwtCgQ0m+)q3V3j&c~ZlZH0i!5*miI<2#`s%VH z0HSK*IWF`^_R{rz7k9m{U9^V+V}HvdpWz>eujV2>>!wE;8LnTW1^lI3o!k zJY=OlMl^{+gB`Q_?^sx9yqXV4(-|!)SA!Zlw=#F(rMhW@-%^MoXY2y_Z}+p5ky;P%-vx~NIW_z7pDTkJpqFN9<5E$?%!jWj1I52=OGO_F^v zd0IwrxPIk8&Eu|kx2EpiKKLtzek|A&i0pxL9ghsbt47O5tAu8cww;M39uwND@Se9% z3)LE+s3f z);!yJM%3_J5pgY;G?yIB`7EI4FDa`kN#dP1Lo7wky^XmJWNTZEjiLX_xFb)0u>9H$ zt89sJ0hL?p-v0RHS%ov8wV9NPBtZz5qK{Ww?y#U-t#CUEQzi>X2{(fJjq_I>Qd?Wi z*$kY##m?$-a76|CEAl>vokz59F=wYF2W1jjW^to`PKl7)1JnvhESDsc{ClW25>9VmnySP z^ACcgV`2C#3?DJ+llg&W#`qo-i#k=-ATy_@5&5ot?ED?fsqQZb^6|m^h1)0N(QTT^ z#i(@}@d%VS05r~718KNuzIY#a&CL50IQ{{;v+X^MJKtU&z?UUj1}=GJ(6>Y3dG1WD zZEw)6X(*oZUv_qZWu_YcQk(^{YbJ+sHO?|gL1b;>*S|y%C*9H@`q&c{1uF6Aef=Y1 zf$FD({FW{jz1aEU2g;K$GwLSw!oVw3lhtE)l}w7v-|31CTIlGxKBuAj?=NszLm{mk!wDfA4n%n`q9anovMd{Oa9TEl8X#ESukLS0Xa5$D7G|$)DDn+O&P# zIho@Ujb7(Wy4?QNUkzK;kFOv6%zRH)>p=$xkKb^gJ)nS!+s-h#FN!QCtV{de;u}*6 zphGg0PImKeNHFjv|DA_d>V+LOqFIxGq<*SlF3@BwqZC|tNcP0KF)}nW=$dv8!XQdk z*85Hlt<@MrQNwYSOyn49mkN9)MbMxqvgqOqTe?h&L6HPNSqx-FsjE6K34gJ^b@ixV;TNO=$ zENLaxzw5S0%I~@`XKT&(pnPF(%Sly?3N`5R6eN$%FxWFP=-m_6pCM{s%YA!rm$L;( zAPfX6Fjfqbyfm!)>mNT4g~n=&9*qzG7O~TN5+%>n#mDL`pH>mUB|gx5K0*aR=au+1 zm*2myx)qu_DTOH8dqmYSlZH#eP}7?{$sO~-;S5_2GMcmNy{brr zmYsTUgZ7J3G@{>n&93%!vlI=kR^f+|F-Xr6LIaU7g6>S9lAh*#MDj^J&O9YF@(p`P z=>}bPvR>U=(co>|NrEa?Ll8&-IG@daR^#P~Ju^riAec-yPczwap^^aI{$(jwFMgxh z4&axN37%ym7nx7wkb(cd3fE$g0{*F!{}@Sx^WLkc4!?lrLe>)KP2fVaLorY?wyf2- zrmHu*_zm_=x7rdUH7x(U=&;~bGBHh>ySWm{=oVnKb>|pm#6uG_y<{E{yN!W2r$OWD z1zy(+e8A0{7n<}oBwin0ZZsqiBMlYe50LVH9J6U{JIz9hIU1oJLg6ybhtgPFz4y>E zdFYjw6-PqyHNtPBdobOK0FssWx)@t6;-9HElf&`D80I@=R70;bE~PTOcq>Mi4O|Z6 zjNp@#agU*E4-S^G+08S+GYPTT!sdTA5kdC;bN3#{4PIJW z)ITqgBH$&jT(R+yhs#yvlWKi?+DjFG`FmT(P6P0~H!*KSG~u$}Fd55$C^SK^Ft5#t zQG;$rhx0l?)p=3;mEJROI1d6$b_6m}q>z=uQ_*c;OJU#ZZBB0PkxoN~;0Ur5VX+T# zs0NJPz23cum6RXi;#piufaV|{QJddT8sezO6s%g)1l8XZ?($%@k&YMr4DLO^Bh?Sb zm7$MTC!0^yaLx3+(!u>UAG&$F))e^T|Ga3)>2)$+eqqP;;ER3qc98;T@20zG5 z!ThNm>B*ClM9$)K(CORdR;`ETB<89drHPY7&lXE5mzs*>iCSf$ z_s8()pVH-!Ai6^2eY%|U3+OvS@9+VX=?<>IXXL^bK^C%?dLC7n`G|JP;^s|T5+R-| zi7XvOEJYM4*zhH@z@wk9EmmGm>p7KBl7~FLa6LW_IV-tA zZ$&gCHr%-^1;o{sAP=!GW$wx*uaG>;R23#g17|2814id@=gLi8%+pf6%KF$%D37iT zWT9j=YNUtEO$KuGbNKfqnYpEr=ZdSJO_u?_&`;bNB*f}vcK@H}7mFDN6JkP0V&zC< zB{QM~S!*(K&r@V2$F{E!RTw&Af1KMyb0tB8qnDeB=yw}Tve)Nt4tk=gbos}zd^QKj zhWhSrQ*-zjk$QctVfL>r_wmi&n zUh|(hgfBn&1v7=4Deg+4uqQF-M)DE=EGYg@@(Ovp1_srhL$^nBEFx@dvC1(9m4^qO=gZH4s9?<`9 zH4HYFBacVM`fV*J1T0bdPBR|O=SpTJ>-~(;QAkDef~O#0kR>lZ<7A&6 zZWyjXo&p7`A1N3L@fMxV8CV!;l%0NJU$5T73Xyxzq9cS@fhR-ELYF55J0G7yM|TBi z$8V25oRrT(>;fGXg!QZG zxe8{GeXkYhrrfH?1{wH(b{YEYYM>PRWB{b2^6e{X*^G)cX5Vdff_2$mES>!G&N{rUWAItErA!8?&K6PxTMAe zBuPZttW&mq;7DX#decN#`|-F}fugEGhhk33Htx3KX`rk3N};L$UbSh&l05h54CS7& 
z`8sZcR(Bjl_VM7=b}B&Y3#8-IDbs)?jvP?>%Ye{npSj9v9RwIg+OA8^Q;@bMH&sgE z+iY;O9CMrAqFAU&G$SNe@jUO91NGm}j-L8pjYp5^rh#GZ1YOs>1RV2>>GE|J9y@Pi zI>n}g=7TT)f~PTo$rhBbupBdlLCO2J!A|4L6&%SQ$k#o#LiMcq>wH3L)h(YsT zO?Z)p{p5|xlR=PMMB<1+IlWl@Ls658w!-4q-67$FW@3-!uff8mDLJ|q%r{#EghpSO zU+?MN%Y$l^RsL3XI4(!ct#DWHt>zHI1D-2zKTp@)N=X#?n$oIa4#Ou5Xr?uLW;JdaS6 zbG4g(jYC(CYAle^>jPowO^Z|bFNz8Ehdp;j77vyVXT5(*t*1ezp!0-oG)`)(3X4v| zJyyT4w}QA4q(<<4UXsNNE*D3p5fQKy(Ab_?Dx->2{6`*fDxoLH!e--`53<+gD$^Zk zw56BSDE_bBS6%V)z;pVR7S99Q0{pT$^!4~-29mwu$310r2LR_;Q8)*OQC@#_@uml4 z$N^vUyVJtffR*Z6p1mDiB|)tlk4Ok9HKn=f_8r?d0?2cSgh7i{E)#v;Cw;1ca zCAz*g{Z-_SL`g1n>?Z!TcK)+_{6TL&jRb#P@%VMc-G|tiHl18$N%a@|KCy;Z`-<#a zzA13M^lGIWu(54LI@Y(;&XDg0nH)s<3u(@>XKc4}NG$pc$f@`l*MdYB(0JmS(8)t< za|g=xry$bQZ=m%l=d~#3bD&YFdSe<;b^z)E^vg^Tp7!7uZ?FC3-85`sdx*LfZ@dm? z!5#CQ-a{hq&h-yA6cDBEbFFG>PH0^L3`w7oLAXB8jlOmoATvhq!5h;L`(-6VP1>(K z^e-i1L9TEA_JCA;QiJ(o9D3&B8OmVTo>8W;x)v4|xh#N1ls;ZgX>CkwXcz9q?xg)? z&YizQCho%P(*2JF{ONAp{4Vj zK*ZQ_QK?Lh>6YhuIm;VRn4s@;p21{IBG(NoTR@gjTrQ3G=;;f`Zt!n zM=*=Bq+hH+;AC9o8JB}?2o?l+EwU+I)O3PbC7zNA^jlOv50L=)ikU@)U4Ty4D`g<5 zSpvZ0D31f{S`84}`1KJ3b)d^UQc?dxncjtCXjDStS9uP>9!+SUmHHpAlT%u5ylmta zG}6O1GwjNEtSEb$2qe@^|4gm773rBe8jc&JlDQgB1FhrXfpV%8 zgs1*x7b-2Qf5ecrB&3?cl>VWoqxlGL4s(zFqW*;k(5Ngu%H90TG%P!lL&HMwNVgyn z#QVSOydaA&%70Ln{-fh3mZF%$WnxUIeeWolg+?x&cjx9krX2 zL`OF;J^%$Ki851r9_~83izd$==q`v7Q z?J9XhH;}V>GGu#z&XOAlb+6+@`hVXRtJ`}Q?M-=@j;b(rxt}C8_^<)EzI89@BGC4d z{S%Gkj>H@^v`w%tI+rw&yZv1KTA3VZ6D|+~%6Rbzd4uA~o-?`MFlaWG*eJ@U0<~bC z3Q+Qv0G|slU@}kAdq3pKnQ`SOnY;`=6u~dM2GIv{d2xHctr6-kw(D!=ALe`VVMsBXf8tQ83A3 zpsOuN6xjY|YU=wubEPT`v#c1YLLy&bqOkibP;MSZm?jG3E~8Bhf3OaKLfp*J9W|83 zig+FZUIFHrE9^ckFbNt2KEMn>^Z_#kb`uT|;OWYPu}LjsIip1Nbp=d^8_V_mUZzXW zuF~k%>WhIqN$Wo&fFy#Emu3A0?)y^wtyZPf%QpU(ZGN)3%(AjrzS?Y8@TK8*P0_o& z_QLi$m-n1I$4Z(!_GD@~`!BO)b)-W7vKk*7jM>J=0={~Db)|5cQ>Te!3BvK@pd3~k zPwSL~M71U#yRBrhdmD@x9lNQHUjigHMM(4JiBh>X(2V?O0mPdFmvfYOSaos8V>z<& zyXqE!e5e;`>y39(-CBHuv|8k^6*c=Nr+b(glF!GgrE*J&N17)L_Jcei*g zeLTmyu`C^VqWL5T$msiD#Up?jT@pwaULq_W-CIAAD|i1K9RR7 z96o|R|C!+=6$f6W%ds)I{-6=aJGuh76kVh!)+P5lEA+y}SKpW(?cC;JBgd67N(Ov9 zBpcCqjH_yTY^uhUgjoq5+oE5eg*2lFWRxgCr-HhvKva}kU=}tH8kQHKt$tB?w)p@% z9dyCcxF7-0(?Fa7`IgcQ6AuFi+CMvkp*@D7{S?7;pm(7FDg)XpcY^Yocs+Y&PNNtU zlCU&wUtLpN1c83*-Z>*GY)u2YEo-~bk2zTeSP?JhtMqs+<7R;7fmYh}iOQ+4=HCc> z95zAe#nInEn4+Kh7yJvVjBn=T>wZpJ&O#PD+!22Mo_hc=zE=C4pzbsPU%lN8Fehm?M=%H-t)k*F-l>yOC^f|pbHX_$ zIOj(x;PN zgjrQwv%!r1uNjh}a>rJXP1l~If~k*H_I=}eB0O)Dkz|XT3z_oeu~67ifBEf8b*xLQ zDu5=2#60Ho*}$-c3iYnrptFy>#>FoTt3C$7E-ww%X}k9*_7ChR2fT8xo5@BAMz4`E zrl7Bi4RF@qoO6a6#Eb|W-)l+jDbIQ{&boEiSP@}foB#^|-%Av+eG{|WD4H_d$0`BA z3tHqfQ*pb1a;aBL#J>V>-is;3&+ubJu|W0DaYm%A{HJ_S)OXt{H_2#oi16GxFy$j3 zV8B(a-03Q}QE~XBNAR=S*-RqYj%Ph=0Pjv9G$pw1opi`c3>PuU@mU5=BwF#^~NW{o0Gwl&yZ*z91yC@Er2$}&2q_y z(+7PJh{4X&6)C7;r*Hu@iYq$G@F%8ZhZ8w2dFo5YW&DwL9r&8*^A6VBmlb!5d8c-) z+V>oq^vXR}-LE>NvQGq+l15*V6|@b-Z}P>U*_a^{LS0{n1QH9k^$Pn_<2IIeYe4nAhPO>-d8HK7eQgKXuRMzh=O zpAgJ`-V}cTl@4)L=k!_6y#r&g@iFeeibLXGSaReddO1@t2X#vJz^m=+eVyr$vKyRP z%^qJr25;T8rvq=C!PKTJ<@^+~_XtQ0ThG#r`N6)So6x9r{EPu9Nz*_Wpol^b&J(~r zbGjzLbxjE??h&k99KJHfZjX}^-F~kc+>(qEa>(dltm(tB_fh0ZsDHnwFt;^Qth_)O z9X#<^fnjKmnd(h1g}8Mw@yRUPJxI2PT_|LtAW>y(fq1%rG+Kz^+1J=6DZn*rQ(9GF z7TTVff-8?`0p?adL(DzwooWE-Z;=+MU6l{n^4fkQMkYQXd|!mwj@mf1`2hrS`&>pm zSEl-!t!v(VR*#Wg)eAWG66!9<#7Y$-%S67*O9OAuR`$87KR=ksA%31zp0R)Kj&Tvk z<_7fMO=<1#hCS+8@@Xh4zKs4*V>U$7cFB$1lENXSQZhv~jZTwePc0-mIQx{K1(Z{_ zB_sP;Izit3AIWo$)K-m2jLVkM zC6ok2EkW0ZALK7RHtKp=%C9_NkJt$A0DZMFViBYRoIKKmsRr3SJwl_R=>E6Ipx;dh zEg5j&yB|5gJ~9uu$}=w*O2-C{CUI8bS?^@SQt6?sFv4k;?AwzNdKA<5snkc7+GiBo 
zvkk6W^xzX^-b~TU3Q|bty->%!*}0YdAwn6W&}}ERk8k+Cd>7$yGWc6CthdVZC9-H| z*6%bMUenJm2AVczzAab^yME}n-I7r;%l?DqZOV2QF$DR61dTjO`OEB3V}qdAsWjK( zO}e(i{1gJspKmfe#LF5c83veLq*I*!{afdKz*=T+u`Gs5E)2pF`l{|C z!FrnhU!cjFO5{V4`!Q?cjX_4zxLDYXxryOWk5Z-y*cZt~`_og2(b&bfFbd0Cp3+?{ zRoK4ARASHKDuO6h1ut&B>z_hCWMGA~iBp|TE=V_I-LDN1*?urn@Y#{Zhmp`uYe8(@ z()4A6AV2a38AJ$z2XaUe+qe;e46C|tQH5FQ>0RKmp`ic+l=`PQ9!Rx}5?LA`>KETf z-`ABo9**BJGLO-myF34=H?IT^2o)V~Gy_+g_>VD(*wayOA{EQjuX>d=-x57+8KkIbWZt2aVHu4%4!rakK{r$4QDjKB#d0iU7$x2Z1&n& zKGD&q!~bX@fRj3O{*1drT1pERDcs2+p0D0+k)A~V9)2Ouc`uFa*@;w4evVYS*Rd$n1uTGu7=8CDTu5lLFKmEMN_>C{t&F}+=~cKuEk^CUI* z408HV`*JL$2+_fEXb@$X)mJ*QbRQz_bh}?E5SD0koc92_v)h=|vATCq2}%9B zz|d~4{@t9vpDzd7%+lyr`QR5bL@^gm25GzZ->xT0RW~0V<-vqI3*T?0*kz{VrflXs zu~145&4`zJAA}H0Wnj5=Ls7VEOd=m=QRUkmg3GoGQApFz^|hUT zm%}@oD)_}BAiqZE#KwrVOjL^J5O*(^A0nLlm|6)vTnka;CXMdt@zF%g>mq%eoRsX= zb`K^5FpexQ(?Nr=!%wj1znA}2k6`s#;+q8_`Abcttqx8)ReEc`gxovFvT|f9lOdaV zXJ1cx694E-*HG>L7vqzx=cjw=>qnWcV$ga=r$X6^V6s-Xx*;X=bN2oPA2*~Gb%3Vc zT6jZ^-WscHD;+ofqKY&fx9Ny?v)p7W;v%0xS+Gv4l)j%!a@!-je5S|521R&6Fe8C@ zNS8R{%n+sJo=Z2o%9<b6FlKQ{&i;m>a&^N+0MVPVbxygwIydEc}O(GtVWE}R9X zO@XL?AYIUJ{6xXMY#d2!d7)H=5}q`}==68)h-MeG*+@ig{ql#*{szgEWjJXYz^D2F z8@;IIDh7WLqP%G-`sX*hjc*Hm@Hl_itqWe++=wkvgolBOm#1iVS^Zcc=^az8P5QrY z=+ntXxEovCHxcipAQ52T*Qr!wxyz!}or6dv{O}8zCgOw?lAK-ztfJkeQ0BYLdNjHP zJ5#61FzT&`rx(Jkti=TlE#!@4OXwjH4)XT^U-iuPc3bL30MH*!QCLavU~=bzpi!s5 z0&FMA&T6y_9ua3p{nCvyhwJb@r41`PEEqjO2~UecGPE}`v}3p)}Iqh|X4714I7D;VZQ>rVoeu0`8RU{R>{*Q)vdu262bg zy}$a7E_62K%S1{gJ2GLj>RgeheT;bH&F&9+M*1F>si7*TAmY9{A3G(^(^bxjG=u!vE51CN*Lksk-^>{QGO#H?k0HL(5JB;vt0mYWhK0i zC$ab3Y)dgT(4waR>(fgfftQx@quTo0qSx`s@*~kOM=Q1 ziiFCz1lK&7D%;%&2y*RcWF;Rs^5X^7&#&~*iemVTc@P+;{-qCWpxP$P(t2)NUHHrB zuWgd3^WENoYl7y50hWw0>}1jQM~9THlt;Z}Bmpy?1IU6Aicf0LsCaJgQ}(Vgvi6Nc z4E(#pelJB|Jg5j*XYUZ#n!OLvh<`p^IBSn zNWW+Kac6g+RE&5l7~$*Pag|Kz%s!x958P_;8(o`oSt|97;=`Z#MSUrrOMK6aMtXiR zSwa3t#N8FN5@ADo9F;EpHtg*WS8zB`{BNpM$&66zCgxxl)U4;I& z(%<^~;~vrQC%H0qDmc*<%VxRtfHKWLJ0YsE)ArS9m@sy=i83_LeTE8x~f`*4~bL zXnWPTHMrC0yik5|WNYvMB%S%LfRzxJJE5`qXZlS2+>iDNV#_N8p40@Dd#dBnKE zX=LQVqa*yR+lbNF-uf%fqq%zZBN6eVHZ(<07%kfNbwQ{fViPw7RR4$A}?ntH*!~HNM+? 
diff --git a/docs/_static/js/copybutton.js b/docs/_static/js/copybutton.js
new file mode 100644
index 00000000..f5960d26
$(document).ready(function() {
    /* Add a [>>>] button on the top-right corner of code samples to hide
     * the >>> and ... prompts and the output and thus make the code
     * copyable. */
    var div = $('.highlight-python .highlight,' +
                '.highlight-python3 .highlight,' +
                '.highlight-pycon .highlight,' +
                '.highlight-default .highlight');
    var pre = div.find('pre');

    // get the styles from the current theme
    pre.parent().parent().css('position', 'relative');
    var hide_text = 'Hide the prompts and output';
    var show_text = 'Show the prompts and output';
    var border_width = pre.css('border-top-width');
    var border_style = pre.css('border-top-style');
    var border_color = pre.css('border-top-color');
    var button_styles = {
        'cursor':'pointer', 'position': 'absolute', 'top': '0', 'right': '0',
        'border-color': border_color, 'border-style': border_style,
        'border-width': border_width, 'color': border_color, 'text-size': '75%',
        'font-family': 'monospace', 'padding-left': '0.2em', 'padding-right': '0.2em',
        'border-radius': '0 3px 0 0'
    }

    // create and add the button to all the code blocks that contain >>>
    div.each(function(index) {
        var jthis = $(this);
        if (jthis.find('.gp').length > 0) {
            var button = $('<span class="copybutton">&gt;&gt;&gt;</span>');
            button.css(button_styles)
            button.attr('title', hide_text);
            button.data('hidden', 'false');
            jthis.prepend(button);
        }
        // tracebacks (.gt) contain bare text elements that need to be
        // wrapped in a span to work with .nextUntil() (see later)
        jthis.find('pre:has(.gt)').contents().filter(function() {
            return ((this.nodeType == 3) && (this.data.trim().length > 0));
        }).wrap('<span>');
    });

    // define the behavior of the button when it's clicked
    $('.copybutton').click(function(e){
        e.preventDefault();
        var button = $(this);
        if (button.data('hidden') === 'false') {
            // hide the code output
            button.parent().find('.go, .gp, .gt').hide();
            button.next('pre').find('.gt').nextUntil('.gp, .go').css('visibility', 'hidden');
            button.css('text-decoration', 'line-through');
            button.attr('title', show_text);
            button.data('hidden', 'true');
        } else {
            // show the code output
            button.parent().find('.go, .gp, .gt').show();
            button.next('pre').find('.gt').nextUntil('.gp, .go').css('visibility', 'visible');
            button.css('text-decoration', 'none');
            button.attr('title', hide_text);
            button.data('hidden', 'false');
        }
    });
});

diff --git a/docs/conf.py b/docs/conf.py
new file mode 100644
index 00000000..e8004f7f
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html

# -- Path setup --------------------------------------------------------------

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))

import os

import sphinx_rtd_theme


def get_version() -> str:
    # https://packaging.python.org/guides/single-sourcing-package-version/
    with open(os.path.join("..", "TorchOpt", "__init__.py"), "r") as f:
        init = f.read().split()
    return init[init.index("__version__") + 2][1:-1]


# -- Project information -----------------------------------------------------

project = "TorchOpt"
copyright = "2022 MetaOPT Team"
author = "TorchOpt Contributors"

# The full version, including alpha/beta/rc tags
release = get_version()

# -- General configuration ---------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    "sphinx.ext.autodoc",
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
source_suffix = [".rst"]

# The root document.
root_doc = "index"

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
spelling_exclude_patterns = [""]

# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]

html_logo = "_static/images/logod-05.png"


def setup(app):
    app.add_js_file("js/copybutton.js")
    app.add_css_file("css/style.css")


# -- Extension configuration -------------------------------------------------

# -- Options for intersphinx extension ---------------------------------------

# Example configuration for intersphinx: refer to the Python standard library.
# intersphinx_mapping = {'https://docs.python.org/3/': None}

# -- Options for todo extension ----------------------------------------------

# If true, `todo` and `todoList` produce output, else they produce nothing.
# todo_include_todos = False
diff --git a/docs/index.rst b/docs/index.rst
new file mode 100644
index 00000000..c7781713
:github_url: https://github.com/metaopt/TorchOpt/tree/main/docs

TorchOpt
--------

**TorchOpt** is a high-performance optimizer library built upon `PyTorch <https://pytorch.org>`_ for easy implementation of functional optimization and gradient-based meta-learning. It consists of two main features:

* TorchOpt provides a functional optimizer API that enables `JAX-like <https://github.com/google/jax>`_ composable functional optimizers for PyTorch. With TorchOpt, one can easily conduct neural network optimization in PyTorch with a functional-style optimizer, similar to `Optax <https://github.com/deepmind/optax>`_ in JAX (see the sketch below).
* With its functional-programming design, TorchOpt provides efficient, flexible, and easy-to-implement differentiable optimizers for gradient-based meta-learning research. It largely reduces the effort required to implement sophisticated meta-learning algorithms.
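A minimal sketch of that functional style is shown below. It relies only on the top-level ``TorchOpt.adam`` and ``TorchOpt.apply_updates`` entry points exported by the package; the linear network and squared loss are placeholders chosen for illustration:

.. code-block:: python

    import torch

    import TorchOpt

    net = torch.nn.Linear(4, 1)  # any torch.nn.Module
    params = tuple(net.parameters())

    # An Optax-style gradient transformation: a pair of pure functions
    # (init, update) with the optimizer state kept as explicit data.
    optimizer = TorchOpt.adam(lr=1e-3)
    opt_state = optimizer.init(params)

    xs = torch.randn(8, 4)
    loss = net(xs).pow(2).mean()  # toy objective

    # Compute gradients explicitly instead of calling loss.backward().
    grads = torch.autograd.grad(loss, params)
    updates, opt_state = optimizer.update(grads, opt_state)
    TorchOpt.apply_updates(params, updates)  # write the updates back into the parameters

Because the optimizer state is explicit data rather than hidden attributes of an optimizer object, it can be saved, inspected, or differentiated through; the differentiable meta-optimizers above are built in the same style.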
Installation
------------

Requirements:

* (Optional) For visualizing computation graphs: `Graphviz <https://graphviz.org/download/>`_ (Linux users can use ``apt/yum install graphviz`` or ``conda install -c anaconda python-graphviz``)

.. code-block:: bash

    pip install TorchOpt

You can also build the shared libraries from source:

.. code-block:: bash

    git clone git@github.com:metaopt/TorchOpt.git
    cd TorchOpt
    python setup.py build_from_source

The Team
--------

TorchOpt is developed by Jie Ren, Xidong Feng, Bo Liu, `Luo Mai <https://luomai.github.io/>`_ and `Yaodong Yang <https://www.yangyaodong.com/>`_.

Support
-------

If you are having issues, please let us know by filing an issue on our
`issue tracker <https://github.com/metaopt/TorchOpt/issues>`_.


License
-------

TorchOpt is licensed under the Apache 2.0 License.
\ No newline at end of file
diff --git a/examples/L2R/helper/argument.py b/examples/L2R/helper/argument.py
index 22ba29e8..a44095e0 100644
--- a/examples/L2R/helper/argument.py
+++ b/examples/L2R/helper/argument.py
@@ -1,23 +1,39 @@
 import argparse
+
 import torch
+

 def parse_args():
-    parser = argparse.ArgumentParser([],description='L2R')
+    parser = argparse.ArgumentParser([], description='L2R')
     parser.add_argument('--seed', type=int, default=42)
     parser.add_argument('--epoch', type=int, default=30, help='Training Epoch')
     parser.add_argument('--lr', type=float, default=1e-3, help='learning rate')
-    parser.add_argument('--pos_ratio', type=float, default=0.995, help='Ratio of positive examples in training')
-    parser.add_argument('--ntest', type=int, default=500, help='Number of testing examples')
-    parser.add_argument('--ntrain', type=int, default=5000, help='Number of testing examples')
-    parser.add_argument('--nval', type=int, default=10, help='Number of valid examples')
-    parser.add_argument('--batch_size', type=int, default=100, help='Batch size')
-
+    parser.add_argument('--pos_ratio',
+                        type=float,
+                        default=0.995,
+                        help='Ratio of positive examples in training')
+    parser.add_argument('--ntest',
+                        type=int,
+                        default=500,
+                        help='Number of testing examples')
+    parser.add_argument('--ntrain',
+                        type=int,
+                        default=5000,
+                        help='Number of training examples')
+    parser.add_argument('--nval',
+                        type=int,
+                        default=10,
+                        help='Number of validation examples')
+    parser.add_argument('--batch_size',
+                        type=int,
+                        default=100,
+                        help='Batch size')
+
     ### For baseline
     parser.add_argument('--algo', type=str, default='both')
-
     args = parser.parse_args()  # use the GPU if available
-    return args
\ No newline at end of file
+    return args
diff --git a/examples/L2R/helper/model.py b/examples/L2R/helper/model.py
index ee295864..469b1c97 100644
--- a/examples/L2R/helper/model.py
+++ b/examples/L2R/helper/model.py
@@ -34,36 +34,44 @@
 import torch
 import torch.nn as nn

+
 class LeNet5(nn.Module):
     def __init__(self, args):
         super(LeNet5, self).__init__()
-        self.model = nn.Sequential(nn.Conv2d(1, 16, 5), nn.ReLU(), nn.MaxPool2d(2), nn.Conv2d(16, 32, 5), nn.ReLU(), nn.MaxPool2d(2), nn.Flatten(), nn.Linear(512, 128), nn.ReLU(), nn.Linear(128, 1), nn.Sigmoid())
+        self.model = nn.Sequential(nn.Conv2d(1, 16, 5), nn.ReLU(),
+                                   nn.MaxPool2d(2), nn.Conv2d(16, 32, 5),
+                                   nn.ReLU(), nn.MaxPool2d(2), nn.Flatten(),
+                                   nn.Linear(512, 128), nn.ReLU(),
+                                   nn.Linear(128, 1), nn.Sigmoid())
         self.args = args
-        self.meta_weights = torch.zeros(self.args.batch_size, requires_grad=True).to(self.args.device)
+        self.meta_weights = torch.zeros(self.args.batch_size,
+                                        requires_grad=True).to(
+                                            self.args.device)
         self.criterion =
nn.BCELoss() - + def forward(self, x): return self.model(x).squeeze(dim=-1) - + def reset_meta(self, size): - self.meta_weights = torch.zeros(size, requires_grad=True).to(self.args.device) - + self.meta_weights = torch.zeros(size, requires_grad=True).to( + self.args.device) + def normalise(self): self.meta_weights = self.meta_weights.detach() weights_sum = torch.sum(self.meta_weights) weights_sum = weights_sum + 1 if weights_sum == 0 else weights_sum self.meta_weights /= weights_sum - + def inner_loss(self, train_x, train_y): result = self.forward(train_x) - + # manually implement bce_loss to make the loss differentiable w.r.t self.meta_weights - loss = - (train_y * torch.log(result + 1e-10) + (1-train_y) * torch.log(1 - result + 1e-10)) + loss = -(train_y * torch.log(result + 1e-10) + + (1 - train_y) * torch.log(1 - result + 1e-10)) weighted_loss = torch.sum(self.meta_weights * loss) return weighted_loss - + def outer_loss(self, valid_x, valid_y): result = self.forward(valid_x) loss = self.criterion(result, valid_y) return loss - diff --git a/examples/L2R/helper/utils.py b/examples/L2R/helper/utils.py index 1720e4cd..dece9938 100644 --- a/examples/L2R/helper/utils.py +++ b/examples/L2R/helper/utils.py @@ -16,12 +16,13 @@ # https://github.com/uber-research/learning-to-reweight-examples import random -import torch + import numpy as np -from torch.utils.data import TensorDataset import seaborn as sns +import torch +from torch.utils.data import TensorDataset + - def get_imbalance_dataset(mnist_train, mnist_test, pos_ratio=0.9, @@ -30,15 +31,15 @@ def get_imbalance_dataset(mnist_train, ntest=500, class_0=4, class_1=9): - + ratio = 1 - pos_ratio ratio_test = 0.5 # In training, we have 10% 4 and 90% 9. # In testing, we have 50% 4 and 50% 9. - x_train = mnist_train.train_data.numpy()/255.0 + x_train = mnist_train.train_data.numpy() / 255.0 y_train = mnist_train.train_labels.numpy() - x_test = mnist_test.test_data.numpy()/255.0 + x_test = mnist_test.test_data.numpy() / 255.0 y_test = mnist_test.test_labels.numpy() x_train_0 = x_train[y_train == class_0] x_test_0 = x_test[y_test == class_0] @@ -51,15 +52,17 @@ def get_imbalance_dataset(mnist_train, nval_small_neg = int(np.floor(nval * ratio_test)) ntrain_small_neg = int(np.floor(ntrain * ratio)) - nval_small_neg - x_val_0 = x_train_0[:nval_small_neg] # 450 4 in validation. - x_train_0 = x_train_0[nval_small_neg:nval_small_neg + ntrain_small_neg] # 500 4 in training. + x_val_0 = x_train_0[:nval_small_neg] # 450 4 in validation. + x_train_0 = x_train_0[nval_small_neg:nval_small_neg + + ntrain_small_neg] # 500 4 in training. print('Number of train negative classes', ntrain_small_neg) print('Number of val negative classes', nval_small_neg) idx = np.arange(x_test_0.shape[0]) np.random.shuffle(idx) - x_test_0 = x_test_0[:int(np.floor(ntest * ratio_test))] # 450 4 in testing. + x_test_0 = x_test_0[:int(np.floor(ntest * + ratio_test))] # 450 4 in testing. x_train_1 = x_train[y_train == class_1] x_test_1 = x_test[y_test == class_1] @@ -72,25 +75,34 @@ def get_imbalance_dataset(mnist_train, nvalsmall_pos = int(np.floor(nval * (1 - ratio_test))) ntrainsmall_pos = int(np.floor(ntrain * (1 - ratio))) - nvalsmall_pos - x_val_1 = x_train_1[:nvalsmall_pos] # 50 9 in validation. - x_train_1 = x_train_1[nvalsmall_pos:nvalsmall_pos + ntrainsmall_pos] # 4500 9 in training. + x_val_1 = x_train_1[:nvalsmall_pos] # 50 9 in validation. + x_train_1 = x_train_1[nvalsmall_pos:nvalsmall_pos + + ntrainsmall_pos] # 4500 9 in training. 
idx = np.arange(x_test_1.shape[0]) np.random.shuffle(idx) x_test_1 = x_test_1[idx] - x_test_1 = x_test_1[:int(np.floor(ntest * (1 - ratio_test)))] # 500 9 in testing. + x_test_1 = x_test_1[:int(np.floor(ntest * + (1 - ratio_test)))] # 500 9 in testing. print('Number of train positive classes', ntrainsmall_pos) print('Number of val positive classes', nvalsmall_pos) - y_train_subset = np.concatenate([np.zeros([x_train_0.shape[0]]), np.ones([x_train_1.shape[0]])]) - y_val_subset = np.concatenate([np.zeros([x_val_0.shape[0]]), np.ones([x_val_1.shape[0]])]) - y_test_subset = np.concatenate([np.zeros([x_test_0.shape[0]]), np.ones([x_test_1.shape[0]])]) + y_train_subset = np.concatenate( + [np.zeros([x_train_0.shape[0]]), + np.ones([x_train_1.shape[0]])]) + y_val_subset = np.concatenate( + [np.zeros([x_val_0.shape[0]]), + np.ones([x_val_1.shape[0]])]) + y_test_subset = np.concatenate( + [np.zeros([x_test_0.shape[0]]), + np.ones([x_test_1.shape[0]])]) y_train_pos_subset = np.ones([x_train_1.shape[0]]) y_train_neg_subset = np.zeros([x_train_0.shape[0]]) - x_train_subset = np.concatenate([x_train_0, x_train_1], axis=0)[:, None, :, :] + x_train_subset = np.concatenate([x_train_0, x_train_1], axis=0)[:, + None, :, :] x_val_subset = np.concatenate([x_val_0, x_val_1], axis=0)[:, None, :, :] x_test_subset = np.concatenate([x_test_0, x_test_1], axis=0)[:, None, :, :] @@ -113,12 +125,19 @@ def get_imbalance_dataset(mnist_train, x_test_subset = x_test_subset[idx].astype(np.float32) y_test_subset = y_test_subset[idx].astype(np.float32) - x_train_subset, y_train_subset, x_val_subset, y_val_subset, x_test_subset, y_test_subset = torch.tensor(x_train_subset), torch.tensor(y_train_subset), torch.tensor(x_val_subset), torch.tensor(y_val_subset), torch.tensor(x_test_subset), torch.tensor(y_test_subset) - - train_set, val_set, test_set = TensorDataset(x_train_subset, y_train_subset), TensorDataset(x_val_subset, y_val_subset), TensorDataset(x_test_subset, y_test_subset) + x_train_subset, y_train_subset, x_val_subset, y_val_subset, x_test_subset, y_test_subset = torch.tensor( + x_train_subset), torch.tensor(y_train_subset), torch.tensor( + x_val_subset), torch.tensor(y_val_subset), torch.tensor( + x_test_subset), torch.tensor(y_test_subset) + + train_set, val_set, test_set = TensorDataset( + x_train_subset, y_train_subset), TensorDataset( + x_val_subset, y_val_subset), TensorDataset(x_test_subset, + y_test_subset) return train_set, val_set, test_set + def set_seed(seed, cudnn=True): """ Seed everything we can! @@ -131,16 +150,18 @@ def set_seed(seed, cudnn=True): torch.random.manual_seed(seed) torch.cuda.manual_seed(seed) # note: the below slows down the code but makes it reproducible - torch.cuda.manual_seed_all(seed) # Sets the seed for generating random numbers on all GPUs. It’s safe to call this function if CUDA is not available; in that case, it is silently ignored. + torch.cuda.manual_seed_all( + seed + ) # Sets the seed for generating random numbers on all GPUs. It’s safe to call this function if CUDA is not available; in that case, it is silently ignored. 
if cudnn: torch.backends.cudnn.deterministic = True torch.backends.cudnn.benchmark = False - - + + def plot(baseline, l2r): - import seaborn as sns import matplotlib.pyplot as plt import numpy as np + import seaborn as sns sns.set(style='darkgrid') sns.set_theme(style="darkgrid") plt.plot(baseline, label='baseline') @@ -149,4 +170,4 @@ def plot(baseline, l2r): plt.ylabel('Test acc') plt.xlabel('Epoch') plt.title('Comparison between Baseline and L2R') - plt.savefig('./result.png') \ No newline at end of file + plt.savefig('./result.png') diff --git a/examples/L2R/train_l2r.py b/examples/L2R/train_l2r.py index a8466de3..3cc2a018 100644 --- a/examples/L2R/train_l2r.py +++ b/examples/L2R/train_l2r.py @@ -27,24 +27,23 @@ # # -import numpy as np +import json import os +import time -from helper.model import LeNet5 -from helper.argument import parse_args -from helper.utils import set_seed, get_imbalance_dataset, plot -from torch.utils.tensorboard import SummaryWriter +import numpy as np import torch import torch.nn as nn -from torchvision.datasets import MNIST -from torch.utils.data import DataLoader -import json +from helper.argument import parse_args +from helper.model import LeNet5 +from helper.utils import get_imbalance_dataset, plot, set_seed from torch import device +from torch.utils.data import DataLoader +from torch.utils.tensorboard import SummaryWriter +from torchvision.datasets import MNIST import TorchOpt -import time - def run_baseline(args, mnist_train, mnist_test): print('Run Baseline') @@ -64,14 +63,24 @@ def run_baseline(args, mnist_train, mnist_test): args.device = torch.device( "cuda:0" if torch.cuda.is_available() else "cpu") - train_set, val_set, test_set = get_imbalance_dataset( - mnist_train, mnist_test, pos_ratio=pos_ratio, ntrain=ntrain, nval=nval, ntest=ntest) - train_loader = DataLoader( - train_set, batch_size=args.batch_size, shuffle=True, num_workers=4) - valid_loader = DataLoader( - val_set, batch_size=args.batch_size, shuffle=True, num_workers=1) - test_loader = DataLoader( - test_set, batch_size=args.batch_size, shuffle=True, num_workers=1) + train_set, val_set, test_set = get_imbalance_dataset(mnist_train, + mnist_test, + pos_ratio=pos_ratio, + ntrain=ntrain, + nval=nval, + ntest=ntest) + train_loader = DataLoader(train_set, + batch_size=args.batch_size, + shuffle=True, + num_workers=4) + valid_loader = DataLoader(val_set, + batch_size=args.batch_size, + shuffle=True, + num_workers=1) + test_loader = DataLoader(test_set, + batch_size=args.batch_size, + shuffle=True, + num_workers=1) model = LeNet5(args).to(args.device) model_optimiser = torch.optim.Adam(model.parameters(), lr=args.lr) @@ -82,8 +91,8 @@ def run_baseline(args, mnist_train, mnist_test): for _epoch in range(epoch): model.train() for idx, (train_x, train_label) in enumerate(train_loader): - train_x, train_label = train_x.to( - args.device), train_label.to(args.device) + train_x, train_label = train_x.to(args.device), train_label.to( + args.device) outer_loss = model.outer_loss(train_x, train_label) model_optimiser.zero_grad() @@ -95,12 +104,10 @@ def run_baseline(args, mnist_train, mnist_test): if step % 10 == 0 and step > 0: running_train_mean = np.mean(np.array(running_train_loss)) - print( - "EPOCH: {}, BATCH: {}, LOSS: {}".format( - _epoch, idx, running_train_mean) - ) - writer.add_scalar('running_train_loss', - running_train_mean, step) + print("EPOCH: {}, BATCH: {}, LOSS: {}".format( + _epoch, idx, running_train_mean)) + writer.add_scalar('running_train_loss', running_train_mean, + step) 
running_train_loss = [] step += 1 @@ -114,10 +121,8 @@ def run_baseline(args, mnist_train, mnist_test): writer.add_scalar('train_acc', train_acc, _epoch) writer.add_scalar('test_acc', test_acc, _epoch) test_acc_result.append(test_acc) - print( - "EPOCH: {}, TRAIN_ACC: {}, TEST_ACC: {}".format( - _epoch, train_acc, test_acc) - ) + print("EPOCH: {}, TRAIN_ACC: {}, TEST_ACC: {}".format( + _epoch, train_acc, test_acc)) return test_acc_result @@ -139,14 +144,24 @@ def run_L2R(args, mnist_train, mnist_test): args.device = torch.device( "cuda:0" if torch.cuda.is_available() else "cpu") - train_set, val_set, test_set = get_imbalance_dataset( - mnist_train, mnist_test, pos_ratio=pos_ratio, ntrain=ntrain, nval=nval, ntest=ntest) - train_loader = DataLoader( - train_set, batch_size=args.batch_size, shuffle=True, num_workers=2) - valid_loader = DataLoader( - val_set, batch_size=args.batch_size, shuffle=True, num_workers=1) - test_loader = DataLoader( - test_set, batch_size=args.batch_size, shuffle=True, num_workers=1) + train_set, val_set, test_set = get_imbalance_dataset(mnist_train, + mnist_test, + pos_ratio=pos_ratio, + ntrain=ntrain, + nval=nval, + ntest=ntest) + train_loader = DataLoader(train_set, + batch_size=args.batch_size, + shuffle=True, + num_workers=2) + valid_loader = DataLoader(val_set, + batch_size=args.batch_size, + shuffle=True, + num_workers=1) + test_loader = DataLoader(test_set, + batch_size=args.batch_size, + shuffle=True, + num_workers=1) model = LeNet5(args).to(args.device) model_optimiser = TorchOpt.MetaSGD(model, lr=args.lr) real_model_optimiser = torch.optim.Adam(model.parameters(), lr=args.lr) @@ -165,8 +180,9 @@ def run_L2R(args, mnist_train, mnist_test): except: valid = iter(valid_loader) valid_x, valid_label = valid.next() - train_x, train_label, valid_x, valid_label = train_x.to(args.device), train_label.to( - args.device), valid_x.to(args.device), valid_label.to(args.device) + train_x, train_label, valid_x, valid_label = train_x.to( + args.device), train_label.to(args.device), valid_x.to( + args.device), valid_label.to(args.device) # reset meta-parameter weights model.reset_meta(size=train_x.size(0)) @@ -208,15 +224,15 @@ def run_L2R(args, mnist_train, mnist_test): running_valid_mean = np.mean(np.array(running_valid_loss)) running_train_mean = np.mean(np.array(running_train_loss)) print( - "EPOCH: {}, BATCH: {}, WEIGHTED_TRAIN_LOSS: {}, VALID_LOSS: {}".format( - _epoch, idx, running_train_mean, running_valid_mean) - ) + "EPOCH: {}, BATCH: {}, WEIGHTED_TRAIN_LOSS: {}, VALID_LOSS: {}" + .format(_epoch, idx, running_train_mean, + running_valid_mean)) running_valid_loss = [] running_train_loss = [] - writer.add_scalar('running_valid_loss', - running_valid_mean, step) - writer.add_scalar('running_train_loss', - running_train_mean, step) + writer.add_scalar('running_valid_loss', running_valid_mean, + step) + writer.add_scalar('running_train_loss', running_train_mean, + step) step += 1 @@ -229,10 +245,8 @@ def run_L2R(args, mnist_train, mnist_test): writer.add_scalar('train_acc', train_acc, _epoch) writer.add_scalar('test_acc', test_acc, _epoch) test_acc_result.append(test_acc) - print( - "EPOCH: {}, TRAIN_ACC: {}, TEST_ACC: {}".format( - _epoch, train_acc, test_acc) - ) + print("EPOCH: {}, TRAIN_ACC: {}, TEST_ACC: {}".format( + _epoch, train_acc, test_acc)) return test_acc_result diff --git a/examples/LOLA/helper/agent.py b/examples/LOLA/helper/agent.py index 96549228..1ae36688 100755 --- a/examples/LOLA/helper/agent.py +++ b/examples/LOLA/helper/agent.py @@ -3,14 +3,15 @@ 
 import torch
 import torch.nn as nn
+
 import TorchOpt


 class theta_model(nn.Module):
     def __init__(self, theta):
         super().__init__()
-        self.theta = nn.Parameter(torch.tensor(
-            theta.detach(), requires_grad=True))
+        self.theta = nn.Parameter(
+            torch.tensor(theta.detach(), requires_grad=True))


 class Agent():
@@ -19,18 +20,18 @@
     def __init__(self, args):
         self.args = args
         # init theta and its optimizer
         self.theta = nn.Parameter(torch.zeros(5, requires_grad=True))
-        self.theta_optimizer = torch.optim.Adam((self.theta,), lr=args.lr_out)
+        self.theta_optimizer = torch.optim.Adam((self.theta, ), lr=args.lr_out)

         # init values and its optimizer
         self.values = nn.Parameter(torch.zeros(5, requires_grad=True))
-        self.value_optimizer = torch.optim.Adam((self.values,), lr=args.lr_v)
+        self.value_optimizer = torch.optim.Adam((self.values, ), lr=args.lr_v)

         self.set_virtual()

     def set_virtual(self):
         self.virtual_theta = theta_model(self.theta)
-        self.virtual_optimiser = TorchOpt.MetaSGD(
-            self.virtual_theta, lr=self.args.lr_in)
+        self.virtual_optimiser = TorchOpt.MetaSGD(self.virtual_theta,
+                                                  lr=self.args.lr_in)

     def value_update(self, loss):
         self.value_optimizer.zero_grad()
diff --git a/examples/LOLA/helper/argument.py b/examples/LOLA/helper/argument.py
index 96a8c8bd..acd50a52 100755
--- a/examples/LOLA/helper/argument.py
+++ b/examples/LOLA/helper/argument.py
@@ -1,19 +1,44 @@
 import argparse
+

 def parse_args():
-    parser = argparse.ArgumentParser([],description='LOLA')
+    parser = argparse.ArgumentParser([], description='LOLA')
     parser.add_argument('--seed', type=int, default=6666)
-    parser.add_argument('--lr_in', type=float, default=0.3, help='Inner Learning rate')
+    parser.add_argument('--lr_in',
+                        type=float,
+                        default=0.3,
+                        help='Inner learning rate')

-    parser.add_argument('--lr_out', type=float, default=0.2, help='Outer learning rate')
-    parser.add_argument('--lr_v', type=float, default=0.1, help='Learning rate of value function')
-    parser.add_argument('--gamma', type=float, default=0.96, help='Discount factor')
-    parser.add_argument('--n_update', type=int, default=100, help='Number of updates')
-    parser.add_argument('--n_lookaheads', type=int, default=1, help='Number of updates')
-    parser.add_argument('--len_rollout', type=int, default=150, help='Length of IPD')
-    parser.add_argument('--batch_size', type=int, default=1024, help='Natch size')
+    parser.add_argument('--lr_out',
+                        type=float,
+                        default=0.2,
+                        help='Outer learning rate')
+    parser.add_argument('--lr_v',
+                        type=float,
+                        default=0.1,
+                        help='Learning rate of value function')
+    parser.add_argument('--gamma',
+                        type=float,
+                        default=0.96,
+                        help='Discount factor')
+    parser.add_argument('--n_update',
+                        type=int,
+                        default=100,
+                        help='Number of updates')
+    parser.add_argument('--n_lookaheads',
+                        type=int,
+                        default=1,
+                        help='Number of lookaheads')
+    parser.add_argument('--len_rollout',
+                        type=int,
+                        default=150,
+                        help='Length of IPD')
+    parser.add_argument('--batch_size',
+                        type=int,
+                        default=1024,
+                        help='Batch size')
     parser.add_argument('--use_baseline', action='store_false', default=True)
     args = parser.parse_args()
-    return args
\ No newline at end of file
+    return args
diff --git a/examples/LOLA/helper/env.py b/examples/LOLA/helper/env.py
index 6e97efc6..8ac392c8 100755
--- a/examples/LOLA/helper/env.py
+++ b/examples/LOLA/helper/env.py
@@ -1,15 +1,10 @@
 # This file is modified from:
 # https://github.com/alexis-jacq/LOLA_DiCE
-
 import gym
 import numpy as np
-
 from gym.spaces import Discrete, Tuple
-import gym
-import numpy as np

 class
OneHot(gym.Space): """ @@ -36,7 +31,8 @@ def __repr__(self): def __eq__(self, other): return self.n == other.n - + + class IPD(gym.Env): """ A two-agent vectorized environment. @@ -50,15 +46,13 @@ class IPD(gym.Env): def __init__(self, max_steps, batch_size=1): self.max_steps = max_steps self.batch_size = batch_size - self.payout_mat = np.array([[-2,0],[-3,-1]]) - self.states = np.array([[1,2],[3,4]]) - - self.action_space = Tuple([ - Discrete(self.NUM_ACTIONS) for _ in range(self.NUM_AGENTS) - ]) - self.observation_space = Tuple([ - OneHot(self.NUM_STATES) for _ in range(self.NUM_AGENTS) - ]) + self.payout_mat = np.array([[-2, 0], [-3, -1]]) + self.states = np.array([[1, 2], [3, 4]]) + + self.action_space = Tuple( + [Discrete(self.NUM_ACTIONS) for _ in range(self.NUM_AGENTS)]) + self.observation_space = Tuple( + [OneHot(self.NUM_STATES) for _ in range(self.NUM_AGENTS)]) self.available_actions = [ np.ones((batch_size, self.NUM_ACTIONS), dtype=int) for _ in range(self.NUM_AGENTS) @@ -85,4 +79,4 @@ def step(self, action): reward = [r0, r1] done = (self.step_count == self.max_steps) info = [{'available_actions': aa} for aa in self.available_actions] - return observation, reward, done, info \ No newline at end of file + return observation, reward, done, info diff --git a/examples/LOLA/helper/utils.py b/examples/LOLA/helper/utils.py index 47f6b0f2..86421034 100755 --- a/examples/LOLA/helper/utils.py +++ b/examples/LOLA/helper/utils.py @@ -1,10 +1,11 @@ # This file is modified from: # https://github.com/alexis-jacq/LOLA_DiCE -import torch import numpy as np +import torch from torch.distributions import Bernoulli + # evaluate the policy def step(ipd, theta1, theta2, values1, values2, args): # just to evaluate progress: @@ -14,16 +15,18 @@ def step(ipd, theta1, theta2, values1, values2, args): for t in range(args.len_rollout): a1, lp1, v1 = act(s1, theta1, values1) a2, lp2, v2 = act(s2, theta2, values2) - (s1, s2), (r1, r2),_,_ = ipd.step((a1, a2)) + (s1, s2), (r1, r2), _, _ = ipd.step((a1, a2)) # cumulate scores - score1 += np.mean(r1)/float(args.len_rollout) - score2 += np.mean(r2)/float(args.len_rollout) + score1 += np.mean(r1) / float(args.len_rollout) + score2 += np.mean(r2) / float(args.len_rollout) return (score1, score2) + # dice operator def magic_box(x): return torch.exp(x - x.detach()) + # replay buffer class Memory(): def __init__(self, args): @@ -46,7 +49,9 @@ def dice_objective(self, use_baseline=True): rewards = torch.stack(self.rewards, dim=1) # apply discount: - cum_discount = torch.cumprod(self.args.gamma * torch.ones(*rewards.size()), dim=1)/self.args.gamma + cum_discount = torch.cumprod( + self.args.gamma * torch.ones(*rewards.size()), + dim=1) / self.args.gamma discounted_rewards = rewards * cum_discount discounted_values = values * cum_discount @@ -57,28 +62,34 @@ def dice_objective(self, use_baseline=True): stochastic_nodes = self_logprobs + other_logprobs # dice objective: - dice_objective = torch.mean(torch.sum(magic_box(dependencies) * discounted_rewards, dim=1)) + dice_objective = torch.mean( + torch.sum(magic_box(dependencies) * discounted_rewards, dim=1)) if use_baseline: # variance_reduction: - baseline_term = torch.mean(torch.sum((1 - magic_box(stochastic_nodes)) * discounted_values, dim=1)) + baseline_term = torch.mean( + torch.sum( + (1 - magic_box(stochastic_nodes)) * discounted_values, + dim=1)) dice_objective = dice_objective + baseline_term - return -dice_objective # want to minimize -objective + return -dice_objective # want to minimize -objective def 
value_loss(self): values = torch.stack(self.values, dim=1) rewards = torch.stack(self.rewards, dim=1) return torch.mean((rewards - values)**2) + def act(batch_states, theta, values): batch_states = torch.from_numpy(batch_states).long() probs = torch.sigmoid(theta)[batch_states] - m = Bernoulli(1-probs) + m = Bernoulli(1 - probs) actions = m.sample() log_probs_actions = m.log_prob(actions) return actions.numpy().astype(int), log_probs_actions, values[batch_states] + def sample(ipd, policy, value, args): theta1, theta2 = policy value1, value2 = value @@ -88,7 +99,7 @@ def sample(ipd, policy, value, args): for t in range(args.len_rollout): a1, lp1, v1 = act(s1, theta1, value1) a2, lp2, v2 = act(s2, theta2, value2) - (s1, s2), (r1, r2),_,_ = ipd.step((a1, a2)) + (s1, s2), (r1, r2), _, _ = ipd.step((a1, a2)) memory_agent1.add(lp1, lp2, v1, torch.from_numpy(r1).float()) memory_agent2.add(lp2, lp1, v2, torch.from_numpy(r2).float()) return memory_agent1, memory_agent2 diff --git a/examples/LOLA/lola_dice.py b/examples/LOLA/lola_dice.py index ed2f3a4f..1eee2ae7 100755 --- a/examples/LOLA/lola_dice.py +++ b/examples/LOLA/lola_dice.py @@ -15,22 +15,21 @@ # This file is modified from: # https://github.com/alexis-jacq/LOLA_DiCE -import numpy as np +from copy import deepcopy + import matplotlib.pyplot as plt +import numpy as np import torch import torch.nn as nn -from torch.distributions import Bernoulli -from copy import deepcopy - -from helper.env import IPD -from helper.argument import parse_args from helper.agent import Agent -from helper.utils import step, sample - -import numpy as np +from helper.argument import parse_args +from helper.env import IPD +from helper.utils import sample, step +from torch.distributions import Bernoulli import TorchOpt + def main(args): ipd = IPD(args.len_rollout, args.batch_size) agent1, agent2 = Agent(args), Agent(args) @@ -38,63 +37,77 @@ def main(args): n_lookaheads = args.n_lookaheads joint_scores = [] print("start iterations with", n_lookaheads, "lookaheads:") - + for update in range(args.n_update): # reset virtual update agent1.set_virtual() agent2.set_virtual() - + # agent 2 assumes that agent 1 conducts n-step lookahead for _ in range(n_lookaheads): - memory1, memory2 = sample(ipd, [agent1.virtual_theta.theta, agent2.theta], [agent1.values, agent2.values], args) + memory1, memory2 = sample( + ipd, [agent1.virtual_theta.theta, agent2.theta], + [agent1.values, agent2.values], args) inner_loss = memory1.dice_objective(use_baseline=args.use_baseline) agent1.virtual_optimiser.step(inner_loss) - + # agent 1 assumes that agent 2 conducts n-step lookahead for _ in range(n_lookaheads): - memory1, memory2 = sample(ipd, [agent1.theta, agent2.virtual_theta.theta], [agent1.values, agent2.values], args) + memory1, memory2 = sample( + ipd, [agent1.theta, agent2.virtual_theta.theta], + [agent1.values, agent2.values], args) inner_loss = memory2.dice_objective(use_baseline=args.use_baseline) agent2.virtual_optimiser.step(inner_loss) - + # update agent 1 - memory1, memory2 = sample(ipd, [agent1.theta, agent2.virtual_theta.theta], [agent1.values, agent2.values], args) + memory1, memory2 = sample(ipd, + [agent1.theta, agent2.virtual_theta.theta], + [agent1.values, agent2.values], args) outer_loss = memory1.dice_objective(use_baseline=args.use_baseline) agent1.theta_optimizer.zero_grad() outer_loss.backward(retain_graph=True) agent1.theta_optimizer.step() - + # update agent 1 value function v_loss = memory1.value_loss() agent1.value_update(v_loss) - + # update agent 2 - memory1, 
memory2 = sample(ipd, [agent1.virtual_theta.theta, agent2.theta], [agent1.values, agent2.values], args)
+    memory1, memory2 = sample(ipd,
+                              [agent1.virtual_theta.theta, agent2.theta],
+                              [agent1.values, agent2.values], args)
     outer_loss = memory2.dice_objective(use_baseline=args.use_baseline)
     agent2.theta_optimizer.zero_grad()
     outer_loss.backward(retain_graph=True)
     agent2.theta_optimizer.step()
-
+
     # update agent 2 value function
     v_loss = memory2.value_loss()
     agent2.value_update(v_loss)
-
+
     # evaluate progress:
-    score = step(ipd, agent1.theta, agent2.theta, agent1.values, agent2.values, args)
-    joint_scores.append(0.5*(score[0] + score[1]))
+    score = step(ipd, agent1.theta, agent2.theta, agent1.values,
+                 agent2.values, args)
+    joint_scores.append(0.5 * (score[0] + score[1]))

     # print
-    if update%10==0 :
+    if update % 10 == 0:
         p1 = [p.item() for p in torch.sigmoid(agent1.theta)]
         p2 = [p.item() for p in torch.sigmoid(agent2.theta)]
-        print('update', update, 'score (%.3f,%.3f)' % (score[0], score[1]) , 'policy (agent1) = {%.3f, %.3f, %.3f, %.3f, %.3f}' % (p1[0], p1[1], p1[2], p1[3], p1[4]),' (agent2) = {%.3f, %.3f, %.3f, %.3f, %.3f}' % (p2[0], p2[1], p2[2], p2[3], p2[4]))
-
+        print(
+            'update', update, 'score (%.3f,%.3f)' % (score[0], score[1]),
+            'policy (agent1) = {%.3f, %.3f, %.3f, %.3f, %.3f}' %
+            (p1[0], p1[1], p1[2], p1[3], p1[4]),
+            ' (agent2) = {%.3f, %.3f, %.3f, %.3f, %.3f}' %
+            (p2[0], p2[1], p2[2], p2[3], p2[4]))
+
     return joint_scores

-if __name__=="__main__":
+
+if __name__ == "__main__":
     args = parse_args()
     joint_score = dict()
     for nla in range(3):
         args.n_lookaheads = nla
         joint_score[nla] = main(args)
     np.save('result.npy', joint_score)
-
\ No newline at end of file
diff --git a/examples/LOLA/visualise.py b/examples/LOLA/visualise.py
index e84db9a2..da5ea0da 100755
--- a/examples/LOLA/visualise.py
+++ b/examples/LOLA/visualise.py
@@ -1,6 +1,7 @@
 import matplotlib.pyplot as plt
-import seaborn as sns
 import numpy as np
+import seaborn as sns
+

 def plot(file):
     data = np.load('result.npy', allow_pickle=True).tolist()
@@ -12,7 +13,8 @@ def plot(file):
     plt.xlabel('Iterations', fontsize=20)
     plt.ylabel('Joint score', fontsize=20)
     plt.savefig('./result.png')
-
+
+
 # plot progress:
-if __name__=="__main__":
-    plot('result.npy')
\ No newline at end of file
+if __name__ == "__main__":
+    plot('result.npy')
diff --git a/examples/MAML-RL/helpers/Tabular_mdp.py b/examples/MAML-RL/helpers/Tabular_mdp.py
index b788851e..b5786296 100644
--- a/examples/MAML-RL/helpers/Tabular_mdp.py
+++ b/examples/MAML-RL/helpers/Tabular_mdp.py
@@ -15,8 +15,8 @@
 # This file is modified from:
 # https://github.com/tristandeleu/pytorch-maml-rl

-import numpy as np
 import gym
+import numpy as np
 from gym import spaces
 from gym.utils import seeding
 from gym.wrappers.time_limit import TimeLimit
@@ -37,8 +37,12 @@ class TabularMDPEnv(gym.Env):
     Pieter Abbeel, "RL2: Fast Reinforcement Learning via Slow Reinforcement
     Learning", 2016 (https://arxiv.org/abs/1611.02779)
     """
-
-    def __init__(self, num_states, num_actions, max_episode_steps, seed, task={}):
+    def __init__(self,
+                 num_states,
+                 num_actions,
+                 max_episode_steps,
+                 seed,
+                 task={}):
         super(TabularMDPEnv, self).__init__()
         self.max_episode_steps = max_episode_steps
         self.num_states = num_states
@@ -47,13 +51,18 @@ def __init__(self, num_states, num_actions, max_episode_steps, seed, task={}):
         self.action_space = spaces.Discrete(num_actions)
         self.observation_space = spaces.Box(low=0.0,
                                             high=1.0,
-                                            shape=(num_states,),
+                                            shape=(num_states, ),
                                             dtype=np.float32)

         self._task = task
- self._transitions = task.get('transitions', - np.full((num_states, num_actions, num_states), 1.0 / num_states, dtype=np.float32)) - self._rewards_mean = task.get('rewards_mean', np.zeros((num_states, num_actions), dtype=np.float32)) + self._transitions = task.get( + 'transitions', + np.full((num_states, num_actions, num_states), + 1.0 / num_states, + dtype=np.float32)) + self._rewards_mean = task.get( + 'rewards_mean', + np.zeros((num_states, num_actions), dtype=np.float32)) self._state = 0 self._elapsed_steps = None @@ -65,10 +74,17 @@ def seed(self, seed=None): def sample_tasks(self, num_tasks): transitions = self.np_random.dirichlet(np.ones(self.num_states), - size=(num_tasks, self.num_states, self.num_actions)) - rewards_mean = self.np_random.normal(1.0, 1.0, size=(num_tasks, self.num_states, self.num_actions)) - tasks = [{'transitions': transition, 'rewards_mean': reward_mean} - for (transition, reward_mean) in zip(transitions, rewards_mean)] + size=(num_tasks, + self.num_states, + self.num_actions)) + rewards_mean = self.np_random.normal(1.0, + 1.0, + size=(num_tasks, self.num_states, + self.num_actions)) + tasks = [{ + 'transitions': transition, + 'rewards_mean': reward_mean + } for (transition, reward_mean) in zip(transitions, rewards_mean)] return tasks def reset_task(self, task): @@ -90,7 +106,9 @@ def step(self, action): mean = self._rewards_mean[self._state, action] reward = self.np_random.normal(mean, 1.0) - self._state = self.np_random.choice(self.num_states, p=self._transitions[self._state, action]) + self._state = self.np_random.choice(self.num_states, + p=self._transitions[self._state, + action]) observation = np.zeros(self.num_states, dtype=np.float32) observation[self._state] = 1.0 self._elapsed_steps += 1 diff --git a/examples/MAML-RL/helpers/__init__.py b/examples/MAML-RL/helpers/__init__.py index 2402e30f..a83c9eee 100644 --- a/examples/MAML-RL/helpers/__init__.py +++ b/examples/MAML-RL/helpers/__init__.py @@ -2,13 +2,11 @@ # https://github.com/tristandeleu/pytorch-maml-rl from gym.envs.registration import register -register( - 'TabularMDP-v0', - entry_point='helpers.Tabular_mdp:TabularMDPEnv', - kwargs={'num_states': 10, 'num_actions': 5, 'max_episode_steps':10, 'seed':1} - ) - - - - - \ No newline at end of file +register('TabularMDP-v0', + entry_point='helpers.Tabular_mdp:TabularMDPEnv', + kwargs={ + 'num_states': 10, + 'num_actions': 5, + 'max_episode_steps': 10, + 'seed': 1 + }) diff --git a/examples/MAML-RL/helpers/policy.py b/examples/MAML-RL/helpers/policy.py index a423fa44..0ef52c6a 100644 --- a/examples/MAML-RL/helpers/policy.py +++ b/examples/MAML-RL/helpers/policy.py @@ -11,15 +11,18 @@ class CategoricalMLPPolicy(nn.Module): `Categorical` distribution output. This policy network can be used on tasks with discrete action spaces (eg. `TabularMDPEnv`). 
""" - - def __init__(self, - input_size, - output_size, - ): + def __init__( + self, + input_size, + output_size, + ): super(CategoricalMLPPolicy, self).__init__() - self.torso = nn.Sequential(nn.Linear(input_size, 32), nn.ReLU(), - nn.Linear(32, 32), nn.ReLU(), - ) + self.torso = nn.Sequential( + nn.Linear(input_size, 32), + nn.ReLU(), + nn.Linear(32, 32), + nn.ReLU(), + ) self.policy_head = nn.Linear(32, output_size) self.value_head = nn.Linear(32, 1) diff --git a/examples/MAML-RL/run_MAML.py b/examples/MAML-RL/run_MAML.py index 6c58f97f..8d328f08 100644 --- a/examples/MAML-RL/run_MAML.py +++ b/examples/MAML-RL/run_MAML.py @@ -17,12 +17,12 @@ from typing import NamedTuple import gym +import numpy as np import torch import torch.optim as optim -import numpy as np +from helpers.policy import CategoricalMLPPolicy import TorchOpt -from helpers.policy import CategoricalMLPPolicy TASK_NUM = 40 TRAJ_NUM = 20 @@ -48,19 +48,12 @@ class Traj(NamedTuple): def sample_traj(env, task, policy): env.reset_task(task) - obs_buf = np.zeros( - shape=(TRAJ_LEN, TRAJ_NUM, STATE_DIM), - dtype=np.float32) - next_obs_buf = np.zeros( - shape=(TRAJ_LEN, TRAJ_NUM, STATE_DIM), - dtype=np.float32) - acs_buf = np.zeros( - shape=(TRAJ_LEN, TRAJ_NUM), - dtype=np.int8) - rews_buf = np.zeros(shape=(TRAJ_LEN, TRAJ_NUM), - dtype=np.float32) - gammas_buf = np.zeros(shape=(TRAJ_LEN, TRAJ_NUM), - dtype=np.float32) + obs_buf = np.zeros(shape=(TRAJ_LEN, TRAJ_NUM, STATE_DIM), dtype=np.float32) + next_obs_buf = np.zeros(shape=(TRAJ_LEN, TRAJ_NUM, STATE_DIM), + dtype=np.float32) + acs_buf = np.zeros(shape=(TRAJ_LEN, TRAJ_NUM), dtype=np.int8) + rews_buf = np.zeros(shape=(TRAJ_LEN, TRAJ_NUM), dtype=np.float32) + gammas_buf = np.zeros(shape=(TRAJ_LEN, TRAJ_NUM), dtype=np.float32) with torch.no_grad(): for batch in range(TRAJ_NUM): ob = env.reset() @@ -77,7 +70,11 @@ def sample_traj(env, task, policy): rews_buf[step][batch] = rew gammas_buf[step][batch] = done * GAMMA ob = next_ob - return Traj(obs=obs_buf, acs=acs_buf, next_obs=next_obs_buf, rews=rews_buf, gammas=gammas_buf) + return Traj(obs=obs_buf, + acs=acs_buf, + next_obs=next_obs_buf, + rews=rews_buf, + gammas=gammas_buf) def a2c_loss(traj, policy, value_coef): @@ -106,8 +103,12 @@ def evaluate(env, seed, task_num, policy): pre_reward_ls = [] post_reward_ls = [] inner_opt = TorchOpt.MetaSGD(policy, lr=0.5) - env = gym.make('TabularMDP-v0', - **dict(num_states=STATE_DIM, num_actions=ACTION_DIM, max_episode_steps=TRAJ_LEN, seed=args.seed)) + env = gym.make( + 'TabularMDP-v0', + **dict(num_states=STATE_DIM, + num_actions=ACTION_DIM, + max_episode_steps=TRAJ_LEN, + seed=args.seed)) tasks = env.sample_tasks(num_tasks=task_num) policy_state_dict = TorchOpt.extract_state_dict(policy) optim_state_dict = TorchOpt.extract_state_dict(inner_opt) @@ -133,11 +134,14 @@ def main(args): torch.manual_seed(args.seed) torch.cuda.manual_seed_all(args.seed) # Env - env = gym.make('TabularMDP-v0', - **dict(num_states=STATE_DIM, num_actions=ACTION_DIM, max_episode_steps=TRAJ_LEN, seed=args.seed)) + env = gym.make( + 'TabularMDP-v0', + **dict(num_states=STATE_DIM, + num_actions=ACTION_DIM, + max_episode_steps=TRAJ_LEN, + seed=args.seed)) # Policy - policy = CategoricalMLPPolicy(input_size=STATE_DIM, - output_size=ACTION_DIM) + policy = CategoricalMLPPolicy(input_size=STATE_DIM, output_size=ACTION_DIM) inner_opt = TorchOpt.MetaSGD(policy, lr=0.5) outer_opt = optim.Adam(policy.parameters(), lr=1e-3) train_pre_reward = [] @@ -177,7 +181,7 @@ def main(args): 
train_post_reward.append(sum(train_post_reward_ls) / TASK_NUM) test_pre_reward.append(sum(test_pre_reward_ls) / TASK_NUM) test_post_reward.append(sum(test_post_reward_ls) / TASK_NUM) - + print('Train_iters', i) print("train_pre_reward", sum(train_pre_reward_ls) / TASK_NUM) print("train_post_reward", sum(train_post_reward_ls) / TASK_NUM) @@ -186,8 +190,9 @@ def main(args): if __name__ == "__main__": - parser = argparse.ArgumentParser(description='Reinforcement learning with ' - 'Model-Agnostic Meta-Learning (MAML) - Train') + parser = argparse.ArgumentParser( + description='Reinforcement learning with ' + 'Model-Agnostic Meta-Learning (MAML) - Train') parser.add_argument('--seed', type=int, default=1, diff --git a/examples/MGRL/toy.py b/examples/MGRL/toy.py index 85327ac7..a27d177f 100644 --- a/examples/MGRL/toy.py +++ b/examples/MGRL/toy.py @@ -16,6 +16,7 @@ import torch from torch import nn from torch.nn import functional as F + import TorchOpt @@ -33,8 +34,8 @@ def get(): def rollout(trajectory, gamma): out = [trajectory[-1]] for i in reversed(range(9)): - out.append(trajectory[i] + gamma[i] * - out[-1].clone().detach_()) + out.append(trajectory[i] + + gamma[i] * out[-1].clone().detach_()) out.reverse() return torch.hstack(out).view(10, 1) diff --git a/examples/few-shot/maml-omniglot.py b/examples/few-shot/maml-omniglot.py index 9140bb45..b501a3f9 100644 --- a/examples/few-shot/maml-omniglot.py +++ b/examples/few-shot/maml-omniglot.py @@ -28,7 +28,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - """ This example shows how to use higher to do Model Agnostic Meta Learning (MAML) for few-shot Omniglot classification. @@ -40,19 +39,21 @@ https://github.com/bamos/HowToTrainYourMAMLPytorch """ -from support.omniglot_loaders import OmniglotNShot -import TorchOpt -import torch.optim as optim -import torch.nn.functional as F -from torch import nn -import torch -import matplotlib.pyplot as plt import argparse import time -import pandas as pd -import numpy as np import matplotlib as mpl +import matplotlib.pyplot as plt +import numpy as np +import pandas as pd +import torch +import torch.nn.functional as F +import torch.optim as optim +from support.omniglot_loaders import OmniglotNShot +from torch import nn + +import TorchOpt + mpl.use('Agg') plt.style.use('bmh') @@ -60,15 +61,18 @@ def main(): argparser = argparse.ArgumentParser() argparser.add_argument('--n_way', type=int, help='n way', default=5) - argparser.add_argument( - '--k_spt', type=int, help='k shot for support set', default=5) - argparser.add_argument( - '--k_qry', type=int, help='k shot for query set', default=15) - argparser.add_argument( - '--task_num', - type=int, - help='meta batch size, namely task num', - default=32) + argparser.add_argument('--k_spt', + type=int, + help='k shot for support set', + default=5) + argparser.add_argument('--k_qry', + type=int, + help='k shot for query set', + default=15) + argparser.add_argument('--task_num', + type=int, + help='meta batch size, namely task num', + default=32) argparser.add_argument('--seed', type=int, help='random seed', default=1) args = argparser.parse_args() @@ -96,21 +100,16 @@ def main(): # Before higher, models could *not* be created like this # and the parameters needed to be manually updated and copied # for the updates. 
- net = nn.Sequential( - nn.Conv2d(1, 64, 3), - nn.BatchNorm2d(64, momentum=1., affine=True), - nn.ReLU(inplace=False), - nn.MaxPool2d(2, 2), - nn.Conv2d(64, 64, 3), - nn.BatchNorm2d(64, momentum=1., affine=True), - nn.ReLU(inplace=False), - nn.MaxPool2d(2, 2), - nn.Conv2d(64, 64, 3), - nn.BatchNorm2d(64, momentum=1., affine=True), - nn.ReLU(inplace=False), - nn.MaxPool2d(2, 2), - nn.Flatten(), - nn.Linear(64, args.n_way)).to(device) + net = nn.Sequential(nn.Conv2d(1, 64, 3), + nn.BatchNorm2d(64, momentum=1., affine=True), + nn.ReLU(inplace=False), nn.MaxPool2d(2, 2), + nn.Conv2d(64, 64, 3), + nn.BatchNorm2d(64, momentum=1., affine=True), + nn.ReLU(inplace=False), nn.MaxPool2d(2, 2), + nn.Conv2d(64, 64, 3), + nn.BatchNorm2d(64, momentum=1., affine=True), + nn.ReLU(inplace=False), nn.MaxPool2d(2, 2), + nn.Flatten(), nn.Linear(64, args.n_way)).to(device) # We will use Adam to (meta-)optimize the initial parameters # to be adapted. @@ -166,8 +165,8 @@ def train(db, net, meta_opt, epoch, log): qry_logits = net(x_qry[i]) qry_loss = F.cross_entropy(qry_logits, y_qry[i]) qry_losses.append(qry_loss.detach()) - qry_acc = (qry_logits.argmax( - dim=1) == y_qry[i]).sum().item() / querysz + qry_acc = (qry_logits.argmax(dim=1) + == y_qry[i]).sum().item() / querysz qry_accs.append(qry_acc) # Update the model's meta-parameters to optimize the query @@ -233,11 +232,9 @@ def test(db, net, epoch, log): # The query loss and acc induced by these parameters. qry_logits = net(x_qry[i]).detach() - qry_loss = F.cross_entropy( - qry_logits, y_qry[i], reduction='none') + qry_loss = F.cross_entropy(qry_logits, y_qry[i], reduction='none') qry_losses.append(qry_loss.detach()) - qry_accs.append( - (qry_logits.argmax(dim=1) == y_qry[i]).detach()) + qry_accs.append((qry_logits.argmax(dim=1) == y_qry[i]).detach()) TorchOpt.recover_state_dict(net, net_state_dict) TorchOpt.recover_state_dict(inner_opt, optim_state_dict) diff --git a/examples/few-shot/support/omniglot_loaders.py b/examples/few-shot/support/omniglot_loaders.py index 9a49a0e5..95eba9ce 100644 --- a/examples/few-shot/support/omniglot_loaders.py +++ b/examples/few-shot/support/omniglot_loaders.py @@ -17,15 +17,15 @@ # https://github.com/dragen1860/MAML-Pytorch/blob/master/omniglot.py # https://github.com/dragen1860/MAML-Pytorch/blob/master/omniglotNShot.py -import torchvision.transforms as transforms -from PIL import Image -import numpy as np +import errno +import os +import os.path +import numpy as np import torch -import torch.utils.data as data -import os -import os.path -import errno +import torch.utils.data as data +import torchvision.transforms as transforms +from PIL import Image class Omniglot(data.Dataset): @@ -37,7 +37,6 @@ class Omniglot(data.Dataset): processed_folder = 'processed' training_file = 'training.pt' test_file = 'test.pt' - ''' The items are (filename,category). The index of all the categories can be found in self.idx_classes Args: @@ -46,8 +45,10 @@ class Omniglot(data.Dataset): - target_transform: how to transform the target - download: need to download the dataset ''' - - def __init__(self, root, transform=None, target_transform=None, + def __init__(self, + root, + transform=None, + target_transform=None, download=False): self.root = root self.transform = transform @@ -57,9 +58,11 @@ def __init__(self, root, transform=None, target_transform=None, if download: self.download() else: - raise RuntimeError('Dataset not found.' + ' You can use download=True to download it') + raise RuntimeError('Dataset not found.' 
+ + ' You can use download=True to download it') - self.all_items = find_classes(os.path.join(self.root, self.processed_folder)) + self.all_items = find_classes( + os.path.join(self.root, self.processed_folder)) self.idx_classes = index_classes(self.all_items) def __getitem__(self, index): @@ -82,9 +85,10 @@ def _check_exists(self): os.path.exists(os.path.join(self.root, self.processed_folder, "images_background")) def download(self): - from six.moves import urllib import zipfile + from six.moves import urllib + if self._check_exists(): return @@ -135,8 +139,15 @@ def index_classes(items): class OmniglotNShot: - - def __init__(self, root, batchsz, n_way, k_shot, k_query, imgsz, rng, device=None): + def __init__(self, + root, + batchsz, + n_way, + k_shot, + k_query, + imgsz, + rng, + device=None): """ Different from mnistNShot, the :param root: @@ -153,16 +164,18 @@ def __init__(self, root, batchsz, n_way, k_shot, k_query, imgsz, rng, device=Non if not os.path.isfile(os.path.join(root, 'omniglot.npy')): # if root/data.npy does not exist, just download it self.x = Omniglot( - root, download=True, - transform=transforms.Compose( - [lambda x: Image.open(x).convert('L'), - lambda x: x.resize((imgsz, imgsz)), - lambda x: np.reshape(x, (imgsz, imgsz, 1)), - lambda x: np.transpose(x, [2, 0, 1]), - lambda x: x/255.]), + root, + download=True, + transform=transforms.Compose([ + lambda x: Image.open(x).convert('L'), lambda x: x.resize( + (imgsz, imgsz)), + lambda x: np.reshape(x, (imgsz, imgsz, 1)), + lambda x: np.transpose(x, [2, 0, 1]), lambda x: x / 255. + ]), ) - temp = dict() # {label:img1, img2..., 20 imgs, label2: img1, img2,... in total, 1623 label} + temp = dict( + ) # {label:img1, img2..., 20 imgs, label2: img1, img2,... in total, 1623 label} for (img, label) in self.x: if label in temp.keys(): temp[label].append(img) @@ -170,11 +183,13 @@ def __init__(self, root, batchsz, n_way, k_shot, k_query, imgsz, rng, device=Non temp[label] = [img] self.x = [] - for label, imgs in temp.items(): # labels info deserted , each label contains 20imgs + for label, imgs in temp.items( + ): # labels info deserted , each label contains 20imgs self.x.append(np.array(imgs)) # as different class may have different number of imgs - self.x = np.array(self.x).astype(np.float) # [[20 imgs],..., 1623 classes in total] + self.x = np.array(self.x).astype( + np.float) # [[20 imgs],..., 1623 classes in total] # each character contains 20 imgs print('data shape:', self.x.shape) # [1623, 20, 84, 84, 1] temp = [] # Free memory @@ -197,15 +212,21 @@ def __init__(self, root, batchsz, n_way, k_shot, k_query, imgsz, rng, device=Non self.n_way = n_way # n way self.k_shot = k_shot # k shot self.k_query = k_query # k query - assert (k_shot + k_query) <=20 + assert (k_shot + k_query) <= 20 # save pointer of current read batch in total cache self.indexes = {"train": 0, "test": 0} - self.datasets = {"train": self.x_train, "test": self.x_test} # original data cached + self.datasets = { + "train": self.x_train, + "test": self.x_test + } # original data cached print("DB: train", self.x_train.shape, "test", self.x_test.shape) - self.datasets_cache = {"train": self.load_data_cache(self.datasets["train"]), # current epoch data cached - "test": self.load_data_cache(self.datasets["test"])} + self.datasets_cache = { + "train": self.load_data_cache( + self.datasets["train"]), # current epoch data cached + "test": self.load_data_cache(self.datasets["test"]) + } def normalization(self): """ @@ -244,25 +265,33 @@ def load_data_cache(self, 
data_pack): for i in range(self.batchsz): # one batch means one set x_spt, y_spt, x_qry, y_qry = [], [], [], [] - selected_cls = self.rng.choice(data_pack.shape[0], self.n_way, False) + selected_cls = self.rng.choice(data_pack.shape[0], self.n_way, + False) for j, cur_class in enumerate(selected_cls): - selected_img = self.rng.choice(20, self.k_shot + self.k_query, False) + selected_img = self.rng.choice(20, + self.k_shot + self.k_query, + False) # meta-training and meta-test - x_spt.append(data_pack[cur_class][selected_img[:self.k_shot]]) - x_qry.append(data_pack[cur_class][selected_img[self.k_shot:]]) + x_spt.append( + data_pack[cur_class][selected_img[:self.k_shot]]) + x_qry.append( + data_pack[cur_class][selected_img[self.k_shot:]]) y_spt.append([j for _ in range(self.k_shot)]) y_qry.append([j for _ in range(self.k_query)]) # shuffle inside a batch perm = self.rng.permutation(self.n_way * self.k_shot) - x_spt = np.array(x_spt).reshape(self.n_way * self.k_shot, 1, self.resize, self.resize)[perm] + x_spt = np.array(x_spt).reshape(self.n_way * self.k_shot, 1, + self.resize, self.resize)[perm] y_spt = np.array(y_spt).reshape(self.n_way * self.k_shot)[perm] perm = self.rng.permutation(self.n_way * self.k_query) - x_qry = np.array(x_qry).reshape(self.n_way * self.k_query, 1, self.resize, self.resize)[perm] - y_qry = np.array(y_qry).reshape(self.n_way * self.k_query)[perm] + x_qry = np.array(x_qry).reshape(self.n_way * self.k_query, 1, + self.resize, self.resize)[perm] + y_qry = np.array(y_qry).reshape(self.n_way * + self.k_query)[perm] # append [sptsz, 1, 84, 84] => [b, setsz, 1, 84, 84] x_spts.append(x_spt) @@ -270,17 +299,20 @@ def load_data_cache(self, data_pack): x_qrys.append(x_qry) y_qrys.append(y_qry) - # [b, setsz, 1, 84, 84] - x_spts = np.array(x_spts).astype(np.float32).reshape(self.batchsz, setsz, 1, self.resize, self.resize) - y_spts = np.array(y_spts).astype(np.int).reshape(self.batchsz, setsz) + x_spts = np.array(x_spts).astype(np.float32).reshape( + self.batchsz, setsz, 1, self.resize, self.resize) + y_spts = np.array(y_spts).astype(np.int).reshape( + self.batchsz, setsz) # [b, qrysz, 1, 84, 84] - x_qrys = np.array(x_qrys).astype(np.float32).reshape(self.batchsz, querysz, 1, self.resize, self.resize) - y_qrys = np.array(y_qrys).astype(np.int).reshape(self.batchsz, querysz) + x_qrys = np.array(x_qrys).astype(np.float32).reshape( + self.batchsz, querysz, 1, self.resize, self.resize) + y_qrys = np.array(y_qrys).astype(np.int).reshape( + self.batchsz, querysz) x_spts, y_spts, x_qrys, y_qrys = [ - torch.from_numpy(z).to(self.device) for z in - [x_spts, y_spts, x_qrys, y_qrys] + torch.from_numpy(z).to(self.device) + for z in [x_spts, y_spts, x_qrys, y_qrys] ] data_cache.append([x_spts, y_spts, x_qrys, y_qrys]) @@ -296,7 +328,8 @@ def next(self, mode='train'): # update cache if indexes is larger cached num if self.indexes[mode] >= len(self.datasets_cache[mode]): self.indexes[mode] = 0 - self.datasets_cache[mode] = self.load_data_cache(self.datasets[mode]) + self.datasets_cache[mode] = self.load_data_cache( + self.datasets[mode]) next_batch = self.datasets_cache[mode][self.indexes[mode]] self.indexes[mode] += 1 diff --git a/examples/visualize.py b/examples/visualize.py index e080839e..10307eda 100644 --- a/examples/visualize.py +++ b/examples/visualize.py @@ -1,8 +1,9 @@ import torch +import torchviz from torch import nn from torch.nn import functional as F + import TorchOpt -import torchviz class Net(nn.Module): @@ -41,20 +42,22 @@ def draw_TorchOpt(): pred = net(xs, meta_param) loss 
= F.mse_loss(pred, torch.ones_like(pred)) # set enable_visual - net_state_0 = TorchOpt.extract_state_dict( - net, enable_visual=True, visual_prefix='step0.') + net_state_0 = TorchOpt.extract_state_dict(net, + enable_visual=True, + visual_prefix='step0.') optimizer.step(loss) # set enable_visual - net_state_1 = TorchOpt.extract_state_dict( - net, enable_visual=True, visual_prefix='step1.') + net_state_1 = TorchOpt.extract_state_dict(net, + enable_visual=True, + visual_prefix='step1.') pred = net(xs, meta_param) loss = F.mse_loss(pred, torch.ones_like(pred)) # draw computation graph - TorchOpt.visual.make_dot(loss, - [net_state_0, net_state_1, { - meta_param: "meta_param"}] - ).render("TorchOpt_graph", format="svg") + TorchOpt.visual.make_dot( + loss, [net_state_0, net_state_1, { + meta_param: "meta_param" + }]).render("TorchOpt_graph", format="svg") if __name__ == '__main__': diff --git a/include/adam_op/adam_op.h b/include/adam_op/adam_op.h index b74d96cb..7834ed0b 100644 --- a/include/adam_op/adam_op.h +++ b/include/adam_op/adam_op.h @@ -21,33 +21,33 @@ #include "common.h" namespace TorchOpt { -TensorArray<3> adamForwardInplace(torch::Tensor &updates, torch::Tensor &mu, - torch::Tensor &nu, const float b1, +TensorArray<3> adamForwardInplace(torch::Tensor& updates, torch::Tensor& mu, + torch::Tensor& nu, const float b1, const float b2, const float eps, const float eps_root, const int count); -torch::Tensor adamForwardMu(const torch::Tensor &updates, - const torch::Tensor &mu, const float b1); +torch::Tensor adamForwardMu(const torch::Tensor& updates, + const torch::Tensor& mu, const float b1); -torch::Tensor adamForwardNu(const torch::Tensor &updates, - const torch::Tensor &nu, const float b2); +torch::Tensor adamForwardNu(const torch::Tensor& updates, + const torch::Tensor& nu, const float b2); -torch::Tensor adamForwardUpdates(const torch::Tensor &new_mu, - const torch::Tensor &new_nu, const float b1, +torch::Tensor adamForwardUpdates(const torch::Tensor& new_mu, + const torch::Tensor& new_nu, const float b1, const float b2, const float eps, const float eps_root, const int count); -TensorArray<2> adamBackwardMu(const torch::Tensor &dmu, - const torch::Tensor &updates, - const torch::Tensor &mu, const float b1); +TensorArray<2> adamBackwardMu(const torch::Tensor& dmu, + const torch::Tensor& updates, + const torch::Tensor& mu, const float b1); -TensorArray<2> adamBackwardNu(const torch::Tensor &dnu, - const torch::Tensor &updates, - const torch::Tensor &nu, const float b2); +TensorArray<2> adamBackwardNu(const torch::Tensor& dnu, + const torch::Tensor& updates, + const torch::Tensor& nu, const float b2); -TensorArray<2> adamBackwardUpdates(const torch::Tensor &dupdates, - const torch::Tensor &updates, - const torch::Tensor &new_mu, - const torch::Tensor &new_nu, const float b1, +TensorArray<2> adamBackwardUpdates(const torch::Tensor& dupdates, + const torch::Tensor& updates, + const torch::Tensor& new_mu, + const torch::Tensor& new_nu, const float b1, const float b2, const int count); } // namespace TorchOpt diff --git a/include/adam_op/adam_op_impl.h b/include/adam_op/adam_op_impl.h index 720e6fc1..1bf99046 100644 --- a/include/adam_op/adam_op_impl.h +++ b/include/adam_op/adam_op_impl.h @@ -21,34 +21,34 @@ #include "common.h" namespace TorchOpt { -TensorArray<3> adamForwardInplaceCPU(torch::Tensor &updates, torch::Tensor &mu, - torch::Tensor &nu, const float b1, +TensorArray<3> adamForwardInplaceCPU(torch::Tensor& updates, torch::Tensor& mu, + torch::Tensor& nu, const float b1, const 
float b2, const float eps, const float eps_root, const int count); -torch::Tensor adamForwardMuCPU(const torch::Tensor &updates, - const torch::Tensor &mu, const float b1); +torch::Tensor adamForwardMuCPU(const torch::Tensor& updates, + const torch::Tensor& mu, const float b1); -torch::Tensor adamForwardNuCPU(const torch::Tensor &updates, - const torch::Tensor &nu, const float b2); +torch::Tensor adamForwardNuCPU(const torch::Tensor& updates, + const torch::Tensor& nu, const float b2); -torch::Tensor adamForwardUpdatesCPU(const torch::Tensor &new_mu, - const torch::Tensor &new_nu, const float b1, +torch::Tensor adamForwardUpdatesCPU(const torch::Tensor& new_mu, + const torch::Tensor& new_nu, const float b1, const float b2, const float eps, const float eps_root, const int count); -TensorArray<2> adamBackwardMuCPU(const torch::Tensor &dmu, - const torch::Tensor &updates, - const torch::Tensor &mu, const float b1); +TensorArray<2> adamBackwardMuCPU(const torch::Tensor& dmu, + const torch::Tensor& updates, + const torch::Tensor& mu, const float b1); -TensorArray<2> adamBackwardNuCPU(const torch::Tensor &dnu, - const torch::Tensor &updates, - const torch::Tensor &nu, const float b2); +TensorArray<2> adamBackwardNuCPU(const torch::Tensor& dnu, + const torch::Tensor& updates, + const torch::Tensor& nu, const float b2); -TensorArray<2> adamBackwardUpdatesCPU(const torch::Tensor &dupdates, - const torch::Tensor &updates, - const torch::Tensor &new_mu, - const torch::Tensor &new_nu, +TensorArray<2> adamBackwardUpdatesCPU(const torch::Tensor& dupdates, + const torch::Tensor& updates, + const torch::Tensor& new_mu, + const torch::Tensor& new_nu, const float b1, const float b2, const int count); } // namespace TorchOpt diff --git a/include/utils.h b/include/utils.h index 25d81e10..ddc0a992 100644 --- a/include/utils.h +++ b/include/utils.h @@ -23,7 +23,7 @@ #endif namespace TorchOpt { -__forceinline__ size_t getTensorPlainSize(const torch::Tensor &tensor) { +__forceinline__ size_t getTensorPlainSize(const torch::Tensor& tensor) { const auto dim = tensor.dim(); size_t n = 1; for (std::decay_t i = 0; i < dim; ++i) { diff --git a/setup.py b/setup.py index db42bac5..ea627c34 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,8 @@ import os -import sys import pathlib -from setuptools import setup, find_packages +import sys + +from setuptools import find_packages, setup from setuptools.command.build_ext import build_ext from torch.utils import cpp_extension @@ -22,8 +23,8 @@ def copy(self, build_temp): files = os.listdir(op_path) for file in files: if file.split('.')[-1] == 'so': - copy_file(os.path.join(op_path, file), os.path.join( - cwd, 'TorchOpt', '_lib')) + copy_file(os.path.join(op_path, file), + os.path.join(cwd, 'TorchOpt', '_lib')) def build_cmake(self): cwd = pathlib.Path().absolute() @@ -52,10 +53,7 @@ def build_cmake(self): "-DCMAKE_BUILD_TYPE=" + config ] - build_args = [ - "--config", config, - "--", "-j4" - ] + build_args = ["--config", config, "--", "-j4"] os.chdir(build_temp) self.spawn(["cmake", f"{str(cwd)}"] + cmake_args) @@ -74,10 +72,12 @@ def __init__(self): op_urls = [] if sys.version_info >= (3, 8) and sys.version_info < (3, 9): op_urls.append( - "https://torchopt.oss-cn-beijing.aliyuncs.com/torch1_11/adam_op.cpython-38-x86_64-linux-gnu.so") + "https://torchopt.oss-cn-beijing.aliyuncs.com/torch1_11/adam_op.cpython-38-x86_64-linux-gnu.so" + ) elif sys.version_info >= (3, 9) and sys.version_info < (3, 10): op_urls.append( - 
"https://torchopt.oss-cn-beijing.aliyuncs.com/torch1_11/adam_op.cpython-39-x86_64-linux-gnu.so") + "https://torchopt.oss-cn-beijing.aliyuncs.com/torch1_11/adam_op.cpython-39-x86_64-linux-gnu.so" + ) if len(op_urls) == 0: import warnings @@ -99,7 +99,7 @@ def __init__(self): setup( name="TorchOpt", version="0.4.1", - author="Jie Ren", + author="TorchOpt Contributors", author_email="jieren9806@gmail.com", description="A Jax-style optimizer.", license="Apache License Version 2.0", diff --git a/src/adam_op/adam_op.cpp b/src/adam_op/adam_op.cpp index c2a9e573..b4e12ca8 100644 --- a/src/adam_op/adam_op.cpp +++ b/src/adam_op/adam_op.cpp @@ -13,16 +13,17 @@ // limitations under the License. // ============================================================================== +#include "adam_op/adam_op.h" + #include #include -#include "adam_op/adam_op.h" #include "adam_op/adam_op_impl.cuh" #include "adam_op/adam_op_impl.h" namespace TorchOpt { -TensorArray<3> adamForwardInplace(torch::Tensor &updates, torch::Tensor &mu, - torch::Tensor &nu, const float b1, +TensorArray<3> adamForwardInplace(torch::Tensor& updates, torch::Tensor& mu, + torch::Tensor& nu, const float b1, const float b2, const float eps, const float eps_root, const int count) { if (updates.device().is_cuda()) { @@ -34,8 +35,8 @@ TensorArray<3> adamForwardInplace(torch::Tensor &updates, torch::Tensor &mu, throw std::runtime_error("Not implemented"); } }; -torch::Tensor adamForwardMu(const torch::Tensor &updates, - const torch::Tensor &mu, const float b1) { +torch::Tensor adamForwardMu(const torch::Tensor& updates, + const torch::Tensor& mu, const float b1) { if (updates.device().is_cuda()) { return adamForwardMuCUDA(updates, mu, b1); } else if (updates.device().is_cpu()) { @@ -45,8 +46,8 @@ torch::Tensor adamForwardMu(const torch::Tensor &updates, } }; -torch::Tensor adamForwardNu(const torch::Tensor &updates, - const torch::Tensor &nu, const float b2) { +torch::Tensor adamForwardNu(const torch::Tensor& updates, + const torch::Tensor& nu, const float b2) { if (updates.device().is_cuda()) { return adamForwardNuCUDA(updates, nu, b2); } else if (updates.device().is_cpu()) { @@ -56,8 +57,8 @@ torch::Tensor adamForwardNu(const torch::Tensor &updates, } }; -torch::Tensor adamForwardUpdates(const torch::Tensor &new_mu, - const torch::Tensor &new_nu, const float b1, +torch::Tensor adamForwardUpdates(const torch::Tensor& new_mu, + const torch::Tensor& new_nu, const float b1, const float b2, const float eps, const float eps_root, const int count) { if (new_mu.device().is_cuda()) { @@ -69,9 +70,9 @@ torch::Tensor adamForwardUpdates(const torch::Tensor &new_mu, } }; -TensorArray<2> adamBackwardMu(const torch::Tensor &dmu, - const torch::Tensor &updates, - const torch::Tensor &mu, const float b1) { +TensorArray<2> adamBackwardMu(const torch::Tensor& dmu, + const torch::Tensor& updates, + const torch::Tensor& mu, const float b1) { if (dmu.device().is_cuda()) { return adamBackwardMuCUDA(dmu, updates, mu, b1); } else if (dmu.device().is_cpu()) { @@ -81,9 +82,9 @@ TensorArray<2> adamBackwardMu(const torch::Tensor &dmu, } }; -TensorArray<2> adamBackwardNu(const torch::Tensor &dnu, - const torch::Tensor &updates, - const torch::Tensor &nu, const float b2) { +TensorArray<2> adamBackwardNu(const torch::Tensor& dnu, + const torch::Tensor& updates, + const torch::Tensor& nu, const float b2) { if (dnu.device().is_cuda()) { return adamBackwardNuCUDA(dnu, updates, nu, b2); } else if (dnu.device().is_cpu()) { @@ -93,10 +94,10 @@ TensorArray<2> adamBackwardNu(const 
torch::Tensor &dnu, } }; -TensorArray<2> adamBackwardUpdates(const torch::Tensor &dupdates, - const torch::Tensor &updates, - const torch::Tensor &new_mu, - const torch::Tensor &new_nu, const float b1, +TensorArray<2> adamBackwardUpdates(const torch::Tensor& dupdates, + const torch::Tensor& updates, + const torch::Tensor& new_mu, + const torch::Tensor& new_nu, const float b1, const float b2, const int count) { if (dupdates.device().is_cuda()) { return adamBackwardUpdatesCUDA(dupdates, updates, new_mu, new_nu, b1, b2, diff --git a/src/adam_op/adam_op_impl.cpp b/src/adam_op/adam_op_impl.cpp index 6b5bd39e..fe951f16 100644 --- a/src/adam_op/adam_op_impl.cpp +++ b/src/adam_op/adam_op_impl.cpp @@ -13,12 +13,13 @@ // limitations under the License. // ============================================================================== +#include "adam_op/adam_op_impl.h" + #include #include #include -#include "adam_op/adam_op_impl.h" #include "utils.h" namespace TorchOpt { @@ -28,8 +29,8 @@ template void adamForwardInplaceCPUKernel( const other_t b1, const other_t inv_one_minus_pow_b1, const other_t b2, const other_t inv_one_minus_pow_b2, const other_t eps, - const other_t eps_root, const size_t n, scalar_t *__restrict__ updates_ptr, - scalar_t *__restrict__ mu_ptr, scalar_t *__restrict__ nu_ptr) { + const other_t eps_root, const size_t n, scalar_t* __restrict__ updates_ptr, + scalar_t* __restrict__ mu_ptr, scalar_t* __restrict__ nu_ptr) { #pragma omp parallel for num_threads(32) for (size_t tid = 0; tid < n; ++tid) { const scalar_t updates = updates_ptr[tid]; @@ -49,8 +50,8 @@ void adamForwardInplaceCPUKernel( } } // namespace -TensorArray<3> adamForwardInplaceCPU(torch::Tensor &updates, torch::Tensor &mu, - torch::Tensor &nu, const float b1, +TensorArray<3> adamForwardInplaceCPU(torch::Tensor& updates, torch::Tensor& mu, + torch::Tensor& nu, const float b1, const float b2, const float eps, const float eps_root, const int count) { using other_t = float; @@ -70,10 +71,10 @@ TensorArray<3> adamForwardInplaceCPU(torch::Tensor &updates, torch::Tensor &mu, namespace { template -void adamForwardMuCPUKernel(const scalar_t *__restrict__ updates_ptr, - const scalar_t *__restrict__ mu_ptr, +void adamForwardMuCPUKernel(const scalar_t* __restrict__ updates_ptr, + const scalar_t* __restrict__ mu_ptr, const other_t b1, const size_t n, - scalar_t *__restrict__ mu_out_ptr) { + scalar_t* __restrict__ mu_out_ptr) { #pragma omp parallel for num_threads(32) for (size_t tid = 0; tid < n; ++tid) { const scalar_t updates = updates_ptr[tid]; @@ -84,8 +85,8 @@ void adamForwardMuCPUKernel(const scalar_t *__restrict__ updates_ptr, } } // namespace -torch::Tensor adamForwardMuCPU(const torch::Tensor &updates, - const torch::Tensor &mu, const float b1) { +torch::Tensor adamForwardMuCPU(const torch::Tensor& updates, + const torch::Tensor& mu, const float b1) { using other_t = float; auto mu_out = torch::empty_like(mu); @@ -102,10 +103,10 @@ torch::Tensor adamForwardMuCPU(const torch::Tensor &updates, namespace { template -void adamForwardNuCPUKernel(const scalar_t *__restrict__ updates_ptr, - const scalar_t *__restrict__ nu_ptr, +void adamForwardNuCPUKernel(const scalar_t* __restrict__ updates_ptr, + const scalar_t* __restrict__ nu_ptr, const other_t b2, const size_t n, - scalar_t *__restrict__ nu_out_ptr) { + scalar_t* __restrict__ nu_out_ptr) { #pragma omp parallel for num_threads(32) for (size_t tid = 0; tid < n; ++tid) { const scalar_t updates = updates_ptr[tid]; @@ -117,8 +118,8 @@ void adamForwardNuCPUKernel(const scalar_t 
*__restrict__ updates_ptr, } } // namespace -torch::Tensor adamForwardNuCPU(const torch::Tensor &updates, - const torch::Tensor &nu, const float b2) { +torch::Tensor adamForwardNuCPU(const torch::Tensor& updates, + const torch::Tensor& nu, const float b2) { using other_t = float; auto nu_out = torch::empty_like(nu); @@ -135,13 +136,13 @@ torch::Tensor adamForwardNuCPU(const torch::Tensor &updates, namespace { template -void adamForwardUpdatesCPUKernel(const scalar_t *__restrict__ new_mu_ptr, - const scalar_t *__restrict__ new_nu_ptr, +void adamForwardUpdatesCPUKernel(const scalar_t* __restrict__ new_mu_ptr, + const scalar_t* __restrict__ new_nu_ptr, const other_t inv_one_minus_pow_b1, const other_t inv_one_minus_pow_b2, const other_t eps, const other_t eps_root, const size_t n, - scalar_t *__restrict__ updates_out_ptr) { + scalar_t* __restrict__ updates_out_ptr) { #pragma omp parallel for num_threads(32) for (size_t tid = 0; tid < n; ++tid) { const scalar_t new_mu = new_mu_ptr[tid]; @@ -153,8 +154,8 @@ void adamForwardUpdatesCPUKernel(const scalar_t *__restrict__ new_mu_ptr, } } // namespace -torch::Tensor adamForwardUpdatesCPU(const torch::Tensor &new_mu, - const torch::Tensor &new_nu, const float b1, +torch::Tensor adamForwardUpdatesCPU(const torch::Tensor& new_mu, + const torch::Tensor& new_nu, const float b1, const float b2, const float eps, const float eps_root, const int count) { using other_t = float; @@ -179,10 +180,10 @@ torch::Tensor adamForwardUpdatesCPU(const torch::Tensor &new_mu, namespace { template -void adamBackwardMuCPUKernel(const scalar_t *__restrict__ dmu_ptr, +void adamBackwardMuCPUKernel(const scalar_t* __restrict__ dmu_ptr, const other_t b1, const size_t n, - scalar_t *__restrict__ dupdates_out_ptr, - scalar_t *__restrict__ dmu_out_ptr) { + scalar_t* __restrict__ dupdates_out_ptr, + scalar_t* __restrict__ dmu_out_ptr) { #pragma omp parallel for num_threads(32) for (size_t tid = 0; tid < n; ++tid) { const scalar_t dmu = dmu_ptr[tid]; @@ -193,9 +194,9 @@ void adamBackwardMuCPUKernel(const scalar_t *__restrict__ dmu_ptr, } } // namespace -TensorArray<2> adamBackwardMuCPU(const torch::Tensor &dmu, - const torch::Tensor &updates, - const torch::Tensor &mu, const float b1) { +TensorArray<2> adamBackwardMuCPU(const torch::Tensor& dmu, + const torch::Tensor& updates, + const torch::Tensor& mu, const float b1) { using other_t = float; auto dupdates_out = torch::empty_like(updates); @@ -213,11 +214,11 @@ TensorArray<2> adamBackwardMuCPU(const torch::Tensor &dmu, namespace { template -void adamBackwardNuCPUKernel(const scalar_t *__restrict__ dnu_ptr, - const scalar_t *__restrict__ updates_ptr, +void adamBackwardNuCPUKernel(const scalar_t* __restrict__ dnu_ptr, + const scalar_t* __restrict__ updates_ptr, const other_t b2, const size_t n, - scalar_t *__restrict__ dupdates_out_ptr, - scalar_t *__restrict__ dnu_out_ptr) { + scalar_t* __restrict__ dupdates_out_ptr, + scalar_t* __restrict__ dnu_out_ptr) { #pragma omp parallel for num_threads(32) for (size_t tid = 0; tid < n; ++tid) { const scalar_t dnu = dnu_ptr[tid]; @@ -229,9 +230,9 @@ void adamBackwardNuCPUKernel(const scalar_t *__restrict__ dnu_ptr, } } // namespace -TensorArray<2> adamBackwardNuCPU(const torch::Tensor &dnu, - const torch::Tensor &updates, - const torch::Tensor &nu, const float b2) { +TensorArray<2> adamBackwardNuCPU(const torch::Tensor& dnu, + const torch::Tensor& updates, + const torch::Tensor& nu, const float b2) { using other_t = float; auto dupdates_out = torch::empty_like(updates); @@ -249,14 +250,14 @@ 
TensorArray<2> adamBackwardNuCPU(const torch::Tensor &dnu, namespace { template -void adamBackwardUpdatesCPUKernel(const scalar_t *__restrict__ dupdates_ptr, - const scalar_t *__restrict__ updates_ptr, - const scalar_t *__restrict__ new_mu_ptr, +void adamBackwardUpdatesCPUKernel(const scalar_t* __restrict__ dupdates_ptr, + const scalar_t* __restrict__ updates_ptr, + const scalar_t* __restrict__ new_mu_ptr, const other_t one_minus_pow_b1, const other_t inv_one_minus_pow_b2, const size_t n, - scalar_t *__restrict__ dnew_mu_out_ptr, - scalar_t *__restrict__ dnew_nu_out_ptr) { + scalar_t* __restrict__ dnew_mu_out_ptr, + scalar_t* __restrict__ dnew_nu_out_ptr) { #pragma omp parallel for num_threads(32) for (size_t tid = 0; tid < n; ++tid) { const scalar_t dupdates = dupdates_ptr[tid]; @@ -280,10 +281,10 @@ void adamBackwardUpdatesCPUKernel(const scalar_t *__restrict__ dupdates_ptr, } } // namespace -TensorArray<2> adamBackwardUpdatesCPU(const torch::Tensor &dupdates, - const torch::Tensor &updates, - const torch::Tensor &new_mu, - const torch::Tensor &new_nu, +TensorArray<2> adamBackwardUpdatesCPU(const torch::Tensor& dupdates, + const torch::Tensor& updates, + const torch::Tensor& new_mu, + const torch::Tensor& new_nu, const float b1, const float b2, const int count) { using other_t = float; diff --git a/tests/unit/high_level/test_high_level_inplace.py b/tests/unit/high_level/test_high_level_inplace.py index 623888cb..dc55ce0c 100644 --- a/tests/unit/high_level/test_high_level_inplace.py +++ b/tests/unit/high_level/test_high_level_inplace.py @@ -13,13 +13,14 @@ # limitations under the License. # ============================================================================== +import copy import unittest -import copy import torch -from torch.utils import data from torch.nn import functional as F +from torch.utils import data from torchvision import models + from TorchOpt import SGD, Adam, RMSProp @@ -32,8 +33,8 @@ def setUpClass(cls): cls.model_backup = copy.deepcopy(cls.model) cls.batch_size = 2 - cls.dataset = data.TensorDataset(torch.randn( - 2, 3, 224, 224), torch.randint(0, 1000, (2,))) + cls.dataset = data.TensorDataset(torch.randn(2, 3, 224, 224), + torch.randint(0, 1000, (2, ))) cls.loader = data.DataLoader(cls.dataset, cls.batch_size, False) cls.lr = 1e-3 @@ -59,12 +60,15 @@ def test_sgd(self) -> None: optim_ref.step() with torch.no_grad(): - for p, p_ref in zip(self.model.parameters(), self.model_ref.parameters()): + for p, p_ref in zip(self.model.parameters(), + self.model_ref.parameters()): mse = F.mse_loss(p, p_ref) self.assertAlmostEqual(float(mse), 0) - for b, b_ref in zip(self.model.buffers(), self.model_ref.buffers()): + for b, b_ref in zip(self.model.buffers(), + self.model_ref.buffers()): b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + b_ref = b_ref.float( + ) if not b_ref.is_floating_point() else b_ref mse = F.mse_loss(b, b_ref) self.assertAlmostEqual(float(mse), 0) @@ -84,12 +88,15 @@ def test_adam(self) -> None: optim_ref.step() with torch.no_grad(): - for p, p_ref in zip(self.model.parameters(), self.model_ref.parameters()): + for p, p_ref in zip(self.model.parameters(), + self.model_ref.parameters()): mse = F.mse_loss(p, p_ref) self.assertAlmostEqual(float(mse), 0) - for b, b_ref in zip(self.model.buffers(), self.model_ref.buffers()): + for b, b_ref in zip(self.model.buffers(), + self.model_ref.buffers()): b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float() if not 
b_ref.is_floating_point() else b_ref + b_ref = b_ref.float( + ) if not b_ref.is_floating_point() else b_ref mse = F.mse_loss(b, b_ref) self.assertAlmostEqual(float(mse), 0) @@ -113,12 +120,15 @@ def test_accelerated_adam_cpu(self) -> None: optim_ref.step() with torch.no_grad(): - for p, p_ref in zip(self.model.parameters(), self.model_ref.parameters()): + for p, p_ref in zip(self.model.parameters(), + self.model_ref.parameters()): mse = F.mse_loss(p, p_ref) self.assertAlmostEqual(float(mse), 0) - for b, b_ref in zip(self.model.buffers(), self.model_ref.buffers()): + for b, b_ref in zip(self.model.buffers(), + self.model_ref.buffers()): b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + b_ref = b_ref.float( + ) if not b_ref.is_floating_point() else b_ref mse = F.mse_loss(b, b_ref) self.assertAlmostEqual(float(mse), 0) @@ -142,17 +152,21 @@ def test_accelerated_adam_cuda(self) -> None: optim_ref.step() with torch.no_grad(): - for p, p_ref in zip(self.model.parameters(), self.model_ref.parameters()): + for p, p_ref in zip(self.model.parameters(), + self.model_ref.parameters()): mse = F.mse_loss(p, p_ref) self.assertAlmostEqual(float(mse), 0) - for b, b_ref in zip(self.model.buffers(), self.model_ref.buffers()): + for b, b_ref in zip(self.model.buffers(), + self.model_ref.buffers()): b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + b_ref = b_ref.float( + ) if not b_ref.is_floating_point() else b_ref mse = F.mse_loss(b, b_ref) self.assertAlmostEqual(float(mse), 0) def test_rmsprop(self) -> None: - optim = RMSProp(self.model.parameters(), self.lr, decay=0.99) # pytorch uses 0.99 as the default value + optim = RMSProp(self.model.parameters(), self.lr, + decay=0.99) # pytorch uses 0.99 as the default value optim_ref = torch.optim.RMSprop(self.model_ref.parameters(), self.lr) for xs, ys in self.loader: pred = self.model(xs) @@ -167,14 +181,20 @@ def test_rmsprop(self) -> None: optim_ref.step() with torch.no_grad(): - for p, p_ref in zip(self.model.parameters(), self.model_ref.parameters()): + for p, p_ref in zip(self.model.parameters(), + self.model_ref.parameters()): mse = F.mse_loss(p, p_ref) - self.assertAlmostEqual(float(mse), 0, delta=1e-4) # Optax and pytorch have different implementation - for b, b_ref in zip(self.model.buffers(), self.model_ref.buffers()): + self.assertAlmostEqual( + float(mse), 0, delta=1e-4 + ) # Optax and pytorch have different implementation + for b, b_ref in zip(self.model.buffers(), + self.model_ref.buffers()): b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + b_ref = b_ref.float( + ) if not b_ref.is_floating_point() else b_ref mse = F.mse_loss(b, b_ref) self.assertAlmostEqual(float(mse), 0) + if __name__ == '__main__': unittest.main() diff --git a/tests/unit/low_level/test_low_level_inplace.py b/tests/unit/low_level/test_low_level_inplace.py index 2593c743..de9d9861 100644 --- a/tests/unit/low_level/test_low_level_inplace.py +++ b/tests/unit/low_level/test_low_level_inplace.py @@ -13,16 +13,17 @@ # limitations under the License. 
# ============================================================================== +import copy import unittest -import copy +import functorch import torch -from torch.utils import data from torch.nn import functional as F +from torch.utils import data from torchvision import models -import functorch -from TorchOpt import sgd, adam, rmsprop + import TorchOpt +from TorchOpt import adam, rmsprop, sgd class LowLevelInplace(unittest.TestCase): @@ -34,8 +35,8 @@ def setUpClass(cls): cls.model_backup = copy.deepcopy(cls.model) cls.batch_size = 2 - cls.dataset = data.TensorDataset(torch.randn( - 2, 3, 224, 224), torch.randint(0, 1000, (2,))) + cls.dataset = data.TensorDataset(torch.randn(2, 3, 224, 224), + torch.randint(0, 1000, (2, ))) cls.loader = data.DataLoader(cls.dataset, cls.batch_size, False) cls.lr = 1e-3 @@ -72,7 +73,8 @@ def test_sgd(self) -> None: self.assertAlmostEqual(float(mse), 0) for b, b_ref in zip(buffers, self.model_ref.buffers()): b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + b_ref = b_ref.float( + ) if not b_ref.is_floating_point() else b_ref mse = F.mse_loss(b, b_ref) self.assertAlmostEqual(float(mse), 0) @@ -101,7 +103,8 @@ def test_adam(self) -> None: self.assertAlmostEqual(float(mse), 0) for b, b_ref in zip(buffers, self.model_ref.buffers()): b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + b_ref = b_ref.float( + ) if not b_ref.is_floating_point() else b_ref mse = F.mse_loss(b, b_ref) self.assertAlmostEqual(float(mse), 0) @@ -134,7 +137,8 @@ def test_accelerated_adam_cpu(self) -> None: self.assertAlmostEqual(float(mse), 0) for b, b_ref in zip(buffers, self.model_ref.buffers()): b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + b_ref = b_ref.float( + ) if not b_ref.is_floating_point() else b_ref mse = F.mse_loss(b, b_ref) self.assertAlmostEqual(float(mse), 0) @@ -167,14 +171,16 @@ def test_accelerated_adam_cuda(self) -> None: self.assertAlmostEqual(float(mse), 0) for b, b_ref in zip(buffers, self.model_ref.buffers()): b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + b_ref = b_ref.float( + ) if not b_ref.is_floating_point() else b_ref mse = F.mse_loss(b, b_ref) self.assertAlmostEqual(float(mse), 0) def test_rmsprop(self) -> None: fun, params, buffers = functorch.make_functional_with_buffers( self.model) - optim = rmsprop(self.lr, decay=0.99) # pytorch uses 0.99 as the default value + optim = rmsprop(self.lr, + decay=0.99) # pytorch uses 0.99 as the default value optim_state = optim.init(params) optim_ref = torch.optim.RMSprop(self.model_ref.parameters(), self.lr) for xs, ys in self.loader: @@ -193,10 +199,13 @@ def test_rmsprop(self) -> None: with torch.no_grad(): for p, p_ref in zip(params, self.model_ref.parameters()): mse = F.mse_loss(p, p_ref) - self.assertAlmostEqual(float(mse), 0, delta=1e-4) # Optax and pytorch have different implementation + self.assertAlmostEqual( + float(mse), 0, delta=1e-4 + ) # Optax and pytorch have different implementation for b, b_ref in zip(buffers, self.model_ref.buffers()): b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + b_ref = b_ref.float( + ) if not b_ref.is_floating_point() else b_ref mse = F.mse_loss(b, b_ref) self.assertAlmostEqual(float(mse), 0) diff --git a/tests/unit/test_clip.py 
b/tests/unit/test_clip.py index 03453b4d..b66c6f9b 100644 --- a/tests/unit/test_clip.py +++ b/tests/unit/test_clip.py @@ -13,16 +13,17 @@ # limitations under the License. # ============================================================================== +import copy import unittest -import copy import torch -from torch.utils import data from torch.nn import functional as F from torch.nn.utils import clip_grad_norm_ +from torch.utils import data from torchvision import models -from TorchOpt import Optimizer, sgd + import TorchOpt +from TorchOpt import Optimizer, sgd class HighLevelInplace(unittest.TestCase): @@ -34,8 +35,8 @@ def setUpClass(cls): cls.model_ref = copy.deepcopy(cls.model) cls.batch_size = 2 - cls.dataset = data.TensorDataset(torch.randn( - 2, 3, 224, 224), torch.randint(0, 1000, (2,))) + cls.dataset = data.TensorDataset(torch.randn(2, 3, 224, 224), + torch.randint(0, 1000, (2, ))) cls.loader = data.DataLoader(cls.dataset, cls.batch_size, False) cls.lr = 1e0 @@ -49,8 +50,7 @@ def setUp(self) -> None: def test_sgd(self) -> None: chain = TorchOpt.combine.chain( TorchOpt.clip.clip_grad_norm(max_norm=self.max_norm), - sgd(lr=self.lr) - ) + sgd(lr=self.lr)) optim = Optimizer(self.model.parameters(), chain) optim_ref = torch.optim.SGD(self.model_ref.parameters(), self.lr) for xs, ys in self.loader: @@ -68,12 +68,15 @@ def test_sgd(self) -> None: optim_ref.step() with torch.no_grad(): - for p, p_ref in zip(self.model.parameters(), self.model_ref.parameters()): + for p, p_ref in zip(self.model.parameters(), + self.model_ref.parameters()): mse = F.mse_loss(p, p_ref) self.assertAlmostEqual(float(mse), 0) - for b, b_ref in zip(self.model.buffers(), self.model_ref.buffers()): + for b, b_ref in zip(self.model.buffers(), + self.model_ref.buffers()): b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + b_ref = b_ref.float( + ) if not b_ref.is_floating_point() else b_ref mse = F.mse_loss(b, b_ref) self.assertAlmostEqual(float(mse), 0) diff --git a/tests/unit/test_schedule.py b/tests/unit/test_schedule.py index 602ad4be..0143cb7f 100644 --- a/tests/unit/test_schedule.py +++ b/tests/unit/test_schedule.py @@ -35,8 +35,7 @@ def test_linear(self) -> None: init_value=self.init_value, end_value=self.end_value, transition_steps=self.transition_steps, - transition_begin=self.transition_begin - ) + transition_begin=self.transition_begin) for i in range(self.transition_begin, self.transition_steps): lr = schedule(i) lr_gt = self.init_value - self.gap_value * \ From bc1f3f9d81b51085f751953d41bf3fb7b46db9b2 Mon Sep 17 00:00:00 2001 From: Benjamin-eecs Date: Wed, 29 Jun 2022 03:47:22 +0800 Subject: [PATCH 02/19] feat(all): addlicense and pytest --- .github/ISSUE_TEMPLATE/bug_report.md | 62 +++++++++++++++++++ .github/ISSUE_TEMPLATE/feature_request.md | 30 +++++++++ .github/PULL_REQUEST_TEMPLATE.md | 41 ++++++++++++ .github/workflows/lint.yml | 38 ++++++++++++ LICENSE | 2 +- Makefile | 29 ++++++++- TorchOpt/__init__.py | 36 +++++++++-- TorchOpt/_src/MetaOptimizer.py | 6 +- TorchOpt/_src/__init__.py | 2 +- TorchOpt/_src/hook.py | 2 +- TorchOpt/_src/pytypes.py | 15 +++++ examples/L2R/helper/argument.py | 15 +++++ examples/L2R/helper/utils.py | 1 + examples/LOLA/helper/agent.py | 15 +++++ examples/LOLA/helper/argument.py | 15 +++++ examples/LOLA/helper/env.py | 15 +++++ examples/LOLA/helper/utils.py | 15 +++++ examples/LOLA/lola_dice.py | 1 + examples/LOLA/visualise.py | 15 +++++ examples/MAML-RL/helpers/Tabular_mdp.py | 1 + 
 examples/MAML-RL/helpers/__init__.py          | 16 +++++
 examples/MAML-RL/helpers/policy.py            | 15 +++++
 examples/few-shot/support/omniglot_loaders.py |  3 +-
 examples/visualize.py                         | 15 +++++
 24 files changed, 388 insertions(+), 17 deletions(-)
 create mode 100644 .github/ISSUE_TEMPLATE/bug_report.md
 create mode 100644 .github/ISSUE_TEMPLATE/feature_request.md
 create mode 100644 .github/PULL_REQUEST_TEMPLATE.md
 create mode 100644 .github/workflows/lint.yml

diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
new file mode 100644
index 00000000..a74f7620
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,62 @@
+---
+name: Bug report
+about: Create a report to help us improve
+title: "[BUG]"
+labels: ''
+assignees: Benjamin-eecs
+
+---
+
+## Describe the bug
+
+A clear and concise description of what the bug is.
+
+## To Reproduce
+
+Steps to reproduce the behavior.
+
+Please try to provide a minimal example to reproduce the bug. Error messages and stack traces are also helpful.
+
+Please use the markdown code blocks for both code and stack traces.
+
+```python
+import TorchOpt
+```
+
+```bash
+Traceback (most recent call last):
+  File ...
+```
+
+## Expected behavior
+
+A clear and concise description of what you expected to happen.
+
+## Screenshots
+If applicable, add screenshots to help explain your problem.
+
+## System info
+
+Describe the characteristics of your environment:
+ * Describe how the library was installed (pip, source, ...)
+ * Python version
+ * Versions of any other relevant libraries
+
+```python
+import TorchOpt, numpy, sys
+print(TorchOpt.__version__, numpy.__version__, sys.version, sys.platform)
+```
+
+## Additional context
+
+Add any other context about the problem here.
+
+## Reason and Possible fixes
+
+If you know or suspect the reason for this bug, paste the code lines and suggest modifications.
+
+## Checklist
+
+- [ ] I have checked that there is no similar issue in the repo (**required**)
+- [ ] I have read the [documentation](https://torchopt.readthedocs.io/) (**required**)
+- [ ] I have provided a minimal working example to reproduce the bug (**required**)
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
new file mode 100644
index 00000000..0e6ca2a8
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -0,0 +1,30 @@
+---
+name: Feature request
+about: Suggest an idea for this project
+title: "[Feature Request]"
+labels: ''
+assignees: Benjamin-eecs
+
+---
+
+## Motivation
+
+Please outline the motivation for the proposal.
+Is your feature request related to a problem? e.g., "I'm always frustrated when [...]".
+If this is related to another issue, please link it here too.
+
+## Solution
+
+A clear and concise description of what you want to happen.
+
+## Alternatives
+
+A clear and concise description of any alternative solutions or features you've considered.
+
+## Additional context
+
+Add any other context or screenshots about the feature request here.
+
+## Checklist
+
+- [ ] I have checked that there is no similar issue in the repo (**required**)
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 00000000..064d15bc
--- /dev/null
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,41 @@
+## Description
+
+Describe your changes in detail.
+
+## Motivation and Context
+
+Why is this change required? What problem does it solve?
+If it fixes an open issue, please link to the issue here.
+You can use the syntax `close #15213` if this PR closes issue #15213.
+
+- [ ] I have raised an issue to propose this change ([required](https://torchopt.readthedocs.io/en/latest/pages/contributing.html) for new features and bug fixes)
+
+## Types of changes
+
+What types of changes does your code introduce? Put an `x` in all the boxes that apply:
+
+- [ ] Bug fix (non-breaking change which fixes an issue)
+- [ ] New feature (non-breaking change which adds core functionality)
+- [ ] New environment (non-breaking change which adds a 3rd-party environment)
+- [ ] Breaking change (fix or feature that would cause existing functionality to change)
+- [ ] Documentation (update in the documentation)
+- [ ] Example (update in the examples folder)
+
+## Implemented Tasks
+
+- [ ] Subtask 1
+- [ ] Subtask 2
+- [ ] Subtask 3
+
+## Checklist
+
+Go over all the following points, and put an `x` in all the boxes that apply.
+If you are unsure about any of these, don't hesitate to ask. We are here to help!
+
+- [ ] I have read the [CONTRIBUTION](https://torchopt.readthedocs.io/en/latest/pages/contributing.html) guide (**required**)
+- [ ] My change requires a change to the documentation.
+- [ ] I have updated the tests accordingly (*required for a bug fix or a new feature*).
+- [ ] I have updated the documentation accordingly.
+- [ ] I have reformatted the code using `make format` (**required**)
+- [ ] I have checked the code using `make lint` (**required**)
+- [ ] I have ensured that `make test` passes. (**required**)
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
new file mode 100644
index 00000000..123a4cfc
--- /dev/null
+++ b/.github/workflows/lint.yml
@@ -0,0 +1,38 @@
+name: Lint
+
+on: [pull_request]
+
+jobs:
+  lint:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Cancel previous run
+        uses: styfle/cancel-workflow-action@0.9.1
+        with:
+          access_token: ${{ github.token }}
+      - uses: actions/checkout@v2
+      - name: Set up Python 3.8
+        uses: actions/setup-python@v2
+        with:
+          python-version: 3.8
+      - name: Upgrade pip
+        run: |
+          python -m pip install --upgrade pip
+      - name: flake8
+        run: |
+          make flake8
+      - name: isort and yapf
+        run: |
+          make py-format
+      - name: addlicense
+        run: |
+          make addlicense
+      - name: mypy
+        run: |
+          make mypy
+      - name: docstyle
+        run: |
+          make docstyle
+      - name: spelling
+        run: |
+          make spelling
\ No newline at end of file
diff --git a/LICENSE b/LICENSE
index 46474282..710ed864 100644
--- a/LICENSE
+++ b/LICENSE
@@ -187,7 +187,7 @@ same "printed page" as the copyright notice for easier
    identification within third-party archives.
 
-   Copyright [2022] [Jie Ren]
+   Copyright [2022] [MetaOPT Team. All Rights Reserved.]
 
    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
diff --git a/Makefile b/Makefile
index b4e42f22..db744e54 100644
--- a/Makefile
+++ b/Makefile
@@ -1,10 +1,13 @@
 print-% : ; @echo $* = $($*)
 SHELL = /bin/bash
 PROJECT_NAME = TorchOpt
+PROJECT_PATH=${PROJECT_NAME}/
+PROJECT_FOLDER = $(PROJECT_NAME) examples include src tests
 PYTHON_FILES = $(shell find . -type f -name "*.py")
 CPP_FILES = $(shell find . -type f -name "*.h" -o -name "*.cpp")
 COMMIT_HASH = $(shell git log -1 --format=%h)
-
+COPYRIGHT = "MetaOPT Team. All Rights Reserved."
+PATH := $(HOME)/go/bin:$(PATH) # installation @@ -32,6 +35,12 @@ clang-format-install: clang-tidy-install: command -v clang-tidy || sudo apt-get install -y clang-tidy +go-install: + # requires go >= 1.16 + command -v go || (sudo apt-get install -y golang-1.16 && sudo ln -sf /usr/lib/go-1.16/bin/go /usr/bin/go) + +addlicense-install: go-install + command -v addlicense || go install github.com/google/addlicense@latest doc-install: $(call check_install, pydocstyle) @@ -40,10 +49,21 @@ doc-install: $(call check_install, sphinx_rtd_theme) $(call check_install_extra, sphinxcontrib.spelling, sphinxcontrib.spelling pyenchant) +pytest-install: + $(call check_install, pytest) + $(call check_install, pytest_cov) + $(call check_install, pytest_xdist) + + +# test + +pytest: pytest-install + pytest tests --cov ${PROJECT_PATH} --durations 0 -v --cov-report term-missing --color=yes + # python linter flake8: flake8-install - flake8 $(PYTHON_FILES) --count --show-source --statistics + flake8 $(PYTHON_FILES) --count --select=E9,F63,F7,F82,E225,E251 --show-source --statistics py-format: py-format-install isort --check $(PYTHON_FILES) && yapf -r -d $(PYTHON_FILES) @@ -61,6 +81,9 @@ clang-format: clang-format-install # documentation +addlicense: addlicense-install + addlicense -c $(COPYRIGHT) -l apache -y 2022 -check $(PROJECT_FOLDER) + docstyle: doc-install pydocstyle $(PROJECT_NAME) && doc8 docs && cd docs && make html SPHINXOPTS="-W" @@ -79,5 +102,5 @@ format: py-format-install clang-format-install isort $(PYTHON_FILES) yapf -ir $(PYTHON_FILES) clang-format-11 -style=file -i $(CPP_FILES) - + addlicense -c $(COPYRIGHT) -l apache -y 2022 $(PROJECT_FOLDER) diff --git a/TorchOpt/__init__.py b/TorchOpt/__init__.py index 28e783d5..0a505b27 100644 --- a/TorchOpt/__init__.py +++ b/TorchOpt/__init__.py @@ -13,12 +13,36 @@ # limitations under the License. 
# ============================================================================== -from ._src import (accelerated_op_available, clip, combine, hook, schedule, +from TorchOpt._src import (accelerated_op_available, clip, combine, hook, schedule, visual) -from ._src.alias import adam, rmsprop, sgd -from ._src.MetaOptimizer import MetaAdam, MetaOptimizer, MetaRMSProp, MetaSGD -from ._src.Optimizer import SGD, Adam, Optimizer, RMSProp -from ._src.update import apply_updates -from ._src.utils import extract_state_dict, recover_state_dict, stop_gradient +from TorchOpt._src.alias import adam, rmsprop, sgd +from TorchOpt._src.Optimizer import SGD, Adam, Optimizer, RMSProp +from TorchOpt._src.MetaOptimizer import MetaAdam, MetaOptimizer, MetaRMSProp, MetaSGD +from TorchOpt._src.update import apply_updates +from TorchOpt._src.utils import extract_state_dict, recover_state_dict, stop_gradient __version__ = "0.4.1" + +__all__ = ( + "accelerated_op_available", + "clip", + "combine", + "hook", + "schedule", + "visual", + "adam", + "rmsprop", + "sgd", + "MetaAdam", + "MetaOptimizer", + "MetaRMSProp", + "MetaSGD", + "SGD", + "Adam", + "Optimizer", + "RMSProp", + "apply_updates", + "extract_state_dict", + "recover_state_dict", + "stop_gradient", +) \ No newline at end of file diff --git a/TorchOpt/_src/MetaOptimizer.py b/TorchOpt/_src/MetaOptimizer.py index fa9c541f..e1240d7e 100644 --- a/TorchOpt/_src/MetaOptimizer.py +++ b/TorchOpt/_src/MetaOptimizer.py @@ -17,10 +17,10 @@ import torch from torch import nn -import TorchOpt from TorchOpt._src import base from TorchOpt._src.alias import adam, rmsprop, sgd from TorchOpt._src.pytypes import ScalarOrSchedule +from TorchOpt._src.update import apply_updates class MetaOptimizer(object): @@ -61,9 +61,7 @@ def step(self, loss: torch.Tensor): allow_unused=True) updates, state = self.impl.update(grad, state, False) self.state_groups[idx] = state - new_params = TorchOpt.apply_updates(flatten_params, - updates, - inplace=False) + new_params = apply_updates(flatten_params, updates, inplace=False) unflatten_new_params = containers_tree.unflatten(new_params) for (container, unflatten_param) in zip(param_containers, unflatten_new_params): diff --git a/TorchOpt/_src/__init__.py b/TorchOpt/_src/__init__.py index f57f9a2d..522a892f 100644 --- a/TorchOpt/_src/__init__.py +++ b/TorchOpt/_src/__init__.py @@ -13,4 +13,4 @@ # limitations under the License. # ============================================================================== -from .accelerated_op import accelerated_op_available +from TorchOpt._src.accelerated_op import accelerated_op_available diff --git a/TorchOpt/_src/hook.py b/TorchOpt/_src/hook.py index 95c6ba63..a53e3319 100644 --- a/TorchOpt/_src/hook.py +++ b/TorchOpt/_src/hook.py @@ -16,7 +16,7 @@ import jax import torch -from .base import EmptyState, GradientTransformation +from TorchOpt._src.base import EmptyState, GradientTransformation def zero_nan_hook(g: torch.Tensor) -> torch.Tensor: diff --git a/TorchOpt/_src/pytypes.py b/TorchOpt/_src/pytypes.py index ca14c319..f662198a 100644 --- a/TorchOpt/_src/pytypes.py +++ b/TorchOpt/_src/pytypes.py @@ -1,3 +1,18 @@ +# Copyright 2022 MetaOPT Team. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + from typing import Any, Callable, Iterable, Mapping, Union from torch import Tensor diff --git a/examples/L2R/helper/argument.py b/examples/L2R/helper/argument.py index a44095e0..26de1fd4 100644 --- a/examples/L2R/helper/argument.py +++ b/examples/L2R/helper/argument.py @@ -1,3 +1,18 @@ +# Copyright 2022 MetaOPT Team. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + import argparse import torch diff --git a/examples/L2R/helper/utils.py b/examples/L2R/helper/utils.py index dece9938..8281a961 100644 --- a/examples/L2R/helper/utils.py +++ b/examples/L2R/helper/utils.py @@ -14,6 +14,7 @@ # ============================================================================== # This file is modified from: # https://github.com/uber-research/learning-to-reweight-examples +# ============================================================================== import random diff --git a/examples/LOLA/helper/agent.py b/examples/LOLA/helper/agent.py index 1ae36688..350a77fa 100755 --- a/examples/LOLA/helper/agent.py +++ b/examples/LOLA/helper/agent.py @@ -1,5 +1,20 @@ +# Copyright 2022 MetaOPT Team. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== # This file is modified from: # https://github.com/alexis-jacq/LOLA_DiCE +# ============================================================================== import torch import torch.nn as nn diff --git a/examples/LOLA/helper/argument.py b/examples/LOLA/helper/argument.py index acd50a52..c9b20902 100755 --- a/examples/LOLA/helper/argument.py +++ b/examples/LOLA/helper/argument.py @@ -1,3 +1,18 @@ +# Copyright 2022 MetaOPT Team. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + import argparse diff --git a/examples/LOLA/helper/env.py b/examples/LOLA/helper/env.py index 8ac392c8..d361c908 100755 --- a/examples/LOLA/helper/env.py +++ b/examples/LOLA/helper/env.py @@ -1,5 +1,20 @@ +# Copyright 2022 MetaOPT Team. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== # This file is modified from: # https://github.com/alexis-jacq/LOLA_DiCE +# ============================================================================== import gym import numpy as np diff --git a/examples/LOLA/helper/utils.py b/examples/LOLA/helper/utils.py index 86421034..76267da0 100755 --- a/examples/LOLA/helper/utils.py +++ b/examples/LOLA/helper/utils.py @@ -1,5 +1,20 @@ +# Copyright 2022 MetaOPT Team. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== # This file is modified from: # https://github.com/alexis-jacq/LOLA_DiCE +# ============================================================================== import numpy as np import torch diff --git a/examples/LOLA/lola_dice.py b/examples/LOLA/lola_dice.py index 1eee2ae7..cde65c9e 100755 --- a/examples/LOLA/lola_dice.py +++ b/examples/LOLA/lola_dice.py @@ -14,6 +14,7 @@ # ============================================================================== # This file is modified from: # https://github.com/alexis-jacq/LOLA_DiCE +# ============================================================================== from copy import deepcopy diff --git a/examples/LOLA/visualise.py b/examples/LOLA/visualise.py index da5ea0da..2640f6a7 100755 --- a/examples/LOLA/visualise.py +++ b/examples/LOLA/visualise.py @@ -1,3 +1,18 @@ +# Copyright 2022 MetaOPT Team. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + import matplotlib.pyplot as plt import numpy as np import seaborn as sns diff --git a/examples/MAML-RL/helpers/Tabular_mdp.py b/examples/MAML-RL/helpers/Tabular_mdp.py index b5786296..f0d1d313 100644 --- a/examples/MAML-RL/helpers/Tabular_mdp.py +++ b/examples/MAML-RL/helpers/Tabular_mdp.py @@ -14,6 +14,7 @@ # ============================================================================== # This file is modified from: # https://github.com/tristandeleu/pytorch-maml-rl +# ============================================================================== import gym import numpy as np diff --git a/examples/MAML-RL/helpers/__init__.py b/examples/MAML-RL/helpers/__init__.py index a83c9eee..37381f3f 100644 --- a/examples/MAML-RL/helpers/__init__.py +++ b/examples/MAML-RL/helpers/__init__.py @@ -1,5 +1,21 @@ +# Copyright 2022 MetaOPT Team. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== # This file is modified from: # https://github.com/tristandeleu/pytorch-maml-rl +# ============================================================================== + from gym.envs.registration import register register('TabularMDP-v0', diff --git a/examples/MAML-RL/helpers/policy.py b/examples/MAML-RL/helpers/policy.py index 0ef52c6a..5b572a76 100644 --- a/examples/MAML-RL/helpers/policy.py +++ b/examples/MAML-RL/helpers/policy.py @@ -1,5 +1,20 @@ +# Copyright 2022 MetaOPT Team. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== # This file is modified from: # https://github.com/tristandeleu/pytorch-maml-rl +# ============================================================================== import torch import torch.nn as nn diff --git a/examples/few-shot/support/omniglot_loaders.py b/examples/few-shot/support/omniglot_loaders.py index 95eba9ce..2dc92b32 100644 --- a/examples/few-shot/support/omniglot_loaders.py +++ b/examples/few-shot/support/omniglot_loaders.py @@ -11,11 +11,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - +# ============================================================================== # These Omniglot loaders are from Jackie Loong's PyTorch MAML implementation: # https://github.com/dragen1860/MAML-Pytorch # https://github.com/dragen1860/MAML-Pytorch/blob/master/omniglot.py # https://github.com/dragen1860/MAML-Pytorch/blob/master/omniglotNShot.py +# ============================================================================== import errno import os diff --git a/examples/visualize.py b/examples/visualize.py index 10307eda..03a58c24 100644 --- a/examples/visualize.py +++ b/examples/visualize.py @@ -1,3 +1,18 @@ +# Copyright 2022 MetaOPT Team. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== + import torch import torchviz from torch import nn From 786e3d529f30453607d52f7c1826607719c09e81 Mon Sep 17 00:00:00 2001 From: Benjamin-eecs Date: Wed, 29 Jun 2022 04:05:43 +0800 Subject: [PATCH 03/19] feat(all): actions, ignore, lint --- .clang-format | 2 - .github/workflows/lint.yml | 14 +- .github/workflows/release.yml | 34 ++ .github/workflows/test.yml | 31 + .gitignore | 140 +++++ Makefile | 16 +- TorchOpt/__init__.py | 67 ++- TorchOpt/_lib/adam_op.py | 46 +- TorchOpt/_src/MetaOptimizer.py | 214 +++---- TorchOpt/_src/Optimizer.py | 226 ++++---- TorchOpt/_src/accelerated_op/__init__.py | 26 +- .../_src/accelerated_op/adam_op/AdamOp.py | 188 ++++--- TorchOpt/_src/alias.py | 133 +++-- TorchOpt/_src/base.py | 45 +- TorchOpt/_src/clip.py | 99 ++-- TorchOpt/_src/combine.py | 31 +- TorchOpt/_src/hook.py | 22 +- TorchOpt/_src/pytypes.py | 4 +- TorchOpt/_src/schedule.py | 75 +-- TorchOpt/_src/transform.py | 507 +++++++++-------- TorchOpt/_src/update.py | 28 +- TorchOpt/_src/utils.py | 208 ++++--- TorchOpt/_src/visual.py | 336 +++++------ docs/conf.py | 14 +- docs/spelling_wordlist.txt | 0 examples/L2R/helper/argument.py | 62 +- examples/L2R/helper/model.py | 70 +-- examples/L2R/helper/utils.py | 281 +++++----- examples/L2R/train_l2r.py | 457 +++++++-------- examples/LOLA/helper/agent.py | 44 +- examples/LOLA/helper/argument.py | 66 +-- examples/LOLA/helper/env.py | 123 ++-- examples/LOLA/helper/utils.py | 165 +++--- examples/LOLA/lola_dice.py | 160 +++--- examples/LOLA/visualise.py | 20 +- examples/MAML-RL/helpers/Tabular_mdp.py | 163 +++--- examples/MAML-RL/helpers/__init__.py | 18 +- examples/MAML-RL/helpers/policy.py | 41 +- examples/MAML-RL/run_MAML.py | 300 +++++----- examples/MGRL/toy.py | 114 ++-- examples/few-shot/maml-omniglot.py | 395 ++++++------- examples/few-shot/support/omniglot_loaders.py | 530 +++++++++--------- examples/visualize.py | 94 ++-- include/adam_op/adam_op.h | 7 +- include/adam_op/adam_op_impl.cuh | 7 +- include/adam_op/adam_op_impl.h | 7 +- setup.cfg | 43 ++ setup.py | 199 +++---- src/adam_op/adam_op.cpp | 19 +- src/adam_op/adam_op_impl.cpp | 19 +- src/adam_op/adam_op_impl.cu | 19 +- tests/requirements.txt | 3 + .../high_level/test_high_level_inplace.py | 338 ++++++----- .../unit/low_level/test_low_level_inplace.py | 357 ++++++------ tests/unit/test_clip.py | 96 ++-- tests/unit/test_schedule.py | 48 +- 56 files changed, 3593 insertions(+), 3178 deletions(-) create mode 100644 .github/workflows/release.yml create mode 100644 .github/workflows/test.yml create mode 100644 docs/spelling_wordlist.txt create mode 100644 setup.cfg create mode 100644 tests/requirements.txt diff --git a/.clang-format b/.clang-format index 7e93992a..3d22e0a8 100644 --- a/.clang-format +++ b/.clang-format @@ -1,3 +1 @@ BasedOnStyle: Google -DerivePointerAlignment: false -PointerAlignment: Left diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 123a4cfc..cd4a45b8 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -1,6 +1,6 @@ name: Lint -on: [pull_request] +on: [push, pull_request] jobs: lint: @@ -18,6 +18,12 @@ jobs: - name: Upgrade pip run: | python -m pip install --upgrade pip + - name: Install requirements + run: | + python -m pip install -r requirements.txt + - name: Install dependencies + run: | + python -m pip install -e . 
- name: flake8 run: | make flake8 @@ -33,6 +39,6 @@ jobs: - name: docstyle run: | make docstyle - - name: spelling - run: | - make spelling \ No newline at end of file + # - name: spelling + # run: | + # make spelling \ No newline at end of file diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 00000000..016dd0ef --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,34 @@ +name: pypi + +on: + release: + types: [created] + +jobs: + deploy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Set up Python + uses: actions/setup-python@v1 + with: + python-version: '3.x' + - name: Check consistency between the package version and release tag + run: | + RELEASE_VER=${GITHUB_REF#refs/*/} + PACKAGE_VER="v`python setup.py --version`" + if [ $RELEASE_VER != $PACKAGE_VER ] + then + echo "package ver. ($PACKAGE_VER) != release ver. ($RELEASE_VER)"; exit 1 + fi + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install setuptools wheel twine + - name: Build and publish + env: + TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} + TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} + run: | + python setup.py sdist bdist_wheel + twine upload dist/* \ No newline at end of file diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 00000000..413f9415 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,31 @@ +name: Test + +on: + release: + types: [created] + +jobs: + test: + runs-on: ubuntu-latest + steps: + - name: Cancel previous run + uses: styfle/cancel-workflow-action@0.9.1 + with: + access_token: ${{ github.token }} + - uses: actions/checkout@v2 + - name: Set up Python 3.8 + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: Upgrade pip + run: | + python -m pip install --upgrade pip + - name: Install dependencies + run: | + python -m pip install -r tests/requirements.txt + - name: Install dependencies + run: | + python -m pip install -e . + - name: Test with pytest + run: | + make pytest \ No newline at end of file diff --git a/.gitignore b/.gitignore index f74fff7e..5a67f740 100644 --- a/.gitignore +++ b/.gitignore @@ -13,3 +13,143 @@ docs/_build/ # mkdocs documentation /site + + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +wheelhouse/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ \ No newline at end of file diff --git a/Makefile b/Makefile index db744e54..17dd1f53 100644 --- a/Makefile +++ b/Makefile @@ -1,12 +1,12 @@ print-% : ; @echo $* = $($*) -SHELL = /bin/bash -PROJECT_NAME = TorchOpt -PROJECT_PATH=${PROJECT_NAME}/ +SHELL = /bin/bash +PROJECT_NAME = TorchOpt +PROJECT_PATH = ${PROJECT_NAME}/ PROJECT_FOLDER = $(PROJECT_NAME) examples include src tests -PYTHON_FILES = $(shell find . -type f -name "*.py") -CPP_FILES = $(shell find . -type f -name "*.h" -o -name "*.cpp") -COMMIT_HASH = $(shell git log -1 --format=%h) -COPYRIGHT = "MetaOPT Team. All Rights Reserved." +PYTHON_FILES = $(shell find . -type f -name "*.py") +CPP_FILES = $(shell find . -type f -name "*.h" -o -name "*.cpp" -o -name "*.cuh" -o -name "*.cu") +COMMIT_HASH = $(shell git log -1 --format=%h) +COPYRIGHT = "MetaOPT Team. All Rights Reserved." PATH := $(HOME)/go/bin:$(PATH) # installation @@ -66,7 +66,7 @@ flake8: flake8-install flake8 $(PYTHON_FILES) --count --select=E9,F63,F7,F82,E225,E251 --show-source --statistics py-format: py-format-install - isort --check $(PYTHON_FILES) && yapf -r -d $(PYTHON_FILES) + isort --check $(PYTHON_FILES) && yapf -ir $(PYTHON_FILES) mypy: mypy-install mypy $(PROJECT_NAME) diff --git a/TorchOpt/__init__.py b/TorchOpt/__init__.py index 0a505b27..2fc37a35 100644 --- a/TorchOpt/__init__.py +++ b/TorchOpt/__init__.py @@ -13,36 +13,51 @@ # limitations under the License. 
# ============================================================================== -from TorchOpt._src import (accelerated_op_available, clip, combine, hook, schedule, - visual) +from TorchOpt._src import ( + accelerated_op_available, + clip, + combine, + hook, + schedule, + visual, +) from TorchOpt._src.alias import adam, rmsprop, sgd +from TorchOpt._src.MetaOptimizer import ( + MetaAdam, + MetaOptimizer, + MetaRMSProp, + MetaSGD, +) from TorchOpt._src.Optimizer import SGD, Adam, Optimizer, RMSProp -from TorchOpt._src.MetaOptimizer import MetaAdam, MetaOptimizer, MetaRMSProp, MetaSGD from TorchOpt._src.update import apply_updates -from TorchOpt._src.utils import extract_state_dict, recover_state_dict, stop_gradient +from TorchOpt._src.utils import ( + extract_state_dict, + recover_state_dict, + stop_gradient, +) __version__ = "0.4.1" __all__ = ( - "accelerated_op_available", - "clip", - "combine", - "hook", - "schedule", - "visual", - "adam", - "rmsprop", - "sgd", - "MetaAdam", - "MetaOptimizer", - "MetaRMSProp", - "MetaSGD", - "SGD", - "Adam", - "Optimizer", - "RMSProp", - "apply_updates", - "extract_state_dict", - "recover_state_dict", - "stop_gradient", -) \ No newline at end of file + "accelerated_op_available", + "clip", + "combine", + "hook", + "schedule", + "visual", + "adam", + "rmsprop", + "sgd", + "MetaAdam", + "MetaOptimizer", + "MetaRMSProp", + "MetaSGD", + "SGD", + "Adam", + "Optimizer", + "RMSProp", + "apply_updates", + "extract_state_dict", + "recover_state_dict", + "stop_gradient", +) diff --git a/TorchOpt/_lib/adam_op.py b/TorchOpt/_lib/adam_op.py index 0a72e0b1..ceb2eb9e 100644 --- a/TorchOpt/_lib/adam_op.py +++ b/TorchOpt/_lib/adam_op.py @@ -12,31 +12,49 @@ # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== +import torch -def forward_(updates, mu, nu, lr, b1, b2, eps, eps_root, count): - ... +def forward_( + updates: torch.Tensor, mu: torch.Tensor, nu: torch.Tensor, b1: float, + b2: float, eps: float, eps_root: float, count: int +) -> torch.Tensor: + ... -def forwardMu(updates, mu, b1): - ... +def forwardMu( + updates: torch.Tensor, mu: torch.Tensor, b1: float +) -> torch.Tensor: + ... -def forwardNu(updates, nu, b2): - ... +def forwardNu( + updates: torch.Tensor, nu: torch.Tensor, b2: float +) -> torch.Tensor: + ... -def forwardUpdates(new_mu, new_nu, lr, b1, b2, eps, eps_root, count): - ... +def forwardUpdates( + new_mu: torch.Tensor, new_nu: torch.Tensor, b1: float, b2: float, eps: float, + eps_root: float, count: int +) -> torch.Tensor: + ... -def backwardMu(dmu, updates, mu, b1): - ... +def backwardMu( + dmu: torch.Tensor, updates: torch.Tensor, mu: torch.Tensor, b1: float +) -> torch.Tensor: + ... -def backwardNu(dnu, updates, nu, b2): - ... +def backwardNu( + dnu: torch.Tensor, updates: torch.Tensor, nu: torch.Tensor, b2: float +) -> torch.Tensor: + ... -def backwardUpdates(dupdates, updates, new_mu, new_nu, lr, b1, b2, count): - ... +def backwardUpdates( + dupdates: torch.Tensor, updates: torch.Tensor, new_mu: torch.Tensor, + new_nu: torch.Tensor, b1: float, b2: float, count: int +) -> torch.Tensor: + ... diff --git a/TorchOpt/_src/MetaOptimizer.py b/TorchOpt/_src/MetaOptimizer.py index e1240d7e..bd0aea05 100644 --- a/TorchOpt/_src/MetaOptimizer.py +++ b/TorchOpt/_src/MetaOptimizer.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
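# The typed stubs above describe the fused Adam kernels exposed from C++/CUDA.
# As a rough, unfused reference (a sketch that follows the `scale_by_adam`
# math later in this patch, not the actual kernel implementation; the
# `*_reference` names are illustrative only):
import torch

def forward_mu_reference(updates: torch.Tensor, mu: torch.Tensor, b1: float) -> torch.Tensor:
  # First moment: mu <- b1 * mu + (1 - b1) * g
  return mu.mul(b1).add(updates, alpha=1.0 - b1)

def forward_nu_reference(updates: torch.Tensor, nu: torch.Tensor, b2: float) -> torch.Tensor:
  # Second moment: nu <- b2 * nu + (1 - b2) * g ** 2
  return nu.mul(b2).add(updates.pow(2), alpha=1.0 - b2)

def forward_updates_reference(new_mu: torch.Tensor, new_nu: torch.Tensor,
                              b1: float, b2: float, eps: float,
                              eps_root: float, count: int) -> torch.Tensor:
  # Bias-corrected step direction: mu_hat / (sqrt(nu_hat + eps_root) + eps)
  mu_hat = new_mu.div(1.0 - b1**count)
  nu_hat = new_nu.div(1.0 - b2**count)
  return mu_hat.div(nu_hat.add(eps_root).sqrt().add(eps))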
# ============================================================================== +from typing import Union import jax import torch @@ -24,9 +25,10 @@ class MetaOptimizer(object): - """A high-level optimizer base class for meta learning.""" - def __init__(self, net: nn.Module, impl: base.GradientTransformation): - """ + """A high-level optimizer base class for meta learning.""" + + def __init__(self, net: nn.Module, impl: base.GradientTransformation): + """ Args: net (nn.Module): a network whose parameters should be optimized. impl (base.GradientTransformation): a low level optimizer function, it could be a @@ -34,13 +36,14 @@ def __init__(self, net: nn.Module, impl: base.GradientTransformation): `combine.py`. Note that use `MetaOptimizer(sgd(moment_requires_grad=True))` or `MetaOptimizer(chain(sgd(moment_requires_grad=True))) is equavalent to `MetaSGD`. """ - self.impl = impl - self.param_containers_groups = [] - self.state_groups = [] - self.add_param_group(net) + self.impl = impl + self.param_containers_groups = [] # type: ignore + self.state_groups = [] # type: ignore + + self.add_param_group(net) - def step(self, loss: torch.Tensor): - """Compute the gradients of the loss to the network parameters and update network parameters. + def step(self, loss: torch.Tensor): + """Compute the gradients of the loss to the network parameters and update network parameters. Graph of the derivative will be constructed, allowing to compute higher order derivative products. We use the differentiable optimizer (pass argument inplace=False) to scale the gradients and update @@ -49,119 +52,138 @@ def step(self, loss: torch.Tensor): Args: loss (torch.Tensor): the loss that is used to compute the gradients to the network parameters. """ - # step parameter only - for idx, (state, param_containers) in enumerate( - zip(self.state_groups, self.param_containers_groups)): - flatten_params, containers_tree = jax.tree_util.tree_flatten( - param_containers) - flatten_params = tuple(flatten_params) - grad = torch.autograd.grad(loss, - flatten_params, - create_graph=True, - allow_unused=True) - updates, state = self.impl.update(grad, state, False) - self.state_groups[idx] = state - new_params = apply_updates(flatten_params, updates, inplace=False) - unflatten_new_params = containers_tree.unflatten(new_params) - for (container, unflatten_param) in zip(param_containers, - unflatten_new_params): - container.update(unflatten_param) - - def add_param_group(self, net): - from .utils import _extract_container - net_container = _extract_container(net, with_buffer=False) - flatten_param, _ = jax.tree_util.tree_flatten(net_container) - flatten_param = tuple(flatten_param) - optim_state = self.impl.init(flatten_param) - self.state_groups.append(optim_state) - self.param_containers_groups.append(net_container) - - def state_dict(self): - """Extract the references of the optimizer states. 
+ # step parameter only + for idx, (state, param_containers) in enumerate( + zip(self.state_groups, self.param_containers_groups) + ): + flatten_params, containers_tree = jax.tree_util.tree_flatten( + param_containers + ) + flatten_params = tuple(flatten_params) + grad = torch.autograd.grad( + loss, flatten_params, create_graph=True, allow_unused=True + ) + updates, state = self.impl.update(grad, state, False) + self.state_groups[idx] = state + new_params = apply_updates(flatten_params, updates, inplace=False) + unflatten_new_params = containers_tree.unflatten(new_params) + for (container, + unflatten_param) in zip(param_containers, unflatten_new_params): + container.update(unflatten_param) + + def add_param_group(self, net): + from TorchOpt.utils import _extract_container + net_container = _extract_container(net, with_buffer=False) + flatten_param, _ = jax.tree_util.tree_flatten(net_container) + flatten_param = tuple(flatten_param) + optim_state = self.impl.init(flatten_param) + self.state_groups.append(optim_state) + self.param_containers_groups.append(net_container) + + def state_dict(self): + """Extract the references of the optimizer states. Note that the states are references, so any in-place operations will change the states inside `MetaOptimizer` at the same time. """ - out_groups = tuple(group for group in self.state_groups) - return out_groups + out_groups = tuple(group for group in self.state_groups) + return out_groups - def load_state_dict(self, state_dict): - self.state_groups = list(group for group in state_dict) + def load_state_dict(self, state_dict): + self.state_groups = list(group for group in state_dict) class MetaSGD(MetaOptimizer): - """A canonical Stochastic Gradient Descent optimiser.""" - def __init__(self, - net, - lr: ScalarOrSchedule, - momentum: float = None, - nesterov: bool = False, - moment_requires_grad: bool = True): - """ + """A canonical Stochastic Gradient Descent optimiser.""" + + def __init__( + self, + net, + lr: ScalarOrSchedule, + momentum: Union[float, None] = None, + nesterov: bool = False, + moment_requires_grad: bool = True + ): + """ Args: net (nn.Module): a network whose parameters should be optimized. args: other arguments see `alias.sgd`, here we set `moment_requires_grad=True` to make tensors like momentum be differentiable. """ - super().__init__( - net, - sgd(lr=lr, - momentum=momentum, - nesterov=nesterov, - moment_requires_grad=moment_requires_grad)) + super().__init__( + net, + sgd( + lr=lr, + momentum=momentum, + nesterov=nesterov, + moment_requires_grad=moment_requires_grad + ) + ) class MetaAdam(MetaOptimizer): - """The classic Adam optimiser.""" - def __init__(self, - net, - lr: ScalarOrSchedule, - b1: float = 0.9, - b2: float = 0.999, - eps: float = 1e-8, - eps_root: float = 0.0, - moment_requires_grad: bool = True, - use_accelerated_op: bool = False): - """ + """The classic Adam optimiser.""" + + def __init__( + self, + net, + lr: ScalarOrSchedule, + b1: float = 0.9, + b2: float = 0.999, + eps: float = 1e-8, + eps_root: float = 0.0, + moment_requires_grad: bool = True, + use_accelerated_op: bool = False + ): + """ Args: net (nn.Module): a network whose parameters should be optimized. args: other arguments see `alias.adam`, here we set `moment_requires_grad=True` to make tensors like momentum be differentiable. 
""" - super().__init__( - net, - adam(lr=lr, - b1=b1, - b2=b2, - eps=eps, - eps_root=eps_root, - moment_requires_grad=moment_requires_grad, - use_accelerated_op=use_accelerated_op)) + super().__init__( + net, + adam( + lr=lr, + b1=b1, + b2=b2, + eps=eps, + eps_root=eps_root, + moment_requires_grad=moment_requires_grad, + use_accelerated_op=use_accelerated_op + ) + ) class MetaRMSProp(MetaOptimizer): - """The classic RMSProp optimiser.""" - def __init__(self, - net, - lr: ScalarOrSchedule, - decay: float = 0.9, - eps: float = 1e-8, - initial_scale: float = 0., - centered: bool = False, - momentum: float = None, - nesterov: bool = False): - """ + """The classic RMSProp optimiser.""" + + def __init__( + self, + net, + lr: ScalarOrSchedule, + decay: float = 0.9, + eps: float = 1e-8, + initial_scale: float = 0., + centered: bool = False, + momentum: Union[float, None] = None, + nesterov: bool = False + ): + """ Args: net (nn.Module): a network whose parameters should be optimized. args: other arguments see `alias.adam`, here we set `moment_requires_grad=True` to make tensors like momentum be differentiable. """ - super().__init__( - net, - rmsprop(lr=lr, - decay=decay, - eps=eps, - initial_scale=initial_scale, - centered=centered, - momentum=momentum, - nesterov=nesterov)) + super().__init__( + net, + rmsprop( + lr=lr, + decay=decay, + eps=eps, + initial_scale=initial_scale, + centered=centered, + momentum=momentum, + nesterov=nesterov + ) + ) diff --git a/TorchOpt/_src/Optimizer.py b/TorchOpt/_src/Optimizer.py index d825118f..780ae971 100644 --- a/TorchOpt/_src/Optimizer.py +++ b/TorchOpt/_src/Optimizer.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== +from typing import Union import jax import torch @@ -22,9 +23,10 @@ class Optimizer(object): - """A high-level base class that has the similar with `torch.optim.Optimier`""" - def __init__(self, params, impl): - """ + """A high-level base class that has the similar with `torch.optim.Optimier`""" + + def __init__(self, params, impl): + """ Args: params (iterable): an iterable of `torch.Tensor`s. Specifies what Tensors should be optimized. impl (base.GradientTransformation): a low level optimizer function, it could be @@ -33,16 +35,16 @@ def __init__(self, params, impl): is equavalent to `SGD`. """ - if not isinstance(params, list): - params = list(params) - self.impl = impl - self.param_groups = [] - self.param_tree_groups = [] - self.state_groups = [] - self.add_param_group(params) + if not isinstance(params, list): + params = list(params) + self.impl = impl + self.param_groups = [] + self.param_tree_groups = [] + self.state_groups = [] + self.add_param_group(params) - def zero_grad(self, set_to_none: bool = False): - """Sets the gradients of all optimized `torch.Tensor`s to zero. + def zero_grad(self, set_to_none: bool = False): + """Sets the gradients of all optimized `torch.Tensor`s to zero. The behivour is similar to `torch.optim.Optimizer.zero_grad`. @@ -50,34 +52,34 @@ def zero_grad(self, set_to_none: bool = False): set_to_none (bool): instead of setting to zero, set the grads to None. 
""" - for group in self.param_groups: - if set_to_none: + for group in self.param_groups: + if set_to_none: - def f(p): - p.grad = None - return None - else: + def f(p): + p.grad = None + return None + else: - def f(p): - if p.grad is None: - return None - if p.grad.grad_fn is not None: - p.grad.detach_() - else: - p.grad.requires_grad_(False) - p.grad.zero_() - return None + def f(p): + if p.grad is None: + return None + if p.grad.grad_fn is not None: + p.grad.detach_() + else: + p.grad.requires_grad_(False) + p.grad.zero_() + return None - jax.tree_map(f, group) + jax.tree_map(f, group) - def state_dict(self): - return self.state_groups + def state_dict(self): + return self.state_groups - def load_state_dict(self, state_dict): - self.state_groups = state_dict + def load_state_dict(self, state_dict): + self.state_groups = state_dict - def step(self, closure=None): - """Performs a single optimization step (parameter update). + def step(self, closure=None): + """Performs a single optimization step (parameter update). The behivour is similar to `torch.optim.Optimizer.step`. @@ -85,98 +87,116 @@ def step(self, closure=None): closure (callable, optional): A closure that reevaluates the model and returns the loss. """ - loss = None - if closure is not None: - with torch.enable_grad(): - loss = closure() + loss = None + if closure is not None: + with torch.enable_grad(): + loss = closure() - for param, state in zip(self.param_groups, self.state_groups): + for param, state in zip(self.param_groups, self.state_groups): - def f(p): - return p.grad + def f(p): + return p.grad - grad = jax.tree_map(f, param) - updates, _ = self.impl.update(grad, state) - apply_updates(param, updates) + grad = jax.tree_map(f, param) + updates, _ = self.impl.update(grad, state) + apply_updates(param, updates) - return loss + return loss - def add_param_group(self, params): - params, tree = jax.tree_flatten(params) - params = tuple(params) - self.param_groups.append(params) - self.param_tree_groups.append(tree) - self.state_groups.append(self.impl.init(params)) + def add_param_group(self, params): + params, tree = jax.tree_flatten(params) + params = tuple(params) + self.param_groups.append(params) + self.param_tree_groups.append(tree) + self.state_groups.append(self.impl.init(params)) class SGD(Optimizer): - """The classic Adam optimiser.""" - def __init__(self, - params, - lr: ScalarOrSchedule, - momentum: float = None, - nesterov: bool = False): - """ + """The classic Adam optimiser.""" + + def __init__( + self, + params, + lr: ScalarOrSchedule, + momentum: Union[float, None] = None, + nesterov: bool = False + ): + """ Args: params (iterable): an iterable of `torch.Tensor`s. Specifies what Tensors should be optimized. args: other arguments see `alias.adam`. 
""" - super().__init__( - params, - sgd(lr=lr, - momentum=momentum, - nesterov=nesterov, - moment_requires_grad=False)) + super().__init__( + params, + sgd( + lr=lr, + momentum=momentum, + nesterov=nesterov, + moment_requires_grad=False + ) + ) class Adam(Optimizer): - """A canonical Stochastic Gradient Descent optimiser.""" - def __init__(self, - params, - lr: ScalarOrSchedule, - b1: float = 0.9, - b2: float = 0.999, - eps: float = 1e-8, - eps_root: float = 0.0, - use_accelerated_op: bool = False): - """ + """A canonical Stochastic Gradient Descent optimiser.""" + + def __init__( + self, + params, + lr: ScalarOrSchedule, + b1: float = 0.9, + b2: float = 0.999, + eps: float = 1e-8, + eps_root: float = 0.0, + use_accelerated_op: bool = False + ): + """ Args: params (iterable): an iterable of `torch.Tensor`s. Specifies what Tensors should be optimized. args: other arguments see `alias.sgd`. """ - super().__init__( - params, - adam(lr=lr, - b1=b1, - b2=b2, - eps=eps, - eps_root=eps_root, - moment_requires_grad=False, - use_accelerated_op=use_accelerated_op)) + super().__init__( + params, + adam( + lr=lr, + b1=b1, + b2=b2, + eps=eps, + eps_root=eps_root, + moment_requires_grad=False, + use_accelerated_op=use_accelerated_op + ) + ) class RMSProp(Optimizer): - """An RMSProp optimiser.""" - def __init__(self, - params, - lr: ScalarOrSchedule, - decay: float = 0.9, - eps: float = 1e-8, - initial_scale: float = 0., - centered: bool = False, - momentum: float = None, - nesterov: bool = False): - """ + """An RMSProp optimiser.""" + + def __init__( + self, + params, + lr: ScalarOrSchedule, + decay: float = 0.9, + eps: float = 1e-8, + initial_scale: float = 0., + centered: bool = False, + momentum: Union[float, None] = None, + nesterov: bool = False + ): + """ Args: params (iterable): an iterable of `torch.Tensor`s. Specifies what Tensors should be optimized. args: other arguments see `alias.sgd`. 
""" - super().__init__( - params, - rmsprop(lr=lr, - decay=decay, - eps=eps, - initial_scale=initial_scale, - centered=centered, - momentum=momentum, - nesterov=nesterov)) + super().__init__( + params, + rmsprop( + lr=lr, + decay=decay, + eps=eps, + initial_scale=initial_scale, + centered=centered, + momentum=momentum, + nesterov=nesterov + ) + ) diff --git a/TorchOpt/_src/accelerated_op/__init__.py b/TorchOpt/_src/accelerated_op/__init__.py index c7cc70c4..d6fa1792 100644 --- a/TorchOpt/_src/accelerated_op/__init__.py +++ b/TorchOpt/_src/accelerated_op/__init__.py @@ -17,16 +17,16 @@ def accelerated_op_available(devices=None): - import torch - op = AdamOp() - if devices is None: - devices = [torch.device("cuda"), torch.device("cpu")] - elif isinstance(devices, torch.device): - devices = [devices] - try: - for device in devices: - updates = torch.tensor(1., device=device) - op(updates, updates, updates, 1) - return True - except: - return False + import torch + op = AdamOp() + if devices is None: + devices = [torch.device("cuda"), torch.device("cpu")] + elif isinstance(devices, torch.device): + devices = [devices] + try: + for device in devices: + updates = torch.tensor(1., device=device) + op(updates, updates, updates, 1) + return True + except: + return False diff --git a/TorchOpt/_src/accelerated_op/adam_op/AdamOp.py b/TorchOpt/_src/accelerated_op/adam_op/AdamOp.py index 92fd92d4..e726a61a 100644 --- a/TorchOpt/_src/accelerated_op/adam_op/AdamOp.py +++ b/TorchOpt/_src/accelerated_op/adam_op/AdamOp.py @@ -21,94 +21,100 @@ class AdamOp(object): - class MuOp(torch.autograd.Function): - @staticmethod - def jvp(ctx: Any, *grad_inputs: Any) -> Any: - pass - - @staticmethod - def forward(ctx, *args): - updates, mu, b1 = args - new_mu = adam_op.forwardMu(updates, mu, b1) - ctx.save_for_backward(updates, mu) - ctx.b1 = b1 - return new_mu - - @staticmethod - def backward(ctx, *args): - dmu = args[0] - updates, mu = ctx.saved_tensors - b1 = ctx.b1 - result = adam_op.backwardMu(dmu, updates, mu, b1) - return result[0], result[1], None - - class NuOp(torch.autograd.Function): - @staticmethod - def jvp(ctx: Any, *grad_inputs: Any) -> Any: - pass - - @staticmethod - def forward(ctx, *args): - updates, nu, b2 = args - new_nu = adam_op.forwardNu(updates, nu, b2) - ctx.save_for_backward(updates, nu) - ctx.b2 = b2 - return new_nu - - @staticmethod - def backward(ctx, *args): - dnu = args[0] - updates, nu = ctx.saved_tensors - b2 = ctx.b2 - result = adam_op.backwardNu(dnu, updates, nu, b2) - return result[0], result[1], None - - class UpdatesOp(torch.autograd.Function): - @staticmethod - def jvp(ctx: Any, *grad_inputs: Any) -> Any: - pass - - @staticmethod - def forward(ctx, *args): - new_mu, new_nu, (b1, b2, eps, eps_root, count) = args - new_updates = adam_op.forwardUpdates(new_mu, new_nu, b1, b2, eps, - eps_root, count) - ctx.save_for_backward(new_updates, new_mu, new_nu) - ctx.others = (b1, b2, eps, eps_root, count) - return new_updates - - @staticmethod - def backward(ctx, *args): - dupdates = args[0] - updates, new_mu, new_nu = ctx.saved_tensors - b1, b2, eps, eps_root, count = ctx.others - result = adam_op.backwardUpdates(dupdates, updates, new_mu, new_nu, - b1, b2, count) - return result[0], result[1], None - - def __init__(self, b1=0.9, b2=0.999, eps=1e-8, eps_root=0., inplace=True): - self.b1 = b1 - self.b2 = b2 - self.eps = eps - self.eps_root = eps_root - self.inplace = inplace - - def __call__(self, mu, nu, updates, count): - if updates is None: - return mu, nu, None - if updates.is_cuda: - 
current_device = torch.cuda.current_device() - torch.cuda.set_device(updates.device) - if self.inplace: - new_updates, new_mu, new_nu = adam_op.forward_( - updates, mu, nu, self.b1, self.b2, self.eps, self.eps_root, - count) - else: - new_mu = self.MuOp.apply(updates, mu, self.b1) - new_nu = self.NuOp.apply(updates, nu, self.b2) - new_updates = self.UpdatesOp.apply( - new_mu, new_nu, - (self.b1, self.b2, self.eps, self.eps_root, count)) - if updates.is_cuda: - torch.cuda.set_device(current_device) - return new_mu, new_nu, new_updates + + class MuOp(torch.autograd.Function): + + @staticmethod + def jvp(ctx: Any, *grad_inputs: Any) -> Any: + pass + + @staticmethod + def forward(ctx: Any, *args: Any, **kwargs: Any) -> Any: + updates, mu, b1 = args + new_mu = adam_op.forwardMu(updates, mu, b1) + ctx.save_for_backward(updates, mu) + ctx.b1 = b1 + return new_mu + + @staticmethod + def backward(ctx: Any, *args: Any) -> Any: + dmu = args[0] + updates, mu = ctx.saved_tensors + b1 = ctx.b1 + result = adam_op.backwardMu(dmu, updates, mu, b1) + return result[0], result[1], None + + class NuOp(torch.autograd.Function): + + @staticmethod + def jvp(ctx: Any, *grad_inputs: Any) -> Any: + pass + + @staticmethod + def forward(ctx: Any, *args: Any, **kwargs: Any) -> Any: + updates, nu, b2 = args + new_nu = adam_op.forwardNu(updates, nu, b2) + ctx.save_for_backward(updates, nu) + ctx.b2 = b2 + return new_nu + + @staticmethod + def backward(ctx: Any, *args: Any) -> Any: + dnu = args[0] + updates, nu = ctx.saved_tensors + b2 = ctx.b2 + result = adam_op.backwardNu(dnu, updates, nu, b2) + return result[0], result[1], None + + class UpdatesOp(torch.autograd.Function): + + @staticmethod + def jvp(ctx: Any, *grad_inputs: Any) -> Any: + pass + + @staticmethod + def forward(ctx: Any, *args: Any, **kwargs: Any) -> Any: + new_mu, new_nu, (b1, b2, eps, eps_root, count) = args + new_updates = adam_op.forwardUpdates( + new_mu, new_nu, b1, b2, eps, eps_root, count + ) + ctx.save_for_backward(new_updates, new_mu, new_nu) + ctx.others = (b1, b2, eps, eps_root, count) + return new_updates + + @staticmethod + def backward(ctx: Any, *args: Any) -> Any: + dupdates = args[0] + updates, new_mu, new_nu = ctx.saved_tensors + b1, b2, eps, eps_root, count = ctx.others + result = adam_op.backwardUpdates( + dupdates, updates, new_mu, new_nu, b1, b2, count + ) + return result[0], result[1], None + + def __init__(self, b1=0.9, b2=0.999, eps=1e-8, eps_root=0., inplace=True): + self.b1 = b1 + self.b2 = b2 + self.eps = eps + self.eps_root = eps_root + self.inplace = inplace + + def __call__(self, mu, nu, updates, count): + if updates is None: + return mu, nu, None + if updates.is_cuda: + current_device = torch.cuda.current_device() + torch.cuda.set_device(updates.device) + if self.inplace: + new_updates, new_mu, new_nu = adam_op.forward_( + updates, mu, nu, self.b1, self.b2, self.eps, self.eps_root, count + ) + else: + new_mu = self.MuOp.apply(updates, mu, self.b1) + new_nu = self.NuOp.apply(updates, nu, self.b2) + new_updates = self.UpdatesOp.apply( + new_mu, new_nu, (self.b1, self.b2, self.eps, self.eps_root, count) + ) + if updates.is_cuda: + torch.cuda.set_device(current_device) + return new_mu, new_nu, new_updates diff --git a/TorchOpt/_src/alias.py b/TorchOpt/_src/alias.py index a34ea4dc..3f676efe 100644 --- a/TorchOpt/_src/alias.py +++ b/TorchOpt/_src/alias.py @@ -39,27 +39,30 @@ def _scale_by_lr(lr: ScalarOrSchedule, flip_sign=True): - m = -1 if flip_sign else 1 - if callable(lr): + m = -1 if flip_sign else 1 + if callable(lr): - def 
schedule_wrapper(count): - def f(scaled_lr): - return m * scaled_lr + def schedule_wrapper(count): - return jax.tree_map(f, lr(count)) + def f(scaled_lr): + return m * scaled_lr - return transform.scale_by_schedule(schedule_wrapper) - return transform.scale(m * lr) + return jax.tree_map(f, lr(count)) # type: ignore + return transform.scale_by_schedule(schedule_wrapper) + return transform.scale(m * lr) -def adam(lr: ScalarOrSchedule, - b1: float = 0.9, - b2: float = 0.999, - eps: float = 1e-8, - eps_root: float = 0.0, - moment_requires_grad: bool = False, - use_accelerated_op: bool = False) -> base.GradientTransformation: - """The classic Adam optimiser. + +def adam( + lr: ScalarOrSchedule, + b1: float = 0.9, + b2: float = 0.999, + eps: float = 1e-8, + eps_root: float = 0.0, + moment_requires_grad: bool = False, + use_accelerated_op: bool = False +) -> base.GradientTransformation: + """The classic Adam optimiser. Adam is an SGD variant with learning rate adaptation. The `lr` used for each weight is computed from estimates of first- and second-order @@ -84,24 +87,26 @@ def adam(lr: ScalarOrSchedule, Returns: the corresponding `GradientTransformation`. """ - adam_inst = transform.scale_by_accelerated_adam if use_accelerated_op else transform.scale_by_adam - return combine.chain( - adam_inst(b1=b1, - b2=b2, - eps=eps, - eps_root=eps_root, - moment_requires_grad=moment_requires_grad), - _scale_by_lr(lr), - ) + adam_inst = transform.scale_by_accelerated_adam if use_accelerated_op else transform.scale_by_adam + return combine.chain( + adam_inst( + b1=b1, + b2=b2, + eps=eps, + eps_root=eps_root, + moment_requires_grad=moment_requires_grad + ), + _scale_by_lr(lr), + ) def sgd( - lr: ScalarOrSchedule, - momentum: Optional[float] = None, - nesterov: bool = False, - moment_requires_grad: bool = False, + lr: ScalarOrSchedule, + momentum: Optional[float] = None, + nesterov: bool = False, + moment_requires_grad: bool = False, ) -> base.GradientTransformation: - """A canonical Stochastic Gradient Descent optimiser. + """A canonical Stochastic Gradient Descent optimiser. This implements stochastic gradient descent. It also includes support for momentum, and nesterov acceleration, as these are standard practice when @@ -121,22 +126,28 @@ def sgd( Returns: A `GradientTransformation`. """ - return combine.chain( - (transform.trace(decay=momentum, - nesterov=nesterov, - moment_requires_grad=moment_requires_grad) - if momentum is not None else base.identity()), _scale_by_lr(lr)) - - -def rmsprop(lr: ScalarOrSchedule, - decay: float = 0.9, - eps: float = 1e-8, - initial_scale: float = 0., - centered: bool = False, - momentum: Optional[float] = None, - nesterov: bool = False) -> base.GradientTransformation: - # pylint: disable=line-too-long - """A flexible RMSProp optimiser. + return combine.chain( + ( + transform.trace( + decay=momentum, + nesterov=nesterov, + moment_requires_grad=moment_requires_grad + ) if momentum is not None else base.identity() + ), _scale_by_lr(lr) + ) + + +def rmsprop( + lr: ScalarOrSchedule, + decay: float = 0.9, + eps: float = 1e-8, + initial_scale: float = 0., + centered: bool = False, + momentum: Optional[float] = None, + nesterov: bool = False +) -> base.GradientTransformation: + # pylint: disable=line-too-long + """A flexible RMSProp optimiser. RMSProp is an SGD variant with learning rate adaptation. The `learning_rate` used for each weight is scaled by a suitable estimate of the magnitude of the gradients on previous steps. 
Several variants of RMSProp can be found @@ -160,18 +171,20 @@ def rmsprop(lr: ScalarOrSchedule, Returns: the corresponding `GradientTransformation`. """ - # pylint: enable=line-too-long - if centered: - return combine.chain( - transform.scale_by_stddev(decay=decay, - eps=eps, - initial_scale=initial_scale), - _scale_by_lr(lr), - (transform.trace(decay=momentum, nesterov=nesterov) - if momentum is not None else base.identity())) + # pylint: enable=line-too-long + if centered: return combine.chain( - transform.scale_by_rms(decay=decay, - eps=eps, - initial_scale=initial_scale), _scale_by_lr(lr), - (transform.trace(decay=momentum, nesterov=nesterov) - if momentum is not None else base.identity())) + transform.scale_by_stddev( + decay=decay, eps=eps, initial_scale=initial_scale + ), _scale_by_lr(lr), ( + transform.trace(decay=momentum, nesterov=nesterov) + if momentum is not None else base.identity() + ) + ) + return combine.chain( + transform.scale_by_rms(decay=decay, eps=eps, initial_scale=initial_scale), + _scale_by_lr(lr), ( + transform.trace(decay=momentum, nesterov=nesterov) + if momentum is not None else base.identity() + ) + ) diff --git a/TorchOpt/_src/base.py b/TorchOpt/_src/base.py index 5b2ad532..8b1559e6 100644 --- a/TorchOpt/_src/base.py +++ b/TorchOpt/_src/base.py @@ -45,18 +45,19 @@ class EmptyState(NamedTuple): - """An empty state for the simplest stateless transformations.""" + """An empty state for the simplest stateless transformations.""" class TransformInitFn(typing_extensions.Protocol): - """A callable type for the `init` step of a `GradientTransformation`. + """A callable type for the `init` step of a `GradientTransformation`. The `init` step takes a tree of `params` and uses these to construct an arbitrary structured initial `state` for the gradient transformation. This may hold statistics of the past updates or any other non static information. """ - def __call__(self, params: Params) -> OptState: - """The `init` function. + + def __call__(self, params: Params) -> OptState: + """The `init` function. Args: params: The initial value of the parameters. @@ -64,11 +65,11 @@ def __call__(self, params: Params) -> OptState: Returns: The initial state of the gradient transformation. """ - ... + ... class TransformUpdateFn(typing_extensions.Protocol): - """A callable type for the `update` step of a `GradientTransformation`. + """A callable type for the `update` step of a `GradientTransformation`. The `update` step takes a tree of candidate parameter `updates` (e.g. their gradient with respect to some loss), an arbitrary structured `state`, and the @@ -76,11 +77,12 @@ class TransformUpdateFn(typing_extensions.Protocol): optional, it must however be provided when using transformations that require access to the current values of the parameters. """ - def __call__(self, - updates: Updates, - state: OptState, - inplace: bool = True) -> Tuple[Updates, OptState]: - """The `update` function. + + def __call__(self, + updates: Updates, + state: OptState, + inplace: bool = True) -> Tuple[Updates, OptState]: + """The `update` function. Args: updates: A tree of candidate updates. @@ -90,11 +92,11 @@ def __call__(self, Returns: The transformed updates, and the updated state. """ - ... + ... class GradientTransformation(NamedTuple): - """A pair of pure functions implementing a gradient transformation. + """A pair of pure functions implementing a gradient transformation. TorchOpt optimizers are all implemented as _gradient transformations_ like Optax. 
A gradient transformation is defined to be a pair of pure functions, @@ -120,22 +122,23 @@ class GradientTransformation(NamedTuple): If the inplace flag is true, the output results are the same instance as the input. """ - init: TransformInitFn - update: TransformUpdateFn + init: TransformInitFn + update: TransformUpdateFn def identity() -> GradientTransformation: - """Stateless identity transformation that leaves input gradients untouched. + """Stateless identity transformation that leaves input gradients untouched. This function passes through the *gradient updates* unchanged. Returns: An (init_fn, update_fn) tuple. """ - def init_fn(_): - return EmptyState() - def update_fn(updates, state, inplace=False): - return updates, state + def init_fn(_): + return EmptyState() + + def update_fn(updates, state, inplace=False): + return updates, state - return GradientTransformation(init_fn, update_fn) + return GradientTransformation(init_fn, update_fn) diff --git a/TorchOpt/_src/clip.py b/TorchOpt/_src/clip.py index 3b50c40c..b0e24aed 100644 --- a/TorchOpt/_src/clip.py +++ b/TorchOpt/_src/clip.py @@ -26,60 +26,65 @@ def clip_grad_norm( - max_norm: float, - norm_type: float = 2., - error_if_nonfinite: bool = False) -> base.GradientTransformation: - """Clips gradient norm of an iterable of parameters. + max_norm: float, + norm_type: float = 2., + error_if_nonfinite: bool = False +) -> base.GradientTransformation: + """Clips gradient norm of an iterable of parameters. Args: max_delta: The maximum absolute value for each element in the update. Returns: An (init_fn, update_fn) tuple. """ - def init_fn(params): - del params - return ClipState() - def update_fn(updates, state, inplace=True): - available_updates = [] - for g in updates: - if g is not None: - available_updates.append(g) - if len(available_updates) == 0: - return torch.tensor(0.) - device = available_updates[0].device - with torch.no_grad(): - if norm_type == inf: - norms = [p.abs().max().to(device) for p in available_updates] - total_norm = norms[0] if len(norms) == 1 else torch.max( - torch.stack(norms)) - else: - total_norm = torch.norm( - torch.stack([ - torch.norm(p, norm_type).to(device) - for p in available_updates - ]), norm_type) - if error_if_nonfinite and torch.logical_or(total_norm.isnan(), - total_norm.isinf()): - raise RuntimeError( - f'The total norm of order {norm_type} for gradients from ' - '`parameters` is non-finite, so it cannot be clipped. To disable ' - 'this error and scale the gradients by the non-finite norm anyway, ' - 'set `error_if_nonfinite=False`') - clip_coef = max_norm / (float(total_norm) + 1e-6) - # Note: multiplying by the clamped coef is redundant when the coef is clamped to 1, but doing so - # avoids a `if clip_coef < 1:` conditional which can require a CPU <=> device synchronization - # when the gradients do not reside in CPU memory. - clip_coef_clamped = min(clip_coef, 1.) - if inplace: + def init_fn(params): + del params + return ClipState() - def f(g): - return g.mul_(clip_coef_clamped) if g is not None else None - else: + def update_fn(updates, state, inplace=True): + available_updates = [] + for g in updates: + if g is not None: + available_updates.append(g) + if len(available_updates) == 0: + return torch.tensor(0.) 
+ device = available_updates[0].device + with torch.no_grad(): + if norm_type == inf: + norms = [p.abs().max().to(device) for p in available_updates] + total_norm = norms[0] if len(norms) == 1 else torch.max( + torch.stack(norms) + ) + else: + total_norm = torch.norm( + torch.stack( + [torch.norm(p, norm_type).to(device) for p in available_updates] + ), norm_type + ) + if error_if_nonfinite and torch.logical_or( + total_norm.isnan(), total_norm.isinf() + ): + raise RuntimeError( + f'The total norm of order {norm_type} for gradients from ' + '`parameters` is non-finite, so it cannot be clipped. To disable ' + 'this error and scale the gradients by the non-finite norm anyway, ' + 'set `error_if_nonfinite=False`' + ) + clip_coef = max_norm / (float(total_norm) + 1e-6) + # Note: multiplying by the clamped coef is redundant when the coef is clamped to 1, but doing so + # avoids a `if clip_coef < 1:` conditional which can require a CPU <=> device synchronization + # when the gradients do not reside in CPU memory. + clip_coef_clamped = min(clip_coef, 1.) + if inplace: - def f(g): - return g.mul(clip_coef_clamped) if g is not None else None + def f(g): + return g.mul_(clip_coef_clamped) if g is not None else None + else: - new_updates = jax.tree_map(f, updates) - return new_updates, state + def f(g): + return g.mul(clip_coef_clamped) if g is not None else None - return base.GradientTransformation(init_fn, update_fn) + new_updates = jax.tree_map(f, updates) + return new_updates, state + + return base.GradientTransformation(init_fn, update_fn) diff --git a/TorchOpt/_src/combine.py b/TorchOpt/_src/combine.py index 6a1b241c..396a2bc4 100644 --- a/TorchOpt/_src/combine.py +++ b/TorchOpt/_src/combine.py @@ -34,7 +34,7 @@ def chain(*args: base.GradientTransformation) -> base.GradientTransformation: - """Applies a list of chainable update transformations. + """Applies a list of chainable update transformations. Given a sequence of chainable transforms, `chain` returns an `init_fn` that constructs a `state` by concatenating the states of the individual @@ -48,20 +48,21 @@ def chain(*args: base.GradientTransformation) -> base.GradientTransformation: A single (init_fn, update_fn) tuple. """ - init_fns, update_fns = zip(*args) + init_fns, update_fns = zip(*args) - def init_fn(params): - return tuple(fn(params) for fn in init_fns) + def init_fn(params): + return tuple(fn(params) for fn in init_fns) - def update_fn(updates, state, inplace=True): - if len(update_fns) != len(state): - raise ValueError( - 'The number of updates and states has to be the same in ' - 'chain! Make sure you have called init first!') - new_state = [] - for s, fn in zip(state, update_fns): - updates, new_s = fn(updates, s, inplace) - new_state.append(new_s) - return updates, tuple(new_state) + def update_fn(updates, state, inplace=True): + if len(update_fns) != len(state): + raise ValueError( + 'The number of updates and states has to be the same in ' + 'chain! Make sure you have called init first!' 
+ ) + new_state = [] + for s, fn in zip(state, update_fns): + updates, new_s = fn(updates, s, inplace) + new_state.append(new_s) + return updates, tuple(new_state) - return base.GradientTransformation(init_fn, update_fn) + return base.GradientTransformation(init_fn, update_fn) diff --git a/TorchOpt/_src/hook.py b/TorchOpt/_src/hook.py index a53e3319..93ca980b 100644 --- a/TorchOpt/_src/hook.py +++ b/TorchOpt/_src/hook.py @@ -20,25 +20,27 @@ def zero_nan_hook(g: torch.Tensor) -> torch.Tensor: - return torch.where(torch.isnan(g), torch.zeros_like(g), g) + return torch.where(torch.isnan(g), torch.zeros_like(g), g) def register_hook(hook) -> GradientTransformation: - """Stateless identity transformation that leaves input gradients untouched. + """Stateless identity transformation that leaves input gradients untouched. This function passes through the *gradient updates* unchanged. Returns: An (init_fn, update_fn) tuple. """ - def init_fn(_): - return EmptyState() - def update_fn(updates, state, inplace=False): - def f(g): - return g.register_hook(hook) if g is not None else None + def init_fn(_): + return EmptyState() - jax.tree_map(f, updates) - return updates, state + def update_fn(updates, state, inplace=False): - return GradientTransformation(init_fn, update_fn) + def f(g): + return g.register_hook(hook) if g is not None else None + + jax.tree_map(f, updates) + return updates, state + + return GradientTransformation(init_fn, update_fn) diff --git a/TorchOpt/_src/pytypes.py b/TorchOpt/_src/pytypes.py index f662198a..07b0e9e5 100644 --- a/TorchOpt/_src/pytypes.py +++ b/TorchOpt/_src/pytypes.py @@ -19,7 +19,9 @@ Scalar = Union[float, int] Numeric = Union[Tensor, Scalar] -TensorTree = Union[Tensor, Iterable['TensorTree'], Mapping[Any, 'TensorTree']] Schedule = Callable[[Numeric], Numeric] ScalarOrSchedule = Union[float, Schedule] + +# mypy: ignore-errors +TensorTree = Union[Tensor, Iterable['TensorTree'], Mapping[Any, 'TensorTree']] diff --git a/TorchOpt/_src/schedule.py b/TorchOpt/_src/schedule.py index 192cca3c..ad24cf82 100644 --- a/TorchOpt/_src/schedule.py +++ b/TorchOpt/_src/schedule.py @@ -37,12 +37,14 @@ from TorchOpt._src import base, pytypes -def polynomial_schedule(init_value: pytypes.Scalar, - end_value: pytypes.Scalar, - power: pytypes.Scalar, - transition_steps: int, - transition_begin: int = 0) -> base.Schedule: - """Constructs a schedule with polynomial transition from init to end value. +def polynomial_schedule( + init_value: pytypes.Scalar, + end_value: pytypes.Scalar, + power: pytypes.Scalar, + transition_steps: int, + transition_begin: int = 0 +) -> base.Schedule: + """Constructs a schedule with polynomial transition from init to end value. Args: init_value: initial value for the scalar to be annealed. end_value: end value of the scalar to be annealed. @@ -57,38 +59,43 @@ def polynomial_schedule(init_value: pytypes.Scalar, Returns: schedule: A function that maps step counts to values. """ - if transition_steps <= 0: - logging.info( - 'A polynomial schedule was set with a non-positive `transition_steps` ' - 'value; this results in a constant schedule with value `init_value`.' - ) - return lambda count: init_value + if transition_steps <= 0: + logging.info( + 'A polynomial schedule was set with a non-positive `transition_steps` ' + 'value; this results in a constant schedule with value `init_value`.' 
+ ) + return lambda count: init_value - if transition_begin < 0: - logging.info( - 'An exponential schedule was set with a negative `transition_begin` ' - 'value; this will result in `transition_begin` falling back to `0`.' - ) - transition_begin = 0 + if transition_begin < 0: + logging.info( + 'An exponential schedule was set with a negative `transition_begin` ' + 'value; this will result in `transition_begin` falling back to `0`.' + ) + transition_begin = 0 - def schedule(count): - def impl(count): - count = np.clip(count - transition_begin, 0, transition_steps) - frac = 1 - count / transition_steps - return (init_value - end_value) * (frac**power) + end_value + def schedule(count): - return jax.tree_map(impl, count) + def impl(count): + count = np.clip(count - transition_begin, 0, transition_steps) + frac = 1 - count / transition_steps + return (init_value - end_value) * (frac**power) + end_value - return schedule + return jax.tree_map(impl, count) + + return schedule # Alias polynomial schedule to linear schedule for convenience. -def linear_schedule(init_value: pytypes.Scalar, - end_value: pytypes.Scalar, - transition_steps: int, - transition_begin: int = 0) -> base.Schedule: - return polynomial_schedule(init_value=init_value, - end_value=end_value, - power=1, - transition_steps=transition_steps, - transition_begin=transition_begin) +def linear_schedule( + init_value: pytypes.Scalar, + end_value: pytypes.Scalar, + transition_steps: int, + transition_begin: int = 0 +) -> base.Schedule: + return polynomial_schedule( + init_value=init_value, + end_value=end_value, + power=1, + transition_steps=transition_steps, + transition_begin=transition_begin + ) diff --git a/TorchOpt/_src/transform.py b/TorchOpt/_src/transform.py index 6c293684..7cdc9c86 100644 --- a/TorchOpt/_src/transform.py +++ b/TorchOpt/_src/transform.py @@ -30,7 +30,7 @@ # limitations under the License. # ============================================================================== -from typing import List, NamedTuple +from typing import List, NamedTuple, Tuple, Union import jax import torch @@ -41,15 +41,16 @@ ScaleState = base.EmptyState -def inc_count(updates, count: List[int]) -> List[int]: - def f(c, g): - return c + 1 if g is not None else c +def inc_count(updates, count: Tuple[int]) -> Tuple[int]: - return jax.tree_map(f, count, updates) + def f(c, g): + return c + 1 if g is not None else c + + return jax.tree_map(f, count, updates) def scale(step_size: float) -> base.GradientTransformation: - """Scale updates by some fixed scalar `step_size`. + """Scale updates by some fixed scalar `step_size`. Args: step_size: a scalar corresponding to a fixed scaling factor for updates. @@ -57,33 +58,34 @@ def scale(step_size: float) -> base.GradientTransformation: Returns: An (init_fn, update_fn) tuple. 
""" - def init_fn(params): - del params - return ScaleState() - def update_fn(updates, state, inplace=True): - if inplace: + def init_fn(params): + del params + return ScaleState() + + def update_fn(updates, state, inplace=True): + if inplace: - def f(g): - return g.mul_(step_size) if g is not None else None - else: + def f(g): + return g.mul_(step_size) if g is not None else None + else: - def f(g): - return g.mul(step_size) if g is not None else None + def f(g): + return g.mul(step_size) if g is not None else None - updates = jax.tree_map(f, updates) - return updates, state + updates = jax.tree_map(f, updates) + return updates, state - return base.GradientTransformation(init_fn, update_fn) + return base.GradientTransformation(init_fn, update_fn) class ScaleByScheduleState(NamedTuple): - """Maintains count for scale scheduling.""" - count: List[int] + """Maintains count for scale scheduling.""" + count: Tuple[int, ...] # type: ignore def scale_by_schedule(step_size_fn: Schedule) -> base.GradientTransformation: - """Scale updates using a custom schedule for the `step_size`. + """Scale updates using a custom schedule for the `step_size`. Args: step_size_fn: a function that takes an update count as input and proposes @@ -92,91 +94,91 @@ def scale_by_schedule(step_size_fn: Schedule) -> base.GradientTransformation: Returns: An (init_fn, update_fn) tuple. """ - def init_fn(params): - return ScaleByScheduleState(count=tuple(0 for _ in range(len(params)))) - - def update_fn(updates, state, inplace=True): - step_size = step_size_fn(state.count) - if inplace: - updates = jax.tree_map(lambda g, step_size: g.mul_(step_size), - updates, step_size) - else: - updates = jax.tree_map(lambda g, step_size: g.mul(step_size), - updates, step_size) - return updates, ScaleByScheduleState( - count=inc_count(updates, state.count)) - return base.GradientTransformation(init_fn, update_fn) + def init_fn(params): + return ScaleByScheduleState(count=tuple(0 for _ in range(len(params)))) + def update_fn(updates, state, inplace=True): + step_size = step_size_fn(state.count) + if inplace: + updates = jax.tree_map( + lambda g, step_size: g.mul_(step_size), updates, step_size + ) + else: + updates = jax.tree_map( + lambda g, step_size: g.mul(step_size), updates, step_size + ) + return updates, ScaleByScheduleState(count=inc_count(updates, state.count)) -class ScaleByRStdDevState(NamedTuple): - """State for centered exponential moving average of squares of updates.""" - mu: base.Updates - nu: base.Updates + return base.GradientTransformation(init_fn, update_fn) def _update_moment(updates, moments, decay, order, inplace=True): - """Compute the exponential moving average of the `order`-th moment.""" - if inplace: + """Compute the exponential moving average of the `order`-th moment.""" + if inplace: - def f(g, t): - return t.mul_(decay).add_(g**order, alpha=1 - - decay) if g is not None else t - else: + def f(g, t): + return t.mul_(decay).add_( + g**order, alpha=1 - decay + ) if g is not None else t + else: - def f(g, t): - return t.mul(decay).add(g**order, alpha=1 - - decay) if g is not None else t + def f(g, t): + return t.mul(decay).add( + g**order, alpha=1 - decay + ) if g is not None else t - return jax.tree_map(f, updates, moments) + return jax.tree_map(f, updates, moments) def _update_moment_per_elem_norm(updates, moments, decay, order, inplace=True): - """Compute the EMA of the `order`-th moment of the element-wise norm.""" + """Compute the EMA of the `order`-th moment of the element-wise norm.""" - if inplace: + if 
inplace: - def f(g, t): - return t.mul_(decay).add_(g**order, alpha=1 - - decay) if g is not None else t - else: + def f(g, t): + return t.mul_(decay).add_( + g**order, alpha=1 - decay + ) if g is not None else t + else: - def f(g, t): - return t.mul(decay).add(g**order, alpha=1 - - decay) if g is not None else t + def f(g, t): + return t.mul(decay).add( + g**order, alpha=1 - decay + ) if g is not None else t - return jax.tree_map(f, updates, moments) + return jax.tree_map(f, updates, moments) class ScaleByAdamState(NamedTuple): - """State for the Adam algorithm.""" - count: List[int] - mu: base.Updates - nu: base.Updates + """State for the Adam algorithm.""" + count: Tuple[int, ...] # type: ignore + mu: base.Updates + nu: base.Updates def _bias_correction(moment, decay, count, inplace=True): - """Perform bias correction. This becomes a no-op as count goes to infinity.""" - if inplace: + """Perform bias correction. This becomes a no-op as count goes to infinity.""" + if inplace: - def f(t, c): - return t.div_(1 - decay**c) - else: + def f(t, c): + return t.div_(1 - decay**c) + else: - def f(t, c): - return t.div(1 - decay**c) + def f(t, c): + return t.div(1 - decay**c) - return jax.tree_map(f, moment, count) + return jax.tree_map(f, moment, count) def scale_by_adam( - b1: float = 0.9, - b2: float = 0.999, - eps: float = 1e-8, - eps_root: float = 0.0, - moment_requires_grad: bool = False, + b1: float = 0.9, + b2: float = 0.999, + eps: float = 1e-8, + eps_root: float = 0.0, + moment_requires_grad: bool = False, ) -> base.GradientTransformation: - """Rescale updates according to the Adam algorithm. + """Rescale updates according to the Adam algorithm. References: [Kingma et al, 2014](https://arxiv.org/abs/1412.6980) @@ -192,48 +194,51 @@ def scale_by_adam( Returns: An (init_fn, update_fn) tuple. 
""" - def init_fn(params): - mu = jax.tree_map( # First moment - lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), - params) - nu = jax.tree_map( # Second moment - lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), - params) - return ScaleByAdamState(count=tuple(0 for _ in range(len(mu))), - mu=tuple(mu), - nu=tuple(nu)) - - def update_fn(updates, state, inplace=True): - mu = _update_moment(updates, state.mu, b1, 1, inplace) - nu = _update_moment_per_elem_norm(updates, state.nu, b2, 2, inplace) - count_inc = inc_count(updates, state.count) - mu_hat = _bias_correction(mu, b1, count_inc, False) - nu_hat = _bias_correction(nu, b2, count_inc, False) - if inplace: - - def f(g, m, v): - return m.div_(torch.sqrt_( - v.add_(eps_root)).add_(eps)) if g is not None else None - else: - - def f(g, m, v): - return m.div(torch.sqrt( - v.add(eps_root)).add(eps)) if g is not None else None - - updates = jax.tree_map(f, updates, mu_hat, nu_hat) - return updates, ScaleByAdamState(count=count_inc, mu=mu, nu=nu) - - return base.GradientTransformation(init_fn, update_fn) + + def init_fn(params): + mu = jax.tree_map( # First moment + lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), + params) + nu = jax.tree_map( # Second moment + lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), + params) + return ScaleByAdamState( + count=tuple(0 for _ in range(len(mu))), mu=tuple(mu), nu=tuple(nu) + ) + + def update_fn(updates, state, inplace=True): + mu = _update_moment(updates, state.mu, b1, 1, inplace) + nu = _update_moment_per_elem_norm(updates, state.nu, b2, 2, inplace) + count_inc = inc_count(updates, state.count) + mu_hat = _bias_correction(mu, b1, count_inc, False) + nu_hat = _bias_correction(nu, b2, count_inc, False) + if inplace: + + def f(g, m, v): + return m.div_( + torch.sqrt_(v.add_(eps_root)).add_(eps) + ) if g is not None else None + else: + + def f(g, m, v): + return m.div( + torch.sqrt(v.add(eps_root)).add(eps) + ) if g is not None else None + + updates = jax.tree_map(f, updates, mu_hat, nu_hat) + return updates, ScaleByAdamState(count=count_inc, mu=mu, nu=nu) + + return base.GradientTransformation(init_fn, update_fn) def scale_by_accelerated_adam( - b1: float = 0.9, - b2: float = 0.999, - eps: float = 1e-8, - eps_root: float = 0.0, - moment_requires_grad: bool = False, + b1: float = 0.9, + b2: float = 0.999, + eps: float = 1e-8, + eps_root: float = 0.0, + moment_requires_grad: bool = False, ) -> base.GradientTransformation: - """Rescale updates according to the Adam algorithm. + """Rescale updates according to the Adam algorithm. This function is acceleracted by using some fused accelerated operators. @@ -251,46 +256,46 @@ def scale_by_accelerated_adam( Returns: An (init_fn, update_fn) tuple. 
""" - from .accelerated_op import AdamOp - - def init_fn(params): - mu = jax.tree_map( # First moment - lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), - params) - nu = jax.tree_map( # Second moment - lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), - params) - return ScaleByAdamState(count=tuple(0 for _ in range(len(params))), - mu=mu, - nu=nu) - - def update_fn(updates, state, inplace=True): - count_inc = inc_count(updates, state.count) - op = AdamOp(b1, b2, eps, eps_root, inplace) - out = jax.tree_map(op, state.mu, state.nu, updates, count_inc) - new_mus, new_nus, new_updates = [], [], [] - for new_mu, new_nu, new_update in out: - new_mus.append(new_mu) - new_nus.append(new_nu) - new_updates.append(new_update) - return tuple(new_updates), ScaleByAdamState(count=count_inc, - mu=tuple(new_mus), - nu=tuple(new_nus)) - - return base.GradientTransformation(init_fn, update_fn) + from .accelerated_op import AdamOp + + def init_fn(params): + mu = jax.tree_map( # First moment + lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), + params) + nu = jax.tree_map( # Second moment + lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), + params) + return ScaleByAdamState( + count=tuple(0 for _ in range(len(params))), mu=mu, nu=nu + ) + + def update_fn(updates, state, inplace=True): + count_inc = inc_count(updates, state.count) + op = AdamOp(b1, b2, eps, eps_root, inplace) + out = jax.tree_map(op, state.mu, state.nu, updates, count_inc) + new_mus, new_nus, new_updates = [], [], [] + for new_mu, new_nu, new_update in out: + new_mus.append(new_mu) + new_nus.append(new_nu) + new_updates.append(new_update) + return tuple(new_updates), ScaleByAdamState( + count=count_inc, mu=tuple(new_mus), nu=tuple(new_nus) + ) + + return base.GradientTransformation(init_fn, update_fn) class TraceState(NamedTuple): - """Holds an aggregation of past updates.""" - trace: base.Params + """Holds an aggregation of past updates.""" + trace: base.Params def trace( - decay: float, - nesterov: bool = False, - moment_requires_grad: bool = False, + decay: float, + nesterov: bool = False, + moment_requires_grad: bool = False, ) -> base.GradientTransformation: - """Compute a trace of past updates. + """Compute a trace of past updates. Note: `trace` and `ema` have very similar but distinct updates; `trace = decay * trace + t`, while `ema = decay * ema + (1-decay) * t`. @@ -304,64 +309,70 @@ def trace( Returns: An (init_fn, update_fn) tuple. 
""" - def init_fn(params): - if decay == 0.: - return TraceState(trace=()) - else: - return TraceState(trace=jax.tree_map( - lambda t: torch.zeros_like( - t, requires_grad=moment_requires_grad), params)) - def update_fn(updates, state, inplace=True): - if nesterov: - if inplace: + def init_fn(params): + if decay == 0.: + return TraceState(trace=()) + else: + return TraceState( + trace=jax.tree_map( + lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), + params + ) + ) - def f1(g, t): - return t.copy_(g.add(t, alpha=decay)) + def update_fn(updates, state, inplace=True): + if nesterov: + if inplace: - def f2(g, t): - return g.add_(t, alpha=decay) + def f1(g, t): + return t.copy_(g.add(t, alpha=decay)) - new_trace = jax.tree_map(f1, updates, state.trace) - updates = jax.tree_map(f2, updates, new_trace) - else: + def f2(g, t): + return g.add_(t, alpha=decay) - def f(g, t): - return g.add(t, alpha=decay) + new_trace = jax.tree_map(f1, updates, state.trace) + updates = jax.tree_map(f2, updates, new_trace) + else: - new_trace = jax.tree_map(f, updates, state.trace) - updates = jax.tree_map(f, updates, new_trace) - else: - if inplace: + def f(g, t): + return g.add(t, alpha=decay) - def f(g, t): - return g.add_(t, alpha=decay) + new_trace = jax.tree_map(f, updates, state.trace) + updates = jax.tree_map(f, updates, new_trace) + else: + if inplace: - updates = jax.tree_map(f, updates, state.trace) - state.trace.copy_(updates) - new_trace = state.trace - else: + def f(g, t): + return g.add_(t, alpha=decay) + + updates = jax.tree_map(f, updates, state.trace) + state.trace.copy_(updates) + new_trace = state.trace + else: - def f(g, t): - return g.add(t, alpha=decay) + def f(g, t): + return g.add(t, alpha=decay) - updates = jax.tree_map(f, updates, state.trace) - new_trace = updates + updates = jax.tree_map(f, updates, state.trace) + new_trace = updates - return updates, TraceState(trace=new_trace) + return updates, TraceState(trace=new_trace) - return base.GradientTransformation(init_fn, update_fn) + return base.GradientTransformation(init_fn, update_fn) class ScaleByRmsState(NamedTuple): - """State for exponential root mean-squared (RMS)-normalized updates.""" - nu: base.Updates + """State for exponential root mean-squared (RMS)-normalized updates.""" + nu: base.Updates -def scale_by_rms(decay: float = 0.9, - eps: float = 1e-8, - initial_scale: float = 0.) -> base.GradientTransformation: - """Rescale updates by the root of the exp. moving avg of the square. +def scale_by_rms( + decay: float = 0.9, + eps: float = 1e-8, + initial_scale: float = 0. +) -> base.GradientTransformation: + """Rescale updates by the root of the exp. moving avg of the square. References: [Hinton](www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf) @@ -374,44 +385,47 @@ def scale_by_rms(decay: float = 0.9, Returns: An (init_fn, update_fn) tuple. 
""" - def init_fn(params): - nu = jax.tree_map(lambda n: torch.full_like(n, initial_scale), - params) # second moment - return ScaleByRmsState(nu=nu) - def update_fn(updates, state, params=None, inplace=True): - del params - nu = _update_moment_per_elem_norm(updates, state.nu, decay, 2, inplace) - if inplace: + def init_fn(params): + nu = jax.tree_map( + lambda n: torch.full_like(n, initial_scale), params + ) # second moment + return ScaleByRmsState(nu=nu) + + def update_fn(updates, state, inplace=True): + nu = _update_moment_per_elem_norm(updates, state.nu, decay, 2, inplace) + if inplace: - def f(g, n): - return g.mul_(torch.rsqrt(n.add(eps))) - else: + def f(g, n): + return g.mul_(torch.rsqrt(n.add(eps))) + else: - def f(g, n): - return g.mul(torch.rsqrt(n.add(eps))) + def f(g, n): + return g.mul(torch.rsqrt(n.add(eps))) - # """The followings are pytorch style""" - # if inplace: - # def f(g, n): return g.div_(torch.sqrt_(n).add_(eps)) - # else: - # def f(g, n): return g.div(torch.sqrt(n).add(eps)) - updates = jax.tree_map(f, updates, nu) - return updates, ScaleByRmsState(nu=nu) + # """The followings are pytorch style""" + # if inplace: + # def f(g, n): return g.div_(torch.sqrt_(n).add_(eps)) + # else: + # def f(g, n): return g.div(torch.sqrt(n).add(eps)) + updates = jax.tree_map(f, updates, nu) + return updates, ScaleByRmsState(nu=nu) - return base.GradientTransformation(init_fn, update_fn) + return base.GradientTransformation(init_fn, update_fn) class ScaleByRStdDevState(NamedTuple): - """State for centered exponential moving average of squares of updates.""" - mu: base.Updates - nu: base.Updates + """State for centered exponential moving average of squares of updates.""" + mu: base.Updates + nu: base.Updates -def scale_by_stddev(decay: float = 0.9, - eps: float = 1e-8, - initial_scale: float = 0.) -> base.GradientTransformation: - """Rescale updates by the root of the centered exp. moving average of squares. +def scale_by_stddev( + decay: float = 0.9, + eps: float = 1e-8, + initial_scale: float = 0. +) -> base.GradientTransformation: + """Rescale updates by the root of the centered exp. moving average of squares. References: [Hinton](www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf) @@ -424,31 +438,32 @@ def scale_by_stddev(decay: float = 0.9, Returns: An (init_fn, update_fn) tuple. 
""" - def init_fn(params): - mu = jax.tree_map(torch.zeros_like, params) # First moment - nu = jax.tree_map(lambda n: torch.full_like(n, initial_scale), - params) # second moment - return ScaleByRStdDevState(mu=mu, nu=nu) - - def update_fn(updates, state, params=None, inplace=True): - del params - mu = _update_moment(updates, state.mu, decay, 1, inplace) - nu = _update_moment_per_elem_norm(updates, state.nu, decay, 2, inplace) - if inplace: - - def f(g, m, n): - return g.mul_(torch.rsqrt(n.sub(m**2).add(eps))) - else: - - def f(g, m, n): - return g.mul(torch.rsqrt(n.sub(m**2).add(eps))) - - # """The followings are pytorch style""" - # if inplace: - # def f(g, m, n): return g.div_(torch.sqrt_(n.sub_(m ** 2)).add(eps)) - # else: - # def f(g, m, n): return g.div(torch.sqrt(n.sub(m ** 2)).add(eps)) - updates = jax.tree_map(f, updates, mu, nu) - return updates, ScaleByRStdDevState(mu=mu, nu=nu) - - return base.GradientTransformation(init_fn, update_fn) + + def init_fn(params): + mu = jax.tree_map(torch.zeros_like, params) # First moment + nu = jax.tree_map( + lambda n: torch.full_like(n, initial_scale), params + ) # second moment + return ScaleByRStdDevState(mu=mu, nu=nu) + + def update_fn(updates, state, inplace=True): + mu = _update_moment(updates, state.mu, decay, 1, inplace) + nu = _update_moment_per_elem_norm(updates, state.nu, decay, 2, inplace) + if inplace: + + def f(g, m, n): + return g.mul_(torch.rsqrt(n.sub(m**2).add(eps))) + else: + + def f(g, m, n): + return g.mul(torch.rsqrt(n.sub(m**2).add(eps))) + + # """The followings are pytorch style""" + # if inplace: + # def f(g, m, n): return g.div_(torch.sqrt_(n.sub_(m ** 2)).add(eps)) + # else: + # def f(g, m, n): return g.div(torch.sqrt(n.sub(m ** 2)).add(eps)) + updates = jax.tree_map(f, updates, mu, nu) + return updates, ScaleByRStdDevState(mu=mu, nu=nu) + + return base.GradientTransformation(init_fn, update_fn) diff --git a/TorchOpt/_src/update.py b/TorchOpt/_src/update.py index 885ca71a..a77adf7e 100644 --- a/TorchOpt/_src/update.py +++ b/TorchOpt/_src/update.py @@ -35,10 +35,12 @@ from TorchOpt._src import base -def apply_updates(params: base.Params, - updates: base.Updates, - inplace: bool = True) -> base.Params: - """Applies an update to the corresponding parameters. +def apply_updates( + params: base.Params, + updates: base.Updates, + inplace: bool = True +) -> base.Params: + """Applies an update to the corresponding parameters. This is a utility functions that applies an update to a set of parameters, and then returns the updated parameters to the caller. As an example, the update @@ -56,15 +58,15 @@ def apply_updates(params: base.Params, Returns: Updated parameters, with same structure, shape and type as `params`. """ - if inplace: + if inplace: - def f(p, u): - if u is not None: - p.data.add_(u) - return p - else: + def f(p, u): + if u is not None: + p.data.add_(u) + return p + else: - def f(p, u): - return p.add(u) if u is not None else p + def f(p, u): + return p.add(u) if u is not None else p - return jax.tree_map(f, params, updates) + return jax.tree_map(f, params, updates) diff --git a/TorchOpt/_src/utils.py b/TorchOpt/_src/utils.py index ad30373b..23c28ae9 100644 --- a/TorchOpt/_src/utils.py +++ b/TorchOpt/_src/utils.py @@ -23,13 +23,14 @@ class _ModuleState(NamedTuple): - params: List[Dict] + params: List[Dict] - visual_contents: Union[None, Dict] = None + visual_contents: Union[None, Dict] = None +# mypy: ignore-errors def stop_gradient(target): - """Stop the gradient for the input object. 
+ """Stop the gradient for the input object. Since a tensor use `grad_fn` to connect itself with the previous computation graph, the back-propagated gradient will flow over the tensor and continue @@ -46,31 +47,29 @@ def stop_gradient(target): will return a detached copy of the target. The in-place operation is fast and memory efficient but may raise back-propagation error. """ - def f(obj): - if isinstance(obj, torch.Tensor): - requires_grad = obj.requires_grad - obj.detach_().requires_grad_(requires_grad) - return None - - if isinstance(target, _ModuleState): - true_target = target.params - elif isinstance(target, nn.Module): - true_target = tuple(target.parameters()) - elif isinstance(target, MetaOptimizer): - true_target, _ = jax.tree_flatten(target.state_dict()) - else: - true_target = target - jax.tree_map(f, true_target) + def f(obj): + if isinstance(obj, torch.Tensor): + requires_grad = obj.requires_grad + obj.detach_().requires_grad_(requires_grad) + return None + + if isinstance(target, _ModuleState): + true_target = target.params + elif isinstance(target, nn.Module): + true_target = tuple(target.parameters()) + elif isinstance(target, MetaOptimizer): + true_target, _ = jax.tree_flatten(target.state_dict()) + else: + true_target = target + + jax.tree_map(f, true_target) -def extract_state_dict(mod, - copy=False, - *, - with_buffer=True, - enable_visual=False, - visual_prefix=''): - """Extract target state. +def extract_state_dict( + mod, copy=False, *, with_buffer=True, enable_visual=False, visual_prefix='' +): + """Extract target state. Since a tensor use `grad_fn` to connect itself with the previous computation graph, the back-propagated gradient will flow over the tensor and continue @@ -92,86 +91,85 @@ def extract_state_dict(mod, Returns: State extracted of the input object. 
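  Example:
    The snapshot/rollback pattern these helpers enable (the same pattern the
    L2R training script later in this patch uses); the model and loss here
    are placeholders for illustration:

    >>> import torch
    >>> import TorchOpt
    >>> model = torch.nn.Linear(3, 1)
    >>> meta_opt = TorchOpt.MetaSGD(model, lr=0.1)
    >>> net_state = TorchOpt.extract_state_dict(model)
    >>> optim_state = TorchOpt.extract_state_dict(meta_opt)
    >>> inner_loss = model(torch.randn(5, 3)).pow(2).mean()
    >>> meta_opt.step(inner_loss)  # differentiable inner step
    >>> TorchOpt.recover_state_dict(model, net_state)  # roll both back
    >>> TorchOpt.recover_state_dict(meta_opt, optim_state)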
""" - if isinstance(mod, nn.Module): - if enable_visual: - visual_contents = {} - - for k, v in mod.named_parameters(): - if v.grad_fn is not None: - visual_contents.update({v.grad_fn: (visual_prefix + k, v)}) - else: - visual_contents.update({v: visual_prefix + k}) - else: - visual_contents = None - - params = [] - - def get_v(v): - if copy: - requires_grad = v.requires_grad - return v.clone().detach_().requires_grad_(requires_grad) - else: - return v - - def _update(term): - if len(term) != 0: - params.append({k: get_v(v) for k, v in term.items()}) - - _update(mod._parameters) - if with_buffer: - _update(mod._buffers) - for module in mod.modules(): - if module is mod: - continue - _update(module._parameters) - if with_buffer: - _update(module._buffers) - return _ModuleState(params=tuple(params), - visual_contents=visual_contents) - elif isinstance(mod, MetaOptimizer): - state = mod.state_dict() - if copy: - flatten_state, state_tree = jax.tree_flatten(state) - - def get_v(v): - if not isinstance(v, torch.Tensor): - return v - requires_grad = v.requires_grad - return v.clone().detach_().requires_grad_(requires_grad) - - flatten_state = jax.tree_map(get_v, flatten_state) - return state_tree.unflatten(flatten_state) - else: - return state + if isinstance(mod, nn.Module): + if enable_visual: + visual_contents = {} + for k, v in mod.named_parameters(): + if v.grad_fn is not None: + visual_contents.update({v.grad_fn: (visual_prefix + k, v)}) + else: + visual_contents.update({v: visual_prefix + k}) + else: + visual_contents = None + + params = [] + + def get_v(v): + if copy: + requires_grad = v.requires_grad + return v.clone().detach_().requires_grad_(requires_grad) + else: + return v + + def _update(term): + if len(term) != 0: + params.append({k: get_v(v) for k, v in term.items()}) + + _update(mod._parameters) + if with_buffer: + _update(mod._buffers) + for module in mod.modules(): + if module is mod: + continue + _update(module._parameters) + if with_buffer: + _update(module._buffers) + return _ModuleState(params=tuple(params), visual_contents=visual_contents) + elif isinstance(mod, MetaOptimizer): + state = mod.state_dict() + if copy: + flatten_state, state_tree = jax.tree_flatten(state) + + def get_v(v): + if not isinstance(v, torch.Tensor): + return v + requires_grad = v.requires_grad + return v.clone().detach_().requires_grad_(requires_grad) + + flatten_state = jax.tree_map(get_v, flatten_state) + return state_tree.unflatten(flatten_state) else: - raise RuntimeError(f"Unexpected class of {mod}") + return state + + else: + raise RuntimeError(f"Unexpected class of {mod}") def _extract_container(mod, with_buffer=True): - if isinstance(mod, nn.Module): - containers = [] - - def _update(term): - if len(term) != 0: - containers.append(term) - - _update(mod._parameters) - if with_buffer: - _update(mod._buffers) - for module in mod.modules(): - if module is mod: - continue - _update(module._parameters) - if with_buffer: - _update(module._buffers) - return tuple(containers) - else: - raise RuntimeError(f"Unexpected class of {mod}") + if isinstance(mod, nn.Module): + containers = [] + + def _update(term): + if len(term) != 0: + containers.append(term) + + _update(mod._parameters) + if with_buffer: + _update(mod._buffers) + for module in mod.modules(): + if module is mod: + continue + _update(module._parameters) + if with_buffer: + _update(module._buffers) + return tuple(containers) + else: + raise RuntimeError(f"Unexpected class of {mod}") def recover_state_dict(mod, state): - """Recover state. 
+  """Recover state.
 
   This function is compatible with `extract_state_dict`.
 
@@ -182,11 +180,11 @@ def recover_state_dict(mod, state):
     mod: target that needs to be recovered.
     state: the state used for recovery.
   """
-    if isinstance(mod, nn.Module):
-        target_container = _extract_container(mod)
-        for target, source in zip(target_container, state.params):
-            target.update(source)
-    elif isinstance(mod, MetaOptimizer):
-        mod.load_state_dict(state)
-    else:
-        raise RuntimeError(f"Unexpected class of {mod}")
+  if isinstance(mod, nn.Module):
+    target_container = _extract_container(mod)
+    for target, source in zip(target_container, state.params):
+      target.update(source)
+  elif isinstance(mod, MetaOptimizer):
+    mod.load_state_dict(state)
+  else:
+    raise RuntimeError(f"Unexpected class of {mod}")
diff --git a/TorchOpt/_src/visual.py b/TorchOpt/_src/visual.py
index e71c5ebc..aabf1ca3 100644
--- a/TorchOpt/_src/visual.py
+++ b/TorchOpt/_src/visual.py
@@ -31,43 +31,43 @@
 
 
 def get_fn_name(fn, show_attrs, max_attr_chars):
-    name = str(type(fn).__name__)
-    if not show_attrs:
-        return name
-    attrs = dict()
-    for attr in dir(fn):
-        if not attr.startswith(SAVED_PREFIX):
-            continue
-        val = getattr(fn, attr)
-        attr = attr[len(SAVED_PREFIX):]
-        if torch.is_tensor(val):
-            attrs[attr] = "[saved tensor]"
-        elif isinstance(val, tuple) and any(torch.is_tensor(t) for t in val):
-            attrs[attr] = "[saved tensors]"
-        else:
-            attrs[attr] = str(val)
-    if not attrs:
-        return name
-    max_attr_chars = max(max_attr_chars, 3)
-    col1width = max(len(k) for k in attrs.keys())
-    col2width = min(max(len(str(v)) for v in attrs.values()), max_attr_chars)
-    sep = "-" * max(col1width + col2width + 2, len(name))
-    attrstr = '%-' + str(col1width) + 's: %' + str(col2width) + 's'
-
-    def truncate(s): return s[:col2width - 3] + \
+  name = str(type(fn).__name__)
+  if not show_attrs:
+    return name
+  attrs = dict()
+  for attr in dir(fn):
+    if not attr.startswith(SAVED_PREFIX):
+      continue
+    val = getattr(fn, attr)
+    attr = attr[len(SAVED_PREFIX):]
+    if torch.is_tensor(val):
+      attrs[attr] = "[saved tensor]"
+    elif isinstance(val, tuple) and any(torch.is_tensor(t) for t in val):
+      attrs[attr] = "[saved tensors]"
+    else:
+      attrs[attr] = str(val)
+  if not attrs:
+    return name
+  max_attr_chars = max(max_attr_chars, 3)
+  col1width = max(len(k) for k in attrs.keys())
+  col2width = min(max(len(str(v)) for v in attrs.values()), max_attr_chars)
+  sep = "-" * max(col1width + col2width + 2, len(name))
+  attrstr = '%-' + str(col1width) + 's: %' + str(col2width) + 's'
+
+  def truncate(s): return s[:col2width - 3] + \
       "..." if len(s) > col2width else s
 
-    params = '\n'.join(attrstr % (k, truncate(str(v)))
-                       for (k, v) in attrs.items())
-    return name + '\n' + sep + '\n' + params
+  params = '\n'.join(
+      attrstr % (k, truncate(str(v))) for (k, v) in attrs.items()
+  )
+  return name + '\n' + sep + '\n' + params
 
 
-def make_dot(var,
-             params=None,
-             show_attrs=False,
-             show_saved=False,
-             max_attr_chars=50):
-    """ Produces Graphviz representation of PyTorch autograd graph.
+# mypy: ignore-errors
+def make_dot(
+    var, params=None, show_attrs=False, show_saved=False, max_attr_chars=50
+):
+  """ Produces Graphviz representation of PyTorch autograd graph.
 
   If a node represents a backward function, it is gray. Otherwise, the node
   represents a tensor and is either blue, orange, or green:
@@ -90,147 +90,147 @@ def make_dot(var,
     max_attr_chars: if show_attrs is `True`, sets max number of characters
       to display for any given attribute.
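  Example:
    Minimal usage, assuming the `graphviz` Python package and the `dot`
    binary are installed; the dict maps display names to tensors, matching
    the `params` handling in the body below:

    >>> import torch
    >>> from TorchOpt._src.visual import make_dot  # path assumed
    >>> x = torch.randn(2, 3, requires_grad=True)
    >>> w = torch.randn(3, 1, requires_grad=True)
    >>> y = (x @ w).sum()
    >>> dot = make_dot(y, params={'x': x, 'w': w})
    >>> _ = dot.render('graph', format='png')  # writes graph.png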
""" - if LooseVersion(torch.__version__) < LooseVersion("1.9") and \ - (show_attrs or show_saved): - warnings.warn( - "make_dot: showing grad_fn attributes and saved variables" - " requires PyTorch version >= 1.9. (This does NOT apply to" - " saved tensors saved by custom autograd functions.)") - - param_map = {} - - if params is not None: - from .utils import _ModuleState - if isinstance(params, _ModuleState): - param_map.update(params.visual_contents) - elif isinstance(params, Dict): - param_map.update({v: k for k, v in params.items()}) - elif isinstance(params, Generator): - param_map.update({v: k for k, v in params}) - else: - for param in params: - if isinstance(param, _ModuleState): - param_map.update(param.visual_contents) - elif isinstance(param, Generator): - param_map.update({v: k for k, v in param}) - else: - param_map.update({v: k for k, v in param.items()}) - - node_attr = dict(style='filled', - shape='box', - align='left', - fontsize='10', - ranksep='0.1', - height='0.2', - fontname='monospace') - dot = Digraph(node_attr=node_attr, graph_attr=dict(size="12,12")) - seen = set() - - def size_to_str(size): - return '(' + (', ').join(['%d' % v for v in size]) + ')' - - def get_var_name(var, name=None): - if not name: - name = param_map[var] if var in param_map else '' - return '%s\n %s' % (name, size_to_str(var.size())) - - def get_var_name_with_flag(var): - if var in param_map: - return '%s\n %s' % (param_map[var][0], - size_to_str(param_map[var][1].size())) + if LooseVersion(torch.__version__) < LooseVersion("1.9") and \ + (show_attrs or show_saved): + warnings.warn( + "make_dot: showing grad_fn attributes and saved variables" + " requires PyTorch version >= 1.9. (This does NOT apply to" + " saved tensors saved by custom autograd functions.)" + ) + + param_map = {} + + if params is not None: + from TorchOpt.utils import _ModuleState + if isinstance(params, _ModuleState): + param_map.update(params.visual_contents) + elif isinstance(params, Dict): + param_map.update({v: k for k, v in params.items()}) + elif isinstance(params, Generator): + param_map.update({v: k for k, v in params}) + else: + for param in params: + if isinstance(param, _ModuleState): + param_map.update(param.visual_contents) + elif isinstance(param, Generator): + param_map.update({v: k for k, v in param}) else: - return None - - def add_nodes(fn): - assert not torch.is_tensor(fn) - if fn in seen: - return - seen.add(fn) - - if show_saved: - for attr in dir(fn): - if not attr.startswith(SAVED_PREFIX): - continue - val = getattr(fn, attr) - seen.add(val) - attr = attr[len(SAVED_PREFIX):] - if torch.is_tensor(val): - dot.edge(str(id(fn)), str(id(val)), dir="none") - dot.node(str(id(val)), - get_var_name(val, attr), - fillcolor='orange') - if isinstance(val, tuple): - for i, t in enumerate(val): - if torch.is_tensor(t): - name = attr + '[%s]' % str(i) - dot.edge(str(id(fn)), str(id(t)), dir="none") - dot.node(str(id(t)), - get_var_name(t, name), - fillcolor='orange') - - if hasattr(fn, 'variable'): - # if grad_accumulator, add the node for `.variable` - var = fn.variable - seen.add(var) - dot.node(str(id(var)), get_var_name(var), fillcolor='lightblue') - dot.edge(str(id(var)), str(id(fn))) - - fn_name = get_fn_name(fn, show_attrs, max_attr_chars) - fn_fillcolor = None - var_name = get_var_name_with_flag(fn) - if var_name is not None: - fn_name = '%s\n %s' % (fn_name, var_name) - fn_fillcolor = 'lightblue' - - # add the node for this grad_fn - dot.node(str(id(fn)), fn_name, fillcolor=fn_fillcolor) - - # recurse - if 
hasattr(fn, 'next_functions'): - for u in fn.next_functions: - if u[0] is not None: - dot.edge(str(id(u[0])), str(id(fn))) - add_nodes(u[0]) - - # note: this used to show .saved_tensors in pytorch0.2, but stopped - # working* as it was moved to ATen and Variable-Tensor merged - # also note that this still works for custom autograd functions - if hasattr(fn, 'saved_tensors'): - for t in fn.saved_tensors: - dot.edge(str(id(t)), str(id(fn))) - dot.node(str(id(t)), get_var_name(t), fillcolor='orange') - - def add_base_tensor(var, color='darkolivegreen1'): - if var in seen: - return - seen.add(var) - dot.node(str(id(var)), get_var_name(var), fillcolor=color) - if (var.grad_fn): - add_nodes(var.grad_fn) - dot.edge(str(id(var.grad_fn)), str(id(var))) - if var._is_view(): - add_base_tensor(var._base, color='darkolivegreen3') - dot.edge(str(id(var._base)), str(id(var)), style="dotted") - - # handle multiple outputs - if isinstance(var, tuple): - for v in var: - add_base_tensor(v) + param_map.update({v: k for k, v in param.items()}) + + node_attr = dict( + style='filled', + shape='box', + align='left', + fontsize='10', + ranksep='0.1', + height='0.2', + fontname='monospace' + ) + dot = Digraph(node_attr=node_attr, graph_attr=dict(size="12,12")) + seen = set() + + def size_to_str(size): + return '(' + (', ').join(['%d' % v for v in size]) + ')' + + def get_var_name(var, name=None): + if not name: + name = param_map[var] if var in param_map else '' + return '%s\n %s' % (name, size_to_str(var.size())) + + def get_var_name_with_flag(var): + if var in param_map: + return '%s\n %s' % ( + param_map[var][0], size_to_str(param_map[var][1].size()) + ) else: - add_base_tensor(var) + return None - resize_graph(dot) + def add_nodes(fn): + assert not torch.is_tensor(fn) + if fn in seen: + return + seen.add(fn) - return dot + if show_saved: + for attr in dir(fn): + if not attr.startswith(SAVED_PREFIX): + continue + val = getattr(fn, attr) + seen.add(val) + attr = attr[len(SAVED_PREFIX):] + if torch.is_tensor(val): + dot.edge(str(id(fn)), str(id(val)), dir="none") + dot.node(str(id(val)), get_var_name(val, attr), fillcolor='orange') + if isinstance(val, tuple): + for i, t in enumerate(val): + if torch.is_tensor(t): + name = attr + '[%s]' % str(i) + dot.edge(str(id(fn)), str(id(t)), dir="none") + dot.node(str(id(t)), get_var_name(t, name), fillcolor='orange') + + if hasattr(fn, 'variable'): + # if grad_accumulator, add the node for `.variable` + var = fn.variable + seen.add(var) + dot.node(str(id(var)), get_var_name(var), fillcolor='lightblue') + dot.edge(str(id(var)), str(id(fn))) + + fn_name = get_fn_name(fn, show_attrs, max_attr_chars) + fn_fillcolor = None + var_name = get_var_name_with_flag(fn) + if var_name is not None: + fn_name = '%s\n %s' % (fn_name, var_name) + fn_fillcolor = 'lightblue' + + # add the node for this grad_fn + dot.node(str(id(fn)), fn_name, fillcolor=fn_fillcolor) + + # recurse + if hasattr(fn, 'next_functions'): + for u in fn.next_functions: + if u[0] is not None: + dot.edge(str(id(u[0])), str(id(fn))) + add_nodes(u[0]) + + # note: this used to show .saved_tensors in pytorch0.2, but stopped + # working* as it was moved to ATen and Variable-Tensor merged + # also note that this still works for custom autograd functions + if hasattr(fn, 'saved_tensors'): + for t in fn.saved_tensors: + dot.edge(str(id(t)), str(id(fn))) + dot.node(str(id(t)), get_var_name(t), fillcolor='orange') + + def add_base_tensor(var, color='darkolivegreen1'): + if var in seen: + return + seen.add(var) + 
dot.node(str(id(var)), get_var_name(var), fillcolor=color) + if (var.grad_fn): + add_nodes(var.grad_fn) + dot.edge(str(id(var.grad_fn)), str(id(var))) + if var._is_view(): + add_base_tensor(var._base, color='darkolivegreen3') + dot.edge(str(id(var._base)), str(id(var)), style="dotted") + + # handle multiple outputs + if isinstance(var, tuple): + for v in var: + add_base_tensor(v) + else: + add_base_tensor(var) + + resize_graph(dot) + + return dot def resize_graph(dot, size_per_element=0.15, min_size=12): - """Resize the graph according to how much content it contains. + """Resize the graph according to how much content it contains. Modify the graph in place. """ - # Get the approximate number of nodes and edges - num_rows = len(dot.body) - content_size = num_rows * size_per_element - size = max(min_size, content_size) - size_str = str(size) + "," + str(size) - dot.graph_attr.update(size=size_str) + # Get the approximate number of nodes and edges + num_rows = len(dot.body) + content_size = num_rows * size_per_element + size = max(min_size, content_size) + size_str = str(size) + "," + str(size) + dot.graph_attr.update(size=size_str) diff --git a/docs/conf.py b/docs/conf.py index e8004f7f..8dfa64e6 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -20,10 +20,10 @@ def get_version() -> str: - # https://packaging.python.org/guides/single-sourcing-package-version/ - with open(os.path.join("..", "TorchOpt", "__init__.py"), "r") as f: - init = f.read().split() - return init[init.index("__version__") + 2][1:-1] + # https://packaging.python.org/guides/single-sourcing-package-version/ + with open(os.path.join("..", "TorchOpt", "__init__.py"), "r") as f: + init = f.read().split() + return init[init.index("__version__") + 2][1:-1] # -- Project information ----------------------------------------------------- @@ -41,7 +41,7 @@ def get_version() -> str: # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ - "sphinx.ext.autodoc", + "sphinx.ext.autodoc", ] # Add any paths that contain templates here, relative to this directory. 
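 # For reference, `get_version` above resolves the version string by plain
 # token arithmetic; traced on a hypothetical version line:
 #
 #   >>> init = '__version__ = "1.2.3"'.split()
 #   >>> init
 #   ['__version__', '=', '"1.2.3"']
 #   >>> init[init.index("__version__") + 2][1:-1]  # skip '=', strip quotes
 #   '1.2.3'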
@@ -74,8 +74,8 @@ def get_version() -> str:
 
 
 def setup(app):
-    app.add_js_file("js/copybutton.js")
-    app.add_css_file("css/style.css")
+  app.add_js_file("js/copybutton.js")
+  app.add_css_file("css/style.css")
 
 
 # -- Extension configuration -------------------------------------------------
diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/examples/L2R/helper/argument.py b/examples/L2R/helper/argument.py
index 26de1fd4..e29bdb0a 100644
--- a/examples/L2R/helper/argument.py
+++ b/examples/L2R/helper/argument.py
@@ -19,36 +19,32 @@
 
 
 def parse_args():
-    parser = argparse.ArgumentParser([], description='L2R')
-
-    parser.add_argument('--seed', type=int, default=42)
-    parser.add_argument('--epoch', type=int, default=30, help='Training Epoch')
-
-    parser.add_argument('--lr', type=float, default=1e-3, help='learning rate')
-    parser.add_argument('--pos_ratio',
-                        type=float,
-                        default=0.995,
-                        help='Ratio of positive examples in training')
-    parser.add_argument('--ntest',
-                        type=int,
-                        default=500,
-                        help='Number of testing examples')
-    parser.add_argument('--ntrain',
-                        type=int,
-                        default=5000,
-                        help='Number of testing examples')
-    parser.add_argument('--nval',
-                        type=int,
-                        default=10,
-                        help='Number of valid examples')
-    parser.add_argument('--batch_size',
-                        type=int,
-                        default=100,
-                        help='Batch size')
-
-    ### For baseline
-    parser.add_argument('--algo', type=str, default='both')
-
-    args = parser.parse_args()
-    # use the GPU if available
-    return args
+  parser = argparse.ArgumentParser([], description='L2R')
+
+  parser.add_argument('--seed', type=int, default=42)
+  parser.add_argument('--epoch', type=int, default=30, help='Training Epoch')
+
+  parser.add_argument('--lr', type=float, default=1e-3, help='learning rate')
+  parser.add_argument(
+      '--pos_ratio',
+      type=float,
+      default=0.995,
+      help='Ratio of positive examples in training'
+  )
+  parser.add_argument(
+      '--ntest', type=int, default=500, help='Number of testing examples'
+  )
+  parser.add_argument(
+      '--ntrain', type=int, default=5000, help='Number of training examples'
+  )
+  parser.add_argument(
+      '--nval', type=int, default=10, help='Number of validation examples'
+  )
+  parser.add_argument('--batch_size', type=int, default=100, help='Batch size')
+
+  ### For baseline
+  parser.add_argument('--algo', type=str, default='both')
+
+  args = parser.parse_args()
+  # use the GPU if available
+  return args
diff --git a/examples/L2R/helper/model.py b/examples/L2R/helper/model.py
index 469b1c97..5a3ff2fa 100644
--- a/examples/L2R/helper/model.py
+++ b/examples/L2R/helper/model.py
@@ -36,42 +36,46 @@
 
 
 class LeNet5(nn.Module):
-    def __init__(self, args):
-        super(LeNet5, self).__init__()
-        self.model = nn.Sequential(nn.Conv2d(1, 16, 5), nn.ReLU(),
-                                   nn.MaxPool2d(2), nn.Conv2d(16, 32, 5),
-                                   nn.ReLU(), nn.MaxPool2d(2), nn.Flatten(),
-                                   nn.Linear(512, 128), nn.ReLU(),
-                                   nn.Linear(128, 1), nn.Sigmoid())
-        self.args = args
-        self.meta_weights = torch.zeros(self.args.batch_size,
-                                        requires_grad=True).to(
-                                            self.args.device)
-        self.criterion = nn.BCELoss()
-    def forward(self, x):
-        return self.model(x).squeeze(dim=-1)
+
+  def __init__(self, args):
+    super(LeNet5, self).__init__()
+    self.model = nn.Sequential(
+        nn.Conv2d(1, 16, 5), nn.ReLU(), nn.MaxPool2d(2), nn.Conv2d(16, 32, 5),
+        nn.ReLU(), nn.MaxPool2d(2), nn.Flatten(), nn.Linear(512, 128), nn.ReLU(),
+        nn.Linear(128, 1), nn.Sigmoid()
+    )
+    self.args = args
+    self.meta_weights = torch.zeros(
+        self.args.batch_size, requires_grad=True
).to(self.args.device) + self.criterion = nn.BCELoss() - def reset_meta(self, size): - self.meta_weights = torch.zeros(size, requires_grad=True).to( - self.args.device) + def forward(self, x): + return self.model(x).squeeze(dim=-1) - def normalise(self): - self.meta_weights = self.meta_weights.detach() - weights_sum = torch.sum(self.meta_weights) - weights_sum = weights_sum + 1 if weights_sum == 0 else weights_sum - self.meta_weights /= weights_sum + def reset_meta(self, size): + self.meta_weights = torch.zeros( + size, requires_grad=True + ).to(self.args.device) - def inner_loss(self, train_x, train_y): - result = self.forward(train_x) + def normalise(self): + self.meta_weights = self.meta_weights.detach() + weights_sum = torch.sum(self.meta_weights) + weights_sum = weights_sum + 1 if weights_sum == 0 else weights_sum + self.meta_weights /= weights_sum - # manually implement bce_loss to make the loss differentiable w.r.t self.meta_weights - loss = -(train_y * torch.log(result + 1e-10) + - (1 - train_y) * torch.log(1 - result + 1e-10)) - weighted_loss = torch.sum(self.meta_weights * loss) - return weighted_loss + def inner_loss(self, train_x, train_y): + result = self.forward(train_x) - def outer_loss(self, valid_x, valid_y): - result = self.forward(valid_x) - loss = self.criterion(result, valid_y) - return loss + # manually implement bce_loss to make the loss differentiable w.r.t self.meta_weights + loss = -( + train_y * torch.log(result + 1e-10) + + (1 - train_y) * torch.log(1 - result + 1e-10) + ) + weighted_loss = torch.sum(self.meta_weights * loss) + return weighted_loss + + def outer_loss(self, valid_x, valid_y): + result = self.forward(valid_x) + loss = self.criterion(result, valid_y) + return loss diff --git a/examples/L2R/helper/utils.py b/examples/L2R/helper/utils.py index 8281a961..96f469b7 100644 --- a/examples/L2R/helper/utils.py +++ b/examples/L2R/helper/utils.py @@ -24,151 +24,156 @@ from torch.utils.data import TensorDataset -def get_imbalance_dataset(mnist_train, - mnist_test, - pos_ratio=0.9, - ntrain=5000, - nval=10, - ntest=500, - class_0=4, - class_1=9): - - ratio = 1 - pos_ratio - ratio_test = 0.5 - - # In training, we have 10% 4 and 90% 9. - # In testing, we have 50% 4 and 50% 9. - x_train = mnist_train.train_data.numpy() / 255.0 - y_train = mnist_train.train_labels.numpy() - x_test = mnist_test.test_data.numpy() / 255.0 - y_test = mnist_test.test_labels.numpy() - x_train_0 = x_train[y_train == class_0] - x_test_0 = x_test[y_test == class_0] - - # First shuffle, negative. - idx = np.arange(x_train_0.shape[0]) - np.random.shuffle(idx) - x_train_0 = x_train_0[idx] - - nval_small_neg = int(np.floor(nval * ratio_test)) - ntrain_small_neg = int(np.floor(ntrain * ratio)) - nval_small_neg - - x_val_0 = x_train_0[:nval_small_neg] # 450 4 in validation. - x_train_0 = x_train_0[nval_small_neg:nval_small_neg + - ntrain_small_neg] # 500 4 in training. - - print('Number of train negative classes', ntrain_small_neg) - print('Number of val negative classes', nval_small_neg) - - idx = np.arange(x_test_0.shape[0]) - np.random.shuffle(idx) - x_test_0 = x_test_0[:int(np.floor(ntest * - ratio_test))] # 450 4 in testing. - - x_train_1 = x_train[y_train == class_1] - x_test_1 = x_test[y_test == class_1] - - # First shuffle, positive. 
- idx = np.arange(x_train_1.shape[0]) - np.random.shuffle(idx) - x_train_1 = x_train_1[idx] - - nvalsmall_pos = int(np.floor(nval * (1 - ratio_test))) - ntrainsmall_pos = int(np.floor(ntrain * (1 - ratio))) - nvalsmall_pos - - x_val_1 = x_train_1[:nvalsmall_pos] # 50 9 in validation. - x_train_1 = x_train_1[nvalsmall_pos:nvalsmall_pos + - ntrainsmall_pos] # 4500 9 in training. - - idx = np.arange(x_test_1.shape[0]) - np.random.shuffle(idx) - x_test_1 = x_test_1[idx] - x_test_1 = x_test_1[:int(np.floor(ntest * - (1 - ratio_test)))] # 500 9 in testing. - - print('Number of train positive classes', ntrainsmall_pos) - print('Number of val positive classes', nvalsmall_pos) - - y_train_subset = np.concatenate( - [np.zeros([x_train_0.shape[0]]), - np.ones([x_train_1.shape[0]])]) - y_val_subset = np.concatenate( - [np.zeros([x_val_0.shape[0]]), - np.ones([x_val_1.shape[0]])]) - y_test_subset = np.concatenate( - [np.zeros([x_test_0.shape[0]]), - np.ones([x_test_1.shape[0]])]) - - y_train_pos_subset = np.ones([x_train_1.shape[0]]) - y_train_neg_subset = np.zeros([x_train_0.shape[0]]) - - x_train_subset = np.concatenate([x_train_0, x_train_1], axis=0)[:, - None, :, :] - x_val_subset = np.concatenate([x_val_0, x_val_1], axis=0)[:, None, :, :] - x_test_subset = np.concatenate([x_test_0, x_test_1], axis=0)[:, None, :, :] - - x_train_pos_subset = x_train_1[:, None, :, :] - x_train_neg_subset = x_train_0[:, None, :, :] - - # Final shuffle. - idx = np.arange(x_train_subset.shape[0]) - np.random.shuffle(idx) - x_train_subset = x_train_subset[idx].astype(np.float32) - y_train_subset = y_train_subset[idx].astype(np.float32) - - idx = np.arange(x_val_subset.shape[0]) - np.random.shuffle(idx) - x_val_subset = x_val_subset[idx].astype(np.float32) - y_val_subset = y_val_subset[idx].astype(np.float32) - - idx = np.arange(x_test_subset.shape[0]) - np.random.shuffle(idx) - x_test_subset = x_test_subset[idx].astype(np.float32) - y_test_subset = y_test_subset[idx].astype(np.float32) - - x_train_subset, y_train_subset, x_val_subset, y_val_subset, x_test_subset, y_test_subset = torch.tensor( - x_train_subset), torch.tensor(y_train_subset), torch.tensor( - x_val_subset), torch.tensor(y_val_subset), torch.tensor( - x_test_subset), torch.tensor(y_test_subset) - - train_set, val_set, test_set = TensorDataset( - x_train_subset, y_train_subset), TensorDataset( - x_val_subset, y_val_subset), TensorDataset(x_test_subset, - y_test_subset) - - return train_set, val_set, test_set +def get_imbalance_dataset( + mnist_train, + mnist_test, + pos_ratio=0.9, + ntrain=5000, + nval=10, + ntest=500, + class_0=4, + class_1=9 +): + + ratio = 1 - pos_ratio + ratio_test = 0.5 + + # In training, we have 10% 4 and 90% 9. + # In testing, we have 50% 4 and 50% 9. + x_train = mnist_train.train_data.numpy() / 255.0 + y_train = mnist_train.train_labels.numpy() + x_test = mnist_test.test_data.numpy() / 255.0 + y_test = mnist_test.test_labels.numpy() + x_train_0 = x_train[y_train == class_0] + x_test_0 = x_test[y_test == class_0] + + # First shuffle, negative. + idx = np.arange(x_train_0.shape[0]) + np.random.shuffle(idx) + x_train_0 = x_train_0[idx] + + nval_small_neg = int(np.floor(nval * ratio_test)) + ntrain_small_neg = int(np.floor(ntrain * ratio)) - nval_small_neg + + x_val_0 = x_train_0[:nval_small_neg] # 450 4 in validation. + x_train_0 = x_train_0[nval_small_neg:nval_small_neg + ntrain_small_neg + ] # 500 4 in training. 
+ + print('Number of train negative classes', ntrain_small_neg) + print('Number of val negative classes', nval_small_neg) + + idx = np.arange(x_test_0.shape[0]) + np.random.shuffle(idx) + x_test_0 = x_test_0[:int(np.floor(ntest * ratio_test))] # 450 4 in testing. + + x_train_1 = x_train[y_train == class_1] + x_test_1 = x_test[y_test == class_1] + + # First shuffle, positive. + idx = np.arange(x_train_1.shape[0]) + np.random.shuffle(idx) + x_train_1 = x_train_1[idx] + + nvalsmall_pos = int(np.floor(nval * (1 - ratio_test))) + ntrainsmall_pos = int(np.floor(ntrain * (1 - ratio))) - nvalsmall_pos + + x_val_1 = x_train_1[:nvalsmall_pos] # 50 9 in validation. + x_train_1 = x_train_1[nvalsmall_pos:nvalsmall_pos + ntrainsmall_pos + ] # 4500 9 in training. + + idx = np.arange(x_test_1.shape[0]) + np.random.shuffle(idx) + x_test_1 = x_test_1[idx] + x_test_1 = x_test_1[:int(np.floor(ntest * (1 - ratio_test))) + ] # 500 9 in testing. + + print('Number of train positive classes', ntrainsmall_pos) + print('Number of val positive classes', nvalsmall_pos) + + y_train_subset = np.concatenate( + [np.zeros([x_train_0.shape[0]]), + np.ones([x_train_1.shape[0]])] + ) + y_val_subset = np.concatenate( + [np.zeros([x_val_0.shape[0]]), + np.ones([x_val_1.shape[0]])] + ) + y_test_subset = np.concatenate( + [np.zeros([x_test_0.shape[0]]), + np.ones([x_test_1.shape[0]])] + ) + + y_train_pos_subset = np.ones([x_train_1.shape[0]]) + y_train_neg_subset = np.zeros([x_train_0.shape[0]]) + + x_train_subset = np.concatenate([x_train_0, x_train_1], axis=0)[:, + None, :, :] + x_val_subset = np.concatenate([x_val_0, x_val_1], axis=0)[:, None, :, :] + x_test_subset = np.concatenate([x_test_0, x_test_1], axis=0)[:, None, :, :] + + x_train_pos_subset = x_train_1[:, None, :, :] + x_train_neg_subset = x_train_0[:, None, :, :] + + # Final shuffle. + idx = np.arange(x_train_subset.shape[0]) + np.random.shuffle(idx) + x_train_subset = x_train_subset[idx].astype(np.float32) + y_train_subset = y_train_subset[idx].astype(np.float32) + + idx = np.arange(x_val_subset.shape[0]) + np.random.shuffle(idx) + x_val_subset = x_val_subset[idx].astype(np.float32) + y_val_subset = y_val_subset[idx].astype(np.float32) + + idx = np.arange(x_test_subset.shape[0]) + np.random.shuffle(idx) + x_test_subset = x_test_subset[idx].astype(np.float32) + y_test_subset = y_test_subset[idx].astype(np.float32) + + x_train_subset, y_train_subset, x_val_subset, y_val_subset, x_test_subset, y_test_subset = torch.tensor( + x_train_subset + ), torch.tensor(y_train_subset), torch.tensor(x_val_subset), torch.tensor( + y_val_subset + ), torch.tensor(x_test_subset), torch.tensor(y_test_subset) + + train_set, val_set, test_set = TensorDataset( + x_train_subset, y_train_subset + ), TensorDataset(x_val_subset, + y_val_subset), TensorDataset(x_test_subset, y_test_subset) + + return train_set, val_set, test_set def set_seed(seed, cudnn=True): - """ + """ Seed everything we can! Note that gym environments might need additional seeding (env.seed(seed)), and num_workers needs to be set to 1. """ - random.seed(seed) - np.random.seed(seed) - torch.manual_seed(seed) - torch.random.manual_seed(seed) - torch.cuda.manual_seed(seed) - # note: the below slows down the code but makes it reproducible - torch.cuda.manual_seed_all( - seed - ) # Sets the seed for generating random numbers on all GPUs. It’s safe to call this function if CUDA is not available; in that case, it is silently ignored. 
- if cudnn: - torch.backends.cudnn.deterministic = True - torch.backends.cudnn.benchmark = False + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + torch.random.manual_seed(seed) + torch.cuda.manual_seed(seed) + # note: the below slows down the code but makes it reproducible + torch.cuda.manual_seed_all( + seed + ) # Sets the seed for generating random numbers on all GPUs. It’s safe to call this function if CUDA is not available; in that case, it is silently ignored. + if cudnn: + torch.backends.cudnn.deterministic = True + torch.backends.cudnn.benchmark = False def plot(baseline, l2r): - import matplotlib.pyplot as plt - import numpy as np - import seaborn as sns - sns.set(style='darkgrid') - sns.set_theme(style="darkgrid") - plt.plot(baseline, label='baseline') - plt.plot(l2r, label='l2r') - plt.legend() - plt.ylabel('Test acc') - plt.xlabel('Epoch') - plt.title('Comparison between Baseline and L2R') - plt.savefig('./result.png') + import matplotlib.pyplot as plt + import numpy as np + import seaborn as sns + sns.set(style='darkgrid') + sns.set_theme(style="darkgrid") + plt.plot(baseline, label='baseline') + plt.plot(l2r, label='l2r') + plt.legend() + plt.ylabel('Test acc') + plt.xlabel('Epoch') + plt.title('Comparison between Baseline and L2R') + plt.savefig('./result.png') diff --git a/examples/L2R/train_l2r.py b/examples/L2R/train_l2r.py index 3cc2a018..22deb2ce 100644 --- a/examples/L2R/train_l2r.py +++ b/examples/L2R/train_l2r.py @@ -46,243 +46,246 @@ def run_baseline(args, mnist_train, mnist_test): - print('Run Baseline') - set_seed(args.seed) - - pos_ratio = args.pos_ratio - ntrain = args.ntrain - nval = args.nval - ntest = args.ntest - epoch = args.epoch - - folder = './result/baseline/' - writer = SummaryWriter('./result/baseline') - with open('./result/baseline/config.json', 'w') as f: - json.dump(args.__dict__, f) - - args.device = torch.device( - "cuda:0" if torch.cuda.is_available() else "cpu") - - train_set, val_set, test_set = get_imbalance_dataset(mnist_train, - mnist_test, - pos_ratio=pos_ratio, - ntrain=ntrain, - nval=nval, - ntest=ntest) - train_loader = DataLoader(train_set, - batch_size=args.batch_size, - shuffle=True, - num_workers=4) - valid_loader = DataLoader(val_set, - batch_size=args.batch_size, - shuffle=True, - num_workers=1) - test_loader = DataLoader(test_set, - batch_size=args.batch_size, - shuffle=True, - num_workers=1) - model = LeNet5(args).to(args.device) - - model_optimiser = torch.optim.Adam(model.parameters(), lr=args.lr) - - step = 0 - running_train_loss = [] - test_acc_result = [] - for _epoch in range(epoch): - model.train() - for idx, (train_x, train_label) in enumerate(train_loader): - train_x, train_label = train_x.to(args.device), train_label.to( - args.device) - outer_loss = model.outer_loss(train_x, train_label) - - model_optimiser.zero_grad() - outer_loss.backward() - model_optimiser.step() - - running_train_loss.append(outer_loss.item()) - writer.add_scalar('train_loss', outer_loss.item(), step) - - if step % 10 == 0 and step > 0: - running_train_mean = np.mean(np.array(running_train_loss)) - print("EPOCH: {}, BATCH: {}, LOSS: {}".format( - _epoch, idx, running_train_mean)) - writer.add_scalar('running_train_loss', running_train_mean, - step) - running_train_loss = [] - - step += 1 - - print('Beginning to Test') - model.eval() - train_acc = evaluate(train_loader, model, args) - test_acc = evaluate(test_loader, model, args) - model.train() - - writer.add_scalar('train_acc', train_acc, _epoch) - 
writer.add_scalar('test_acc', test_acc, _epoch) - test_acc_result.append(test_acc) - print("EPOCH: {}, TRAIN_ACC: {}, TEST_ACC: {}".format( - _epoch, train_acc, test_acc)) - return test_acc_result + print('Run Baseline') + set_seed(args.seed) + + pos_ratio = args.pos_ratio + ntrain = args.ntrain + nval = args.nval + ntest = args.ntest + epoch = args.epoch + + folder = './result/baseline/' + writer = SummaryWriter('./result/baseline') + with open('./result/baseline/config.json', 'w') as f: + json.dump(args.__dict__, f) + + args.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") + + train_set, val_set, test_set = get_imbalance_dataset( + mnist_train, + mnist_test, + pos_ratio=pos_ratio, + ntrain=ntrain, + nval=nval, + ntest=ntest + ) + train_loader = DataLoader( + train_set, batch_size=args.batch_size, shuffle=True, num_workers=4 + ) + valid_loader = DataLoader( + val_set, batch_size=args.batch_size, shuffle=True, num_workers=1 + ) + test_loader = DataLoader( + test_set, batch_size=args.batch_size, shuffle=True, num_workers=1 + ) + model = LeNet5(args).to(args.device) + + model_optimiser = torch.optim.Adam(model.parameters(), lr=args.lr) + + step = 0 + running_train_loss = [] + test_acc_result = [] + for _epoch in range(epoch): + model.train() + for idx, (train_x, train_label) in enumerate(train_loader): + train_x, train_label = train_x.to(args.device + ), train_label.to(args.device) + outer_loss = model.outer_loss(train_x, train_label) + + model_optimiser.zero_grad() + outer_loss.backward() + model_optimiser.step() + + running_train_loss.append(outer_loss.item()) + writer.add_scalar('train_loss', outer_loss.item(), step) + + if step % 10 == 0 and step > 0: + running_train_mean = np.mean(np.array(running_train_loss)) + print( + "EPOCH: {}, BATCH: {}, LOSS: {}".format( + _epoch, idx, running_train_mean + ) + ) + writer.add_scalar('running_train_loss', running_train_mean, step) + running_train_loss = [] + + step += 1 + + print('Beginning to Test') + model.eval() + train_acc = evaluate(train_loader, model, args) + test_acc = evaluate(test_loader, model, args) + model.train() + + writer.add_scalar('train_acc', train_acc, _epoch) + writer.add_scalar('test_acc', test_acc, _epoch) + test_acc_result.append(test_acc) + print( + "EPOCH: {}, TRAIN_ACC: {}, TEST_ACC: {}".format( + _epoch, train_acc, test_acc + ) + ) + return test_acc_result def run_L2R(args, mnist_train, mnist_test): - print('Run L2R') - set_seed(args.seed) - - pos_ratio = args.pos_ratio - ntrain = args.ntrain - nval = args.nval - ntest = args.ntest - epoch = args.epoch - - folder = './result/l2r/' - writer = SummaryWriter('./result/l2r/log') - with open('./result/l2r/config.json', 'w') as f: - json.dump(args.__dict__, f) - - args.device = torch.device( - "cuda:0" if torch.cuda.is_available() else "cpu") - - train_set, val_set, test_set = get_imbalance_dataset(mnist_train, - mnist_test, - pos_ratio=pos_ratio, - ntrain=ntrain, - nval=nval, - ntest=ntest) - train_loader = DataLoader(train_set, - batch_size=args.batch_size, - shuffle=True, - num_workers=2) - valid_loader = DataLoader(val_set, - batch_size=args.batch_size, - shuffle=True, - num_workers=1) - test_loader = DataLoader(test_set, - batch_size=args.batch_size, - shuffle=True, - num_workers=1) - model = LeNet5(args).to(args.device) - model_optimiser = TorchOpt.MetaSGD(model, lr=args.lr) - real_model_optimiser = torch.optim.Adam(model.parameters(), lr=args.lr) - - step = 0 - time_bp = 0 - running_valid_loss = [] - valid = iter(valid_loader) - running_train_loss 
= [] - test_acc_result = [] - for _epoch in range(epoch): - model.train() - for idx, (train_x, train_label) in enumerate(train_loader): - try: - valid_x, valid_label = valid.next() - except: - valid = iter(valid_loader) - valid_x, valid_label = valid.next() - train_x, train_label, valid_x, valid_label = train_x.to( - args.device), train_label.to(args.device), valid_x.to( - args.device), valid_label.to(args.device) - - # reset meta-parameter weights - model.reset_meta(size=train_x.size(0)) - - net_state_dict = TorchOpt.extract_state_dict(model) - optim_state_dict = TorchOpt.extract_state_dict(model_optimiser) - - for _ in range(1): - inner_loss = model.inner_loss(train_x, train_label) - model_optimiser.step(inner_loss) - - # caclulate outer_loss, deirve meta-gradient and normalise - outer_loss = model.outer_loss(valid_x, valid_label) - model.meta_weights = - \ - torch.autograd.grad(outer_loss, model.meta_weights)[0] - model.meta_weights = torch.nn.ReLU()(model.meta_weights) - model.normalise() - - # log loss - running_valid_loss.append(outer_loss.item()) - writer.add_scalar('validation_loss', outer_loss.item(), step) - - # reset the model and model optimiser - TorchOpt.recover_state_dict(model, net_state_dict) - TorchOpt.recover_state_dict(model_optimiser, optim_state_dict) - - # reuse inner_adapt to conduct real update based on learned meta weights - inner_loss = model.inner_loss(train_x, train_label) - for _ in range(1): - inner_loss = model.inner_loss(train_x, train_label) - real_model_optimiser.zero_grad() - inner_loss.backward() - real_model_optimiser.step() - - running_train_loss.append(inner_loss.item()) - writer.add_scalar('weighted_train_loss', inner_loss.item(), step) - - if step % 10 == 0 and step > 0: - running_valid_mean = np.mean(np.array(running_valid_loss)) - running_train_mean = np.mean(np.array(running_train_loss)) - print( - "EPOCH: {}, BATCH: {}, WEIGHTED_TRAIN_LOSS: {}, VALID_LOSS: {}" - .format(_epoch, idx, running_train_mean, - running_valid_mean)) - running_valid_loss = [] - running_train_loss = [] - writer.add_scalar('running_valid_loss', running_valid_mean, - step) - writer.add_scalar('running_train_loss', running_train_mean, - step) - - step += 1 - - print('Beginning to Test') - model.eval() - train_acc = evaluate(train_loader, model, args) - test_acc = evaluate(test_loader, model, args) - model.train() - - writer.add_scalar('train_acc', train_acc, _epoch) - writer.add_scalar('test_acc', test_acc, _epoch) - test_acc_result.append(test_acc) - print("EPOCH: {}, TRAIN_ACC: {}, TEST_ACC: {}".format( - _epoch, train_acc, test_acc)) - return test_acc_result + print('Run L2R') + set_seed(args.seed) + + pos_ratio = args.pos_ratio + ntrain = args.ntrain + nval = args.nval + ntest = args.ntest + epoch = args.epoch + + folder = './result/l2r/' + writer = SummaryWriter('./result/l2r/log') + with open('./result/l2r/config.json', 'w') as f: + json.dump(args.__dict__, f) + + args.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") + + train_set, val_set, test_set = get_imbalance_dataset( + mnist_train, + mnist_test, + pos_ratio=pos_ratio, + ntrain=ntrain, + nval=nval, + ntest=ntest + ) + train_loader = DataLoader( + train_set, batch_size=args.batch_size, shuffle=True, num_workers=2 + ) + valid_loader = DataLoader( + val_set, batch_size=args.batch_size, shuffle=True, num_workers=1 + ) + test_loader = DataLoader( + test_set, batch_size=args.batch_size, shuffle=True, num_workers=1 + ) + model = LeNet5(args).to(args.device) + model_optimiser = 
TorchOpt.MetaSGD(model, lr=args.lr)
+  real_model_optimiser = torch.optim.Adam(model.parameters(), lr=args.lr)
+
+  step = 0
+  time_bp = 0
+  running_valid_loss = []
+  valid = iter(valid_loader)
+  running_train_loss = []
+  test_acc_result = []
+  for _epoch in range(epoch):
+    model.train()
+    for idx, (train_x, train_label) in enumerate(train_loader):
+      try:
+        valid_x, valid_label = next(valid)
+      except StopIteration:
+        valid = iter(valid_loader)
+        valid_x, valid_label = next(valid)
+      train_x, train_label, valid_x, valid_label = train_x.to(
+          args.device
+      ), train_label.to(args.device), valid_x.to(args.device
+                                                ), valid_label.to(args.device)
+
+      # reset meta-parameter weights
+      model.reset_meta(size=train_x.size(0))
+
+      net_state_dict = TorchOpt.extract_state_dict(model)
+      optim_state_dict = TorchOpt.extract_state_dict(model_optimiser)
+
+      for _ in range(1):
+        inner_loss = model.inner_loss(train_x, train_label)
+        model_optimiser.step(inner_loss)
+
+      # calculate outer_loss, derive meta-gradient and normalise
+      outer_loss = model.outer_loss(valid_x, valid_label)
+      model.meta_weights = - \
+          torch.autograd.grad(outer_loss, model.meta_weights)[0]
+      model.meta_weights = torch.nn.ReLU()(model.meta_weights)
+      model.normalise()
+
+      # log loss
+      running_valid_loss.append(outer_loss.item())
+      writer.add_scalar('validation_loss', outer_loss.item(), step)
+
+      # reset the model and model optimiser
+      TorchOpt.recover_state_dict(model, net_state_dict)
+      TorchOpt.recover_state_dict(model_optimiser, optim_state_dict)
+
+      # reuse inner_adapt to conduct real update based on learned meta weights
+      inner_loss = model.inner_loss(train_x, train_label)
+      for _ in range(1):
+        inner_loss = model.inner_loss(train_x, train_label)
+        real_model_optimiser.zero_grad()
+        inner_loss.backward()
+        real_model_optimiser.step()
+
+      running_train_loss.append(inner_loss.item())
+      writer.add_scalar('weighted_train_loss', inner_loss.item(), step)
+
+      if step % 10 == 0 and step > 0:
+        running_valid_mean = np.mean(np.array(running_valid_loss))
+        running_train_mean = np.mean(np.array(running_train_loss))
+        print(
+            "EPOCH: {}, BATCH: {}, WEIGHTED_TRAIN_LOSS: {}, VALID_LOSS: {}"
+            .format(_epoch, idx, running_train_mean, running_valid_mean)
+        )
+        running_valid_loss = []
+        running_train_loss = []
+        writer.add_scalar('running_valid_loss', running_valid_mean, step)
+        writer.add_scalar('running_train_loss', running_train_mean, step)
+
+      step += 1
+
+    print('Beginning to Test')
+    model.eval()
+    train_acc = evaluate(train_loader, model, args)
+    test_acc = evaluate(test_loader, model, args)
+    model.train()
+
+    writer.add_scalar('train_acc', train_acc, _epoch)
+    writer.add_scalar('test_acc', test_acc, _epoch)
+    test_acc_result.append(test_acc)
+    print(
+        "EPOCH: {}, TRAIN_ACC: {}, TEST_ACC: {}".format(
+            _epoch, train_acc, test_acc
+        )
+    )
+  return test_acc_result
 
 
 def evaluate(data_loader, model, args):
-    running_accuracy = 0
-    total = 0
-    with torch.no_grad():
-        for data in data_loader:
-            inputs, outputs = data
-            inputs, outputs = inputs.to(args.device), outputs.to(args.device)
-            predicted = model(inputs)
-            predicted[predicted >= 0.5] = 1.0
-            predicted[predicted < 0.5] = 0.0
-            total += outputs.size(0)
-            running_accuracy += (predicted == outputs).sum().item()
-
-    accuracy = running_accuracy / total
-    return accuracy
+  running_accuracy = 0
+  total = 0
+  with torch.no_grad():
+    for data in data_loader:
+      inputs, outputs = data
+      inputs, outputs = inputs.to(args.device), outputs.to(args.device)
+      predicted = model(inputs)
+      predicted[predicted >= 0.5] = 1.0
+
predicted[predicted < 0.5] = 0.0 + total += outputs.size(0) + running_accuracy += (predicted == outputs).sum().item() + + accuracy = running_accuracy / total + return accuracy def main(): - mnist_train = MNIST(root='./helper/mnist_data', download=True, train=True) - mnist_test = MNIST(root='./helper/mnist_data', download=True, train=False) + mnist_train = MNIST(root='./helper/mnist_data', download=True, train=True) + mnist_test = MNIST(root='./helper/mnist_data', download=True, train=False) + args = parse_args() + + assert args.algo in ['baseline', 'l2r', 'both'] + if args.algo == 'baseline': + run_baseline(args, mnist_train, mnist_test) + elif args.algo == 'l2r': + run_L2R(args, mnist_train, mnist_test) + else: + baseline_test_acc = run_baseline(args, mnist_train, mnist_test) args = parse_args() - - assert args.algo in ['baseline', 'l2r', 'both'] - if args.algo == 'baseline': - run_baseline(args, mnist_train, mnist_test) - elif args.algo == 'l2r': - run_L2R(args, mnist_train, mnist_test) - else: - baseline_test_acc = run_baseline(args, mnist_train, mnist_test) - args = parse_args() - l2r_test_acc = run_L2R(args, mnist_train, mnist_test) - plot(baseline_test_acc, l2r_test_acc) + l2r_test_acc = run_L2R(args, mnist_train, mnist_test) + plot(baseline_test_acc, l2r_test_acc) if __name__ == '__main__': - main() + main() diff --git a/examples/LOLA/helper/agent.py b/examples/LOLA/helper/agent.py index 350a77fa..7676cadd 100755 --- a/examples/LOLA/helper/agent.py +++ b/examples/LOLA/helper/agent.py @@ -23,32 +23,34 @@ class theta_model(nn.Module): - def __init__(self, theta): - super().__init__() - self.theta = nn.Parameter( - torch.tensor(theta.detach(), requires_grad=True)) + + def __init__(self, theta): + super().__init__() + self.theta = nn.Parameter(torch.tensor(theta.detach(), requires_grad=True)) class Agent(): - def __init__(self, args): - self.args = args - # init theta and its optimizer - self.theta = nn.Parameter(torch.zeros(5, requires_grad=True)) - self.theta_optimizer = torch.optim.Adam((self.theta, ), lr=args.lr_out) + def __init__(self, args): + + self.args = args + # init theta and its optimizer + self.theta = nn.Parameter(torch.zeros(5, requires_grad=True)) + self.theta_optimizer = torch.optim.Adam((self.theta,), lr=args.lr_out) - # init values and its optimizer - self.values = nn.Parameter(torch.zeros(5, requires_grad=True)) - self.value_optimizer = torch.optim.Adam((self.values, ), lr=args.lr_v) + # init values and its optimizer + self.values = nn.Parameter(torch.zeros(5, requires_grad=True)) + self.value_optimizer = torch.optim.Adam((self.values,), lr=args.lr_v) - self.set_virtual() + self.set_virtual() - def set_virtual(self): - self.virtual_theta = theta_model(self.theta) - self.virtual_optimiser = TorchOpt.MetaSGD(self.virtual_theta, - lr=self.args.lr_in) + def set_virtual(self): + self.virtual_theta = theta_model(self.theta) + self.virtual_optimiser = TorchOpt.MetaSGD( + self.virtual_theta, lr=self.args.lr_in + ) - def value_update(self, loss): - self.value_optimizer.zero_grad() - loss.backward() - self.value_optimizer.step() + def value_update(self, loss): + self.value_optimizer.zero_grad() + loss.backward() + self.value_optimizer.step() diff --git a/examples/LOLA/helper/argument.py b/examples/LOLA/helper/argument.py index c9b20902..33a29f38 100755 --- a/examples/LOLA/helper/argument.py +++ b/examples/LOLA/helper/argument.py @@ -17,43 +17,35 @@ def parse_args(): - parser = argparse.ArgumentParser([], description='LOLA') + parser = argparse.ArgumentParser([], 
description='LOLA')

-    parser.add_argument('--seed', type=int, default=6666)
-    parser.add_argument('--lr_in',
-                        type=float,
-                        default=0.3,
-                        help='Inner Learning rate')
+  parser.add_argument('--seed', type=int, default=6666)
+  parser.add_argument(
+    '--lr_in', type=float, default=0.3, help='Inner learning rate'
+  )

-    parser.add_argument('--lr_out',
-                        type=float,
-                        default=0.2,
-                        help='Outer learning rate')
-    parser.add_argument('--lr_v',
-                        type=float,
-                        default=0.1,
-                        help='Learning rate of value function')
-    parser.add_argument('--gamma',
-                        type=float,
-                        default=0.96,
-                        help='Discount factor')
-    parser.add_argument('--n_update',
-                        type=int,
-                        default=100,
-                        help='Number of updates')
-    parser.add_argument('--n_lookaheads',
-                        type=int,
-                        default=1,
-                        help='Number of updates')
-    parser.add_argument('--len_rollout',
-                        type=int,
-                        default=150,
-                        help='Length of IPD')
-    parser.add_argument('--batch_size',
-                        type=int,
-                        default=1024,
-                        help='Natch size')
-    parser.add_argument('--use_baseline', action='store_false', default=True)
+  parser.add_argument(
+    '--lr_out', type=float, default=0.2, help='Outer learning rate'
+  )
+  parser.add_argument(
+    '--lr_v', type=float, default=0.1, help='Learning rate of value function'
+  )
+  parser.add_argument(
+    '--gamma', type=float, default=0.96, help='Discount factor'
+  )
+  parser.add_argument(
+    '--n_update', type=int, default=100, help='Number of updates'
+  )
+  parser.add_argument(
+    '--n_lookaheads', type=int, default=1, help='Number of lookaheads'
+  )
+  parser.add_argument(
+    '--len_rollout', type=int, default=150, help='Length of IPD'
+  )
+  parser.add_argument(
+    '--batch_size', type=int, default=1024, help='Batch size'
+  )
+  parser.add_argument('--use_baseline', action='store_false', default=True)

-    args = parser.parse_args()
-    return args
+  args = parser.parse_args()
+  return args
diff --git a/examples/LOLA/helper/env.py b/examples/LOLA/helper/env.py
index d361c908..bb72c5b0 100755
--- a/examples/LOLA/helper/env.py
+++ b/examples/LOLA/helper/env.py
@@ -22,76 +22,79 @@


 class OneHot(gym.Space):
-    """
+  """
     One-hot space. Used as the observation space.
     """
-    def __init__(self, n):
-        self.n = n

-    def sample(self):
-        return np.random.multinomial(1, [1. / self.n] * self.n)
+
+  def __init__(self, n):
+    self.n = n
+
+  def sample(self):
+    return np.random.multinomial(1, [1. / self.n] * self.n)

-    def contains(self, x):
-        return isinstance(x, np.ndarray) and \
-            x.shape == (self.n, ) and \
-            np.all(np.logical_or(x == 0, x == 1)) and \
-            np.sum(x) == 1
+  def contains(self, x):
+    return isinstance(x, np.ndarray) and \
+      x.shape == (self.n, ) and \
+      np.all(np.logical_or(x == 0, x == 1)) and \
+      np.sum(x) == 1

-    @property
-    def shape(self):
-        return (self.n, )
+  @property
+  def shape(self):
+    return (self.n,)

-    def __repr__(self):
-        return "OneHot(%d)" % self.n
+  def __repr__(self):
+    return "OneHot(%d)" % self.n

-    def __eq__(self, other):
-        return self.n == other.n
+  def __eq__(self, other):
+    return self.n == other.n


 class IPD(gym.Env):
-    """
+  """
     A two-agent vectorized environment.
     Possible actions for each agent are (C)ooperate and (D)efect.
""" - # Possible actions - NUM_AGENTS = 2 - NUM_ACTIONS = 2 - NUM_STATES = 5 - - def __init__(self, max_steps, batch_size=1): - self.max_steps = max_steps - self.batch_size = batch_size - self.payout_mat = np.array([[-2, 0], [-3, -1]]) - self.states = np.array([[1, 2], [3, 4]]) - - self.action_space = Tuple( - [Discrete(self.NUM_ACTIONS) for _ in range(self.NUM_AGENTS)]) - self.observation_space = Tuple( - [OneHot(self.NUM_STATES) for _ in range(self.NUM_AGENTS)]) - self.available_actions = [ - np.ones((batch_size, self.NUM_ACTIONS), dtype=int) - for _ in range(self.NUM_AGENTS) - ] - - self.step_count = None - - def reset(self): - self.step_count = 0 - init_state = np.zeros(self.batch_size) - observation = [init_state, init_state] - info = [{'available_actions': aa} for aa in self.available_actions] - return observation, info - - def step(self, action): - ac0, ac1 = action - self.step_count += 1 - - r0 = self.payout_mat[ac0, ac1] - r1 = self.payout_mat[ac1, ac0] - s0 = self.states[ac0, ac1] - s1 = self.states[ac1, ac0] - observation = [s0, s1] - reward = [r0, r1] - done = (self.step_count == self.max_steps) - info = [{'available_actions': aa} for aa in self.available_actions] - return observation, reward, done, info + # Possible actions + NUM_AGENTS = 2 + NUM_ACTIONS = 2 + NUM_STATES = 5 + + def __init__(self, max_steps, batch_size=1): + self.max_steps = max_steps + self.batch_size = batch_size + self.payout_mat = np.array([[-2, 0], [-3, -1]]) + self.states = np.array([[1, 2], [3, 4]]) + + self.action_space = Tuple( + [Discrete(self.NUM_ACTIONS) for _ in range(self.NUM_AGENTS)] + ) + self.observation_space = Tuple( + [OneHot(self.NUM_STATES) for _ in range(self.NUM_AGENTS)] + ) + self.available_actions = [ + np.ones((batch_size, self.NUM_ACTIONS), dtype=int) + for _ in range(self.NUM_AGENTS) + ] + + self.step_count = None + + def reset(self): + self.step_count = 0 + init_state = np.zeros(self.batch_size) + observation = [init_state, init_state] + info = [{'available_actions': aa} for aa in self.available_actions] + return observation, info + + def step(self, action): + ac0, ac1 = action + self.step_count += 1 + + r0 = self.payout_mat[ac0, ac1] + r1 = self.payout_mat[ac1, ac0] + s0 = self.states[ac0, ac1] + s1 = self.states[ac1, ac0] + observation = [s0, s1] + reward = [r0, r1] + done = (self.step_count == self.max_steps) + info = [{'available_actions': aa} for aa in self.available_actions] + return observation, reward, done, info diff --git a/examples/LOLA/helper/utils.py b/examples/LOLA/helper/utils.py index 76267da0..30b8cf51 100755 --- a/examples/LOLA/helper/utils.py +++ b/examples/LOLA/helper/utils.py @@ -23,98 +23,101 @@ # evaluate the policy def step(ipd, theta1, theta2, values1, values2, args): - # just to evaluate progress: - (s1, s2), _ = ipd.reset() - score1 = 0 - score2 = 0 - for t in range(args.len_rollout): - a1, lp1, v1 = act(s1, theta1, values1) - a2, lp2, v2 = act(s2, theta2, values2) - (s1, s2), (r1, r2), _, _ = ipd.step((a1, a2)) - # cumulate scores - score1 += np.mean(r1) / float(args.len_rollout) - score2 += np.mean(r2) / float(args.len_rollout) - return (score1, score2) + # just to evaluate progress: + (s1, s2), _ = ipd.reset() + score1 = 0 + score2 = 0 + for t in range(args.len_rollout): + a1, lp1, v1 = act(s1, theta1, values1) + a2, lp2, v2 = act(s2, theta2, values2) + (s1, s2), (r1, r2), _, _ = ipd.step((a1, a2)) + # cumulate scores + score1 += np.mean(r1) / float(args.len_rollout) + score2 += np.mean(r2) / float(args.len_rollout) + return (score1, score2) # dice 
operator
 def magic_box(x):
-    return torch.exp(x - x.detach())
+  # equals 1 in the forward pass; its gradient is the gradient of x (DiCE)
+  return torch.exp(x - x.detach())


 # replay buffer
 class Memory():
-    def __init__(self, args):
-        self.self_logprobs = []
-        self.other_logprobs = []
-        self.values = []
-        self.rewards = []
-        self.args = args
-
-    def add(self, lp, other_lp, v, r):
-        self.self_logprobs.append(lp)
-        self.other_logprobs.append(other_lp)
-        self.values.append(v)
-        self.rewards.append(r)
-
-    def dice_objective(self, use_baseline=True):
-        self_logprobs = torch.stack(self.self_logprobs, dim=1)
-        other_logprobs = torch.stack(self.other_logprobs, dim=1)
-        values = torch.stack(self.values, dim=1)
-        rewards = torch.stack(self.rewards, dim=1)
-
-        # apply discount:
-        cum_discount = torch.cumprod(
-            self.args.gamma * torch.ones(*rewards.size()),
-            dim=1) / self.args.gamma
-        discounted_rewards = rewards * cum_discount
-        discounted_values = values * cum_discount
-
-        # stochastics nodes involved in rewards dependencies:
-        dependencies = torch.cumsum(self_logprobs + other_logprobs, dim=1)
-
-        # logprob of each stochastic nodes:
-        stochastic_nodes = self_logprobs + other_logprobs
-
-        # dice objective:
-        dice_objective = torch.mean(
-            torch.sum(magic_box(dependencies) * discounted_rewards, dim=1))
-
-        if use_baseline:
-            # variance_reduction:
-            baseline_term = torch.mean(
-                torch.sum(
-                    (1 - magic_box(stochastic_nodes)) * discounted_values,
-                    dim=1))
-            dice_objective = dice_objective + baseline_term
-
-        return -dice_objective  # want to minimize -objective
-
-    def value_loss(self):
-        values = torch.stack(self.values, dim=1)
-        rewards = torch.stack(self.rewards, dim=1)
-        return torch.mean((rewards - values)**2)
+
+  def __init__(self, args):
+    self.self_logprobs = []
+    self.other_logprobs = []
+    self.values = []
+    self.rewards = []
+    self.args = args
+
+  def add(self, lp, other_lp, v, r):
+    self.self_logprobs.append(lp)
+    self.other_logprobs.append(other_lp)
+    self.values.append(v)
+    self.rewards.append(r)
+
+  def dice_objective(self, use_baseline=True):
+    self_logprobs = torch.stack(self.self_logprobs, dim=1)
+    other_logprobs = torch.stack(self.other_logprobs, dim=1)
+    values = torch.stack(self.values, dim=1)
+    rewards = torch.stack(self.rewards, dim=1)
+
+    # apply discount:
+    cum_discount = torch.cumprod(
+      self.args.gamma * torch.ones(*rewards.size()), dim=1
+    ) / self.args.gamma
+    discounted_rewards = rewards * cum_discount
+    discounted_values = values * cum_discount
+
+    # stochastic nodes involved in reward dependencies:
+    dependencies = torch.cumsum(self_logprobs + other_logprobs, dim=1)
+
+    # log-prob of each stochastic node:
+    stochastic_nodes = self_logprobs + other_logprobs
+
+    # dice objective:
+    dice_objective = torch.mean(
+      torch.sum(magic_box(dependencies) * discounted_rewards, dim=1)
+    )
+
+    if use_baseline:
+      # variance reduction:
+      baseline_term = torch.mean(
+        torch.sum(
+          (1 - magic_box(stochastic_nodes)) * discounted_values, dim=1
+        )
+      )
+      dice_objective = dice_objective + baseline_term
+
+    return -dice_objective  # want to minimize -objective
+
+  def value_loss(self):
+    values = torch.stack(self.values, dim=1)
+    rewards = torch.stack(self.rewards, dim=1)
+    return torch.mean((rewards - values)**2)


 def act(batch_states, theta, values):
-    batch_states = torch.from_numpy(batch_states).long()
-    probs = torch.sigmoid(theta)[batch_states]
-    m = Bernoulli(1 - probs)
-    actions = m.sample()
-    log_probs_actions = m.log_prob(actions)
-    return actions.numpy().astype(int), log_probs_actions, values[batch_states]
+  batch_states = 
torch.from_numpy(batch_states).long() + probs = torch.sigmoid(theta)[batch_states] + m = Bernoulli(1 - probs) + actions = m.sample() + log_probs_actions = m.log_prob(actions) + return actions.numpy().astype(int), log_probs_actions, values[batch_states] def sample(ipd, policy, value, args): - theta1, theta2 = policy - value1, value2 = value - (s1, s2), _ = ipd.reset() - memory_agent1 = Memory(args) - memory_agent2 = Memory(args) - for t in range(args.len_rollout): - a1, lp1, v1 = act(s1, theta1, value1) - a2, lp2, v2 = act(s2, theta2, value2) - (s1, s2), (r1, r2), _, _ = ipd.step((a1, a2)) - memory_agent1.add(lp1, lp2, v1, torch.from_numpy(r1).float()) - memory_agent2.add(lp2, lp1, v2, torch.from_numpy(r2).float()) - return memory_agent1, memory_agent2 + theta1, theta2 = policy + value1, value2 = value + (s1, s2), _ = ipd.reset() + memory_agent1 = Memory(args) + memory_agent2 = Memory(args) + for t in range(args.len_rollout): + a1, lp1, v1 = act(s1, theta1, value1) + a2, lp2, v2 = act(s2, theta2, value2) + (s1, s2), (r1, r2), _, _ = ipd.step((a1, a2)) + memory_agent1.add(lp1, lp2, v1, torch.from_numpy(r1).float()) + memory_agent2.add(lp2, lp1, v2, torch.from_numpy(r2).float()) + return memory_agent1, memory_agent2 diff --git a/examples/LOLA/lola_dice.py b/examples/LOLA/lola_dice.py index cde65c9e..f5a112da 100755 --- a/examples/LOLA/lola_dice.py +++ b/examples/LOLA/lola_dice.py @@ -32,83 +32,89 @@ def main(args): - ipd = IPD(args.len_rollout, args.batch_size) - agent1, agent2 = Agent(args), Agent(args) - agent1_copy, agent2_copy = Agent(args), Agent(args) - n_lookaheads = args.n_lookaheads - joint_scores = [] - print("start iterations with", n_lookaheads, "lookaheads:") - - for update in range(args.n_update): - # reset virtual update - agent1.set_virtual() - agent2.set_virtual() - - # agent 2 assumes that agent 1 conducts n-step lookahead - for _ in range(n_lookaheads): - memory1, memory2 = sample( - ipd, [agent1.virtual_theta.theta, agent2.theta], - [agent1.values, agent2.values], args) - inner_loss = memory1.dice_objective(use_baseline=args.use_baseline) - agent1.virtual_optimiser.step(inner_loss) - - # agent 1 assumes that agent 2 conducts n-step lookahead - for _ in range(n_lookaheads): - memory1, memory2 = sample( - ipd, [agent1.theta, agent2.virtual_theta.theta], - [agent1.values, agent2.values], args) - inner_loss = memory2.dice_objective(use_baseline=args.use_baseline) - agent2.virtual_optimiser.step(inner_loss) - - # update agent 1 - memory1, memory2 = sample(ipd, - [agent1.theta, agent2.virtual_theta.theta], - [agent1.values, agent2.values], args) - outer_loss = memory1.dice_objective(use_baseline=args.use_baseline) - agent1.theta_optimizer.zero_grad() - outer_loss.backward(retain_graph=True) - agent1.theta_optimizer.step() - - # update agent 1 value function - v_loss = memory1.value_loss() - agent1.value_update(v_loss) - - # update agent 2 - memory1, memory2 = sample(ipd, - [agent1.virtual_theta.theta, agent2.theta], - [agent1.values, agent2.values], args) - outer_loss = memory2.dice_objective(use_baseline=args.use_baseline) - agent2.theta_optimizer.zero_grad() - outer_loss.backward(retain_graph=True) - agent2.theta_optimizer.step() - - # update agent 2 value function - v_loss = memory2.value_loss() - agent2.value_update(v_loss) - - # evaluate progress: - score = step(ipd, agent1.theta, agent2.theta, agent1.values, - agent2.values, args) - joint_scores.append(0.5 * (score[0] + score[1])) - - # print - if update % 10 == 0: - p1 = [p.item() for p in torch.sigmoid(agent1.theta)] - 
p2 = [p.item() for p in torch.sigmoid(agent2.theta)] - print( - 'update', update, 'score (%.3f,%.3f)' % (score[0], score[1]), - 'policy (agent1) = {%.3f, %.3f, %.3f, %.3f, %.3f}' % - (p1[0], p1[1], p1[2], p1[3], p1[4]), - ' (agent2) = {%.3f, %.3f, %.3f, %.3f, %.3f}' % - (p2[0], p2[1], p2[2], p2[3], p2[4])) - - return joint_scores + ipd = IPD(args.len_rollout, args.batch_size) + agent1, agent2 = Agent(args), Agent(args) + agent1_copy, agent2_copy = Agent(args), Agent(args) + n_lookaheads = args.n_lookaheads + joint_scores = [] + print("start iterations with", n_lookaheads, "lookaheads:") + + for update in range(args.n_update): + # reset virtual update + agent1.set_virtual() + agent2.set_virtual() + + # agent 2 assumes that agent 1 conducts n-step lookahead + for _ in range(n_lookaheads): + memory1, memory2 = sample( + ipd, [agent1.virtual_theta.theta, agent2.theta], + [agent1.values, agent2.values], args + ) + inner_loss = memory1.dice_objective(use_baseline=args.use_baseline) + agent1.virtual_optimiser.step(inner_loss) + + # agent 1 assumes that agent 2 conducts n-step lookahead + for _ in range(n_lookaheads): + memory1, memory2 = sample( + ipd, [agent1.theta, agent2.virtual_theta.theta], + [agent1.values, agent2.values], args + ) + inner_loss = memory2.dice_objective(use_baseline=args.use_baseline) + agent2.virtual_optimiser.step(inner_loss) + + # update agent 1 + memory1, memory2 = sample( + ipd, [agent1.theta, agent2.virtual_theta.theta], + [agent1.values, agent2.values], args + ) + outer_loss = memory1.dice_objective(use_baseline=args.use_baseline) + agent1.theta_optimizer.zero_grad() + outer_loss.backward(retain_graph=True) + agent1.theta_optimizer.step() + + # update agent 1 value function + v_loss = memory1.value_loss() + agent1.value_update(v_loss) + + # update agent 2 + memory1, memory2 = sample( + ipd, [agent1.virtual_theta.theta, agent2.theta], + [agent1.values, agent2.values], args + ) + outer_loss = memory2.dice_objective(use_baseline=args.use_baseline) + agent2.theta_optimizer.zero_grad() + outer_loss.backward(retain_graph=True) + agent2.theta_optimizer.step() + + # update agent 2 value function + v_loss = memory2.value_loss() + agent2.value_update(v_loss) + + # evaluate progress: + score = step( + ipd, agent1.theta, agent2.theta, agent1.values, agent2.values, args + ) + joint_scores.append(0.5 * (score[0] + score[1])) + + # print + if update % 10 == 0: + p1 = [p.item() for p in torch.sigmoid(agent1.theta)] + p2 = [p.item() for p in torch.sigmoid(agent2.theta)] + print( + 'update', update, 'score (%.3f,%.3f)' % (score[0], score[1]), + 'policy (agent1) = {%.3f, %.3f, %.3f, %.3f, %.3f}' % + (p1[0], p1[1], p1[2], p1[3], p1[4]), + ' (agent2) = {%.3f, %.3f, %.3f, %.3f, %.3f}' % + (p2[0], p2[1], p2[2], p2[3], p2[4]) + ) + + return joint_scores if __name__ == "__main__": - args = parse_args() - joint_score = dict() - for nla in range(3): - args.n_lookaheads = nla - joint_score[nla] = main(args) - np.save('result.npy', joint_score) + args = parse_args() + joint_score = dict() + for nla in range(3): + args.n_lookaheads = nla + joint_score[nla] = main(args) + np.save('result.npy', joint_score) diff --git a/examples/LOLA/visualise.py b/examples/LOLA/visualise.py index 2640f6a7..de71afef 100755 --- a/examples/LOLA/visualise.py +++ b/examples/LOLA/visualise.py @@ -19,17 +19,17 @@ def plot(file): - data = np.load('result.npy', allow_pickle=True).tolist() - sns.set(style='darkgrid') - sns.set_theme(style="darkgrid") - for step in range(3): - plt.plot(data[step], label='Step ' + str(step)) - 
plt.legend()
-    plt.xlabel('Iteartions', fontsize=20)
-    plt.ylabel('Joint score', fontsize=20)
-    plt.savefig('./result.png')
+  data = np.load(file, allow_pickle=True).tolist()
+  sns.set(style='darkgrid')
+  sns.set_theme(style="darkgrid")
+  for step in range(3):
+    plt.plot(data[step], label='Step ' + str(step))
+  plt.legend()
+  plt.xlabel('Iterations', fontsize=20)
+  plt.ylabel('Joint score', fontsize=20)
+  plt.savefig('./result.png')


 # plot progress:
 if __name__ == "__main__":
-    plot('result.npy')
+  plot('result.npy')
diff --git a/examples/MAML-RL/helpers/Tabular_mdp.py b/examples/MAML-RL/helpers/Tabular_mdp.py
index f0d1d313..32a9d929 100644
--- a/examples/MAML-RL/helpers/Tabular_mdp.py
+++ b/examples/MAML-RL/helpers/Tabular_mdp.py
@@ -24,7 +24,7 @@


 class TabularMDPEnv(gym.Env):
-    """Tabular MDP problems, as described in [1].
+  """Tabular MDP problems, as described in [1].

     At each time step, the agent chooses one of `num_actions` actions, say `i`,
     receives a reward sampled from a Normal distribution with mean `m_i` and
@@ -38,83 +38,84 @@ class TabularMDPEnv(gym.Env):
     Pieter Abbeel, "RL2: Fast Reinforcement Learning via Slow Reinforcement
     Learning", 2016 (https://arxiv.org/abs/1611.02779)
     """
-    def __init__(self,
-                 num_states,
-                 num_actions,
-                 max_episode_steps,
-                 seed,
-                 task={}):
-        super(TabularMDPEnv, self).__init__()
-        self.max_episode_steps = max_episode_steps
-        self.num_states = num_states
-        self.num_actions = num_actions
-
-        self.action_space = spaces.Discrete(num_actions)
-        self.observation_space = spaces.Box(low=0.0,
-                                            high=1.0,
-                                            shape=(num_states, ),
-                                            dtype=np.float32)
-
-        self._task = task
-        self._transitions = task.get(
-            'transitions',
-            np.full((num_states, num_actions, num_states),
-                    1.0 / num_states,
-                    dtype=np.float32))
-        self._rewards_mean = task.get(
-            'rewards_mean',
-            np.zeros((num_states, num_actions), dtype=np.float32))
-        self._state = 0
-        self._elapsed_steps = None
-
-        self.seed(seed)
-
-    def seed(self, seed=None):
-        self.np_random, seed = seeding.np_random(seed)
-        return [seed]
-
-    def sample_tasks(self, num_tasks):
-        transitions = self.np_random.dirichlet(np.ones(self.num_states),
-                                               size=(num_tasks,
-                                                     self.num_states,
-                                                     self.num_actions))
-        rewards_mean = self.np_random.normal(1.0,
-                                             1.0,
-                                             size=(num_tasks, self.num_states,
-                                                   self.num_actions))
-        tasks = [{
-            'transitions': transition,
-            'rewards_mean': reward_mean
-        } for (transition, reward_mean) in zip(transitions, rewards_mean)]
-        return tasks
-
-    def reset_task(self, task):
-        self._task = task
-        self._transitions = task['transitions']
-        self._rewards_mean = task['rewards_mean']
-
-    def reset(self):
-        # From [1]: "an episode always starts on the first state"
-        self._state = 0
-        observation = np.zeros(self.num_states, dtype=np.float32)
-        observation[self._state] = 1.0
-        self._elapsed_steps = 0
-
-        return observation
-
-    def step(self, action):
-        assert self.action_space.contains(action)
-        mean = self._rewards_mean[self._state, action]
-        reward = self.np_random.normal(mean, 1.0)
-
-        self._state = self.np_random.choice(self.num_states,
-                                            p=self._transitions[self._state,
-                                                                action])
-        observation = np.zeros(self.num_states, dtype=np.float32)
-        observation[self._state] = 1.0
-        self._elapsed_steps += 1
-        if self._elapsed_steps >= self.max_episode_steps:
-            done = True
-        else:
-            done = False
-        return observation, reward, done, {'task': self._task}
+
+  def __init__(
+    self, num_states, num_actions, max_episode_steps, seed, task={}
+  ):
+    super(TabularMDPEnv, self).__init__()
+    self.max_episode_steps = 
max_episode_steps + self.num_states = num_states + self.num_actions = num_actions + + self.action_space = spaces.Discrete(num_actions) + self.observation_space = spaces.Box( + low=0.0, high=1.0, shape=(num_states,), dtype=np.float32 + ) + + self._task = task + self._transitions = task.get( + 'transitions', + np.full( + (num_states, num_actions, num_states), + 1.0 / num_states, + dtype=np.float32 + ) + ) + self._rewards_mean = task.get( + 'rewards_mean', np.zeros((num_states, num_actions), dtype=np.float32) + ) + self._state = 0 + self._elapsed_steps = None + + self.seed(seed) + + def seed(self, seed=None): + self.np_random, seed = seeding.np_random(seed) + return [seed] + + def sample_tasks(self, num_tasks): + transitions = self.np_random.dirichlet( + np.ones(self.num_states), + size=(num_tasks, self.num_states, self.num_actions) + ) + rewards_mean = self.np_random.normal( + 1.0, 1.0, size=(num_tasks, self.num_states, self.num_actions) + ) + tasks = [ + { + 'transitions': transition, + 'rewards_mean': reward_mean + } for (transition, reward_mean) in zip(transitions, rewards_mean) + ] + return tasks + + def reset_task(self, task): + self._task = task + self._transitions = task['transitions'] + self._rewards_mean = task['rewards_mean'] + + def reset(self): + # From [1]: "an episode always starts on the first state" + self._state = 0 + observation = np.zeros(self.num_states, dtype=np.float32) + observation[self._state] = 1.0 + self._elapsed_steps = 0 + + return observation + + def step(self, action): + assert self.action_space.contains(action) + mean = self._rewards_mean[self._state, action] + reward = self.np_random.normal(mean, 1.0) + + self._state = self.np_random.choice( + self.num_states, p=self._transitions[self._state, action] + ) + observation = np.zeros(self.num_states, dtype=np.float32) + observation[self._state] = 1.0 + self._elapsed_steps += 1 + if self._elapsed_steps >= self.max_episode_steps: + done = True + else: + done = False + return observation, reward, done, {'task': self._task} diff --git a/examples/MAML-RL/helpers/__init__.py b/examples/MAML-RL/helpers/__init__.py index 37381f3f..c3fee90d 100644 --- a/examples/MAML-RL/helpers/__init__.py +++ b/examples/MAML-RL/helpers/__init__.py @@ -18,11 +18,13 @@ from gym.envs.registration import register -register('TabularMDP-v0', - entry_point='helpers.Tabular_mdp:TabularMDPEnv', - kwargs={ - 'num_states': 10, - 'num_actions': 5, - 'max_episode_steps': 10, - 'seed': 1 - }) +register( + 'TabularMDP-v0', + entry_point='helpers.Tabular_mdp:TabularMDPEnv', + kwargs={ + 'num_states': 10, + 'num_actions': 5, + 'max_episode_steps': 10, + 'seed': 1 + } +) diff --git a/examples/MAML-RL/helpers/policy.py b/examples/MAML-RL/helpers/policy.py index 5b572a76..54ee3f5c 100644 --- a/examples/MAML-RL/helpers/policy.py +++ b/examples/MAML-RL/helpers/policy.py @@ -22,27 +22,28 @@ class CategoricalMLPPolicy(nn.Module): - """Policy network based on a multi-layer perceptron (MLP), with a + """Policy network based on a multi-layer perceptron (MLP), with a `Categorical` distribution output. This policy network can be used on tasks with discrete action spaces (eg. `TabularMDPEnv`). 
""" - def __init__( - self, - input_size, - output_size, - ): - super(CategoricalMLPPolicy, self).__init__() - self.torso = nn.Sequential( - nn.Linear(input_size, 32), - nn.ReLU(), - nn.Linear(32, 32), - nn.ReLU(), - ) - self.policy_head = nn.Linear(32, output_size) - self.value_head = nn.Linear(32, 1) - def forward(self, inputs, params=None): - embedding = self.torso(inputs) - logits = self.policy_head(embedding) - values = self.value_head(embedding) - return Categorical(logits=logits), values + def __init__( + self, + input_size, + output_size, + ): + super(CategoricalMLPPolicy, self).__init__() + self.torso = nn.Sequential( + nn.Linear(input_size, 32), + nn.ReLU(), + nn.Linear(32, 32), + nn.ReLU(), + ) + self.policy_head = nn.Linear(32, output_size) + self.value_head = nn.Linear(32, 1) + + def forward(self, inputs, params=None): + embedding = self.torso(inputs) + logits = self.policy_head(embedding) + values = self.value_head(embedding) + return Categorical(logits=logits), values diff --git a/examples/MAML-RL/run_MAML.py b/examples/MAML-RL/run_MAML.py index 8d328f08..1507e8bc 100644 --- a/examples/MAML-RL/run_MAML.py +++ b/examples/MAML-RL/run_MAML.py @@ -39,163 +39,173 @@ class Traj(NamedTuple): - obs: np.ndarray - acs: np.ndarray - next_obs: np.ndarray - rews: np.ndarray - gammas: np.ndarray + obs: np.ndarray + acs: np.ndarray + next_obs: np.ndarray + rews: np.ndarray + gammas: np.ndarray def sample_traj(env, task, policy): - env.reset_task(task) - obs_buf = np.zeros(shape=(TRAJ_LEN, TRAJ_NUM, STATE_DIM), dtype=np.float32) - next_obs_buf = np.zeros(shape=(TRAJ_LEN, TRAJ_NUM, STATE_DIM), - dtype=np.float32) - acs_buf = np.zeros(shape=(TRAJ_LEN, TRAJ_NUM), dtype=np.int8) - rews_buf = np.zeros(shape=(TRAJ_LEN, TRAJ_NUM), dtype=np.float32) - gammas_buf = np.zeros(shape=(TRAJ_LEN, TRAJ_NUM), dtype=np.float32) - with torch.no_grad(): - for batch in range(TRAJ_NUM): - ob = env.reset() - for step in range(TRAJ_LEN): - ob_tensor = torch.from_numpy(ob) - pi, _ = policy(ob_tensor) - ac_tensor = pi.sample() - ac = ac_tensor.cpu().numpy() - next_ob, rew, done, info = env.step(ac) - - obs_buf[step][batch] = ob - next_obs_buf[step][batch] = next_ob - acs_buf[step][batch] = ac - rews_buf[step][batch] = rew - gammas_buf[step][batch] = done * GAMMA - ob = next_ob - return Traj(obs=obs_buf, - acs=acs_buf, - next_obs=next_obs_buf, - rews=rews_buf, - gammas=gammas_buf) + env.reset_task(task) + obs_buf = np.zeros(shape=(TRAJ_LEN, TRAJ_NUM, STATE_DIM), dtype=np.float32) + next_obs_buf = np.zeros( + shape=(TRAJ_LEN, TRAJ_NUM, STATE_DIM), dtype=np.float32 + ) + acs_buf = np.zeros(shape=(TRAJ_LEN, TRAJ_NUM), dtype=np.int8) + rews_buf = np.zeros(shape=(TRAJ_LEN, TRAJ_NUM), dtype=np.float32) + gammas_buf = np.zeros(shape=(TRAJ_LEN, TRAJ_NUM), dtype=np.float32) + with torch.no_grad(): + for batch in range(TRAJ_NUM): + ob = env.reset() + for step in range(TRAJ_LEN): + ob_tensor = torch.from_numpy(ob) + pi, _ = policy(ob_tensor) + ac_tensor = pi.sample() + ac = ac_tensor.cpu().numpy() + next_ob, rew, done, info = env.step(ac) + + obs_buf[step][batch] = ob + next_obs_buf[step][batch] = next_ob + acs_buf[step][batch] = ac + rews_buf[step][batch] = rew + gammas_buf[step][batch] = done * GAMMA + ob = next_ob + return Traj( + obs=obs_buf, + acs=acs_buf, + next_obs=next_obs_buf, + rews=rews_buf, + gammas=gammas_buf + ) def a2c_loss(traj, policy, value_coef): - lambdas = np.ones_like(traj.gammas) * LAMBDA - _, next_values = policy(torch.from_numpy(traj.next_obs)) - next_values = torch.squeeze(next_values, 
-1).detach().numpy() - # Work backwards to compute `G_{T-1}`, ..., `G_0`. - returns = [] - g = next_values[-1, :] - for i in reversed(range(next_values.shape[0])): - g = traj.rews[i, :] + traj.gammas[i, :] * \ - ((1 - lambdas[i, :]) * next_values[i, :] + lambdas[i, :] * g) - returns.insert(0, g) - lambda_returns = torch.from_numpy(np.array(returns)) - pi, values = policy(torch.from_numpy(traj.obs)) - log_probs = pi.log_prob(torch.from_numpy(traj.acs)) - advs = lambda_returns - torch.squeeze(values, -1) - action_loss = -(advs.detach() * log_probs).mean() - value_loss = advs.pow(2).mean() - - a2c_loss = action_loss + value_coef * value_loss - return a2c_loss + lambdas = np.ones_like(traj.gammas) * LAMBDA + _, next_values = policy(torch.from_numpy(traj.next_obs)) + next_values = torch.squeeze(next_values, -1).detach().numpy() + # Work backwards to compute `G_{T-1}`, ..., `G_0`. + returns = [] + g = next_values[-1, :] + for i in reversed(range(next_values.shape[0])): + g = traj.rews[i, :] + traj.gammas[i, :] * \ + ((1 - lambdas[i, :]) * next_values[i, :] + lambdas[i, :] * g) + returns.insert(0, g) + lambda_returns = torch.from_numpy(np.array(returns)) + pi, values = policy(torch.from_numpy(traj.obs)) + log_probs = pi.log_prob(torch.from_numpy(traj.acs)) + advs = lambda_returns - torch.squeeze(values, -1) + action_loss = -(advs.detach() * log_probs).mean() + value_loss = advs.pow(2).mean() + + a2c_loss = action_loss + value_coef * value_loss + return a2c_loss def evaluate(env, seed, task_num, policy): - pre_reward_ls = [] - post_reward_ls = [] - inner_opt = TorchOpt.MetaSGD(policy, lr=0.5) - env = gym.make( - 'TabularMDP-v0', - **dict(num_states=STATE_DIM, - num_actions=ACTION_DIM, - max_episode_steps=TRAJ_LEN, - seed=args.seed)) - tasks = env.sample_tasks(num_tasks=task_num) - policy_state_dict = TorchOpt.extract_state_dict(policy) - optim_state_dict = TorchOpt.extract_state_dict(inner_opt) - for idx in range(task_num): - for _ in range(inner_iters): - pre_trajs = sample_traj(env, tasks[idx], policy) - - inner_loss = a2c_loss(pre_trajs, policy, value_coef=0.5) - inner_opt.step(inner_loss) - post_trajs = sample_traj(env, tasks[idx], policy) - - # Logging - pre_reward_ls.append(np.sum(pre_trajs.rews, axis=0).mean()) - post_reward_ls.append(np.sum(post_trajs.rews, axis=0).mean()) - - TorchOpt.recover_state_dict(policy, policy_state_dict) - TorchOpt.recover_state_dict(inner_opt, optim_state_dict) - return pre_reward_ls, post_reward_ls + pre_reward_ls = [] + post_reward_ls = [] + inner_opt = TorchOpt.MetaSGD(policy, lr=0.5) + env = gym.make( + 'TabularMDP-v0', + **dict( + num_states=STATE_DIM, + num_actions=ACTION_DIM, + max_episode_steps=TRAJ_LEN, + seed=args.seed + ) + ) + tasks = env.sample_tasks(num_tasks=task_num) + policy_state_dict = TorchOpt.extract_state_dict(policy) + optim_state_dict = TorchOpt.extract_state_dict(inner_opt) + for idx in range(task_num): + for _ in range(inner_iters): + pre_trajs = sample_traj(env, tasks[idx], policy) + + inner_loss = a2c_loss(pre_trajs, policy, value_coef=0.5) + inner_opt.step(inner_loss) + post_trajs = sample_traj(env, tasks[idx], policy) + + # Logging + pre_reward_ls.append(np.sum(pre_trajs.rews, axis=0).mean()) + post_reward_ls.append(np.sum(post_trajs.rews, axis=0).mean()) + + TorchOpt.recover_state_dict(policy, policy_state_dict) + TorchOpt.recover_state_dict(inner_opt, optim_state_dict) + return pre_reward_ls, post_reward_ls def main(args): - # init training - torch.manual_seed(args.seed) - torch.cuda.manual_seed_all(args.seed) - # Env - env = 
gym.make( - 'TabularMDP-v0', - **dict(num_states=STATE_DIM, - num_actions=ACTION_DIM, - max_episode_steps=TRAJ_LEN, - seed=args.seed)) - # Policy - policy = CategoricalMLPPolicy(input_size=STATE_DIM, output_size=ACTION_DIM) - inner_opt = TorchOpt.MetaSGD(policy, lr=0.5) - outer_opt = optim.Adam(policy.parameters(), lr=1e-3) - train_pre_reward = [] - train_post_reward = [] - test_pre_reward = [] - test_post_reward = [] - - for i in range(outer_iters): - tasks = env.sample_tasks(num_tasks=TASK_NUM) - train_pre_reward_ls = [] - train_post_reward_ls = [] - - outer_opt.zero_grad() - - policy_state_dict = TorchOpt.extract_state_dict(policy) - optim_state_dict = TorchOpt.extract_state_dict(inner_opt) - for idx in range(TASK_NUM): - - for _ in range(inner_iters): - pre_trajs = sample_traj(env, tasks[idx], policy) - inner_loss = a2c_loss(pre_trajs, policy, value_coef=0.5) - inner_opt.step(inner_loss) - post_trajs = sample_traj(env, tasks[idx], policy) - outer_loss = a2c_loss(post_trajs, policy, value_coef=0.5) - outer_loss.backward() - TorchOpt.recover_state_dict(policy, policy_state_dict) - TorchOpt.recover_state_dict(inner_opt, optim_state_dict) - # Logging - train_pre_reward_ls.append(np.sum(pre_trajs.rews, axis=0).mean()) - train_post_reward_ls.append(np.sum(post_trajs.rews, axis=0).mean()) - outer_opt.step() - - test_pre_reward_ls, test_post_reward_ls = evaluate( - env, args.seed, TASK_NUM, policy) - - train_pre_reward.append(sum(train_pre_reward_ls) / TASK_NUM) - train_post_reward.append(sum(train_post_reward_ls) / TASK_NUM) - test_pre_reward.append(sum(test_pre_reward_ls) / TASK_NUM) - test_post_reward.append(sum(test_post_reward_ls) / TASK_NUM) - - print('Train_iters', i) - print("train_pre_reward", sum(train_pre_reward_ls) / TASK_NUM) - print("train_post_reward", sum(train_post_reward_ls) / TASK_NUM) - print("test_pre_reward", sum(test_pre_reward_ls) / TASK_NUM) - print("test_post_reward", sum(test_post_reward_ls) / TASK_NUM) + # init training + torch.manual_seed(args.seed) + torch.cuda.manual_seed_all(args.seed) + # Env + env = gym.make( + 'TabularMDP-v0', + **dict( + num_states=STATE_DIM, + num_actions=ACTION_DIM, + max_episode_steps=TRAJ_LEN, + seed=args.seed + ) + ) + # Policy + policy = CategoricalMLPPolicy(input_size=STATE_DIM, output_size=ACTION_DIM) + inner_opt = TorchOpt.MetaSGD(policy, lr=0.5) + outer_opt = optim.Adam(policy.parameters(), lr=1e-3) + train_pre_reward = [] + train_post_reward = [] + test_pre_reward = [] + test_post_reward = [] + + for i in range(outer_iters): + tasks = env.sample_tasks(num_tasks=TASK_NUM) + train_pre_reward_ls = [] + train_post_reward_ls = [] + + outer_opt.zero_grad() + + policy_state_dict = TorchOpt.extract_state_dict(policy) + optim_state_dict = TorchOpt.extract_state_dict(inner_opt) + for idx in range(TASK_NUM): + + for _ in range(inner_iters): + pre_trajs = sample_traj(env, tasks[idx], policy) + inner_loss = a2c_loss(pre_trajs, policy, value_coef=0.5) + inner_opt.step(inner_loss) + post_trajs = sample_traj(env, tasks[idx], policy) + outer_loss = a2c_loss(post_trajs, policy, value_coef=0.5) + outer_loss.backward() + TorchOpt.recover_state_dict(policy, policy_state_dict) + TorchOpt.recover_state_dict(inner_opt, optim_state_dict) + # Logging + train_pre_reward_ls.append(np.sum(pre_trajs.rews, axis=0).mean()) + train_post_reward_ls.append(np.sum(post_trajs.rews, axis=0).mean()) + outer_opt.step() + + test_pre_reward_ls, test_post_reward_ls = evaluate( + env, args.seed, TASK_NUM, policy + ) + + train_pre_reward.append(sum(train_pre_reward_ls) / 
TASK_NUM) + train_post_reward.append(sum(train_post_reward_ls) / TASK_NUM) + test_pre_reward.append(sum(test_pre_reward_ls) / TASK_NUM) + test_post_reward.append(sum(test_post_reward_ls) / TASK_NUM) + + print('Train_iters', i) + print("train_pre_reward", sum(train_pre_reward_ls) / TASK_NUM) + print("train_post_reward", sum(train_post_reward_ls) / TASK_NUM) + print("test_pre_reward", sum(test_pre_reward_ls) / TASK_NUM) + print("test_post_reward", sum(test_post_reward_ls) / TASK_NUM) if __name__ == "__main__": - parser = argparse.ArgumentParser( - description='Reinforcement learning with ' - 'Model-Agnostic Meta-Learning (MAML) - Train') - parser.add_argument('--seed', - type=int, - default=1, - help='random seed (default: 1)') - args = parser.parse_args() - main(args) + parser = argparse.ArgumentParser( + description='Reinforcement learning with ' + 'Model-Agnostic Meta-Learning (MAML) - Train' + ) + parser.add_argument( + '--seed', type=int, default=1, help='random seed (default: 1)' + ) + args = parser.parse_args() + main(args) diff --git a/examples/MGRL/toy.py b/examples/MGRL/toy.py index a27d177f..5ce5ad1c 100644 --- a/examples/MGRL/toy.py +++ b/examples/MGRL/toy.py @@ -21,62 +21,64 @@ def test_gamma(): - class Rollout: - @staticmethod - def get(): - out = torch.empty(5, 2) - out[:, 0] = torch.randn(5) - out[:, 1] = 0.1 * torch.ones(5) - label = torch.arange(0, 10) - return out.view(10, 1), F.one_hot(label, 10) - - @staticmethod - def rollout(trajectory, gamma): - out = [trajectory[-1]] - for i in reversed(range(9)): - out.append(trajectory[i] + - gamma[i] * out[-1].clone().detach_()) - out.reverse() - return torch.hstack(out).view(10, 1) - - class ValueNetwork(nn.Module): - def __init__(self): - super().__init__() - self.fc = nn.Linear(10, 1) - - def forward(self, x): - return self.fc(x) - - torch.manual_seed(0) - inner_iters = 1 - outer_iters = 10000 - net = ValueNetwork() - inner_optimizer = TorchOpt.MetaSGD(net, lr=5e-1) - gamma = torch.zeros(9, requires_grad=True) - meta_optimizer = TorchOpt.SGD([gamma], lr=5e-1) - net_state = TorchOpt.extract_state_dict(net) - for i in range(outer_iters): - for j in range(inner_iters): - trajectory, state = Rollout.get() - backup = Rollout.rollout(trajectory, torch.sigmoid(gamma)) - pred_value = net(state.float()) - - loss = F.mse_loss(pred_value, backup) - inner_optimizer.step(loss) - - trajectory, state = Rollout.get() - pred_value = net(state.float()) - backup = Rollout.rollout(trajectory, torch.ones_like(gamma)) - - loss = F.mse_loss(pred_value, backup) - meta_optimizer.zero_grad() - loss.backward() - meta_optimizer.step() - TorchOpt.recover_state_dict(net, net_state) - if i % 100 == 0: - with torch.no_grad(): - print(f"epoch {i} | gamma: {torch.sigmoid(gamma)}") + + class Rollout: + + @staticmethod + def get(): + out = torch.empty(5, 2) + out[:, 0] = torch.randn(5) + out[:, 1] = 0.1 * torch.ones(5) + label = torch.arange(0, 10) + return out.view(10, 1), F.one_hot(label, 10) + + @staticmethod + def rollout(trajectory, gamma): + out = [trajectory[-1]] + for i in reversed(range(9)): + out.append(trajectory[i] + gamma[i] * out[-1].clone().detach_()) + out.reverse() + return torch.hstack(out).view(10, 1) + + class ValueNetwork(nn.Module): + + def __init__(self): + super().__init__() + self.fc = nn.Linear(10, 1) + + def forward(self, x): + return self.fc(x) + + torch.manual_seed(0) + inner_iters = 1 + outer_iters = 10000 + net = ValueNetwork() + inner_optimizer = TorchOpt.MetaSGD(net, lr=5e-1) + gamma = torch.zeros(9, requires_grad=True) + 
meta_optimizer = TorchOpt.SGD([gamma], lr=5e-1) + net_state = TorchOpt.extract_state_dict(net) + for i in range(outer_iters): + for j in range(inner_iters): + trajectory, state = Rollout.get() + backup = Rollout.rollout(trajectory, torch.sigmoid(gamma)) + pred_value = net(state.float()) + + loss = F.mse_loss(pred_value, backup) + inner_optimizer.step(loss) + + trajectory, state = Rollout.get() + pred_value = net(state.float()) + backup = Rollout.rollout(trajectory, torch.ones_like(gamma)) + + loss = F.mse_loss(pred_value, backup) + meta_optimizer.zero_grad() + loss.backward() + meta_optimizer.step() + TorchOpt.recover_state_dict(net, net_state) + if i % 100 == 0: + with torch.no_grad(): + print(f"epoch {i} | gamma: {torch.sigmoid(gamma)}") if __name__ == "__main__": - test_gamma() + test_gamma() diff --git a/examples/few-shot/maml-omniglot.py b/examples/few-shot/maml-omniglot.py index b501a3f9..1d942593 100644 --- a/examples/few-shot/maml-omniglot.py +++ b/examples/few-shot/maml-omniglot.py @@ -59,220 +59,221 @@ def main(): - argparser = argparse.ArgumentParser() - argparser.add_argument('--n_way', type=int, help='n way', default=5) - argparser.add_argument('--k_spt', - type=int, - help='k shot for support set', - default=5) - argparser.add_argument('--k_qry', - type=int, - help='k shot for query set', - default=15) - argparser.add_argument('--task_num', - type=int, - help='meta batch size, namely task num', - default=32) - argparser.add_argument('--seed', type=int, help='random seed', default=1) - args = argparser.parse_args() - - torch.manual_seed(args.seed) - if torch.cuda.is_available(): - torch.cuda.manual_seed_all(args.seed) - np.random.seed(args.seed) - rng = np.random.default_rng(args.seed) - - # Set up the Omniglot loader. - device = torch.device('cuda:0') - db = OmniglotNShot( - '/tmp/omniglot-data', - batchsz=args.task_num, - n_way=args.n_way, - k_shot=args.k_spt, - k_query=args.k_qry, - imgsz=28, - rng=rng, - device=device, - ) - - # Create a vanilla PyTorch neural network that will be - # automatically monkey-patched by higher later. - # Before higher, models could *not* be created like this - # and the parameters needed to be manually updated and copied - # for the updates. - net = nn.Sequential(nn.Conv2d(1, 64, 3), - nn.BatchNorm2d(64, momentum=1., affine=True), - nn.ReLU(inplace=False), nn.MaxPool2d(2, 2), - nn.Conv2d(64, 64, 3), - nn.BatchNorm2d(64, momentum=1., affine=True), - nn.ReLU(inplace=False), nn.MaxPool2d(2, 2), - nn.Conv2d(64, 64, 3), - nn.BatchNorm2d(64, momentum=1., affine=True), - nn.ReLU(inplace=False), nn.MaxPool2d(2, 2), - nn.Flatten(), nn.Linear(64, args.n_way)).to(device) - - # We will use Adam to (meta-)optimize the initial parameters - # to be adapted. 
- meta_opt = optim.Adam(net.parameters(), lr=1e-3) - - log = [] - for epoch in range(10): - train(db, net, meta_opt, epoch, log) - test(db, net, epoch, log) - plot(log) + argparser = argparse.ArgumentParser() + argparser.add_argument('--n_way', type=int, help='n way', default=5) + argparser.add_argument( + '--k_spt', type=int, help='k shot for support set', default=5 + ) + argparser.add_argument( + '--k_qry', type=int, help='k shot for query set', default=15 + ) + argparser.add_argument( + '--task_num', + type=int, + help='meta batch size, namely task num', + default=32 + ) + argparser.add_argument('--seed', type=int, help='random seed', default=1) + args = argparser.parse_args() + + torch.manual_seed(args.seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(args.seed) + np.random.seed(args.seed) + rng = np.random.default_rng(args.seed) + + # Set up the Omniglot loader. + device = torch.device('cuda:0') + db = OmniglotNShot( + '/tmp/omniglot-data', + batchsz=args.task_num, + n_way=args.n_way, + k_shot=args.k_spt, + k_query=args.k_qry, + imgsz=28, + rng=rng, + device=device, + ) + + # Create a vanilla PyTorch neural network that will be + # automatically monkey-patched by higher later. + # Before higher, models could *not* be created like this + # and the parameters needed to be manually updated and copied + # for the updates. + net = nn.Sequential( + nn.Conv2d(1, 64, 3), nn.BatchNorm2d(64, momentum=1., affine=True), + nn.ReLU(inplace=False), nn.MaxPool2d(2, 2), nn.Conv2d(64, 64, 3), + nn.BatchNorm2d(64, momentum=1., affine=True), nn.ReLU(inplace=False), + nn.MaxPool2d(2, 2), nn.Conv2d(64, 64, 3), + nn.BatchNorm2d(64, momentum=1., affine=True), nn.ReLU(inplace=False), + nn.MaxPool2d(2, 2), nn.Flatten(), nn.Linear(64, args.n_way) + ).to(device) + + # We will use Adam to (meta-)optimize the initial parameters + # to be adapted. + meta_opt = optim.Adam(net.parameters(), lr=1e-3) + + log = [] + for epoch in range(10): + train(db, net, meta_opt, epoch, log) + test(db, net, epoch, log) + plot(log) def train(db, net, meta_opt, epoch, log): - net.train() - n_train_iter = db.x_train.shape[0] // db.batchsz - inner_opt = TorchOpt.MetaSGD(net, lr=1e-1) - - for batch_idx in range(n_train_iter): - start_time = time.time() - # Sample a batch of support and query images and labels. - x_spt, y_spt, x_qry, y_qry = db.next() - - task_num, setsz, c_, h, w = x_spt.size() - querysz = x_qry.size(1) - - # TODO: Maybe pull this out into a separate module so it - # doesn't have to be duplicated between `train` and `test`? - - # Initialize the inner optimizer to adapt the parameters to - # the support set. - n_inner_iter = 5 - - qry_losses = [] - qry_accs = [] - meta_opt.zero_grad() - - net_state_dict = TorchOpt.extract_state_dict(net) - optim_state_dict = TorchOpt.extract_state_dict(inner_opt) - for i in range(task_num): - # Optimize the likelihood of the support set by taking - # gradient steps w.r.t. the model's parameters. - # This adapts the model's meta-parameters to the task. - # higher is able to automatically keep copies of - # your network's parameters as they are being updated. - for _ in range(n_inner_iter): - spt_logits = net(x_spt[i]) - spt_loss = F.cross_entropy(spt_logits, y_spt[i]) - inner_opt.step(spt_loss) - - # The final set of adapted parameters will induce some - # final loss and accuracy on the query dataset. - # These will be used to update the model's meta-parameters. 
- qry_logits = net(x_qry[i]) - qry_loss = F.cross_entropy(qry_logits, y_qry[i]) - qry_losses.append(qry_loss.detach()) - qry_acc = (qry_logits.argmax(dim=1) - == y_qry[i]).sum().item() / querysz - qry_accs.append(qry_acc) - - # Update the model's meta-parameters to optimize the query - # losses across all of the tasks sampled in this batch. - # This unrolls through the gradient steps. - qry_loss.backward() - - TorchOpt.recover_state_dict(net, net_state_dict) - TorchOpt.recover_state_dict(inner_opt, optim_state_dict) - - meta_opt.step() - qry_losses = sum(qry_losses) / task_num - qry_accs = 100. * sum(qry_accs) / task_num - i = epoch + float(batch_idx) / n_train_iter - iter_time = time.time() - start_time - - print( - f'[Epoch {i:.2f}] Train Loss: {qry_losses:.2f} | Acc: {qry_accs:.2f} | Time: {iter_time:.2f}' - ) - - log.append({ - 'epoch': i, - 'loss': qry_losses, - 'acc': qry_accs, - 'mode': 'train', - 'time': time.time(), - }) + net.train() + n_train_iter = db.x_train.shape[0] // db.batchsz + inner_opt = TorchOpt.MetaSGD(net, lr=1e-1) + for batch_idx in range(n_train_iter): + start_time = time.time() + # Sample a batch of support and query images and labels. + x_spt, y_spt, x_qry, y_qry = db.next() -def test(db, net, epoch, log): - # Crucially in our testing procedure here, we do *not* fine-tune - # the model during testing for simplicity. - # Most research papers using MAML for this task do an extra - # stage of fine-tuning here that should be added if you are - # adapting this code for research. - net.train() - n_test_iter = db.x_test.shape[0] // db.batchsz - inner_opt = TorchOpt.MetaSGD(net, lr=1e-1) + task_num, setsz, c_, h, w = x_spt.size() + querysz = x_qry.size(1) + + # TODO: Maybe pull this out into a separate module so it + # doesn't have to be duplicated between `train` and `test`? + + # Initialize the inner optimizer to adapt the parameters to + # the support set. + n_inner_iter = 5 qry_losses = [] qry_accs = [] + meta_opt.zero_grad() + + net_state_dict = TorchOpt.extract_state_dict(net) + optim_state_dict = TorchOpt.extract_state_dict(inner_opt) + for i in range(task_num): + # Optimize the likelihood of the support set by taking + # gradient steps w.r.t. the model's parameters. + # This adapts the model's meta-parameters to the task. + # higher is able to automatically keep copies of + # your network's parameters as they are being updated. + for _ in range(n_inner_iter): + spt_logits = net(x_spt[i]) + spt_loss = F.cross_entropy(spt_logits, y_spt[i]) + inner_opt.step(spt_loss) + + # The final set of adapted parameters will induce some + # final loss and accuracy on the query dataset. + # These will be used to update the model's meta-parameters. + qry_logits = net(x_qry[i]) + qry_loss = F.cross_entropy(qry_logits, y_qry[i]) + qry_losses.append(qry_loss.detach()) + qry_acc = (qry_logits.argmax(dim=1) == y_qry[i]).sum().item() / querysz + qry_accs.append(qry_acc) + + # Update the model's meta-parameters to optimize the query + # losses across all of the tasks sampled in this batch. + # This unrolls through the gradient steps. + qry_loss.backward() + + TorchOpt.recover_state_dict(net, net_state_dict) + TorchOpt.recover_state_dict(inner_opt, optim_state_dict) + + meta_opt.step() + qry_losses = sum(qry_losses) / task_num + qry_accs = 100. 
* sum(qry_accs) / task_num + i = epoch + float(batch_idx) / n_train_iter + iter_time = time.time() - start_time - for batch_idx in range(n_test_iter): - x_spt, y_spt, x_qry, y_qry = db.next('test') - - task_num, setsz, c_, h, w = x_spt.size() - querysz = x_qry.size(1) - - # TODO: Maybe pull this out into a separate module so it - # doesn't have to be duplicated between `train` and `test`? - n_inner_iter = 5 - - net_state_dict = TorchOpt.extract_state_dict(net) - optim_state_dict = TorchOpt.extract_state_dict(inner_opt) - for i in range(task_num): - # Optimize the likelihood of the support set by taking - # gradient steps w.r.t. the model's parameters. - # This adapts the model's meta-parameters to the task. - for _ in range(n_inner_iter): - spt_logits = net(x_spt[i]) - spt_loss = F.cross_entropy(spt_logits, y_spt[i]) - inner_opt.step(spt_loss) - - # The query loss and acc induced by these parameters. - qry_logits = net(x_qry[i]).detach() - qry_loss = F.cross_entropy(qry_logits, y_qry[i], reduction='none') - qry_losses.append(qry_loss.detach()) - qry_accs.append((qry_logits.argmax(dim=1) == y_qry[i]).detach()) - - TorchOpt.recover_state_dict(net, net_state_dict) - TorchOpt.recover_state_dict(inner_opt, optim_state_dict) - - qry_losses = torch.cat(qry_losses).mean().item() - qry_accs = 100. * torch.cat(qry_accs).float().mean().item() print( - f'[Epoch {epoch+1:.2f}] Test Loss: {qry_losses:.2f} | Acc: {qry_accs:.2f}' + f'[Epoch {i:.2f}] Train Loss: {qry_losses:.2f} | Acc: {qry_accs:.2f} | Time: {iter_time:.2f}' ) - log.append({ - 'epoch': epoch + 1, + + log.append( + { + 'epoch': i, 'loss': qry_losses, 'acc': qry_accs, - 'mode': 'test', + 'mode': 'train', 'time': time.time(), - }) + } + ) + + +def test(db, net, epoch, log): + # Crucially in our testing procedure here, we do *not* fine-tune + # the model during testing for simplicity. + # Most research papers using MAML for this task do an extra + # stage of fine-tuning here that should be added if you are + # adapting this code for research. + net.train() + n_test_iter = db.x_test.shape[0] // db.batchsz + inner_opt = TorchOpt.MetaSGD(net, lr=1e-1) + + qry_losses = [] + qry_accs = [] + + for batch_idx in range(n_test_iter): + x_spt, y_spt, x_qry, y_qry = db.next('test') + + task_num, setsz, c_, h, w = x_spt.size() + querysz = x_qry.size(1) + + # TODO: Maybe pull this out into a separate module so it + # doesn't have to be duplicated between `train` and `test`? + n_inner_iter = 5 + + net_state_dict = TorchOpt.extract_state_dict(net) + optim_state_dict = TorchOpt.extract_state_dict(inner_opt) + for i in range(task_num): + # Optimize the likelihood of the support set by taking + # gradient steps w.r.t. the model's parameters. + # This adapts the model's meta-parameters to the task. + for _ in range(n_inner_iter): + spt_logits = net(x_spt[i]) + spt_loss = F.cross_entropy(spt_logits, y_spt[i]) + inner_opt.step(spt_loss) + + # The query loss and acc induced by these parameters. + qry_logits = net(x_qry[i]).detach() + qry_loss = F.cross_entropy(qry_logits, y_qry[i], reduction='none') + qry_losses.append(qry_loss.detach()) + qry_accs.append((qry_logits.argmax(dim=1) == y_qry[i]).detach()) + + TorchOpt.recover_state_dict(net, net_state_dict) + TorchOpt.recover_state_dict(inner_opt, optim_state_dict) + + qry_losses = torch.cat(qry_losses).mean().item() + qry_accs = 100. 
* torch.cat(qry_accs).float().mean().item() + print( + f'[Epoch {epoch+1:.2f}] Test Loss: {qry_losses:.2f} | Acc: {qry_accs:.2f}' + ) + log.append( + { + 'epoch': epoch + 1, + 'loss': qry_losses, + 'acc': qry_accs, + 'mode': 'test', + 'time': time.time(), + } + ) def plot(log): - # Generally you should pull your plotting code out of your training - # script but we are doing it here for brevity. - df = pd.DataFrame(log) - - fig, ax = plt.subplots(figsize=(6, 4)) - train_df = df[df['mode'] == 'train'] - test_df = df[df['mode'] == 'test'] - ax.plot(train_df['epoch'], train_df['acc'], label='Train') - ax.plot(test_df['epoch'], test_df['acc'], label='Test') - ax.set_xlabel('Epoch') - ax.set_ylabel('Accuracy') - ax.set_ylim(70, 100) - fig.legend(ncol=2, loc='lower right') - fig.tight_layout() - fname = 'maml-accs.png' - print(f'--- Plotting accuracy to {fname}') - fig.savefig(fname) - plt.close(fig) + # Generally you should pull your plotting code out of your training + # script but we are doing it here for brevity. + df = pd.DataFrame(log) + + fig, ax = plt.subplots(figsize=(6, 4)) + train_df = df[df['mode'] == 'train'] + test_df = df[df['mode'] == 'test'] + ax.plot(train_df['epoch'], train_df['acc'], label='Train') + ax.plot(test_df['epoch'], test_df['acc'], label='Test') + ax.set_xlabel('Epoch') + ax.set_ylabel('Accuracy') + ax.set_ylim(70, 100) + fig.legend(ncol=2, loc='lower right') + fig.tight_layout() + fname = 'maml-accs.png' + print(f'--- Plotting accuracy to {fname}') + fig.savefig(fname) + plt.close(fig) if __name__ == '__main__': - main() + main() diff --git a/examples/few-shot/support/omniglot_loaders.py b/examples/few-shot/support/omniglot_loaders.py index 2dc92b32..9aa9f6ed 100644 --- a/examples/few-shot/support/omniglot_loaders.py +++ b/examples/few-shot/support/omniglot_loaders.py @@ -30,15 +30,15 @@ class Omniglot(data.Dataset): - urls = [ - 'https://github.com/brendenlake/omniglot/raw/master/python/images_background.zip', - 'https://github.com/brendenlake/omniglot/raw/master/python/images_evaluation.zip' - ] - raw_folder = 'raw' - processed_folder = 'processed' - training_file = 'training.pt' - test_file = 'test.pt' - ''' + urls = [ + 'https://github.com/brendenlake/omniglot/raw/master/python/images_background.zip', + 'https://github.com/brendenlake/omniglot/raw/master/python/images_evaluation.zip' + ] + raw_folder = 'raw' + processed_folder = 'processed' + training_file = 'training.pt' + test_file = 'test.pt' + ''' The items are (filename,category). The index of all the categories can be found in self.idx_classes Args: - root: the directory where the dataset will be stored @@ -46,110 +46,106 @@ class Omniglot(data.Dataset): - target_transform: how to transform the target - download: need to download the dataset ''' - def __init__(self, - root, - transform=None, - target_transform=None, - download=False): - self.root = root - self.transform = transform - self.target_transform = target_transform - - if not self._check_exists(): - if download: - self.download() - else: - raise RuntimeError('Dataset not found.' 
+ - ' You can use download=True to download it') - - self.all_items = find_classes( - os.path.join(self.root, self.processed_folder)) - self.idx_classes = index_classes(self.all_items) - - def __getitem__(self, index): - filename = self.all_items[index][0] - img = str.join('/', [self.all_items[index][2], filename]) - - target = self.idx_classes[self.all_items[index][1]] - if self.transform is not None: - img = self.transform(img) - if self.target_transform is not None: - target = self.target_transform(target) - - return img, target - - def __len__(self): - return len(self.all_items) - - def _check_exists(self): - return os.path.exists(os.path.join(self.root, self.processed_folder, "images_evaluation")) and \ - os.path.exists(os.path.join(self.root, self.processed_folder, "images_background")) - - def download(self): - import zipfile - - from six.moves import urllib - - if self._check_exists(): - return - - # download files - try: - os.makedirs(os.path.join(self.root, self.raw_folder)) - os.makedirs(os.path.join(self.root, self.processed_folder)) - except OSError as e: - if e.errno == errno.EEXIST: - pass - else: - raise - - for url in self.urls: - print('== Downloading ' + url) - data = urllib.request.urlopen(url) - filename = url.rpartition('/')[2] - file_path = os.path.join(self.root, self.raw_folder, filename) - with open(file_path, 'wb') as f: - f.write(data.read()) - file_processed = os.path.join(self.root, self.processed_folder) - print("== Unzip from " + file_path + " to " + file_processed) - zip_ref = zipfile.ZipFile(file_path, 'r') - zip_ref.extractall(file_processed) - zip_ref.close() - print("Download finished.") + + def __init__( + self, root, transform=None, target_transform=None, download=False + ): + self.root = root + self.transform = transform + self.target_transform = target_transform + + if not self._check_exists(): + if download: + self.download() + else: + raise RuntimeError( + 'Dataset not found.' 
+ ' You can use download=True to download it' + ) + + self.all_items = find_classes( + os.path.join(self.root, self.processed_folder) + ) + self.idx_classes = index_classes(self.all_items) + + def __getitem__(self, index): + filename = self.all_items[index][0] + img = str.join('/', [self.all_items[index][2], filename]) + + target = self.idx_classes[self.all_items[index][1]] + if self.transform is not None: + img = self.transform(img) + if self.target_transform is not None: + target = self.target_transform(target) + + return img, target + + def __len__(self): + return len(self.all_items) + + def _check_exists(self): + return os.path.exists(os.path.join(self.root, self.processed_folder, "images_evaluation")) and \ + os.path.exists(os.path.join(self.root, self.processed_folder, "images_background")) + + def download(self): + import zipfile + + from six.moves import urllib + + if self._check_exists(): + return + + # download files + try: + os.makedirs(os.path.join(self.root, self.raw_folder)) + os.makedirs(os.path.join(self.root, self.processed_folder)) + except OSError as e: + if e.errno == errno.EEXIST: + pass + else: + raise + + for url in self.urls: + print('== Downloading ' + url) + data = urllib.request.urlopen(url) + filename = url.rpartition('/')[2] + file_path = os.path.join(self.root, self.raw_folder, filename) + with open(file_path, 'wb') as f: + f.write(data.read()) + file_processed = os.path.join(self.root, self.processed_folder) + print("== Unzip from " + file_path + " to " + file_processed) + zip_ref = zipfile.ZipFile(file_path, 'r') + zip_ref.extractall(file_processed) + zip_ref.close() + print("Download finished.") def find_classes(root_dir): - retour = [] - for (root, dirs, files) in os.walk(root_dir): - for f in files: - if (f.endswith("png")): - r = root.split('/') - lr = len(r) - retour.append((f, r[lr - 2] + "/" + r[lr - 1], root)) - print("== Found %d items " % len(retour)) - return retour + retour = [] + for (root, dirs, files) in os.walk(root_dir): + for f in files: + if (f.endswith("png")): + r = root.split('/') + lr = len(r) + retour.append((f, r[lr - 2] + "/" + r[lr - 1], root)) + print("== Found %d items " % len(retour)) + return retour def index_classes(items): - idx = {} - for i in items: - if i[1] not in idx: - idx[i[1]] = len(idx) - print("== Found %d classes" % len(idx)) - return idx + idx = {} + for i in items: + if i[1] not in idx: + idx[i[1]] = len(idx) + print("== Found %d classes" % len(idx)) + return idx class OmniglotNShot: - def __init__(self, - root, - batchsz, - n_way, - k_shot, - k_query, - imgsz, - rng, - device=None): - """ + + def __init__( + self, root, batchsz, n_way, k_shot, k_query, imgsz, rng, device=None + ): + """ Different from mnistNShot, the :param root: :param batchsz: task num @@ -159,180 +155,178 @@ def __init__(self, :param imgsz: """ - self.resize = imgsz - self.rng = rng - self.device = device - if not os.path.isfile(os.path.join(root, 'omniglot.npy')): - # if root/data.npy does not exist, just download it - self.x = Omniglot( - root, - download=True, - transform=transforms.Compose([ - lambda x: Image.open(x).convert('L'), lambda x: x.resize( - (imgsz, imgsz)), - lambda x: np.reshape(x, (imgsz, imgsz, 1)), - lambda x: np.transpose(x, [2, 0, 1]), lambda x: x / 255. - ]), - ) - - temp = dict( - ) # {label:img1, img2..., 20 imgs, label2: img1, img2,... 
in total, 1623 label}
-            for (img, label) in self.x:
-                if label in temp.keys():
-                    temp[label].append(img)
-                else:
-                    temp[label] = [img]
-
-            self.x = []
-            for label, imgs in temp.items(
-            ):  # labels info deserted , each label contains 20imgs
-                self.x.append(np.array(imgs))
-
-            # as different class may have different number of imgs
-            self.x = np.array(self.x).astype(
-                np.float)  # [[20 imgs],..., 1623 classes in total]
-            # each character contains 20 imgs
-            print('data shape:', self.x.shape)  # [1623, 20, 84, 84, 1]
-            temp = []  # Free memory
-            # save all dataset into npy file.
-            np.save(os.path.join(root, 'omniglot.npy'), self.x)
-            print('write into omniglot.npy.')
+    self.resize = imgsz
+    self.rng = rng
+    self.device = device
+    if not os.path.isfile(os.path.join(root, 'omniglot.npy')):
+      # if root/omniglot.npy does not exist, download and preprocess it
+      self.x = Omniglot(
+        root,
+        download=True,
+        transform=transforms.Compose(
+          [
+            lambda x: Image.open(x).convert('L'),
+            lambda x: x.resize((imgsz, imgsz)),
+            lambda x: np.reshape(x, (imgsz, imgsz, 1)),
+            lambda x: np.transpose(x, [2, 0, 1]), lambda x: x / 255.
+          ]
+        ),
+      )
+
+      temp = dict(
+      )  # {label: [img1, img2, ..., 20 imgs], ...}; 1623 labels in total
+      for (img, label) in self.x:
+        if label in temp.keys():
+          temp[label].append(img)
+        else:
-            # if data.npy exists, just load it.
-            self.x = np.load(os.path.join(root, 'omniglot.npy'))
-            print('load from omniglot.npy.')
-
-        # [1623, 20, 84, 84, 1]
-        # TODO: can not shuffle here, we must keep training and test set distinct!
-        self.x_train, self.x_test = self.x[:1200], self.x[1200:]
-
-        # self.normalization()
-
-        self.batchsz = batchsz
-        self.n_cls = self.x.shape[0]  # 1623
-        self.n_way = n_way  # n way
-        self.k_shot = k_shot  # k shot
-        self.k_query = k_query  # k query
-        assert (k_shot + k_query) <= 20
-
-        # save pointer of current read batch in total cache
-        self.indexes = {"train": 0, "test": 0}
-        self.datasets = {
-            "train": self.x_train,
-            "test": self.x_test
-        }  # original data cached
-        print("DB: train", self.x_train.shape, "test", self.x_test.shape)
-
-        self.datasets_cache = {
-            "train": self.load_data_cache(
-                self.datasets["train"]),  # current epoch data cached
-            "test": self.load_data_cache(self.datasets["test"])
-        }
-
-    def normalization(self):
-        """
+          temp[label] = [img]
+
+      self.x = []
+      for label, imgs in temp.items(
+      ):  # label info is discarded; each label contains 20 imgs
+        self.x.append(np.array(imgs))
+
+      # as different classes may have different numbers of imgs
+      self.x = np.array(self.x).astype(
+        np.float
+      )  # [[20 imgs], ..., 1623 classes in total]
+      # each character contains 20 imgs
+      print('data shape:', self.x.shape)  # [1623, 20, 84, 84, 1]
+      temp = []  # Free memory
+      # save the whole dataset into one npy file.
+      np.save(os.path.join(root, 'omniglot.npy'), self.x)
+      print('write into omniglot.npy.')
+    else:
+      # if omniglot.npy exists, just load it.
+      self.x = np.load(os.path.join(root, 'omniglot.npy'))
+      print('load from omniglot.npy.')
+
+    # [1623, 20, 84, 84, 1]
+    # TODO: cannot shuffle here, we must keep the training and test sets distinct!
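+    # The slice below keeps the first 1200 of the 1623 character classes
+    # for meta-training and the remaining 423 for meta-testing, so the
+    # two splits share no classes.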
+ self.x_train, self.x_test = self.x[:1200], self.x[1200:] + + # self.normalization() + + self.batchsz = batchsz + self.n_cls = self.x.shape[0] # 1623 + self.n_way = n_way # n way + self.k_shot = k_shot # k shot + self.k_query = k_query # k query + assert (k_shot + k_query) <= 20 + + # save pointer of current read batch in total cache + self.indexes = {"train": 0, "test": 0} + self.datasets = { + "train": self.x_train, + "test": self.x_test + } # original data cached + print("DB: train", self.x_train.shape, "test", self.x_test.shape) + + self.datasets_cache = { + "train": self.load_data_cache(self.datasets["train"] + ), # current epoch data cached + "test": self.load_data_cache(self.datasets["test"]) + } + + def normalization(self): + """ Normalizes our data, to have a mean of 0 and sdt of 1 """ - self.mean = np.mean(self.x_train) - self.std = np.std(self.x_train) - self.max = np.max(self.x_train) - self.min = np.min(self.x_train) - # print("before norm:", "mean", self.mean, "max", self.max, "min", self.min, "std", self.std) - self.x_train = (self.x_train - self.mean) / self.std - self.x_test = (self.x_test - self.mean) / self.std - - self.mean = np.mean(self.x_train) - self.std = np.std(self.x_train) - self.max = np.max(self.x_train) - self.min = np.min(self.x_train) - - # print("after norm:", "mean", self.mean, "max", self.max, "min", self.min, "std", self.std) - - def load_data_cache(self, data_pack): - """ + self.mean = np.mean(self.x_train) + self.std = np.std(self.x_train) + self.max = np.max(self.x_train) + self.min = np.min(self.x_train) + # print("before norm:", "mean", self.mean, "max", self.max, "min", self.min, "std", self.std) + self.x_train = (self.x_train - self.mean) / self.std + self.x_test = (self.x_test - self.mean) / self.std + + self.mean = np.mean(self.x_train) + self.std = np.std(self.x_train) + self.max = np.max(self.x_train) + self.min = np.min(self.x_train) + + # print("after norm:", "mean", self.mean, "max", self.max, "min", self.min, "std", self.std) + + def load_data_cache(self, data_pack): + """ Collects several batches data for N-shot learning :param data_pack: [cls_num, 20, 84, 84, 1] :return: A list with [support_set_x, support_set_y, target_x, target_y] ready to be fed to our networks """ - # take 5 way 1 shot as example: 5 * 1 - setsz = self.k_shot * self.n_way - querysz = self.k_query * self.n_way - data_cache = [] - - # print('preload next 50 caches of batchsz of batch.') - for sample in range(10): # num of episodes - - x_spts, y_spts, x_qrys, y_qrys = [], [], [], [] - for i in range(self.batchsz): # one batch means one set - - x_spt, y_spt, x_qry, y_qry = [], [], [], [] - selected_cls = self.rng.choice(data_pack.shape[0], self.n_way, - False) - - for j, cur_class in enumerate(selected_cls): - - selected_img = self.rng.choice(20, - self.k_shot + self.k_query, - False) - - # meta-training and meta-test - x_spt.append( - data_pack[cur_class][selected_img[:self.k_shot]]) - x_qry.append( - data_pack[cur_class][selected_img[self.k_shot:]]) - y_spt.append([j for _ in range(self.k_shot)]) - y_qry.append([j for _ in range(self.k_query)]) - - # shuffle inside a batch - perm = self.rng.permutation(self.n_way * self.k_shot) - x_spt = np.array(x_spt).reshape(self.n_way * self.k_shot, 1, - self.resize, self.resize)[perm] - y_spt = np.array(y_spt).reshape(self.n_way * self.k_shot)[perm] - perm = self.rng.permutation(self.n_way * self.k_query) - x_qry = np.array(x_qry).reshape(self.n_way * self.k_query, 1, - self.resize, self.resize)[perm] - y_qry = 
np.array(y_qry).reshape(self.n_way * - self.k_query)[perm] - - # append [sptsz, 1, 84, 84] => [b, setsz, 1, 84, 84] - x_spts.append(x_spt) - y_spts.append(y_spt) - x_qrys.append(x_qry) - y_qrys.append(y_qry) - - # [b, setsz, 1, 84, 84] - x_spts = np.array(x_spts).astype(np.float32).reshape( - self.batchsz, setsz, 1, self.resize, self.resize) - y_spts = np.array(y_spts).astype(np.int).reshape( - self.batchsz, setsz) - # [b, qrysz, 1, 84, 84] - x_qrys = np.array(x_qrys).astype(np.float32).reshape( - self.batchsz, querysz, 1, self.resize, self.resize) - y_qrys = np.array(y_qrys).astype(np.int).reshape( - self.batchsz, querysz) - - x_spts, y_spts, x_qrys, y_qrys = [ - torch.from_numpy(z).to(self.device) - for z in [x_spts, y_spts, x_qrys, y_qrys] - ] - - data_cache.append([x_spts, y_spts, x_qrys, y_qrys]) - - return data_cache - - def next(self, mode='train'): - """ + # take 5 way 1 shot as example: 5 * 1 + setsz = self.k_shot * self.n_way + querysz = self.k_query * self.n_way + data_cache = [] + + # print('preload next 50 caches of batchsz of batch.') + for sample in range(10): # num of episodes + + x_spts, y_spts, x_qrys, y_qrys = [], [], [], [] + for i in range(self.batchsz): # one batch means one set + + x_spt, y_spt, x_qry, y_qry = [], [], [], [] + selected_cls = self.rng.choice(data_pack.shape[0], self.n_way, False) + + for j, cur_class in enumerate(selected_cls): + + selected_img = self.rng.choice(20, self.k_shot + self.k_query, False) + + # meta-training and meta-test + x_spt.append(data_pack[cur_class][selected_img[:self.k_shot]]) + x_qry.append(data_pack[cur_class][selected_img[self.k_shot:]]) + y_spt.append([j for _ in range(self.k_shot)]) + y_qry.append([j for _ in range(self.k_query)]) + + # shuffle inside a batch + perm = self.rng.permutation(self.n_way * self.k_shot) + x_spt = np.array(x_spt).reshape( + self.n_way * self.k_shot, 1, self.resize, self.resize + )[perm] + y_spt = np.array(y_spt).reshape(self.n_way * self.k_shot)[perm] + perm = self.rng.permutation(self.n_way * self.k_query) + x_qry = np.array(x_qry).reshape( + self.n_way * self.k_query, 1, self.resize, self.resize + )[perm] + y_qry = np.array(y_qry).reshape(self.n_way * self.k_query)[perm] + + # append [sptsz, 1, 84, 84] => [b, setsz, 1, 84, 84] + x_spts.append(x_spt) + y_spts.append(y_spt) + x_qrys.append(x_qry) + y_qrys.append(y_qry) + + # [b, setsz, 1, 84, 84] + x_spts = np.array(x_spts).astype( + np.float32 + ).reshape(self.batchsz, setsz, 1, self.resize, self.resize) + y_spts = np.array(y_spts).astype(np.int).reshape(self.batchsz, setsz) + # [b, qrysz, 1, 84, 84] + x_qrys = np.array(x_qrys).astype( + np.float32 + ).reshape(self.batchsz, querysz, 1, self.resize, self.resize) + y_qrys = np.array(y_qrys).astype(np.int).reshape(self.batchsz, querysz) + + x_spts, y_spts, x_qrys, y_qrys = [ + torch.from_numpy(z).to(self.device) + for z in [x_spts, y_spts, x_qrys, y_qrys] + ] + + data_cache.append([x_spts, y_spts, x_qrys, y_qrys]) + + return data_cache + + def next(self, mode='train'): + """ Gets next batch from the dataset with name. 
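        (When the cached episodes for a split are exhausted, next() lazily
        rebuilds the cache via load_data_cache.)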
:param mode: The name of the split (one of "train", "val", "test")
        :return:
        """
-        # update cache if indexes is larger cached num
-        if self.indexes[mode] >= len(self.datasets_cache[mode]):
-            self.indexes[mode] = 0
-            self.datasets_cache[mode] = self.load_data_cache(
-                self.datasets[mode])
+    # rebuild the cache once the read index runs past the cached batches
+    if self.indexes[mode] >= len(self.datasets_cache[mode]):
+      self.indexes[mode] = 0
+      self.datasets_cache[mode] = self.load_data_cache(self.datasets[mode])

-        next_batch = self.datasets_cache[mode][self.indexes[mode]]
-        self.indexes[mode] += 1
+    next_batch = self.datasets_cache[mode][self.indexes[mode]]
+    self.indexes[mode] += 1

-        return next_batch
+    return next_batch
diff --git a/examples/visualize.py b/examples/visualize.py
index 03a58c24..4e7d2684 100644
--- a/examples/visualize.py
+++ b/examples/visualize.py
@@ -22,61 +22,65 @@


 class Net(nn.Module):
-    def __init__(self, dim):
-        super().__init__()
-        self.fc = nn.Linear(dim, 1)
-    def forward(self, x, meta_param):
-        return self.fc(x) + meta_param
+
+  def __init__(self, dim):
+    super().__init__()
+    self.fc = nn.Linear(dim, 1)
+
+  def forward(self, x, meta_param):
+    return self.fc(x) + meta_param


 def draw_torchviz():
-    net = Net(dim).cuda()
-    optimizer = TorchOpt.MetaAdam(net, lr=1e-3, use_accelerated_op=False)
-    meta_param = torch.tensor(1., requires_grad=True)
+  net = Net(dim).cuda()
+  optimizer = TorchOpt.MetaAdam(net, lr=1e-3, use_accelerated_op=False)
+  meta_param = torch.tensor(1., requires_grad=True)

-    xs = torch.ones(batch_size, dim).cuda()
+  xs = torch.ones(batch_size, dim).cuda()

-    pred = net(xs, meta_param)
-    loss = F.mse_loss(pred, torch.ones_like(pred))
-    optimizer.step(loss)
+  pred = net(xs, meta_param)
+  loss = F.mse_loss(pred, torch.ones_like(pred))
+  optimizer.step(loss)

-    pred = net(xs, meta_param)
-    loss = F.mse_loss(pred, torch.ones_like(pred))
-    # draw computation graph
-    torchviz.make_dot(loss).render("torchviz_graph", format="svg")
+  pred = net(xs, meta_param)
+  loss = F.mse_loss(pred, torch.ones_like(pred))
+  # draw computation graph
+  torchviz.make_dot(loss).render("torchviz_graph", format="svg")


 def draw_TorchOpt():
-    net = Net(dim).cuda()
-    optimizer = TorchOpt.MetaAdam(net, lr=1e-3, use_accelerated_op=True)
-    meta_param = torch.tensor(1., requires_grad=True)
-
-    xs = torch.ones(batch_size, dim).cuda()
-
-    pred = net(xs, meta_param)
-    loss = F.mse_loss(pred, torch.ones_like(pred))
-    # set enable_visual
-    net_state_0 = TorchOpt.extract_state_dict(net,
-                                              enable_visual=True,
-                                              visual_prefix='step0.')
-    optimizer.step(loss)
-    # set enable_visual
-    net_state_1 = TorchOpt.extract_state_dict(net,
-                                              enable_visual=True,
-                                              visual_prefix='step1.')
-
-    pred = net(xs, meta_param)
-    loss = F.mse_loss(pred, torch.ones_like(pred))
-    # draw computation graph
-    TorchOpt.visual.make_dot(
-        loss, [net_state_0, net_state_1, {
-            meta_param: "meta_param"
-        }]).render("TorchOpt_graph", format="svg")
+  net = Net(dim).cuda()
+  optimizer = TorchOpt.MetaAdam(net, lr=1e-3, use_accelerated_op=True)
+  meta_param = torch.tensor(1., requires_grad=True)
+
+  xs = torch.ones(batch_size, dim).cuda()
+
+  pred = net(xs, meta_param)
+  loss = F.mse_loss(pred, torch.ones_like(pred))
+  # set enable_visual
+  net_state_0 = TorchOpt.extract_state_dict(
+    net, enable_visual=True, visual_prefix='step0.'
+  )
+  optimizer.step(loss)
+  # set enable_visual
+  net_state_1 = TorchOpt.extract_state_dict(
+    net, enable_visual=True, visual_prefix='step1.'
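+    # enable_visual tags this snapshot so that TorchOpt.visual.make_dot
+    # below can label the captured parameters under the 'step1.' prefix.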
+ ) + + pred = net(xs, meta_param) + loss = F.mse_loss(pred, torch.ones_like(pred)) + # draw computation graph + TorchOpt.visual.make_dot( + loss, [net_state_0, net_state_1, { + meta_param: "meta_param" + }] + ).render( + "TorchOpt_graph", format="svg" + ) if __name__ == '__main__': - dim = 5 - batch_size = 2 - draw_torchviz() - draw_TorchOpt() + dim = 5 + batch_size = 2 + draw_torchviz() + draw_TorchOpt() diff --git a/include/adam_op/adam_op.h b/include/adam_op/adam_op.h index 7834ed0b..3499a3e9 100644 --- a/include/adam_op/adam_op.h +++ b/include/adam_op/adam_op.h @@ -18,11 +18,12 @@ #include -#include "common.h" +#include "adam_op/common.h" namespace TorchOpt { -TensorArray<3> adamForwardInplace(torch::Tensor& updates, torch::Tensor& mu, - torch::Tensor& nu, const float b1, +TensorArray<3> adamForwardInplace(const torch::Tensor& updates, + const torch::Tensor& mu, + const torch::Tensor& nu, const float b1, const float b2, const float eps, const float eps_root, const int count); diff --git a/include/adam_op/adam_op_impl.cuh b/include/adam_op/adam_op_impl.cuh index 8e4d8777..9e37df1b 100644 --- a/include/adam_op/adam_op_impl.cuh +++ b/include/adam_op/adam_op_impl.cuh @@ -18,11 +18,12 @@ #include -#include "common.h" +#include "adam_op/common.h" namespace TorchOpt { -TensorArray<3> adamForwardInplaceCUDA(torch::Tensor &updates, torch::Tensor &mu, - torch::Tensor &nu, const float b1, +TensorArray<3> adamForwardInplaceCUDA(const torch::Tensor &updates, + const torch::Tensor &mu, + const torch::Tensor &nu, const float b1, const float b2, const float eps, const float eps_root, const int count); diff --git a/include/adam_op/adam_op_impl.h b/include/adam_op/adam_op_impl.h index 1bf99046..96393d16 100644 --- a/include/adam_op/adam_op_impl.h +++ b/include/adam_op/adam_op_impl.h @@ -18,11 +18,12 @@ #include -#include "common.h" +#include "adam_op/common.h" namespace TorchOpt { -TensorArray<3> adamForwardInplaceCPU(torch::Tensor& updates, torch::Tensor& mu, - torch::Tensor& nu, const float b1, +TensorArray<3> adamForwardInplaceCPU(const torch::Tensor& updates, + const torch::Tensor& mu, + const torch::Tensor& nu, const float b1, const float b2, const float eps, const float eps_root, const int count); diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 00000000..c1893e6b --- /dev/null +++ b/setup.cfg @@ -0,0 +1,43 @@ +[yapf] +based_on_style = yapf +spaces_before_comment = 2 +dedent_closing_brackets = true +column_limit = 79 +continuation_indent_width = 2 + +[flake8] +exclude = + .git +indent_size = 2 + +[pydocstyle] +convention = google + +[isort] +profile = black +multi_line_output = 3 +indent = 2 +line_length = 79 + +[mypy] +allow_redefinition = True +check_untyped_defs = True +disallow_incomplete_defs = False +disallow_untyped_defs = False +ignore_missing_imports = True +no_implicit_optional = True +pretty = True +show_error_codes = True +show_error_context = True +show_traceback = True +strict_equality = True +strict_optional = True +warn_no_return = True +warn_redundant_casts = True +warn_unreachable = True +warn_unused_configs = True +warn_unused_ignores = True + + +[doc8] +max-line-length = 200 \ No newline at end of file diff --git a/setup.py b/setup.py index ea627c34..9c201878 100644 --- a/setup.py +++ b/setup.py @@ -8,110 +8,113 @@ class MyBuild(build_ext): - def run(self): - self.build_cmake() - - def copy(self, build_temp): - from distutils.file_util import copy_file - cwd = str(pathlib.Path().absolute()) - src = os.path.join('.', build_temp, 'src') - ops = os.listdir(src) - for 
op in ops: - op_path = os.path.join(src, op) - if not os.path.isdir(op_path): - continue - files = os.listdir(op_path) - for file in files: - if file.split('.')[-1] == 'so': - copy_file(os.path.join(op_path, file), - os.path.join(cwd, 'TorchOpt', '_lib')) - - def build_cmake(self): - cwd = pathlib.Path().absolute() - - build_temp = f"{pathlib.Path(self.build_temp)}" - os.makedirs(build_temp, exist_ok=True) - - config = "Debug" if self.debug else "Release" - - PYTHON_INCLUDE_DIR = "" - for path in self.include_dirs: - PYTHON_INCLUDE_DIR += path + ';' - - TORCH_INCLUDE_PATH = "" - for path in cpp_extension.include_paths(): - TORCH_INCLUDE_PATH += path + ';' - - TORCH_LIBRARY_PATH = "" - for path in cpp_extension.library_paths(): - TORCH_LIBRARY_PATH += path + ';' - - cmake_args = [ - "-DPYTHON_INCLUDE_DIR=" + PYTHON_INCLUDE_DIR, - "-DTORCH_INCLUDE_PATH=" + TORCH_INCLUDE_PATH, - "-DTORCH_LIBRARY_PATH=" + TORCH_LIBRARY_PATH, - "-DCMAKE_BUILD_TYPE=" + config - ] - - build_args = ["--config", config, "--", "-j4"] - - os.chdir(build_temp) - self.spawn(["cmake", f"{str(cwd)}"] + cmake_args) - if not self.dry_run: - self.spawn(["cmake", "--build", "."] + build_args) - os.chdir(str(cwd)) - self.copy(build_temp) + + def run(self): + self.build_cmake() + + def copy(self, build_temp): + from distutils.file_util import copy_file + cwd = str(pathlib.Path().absolute()) + src = os.path.join('.', build_temp, 'src') + ops = os.listdir(src) + for op in ops: + op_path = os.path.join(src, op) + if not os.path.isdir(op_path): + continue + files = os.listdir(op_path) + for file in files: + if file.split('.')[-1] == 'so': + copy_file( + os.path.join(op_path, file), os.path.join(cwd, 'TorchOpt', '_lib') + ) + + def build_cmake(self): + cwd = pathlib.Path().absolute() + + build_temp = f"{pathlib.Path(self.build_temp)}" + os.makedirs(build_temp, exist_ok=True) + + config = "Debug" if self.debug else "Release" + + PYTHON_INCLUDE_DIR = "" + for path in self.include_dirs: + PYTHON_INCLUDE_DIR += path + ';' + + TORCH_INCLUDE_PATH = "" + for path in cpp_extension.include_paths(): + TORCH_INCLUDE_PATH += path + ';' + + TORCH_LIBRARY_PATH = "" + for path in cpp_extension.library_paths(): + TORCH_LIBRARY_PATH += path + ';' + + cmake_args = [ + "-DPYTHON_INCLUDE_DIR=" + PYTHON_INCLUDE_DIR, + "-DTORCH_INCLUDE_PATH=" + TORCH_INCLUDE_PATH, + "-DTORCH_LIBRARY_PATH=" + TORCH_LIBRARY_PATH, + "-DCMAKE_BUILD_TYPE=" + config + ] + + build_args = ["--config", config, "--", "-j4"] + + os.chdir(build_temp) + self.spawn(["cmake", f"{str(cwd)}"] + cmake_args) + if not self.dry_run: + self.spawn(["cmake", "--build", "."] + build_args) + os.chdir(str(cwd)) + self.copy(build_temp) class download_shared(): - def __init__(self): - import urllib - dir_path = os.path.dirname(os.path.realpath(__file__)) - print(f"setup.py at {dir_path}") - print("downloading shared libraries") - op_urls = [] - if sys.version_info >= (3, 8) and sys.version_info < (3, 9): - op_urls.append( - "https://torchopt.oss-cn-beijing.aliyuncs.com/torch1_11/adam_op.cpython-38-x86_64-linux-gnu.so" - ) - elif sys.version_info >= (3, 9) and sys.version_info < (3, 10): - op_urls.append( - "https://torchopt.oss-cn-beijing.aliyuncs.com/torch1_11/adam_op.cpython-39-x86_64-linux-gnu.so" - ) - - if len(op_urls) == 0: - import warnings - warnings.warn("no pre-compiled libraries for you python version") - return - - for url in op_urls: - data = urllib.request.urlopen(url) - filename = url.rpartition('/')[-1] - file_path = os.path.join(dir_path, 'TorchOpt', '_lib', filename) - with 
open(file_path, 'wb') as f: - f.write(data.read()) - print("shared libraries downloaded") + + def __init__(self): + import urllib + dir_path = os.path.dirname(os.path.realpath(__file__)) + print(f"setup.py at {dir_path}") + print("downloading shared libraries") + op_urls = [] + if sys.version_info >= (3, 8) and sys.version_info < (3, 9): + op_urls.append( + "https://torchopt.oss-cn-beijing.aliyuncs.com/torch1_11/adam_op.cpython-38-x86_64-linux-gnu.so" + ) + elif sys.version_info >= (3, 9) and sys.version_info < (3, 10): + op_urls.append( + "https://torchopt.oss-cn-beijing.aliyuncs.com/torch1_11/adam_op.cpython-39-x86_64-linux-gnu.so" + ) + + if len(op_urls) == 0: + import warnings + warnings.warn("no pre-compiled libraries for you python version") + return + + for url in op_urls: + data = urllib.request.urlopen(url) + filename = url.rpartition('/')[-1] + file_path = os.path.join(dir_path, 'TorchOpt', '_lib', filename) + with open(file_path, 'wb') as f: + f.write(data.read()) + print("shared libraries downloaded") if 'build_from_source' not in sys.argv: - download_shared() + download_shared() setup( - name="TorchOpt", - version="0.4.1", - author="TorchOpt Contributors", - author_email="jieren9806@gmail.com", - description="A Jax-style optimizer.", - license="Apache License Version 2.0", - keywords="meta learning", - url="https://github.com/metaopt/TorchOpt", - packages=find_packages(), - package_data={"": ["_lib/*.so"]}, - include_package_data=True, - cmdclass={'build_from_source': MyBuild}, - install_requires=[ - 'jax[cpu]', - 'torch==1.11', - 'graphviz', - ], + name="TorchOpt", + version="0.4.1", + author="TorchOpt Contributors", + author_email="jieren9806@gmail.com", + description="A Jax-style optimizer.", + license="Apache License Version 2.0", + keywords="meta learning", + url="https://github.com/metaopt/TorchOpt", + packages=find_packages(), + package_data={"": ["_lib/*.so"]}, + include_package_data=True, + cmdclass={'build_from_source': MyBuild}, + install_requires=[ + 'jax[cpu]', + 'torch==1.11', + 'graphviz', + ], ) diff --git a/src/adam_op/adam_op.cpp b/src/adam_op/adam_op.cpp index b4e12ca8..f8cfffce 100644 --- a/src/adam_op/adam_op.cpp +++ b/src/adam_op/adam_op.cpp @@ -22,8 +22,9 @@ #include "adam_op/adam_op_impl.h" namespace TorchOpt { -TensorArray<3> adamForwardInplace(torch::Tensor& updates, torch::Tensor& mu, - torch::Tensor& nu, const float b1, +TensorArray<3> adamForwardInplace(const torch::Tensor& updates, + const torch::Tensor& mu, + const torch::Tensor& nu, const float b1, const float b2, const float eps, const float eps_root, const int count) { if (updates.device().is_cuda()) { @@ -34,7 +35,7 @@ TensorArray<3> adamForwardInplace(torch::Tensor& updates, torch::Tensor& mu, } else { throw std::runtime_error("Not implemented"); } -}; +} torch::Tensor adamForwardMu(const torch::Tensor& updates, const torch::Tensor& mu, const float b1) { if (updates.device().is_cuda()) { @@ -44,7 +45,7 @@ torch::Tensor adamForwardMu(const torch::Tensor& updates, } else { throw std::runtime_error("Not implemented"); } -}; +} torch::Tensor adamForwardNu(const torch::Tensor& updates, const torch::Tensor& nu, const float b2) { @@ -55,7 +56,7 @@ torch::Tensor adamForwardNu(const torch::Tensor& updates, } else { throw std::runtime_error("Not implemented"); } -}; +} torch::Tensor adamForwardUpdates(const torch::Tensor& new_mu, const torch::Tensor& new_nu, const float b1, @@ -68,7 +69,7 @@ torch::Tensor adamForwardUpdates(const torch::Tensor& new_mu, } else { throw std::runtime_error("Not 
implemented"); } -}; +} TensorArray<2> adamBackwardMu(const torch::Tensor& dmu, const torch::Tensor& updates, @@ -80,7 +81,7 @@ TensorArray<2> adamBackwardMu(const torch::Tensor& dmu, } else { throw std::runtime_error("Not implemented"); } -}; +} TensorArray<2> adamBackwardNu(const torch::Tensor& dnu, const torch::Tensor& updates, @@ -92,7 +93,7 @@ TensorArray<2> adamBackwardNu(const torch::Tensor& dnu, } else { throw std::runtime_error("Not implemented"); } -}; +} TensorArray<2> adamBackwardUpdates(const torch::Tensor& dupdates, const torch::Tensor& updates, @@ -108,7 +109,7 @@ TensorArray<2> adamBackwardUpdates(const torch::Tensor& dupdates, } else { throw std::runtime_error("Not implemented"); } -}; +} } // namespace TorchOpt PYBIND11_MODULE(adam_op, m) { diff --git a/src/adam_op/adam_op_impl.cpp b/src/adam_op/adam_op_impl.cpp index fe951f16..48427213 100644 --- a/src/adam_op/adam_op_impl.cpp +++ b/src/adam_op/adam_op_impl.cpp @@ -20,7 +20,7 @@ #include -#include "utils.h" +#include "include/utils.h" namespace TorchOpt { using std::size_t; @@ -50,8 +50,9 @@ void adamForwardInplaceCPUKernel( } } // namespace -TensorArray<3> adamForwardInplaceCPU(torch::Tensor& updates, torch::Tensor& mu, - torch::Tensor& nu, const float b1, +TensorArray<3> adamForwardInplaceCPU(const torch::Tensor& updates, + const torch::Tensor& mu, + const torch::Tensor& nu, const float b1, const float b2, const float eps, const float eps_root, const int count) { using other_t = float; @@ -99,7 +100,7 @@ torch::Tensor adamForwardMuCPU(const torch::Tensor& updates, mu_out.data_ptr()); })); return mu_out; -}; +} namespace { template @@ -132,7 +133,7 @@ torch::Tensor adamForwardNuCPU(const torch::Tensor& updates, nu_out.data_ptr()); })); return nu_out; -}; +} namespace { template @@ -176,7 +177,7 @@ torch::Tensor adamForwardUpdatesCPU(const torch::Tensor& new_mu, updates_out.data_ptr()); })); return updates_out; -}; +} namespace { template @@ -210,7 +211,7 @@ TensorArray<2> adamBackwardMuCPU(const torch::Tensor& dmu, dmu_out.data_ptr()); })); return TensorArray<2>{std::move(dupdates_out), std::move(dmu_out)}; -}; +} namespace { template @@ -246,7 +247,7 @@ TensorArray<2> adamBackwardNuCPU(const torch::Tensor& dnu, dupdates_out.data_ptr(), dnu_out.data_ptr()); })); return TensorArray<2>{std::move(dupdates_out), std::move(dnu_out)}; -}; +} namespace { template @@ -305,5 +306,5 @@ TensorArray<2> adamBackwardUpdatesCPU(const torch::Tensor& dupdates, n, dmu_out.data_ptr(), dnu_out.data_ptr()); })); return TensorArray<2>{std::move(dmu_out), std::move(dnu_out)}; -}; +} } // namespace TorchOpt diff --git a/src/adam_op/adam_op_impl.cu b/src/adam_op/adam_op_impl.cu index ccb189d0..0b7b4cea 100644 --- a/src/adam_op/adam_op_impl.cu +++ b/src/adam_op/adam_op_impl.cu @@ -18,7 +18,7 @@ #include #include "adam_op/adam_op_impl.cuh" -#include "utils.h" +#include "include/utils.h" namespace TorchOpt { @@ -49,8 +49,9 @@ __global__ void adamForwardInplaceCUDAKernel( } } // namespace -TensorArray<3> adamForwardInplaceCUDA(torch::Tensor &updates, torch::Tensor &mu, - torch::Tensor &nu, const float b1, +TensorArray<3> adamForwardInplaceCUDA(const torch::Tensor &updates, + const torch::Tensor &mu, + const torch::Tensor &nu, const float b1, const float b2, const float eps, const float eps_root, const int count) { using other_t = float; @@ -103,7 +104,7 @@ torch::Tensor adamForwardMuCUDA(const torch::Tensor &updates, mu_out.data_ptr()); })); return mu_out; -}; +} namespace { template @@ -140,7 +141,7 @@ torch::Tensor adamForwardNuCUDA(const 
torch::Tensor &updates, nu_out.data_ptr()); })); return nu_out; -}; +} namespace { template @@ -188,7 +189,7 @@ torch::Tensor adamForwardUpdatesCUDA(const torch::Tensor &new_mu, updates_out.data_ptr()); })); return updates_out; -}; +} namespace { template @@ -226,7 +227,7 @@ TensorArray<2> adamBackwardMuCUDA(const torch::Tensor &dmu, dmu_out.data_ptr()); })); return TensorArray<2>{std::move(dupdates_out), std::move(dmu_out)}; -}; +} namespace { template @@ -266,7 +267,7 @@ TensorArray<2> adamBackwardNuCUDA(const torch::Tensor &dnu, dupdates_out.data_ptr(), dnu_out.data_ptr()); })); return TensorArray<2>{std::move(dupdates_out), std::move(dnu_out)}; -}; +} namespace { template @@ -328,5 +329,5 @@ TensorArray<2> adamBackwardUpdatesCUDA(const torch::Tensor &dupdates, n, dmu_out.data_ptr(), dnu_out.data_ptr()); })); return TensorArray<2>{std::move(dmu_out), std::move(dnu_out)}; -}; +} } // namespace TorchOpt diff --git a/tests/requirements.txt b/tests/requirements.txt new file mode 100644 index 00000000..cdff8c3e --- /dev/null +++ b/tests/requirements.txt @@ -0,0 +1,3 @@ +jax[cpu] +graphviz +torch \ No newline at end of file diff --git a/tests/unit/high_level/test_high_level_inplace.py b/tests/unit/high_level/test_high_level_inplace.py index dc55ce0c..728b0158 100644 --- a/tests/unit/high_level/test_high_level_inplace.py +++ b/tests/unit/high_level/test_high_level_inplace.py @@ -25,176 +25,174 @@ class HighLevelInplace(unittest.TestCase): - @classmethod - def setUpClass(cls): - torch.manual_seed(0) - cls.model = models.resnet18() - cls.model_ref = copy.deepcopy(cls.model) - cls.model_backup = copy.deepcopy(cls.model) - - cls.batch_size = 2 - cls.dataset = data.TensorDataset(torch.randn(2, 3, 224, 224), - torch.randint(0, 1000, (2, ))) - cls.loader = data.DataLoader(cls.dataset, cls.batch_size, False) - - cls.lr = 1e-3 - - def setUp(self) -> None: - torch.manual_seed(0) - self.model = copy.deepcopy(self.model_backup) - self.model_ref = copy.deepcopy(self.model_backup) - - def test_sgd(self) -> None: - optim = SGD(self.model.parameters(), self.lr) - optim_ref = torch.optim.SGD(self.model_ref.parameters(), self.lr) - for xs, ys in self.loader: - pred = self.model(xs) - pred_ref = self.model_ref(xs) - loss = F.cross_entropy(pred, ys) - loss_ref = F.cross_entropy(pred_ref, ys) - optim.zero_grad() - loss.backward() - optim.step() - optim_ref.zero_grad() - loss_ref.backward() - optim_ref.step() - - with torch.no_grad(): - for p, p_ref in zip(self.model.parameters(), - self.model_ref.parameters()): - mse = F.mse_loss(p, p_ref) - self.assertAlmostEqual(float(mse), 0) - for b, b_ref in zip(self.model.buffers(), - self.model_ref.buffers()): - b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float( - ) if not b_ref.is_floating_point() else b_ref - mse = F.mse_loss(b, b_ref) - self.assertAlmostEqual(float(mse), 0) - - def test_adam(self) -> None: - optim = Adam(self.model.parameters(), self.lr) - optim_ref = torch.optim.Adam(self.model_ref.parameters(), self.lr) - for xs, ys in self.loader: - pred = self.model(xs) - pred_ref = self.model_ref(xs) - loss = F.cross_entropy(pred, ys) - loss_ref = F.cross_entropy(pred_ref, ys) - optim.zero_grad() - loss.backward() - optim.step() - optim_ref.zero_grad() - loss_ref.backward() - optim_ref.step() - - with torch.no_grad(): - for p, p_ref in zip(self.model.parameters(), - self.model_ref.parameters()): - mse = F.mse_loss(p, p_ref) - self.assertAlmostEqual(float(mse), 0) - for b, b_ref in zip(self.model.buffers(), - self.model_ref.buffers()): - b = 
b.float() if not b.is_floating_point() else b - b_ref = b_ref.float( - ) if not b_ref.is_floating_point() else b_ref - mse = F.mse_loss(b, b_ref) - self.assertAlmostEqual(float(mse), 0) - - def test_accelerated_adam_cpu(self) -> None: - self.model - self.model_ref - optim = Adam(self.model.parameters(), self.lr, use_accelerated_op=True) - optim_ref = torch.optim.Adam(self.model_ref.parameters(), self.lr) - for xs, ys in self.loader: - xs = xs - ys = ys - pred = self.model(xs) - pred_ref = self.model_ref(xs) - loss = F.cross_entropy(pred, ys) - loss_ref = F.cross_entropy(pred_ref, ys) - optim.zero_grad() - loss.backward() - optim.step() - optim_ref.zero_grad() - loss_ref.backward() - optim_ref.step() - - with torch.no_grad(): - for p, p_ref in zip(self.model.parameters(), - self.model_ref.parameters()): - mse = F.mse_loss(p, p_ref) - self.assertAlmostEqual(float(mse), 0) - for b, b_ref in zip(self.model.buffers(), - self.model_ref.buffers()): - b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float( - ) if not b_ref.is_floating_point() else b_ref - mse = F.mse_loss(b, b_ref) - self.assertAlmostEqual(float(mse), 0) - - def test_accelerated_adam_cuda(self) -> None: - self.model.cuda() - self.model_ref.cuda() - optim = Adam(self.model.parameters(), self.lr, use_accelerated_op=True) - optim_ref = torch.optim.Adam(self.model_ref.parameters(), self.lr) - for xs, ys in self.loader: - xs = xs.cuda() - ys = ys.cuda() - pred = self.model(xs) - pred_ref = self.model_ref(xs) - loss = F.cross_entropy(pred, ys) - loss_ref = F.cross_entropy(pred_ref, ys) - optim.zero_grad() - loss.backward() - optim.step() - optim_ref.zero_grad() - loss_ref.backward() - optim_ref.step() - - with torch.no_grad(): - for p, p_ref in zip(self.model.parameters(), - self.model_ref.parameters()): - mse = F.mse_loss(p, p_ref) - self.assertAlmostEqual(float(mse), 0) - for b, b_ref in zip(self.model.buffers(), - self.model_ref.buffers()): - b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float( - ) if not b_ref.is_floating_point() else b_ref - mse = F.mse_loss(b, b_ref) - self.assertAlmostEqual(float(mse), 0) - - def test_rmsprop(self) -> None: - optim = RMSProp(self.model.parameters(), self.lr, - decay=0.99) # pytorch uses 0.99 as the default value - optim_ref = torch.optim.RMSprop(self.model_ref.parameters(), self.lr) - for xs, ys in self.loader: - pred = self.model(xs) - pred_ref = self.model_ref(xs) - loss = F.cross_entropy(pred, ys) - loss_ref = F.cross_entropy(pred_ref, ys) - optim.zero_grad() - loss.backward() - optim.step() - optim_ref.zero_grad() - loss_ref.backward() - optim_ref.step() - - with torch.no_grad(): - for p, p_ref in zip(self.model.parameters(), - self.model_ref.parameters()): - mse = F.mse_loss(p, p_ref) - self.assertAlmostEqual( - float(mse), 0, delta=1e-4 - ) # Optax and pytorch have different implementation - for b, b_ref in zip(self.model.buffers(), - self.model_ref.buffers()): - b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float( - ) if not b_ref.is_floating_point() else b_ref - mse = F.mse_loss(b, b_ref) - self.assertAlmostEqual(float(mse), 0) + + @classmethod + def setUpClass(cls): + torch.manual_seed(0) + cls.model = models.resnet18() + cls.model_ref = copy.deepcopy(cls.model) + cls.model_backup = copy.deepcopy(cls.model) + + cls.batch_size = 2 + cls.dataset = data.TensorDataset( + torch.randn(2, 3, 224, 224), torch.randint(0, 1000, (2,)) + ) + cls.loader = data.DataLoader(cls.dataset, cls.batch_size, False) + + cls.lr = 1e-3 + + def setUp(self) 
-> None: + torch.manual_seed(0) + self.model = copy.deepcopy(self.model_backup) + self.model_ref = copy.deepcopy(self.model_backup) + + def test_sgd(self) -> None: + optim = SGD(self.model.parameters(), self.lr) + optim_ref = torch.optim.SGD(self.model_ref.parameters(), self.lr) + for xs, ys in self.loader: + pred = self.model(xs) + pred_ref = self.model_ref(xs) + loss = F.cross_entropy(pred, ys) + loss_ref = F.cross_entropy(pred_ref, ys) + optim.zero_grad() + loss.backward() + optim.step() + optim_ref.zero_grad() + loss_ref.backward() + optim_ref.step() + + with torch.no_grad(): + for p, p_ref in zip( + self.model.parameters(), self.model_ref.parameters() + ): + mse = F.mse_loss(p, p_ref) + self.assertAlmostEqual(float(mse), 0) + for b, b_ref in zip(self.model.buffers(), self.model_ref.buffers()): + b = b.float() if not b.is_floating_point() else b + b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + mse = F.mse_loss(b, b_ref) + self.assertAlmostEqual(float(mse), 0) + + def test_adam(self) -> None: + optim = Adam(self.model.parameters(), self.lr) + optim_ref = torch.optim.Adam(self.model_ref.parameters(), self.lr) + for xs, ys in self.loader: + pred = self.model(xs) + pred_ref = self.model_ref(xs) + loss = F.cross_entropy(pred, ys) + loss_ref = F.cross_entropy(pred_ref, ys) + optim.zero_grad() + loss.backward() + optim.step() + optim_ref.zero_grad() + loss_ref.backward() + optim_ref.step() + + with torch.no_grad(): + for p, p_ref in zip( + self.model.parameters(), self.model_ref.parameters() + ): + mse = F.mse_loss(p, p_ref) + self.assertAlmostEqual(float(mse), 0) + for b, b_ref in zip(self.model.buffers(), self.model_ref.buffers()): + b = b.float() if not b.is_floating_point() else b + b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + mse = F.mse_loss(b, b_ref) + self.assertAlmostEqual(float(mse), 0) + + def test_accelerated_adam_cpu(self) -> None: + self.model + self.model_ref + optim = Adam(self.model.parameters(), self.lr, use_accelerated_op=True) + optim_ref = torch.optim.Adam(self.model_ref.parameters(), self.lr) + for xs, ys in self.loader: + xs = xs + ys = ys + pred = self.model(xs) + pred_ref = self.model_ref(xs) + loss = F.cross_entropy(pred, ys) + loss_ref = F.cross_entropy(pred_ref, ys) + optim.zero_grad() + loss.backward() + optim.step() + optim_ref.zero_grad() + loss_ref.backward() + optim_ref.step() + + with torch.no_grad(): + for p, p_ref in zip( + self.model.parameters(), self.model_ref.parameters() + ): + mse = F.mse_loss(p, p_ref) + self.assertAlmostEqual(float(mse), 0) + for b, b_ref in zip(self.model.buffers(), self.model_ref.buffers()): + b = b.float() if not b.is_floating_point() else b + b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + mse = F.mse_loss(b, b_ref) + self.assertAlmostEqual(float(mse), 0) + + def test_accelerated_adam_cuda(self) -> None: + self.model.cuda() + self.model_ref.cuda() + optim = Adam(self.model.parameters(), self.lr, use_accelerated_op=True) + optim_ref = torch.optim.Adam(self.model_ref.parameters(), self.lr) + for xs, ys in self.loader: + xs = xs.cuda() + ys = ys.cuda() + pred = self.model(xs) + pred_ref = self.model_ref(xs) + loss = F.cross_entropy(pred, ys) + loss_ref = F.cross_entropy(pred_ref, ys) + optim.zero_grad() + loss.backward() + optim.step() + optim_ref.zero_grad() + loss_ref.backward() + optim_ref.step() + + with torch.no_grad(): + for p, p_ref in zip( + self.model.parameters(), self.model_ref.parameters() + ): + mse = F.mse_loss(p, p_ref) + 
self.assertAlmostEqual(float(mse), 0) + for b, b_ref in zip(self.model.buffers(), self.model_ref.buffers()): + b = b.float() if not b.is_floating_point() else b + b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + mse = F.mse_loss(b, b_ref) + self.assertAlmostEqual(float(mse), 0) + + def test_rmsprop(self) -> None: + optim = RMSProp( + self.model.parameters(), self.lr, decay=0.99 + ) # pytorch uses 0.99 as the default value + optim_ref = torch.optim.RMSprop(self.model_ref.parameters(), self.lr) + for xs, ys in self.loader: + pred = self.model(xs) + pred_ref = self.model_ref(xs) + loss = F.cross_entropy(pred, ys) + loss_ref = F.cross_entropy(pred_ref, ys) + optim.zero_grad() + loss.backward() + optim.step() + optim_ref.zero_grad() + loss_ref.backward() + optim_ref.step() + + with torch.no_grad(): + for p, p_ref in zip( + self.model.parameters(), self.model_ref.parameters() + ): + mse = F.mse_loss(p, p_ref) + self.assertAlmostEqual( + float(mse), 0, delta=1e-4 + ) # Optax and pytorch have different implementation + for b, b_ref in zip(self.model.buffers(), self.model_ref.buffers()): + b = b.float() if not b.is_floating_point() else b + b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + mse = F.mse_loss(b, b_ref) + self.assertAlmostEqual(float(mse), 0) if __name__ == '__main__': - unittest.main() + unittest.main() diff --git a/tests/unit/low_level/test_low_level_inplace.py b/tests/unit/low_level/test_low_level_inplace.py index de9d9861..e42209c5 100644 --- a/tests/unit/low_level/test_low_level_inplace.py +++ b/tests/unit/low_level/test_low_level_inplace.py @@ -27,188 +27,181 @@ class LowLevelInplace(unittest.TestCase): - @classmethod - def setUpClass(cls): - torch.manual_seed(0) - cls.model = models.resnet18() - cls.model_ref = copy.deepcopy(cls.model) - cls.model_backup = copy.deepcopy(cls.model) - - cls.batch_size = 2 - cls.dataset = data.TensorDataset(torch.randn(2, 3, 224, 224), - torch.randint(0, 1000, (2, ))) - cls.loader = data.DataLoader(cls.dataset, cls.batch_size, False) - - cls.lr = 1e-3 - - def setUp(self) -> None: - torch.manual_seed(0) - self.model = copy.deepcopy(self.model_backup) - self.model_ref = copy.deepcopy(self.model_backup) - - def test_sgd(self) -> None: - fun, params, buffers = functorch.make_functional_with_buffers( - self.model) - optim = sgd(self.lr) - optim_state = optim.init(params) - optim_ref = torch.optim.SGD(self.model_ref.parameters(), self.lr) - - for xs, ys in self.loader: - pred = fun(params, buffers, xs) - pred_ref = self.model_ref(xs) - loss = F.cross_entropy(pred, ys) - loss_ref = F.cross_entropy(pred_ref, ys) - - grad = torch.autograd.grad(loss, params) - updates, optim_state = optim.update(grad, optim_state) - params = TorchOpt.apply_updates(params, updates) - - optim_ref.zero_grad() - loss_ref.backward() - optim_ref.step() - - with torch.no_grad(): - for p, p_ref in zip(params, self.model_ref.parameters()): - mse = F.mse_loss(p, p_ref) - self.assertAlmostEqual(float(mse), 0) - for b, b_ref in zip(buffers, self.model_ref.buffers()): - b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float( - ) if not b_ref.is_floating_point() else b_ref - mse = F.mse_loss(b, b_ref) - self.assertAlmostEqual(float(mse), 0) - - def test_adam(self) -> None: - fun, params, buffers = functorch.make_functional_with_buffers( - self.model) - optim = adam(self.lr) - optim_state = optim.init(params) - optim_ref = torch.optim.Adam(self.model_ref.parameters(), self.lr) - for xs, ys in self.loader: - pred = fun(params, buffers, 
xs) - pred_ref = self.model_ref(xs) - loss = F.cross_entropy(pred, ys) - loss_ref = F.cross_entropy(pred_ref, ys) - - grad = torch.autograd.grad(loss, params) - updates, optim_state = optim.update(grad, optim_state) - params = TorchOpt.apply_updates(params, updates) - - optim_ref.zero_grad() - loss_ref.backward() - optim_ref.step() - with torch.no_grad(): - for p, p_ref in zip(params, self.model_ref.parameters()): - mse = F.mse_loss(p, p_ref) - self.assertAlmostEqual(float(mse), 0) - for b, b_ref in zip(buffers, self.model_ref.buffers()): - b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float( - ) if not b_ref.is_floating_point() else b_ref - mse = F.mse_loss(b, b_ref) - self.assertAlmostEqual(float(mse), 0) - - def test_accelerated_adam_cpu(self) -> None: - self.model - self.model_ref - fun, params, buffers = functorch.make_functional_with_buffers( - self.model) - optim = adam(self.lr, use_accelerated_op=True) - optim_state = optim.init(params) - optim_ref = torch.optim.Adam(self.model_ref.parameters(), self.lr) - for xs, ys in self.loader: - xs = xs - ys = ys - pred = fun(params, buffers, xs) - pred_ref = self.model_ref(xs) - loss = F.cross_entropy(pred, ys) - loss_ref = F.cross_entropy(pred_ref, ys) - - grad = torch.autograd.grad(loss, params) - updates, optim_state = optim.update(grad, optim_state) - params = TorchOpt.apply_updates(params, updates) - - optim_ref.zero_grad() - loss_ref.backward() - optim_ref.step() - with torch.no_grad(): - for p, p_ref in zip(params, self.model_ref.parameters()): - mse = F.mse_loss(p, p_ref) - self.assertAlmostEqual(float(mse), 0) - for b, b_ref in zip(buffers, self.model_ref.buffers()): - b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float( - ) if not b_ref.is_floating_point() else b_ref - mse = F.mse_loss(b, b_ref) - self.assertAlmostEqual(float(mse), 0) - - def test_accelerated_adam_cuda(self) -> None: - self.model.cuda() - self.model_ref.cuda() - fun, params, buffers = functorch.make_functional_with_buffers( - self.model) - optim = adam(self.lr, use_accelerated_op=True) - optim_state = optim.init(params) - optim_ref = torch.optim.Adam(self.model_ref.parameters(), self.lr) - for xs, ys in self.loader: - xs = xs.cuda() - ys = ys.cuda() - pred = fun(params, buffers, xs) - pred_ref = self.model_ref(xs) - loss = F.cross_entropy(pred, ys) - loss_ref = F.cross_entropy(pred_ref, ys) - - grad = torch.autograd.grad(loss, params) - updates, optim_state = optim.update(grad, optim_state) - params = TorchOpt.apply_updates(params, updates) - - optim_ref.zero_grad() - loss_ref.backward() - optim_ref.step() - with torch.no_grad(): - for p, p_ref in zip(params, self.model_ref.parameters()): - mse = F.mse_loss(p, p_ref) - self.assertAlmostEqual(float(mse), 0) - for b, b_ref in zip(buffers, self.model_ref.buffers()): - b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float( - ) if not b_ref.is_floating_point() else b_ref - mse = F.mse_loss(b, b_ref) - self.assertAlmostEqual(float(mse), 0) - - def test_rmsprop(self) -> None: - fun, params, buffers = functorch.make_functional_with_buffers( - self.model) - optim = rmsprop(self.lr, - decay=0.99) # pytorch uses 0.99 as the default value - optim_state = optim.init(params) - optim_ref = torch.optim.RMSprop(self.model_ref.parameters(), self.lr) - for xs, ys in self.loader: - pred = fun(params, buffers, xs) - pred_ref = self.model_ref(xs) - loss = F.cross_entropy(pred, ys) - loss_ref = F.cross_entropy(pred_ref, ys) - - grad = torch.autograd.grad(loss, params) - updates, 
optim_state = optim.update(grad, optim_state) - params = TorchOpt.apply_updates(params, updates) - - optim_ref.zero_grad() - loss_ref.backward() - optim_ref.step() - with torch.no_grad(): - for p, p_ref in zip(params, self.model_ref.parameters()): - mse = F.mse_loss(p, p_ref) - self.assertAlmostEqual( - float(mse), 0, delta=1e-4 - ) # Optax and pytorch have different implementation - for b, b_ref in zip(buffers, self.model_ref.buffers()): - b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float( - ) if not b_ref.is_floating_point() else b_ref - mse = F.mse_loss(b, b_ref) - self.assertAlmostEqual(float(mse), 0) + + @classmethod + def setUpClass(cls): + torch.manual_seed(0) + cls.model = models.resnet18() + cls.model_ref = copy.deepcopy(cls.model) + cls.model_backup = copy.deepcopy(cls.model) + + cls.batch_size = 2 + cls.dataset = data.TensorDataset( + torch.randn(2, 3, 224, 224), torch.randint(0, 1000, (2,)) + ) + cls.loader = data.DataLoader(cls.dataset, cls.batch_size, False) + + cls.lr = 1e-3 + + def setUp(self) -> None: + torch.manual_seed(0) + self.model = copy.deepcopy(self.model_backup) + self.model_ref = copy.deepcopy(self.model_backup) + + def test_sgd(self) -> None: + fun, params, buffers = functorch.make_functional_with_buffers(self.model) + optim = sgd(self.lr) + optim_state = optim.init(params) + optim_ref = torch.optim.SGD(self.model_ref.parameters(), self.lr) + + for xs, ys in self.loader: + pred = fun(params, buffers, xs) + pred_ref = self.model_ref(xs) + loss = F.cross_entropy(pred, ys) + loss_ref = F.cross_entropy(pred_ref, ys) + + grad = torch.autograd.grad(loss, params) + updates, optim_state = optim.update(grad, optim_state) + params = TorchOpt.apply_updates(params, updates) + + optim_ref.zero_grad() + loss_ref.backward() + optim_ref.step() + + with torch.no_grad(): + for p, p_ref in zip(params, self.model_ref.parameters()): + mse = F.mse_loss(p, p_ref) + self.assertAlmostEqual(float(mse), 0) + for b, b_ref in zip(buffers, self.model_ref.buffers()): + b = b.float() if not b.is_floating_point() else b + b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + mse = F.mse_loss(b, b_ref) + self.assertAlmostEqual(float(mse), 0) + + def test_adam(self) -> None: + fun, params, buffers = functorch.make_functional_with_buffers(self.model) + optim = adam(self.lr) + optim_state = optim.init(params) + optim_ref = torch.optim.Adam(self.model_ref.parameters(), self.lr) + for xs, ys in self.loader: + pred = fun(params, buffers, xs) + pred_ref = self.model_ref(xs) + loss = F.cross_entropy(pred, ys) + loss_ref = F.cross_entropy(pred_ref, ys) + + grad = torch.autograd.grad(loss, params) + updates, optim_state = optim.update(grad, optim_state) + params = TorchOpt.apply_updates(params, updates) + + optim_ref.zero_grad() + loss_ref.backward() + optim_ref.step() + with torch.no_grad(): + for p, p_ref in zip(params, self.model_ref.parameters()): + mse = F.mse_loss(p, p_ref) + self.assertAlmostEqual(float(mse), 0) + for b, b_ref in zip(buffers, self.model_ref.buffers()): + b = b.float() if not b.is_floating_point() else b + b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + mse = F.mse_loss(b, b_ref) + self.assertAlmostEqual(float(mse), 0) + + def test_accelerated_adam_cpu(self) -> None: + self.model + self.model_ref + fun, params, buffers = functorch.make_functional_with_buffers(self.model) + optim = adam(self.lr, use_accelerated_op=True) + optim_state = optim.init(params) + optim_ref = torch.optim.Adam(self.model_ref.parameters(), self.lr) + for 
xs, ys in self.loader: + xs = xs + ys = ys + pred = fun(params, buffers, xs) + pred_ref = self.model_ref(xs) + loss = F.cross_entropy(pred, ys) + loss_ref = F.cross_entropy(pred_ref, ys) + + grad = torch.autograd.grad(loss, params) + updates, optim_state = optim.update(grad, optim_state) + params = TorchOpt.apply_updates(params, updates) + + optim_ref.zero_grad() + loss_ref.backward() + optim_ref.step() + with torch.no_grad(): + for p, p_ref in zip(params, self.model_ref.parameters()): + mse = F.mse_loss(p, p_ref) + self.assertAlmostEqual(float(mse), 0) + for b, b_ref in zip(buffers, self.model_ref.buffers()): + b = b.float() if not b.is_floating_point() else b + b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + mse = F.mse_loss(b, b_ref) + self.assertAlmostEqual(float(mse), 0) + + def test_accelerated_adam_cuda(self) -> None: + self.model.cuda() + self.model_ref.cuda() + fun, params, buffers = functorch.make_functional_with_buffers(self.model) + optim = adam(self.lr, use_accelerated_op=True) + optim_state = optim.init(params) + optim_ref = torch.optim.Adam(self.model_ref.parameters(), self.lr) + for xs, ys in self.loader: + xs = xs.cuda() + ys = ys.cuda() + pred = fun(params, buffers, xs) + pred_ref = self.model_ref(xs) + loss = F.cross_entropy(pred, ys) + loss_ref = F.cross_entropy(pred_ref, ys) + + grad = torch.autograd.grad(loss, params) + updates, optim_state = optim.update(grad, optim_state) + params = TorchOpt.apply_updates(params, updates) + + optim_ref.zero_grad() + loss_ref.backward() + optim_ref.step() + with torch.no_grad(): + for p, p_ref in zip(params, self.model_ref.parameters()): + mse = F.mse_loss(p, p_ref) + self.assertAlmostEqual(float(mse), 0) + for b, b_ref in zip(buffers, self.model_ref.buffers()): + b = b.float() if not b.is_floating_point() else b + b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + mse = F.mse_loss(b, b_ref) + self.assertAlmostEqual(float(mse), 0) + + def test_rmsprop(self) -> None: + fun, params, buffers = functorch.make_functional_with_buffers(self.model) + optim = rmsprop( + self.lr, decay=0.99 + ) # pytorch uses 0.99 as the default value + optim_state = optim.init(params) + optim_ref = torch.optim.RMSprop(self.model_ref.parameters(), self.lr) + for xs, ys in self.loader: + pred = fun(params, buffers, xs) + pred_ref = self.model_ref(xs) + loss = F.cross_entropy(pred, ys) + loss_ref = F.cross_entropy(pred_ref, ys) + + grad = torch.autograd.grad(loss, params) + updates, optim_state = optim.update(grad, optim_state) + params = TorchOpt.apply_updates(params, updates) + + optim_ref.zero_grad() + loss_ref.backward() + optim_ref.step() + with torch.no_grad(): + for p, p_ref in zip(params, self.model_ref.parameters()): + mse = F.mse_loss(p, p_ref) + self.assertAlmostEqual( + float(mse), 0, delta=1e-4 + ) # Optax and pytorch have different implementation + for b, b_ref in zip(buffers, self.model_ref.buffers()): + b = b.float() if not b.is_floating_point() else b + b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + mse = F.mse_loss(b, b_ref) + self.assertAlmostEqual(float(mse), 0) if __name__ == '__main__': - unittest.main() + unittest.main() diff --git a/tests/unit/test_clip.py b/tests/unit/test_clip.py index b66c6f9b..c129db6e 100644 --- a/tests/unit/test_clip.py +++ b/tests/unit/test_clip.py @@ -27,59 +27,59 @@ class HighLevelInplace(unittest.TestCase): - @classmethod - def setUpClass(cls): - torch.manual_seed(0) - cls.model = models.resnet18() - cls.model_backup = copy.deepcopy(cls.model) - 
cls.model_ref = copy.deepcopy(cls.model) - cls.batch_size = 2 - cls.dataset = data.TensorDataset(torch.randn(2, 3, 224, 224), - torch.randint(0, 1000, (2, ))) - cls.loader = data.DataLoader(cls.dataset, cls.batch_size, False) + @classmethod + def setUpClass(cls): + torch.manual_seed(0) + cls.model = models.resnet18() + cls.model_backup = copy.deepcopy(cls.model) + cls.model_ref = copy.deepcopy(cls.model) - cls.lr = 1e0 - cls.max_norm = 10. + cls.batch_size = 2 + cls.dataset = data.TensorDataset( + torch.randn(2, 3, 224, 224), torch.randint(0, 1000, (2,)) + ) + cls.loader = data.DataLoader(cls.dataset, cls.batch_size, False) - def setUp(self) -> None: - torch.manual_seed(0) - self.model = copy.deepcopy(self.model_backup) - self.model_ref = copy.deepcopy(self.model_backup) + cls.lr = 1e0 + cls.max_norm = 10. - def test_sgd(self) -> None: - chain = TorchOpt.combine.chain( - TorchOpt.clip.clip_grad_norm(max_norm=self.max_norm), - sgd(lr=self.lr)) - optim = Optimizer(self.model.parameters(), chain) - optim_ref = torch.optim.SGD(self.model_ref.parameters(), self.lr) - for xs, ys in self.loader: - pred = self.model(xs) - pred_ref = self.model_ref(xs) - loss = F.cross_entropy(pred, ys) - loss_ref = F.cross_entropy(pred_ref, ys) - optim.zero_grad() - loss.backward() - optim.step() - optim_ref.zero_grad() - loss_ref.backward() - clip_grad_norm_(self.model_ref.parameters(), - max_norm=self.max_norm) - optim_ref.step() + def setUp(self) -> None: + torch.manual_seed(0) + self.model = copy.deepcopy(self.model_backup) + self.model_ref = copy.deepcopy(self.model_backup) - with torch.no_grad(): - for p, p_ref in zip(self.model.parameters(), - self.model_ref.parameters()): - mse = F.mse_loss(p, p_ref) - self.assertAlmostEqual(float(mse), 0) - for b, b_ref in zip(self.model.buffers(), - self.model_ref.buffers()): - b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float( - ) if not b_ref.is_floating_point() else b_ref - mse = F.mse_loss(b, b_ref) - self.assertAlmostEqual(float(mse), 0) + def test_sgd(self) -> None: + chain = TorchOpt.combine.chain( + TorchOpt.clip.clip_grad_norm(max_norm=self.max_norm), sgd(lr=self.lr) + ) + optim = Optimizer(self.model.parameters(), chain) + optim_ref = torch.optim.SGD(self.model_ref.parameters(), self.lr) + for xs, ys in self.loader: + pred = self.model(xs) + pred_ref = self.model_ref(xs) + loss = F.cross_entropy(pred, ys) + loss_ref = F.cross_entropy(pred_ref, ys) + optim.zero_grad() + loss.backward() + optim.step() + optim_ref.zero_grad() + loss_ref.backward() + clip_grad_norm_(self.model_ref.parameters(), max_norm=self.max_norm) + optim_ref.step() + + with torch.no_grad(): + for p, p_ref in zip( + self.model.parameters(), self.model_ref.parameters() + ): + mse = F.mse_loss(p, p_ref) + self.assertAlmostEqual(float(mse), 0) + for b, b_ref in zip(self.model.buffers(), self.model_ref.buffers()): + b = b.float() if not b.is_floating_point() else b + b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + mse = F.mse_loss(b, b_ref) + self.assertAlmostEqual(float(mse), 0) if __name__ == '__main__': - unittest.main() + unittest.main() diff --git a/tests/unit/test_schedule.py b/tests/unit/test_schedule.py index 0143cb7f..1e8f2831 100644 --- a/tests/unit/test_schedule.py +++ b/tests/unit/test_schedule.py @@ -19,29 +19,31 @@ class TestSchedule(unittest.TestCase): - @classmethod - def setUpClass(cls): - cls.init_value = 1. - cls.end_value = 0. 
- cls.gap_value = cls.init_value - cls.end_value - cls.transition_steps = 10 - cls.transition_begin = 1 - - def setUp(self) -> None: - pass - - def test_linear(self) -> None: - schedule = TorchOpt.schedule.linear_schedule( - init_value=self.init_value, - end_value=self.end_value, - transition_steps=self.transition_steps, - transition_begin=self.transition_begin) - for i in range(self.transition_begin, self.transition_steps): - lr = schedule(i) - lr_gt = self.init_value - self.gap_value * \ - (i - self.transition_begin) / self.transition_steps - self.assertEqual(lr, lr_gt) + + @classmethod + def setUpClass(cls): + cls.init_value = 1. + cls.end_value = 0. + cls.gap_value = cls.init_value - cls.end_value + cls.transition_steps = 10 + cls.transition_begin = 1 + + def setUp(self) -> None: + pass + + def test_linear(self) -> None: + schedule = TorchOpt.schedule.linear_schedule( + init_value=self.init_value, + end_value=self.end_value, + transition_steps=self.transition_steps, + transition_begin=self.transition_begin + ) + for i in range(self.transition_begin, self.transition_steps): + lr = schedule(i) + lr_gt = self.init_value - self.gap_value * \ + (i - self.transition_begin) / self.transition_steps + self.assertEqual(lr, lr_gt) if __name__ == '__main__': - unittest.main() + unittest.main() From 26177470b37f39e6e86bfca99e664484b61ace53 Mon Sep 17 00:00:00 2001 From: Xuehai Pan Date: Fri, 1 Jul 2022 22:53:22 +0800 Subject: [PATCH 04/19] feat(CMakeLists.txt): custom Python executable path (#18) --- CMakeLists.txt | 91 ++++++++++++++++++++++++++++++++++++++------------ 1 file changed, 70 insertions(+), 21 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 3b0f3229..03344847 100755 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -39,59 +39,108 @@ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -pthread -fPIC -fopenmp") set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -O3") set(CMAKE_CUDA_FLAGS_RELEASE "${CMAKE_CUDA_FLAGS_RELEASE} -O3") +function(system) + set(options STRIP) + set(oneValueArgs OUTPUT_VARIABLE ERROR_VARIABLE WORKING_DIRECTORY) + set(multiValueArgs COMMAND) + cmake_parse_arguments(SYSTEM + "${options}" + "${oneValueArgs}" + "${multiValueArgs}" + "${ARGN}") + + if (NOT DEFINED SYSTEM_WORKING_DIRECTORY) + set(SYSTEM_WORKING_DIRECTORY "${PROJECT_SOURCE_DIR}") + endif() + + execute_process( + COMMAND ${SYSTEM_COMMAND} + OUTPUT_VARIABLE STDOUT + ERROR_VARIABLE STDERR + WORKING_DIRECTORY "${SYSTEM_WORKING_DIRECTORY}" + ) + if("${SYSTEM_STRIP}") + string(STRIP "${STDOUT}" STDOUT) + string(STRIP "${STDERR}" STDERR) + endif() + + set("${SYSTEM_OUTPUT_VARIABLE}" "${STDOUT}" PARENT_SCOPE) + if (DEFINED SYSTEM_ERROR_VARIABLE) + set("${SYSTEM_ERROR_VARIABLE}" "${STDERR}" PARENT_SCOPE) + endif() +endfunction() + +if (NOT DEFINED PYTHON_EXECUTABLE) + set(PYTHON_EXECUTABLE python3) +endif() + +system( + STRIP OUTPUT_VARIABLE PYTHON_EXECUTABLE + COMMAND which "${PYTHON_EXECUTABLE}" +) + +system( + STRIP OUTPUT_VARIABLE PYTHON_VERSION + COMMAND "${PYTHON_EXECUTABLE}" --version +) + +message("-- Use Python version: ${PYTHON_VERSION}") +message("-- Use Python executable: \"${PYTHON_EXECUTABLE}\"") + if(NOT DEFINED PYTHON_INCLUDE_DIR) message("-- Auto detecting Python include directory...") - execute_process ( - COMMAND python3 -c "import sys; import os; path = sys.path[2]; splited_path = path.split('/'); splited_path[-2] = 'include'; print(os.path.join('/', *splited_path))" - OUTPUT_VARIABLE PYTHON_INCLUDE_DIR) - string(STRIP ${PYTHON_INCLUDE_DIR} PYTHON_INCLUDE_DIR) + system( + STRIP 
OUTPUT_VARIABLE PYTHON_INCLUDE_DIR + COMMAND "${PYTHON_EXECUTABLE}" -c "print(__import__('sysconfig').get_path('include'))" + ) endif() if("${PYTHON_INCLUDE_DIR}" STREQUAL "") message(FATAL_ERROR "-- Python include directory not found") else() - message("-- Detected Python include directory: ${PYTHON_INCLUDE_DIR}") + message("-- Detected Python include directory: \"${PYTHON_INCLUDE_DIR}\"") include_directories(${PYTHON_INCLUDE_DIR}) endif() if(NOT DEFINED TORCH_INCLUDE_PATH) message("-- Auto detecting PyTorch include directory...") - execute_process ( - COMMAND python3 -c "from torch.utils import cpp_extension; print(cpp_extension.include_paths()[0], end='')" - OUTPUT_VARIABLE TORCH_INCLUDE_PATH) - string(STRIP ${TORCH_INCLUDE_PATH} TORCH_INCLUDE_PATH) + system( + STRIP OUTPUT_VARIABLE TORCH_INCLUDE_PATH + COMMAND "${PYTHON_EXECUTABLE}" -c "print(__import__('torch.utils.cpp_extension', fromlist=[None]).include_paths()[0])" + ) endif() if("${TORCH_INCLUDE_PATH}" STREQUAL "") message(FATAL_ERROR "-- Torch include directory not found") else() - message("-- Detected Torch include directory: ${TORCH_INCLUDE_PATH}") + message("-- Detected Torch include directory: \"${TORCH_INCLUDE_PATH}\"") include_directories(${TORCH_INCLUDE_PATH}) endif() - if(NOT DEFINED TORCH_LIBRARY_PATH) message("-- Auto detecting PyTorch library directory...") - execute_process ( - COMMAND python3 -c "from torch.utils import cpp_extension; print(cpp_extension.library_paths()[0], end='')" - OUTPUT_VARIABLE TORCH_LIBRARY_PATH) - string(STRIP ${TORCH_LIBRARY_PATH} TORCH_LIBRARY_PATH) + system( + STRIP OUTPUT_VARIABLE TORCH_LIBRARY_PATH + COMMAND "${PYTHON_EXECUTABLE}" -c "print(__import__('torch.utils.cpp_extension', fromlist=[None]).library_paths()[0])" + ) endif() if("${TORCH_LIBRARY_PATH}" STREQUAL "") message(FATAL_ERROR "-- Torch library directory not found") else() - message("-- Detected Torch library directory: ${TORCH_LIBRARY_PATH}") + message("-- Detected Torch library directory: \"${TORCH_LIBRARY_PATH}\"") endif() +unset(TORCH_LIBRARIES) +foreach(VAR_PATH ${TORCH_LIBRARY_PATH}) + file(GLOB TORCH_LIBRARY "${VAR_PATH}/*.so") + list(APPEND TORCH_LIBRARIES "${TORCH_LIBRARY}") +endforeach() +message("-- Detected Torch libraries: \"${TORCH_LIBRARIES}\"") + add_definitions(-D_GLIBCXX_USE_CXX11_ABI=0) add_subdirectory("third_party/pybind11") include_directories(include) -foreach(TMP_PATH ${TORCH_LIBRARY_PATH}) - file(GLOB TORCH_LIBRARY ${TMP_PATH}/*.so) - set(TORCH_LIBRARIES "${TORCH_LIBRARIES};${TORCH_LIBRARY};") -endforeach() - add_subdirectory(src) From b9e5710dabd03c90244062aa01efd2897a936915 Mon Sep 17 00:00:00 2001 From: Benjamin-eecs Date: Mon, 4 Jul 2022 15:48:02 +0800 Subject: [PATCH 05/19] fix(all): pass lint --- .github/workflows/lint.yml | 6 +-- Makefile | 2 +- TorchOpt/__init__.py | 1 + TorchOpt/_src/MetaOptimizer.py | 50 ++++++++++----------- TorchOpt/_src/Optimizer.py | 80 +++++++++++++++++++--------------- TorchOpt/_src/visual.py | 54 +++++++++++------------ docker/dev.dockerfile | 25 +++++++++++ setup.cfg | 1 - 8 files changed, 128 insertions(+), 91 deletions(-) create mode 100644 docker/dev.dockerfile diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index cd4a45b8..aeb9496c 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -36,9 +36,9 @@ jobs: - name: mypy run: | make mypy - - name: docstyle - run: | - make docstyle + # - name: docstyle + # run: | + # make docstyle # - name: spelling # run: | # make spelling \ No newline at end of file diff --git a/Makefile 
b/Makefile index 17dd1f53..fc0ade67 100644 --- a/Makefile +++ b/Makefile @@ -96,7 +96,7 @@ spelling: doc-install doc-clean: cd docs && make clean -lint: flake8 py-format clang-format cpplint mypy docstyle spelling +lint: flake8 py-format clang-format cpplint mypy format: py-format-install clang-format-install isort $(PYTHON_FILES) diff --git a/TorchOpt/__init__.py b/TorchOpt/__init__.py index 2fc37a35..f42bd7c6 100644 --- a/TorchOpt/__init__.py +++ b/TorchOpt/__init__.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== +"""TorchOpt: a high-performance optimizer library built upon PyTorch.""" from TorchOpt._src import ( accelerated_op_available, diff --git a/TorchOpt/_src/MetaOptimizer.py b/TorchOpt/_src/MetaOptimizer.py index bd0aea05..f4cbd045 100644 --- a/TorchOpt/_src/MetaOptimizer.py +++ b/TorchOpt/_src/MetaOptimizer.py @@ -45,13 +45,13 @@ def __init__(self, net: nn.Module, impl: base.GradientTransformation): def step(self, loss: torch.Tensor): """Compute the gradients of the loss to the network parameters and update network parameters. - Graph of the derivative will be constructed, allowing to compute higher order derivative products. - We use the differentiable optimizer (pass argument inplace=False) to scale the gradients and update - the network parameters without modifying tensors in-place. + Graph of the derivative will be constructed, allowing to compute higher order derivative products. + We use the differentiable optimizer (pass argument inplace=False) to scale the gradients and update + the network parameters without modifying tensors in-place. - Args: - loss (torch.Tensor): the loss that is used to compute the gradients to the network parameters. - """ + Args: + loss (torch.Tensor): the loss that is used to compute the gradients to the network parameters. + """ # step parameter only for idx, (state, param_containers) in enumerate( zip(self.state_groups, self.param_containers_groups) @@ -83,9 +83,9 @@ def add_param_group(self, net): def state_dict(self): """Extract the references of the optimizer states. - Note that the states are references, so any in-place operations will - change the states inside `MetaOptimizer` at the same time. - """ + Note that the states are references, so any in-place operations will + change the states inside `MetaOptimizer` at the same time. + """ out_groups = tuple(group for group in self.state_groups) return out_groups @@ -98,18 +98,18 @@ class MetaSGD(MetaOptimizer): def __init__( self, - net, + net: nn.Module, lr: ScalarOrSchedule, momentum: Union[float, None] = None, nesterov: bool = False, moment_requires_grad: bool = True ): + """The `init` function. + Args: + net (nn.Module): a network whose parameters should be optimized. + args: other arguments see `alias.sgd`, here we set `moment_requires_grad=True` + to make tensors like momentum be differentiable. """ - Args: - net (nn.Module): a network whose parameters should be optimized. - args: other arguments see `alias.sgd`, here we set `moment_requires_grad=True` - to make tensors like momentum be differentiable. - """ super().__init__( net, sgd( @@ -135,12 +135,12 @@ def __init__( moment_requires_grad: bool = True, use_accelerated_op: bool = False ): + """The `init` function. + Args: + net (nn.Module): a network whose parameters should be optimized. 
+        args: other arguments see `alias.adam`, here we set `moment_requires_grad=True`
+            to make tensors like momentum be differentiable.
        """
-        Args:
-            net (nn.Module): a network whose parameters should be optimized.
-            args: other arguments see `alias.adam`, here we set `moment_requires_grad=True`
-                to make tensors like momentum be differentiable.
-        """
        super().__init__(
            net,
            adam(
@@ -169,12 +169,12 @@ def __init__(
        momentum: Union[float, None] = None,
        nesterov: bool = False
    ):
+        """The `init` function.
+        Args:
+            net (nn.Module): a network whose parameters should be optimized.
+            args: other arguments see `alias.rmsprop`.
        """
-        Args:
-            net (nn.Module): a network whose parameters should be optimized.
-            args: other arguments see `alias.adam`, here we set `moment_requires_grad=True`
-                to make tensors like momentum be differentiable.
-        """
        super().__init__(
            net,
            rmsprop(
diff --git a/TorchOpt/_src/Optimizer.py b/TorchOpt/_src/Optimizer.py
index 780ae971..8544d3da 100644
--- a/TorchOpt/_src/Optimizer.py
+++ b/TorchOpt/_src/Optimizer.py
@@ -12,46 +12,47 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
-from typing import Union
+from typing import Iterable, Union

import jax
import torch

+from TorchOpt._src import base
from TorchOpt._src.alias import adam, rmsprop, sgd
from TorchOpt._src.pytypes import ScalarOrSchedule
from TorchOpt._src.update import apply_updates


class Optimizer(object):
-    """A high-level base class that has the similar with `torch.optim.Optimier`"""
+    """A high-level base class with an interface similar to `torch.optim.Optimizer`."""

-    def __init__(self, params, impl):
+    def __init__(self, params: Iterable, impl: base.GradientTransformation):
+        """The `init` function.
+
+        Args:
+            params (iterable): an iterable of `torch.Tensor`s. Specifies what Tensors should be optimized.
+            impl (base.GradientTransformation): a low level optimizer function, it could be
+                an optimizer function provided by `alias.py` or a customized `chain` provided by
+                `combine.py`. Note that using `Optimizer(sgd())` or `Optimizer(chain(sgd()))`
+                is equivalent to `SGD`.
        """
-        Args:
-            params (iterable): an iterable of `torch.Tensor`s. Specifies what Tensors should be optimized.
-            impl (base.GradientTransformation): a low level optimizer function, it could be
-            a optimizer function provided by `alias.py` or a customerized `chain` provided by
-            `combine.py`. Note that use `MetaOptimizer(sgd())` or `MetaOptimizer(chain(sgd()))
-            is equavalent to `SGD`.
-
-        """
        if not isinstance(params, list):
            params = list(params)
        self.impl = impl
-        self.param_groups = []
-        self.param_tree_groups = []
-        self.state_groups = []
+        self.param_groups = []  # type: ignore
+        self.param_tree_groups = []  # type: ignore
+        self.state_groups = []  # type: ignore
        self.add_param_group(params)

    def zero_grad(self, set_to_none: bool = False):
        """Sets the gradients of all optimized `torch.Tensor`s to zero.

-        The behivour is similar to `torch.optim.Optimizer.zero_grad`.
+        The behaviour is similar to `torch.optim.Optimizer.zero_grad`.

-        Args:
-            set_to_none (bool): instead of setting to zero, set the grads to None.
+        Args:
+            set_to_none (bool): instead of setting to zero, set the grads to None.
-        """
+        """
        for group in self.param_groups:
            if set_to_none:
@@ -73,20 +74,27 @@ def f(p):
        jax.tree_map(f, group)

    def state_dict(self):
+        """Returns the state of the optimizer."""
        return self.state_groups

    def load_state_dict(self, state_dict):
+        """Loads the optimizer state.
+
+        Args:
+            state_dict (dict): optimizer state. Should be an object returned
+                from a call to :meth:`state_dict`.
+        """
        self.state_groups = state_dict

    def step(self, closure=None):
        """Performs a single optimization step (parameter update).

-        The behivour is similar to `torch.optim.Optimizer.step`.
+        The behaviour is similar to `torch.optim.Optimizer.step`.

-        Args:
-            closure (callable, optional): A closure that reevaluates the model and returns the loss.
+        Args:
+            closure (callable, optional): A closure that reevaluates the model and returns the loss.
-        """
+        """
        loss = None
        if closure is not None:
            with torch.enable_grad():
                loss = closure()
@@ -121,11 +129,13 @@ def __init__(
        momentum: Union[float, None] = None,
        nesterov: bool = False
    ):
+        """The `init` function.
+
+        Args:
+            params (iterable): an iterable of `torch.Tensor`s. Specifies what Tensors should be optimized.
+            args: other arguments see `alias.sgd`.
+        """
-        Args:
-            params (iterable): an iterable of `torch.Tensor`s. Specifies what Tensors should be optimized.
-            args: other arguments see `alias.adam`.
-        """
        super().__init__(
            params,
            sgd(
@@ -150,11 +160,12 @@ def __init__(
        eps_root: float = 0.0,
        use_accelerated_op: bool = False
    ):
+        """The `init` function.
+
+        Args:
+            params (iterable): an iterable of `torch.Tensor`s. Specifies what Tensors should be optimized.
+            args: other arguments see `alias.adam`.
        """
-        Args:
-            params (iterable): an iterable of `torch.Tensor`s. Specifies what Tensors should be optimized.
-            args: other arguments see `alias.sgd`.
-        """
        super().__init__(
            params,
            adam(
@@ -183,11 +194,12 @@ def __init__(
        momentum: Union[float, None] = None,
        nesterov: bool = False
    ):
+        """The `init` function.
+
+        Args:
+            params (iterable): an iterable of `torch.Tensor`s. Specifies what Tensors should be optimized.
+            args: other arguments see `alias.rmsprop`.
        """
-        Args:
-            params (iterable): an iterable of `torch.Tensor`s. Specifies what Tensors should be optimized.
-            args: other arguments see `alias.sgd`.
-        """
        super().__init__(
            params,
            rmsprop(
diff --git a/TorchOpt/_src/visual.py b/TorchOpt/_src/visual.py
index aabf1ca3..aa3e9702 100644
--- a/TorchOpt/_src/visual.py
+++ b/TorchOpt/_src/visual.py
@@ -54,8 +54,8 @@ def get_fn_name(fn, show_attrs, max_attr_chars):
    sep = "-" * max(col1width + col2width + 2, len(name))
    attrstr = '%-' + str(col1width) + 's: %' + str(col2width) + 's'

-    def truncate(s): return s[:col2width - 3] + \
-"..." if len(s) > col2width else s
+    def truncate(s):
+        return s[:col2width - 3] + "..." if len(s) > col2width else s

    params = '\n'.join(
        attrstr % (k, truncate(str(v))) for (k, v) in attrs.items()
@@ -67,29 +67,29 @@ def truncate(s): return s[:col2width - 3] + \
def make_dot(
    var, params=None, show_attrs=False, show_saved=False, max_attr_chars=50
):
-    """ Produces Graphviz representation of PyTorch autograd graph.
-
-    If a node represents a backward function, it is gray.
Otherwise, the node - represents a tensor and is either blue, orange, or green: - - Blue: reachable leaf tensors that requires grad (tensors whose `.grad` - fields will be populated during `.backward()`) - - Orange: saved tensors of custom autograd functions as well as those - saved by built-in backward nodes - - Green: tensor passed in as outputs - - Dark green: if any output is a view, we represent its base tensor with - a dark green node. - - Args: - var: output tensor - params: [dict of (name, tensor) or state_dict] to add names to node that requires grad - show_attrs: whether to display non-tensor attributes of backward nodes - (Requires PyTorch version >= 1.9) - show_saved: whether to display saved tensor nodes that are not by custom - autograd functions. Saved tensor nodes for custom functions, if - present, are always displayed. (Requires PyTorch version >= 1.9) - max_attr_chars: if show_attrs is `True`, sets max number of characters - to display for any given attribute. - """ + """Produces Graphviz representation of PyTorch autograd graph. + + If a node represents a backward function, it is gray. Otherwise, the node + represents a tensor and is either blue, orange, or green: + - Blue: reachable leaf tensors that requires grad (tensors whose `.grad` + fields will be populated during `.backward()`) + - Orange: saved tensors of custom autograd functions as well as those + saved by built-in backward nodes + - Green: tensor passed in as outputs + - Dark green: if any output is a view, we represent its base tensor with + a dark green node. + + Args: + var: output tensor + params: [dict of (name, tensor) or state_dict] to add names to node that requires grad + show_attrs: whether to display non-tensor attributes of backward nodes + (Requires PyTorch version >= 1.9) + show_saved: whether to display saved tensor nodes that are not by custom + autograd functions. Saved tensor nodes for custom functions, if + present, are always displayed. (Requires PyTorch version >= 1.9) + max_attr_chars: if show_attrs is `True`, sets max number of characters + to display for any given attribute. + """ if LooseVersion(torch.__version__) < LooseVersion("1.9") and \ (show_attrs or show_saved): warnings.warn( @@ -226,8 +226,8 @@ def add_base_tensor(var, color='darkolivegreen1'): def resize_graph(dot, size_per_element=0.15, min_size=12): """Resize the graph according to how much content it contains. - Modify the graph in place. - """ + Modify the graph in place. + """ # Get the approximate number of nodes and edges num_rows = len(dot.body) content_size = num_rows * size_per_element diff --git a/docker/dev.dockerfile b/docker/dev.dockerfile new file mode 100644 index 00000000..6c86fee0 --- /dev/null +++ b/docker/dev.dockerfile @@ -0,0 +1,25 @@ +#!/bin/bash + +CPU_PARENT=ubuntu:18.04 +GPU_PARENT=nvidia/cuda:10.1-cudnn7-runtime-ubuntu18.04 + +TAG=metaopt/TorchOpt +VERSION=$(cat ./stable_baselines3/version.txt) + +if [[ ${USE_GPU} == "True" ]]; then + PARENT=${GPU_PARENT} + PYTORCH_DEPS="cudatoolkit=10.1" +else + PARENT=${CPU_PARENT} + PYTORCH_DEPS="cpuonly" + TAG="${TAG}-cpu" +fi + +echo "docker build --build-arg PARENT_IMAGE=${PARENT} --build-arg PYTORCH_DEPS=${PYTORCH_DEPS} -t ${TAG}:${VERSION} ." +docker build --build-arg PARENT_IMAGE=${PARENT} --build-arg PYTORCH_DEPS=${PYTORCH_DEPS} -t ${TAG}:${VERSION} . 
+docker tag ${TAG}:${VERSION} ${TAG}:latest + +if [[ ${RELEASE} == "True" ]]; then + docker push ${TAG}:${VERSION} + docker push ${TAG}:latest +fi diff --git a/setup.cfg b/setup.cfg index c1893e6b..52dc6283 100644 --- a/setup.cfg +++ b/setup.cfg @@ -38,6 +38,5 @@ warn_unreachable = True warn_unused_configs = True warn_unused_ignores = True - [doc8] max-line-length = 200 \ No newline at end of file From f385ad9111d3673b25dfae861730ba165cf43a89 Mon Sep 17 00:00:00 2001 From: Xuehai Pan Date: Sat, 2 Jul 2022 21:47:16 +0800 Subject: [PATCH 06/19] fix(CMakeLists.txt): fix building from source (#19) * fix(CMakeLists.txt): include all torch library and include paths --- CMakeLists.txt | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 03344847..546a4f26 100755 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -76,12 +76,12 @@ endif() system( STRIP OUTPUT_VARIABLE PYTHON_EXECUTABLE - COMMAND which "${PYTHON_EXECUTABLE}" + COMMAND bash -c "type -P '${PYTHON_EXECUTABLE}'" ) system( STRIP OUTPUT_VARIABLE PYTHON_VERSION - COMMAND "${PYTHON_EXECUTABLE}" --version + COMMAND "${PYTHON_EXECUTABLE}" -c "print(__import__('platform').python_version())" ) message("-- Use Python version: ${PYTHON_VERSION}") @@ -106,7 +106,7 @@ if(NOT DEFINED TORCH_INCLUDE_PATH) message("-- Auto detecting PyTorch include directory...") system( STRIP OUTPUT_VARIABLE TORCH_INCLUDE_PATH - COMMAND "${PYTHON_EXECUTABLE}" -c "print(__import__('torch.utils.cpp_extension', fromlist=[None]).include_paths()[0])" + COMMAND "${PYTHON_EXECUTABLE}" -c "print('\\\;'.join(__import__('torch.utils.cpp_extension', fromlist=[None]).include_paths()))" ) endif() @@ -121,7 +121,7 @@ if(NOT DEFINED TORCH_LIBRARY_PATH) message("-- Auto detecting PyTorch library directory...") system( STRIP OUTPUT_VARIABLE TORCH_LIBRARY_PATH - COMMAND "${PYTHON_EXECUTABLE}" -c "print(__import__('torch.utils.cpp_extension', fromlist=[None]).library_paths()[0])" + COMMAND "${PYTHON_EXECUTABLE}" -c "print('\\\;'.join(__import__('torch.utils.cpp_extension', fromlist=[None]).library_paths()))" ) endif() @@ -140,6 +140,7 @@ message("-- Detected Torch libraries: \"${TORCH_LIBRARIES}\"") add_definitions(-D_GLIBCXX_USE_CXX11_ABI=0) +set(PYBIND11_PYTHON_VERSION "${PYTHON_VERSION}") add_subdirectory("third_party/pybind11") include_directories(include) From dceae5092e5c93a9f9bd5407fa63b299ff153410 Mon Sep 17 00:00:00 2001 From: Xuehai Pan Date: Mon, 4 Jul 2022 23:08:33 +0800 Subject: [PATCH 07/19] chore: rename package to follow PEP8 naming convention (#20) * chore: rename package to follow PEP8 naming convention * fix: fix building from source * chore: add .editorconfig * style: reindent code * chore: reorganize submodules to follow PEP8 naming convention * fix: fix type hints * style: resolve isort * style: format code * style: format code with yapf * fix(.github): fix issue template * fix(torchopt): delete pre-compiled .so Co-authored-by: Benjamin-eecs --- .editorconfig | 35 ++ .github/ISSUE_TEMPLATE/bug_report.md | 8 +- .github/PULL_REQUEST_TEMPLATE.md | 4 +- .gitignore | 6 +- CITATION.cff | 2 +- CMakeLists.txt | 2 +- Makefile | 8 +- README.md | 71 +-- TorchOpt/__init__.py | 64 --- TorchOpt/_lib/adam_op.py | 60 --- TorchOpt/_src/MetaOptimizer.py | 189 ------- TorchOpt/_src/Optimizer.py | 214 -------- .../_src/accelerated_op/adam_op/AdamOp.py | 120 ----- TorchOpt/_src/alias.py | 190 ------- TorchOpt/_src/base.py | 144 ----- TorchOpt/_src/clip.py | 90 ---- TorchOpt/_src/schedule.py | 101 ---- TorchOpt/_src/transform.py | 
469 ---------------- TorchOpt/_src/utils.py | 190 ------- TorchOpt/_src/visual.py | 236 -------- docker/dev.dockerfile | 4 +- docs/conf.py | 14 +- docs/index.rst | 17 +- examples/L2R/README.md | 13 +- examples/L2R/helper/argument.py | 51 +- examples/L2R/helper/model.py | 71 ++- examples/L2R/helper/utils.py | 263 +++++---- examples/L2R/train_l2r.py | 435 +++++++-------- examples/LOLA/README.md | 12 +- examples/LOLA/helper/agent.py | 42 +- examples/LOLA/helper/argument.py | 42 +- examples/LOLA/helper/env.py | 120 ++--- examples/LOLA/helper/utils.py | 162 +++--- examples/LOLA/lola_dice.py | 175 +++--- examples/LOLA/visualise.py | 20 +- examples/MAML-RL/README.md | 12 +- examples/MAML-RL/helpers/Tabular_mdp.py | 170 +++--- examples/MAML-RL/helpers/__init__.py | 16 +- examples/MAML-RL/helpers/policy.py | 44 +- examples/MAML-RL/run_MAML.py | 293 +++++----- examples/MGRL/README.md | 8 +- examples/MGRL/toy.py | 120 ++--- examples/few-shot/README.md | 13 +- examples/few-shot/maml-omniglot.py | 399 +++++++------- examples/few-shot/support/omniglot_loaders.py | 510 +++++++++--------- examples/visualize.py | 108 ++-- include/adam_op/adam_op.h | 6 +- include/adam_op/adam_op_impl.cuh | 6 +- include/adam_op/adam_op_impl.h | 6 +- include/common.h | 2 +- include/utils.h | 4 +- setup.cfg | 11 +- setup.py | 190 ++++--- src/adam_op/CMakeLists.txt | 2 +- src/adam_op/adam_op.cpp | 18 +- src/adam_op/adam_op_impl.cpp | 9 +- src/adam_op/adam_op_impl.cu | 6 +- .../high_level/test_high_level_inplace.py | 326 ++++++----- .../unit/low_level/test_low_level_inplace.py | 349 ++++++------ tests/unit/test_clip.py | 95 ++-- tests/unit/test_schedule.py | 50 +- torchopt/__init__.py | 64 +++ {TorchOpt => torchopt}/_lib/__init__.py | 0 torchopt/_lib/adam_op.py | 57 ++ {TorchOpt => torchopt}/_src/__init__.py | 2 +- .../_src/accelerated_op/__init__.py | 28 +- .../_src/accelerated_op/adam_op/__init__.py | 2 +- .../_src/accelerated_op/adam_op/adam_op.py | 116 ++++ torchopt/_src/alias.py | 205 +++++++ torchopt/_src/base.py | 151 ++++++ torchopt/_src/clip.py | 88 +++ {TorchOpt => torchopt}/_src/combine.py | 53 +- {TorchOpt => torchopt}/_src/hook.py | 30 +- torchopt/_src/optimizer/__init__.py | 20 + torchopt/_src/optimizer/adam.py | 55 ++ torchopt/_src/optimizer/base.py | 127 +++++ torchopt/_src/optimizer/meta/__init__.py | 19 + torchopt/_src/optimizer/meta/adam.py | 56 ++ torchopt/_src/optimizer/meta/base.py | 94 ++++ torchopt/_src/optimizer/meta/rmsprop.py | 58 ++ torchopt/_src/optimizer/meta/sgd.py | 54 ++ torchopt/_src/optimizer/rmsprop.py | 58 ++ torchopt/_src/optimizer/sgd.py | 45 ++ torchopt/_src/schedule.py | 111 ++++ torchopt/_src/transform.py | 472 ++++++++++++++++ .../pytypes.py => torchopt/_src/typing.py | 0 {TorchOpt => torchopt}/_src/update.py | 61 +-- torchopt/_src/utils.py | 197 +++++++ torchopt/_src/visual.py | 238 ++++++++ tutorials/1_Functional_Optimizer.ipynb | 32 +- tutorials/2_Visualization.ipynb | 18 +- tutorials/3_Meta_Optimizer.ipynb | 72 +-- tutorials/4_Stop_Gradient.ipynb | 34 +- 93 files changed, 4555 insertions(+), 4479 deletions(-) create mode 100644 .editorconfig delete mode 100644 TorchOpt/__init__.py delete mode 100644 TorchOpt/_lib/adam_op.py delete mode 100644 TorchOpt/_src/MetaOptimizer.py delete mode 100644 TorchOpt/_src/Optimizer.py delete mode 100644 TorchOpt/_src/accelerated_op/adam_op/AdamOp.py delete mode 100644 TorchOpt/_src/alias.py delete mode 100644 TorchOpt/_src/base.py delete mode 100644 TorchOpt/_src/clip.py delete mode 100644 TorchOpt/_src/schedule.py delete mode 100644 
TorchOpt/_src/transform.py delete mode 100644 TorchOpt/_src/utils.py delete mode 100644 TorchOpt/_src/visual.py create mode 100644 torchopt/__init__.py rename {TorchOpt => torchopt}/_lib/__init__.py (100%) create mode 100644 torchopt/_lib/adam_op.py rename {TorchOpt => torchopt}/_src/__init__.py (91%) rename {TorchOpt => torchopt}/_src/accelerated_op/__init__.py (61%) rename {TorchOpt => torchopt}/_src/accelerated_op/adam_op/__init__.py (91%) create mode 100644 torchopt/_src/accelerated_op/adam_op/adam_op.py create mode 100644 torchopt/_src/alias.py create mode 100644 torchopt/_src/base.py create mode 100644 torchopt/_src/clip.py rename {TorchOpt => torchopt}/_src/combine.py (58%) rename {TorchOpt => torchopt}/_src/hook.py (56%) create mode 100644 torchopt/_src/optimizer/__init__.py create mode 100644 torchopt/_src/optimizer/adam.py create mode 100644 torchopt/_src/optimizer/base.py create mode 100644 torchopt/_src/optimizer/meta/__init__.py create mode 100644 torchopt/_src/optimizer/meta/adam.py create mode 100644 torchopt/_src/optimizer/meta/base.py create mode 100644 torchopt/_src/optimizer/meta/rmsprop.py create mode 100644 torchopt/_src/optimizer/meta/sgd.py create mode 100644 torchopt/_src/optimizer/rmsprop.py create mode 100644 torchopt/_src/optimizer/sgd.py create mode 100644 torchopt/_src/schedule.py create mode 100644 torchopt/_src/transform.py rename TorchOpt/_src/pytypes.py => torchopt/_src/typing.py (100%) rename {TorchOpt => torchopt}/_src/update.py (54%) create mode 100644 torchopt/_src/utils.py create mode 100644 torchopt/_src/visual.py mode change 100755 => 100644 tutorials/1_Functional_Optimizer.ipynb mode change 100755 => 100644 tutorials/4_Stop_Gradient.ipynb diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000..1ee2f625 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,35 @@ +# https://editorconfig.org/ + +root = true + +[*] +charset = utf-8 +end_of_line = lf +indent_style = space +indent_size = 4 +trim_trailing_whitespace = true +insert_final_newline = true + +[*.py] +indent_size = 4 +src_paths=torchopt,tests,examples + +[*.md] +indent_size = 2 +x-soft-wrap-text = true + +[*.rst] +indent_size = 4 +x-soft-wrap-text = true + +[Makefile] +indent_style = tab + +[*.cpp] +indent_size = 2 + +[*.h] +indent_size = 2 + +[*.cuh?] +indent_size = 2 diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index a74f7620..9520f2ee 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -20,7 +20,7 @@ Please try to provide a minimal example to reproduce the bug. Error messages and Please use the markdown code blocks for both code and stack traces. 
```python -import metarl +import torchopt ``` ```bash @@ -43,8 +43,8 @@ Describe the characteristic of your environment: * Versions of any other relevant libraries ```python -import metarl, numpy, sys -print(metarl.__version__, numpy.__version__, sys.version, sys.platform) +import torchopt, numpy, sys +print(torchopt.__version__, numpy.__version__, sys.version, sys.platform) ``` ## Additional context @@ -58,5 +58,5 @@ If you know or suspect the reason for this bug, paste the code lines and suggest ## Checklist - [ ] I have checked that there is no similar issue in the repo (**required**) -- [ ] I have read the [documentation](https://metarl.readthedocs.io/) (**required**) +- [ ] I have read the [documentation](https://torchopt.readthedocs.io/) (**required**) - [ ] I have provided a minimal working example to reproduce the bug (**required**) diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 064d15bc..b19443c7 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -8,7 +8,7 @@ Why is this change required? What problem does it solve? If it fixes an open issue, please link to the issue here. You can use the syntax `close #15213` if this solves the issue #15213 -- [ ] I have raised an issue to propose this change ([required](https://metarl.readthedocs.io/en/latest/pages/contributing.html) for new features and bug fixes) +- [ ] I have raised an issue to propose this change ([required](https://torchopt.readthedocs.io/en/latest/pages/contributing.html) for new features and bug fixes) ## Types of changes @@ -32,7 +32,7 @@ What types of changes does your code introduce? Put an `x` in all the boxes that Go over all the following points, and put an `x` in all the boxes that apply. If you are unsure about any of these, don't hesitate to ask. We are here to help! -- [ ] I have read the [CONTRIBUTION](https://metarl.readthedocs.io/en/latest/pages/contributing.html) guide (**required**) +- [ ] I have read the [CONTRIBUTION](https://torchopt.readthedocs.io/en/latest/pages/contributing.html) guide (**required**) - [ ] My change requires a change to the documentation. - [ ] I have updated the tests accordingly (*required for a bug fix or a new feature*). - [ ] I have updated the documentation accordingly. 
diff --git a/.gitignore b/.gitignore
index 5a67f740..87e9b834 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,8 +2,8 @@
.idea
build
__pycache__
-TorchOpt/**/*.so
-TorchOpt.egg-info
+torchopt/**/*.so
+torchopt.egg-info
dist
**/.ipynb_checkpoints/*
@@ -152,4 +152,4 @@ dmypy.json
.pytype/

# Cython debug symbols
-cython_debug/
\ No newline at end of file
+cython_debug/
diff --git a/CITATION.cff b/CITATION.cff
index 5c239556..fdfacfc4 100644
--- a/CITATION.cff
+++ b/CITATION.cff
@@ -27,4 +27,4 @@ authors:
version: 0.4.1
date-released: "2022-04-09"
license: Apache-2.0
-repository-code: "https://github.com/metaopt/TorchOpt"
+repository-code: "https://github.com/metaopt/torchopt"
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 546a4f26..808d40c5 100755
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -14,7 +14,7 @@
# ==============================================================================
cmake_minimum_required(VERSION 3.1)
-project(TorchOpt LANGUAGES CXX CUDA)
+project(torchopt LANGUAGES CXX CUDA)

find_package(CUDA REQUIRED)
diff --git a/Makefile b/Makefile
index fc0ade67..6e07d1a1 100644
--- a/Makefile
+++ b/Makefile
@@ -1,9 +1,9 @@
print-% : ; @echo $* = $($*)
SHELL = /bin/bash
-PROJECT_NAME = TorchOpt
+PROJECT_NAME = torchopt
PROJECT_PATH = ${PROJECT_NAME}/
PROJECT_FOLDER = $(PROJECT_NAME) examples include src tests
-PYTHON_FILES = $(shell find . -type f -name "*.py")
+PYTHON_FILES = $(shell find examples torchopt tests -type f -name "*.py" -o -name "*.pyi")
CPP_FILES = $(shell find . -type f -name "*.h" -o -name "*.cpp" -o -name "*.cuh" -o -name "*.cu")
COMMIT_HASH = $(shell git log -1 --format=%h)
COPYRIGHT = "MetaOPT Team. All Rights Reserved."
@@ -66,7 +66,8 @@ flake8: flake8-install
	flake8 $(PYTHON_FILES) --count --select=E9,F63,F7,F82,E225,E251 --show-source --statistics

py-format: py-format-install
-	isort --check $(PYTHON_FILES) && yapf -ir $(PYTHON_FILES)
+	isort --project torchopt --check $(PYTHON_FILES) && \
+	yapf --in-place --recursive $(PYTHON_FILES)

mypy: mypy-install
	mypy $(PROJECT_NAME)
@@ -103,4 +104,3 @@ format: py-format-install clang-format-install
	yapf -ir $(PYTHON_FILES)
	clang-format-11 -style=file -i $(CPP_FILES)
	addlicense -c $(COPYRIGHT) -l apache -y 2022 $(PROJECT_FOLDER)
-
diff --git a/README.md b/README.md
index 4ceb9de3..24f53664 100644
--- a/README.md
+++ b/README.md
@@ -4,7 +4,7 @@
**TorchOpt** is a high-performance optimizer library built upon [PyTorch](https://pytorch.org/) for easy implementation of functional optimization and gradient-based meta-learning. It consists of two main features:

-- TorchOpt provides functional optimizer which enables [JAX-like](https://github.com/google/jax) composable functional optimizer for PyTorch. With TorchOpt, one can easily conduct neural network optimization in PyTorch with functional style optimizer, similar to [Optax](https://github.com/deepmind/optax) in JAX.
+- TorchOpt provides functional optimizer which enables [JAX-like](https://github.com/google/jax) composable functional optimizer for PyTorch. With TorchOpt, one can easily conduct neural network optimization in PyTorch with functional style optimizer, similar to [Optax](https://github.com/deepmind/optax) in JAX.
- With the design of functional programming, TorchOpt provides efficient, flexible, and easy-to-implement differentiable optimizer for gradient-based meta-learning research. It largely reduces the efforts required to implement sophisticated meta-learning algorithms.
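The "composable functional optimizer" bullet above can be made concrete with the chaining pattern that `tests/unit/test_clip.py` (earlier in this series) exercises. A minimal sketch, assuming the post-rename `torchopt` layout; the model and hyper-parameters are placeholders:

```python
import torch.nn as nn

import torchopt

net = nn.Linear(4, 2)

# Compose gradient transformations: clip by global norm first, then apply SGD.
impl = torchopt.combine.chain(
    torchopt.clip.clip_grad_norm(max_norm=10.0),
    torchopt.sgd(lr=1.0),
)
optim = torchopt.Optimizer(net.parameters(), impl)
```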
--------------------------------------------------------------------------------

@@ -21,35 +21,36 @@ The README is organized as follows:
- [Installation](#installation)
- [Future Plan](#future-plan)
- [The Team](#the-team)
+- [Citing TorchOpt](#citing-torchopt)

## TorchOpt as Functional Optimizer
The design of TorchOpt follows the philosophy of functional programming. Aligned with [functorch](https://github.com/pytorch/functorch), users can conduct functional-style programming with models, optimizers and training in PyTorch. We use the Adam optimizer as an example in the following illustration. You can also check out the tutorial notebook [Functional Optimizer](./tutorials/1_Functional_Optimizer.ipynb) for more details.
### Optax-Like API
-For those users who prefer fully functional programing, we offer Optax-Like API by passing gradients and optimizers states to the optimizer function. We design base class `TorchOpt.Optimizer` that has the same interface as `torch.optim.Optimizer`. Here is an example coupled with functorch:
+For those users who prefer fully functional programming, we offer an Optax-Like API by passing gradients and optimizer states to the optimizer function. We design the base class `torchopt.Optimizer` that has the same interface as `torch.optim.Optimizer`. Here is an example coupled with functorch:
```python
-import torch
-from torch import nn
-from torch import data
-from nn import functional as F
import functorch
-import TorchOpt
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+import torchopt
+from torch.utils.data import DataLoader

-class Net(nn.Module):...
+class Net(nn.Module): ...

-class Loader(data.DataLoader):...
+class Loader(DataLoader): ...

net = Net() # init
loader = Loader()
-optimizer = TorchOpt.adam()
+optimizer = torchopt.adam()
func, params = functorch.make_functional(net) # use functorch to extract the network parameters
opt_state = optimizer.init(params) # init optimizer
xs, ys = next(loader) # get data
pred = func(params, xs) # forward
-loss = F.cross_entropy(pred, ys) # compute loss
+loss = F.cross_entropy(pred, ys) # compute loss

grad = torch.autograd.grad(loss, params) # compute gradients
updates, opt_state = optimizer.update(grad, opt_state) # get updates
-params = TorchOpt.apply_updates(params, updates) # update network parameters
+params = torchopt.apply_updates(params, updates) # update network parameters
```
### PyTorch-Like API
We also offer the original PyTorch APIs (e.g. `zero_grad()` or `step()`) by wrapping our Optax-Like API for traditional PyTorch users:
```python
net = Net() # init
loader = Loader()
-optimizer = TorchOpt.Adam(net.parameters())
+optimizer = torchopt.Adam(net.parameters())
xs, ys = next(loader) # get data
pred = net(xs) # forward
loss = F.cross_entropy(pred, ys) # compute loss
@@ -71,15 +72,15 @@ On top of the same optimization function as `torch.optim`, an important benefit
# get updates
updates, opt_state = optimizer.update(grad, opt_state, inplace=False)
# update network parameters
-params = TorchOpt.apply_updates(params, updates, inplace=False)
+params = torchopt.apply_updates(params, updates, inplace=False)
```

## TorchOpt as Differentiable Optimizer for Meta-Learning
-Meta-Learning has gained enormous attention in both Supervised Learning and Reinforcement Learning.
Meta-Learning algorithms often contain a bi-level optimisation process with *inner loop* updating the network parameters and *outer loop* updating meta parameters. The figure below illustrates the basic formulation for meta-optimization in Meta-Learning. The main feature is that the gradients of *outer loss* will back-propagate through all `inner.step` operations. +Meta-Learning has gained enormous attention in both Supervised Learning and Reinforcement Learning. Meta-Learning algorithms often contain a bi-level optimisation process with *inner loop* updating the network parameters and *outer loop* updating meta parameters. The figure below illustrates the basic formulation for meta-optimization in Meta-Learning. The main feature is that the gradients of *outer loss* will back-propagate through all `inner.step` operations.
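To see how the gradients of the *outer loss* flow through `inner.step`, here is a one-inner-step sketch built from the functional API shown in the previous section. The random data, the `nn.Linear` model, and the single-step structure are illustrative assumptions; the `update(..., inplace=False)`, `apply_updates(..., inplace=False)`, and `create_graph=True` calls mirror the usage documented in this patch series:

```python
import functorch
import torch
import torch.nn as nn
import torch.nn.functional as F

import torchopt

net = nn.Linear(3, 1)
func, params = functorch.make_functional(net)

optimizer = torchopt.sgd(lr=0.1)
opt_state = optimizer.init(params)

xs, ys = torch.randn(8, 3), torch.randn(8, 1)

# Inner step with inplace=False: the updated parameters remain nodes of the
# autograd graph instead of being overwritten in place.
inner_loss = F.mse_loss(func(params, xs), ys)
grads = torch.autograd.grad(inner_loss, params, create_graph=True)
updates, opt_state = optimizer.update(grads, opt_state, inplace=False)
new_params = torchopt.apply_updates(params, updates, inplace=False)

# The outer loss differentiates through the inner update above, so the
# meta-gradients reach the pre-update parameters.
outer_loss = F.mse_loss(func(new_params, xs), ys)
meta_grads = torch.autograd.grad(outer_loss, params)
```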
-Since network parameters become a node of computation graph, a flexible Meta-Learning library should enable users manually control the gradient graph connection which means that users should have access to the network parameters and optimizer states for manually detaching or connecting the computation graph. In PyTorch designing, the network parameters or optimizer states are members of network (a.k.a. `nn.Module`) or optimizer (a.k.a. `optim.Optimizer`), this design significantly introducing difficulty for user control network parameters or optimizer states. Previous differentiable optimizer Repo [higher](https://github.com/facebookresearch/higher), [learn2learn](https://github.com/learnables/learn2learn) follows the PyTorch designing which leads to inflexible API.
+Since network parameters become a node of the computation graph, a flexible Meta-Learning library should let users manually control the gradient graph connections, which means that users should have access to the network parameters and optimizer states for manually detaching or connecting the computation graph. In the PyTorch design, the network parameters and optimizer states are members of the network (a.k.a. `nn.Module`) or the optimizer (a.k.a. `optim.Optimizer`); this design makes it significantly harder for users to control network parameters or optimizer states. Previous differentiable optimizer repositories [higher](https://github.com/facebookresearch/higher) and [learn2learn](https://github.com/learnables/learn2learn) follow the PyTorch design, which leads to an inflexible API.

In contrast to them, TorchOpt realizes the differentiable optimizer with functional programming, where Meta-Learning researchers can control the network parameters or optimizer states as normal variables (a.k.a. `torch.Tensor`). This functional optimizer design of TorchOpt is beneficial for implementing complex gradient-flow Meta-Learning algorithms and allows us to improve computational efficiency by using techniques like operator fusion.
@@ -91,8 +92,8 @@ We hope meta-learning researchers could control the network parameters or optimi

### Meta-Learning API

-- We design a base class `TorchOpt.MetaOptimizer` for managing network updates in Meta-Learning. The constructor of `MetaOptimizer` takes as input the network rather than network parameters. `MetaOptimizer` exposed interface `step(loss)` takes as input the loss for step the network parameter. Refer to the tutorial notebook [Meta Optimizer](./tutorials/2_Meta_Optimizer.ipynb) for more details.
-- We offer `TorchOpt.chain` which can apply a list of chainable update transformations. Combined with `MetaOptimizer`, it can help you conduct gradient transformation such as gradient clip before the Meta optimizer steps. Refer to the tutorial notebook [Meta Optimizer](./tutorials/2_Meta_Optimizer.ipynb) for more details.
+- We design a base class `torchopt.MetaOptimizer` for managing network updates in Meta-Learning. The constructor of `MetaOptimizer` takes as input the network rather than the network parameters. `MetaOptimizer` exposes the interface `step(loss)`, which takes as input the loss used to step the network parameters. Refer to the tutorial notebook [Meta Optimizer](./tutorials/2_Meta_Optimizer.ipynb) for more details.
+- We offer `torchopt.chain` which can apply a list of chainable update transformations. Combined with `MetaOptimizer`, it can help you conduct gradient transformations such as gradient clipping before the Meta optimizer steps.
Refer to the tutorial notebook [Meta Optimizer](./tutorials/2_Meta_Optimizer.ipynb) for more details.
- We observe that different Meta-Learning algorithms vary in inner-loop parameter recovery. TorchOpt provides basic functions for users to extract or recover network parameters and optimizer states anytime and anywhere they want.
- Some algorithms such as [MGRL](https://proceedings.neurips.cc/paper/2018/file/2715518c875999308842e3455eda2fe3-Paper.pdf) initialize the inner-loop parameters inherited from the previous inner-loop process when conducting a new bi-level process. TorchOpt also provides a finer function `stop_gradient` for manipulating the gradient graph, which is helpful for this kind of algorithms. Refer to the notebook [Stop Gradient](./tutorials/4_Stop_Gradient.ipynb) for more details.

@@ -101,40 +102,40 @@ We give an example of [MAML](https://arxiv.org/abs/1703.03400) with inner-loop A
```python
net = Net() # init
# the constructor `MetaOptimizer` takes as input the network
-inner_optim = TorchOpt.MetaAdam(net)
-outer_optim = TorchOpt.Adam(net.parameters())
+inner_optim = torchopt.MetaAdam(net)
+outer_optim = torchopt.Adam(net.parameters())

for train_iter in range(train_iters):
    outer_loss = 0
    for task in range(tasks):
        loader = Loader(tasks)
-
+        # store states at the initial points
-        net_state = TorchOpt.extract_state_dict(net) # extract state
-        optim_state = TorchOpt.extract_state_dict(inner_optim)
+        net_state = torchopt.extract_state_dict(net) # extract state
+        optim_state = torchopt.extract_state_dict(inner_optim)

        for inner_iter in range(inner_iters):
            # compute inner loss and perform inner update
            xs, ys = next(loader)
            pred = net(xs)
-            inner_loss = F.cross_entropy(pred, ys)
+            inner_loss = F.cross_entropy(pred, ys)
            inner_optim.step(inner_loss)

        # compute outer loss and back-propagate
-        xs, ys = next(loader)
+        xs, ys = next(loader)
        pred = net(xs)
        outer_loss += F.cross_entropy(pred, ys)
-
+        # recover network and optimizer states at the initial point for the next task
-        TorchOpt.recover_state_dict(inner_optim, optim_state)
-        TorchOpt.recover_state_dict(net, net_state)
-
+        torchopt.recover_state_dict(inner_optim, optim_state)
+        torchopt.recover_state_dict(net, net_state)
+
    outer_loss /= len(tasks) # task average
    outer_optim.zero_grad()
    outer_loss.backward()
    outer_optim.step()

    # stop gradient if necessary
-    TorchOpt.stop_gradient(net)
-    TorchOpt.stop_gradient(inner_optim)
+    torchopt.stop_gradient(net)
+    torchopt.stop_gradient(inner_optim)
```
## Examples
In *examples/*, we offer several examples of functional optimizer and 5 light-weight meta-learning examples with TorchOpt. The meta-learning examples cover 2 Supervised Learning and 3 Reinforcement Learning algorithms.
@@ -168,13 +169,13 @@ Requirements
- (Optional) For visualizing computation graphs
  - [Graphviz](https://graphviz.org/download/) (for Linux users use `apt/yum install graphviz` or `conda install -c anaconda python-graphviz`)
```bash
-pip install TorchOpt
+pip install torchopt
```
You can also build shared libraries from source, use:
```bash
-git clone git@github.com:metaopt/TorchOpt.git
-cd TorchOpt
+git clone git@github.com:metaopt/torchopt.git
+cd torchopt
python setup.py build_from_source
```
## Future Plan
@@ -196,6 +197,6 @@ If you find TorchOpt useful, please cite it in your publications.
year = {2022}, publisher = {GitHub}, journal = {GitHub repository}, - howpublished = {\url{https://github.com/metaopt/TorchOpt}}, + howpublished = {\url{https://github.com/metaopt/torchopt}}, } ``` diff --git a/TorchOpt/__init__.py b/TorchOpt/__init__.py deleted file mode 100644 index f42bd7c6..00000000 --- a/TorchOpt/__init__.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright 2022 MetaOPT Team. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -"""TorchOpt: a high-performance optimizer library built upon PyTorch.""" - -from TorchOpt._src import ( - accelerated_op_available, - clip, - combine, - hook, - schedule, - visual, -) -from TorchOpt._src.alias import adam, rmsprop, sgd -from TorchOpt._src.MetaOptimizer import ( - MetaAdam, - MetaOptimizer, - MetaRMSProp, - MetaSGD, -) -from TorchOpt._src.Optimizer import SGD, Adam, Optimizer, RMSProp -from TorchOpt._src.update import apply_updates -from TorchOpt._src.utils import ( - extract_state_dict, - recover_state_dict, - stop_gradient, -) - -__version__ = "0.4.1" - -__all__ = ( - "accelerated_op_available", - "clip", - "combine", - "hook", - "schedule", - "visual", - "adam", - "rmsprop", - "sgd", - "MetaAdam", - "MetaOptimizer", - "MetaRMSProp", - "MetaSGD", - "SGD", - "Adam", - "Optimizer", - "RMSProp", - "apply_updates", - "extract_state_dict", - "recover_state_dict", - "stop_gradient", -) diff --git a/TorchOpt/_lib/adam_op.py b/TorchOpt/_lib/adam_op.py deleted file mode 100644 index ceb2eb9e..00000000 --- a/TorchOpt/_lib/adam_op.py +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright 2022 MetaOPT Team. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -import torch - - -def forward_( - updates: torch.Tensor, mu: torch.Tensor, nu: torch.Tensor, b1: float, - b2: float, eps: float, eps_root: float, count: int -) -> torch.Tensor: - ... - - -def forwardMu( - updates: torch.Tensor, mu: torch.Tensor, b1: float -) -> torch.Tensor: - ... - - -def forwardNu( - updates: torch.Tensor, nu: torch.Tensor, b2: float -) -> torch.Tensor: - ... - - -def forwardUpdates( - new_mu: torch.Tensor, new_nu: torch.Tensor, b1: float, b2: float, eps: float, - eps_root: float, count: int -) -> torch.Tensor: - ... - - -def backwardMu( - dmu: torch.Tensor, updates: torch.Tensor, mu: torch.Tensor, b1: float -) -> torch.Tensor: - ... 
- - -def backwardNu( - dnu: torch.Tensor, updates: torch.Tensor, nu: torch.Tensor, b2: float -) -> torch.Tensor: - ... - - -def backwardUpdates( - dupdates: torch.Tensor, updates: torch.Tensor, new_mu: torch.Tensor, - new_nu: torch.Tensor, b1: float, b2: float, count: int -) -> torch.Tensor: - ... diff --git a/TorchOpt/_src/MetaOptimizer.py b/TorchOpt/_src/MetaOptimizer.py deleted file mode 100644 index f4cbd045..00000000 --- a/TorchOpt/_src/MetaOptimizer.py +++ /dev/null @@ -1,189 +0,0 @@ -# Copyright 2022 MetaOPT Team. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -from typing import Union - -import jax -import torch -from torch import nn - -from TorchOpt._src import base -from TorchOpt._src.alias import adam, rmsprop, sgd -from TorchOpt._src.pytypes import ScalarOrSchedule -from TorchOpt._src.update import apply_updates - - -class MetaOptimizer(object): - """A high-level optimizer base class for meta learning.""" - - def __init__(self, net: nn.Module, impl: base.GradientTransformation): - """ - Args: - net (nn.Module): a network whose parameters should be optimized. - impl (base.GradientTransformation): a low level optimizer function, it could be a - optimizer function provided by `alias.py` or a customerized `chain` provided by - `combine.py`. Note that use `MetaOptimizer(sgd(moment_requires_grad=True))` or - `MetaOptimizer(chain(sgd(moment_requires_grad=True))) is equavalent to `MetaSGD`. - """ - self.impl = impl - self.param_containers_groups = [] # type: ignore - self.state_groups = [] # type: ignore - - self.add_param_group(net) - - def step(self, loss: torch.Tensor): - """Compute the gradients of the loss to the network parameters and update network parameters. - - Graph of the derivative will be constructed, allowing to compute higher order derivative products. - We use the differentiable optimizer (pass argument inplace=False) to scale the gradients and update - the network parameters without modifying tensors in-place. - - Args: - loss (torch.Tensor): the loss that is used to compute the gradients to the network parameters. 
- """ - # step parameter only - for idx, (state, param_containers) in enumerate( - zip(self.state_groups, self.param_containers_groups) - ): - flatten_params, containers_tree = jax.tree_util.tree_flatten( - param_containers - ) - flatten_params = tuple(flatten_params) - grad = torch.autograd.grad( - loss, flatten_params, create_graph=True, allow_unused=True - ) - updates, state = self.impl.update(grad, state, False) - self.state_groups[idx] = state - new_params = apply_updates(flatten_params, updates, inplace=False) - unflatten_new_params = containers_tree.unflatten(new_params) - for (container, - unflatten_param) in zip(param_containers, unflatten_new_params): - container.update(unflatten_param) - - def add_param_group(self, net): - from TorchOpt.utils import _extract_container - net_container = _extract_container(net, with_buffer=False) - flatten_param, _ = jax.tree_util.tree_flatten(net_container) - flatten_param = tuple(flatten_param) - optim_state = self.impl.init(flatten_param) - self.state_groups.append(optim_state) - self.param_containers_groups.append(net_container) - - def state_dict(self): - """Extract the references of the optimizer states. - - Note that the states are references, so any in-place operations will - change the states inside `MetaOptimizer` at the same time. - """ - out_groups = tuple(group for group in self.state_groups) - return out_groups - - def load_state_dict(self, state_dict): - self.state_groups = list(group for group in state_dict) - - -class MetaSGD(MetaOptimizer): - """A canonical Stochastic Gradient Descent optimiser.""" - - def __init__( - self, - net: nn.Module, - lr: ScalarOrSchedule, - momentum: Union[float, None] = None, - nesterov: bool = False, - moment_requires_grad: bool = True - ): - """The `init` function. - Args: - net (nn.Module): a network whose parameters should be optimized. - args: other arguments see `alias.sgd`, here we set `moment_requires_grad=True` - to make tensors like momentum be differentiable. - """ - super().__init__( - net, - sgd( - lr=lr, - momentum=momentum, - nesterov=nesterov, - moment_requires_grad=moment_requires_grad - ) - ) - - -class MetaAdam(MetaOptimizer): - """The classic Adam optimiser.""" - - def __init__( - self, - net, - lr: ScalarOrSchedule, - b1: float = 0.9, - b2: float = 0.999, - eps: float = 1e-8, - eps_root: float = 0.0, - moment_requires_grad: bool = True, - use_accelerated_op: bool = False - ): - """The `init` function. - Args: - net (nn.Module): a network whose parameters should be optimized. - args: other arguments see `alias.adam`, here we set `moment_requires_grad=True` - to make tensors like momentum be differentiable. - """ - super().__init__( - net, - adam( - lr=lr, - b1=b1, - b2=b2, - eps=eps, - eps_root=eps_root, - moment_requires_grad=moment_requires_grad, - use_accelerated_op=use_accelerated_op - ) - ) - - -class MetaRMSProp(MetaOptimizer): - """The classic RMSProp optimiser.""" - - def __init__( - self, - net, - lr: ScalarOrSchedule, - decay: float = 0.9, - eps: float = 1e-8, - initial_scale: float = 0., - centered: bool = False, - momentum: Union[float, None] = None, - nesterov: bool = False - ): - """The `init` function. - Args: - net (nn.Module): a network whose parameters should be optimized. - args: other arguments see `alias.adam`, here we set `moment_requires_grad=True` - to make tensors like momentum be differentiable. 
- """ - super().__init__( - net, - rmsprop( - lr=lr, - decay=decay, - eps=eps, - initial_scale=initial_scale, - centered=centered, - momentum=momentum, - nesterov=nesterov - ) - ) diff --git a/TorchOpt/_src/Optimizer.py b/TorchOpt/_src/Optimizer.py deleted file mode 100644 index 8544d3da..00000000 --- a/TorchOpt/_src/Optimizer.py +++ /dev/null @@ -1,214 +0,0 @@ -# Copyright 2022 MetaOPT Team. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -from typing import Iterable, Union - -import jax -import torch - -from TorchOpt._src import base -from TorchOpt._src.alias import adam, rmsprop, sgd -from TorchOpt._src.pytypes import ScalarOrSchedule -from TorchOpt._src.update import apply_updates - - -class Optimizer(object): - """A high-level base class that has the similar with `torch.optim.Optimier`.""" - - def __init__(self, params: Iterable, impl: base.GradientTransformation): - """The `init` function. - - Args: - params (iterable): an iterable of `torch.Tensor`s. Specifies what Tensors should be optimized. - impl (base.GradientTransformation): a low level optimizer function, it could be - a optimizer function provided by `alias.py` or a customerized `chain` provided by - `combine.py`. Note that use `MetaOptimizer(sgd())` or `MetaOptimizer(chain(sgd())) - is equavalent to `SGD`. - """ - if not isinstance(params, list): - params = list(params) - self.impl = impl - self.param_groups = [] # type: ignore - self.param_tree_groups = [] # type: ignore - self.state_groups = [] # type: ignore - self.add_param_group(params) - - def zero_grad(self, set_to_none: bool = False): - """Sets the gradients of all optimized `torch.Tensor`s to zero. - - The behivour is similar to `torch.optim.Optimizer.zero_grad`. - - Args: - set_to_none (bool): instead of setting to zero, set the grads to None. - - """ - for group in self.param_groups: - if set_to_none: - - def f(p): - p.grad = None - return None - else: - - def f(p): - if p.grad is None: - return None - if p.grad.grad_fn is not None: - p.grad.detach_() - else: - p.grad.requires_grad_(False) - p.grad.zero_() - return None - - jax.tree_map(f, group) - - def state_dict(self): - """Returns the state of the optimizer.""" - return self.state_groups - - def load_state_dict(self, state_dict): - """Loads the optimizer state. - - Args: - state_dict (dict): optimizer state. Should be an object returned - from a call to :meth:`state_dict`. - """ - self.state_groups = state_dict - - def step(self, closure=None): - """Performs a single optimization step (parameter update). - - The behivour is similar to `torch.optim.Optimizer.step`. - - Args: - closure (callable, optional): A closure that reevaluates the model and returns the loss. 
- - """ - loss = None - if closure is not None: - with torch.enable_grad(): - loss = closure() - - for param, state in zip(self.param_groups, self.state_groups): - - def f(p): - return p.grad - - grad = jax.tree_map(f, param) - updates, _ = self.impl.update(grad, state) - apply_updates(param, updates) - - return loss - - def add_param_group(self, params): - params, tree = jax.tree_flatten(params) - params = tuple(params) - self.param_groups.append(params) - self.param_tree_groups.append(tree) - self.state_groups.append(self.impl.init(params)) - - -class SGD(Optimizer): - """The classic Adam optimiser.""" - - def __init__( - self, - params, - lr: ScalarOrSchedule, - momentum: Union[float, None] = None, - nesterov: bool = False - ): - """The `init` function. - - Args: - params (iterable): an iterable of `torch.Tensor`s. Specifies what Tensors should be optimized. - args: other arguments see `alias.adam`. - - """ - super().__init__( - params, - sgd( - lr=lr, - momentum=momentum, - nesterov=nesterov, - moment_requires_grad=False - ) - ) - - -class Adam(Optimizer): - """A canonical Stochastic Gradient Descent optimiser.""" - - def __init__( - self, - params, - lr: ScalarOrSchedule, - b1: float = 0.9, - b2: float = 0.999, - eps: float = 1e-8, - eps_root: float = 0.0, - use_accelerated_op: bool = False - ): - """The `init` function. - - Args: - params (iterable): an iterable of `torch.Tensor`s. Specifies what Tensors should be optimized. - args: other arguments see `alias.sgd`. - """ - super().__init__( - params, - adam( - lr=lr, - b1=b1, - b2=b2, - eps=eps, - eps_root=eps_root, - moment_requires_grad=False, - use_accelerated_op=use_accelerated_op - ) - ) - - -class RMSProp(Optimizer): - """An RMSProp optimiser.""" - - def __init__( - self, - params, - lr: ScalarOrSchedule, - decay: float = 0.9, - eps: float = 1e-8, - initial_scale: float = 0., - centered: bool = False, - momentum: Union[float, None] = None, - nesterov: bool = False - ): - """The `init` function. - - Args: - params (iterable): an iterable of `torch.Tensor`s. Specifies what Tensors should be optimized. - args: other arguments see `alias.sgd`. - """ - super().__init__( - params, - rmsprop( - lr=lr, - decay=decay, - eps=eps, - initial_scale=initial_scale, - centered=centered, - momentum=momentum, - nesterov=nesterov - ) - ) diff --git a/TorchOpt/_src/accelerated_op/adam_op/AdamOp.py b/TorchOpt/_src/accelerated_op/adam_op/AdamOp.py deleted file mode 100644 index e726a61a..00000000 --- a/TorchOpt/_src/accelerated_op/adam_op/AdamOp.py +++ /dev/null @@ -1,120 +0,0 @@ -# Copyright 2022 MetaOPT Team. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================== - -from typing import Any - -import torch - -from TorchOpt._lib import adam_op - - -class AdamOp(object): - - class MuOp(torch.autograd.Function): - - @staticmethod - def jvp(ctx: Any, *grad_inputs: Any) -> Any: - pass - - @staticmethod - def forward(ctx: Any, *args: Any, **kwargs: Any) -> Any: - updates, mu, b1 = args - new_mu = adam_op.forwardMu(updates, mu, b1) - ctx.save_for_backward(updates, mu) - ctx.b1 = b1 - return new_mu - - @staticmethod - def backward(ctx: Any, *args: Any) -> Any: - dmu = args[0] - updates, mu = ctx.saved_tensors - b1 = ctx.b1 - result = adam_op.backwardMu(dmu, updates, mu, b1) - return result[0], result[1], None - - class NuOp(torch.autograd.Function): - - @staticmethod - def jvp(ctx: Any, *grad_inputs: Any) -> Any: - pass - - @staticmethod - def forward(ctx: Any, *args: Any, **kwargs: Any) -> Any: - updates, nu, b2 = args - new_nu = adam_op.forwardNu(updates, nu, b2) - ctx.save_for_backward(updates, nu) - ctx.b2 = b2 - return new_nu - - @staticmethod - def backward(ctx: Any, *args: Any) -> Any: - dnu = args[0] - updates, nu = ctx.saved_tensors - b2 = ctx.b2 - result = adam_op.backwardNu(dnu, updates, nu, b2) - return result[0], result[1], None - - class UpdatesOp(torch.autograd.Function): - - @staticmethod - def jvp(ctx: Any, *grad_inputs: Any) -> Any: - pass - - @staticmethod - def forward(ctx: Any, *args: Any, **kwargs: Any) -> Any: - new_mu, new_nu, (b1, b2, eps, eps_root, count) = args - new_updates = adam_op.forwardUpdates( - new_mu, new_nu, b1, b2, eps, eps_root, count - ) - ctx.save_for_backward(new_updates, new_mu, new_nu) - ctx.others = (b1, b2, eps, eps_root, count) - return new_updates - - @staticmethod - def backward(ctx: Any, *args: Any) -> Any: - dupdates = args[0] - updates, new_mu, new_nu = ctx.saved_tensors - b1, b2, eps, eps_root, count = ctx.others - result = adam_op.backwardUpdates( - dupdates, updates, new_mu, new_nu, b1, b2, count - ) - return result[0], result[1], None - - def __init__(self, b1=0.9, b2=0.999, eps=1e-8, eps_root=0., inplace=True): - self.b1 = b1 - self.b2 = b2 - self.eps = eps - self.eps_root = eps_root - self.inplace = inplace - - def __call__(self, mu, nu, updates, count): - if updates is None: - return mu, nu, None - if updates.is_cuda: - current_device = torch.cuda.current_device() - torch.cuda.set_device(updates.device) - if self.inplace: - new_updates, new_mu, new_nu = adam_op.forward_( - updates, mu, nu, self.b1, self.b2, self.eps, self.eps_root, count - ) - else: - new_mu = self.MuOp.apply(updates, mu, self.b1) - new_nu = self.NuOp.apply(updates, nu, self.b2) - new_updates = self.UpdatesOp.apply( - new_mu, new_nu, (self.b1, self.b2, self.eps, self.eps_root, count) - ) - if updates.is_cuda: - torch.cuda.set_device(current_device) - return new_mu, new_nu, new_updates diff --git a/TorchOpt/_src/alias.py b/TorchOpt/_src/alias.py deleted file mode 100644 index 3f676efe..00000000 --- a/TorchOpt/_src/alias.py +++ /dev/null @@ -1,190 +0,0 @@ -# Copyright 2022 MetaOPT Team. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -# This file is modified from: -# https://github.com/deepmind/optax/blob/master/optax/_src/alias.py -# ============================================================================== -# Copyright 2019 DeepMind Technologies Limited. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== - -from typing import Optional - -import jax - -from TorchOpt._src import base, combine, transform -from TorchOpt._src.pytypes import ScalarOrSchedule - - -def _scale_by_lr(lr: ScalarOrSchedule, flip_sign=True): - m = -1 if flip_sign else 1 - if callable(lr): - - def schedule_wrapper(count): - - def f(scaled_lr): - return m * scaled_lr - - return jax.tree_map(f, lr(count)) # type: ignore - - return transform.scale_by_schedule(schedule_wrapper) - return transform.scale(m * lr) - - -def adam( - lr: ScalarOrSchedule, - b1: float = 0.9, - b2: float = 0.999, - eps: float = 1e-8, - eps_root: float = 0.0, - moment_requires_grad: bool = False, - use_accelerated_op: bool = False -) -> base.GradientTransformation: - """The classic Adam optimiser. - - Adam is an SGD variant with learning rate adaptation. The `lr` - used for each weight is computed from estimates of first- and second-order - moments of the gradients (using suitable exponential moving averages). - - References: - Kingma et al, 2014: https://arxiv.org/abs/1412.6980 - - Args: - lr: this is a fixed global scaling factor. - b1: the exponential decay rate to track the first moment of past gradients. - b2: the exponential decay rate to track the second moment of past gradients. - eps: a small constant applied to denominator outside of the square root - (as in the Adam paper) to avoid dividing by zero when rescaling. - eps_root: (default `0`), a small constant applied to denominator inside the - square root (as in RMSProp), to avoid dividing by zero when rescaling. - This is needed for example when computing (meta-)gradients through Adam. - moment_requires_grad: (default `False`), if True the momentums will be created with flag - `requires_grad=True`, this flag is often used in Meta Learning algorithms. - use_accelerated_op: (default `False`), if True use our implemented fused operator. - - Returns: - the corresponding `GradientTransformation`. - """ - adam_inst = transform.scale_by_accelerated_adam if use_accelerated_op else transform.scale_by_adam - return combine.chain( - adam_inst( - b1=b1, - b2=b2, - eps=eps, - eps_root=eps_root, - moment_requires_grad=moment_requires_grad - ), - _scale_by_lr(lr), - ) - - -def sgd( - lr: ScalarOrSchedule, - momentum: Optional[float] = None, - nesterov: bool = False, - moment_requires_grad: bool = False, -) -> base.GradientTransformation: - """A canonical Stochastic Gradient Descent optimiser. - - This implements stochastic gradient descent. 
It also includes support for - momentum, and nesterov acceleration, as these are standard practice when - using stochastic gradient descent to train deep neural networks. - - References: - Sutskever et al, 2013: http://proceedings.mlr.press/v28/sutskever13.pdf - - Args: - lr: this is a fixed global scaling factor. - momentum: (default `None`), the `decay` rate used by the momentum term, - when it is set to `None`, then momentum is not used at all. - nesterov (default `False`): whether nesterov momentum is used. - moment_requires_grad: (default `False`), if True the momentums will be created with flag - `requires_grad=True`, this flag is often used in Meta Learning algorithms. - - Returns: - A `GradientTransformation`. - """ - return combine.chain( - ( - transform.trace( - decay=momentum, - nesterov=nesterov, - moment_requires_grad=moment_requires_grad - ) if momentum is not None else base.identity() - ), _scale_by_lr(lr) - ) - - -def rmsprop( - lr: ScalarOrSchedule, - decay: float = 0.9, - eps: float = 1e-8, - initial_scale: float = 0., - centered: bool = False, - momentum: Optional[float] = None, - nesterov: bool = False -) -> base.GradientTransformation: - # pylint: disable=line-too-long - """A flexible RMSProp optimiser. - RMSProp is an SGD variant with learning rate adaptation. The `learning_rate` - used for each weight is scaled by a suitable estimate of the magnitude of the - gradients on previous steps. Several variants of RMSProp can be found - in the literature. This alias provides an easy to configure RMSProp - optimiser that can be used to switch between several of these variants. - References: - Tieleman and Hinton, 2012: http://www.cs.toronto.edu/~hinton/coursera/lecture6/lec6.pdf - Graves, 2013: https://arxiv.org/abs/1308.0850 - Args: - learning_rate: this is a fixed global scaling factor. - decay: the decay used to track the magnitude of previous gradients. - eps: a small numerical constant to avoid dividing by zero when rescaling. - initial_scale: (default `0.`), initialisation of accumulators tracking the - magnitude of previous updates. PyTorch uses `0`, TF1 uses `1`. When - reproducing results from a paper, verify the value used by the authors. - centered: (default `False`), whether the second moment or the variance of - the past gradients is used to rescale the latest gradients. - momentum: (default `None`), the `decay` rate used by the momentum term, - when it is set to `None`, then momentum is not used at all. - nesterov (default `False`): whether nesterov momentum is used. - Returns: - the corresponding `GradientTransformation`. - """ - # pylint: enable=line-too-long - if centered: - return combine.chain( - transform.scale_by_stddev( - decay=decay, eps=eps, initial_scale=initial_scale - ), _scale_by_lr(lr), ( - transform.trace(decay=momentum, nesterov=nesterov) - if momentum is not None else base.identity() - ) - ) - return combine.chain( - transform.scale_by_rms(decay=decay, eps=eps, initial_scale=initial_scale), - _scale_by_lr(lr), ( - transform.trace(decay=momentum, nesterov=nesterov) - if momentum is not None else base.identity() - ) - ) diff --git a/TorchOpt/_src/base.py b/TorchOpt/_src/base.py deleted file mode 100644 index 8b1559e6..00000000 --- a/TorchOpt/_src/base.py +++ /dev/null @@ -1,144 +0,0 @@ -# Copyright 2022 MetaOPT Team. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -# This file is modified from: -# https://github.com/deepmind/optax/blob/master/optax/_src/base.py -# ============================================================================== -# Copyright 2019 DeepMind Technologies Limited. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== - -from typing import Callable, NamedTuple, Tuple - -import typing_extensions - -from TorchOpt._src import pytypes - -OptState = pytypes.TensorTree # States are arbitrary nests of `torch.Tensor`. -# Parameters are arbitrary nests of `torch.Tensor`. -Params = pytypes.TensorTree -Updates = Params # Gradient updates are of the same type as parameters. - -Schedule = Callable[[pytypes.Numeric], pytypes.Numeric] - - -class EmptyState(NamedTuple): - """An empty state for the simplest stateless transformations.""" - - -class TransformInitFn(typing_extensions.Protocol): - """A callable type for the `init` step of a `GradientTransformation`. - - The `init` step takes a tree of `params` and uses these to construct an - arbitrary structured initial `state` for the gradient transformation. This - may hold statistics of the past updates or any other non static information. - """ - - def __call__(self, params: Params) -> OptState: - """The `init` function. - - Args: - params: The initial value of the parameters. - - Returns: - The initial state of the gradient transformation. - """ - ... - - -class TransformUpdateFn(typing_extensions.Protocol): - """A callable type for the `update` step of a `GradientTransformation`. - - The `update` step takes a tree of candidate parameter `updates` (e.g. their - gradient with respect to some loss), an arbitrary structured `state`, and the - current `params` of the model being optimised. The `params` argument is - optional, it must however be provided when using transformations that require - access to the current values of the parameters. - """ - - def __call__(self, - updates: Updates, - state: OptState, - inplace: bool = True) -> Tuple[Updates, OptState]: - """The `update` function. - - Args: - updates: A tree of candidate updates. - state: The state of the gradient transformation. - inplace: (Optionally) if true, modify updates and state using inplace operations. - - Returns: - The transformed updates, and the updated state. - """ - ... - - -class GradientTransformation(NamedTuple): - """A pair of pure functions implementing a gradient transformation. 
-
-    TorchOpt optimizers are all implemented as _gradient transformations_, like
-    Optax. A gradient transformation is defined to be a pair of pure functions,
-    which are combined together in a `NamedTuple` so that they can be referred
-    to by name.
-
-    Since gradient transformations do not contain any internal state, all stateful
-    optimizer properties (such as the current step count when using optimizer
-    schedules, or momentum values) are passed through gradient transformations by
-    using the optimizer _state_ pytree. Each time a gradient transformation is
-    applied, the state is computed and returned, ready to be passed to the next
-    call to the gradient transformation.
-
-    Attributes:
-        init: A pure function which, when called with an example instance of the
-            parameters whose gradients will be transformed, returns a pytree
-            containing the initial value for the optimizer state.
-        update: A pure function which takes as input a pytree of updates (with the
-            same tree structure as the original params pytree passed to init), the
-            previous optimizer state (which may have been initialized using the init
-            function), and optionally the inplace flag. The update function then
-            returns the computed gradient updates and an updated optimizer state.
-            If the inplace flag is true, the output results are the same instance as
-            the input.
-    """
-    init: TransformInitFn
-    update: TransformUpdateFn
-
-
-def identity() -> GradientTransformation:
-    """Stateless identity transformation that leaves input gradients untouched.
-
-    This function passes through the *gradient updates* unchanged.
-
-    Returns:
-        An (init_fn, update_fn) tuple.
-    """
-
-    def init_fn(_):
-        return EmptyState()
-
-    def update_fn(updates, state, inplace=False):
-        return updates, state
-
-    return GradientTransformation(init_fn, update_fn)
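To make the `(init, update)` contract concrete, here is a hedged sketch of a custom stateless transformation composed with `combine.chain`; the `negate` helper is invented for illustration and is not part of this patch:

```python
# Hypothetical sketch: a custom stateless GradientTransformation.
import jax
from TorchOpt._src import base, combine

def negate() -> base.GradientTransformation:
    def init_fn(params):
        del params
        return base.EmptyState()  # no state is carried between calls

    def update_fn(updates, state, inplace=False):
        new_updates = jax.tree_map(
            lambda g: g.neg() if g is not None else None, updates
        )
        return new_updates, state

    return base.GradientTransformation(init_fn, update_fn)

# Composable like any built-in transformation.
tx = combine.chain(negate(), base.identity())
```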
- """ - - def init_fn(params): - del params - return ClipState() - - def update_fn(updates, state, inplace=True): - available_updates = [] - for g in updates: - if g is not None: - available_updates.append(g) - if len(available_updates) == 0: - return torch.tensor(0.) - device = available_updates[0].device - with torch.no_grad(): - if norm_type == inf: - norms = [p.abs().max().to(device) for p in available_updates] - total_norm = norms[0] if len(norms) == 1 else torch.max( - torch.stack(norms) - ) - else: - total_norm = torch.norm( - torch.stack( - [torch.norm(p, norm_type).to(device) for p in available_updates] - ), norm_type - ) - if error_if_nonfinite and torch.logical_or( - total_norm.isnan(), total_norm.isinf() - ): - raise RuntimeError( - f'The total norm of order {norm_type} for gradients from ' - '`parameters` is non-finite, so it cannot be clipped. To disable ' - 'this error and scale the gradients by the non-finite norm anyway, ' - 'set `error_if_nonfinite=False`' - ) - clip_coef = max_norm / (float(total_norm) + 1e-6) - # Note: multiplying by the clamped coef is redundant when the coef is clamped to 1, but doing so - # avoids a `if clip_coef < 1:` conditional which can require a CPU <=> device synchronization - # when the gradients do not reside in CPU memory. - clip_coef_clamped = min(clip_coef, 1.) - if inplace: - - def f(g): - return g.mul_(clip_coef_clamped) if g is not None else None - else: - - def f(g): - return g.mul(clip_coef_clamped) if g is not None else None - - new_updates = jax.tree_map(f, updates) - return new_updates, state - - return base.GradientTransformation(init_fn, update_fn) diff --git a/TorchOpt/_src/schedule.py b/TorchOpt/_src/schedule.py deleted file mode 100644 index ad24cf82..00000000 --- a/TorchOpt/_src/schedule.py +++ /dev/null @@ -1,101 +0,0 @@ -# Copyright 2022 MetaOPT Team. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -# This file is modified from: -# https://github.com/deepmind/optax/blob/master/optax/_src/schedule.py -# ============================================================================== -# Copyright 2019 DeepMind Technologies Limited. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================== - -import jax -import numpy as np -from absl import logging - -from TorchOpt._src import base, pytypes - - -def polynomial_schedule( - init_value: pytypes.Scalar, - end_value: pytypes.Scalar, - power: pytypes.Scalar, - transition_steps: int, - transition_begin: int = 0 -) -> base.Schedule: - """Constructs a schedule with polynomial transition from init to end value. - Args: - init_value: initial value for the scalar to be annealed. - end_value: end value of the scalar to be annealed. - power: the power of the polynomial used to transition from init to end. - transition_steps: number of steps over which annealing takes place, - the scalar starts changing at `transition_begin` steps and completes - the transition by `transition_begin + transition_steps` steps. - If `transition_steps <= 0`, then the entire annealing process is disabled - and the value is held fixed at `init_value`. - transition_begin: must be positive. After how many steps to start annealing - (before this many steps the scalar value is held fixed at `init_value`). - Returns: - schedule: A function that maps step counts to values. - """ - if transition_steps <= 0: - logging.info( - 'A polynomial schedule was set with a non-positive `transition_steps` ' - 'value; this results in a constant schedule with value `init_value`.' - ) - return lambda count: init_value - - if transition_begin < 0: - logging.info( - 'An exponential schedule was set with a negative `transition_begin` ' - 'value; this will result in `transition_begin` falling back to `0`.' - ) - transition_begin = 0 - - def schedule(count): - - def impl(count): - count = np.clip(count - transition_begin, 0, transition_steps) - frac = 1 - count / transition_steps - return (init_value - end_value) * (frac**power) + end_value - - return jax.tree_map(impl, count) - - return schedule - - -# Alias polynomial schedule to linear schedule for convenience. -def linear_schedule( - init_value: pytypes.Scalar, - end_value: pytypes.Scalar, - transition_steps: int, - transition_begin: int = 0 -) -> base.Schedule: - return polynomial_schedule( - init_value=init_value, - end_value=end_value, - power=1, - transition_steps=transition_steps, - transition_begin=transition_begin - ) diff --git a/TorchOpt/_src/transform.py b/TorchOpt/_src/transform.py deleted file mode 100644 index 7cdc9c86..00000000 --- a/TorchOpt/_src/transform.py +++ /dev/null @@ -1,469 +0,0 @@ -# Copyright 2022 MetaOPT Team. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -# This file is modified from: -# https://github.com/deepmind/optax/blob/master/optax/_src/transform.py -# ============================================================================== -# Copyright 2019 DeepMind Technologies Limited. All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== - -from typing import List, NamedTuple, Tuple, Union - -import jax -import torch - -from TorchOpt._src import base -from TorchOpt._src.pytypes import ScalarOrSchedule, Schedule - -ScaleState = base.EmptyState - - -def inc_count(updates, count: Tuple[int]) -> Tuple[int]: - - def f(c, g): - return c + 1 if g is not None else c - - return jax.tree_map(f, count, updates) - - -def scale(step_size: float) -> base.GradientTransformation: - """Scale updates by some fixed scalar `step_size`. - - Args: - step_size: a scalar corresponding to a fixed scaling factor for updates. - - Returns: - An (init_fn, update_fn) tuple. - """ - - def init_fn(params): - del params - return ScaleState() - - def update_fn(updates, state, inplace=True): - if inplace: - - def f(g): - return g.mul_(step_size) if g is not None else None - else: - - def f(g): - return g.mul(step_size) if g is not None else None - - updates = jax.tree_map(f, updates) - return updates, state - - return base.GradientTransformation(init_fn, update_fn) - - -class ScaleByScheduleState(NamedTuple): - """Maintains count for scale scheduling.""" - count: Tuple[int, ...] # type: ignore - - -def scale_by_schedule(step_size_fn: Schedule) -> base.GradientTransformation: - """Scale updates using a custom schedule for the `step_size`. - - Args: - step_size_fn: a function that takes an update count as input and proposes - the step_size to multiply the updates by. - - Returns: - An (init_fn, update_fn) tuple. 
- """ - - def init_fn(params): - return ScaleByScheduleState(count=tuple(0 for _ in range(len(params)))) - - def update_fn(updates, state, inplace=True): - step_size = step_size_fn(state.count) - if inplace: - updates = jax.tree_map( - lambda g, step_size: g.mul_(step_size), updates, step_size - ) - else: - updates = jax.tree_map( - lambda g, step_size: g.mul(step_size), updates, step_size - ) - return updates, ScaleByScheduleState(count=inc_count(updates, state.count)) - - return base.GradientTransformation(init_fn, update_fn) - - -def _update_moment(updates, moments, decay, order, inplace=True): - """Compute the exponential moving average of the `order`-th moment.""" - if inplace: - - def f(g, t): - return t.mul_(decay).add_( - g**order, alpha=1 - decay - ) if g is not None else t - else: - - def f(g, t): - return t.mul(decay).add( - g**order, alpha=1 - decay - ) if g is not None else t - - return jax.tree_map(f, updates, moments) - - -def _update_moment_per_elem_norm(updates, moments, decay, order, inplace=True): - """Compute the EMA of the `order`-th moment of the element-wise norm.""" - - if inplace: - - def f(g, t): - return t.mul_(decay).add_( - g**order, alpha=1 - decay - ) if g is not None else t - else: - - def f(g, t): - return t.mul(decay).add( - g**order, alpha=1 - decay - ) if g is not None else t - - return jax.tree_map(f, updates, moments) - - -class ScaleByAdamState(NamedTuple): - """State for the Adam algorithm.""" - count: Tuple[int, ...] # type: ignore - mu: base.Updates - nu: base.Updates - - -def _bias_correction(moment, decay, count, inplace=True): - """Perform bias correction. This becomes a no-op as count goes to infinity.""" - if inplace: - - def f(t, c): - return t.div_(1 - decay**c) - else: - - def f(t, c): - return t.div(1 - decay**c) - - return jax.tree_map(f, moment, count) - - -def scale_by_adam( - b1: float = 0.9, - b2: float = 0.999, - eps: float = 1e-8, - eps_root: float = 0.0, - moment_requires_grad: bool = False, -) -> base.GradientTransformation: - """Rescale updates according to the Adam algorithm. - - References: - [Kingma et al, 2014](https://arxiv.org/abs/1412.6980) - - Args: - b1: decay rate for the exponentially weighted average of grads. - b2: decay rate for the exponentially weighted average of squared grads. - eps: term added to the denominator to improve numerical stability. - eps_root: term added to the denominator inside the square-root to improve - numerical stability when backpropagating gradients through the rescaling. - moment_requires_grad: if true, states will be created with flag `requires_grad = True`. - - Returns: - An (init_fn, update_fn) tuple. 
- """ - - def init_fn(params): - mu = jax.tree_map( # First moment - lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), - params) - nu = jax.tree_map( # Second moment - lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), - params) - return ScaleByAdamState( - count=tuple(0 for _ in range(len(mu))), mu=tuple(mu), nu=tuple(nu) - ) - - def update_fn(updates, state, inplace=True): - mu = _update_moment(updates, state.mu, b1, 1, inplace) - nu = _update_moment_per_elem_norm(updates, state.nu, b2, 2, inplace) - count_inc = inc_count(updates, state.count) - mu_hat = _bias_correction(mu, b1, count_inc, False) - nu_hat = _bias_correction(nu, b2, count_inc, False) - if inplace: - - def f(g, m, v): - return m.div_( - torch.sqrt_(v.add_(eps_root)).add_(eps) - ) if g is not None else None - else: - - def f(g, m, v): - return m.div( - torch.sqrt(v.add(eps_root)).add(eps) - ) if g is not None else None - - updates = jax.tree_map(f, updates, mu_hat, nu_hat) - return updates, ScaleByAdamState(count=count_inc, mu=mu, nu=nu) - - return base.GradientTransformation(init_fn, update_fn) - - -def scale_by_accelerated_adam( - b1: float = 0.9, - b2: float = 0.999, - eps: float = 1e-8, - eps_root: float = 0.0, - moment_requires_grad: bool = False, -) -> base.GradientTransformation: - """Rescale updates according to the Adam algorithm. - - This function is acceleracted by using some fused accelerated operators. - - References: - [Kingma et al, 2014](https://arxiv.org/abs/1412.6980) - - Args: - b1: decay rate for the exponentially weighted average of grads. - b2: decay rate for the exponentially weighted average of squared grads. - eps: term added to the denominator to improve numerical stability. - eps_root: term added to the denominator inside the square-root to improve - numerical stability when backpropagating gradients through the rescaling. - moment_requires_grad: if true, states will be created with flag `requires_grad = True`. - - Returns: - An (init_fn, update_fn) tuple. - """ - from .accelerated_op import AdamOp - - def init_fn(params): - mu = jax.tree_map( # First moment - lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), - params) - nu = jax.tree_map( # Second moment - lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), - params) - return ScaleByAdamState( - count=tuple(0 for _ in range(len(params))), mu=mu, nu=nu - ) - - def update_fn(updates, state, inplace=True): - count_inc = inc_count(updates, state.count) - op = AdamOp(b1, b2, eps, eps_root, inplace) - out = jax.tree_map(op, state.mu, state.nu, updates, count_inc) - new_mus, new_nus, new_updates = [], [], [] - for new_mu, new_nu, new_update in out: - new_mus.append(new_mu) - new_nus.append(new_nu) - new_updates.append(new_update) - return tuple(new_updates), ScaleByAdamState( - count=count_inc, mu=tuple(new_mus), nu=tuple(new_nus) - ) - - return base.GradientTransformation(init_fn, update_fn) - - -class TraceState(NamedTuple): - """Holds an aggregation of past updates.""" - trace: base.Params - - -def trace( - decay: float, - nesterov: bool = False, - moment_requires_grad: bool = False, -) -> base.GradientTransformation: - """Compute a trace of past updates. - - Note: `trace` and `ema` have very similar but distinct updates; - `trace = decay * trace + t`, while `ema = decay * ema + (1-decay) * t`. - Both are frequently found in the optimisation literature. - - Args: - decay: the decay rate for the trace of past updates. - nesterov: whether to use Nesterov momentum. 
- moment_requires_grad: if true, states will be created with flag `requires_grad = True`. - - Returns: - An (init_fn, update_fn) tuple. - """ - - def init_fn(params): - if decay == 0.: - return TraceState(trace=()) - else: - return TraceState( - trace=jax.tree_map( - lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), - params - ) - ) - - def update_fn(updates, state, inplace=True): - if nesterov: - if inplace: - - def f1(g, t): - return t.copy_(g.add(t, alpha=decay)) - - def f2(g, t): - return g.add_(t, alpha=decay) - - new_trace = jax.tree_map(f1, updates, state.trace) - updates = jax.tree_map(f2, updates, new_trace) - else: - - def f(g, t): - return g.add(t, alpha=decay) - - new_trace = jax.tree_map(f, updates, state.trace) - updates = jax.tree_map(f, updates, new_trace) - else: - if inplace: - - def f(g, t): - return g.add_(t, alpha=decay) - - updates = jax.tree_map(f, updates, state.trace) - state.trace.copy_(updates) - new_trace = state.trace - else: - - def f(g, t): - return g.add(t, alpha=decay) - - updates = jax.tree_map(f, updates, state.trace) - new_trace = updates - - return updates, TraceState(trace=new_trace) - - return base.GradientTransformation(init_fn, update_fn) - - -class ScaleByRmsState(NamedTuple): - """State for exponential root mean-squared (RMS)-normalized updates.""" - nu: base.Updates - - -def scale_by_rms( - decay: float = 0.9, - eps: float = 1e-8, - initial_scale: float = 0. -) -> base.GradientTransformation: - """Rescale updates by the root of the exp. moving avg of the square. - - References: - [Hinton](www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf) - - Args: - decay: decay rate for the exponentially weighted average of squared grads. - eps: term added to the denominator to improve numerical stability. - initial_scale: initial value for second moment - - Returns: - An (init_fn, update_fn) tuple. - """ - - def init_fn(params): - nu = jax.tree_map( - lambda n: torch.full_like(n, initial_scale), params - ) # second moment - return ScaleByRmsState(nu=nu) - - def update_fn(updates, state, inplace=True): - nu = _update_moment_per_elem_norm(updates, state.nu, decay, 2, inplace) - if inplace: - - def f(g, n): - return g.mul_(torch.rsqrt(n.add(eps))) - else: - - def f(g, n): - return g.mul(torch.rsqrt(n.add(eps))) - - # """The followings are pytorch style""" - # if inplace: - # def f(g, n): return g.div_(torch.sqrt_(n).add_(eps)) - # else: - # def f(g, n): return g.div(torch.sqrt(n).add(eps)) - updates = jax.tree_map(f, updates, nu) - return updates, ScaleByRmsState(nu=nu) - - return base.GradientTransformation(init_fn, update_fn) - - -class ScaleByRStdDevState(NamedTuple): - """State for centered exponential moving average of squares of updates.""" - mu: base.Updates - nu: base.Updates - - -def scale_by_stddev( - decay: float = 0.9, - eps: float = 1e-8, - initial_scale: float = 0. -) -> base.GradientTransformation: - """Rescale updates by the root of the centered exp. moving average of squares. - - References: - [Hinton](www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf) - - Args: - decay: decay rate for the exponentially weighted average of squared grads. - eps: term added to the denominator to improve numerical stability. - initial_scale: initial value for second moment - - Returns: - An (init_fn, update_fn) tuple. 
- """ - - def init_fn(params): - mu = jax.tree_map(torch.zeros_like, params) # First moment - nu = jax.tree_map( - lambda n: torch.full_like(n, initial_scale), params - ) # second moment - return ScaleByRStdDevState(mu=mu, nu=nu) - - def update_fn(updates, state, inplace=True): - mu = _update_moment(updates, state.mu, decay, 1, inplace) - nu = _update_moment_per_elem_norm(updates, state.nu, decay, 2, inplace) - if inplace: - - def f(g, m, n): - return g.mul_(torch.rsqrt(n.sub(m**2).add(eps))) - else: - - def f(g, m, n): - return g.mul(torch.rsqrt(n.sub(m**2).add(eps))) - - # """The followings are pytorch style""" - # if inplace: - # def f(g, m, n): return g.div_(torch.sqrt_(n.sub_(m ** 2)).add(eps)) - # else: - # def f(g, m, n): return g.div(torch.sqrt(n.sub(m ** 2)).add(eps)) - updates = jax.tree_map(f, updates, mu, nu) - return updates, ScaleByRStdDevState(mu=mu, nu=nu) - - return base.GradientTransformation(init_fn, update_fn) diff --git a/TorchOpt/_src/utils.py b/TorchOpt/_src/utils.py deleted file mode 100644 index 23c28ae9..00000000 --- a/TorchOpt/_src/utils.py +++ /dev/null @@ -1,190 +0,0 @@ -# Copyright 2022 MetaOPT Team. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== - -from typing import Dict, List, NamedTuple, Union - -import jax -import torch -from torch import nn - -from TorchOpt._src.MetaOptimizer import MetaOptimizer - - -class _ModuleState(NamedTuple): - params: List[Dict] - - visual_contents: Union[None, Dict] = None - - -# mypy: ignore-errors -def stop_gradient(target): - """Stop the gradient for the input object. - - Since a tensor use `grad_fn` to connect itself with the previous computation - graph, the back-propagated gradient will flow over the tensor and continue - flow to the tensors that is connected by `grad_fn`. Some algorithms requires - manually detaching tensors from the computation graph. - - Note that the stop_gradient operation is in-place. - - Args: - target: the target that to be detached from the computation graph, it coule - be a `nn.Module`, `TorchOpt.MetaOptimizer`, state of the - `TorchOpt.MetaOptimizer`, or just a plain list of tensors. - inplace: if True, the target will be detached in-place. if False, this function - will return a detached copy of the target. The in-place operation is fast - and memory efficient but may raise back-propagation error. - """ - - def f(obj): - if isinstance(obj, torch.Tensor): - requires_grad = obj.requires_grad - obj.detach_().requires_grad_(requires_grad) - return None - - if isinstance(target, _ModuleState): - true_target = target.params - elif isinstance(target, nn.Module): - true_target = tuple(target.parameters()) - elif isinstance(target, MetaOptimizer): - true_target, _ = jax.tree_flatten(target.state_dict()) - else: - true_target = target - - jax.tree_map(f, true_target) - - -def extract_state_dict( - mod, copy=False, *, with_buffer=True, enable_visual=False, visual_prefix='' -): - """Extract target state. 
- - Since a tensor use `grad_fn` to connect itself with the previous computation - graph, the back-propagated gradient will flow over the tensor and continue - flow to the tensors that is connected by `grad_fn`. Some algorithms requires - manually detaching tensors from the computation graph. - - Note that the extracted state is a reference, which means any in-place operatior - will affect the target that the state is extracted from. - - Args: - mod: it coule be a `nn.Module` or `TorchOpt.MetaOptimizer`. - with_buffer: extract buffer together with parameters, this argument is only - used if the input target is `nn.Module`. - enable_visual: add additional annoations, which could be used in computation - graph visualization. Currently, this flag only has effect on `nn.Module` but - we will support `TorchOpt.MetaOptimizer` later. - visual_prefix: prefix for the visualization annoations. - - Returns: - State extracted of the input object. - """ - if isinstance(mod, nn.Module): - if enable_visual: - visual_contents = {} - - for k, v in mod.named_parameters(): - if v.grad_fn is not None: - visual_contents.update({v.grad_fn: (visual_prefix + k, v)}) - else: - visual_contents.update({v: visual_prefix + k}) - else: - visual_contents = None - - params = [] - - def get_v(v): - if copy: - requires_grad = v.requires_grad - return v.clone().detach_().requires_grad_(requires_grad) - else: - return v - - def _update(term): - if len(term) != 0: - params.append({k: get_v(v) for k, v in term.items()}) - - _update(mod._parameters) - if with_buffer: - _update(mod._buffers) - for module in mod.modules(): - if module is mod: - continue - _update(module._parameters) - if with_buffer: - _update(module._buffers) - return _ModuleState(params=tuple(params), visual_contents=visual_contents) - elif isinstance(mod, MetaOptimizer): - state = mod.state_dict() - if copy: - flatten_state, state_tree = jax.tree_flatten(state) - - def get_v(v): - if not isinstance(v, torch.Tensor): - return v - requires_grad = v.requires_grad - return v.clone().detach_().requires_grad_(requires_grad) - - flatten_state = jax.tree_map(get_v, flatten_state) - return state_tree.unflatten(flatten_state) - else: - return state - - else: - raise RuntimeError(f"Unexpected class of {mod}") - - -def _extract_container(mod, with_buffer=True): - if isinstance(mod, nn.Module): - containers = [] - - def _update(term): - if len(term) != 0: - containers.append(term) - - _update(mod._parameters) - if with_buffer: - _update(mod._buffers) - for module in mod.modules(): - if module is mod: - continue - _update(module._parameters) - if with_buffer: - _update(module._buffers) - return tuple(containers) - else: - raise RuntimeError(f"Unexpected class of {mod}") - - -def recover_state_dict(mod, state): - """Recover state. - - This function is compatiable for the `extract_state`. - - Note that the recovering process is not in-place, so the tensors of the object - will not be modified. - - Args: - mod: targe that need to recover. - state: the recovering state. - """ - if isinstance(mod, nn.Module): - target_container = _extract_container(mod) - for target, source in zip(target_container, state.params): - target.update(source) - elif isinstance(mod, MetaOptimizer): - mod.load_state_dict(state) - else: - raise RuntimeError(f"Unexpected class of {mod}") diff --git a/TorchOpt/_src/visual.py b/TorchOpt/_src/visual.py deleted file mode 100644 index aa3e9702..00000000 --- a/TorchOpt/_src/visual.py +++ /dev/null @@ -1,236 +0,0 @@ -# Copyright 2022 MetaOPT Team. 
All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -# This file is modified from: -# https://github.com/szagoruyko/pytorchviz/blob/master/torchviz/dot.py -# ============================================================================== - -import warnings -from collections import namedtuple -from distutils.version import LooseVersion -from typing import Dict, Generator - -import torch -from graphviz import Digraph - -Node = namedtuple('Node', ('name', 'inputs', 'attr', 'op')) - -# Saved attrs for grad_fn (incl. saved variables) begin with `._saved_*` -SAVED_PREFIX = "_saved_" - - -def get_fn_name(fn, show_attrs, max_attr_chars): - name = str(type(fn).__name__) - if not show_attrs: - return name - attrs = dict() - for attr in dir(fn): - if not attr.startswith(SAVED_PREFIX): - continue - val = getattr(fn, attr) - attr = attr[len(SAVED_PREFIX):] - if torch.is_tensor(val): - attrs[attr] = "[saved tensor]" - elif isinstance(val, tuple) and any(torch.is_tensor(t) for t in val): - attrs[attr] = "[saved tensors]" - else: - attrs[attr] = str(val) - if not attrs: - return name - max_attr_chars = max(max_attr_chars, 3) - col1width = max(len(k) for k in attrs.keys()) - col2width = min(max(len(str(v)) for v in attrs.values()), max_attr_chars) - sep = "-" * max(col1width + col2width + 2, len(name)) - attrstr = '%-' + str(col1width) + 's: %' + str(col2width) + 's' - - def truncate(s): - return s[:col2width - 3] + "..." if len(s) > col2width else s - - params = '\n'.join( - attrstr % (k, truncate(str(v))) for (k, v) in attrs.items() - ) - return name + '\n' + sep + '\n' + params - - -# mypy: ignore-errors -def make_dot( - var, params=None, show_attrs=False, show_saved=False, max_attr_chars=50 -): - """Produces Graphviz representation of PyTorch autograd graph. - - If a node represents a backward function, it is gray. Otherwise, the node - represents a tensor and is either blue, orange, or green: - - Blue: reachable leaf tensors that requires grad (tensors whose `.grad` - fields will be populated during `.backward()`) - - Orange: saved tensors of custom autograd functions as well as those - saved by built-in backward nodes - - Green: tensor passed in as outputs - - Dark green: if any output is a view, we represent its base tensor with - a dark green node. - - Args: - var: output tensor - params: [dict of (name, tensor) or state_dict] to add names to node that requires grad - show_attrs: whether to display non-tensor attributes of backward nodes - (Requires PyTorch version >= 1.9) - show_saved: whether to display saved tensor nodes that are not by custom - autograd functions. Saved tensor nodes for custom functions, if - present, are always displayed. (Requires PyTorch version >= 1.9) - max_attr_chars: if show_attrs is `True`, sets max number of characters - to display for any given attribute. 
- """ - if LooseVersion(torch.__version__) < LooseVersion("1.9") and \ - (show_attrs or show_saved): - warnings.warn( - "make_dot: showing grad_fn attributes and saved variables" - " requires PyTorch version >= 1.9. (This does NOT apply to" - " saved tensors saved by custom autograd functions.)" - ) - - param_map = {} - - if params is not None: - from TorchOpt.utils import _ModuleState - if isinstance(params, _ModuleState): - param_map.update(params.visual_contents) - elif isinstance(params, Dict): - param_map.update({v: k for k, v in params.items()}) - elif isinstance(params, Generator): - param_map.update({v: k for k, v in params}) - else: - for param in params: - if isinstance(param, _ModuleState): - param_map.update(param.visual_contents) - elif isinstance(param, Generator): - param_map.update({v: k for k, v in param}) - else: - param_map.update({v: k for k, v in param.items()}) - - node_attr = dict( - style='filled', - shape='box', - align='left', - fontsize='10', - ranksep='0.1', - height='0.2', - fontname='monospace' - ) - dot = Digraph(node_attr=node_attr, graph_attr=dict(size="12,12")) - seen = set() - - def size_to_str(size): - return '(' + (', ').join(['%d' % v for v in size]) + ')' - - def get_var_name(var, name=None): - if not name: - name = param_map[var] if var in param_map else '' - return '%s\n %s' % (name, size_to_str(var.size())) - - def get_var_name_with_flag(var): - if var in param_map: - return '%s\n %s' % ( - param_map[var][0], size_to_str(param_map[var][1].size()) - ) - else: - return None - - def add_nodes(fn): - assert not torch.is_tensor(fn) - if fn in seen: - return - seen.add(fn) - - if show_saved: - for attr in dir(fn): - if not attr.startswith(SAVED_PREFIX): - continue - val = getattr(fn, attr) - seen.add(val) - attr = attr[len(SAVED_PREFIX):] - if torch.is_tensor(val): - dot.edge(str(id(fn)), str(id(val)), dir="none") - dot.node(str(id(val)), get_var_name(val, attr), fillcolor='orange') - if isinstance(val, tuple): - for i, t in enumerate(val): - if torch.is_tensor(t): - name = attr + '[%s]' % str(i) - dot.edge(str(id(fn)), str(id(t)), dir="none") - dot.node(str(id(t)), get_var_name(t, name), fillcolor='orange') - - if hasattr(fn, 'variable'): - # if grad_accumulator, add the node for `.variable` - var = fn.variable - seen.add(var) - dot.node(str(id(var)), get_var_name(var), fillcolor='lightblue') - dot.edge(str(id(var)), str(id(fn))) - - fn_name = get_fn_name(fn, show_attrs, max_attr_chars) - fn_fillcolor = None - var_name = get_var_name_with_flag(fn) - if var_name is not None: - fn_name = '%s\n %s' % (fn_name, var_name) - fn_fillcolor = 'lightblue' - - # add the node for this grad_fn - dot.node(str(id(fn)), fn_name, fillcolor=fn_fillcolor) - - # recurse - if hasattr(fn, 'next_functions'): - for u in fn.next_functions: - if u[0] is not None: - dot.edge(str(id(u[0])), str(id(fn))) - add_nodes(u[0]) - - # note: this used to show .saved_tensors in pytorch0.2, but stopped - # working* as it was moved to ATen and Variable-Tensor merged - # also note that this still works for custom autograd functions - if hasattr(fn, 'saved_tensors'): - for t in fn.saved_tensors: - dot.edge(str(id(t)), str(id(fn))) - dot.node(str(id(t)), get_var_name(t), fillcolor='orange') - - def add_base_tensor(var, color='darkolivegreen1'): - if var in seen: - return - seen.add(var) - dot.node(str(id(var)), get_var_name(var), fillcolor=color) - if (var.grad_fn): - add_nodes(var.grad_fn) - dot.edge(str(id(var.grad_fn)), str(id(var))) - if var._is_view(): - add_base_tensor(var._base, 
color='darkolivegreen3')
-            dot.edge(str(id(var._base)), str(id(var)), style="dotted")
-
-    # handle multiple outputs
-    if isinstance(var, tuple):
-        for v in var:
-            add_base_tensor(v)
-    else:
-        add_base_tensor(var)
-
-    resize_graph(dot)
-
-    return dot
-
-
-def resize_graph(dot, size_per_element=0.15, min_size=12):
-    """Resize the graph according to how much content it contains.
-    Modify the graph in place.
-    """
-    # Get the approximate number of nodes and edges
-    num_rows = len(dot.body)
-    content_size = num_rows * size_per_element
-    size = max(min_size, content_size)
-    size_str = str(size) + "," + str(size)
-    dot.graph_attr.update(size=size_str)
diff --git a/docker/dev.dockerfile b/docker/dev.dockerfile
index 6c86fee0..01c00a0e 100644
--- a/docker/dev.dockerfile
+++ b/docker/dev.dockerfile
@@ -3,8 +3,8 @@
 CPU_PARENT=ubuntu:18.04
 GPU_PARENT=nvidia/cuda:10.1-cudnn7-runtime-ubuntu18.04
 
-TAG=metaopt/TorchOpt
-VERSION=$(cat ./stable_baselines3/version.txt)
+TAG=metaopt/torchopt
+VERSION=$(git log -1 --format=%h)
 
 if [[ ${USE_GPU} == "True" ]]; then
   PARENT=${GPU_PARENT}
diff --git a/docs/conf.py b/docs/conf.py
index 8dfa64e6..4b42352a 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -20,10 +20,10 @@
 
 def get_version() -> str:
-  # https://packaging.python.org/guides/single-sourcing-package-version/
-  with open(os.path.join("..", "TorchOpt", "__init__.py"), "r") as f:
-    init = f.read().split()
-    return init[init.index("__version__") + 2][1:-1]
+    # https://packaging.python.org/guides/single-sourcing-package-version/
+    with open(os.path.join("..", "torchopt", "__init__.py"), "r") as f:
+        init = f.read().split()
+        return init[init.index("__version__") + 2][1:-1]
 
 
 # -- Project information -----------------------------------------------------
@@ -41,7 +41,7 @@
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
 # ones.
 extensions = [
-  "sphinx.ext.autodoc",
+    "sphinx.ext.autodoc",
 ]
 
 # Add any paths that contain templates here, relative to this directory.
@@ -74,8 +74,8 @@
 
 def setup(app):
-  app.add_js_file("js/copybutton.js")
-  app.add_css_file("css/style.css")
+    app.add_js_file("js/copybutton.js")
+    app.add_css_file("css/style.css")
 
 
 # -- Extension configuration -------------------------------------------------
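`get_version` above relies on the token layout of the version assignment in `__init__.py`; a hedged sketch of the parsing it performs (the version string here is illustrative):

```python
# Hedged sketch: what get_version() evaluates, assuming a line of the
# form `__version__ = "0.4.1"` in the package's __init__.py.
init = '__version__ = "0.4.1"'.split()               # ['__version__', '=', '"0.4.1"']
version = init[init.index("__version__") + 2][1:-1]  # [1:-1] strips the quotes
assert version == "0.4.1"
```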
+* With the design of functional programming, TorchOpt provides efficient, flexible, and easy-to-implement differentiable optimizers for gradient-based meta-learning research. It largely reduces the effort required to implement sophisticated meta-learning algorithms.

Installation
------------
@@ -18,14 +18,14 @@ Requirements

.. code-block:: bash

-    pip install TorchOpt
+    pip install torchopt

You can also build shared libraries from source, use:

.. code-block:: bash

-    git clone git@github.com:metaopt/TorchOpt.git
-    cd TorchOpt
+    git clone git@github.com:metaopt/torchopt.git
+    cd torchopt
    python setup.py build_from_source

The Team
@@ -37,10 +37,9 @@ Support
-------

If you are having issues, please let us know by filing an issue on our
-`issue tracker `_.
-
+`issue tracker `_.
License
-------

-TorchOpt is licensed under the Apache 2.0 License. \ No newline at end of file
+TorchOpt is licensed under the Apache 2.0 License.
diff --git a/examples/L2R/README.md b/examples/L2R/README.md
index e2f8007e..8528fe24 100644
--- a/examples/L2R/README.md
+++ b/examples/L2R/README.md
@@ -1,14 +1,16 @@
# Learning-to-reweight-examples

-Code On Mnist reweighting example in paper [Learning to Reweight Examples for Robust Deep Learning](https://arxiv.org/abs/1803.09050)] using `TorchOpt`. The idea of L2R is to use virtual update of inner-loop neural network optimisation to meta-learn the reweighting parameters for robust deep learning. We use `MetaSGD` as the inner-loop optimiser.
+Code on the MNIST reweighting example from the paper [Learning to Reweight Examples for Robust Deep Learning](https://arxiv.org/abs/1803.09050) using TorchOpt. The idea of L2R is to use virtual updates of the inner-loop neural network optimisation to meta-learn the reweighting parameters for robust deep learning. We use `MetaSGD` as the inner-loop optimiser.
+
+## Usage

-# Usage
We use traditional supervised training as the baseline.
+
```bash
### Run both algorithms and conduct comparison
python3 train_l2r.py --algo both

-### For baseline
+### For baseline
python3 train_l2r.py --algo baseline

### For L2R algorithm
@@ -16,8 +18,9 @@ python3 train_l2r.py --algo l2r
```

-# Results
+## Results
+
The test accuracy comparison between baseline and L2R validates the effectiveness of the algorithms.
+
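To make the virtual-update pattern concrete before the helper diffs below, here is a minimal, self-contained sketch of what `train_l2r.py` does with TorchOpt. It is illustrative only: `TinyNet`, the random tensors, and the squared-error losses are stand-ins for the example's `LeNet5`, MNIST batches, and weighted BCE loss.

```python
import torch
import torch.nn as nn

import torchopt


class TinyNet(nn.Module):
    # Illustrative stand-in for the example's LeNet5.
    def __init__(self):
        super().__init__()
        self.fc = nn.Linear(4, 1)

    def forward(self, x):
        return self.fc(x).squeeze(-1)


net = TinyNet()
inner_optimiser = torchopt.MetaSGD(net, lr=0.1)  # differentiable inner-loop optimiser

x_train, y_train = torch.randn(8, 4), torch.randn(8)
x_valid, y_valid = torch.randn(8, 4), torch.randn(8)
meta_weights = torch.zeros(8, requires_grad=True)  # per-example weights to meta-learn

# checkpoint the network and optimiser so the virtual update can be rolled back
net_state = torchopt.extract_state_dict(net)
optim_state = torchopt.extract_state_dict(inner_optimiser)

# virtual inner step: the weighted training loss drives a differentiable update
inner_loss = (meta_weights * (net(x_train) - y_train) ** 2).sum()
inner_optimiser.step(inner_loss)

# outer step: the validation loss is differentiated back through the inner
# update, yielding a meta-gradient for the per-example weights
outer_loss = ((net(x_valid) - y_valid) ** 2).mean()
weight_grad = torch.autograd.grad(outer_loss, meta_weights)[0]
new_weights = torch.relu(-weight_grad)  # keep examples whose up-weighting lowers the loss

# restore the pre-update state before the real parameter update
torchopt.recover_state_dict(net, net_state)
torchopt.recover_state_dict(inner_optimiser, optim_state)
```

Because `MetaSGD.step` keeps the update differentiable, the single `torch.autograd.grad` call above is all that is needed to reach the reweighting parameters; the real update is then taken with an ordinary optimiser, as in `train_l2r.py`.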
diff --git a/examples/L2R/helper/argument.py b/examples/L2R/helper/argument.py
index e29bdb0a..1440f27a 100644
--- a/examples/L2R/helper/argument.py
+++ b/examples/L2R/helper/argument.py
@@ -15,36 +15,25 @@

import argparse

-import torch
-

def parse_args():
-  parser = argparse.ArgumentParser([], description='L2R')
-
-  parser.add_argument('--seed', type=int, default=42)
-  parser.add_argument('--epoch', type=int, default=30, help='Training Epoch')
-
-  parser.add_argument('--lr', type=float, default=1e-3, help='learning rate')
-  parser.add_argument(
-    '--pos_ratio',
-    type=float,
-    default=0.995,
-    help='Ratio of positive examples in training'
-  )
-  parser.add_argument(
-    '--ntest', type=int, default=500, help='Number of testing examples'
-  )
-  parser.add_argument(
-    '--ntrain', type=int, default=5000, help='Number of testing examples'
-  )
-  parser.add_argument(
-    '--nval', type=int, default=10, help='Number of valid examples'
-  )
-  parser.add_argument('--batch_size', type=int, default=100, help='Batch size')
-
-  ### For baseline
-  parser.add_argument('--algo', type=str, default='both')
-
-  args = parser.parse_args()
-  # use the GPU if available
-  return args
+    parser = argparse.ArgumentParser([], description='L2R')
+
+    parser.add_argument('--seed', type=int, default=42)
+    parser.add_argument('--epoch', type=int, default=30, help='Training Epoch')
+
+    parser.add_argument('--lr', type=float, default=1e-3, help='learning rate')
+    parser.add_argument(
+        '--pos_ratio', type=float, default=0.995, help='Ratio of positive examples in training'
+    )
+    parser.add_argument('--ntest', type=int, default=500, help='Number of testing examples')
+    parser.add_argument('--ntrain', type=int, default=5000, help='Number of training examples')
+    parser.add_argument('--nval', type=int, default=10, help='Number of validation examples')
+    parser.add_argument('--batch_size', type=int, default=100, help='Batch size')
+
+    # For baseline
+    parser.add_argument('--algo', type=str, default='both')
+
+    args = parser.parse_args()
+    # use the GPU if available
+    return args
diff --git a/examples/L2R/helper/model.py b/examples/L2R/helper/model.py
index 5a3ff2fa..d3a0beac 100644
--- a/examples/L2R/helper/model.py
+++ b/examples/L2R/helper/model.py
@@ -28,54 +28,49 @@
#
# Models for MNIST experiments.
# -from __future__ import division, print_function -import numpy as np import torch import torch.nn as nn class LeNet5(nn.Module): - def __init__(self, args): - super(LeNet5, self).__init__() - self.model = nn.Sequential( - nn.Conv2d(1, 16, 5), nn.ReLU(), nn.MaxPool2d(2), nn.Conv2d(16, 32, 5), - nn.ReLU(), nn.MaxPool2d(2), nn.Flatten(), nn.Linear(512, 128), nn.ReLU(), - nn.Linear(128, 1), nn.Sigmoid() - ) - self.args = args - self.meta_weights = torch.zeros( - self.args.batch_size, requires_grad=True - ).to(self.args.device) - self.criterion = nn.BCELoss() + def __init__(self, args): + super(LeNet5, self).__init__() + self.model = nn.Sequential( + nn.Conv2d(1, 16, 5), nn.ReLU(), nn.MaxPool2d(2), nn.Conv2d(16, 32, 5), nn.ReLU(), + nn.MaxPool2d(2), nn.Flatten(), nn.Linear(512, 128), nn.ReLU(), nn.Linear(128, 1), + nn.Sigmoid() + ) + self.args = args + self.meta_weights = torch.zeros( + self.args.batch_size, requires_grad=True + ).to(self.args.device) + self.criterion = nn.BCELoss() - def forward(self, x): - return self.model(x).squeeze(dim=-1) + def forward(self, x): + return self.model(x).squeeze(dim=-1) - def reset_meta(self, size): - self.meta_weights = torch.zeros( - size, requires_grad=True - ).to(self.args.device) + def reset_meta(self, size): + self.meta_weights = torch.zeros(size, requires_grad=True).to(self.args.device) - def normalise(self): - self.meta_weights = self.meta_weights.detach() - weights_sum = torch.sum(self.meta_weights) - weights_sum = weights_sum + 1 if weights_sum == 0 else weights_sum - self.meta_weights /= weights_sum + def normalise(self): + self.meta_weights = self.meta_weights.detach() + weights_sum = torch.sum(self.meta_weights) + weights_sum = weights_sum + 1 if weights_sum == 0 else weights_sum + self.meta_weights /= weights_sum - def inner_loss(self, train_x, train_y): - result = self.forward(train_x) + def inner_loss(self, train_x, train_y): + result = self.forward(train_x) - # manually implement bce_loss to make the loss differentiable w.r.t self.meta_weights - loss = -( - train_y * torch.log(result + 1e-10) + - (1 - train_y) * torch.log(1 - result + 1e-10) - ) - weighted_loss = torch.sum(self.meta_weights * loss) - return weighted_loss + # manually implement bce_loss to make the loss differentiable w.r.t self.meta_weights + loss = -( + train_y * torch.log(result + 1e-10) + (1 - train_y) * torch.log(1 - result + 1e-10) + ) + weighted_loss = torch.sum(self.meta_weights * loss) + return weighted_loss - def outer_loss(self, valid_x, valid_y): - result = self.forward(valid_x) - loss = self.criterion(result, valid_y) - return loss + def outer_loss(self, valid_x, valid_y): + result = self.forward(valid_x) + loss = self.criterion(result, valid_y) + return loss diff --git a/examples/L2R/helper/utils.py b/examples/L2R/helper/utils.py index 96f469b7..0fb01ad4 100644 --- a/examples/L2R/helper/utils.py +++ b/examples/L2R/helper/utils.py @@ -19,161 +19,142 @@ import random import numpy as np -import seaborn as sns import torch from torch.utils.data import TensorDataset def get_imbalance_dataset( - mnist_train, - mnist_test, - pos_ratio=0.9, - ntrain=5000, - nval=10, - ntest=500, - class_0=4, - class_1=9 + mnist_train, mnist_test, pos_ratio=0.9, ntrain=5000, nval=10, ntest=500, class_0=4, class_1=9 ): - ratio = 1 - pos_ratio - ratio_test = 0.5 - - # In training, we have 10% 4 and 90% 9. - # In testing, we have 50% 4 and 50% 9. 
- x_train = mnist_train.train_data.numpy() / 255.0 - y_train = mnist_train.train_labels.numpy() - x_test = mnist_test.test_data.numpy() / 255.0 - y_test = mnist_test.test_labels.numpy() - x_train_0 = x_train[y_train == class_0] - x_test_0 = x_test[y_test == class_0] - - # First shuffle, negative. - idx = np.arange(x_train_0.shape[0]) - np.random.shuffle(idx) - x_train_0 = x_train_0[idx] - - nval_small_neg = int(np.floor(nval * ratio_test)) - ntrain_small_neg = int(np.floor(ntrain * ratio)) - nval_small_neg - - x_val_0 = x_train_0[:nval_small_neg] # 450 4 in validation. - x_train_0 = x_train_0[nval_small_neg:nval_small_neg + ntrain_small_neg - ] # 500 4 in training. - - print('Number of train negative classes', ntrain_small_neg) - print('Number of val negative classes', nval_small_neg) - - idx = np.arange(x_test_0.shape[0]) - np.random.shuffle(idx) - x_test_0 = x_test_0[:int(np.floor(ntest * ratio_test))] # 450 4 in testing. - - x_train_1 = x_train[y_train == class_1] - x_test_1 = x_test[y_test == class_1] - - # First shuffle, positive. - idx = np.arange(x_train_1.shape[0]) - np.random.shuffle(idx) - x_train_1 = x_train_1[idx] - - nvalsmall_pos = int(np.floor(nval * (1 - ratio_test))) - ntrainsmall_pos = int(np.floor(ntrain * (1 - ratio))) - nvalsmall_pos - - x_val_1 = x_train_1[:nvalsmall_pos] # 50 9 in validation. - x_train_1 = x_train_1[nvalsmall_pos:nvalsmall_pos + ntrainsmall_pos - ] # 4500 9 in training. - - idx = np.arange(x_test_1.shape[0]) - np.random.shuffle(idx) - x_test_1 = x_test_1[idx] - x_test_1 = x_test_1[:int(np.floor(ntest * (1 - ratio_test))) - ] # 500 9 in testing. - - print('Number of train positive classes', ntrainsmall_pos) - print('Number of val positive classes', nvalsmall_pos) - - y_train_subset = np.concatenate( - [np.zeros([x_train_0.shape[0]]), - np.ones([x_train_1.shape[0]])] - ) - y_val_subset = np.concatenate( - [np.zeros([x_val_0.shape[0]]), - np.ones([x_val_1.shape[0]])] - ) - y_test_subset = np.concatenate( - [np.zeros([x_test_0.shape[0]]), - np.ones([x_test_1.shape[0]])] - ) - - y_train_pos_subset = np.ones([x_train_1.shape[0]]) - y_train_neg_subset = np.zeros([x_train_0.shape[0]]) - - x_train_subset = np.concatenate([x_train_0, x_train_1], axis=0)[:, - None, :, :] - x_val_subset = np.concatenate([x_val_0, x_val_1], axis=0)[:, None, :, :] - x_test_subset = np.concatenate([x_test_0, x_test_1], axis=0)[:, None, :, :] - - x_train_pos_subset = x_train_1[:, None, :, :] - x_train_neg_subset = x_train_0[:, None, :, :] - - # Final shuffle. 
- idx = np.arange(x_train_subset.shape[0]) - np.random.shuffle(idx) - x_train_subset = x_train_subset[idx].astype(np.float32) - y_train_subset = y_train_subset[idx].astype(np.float32) - - idx = np.arange(x_val_subset.shape[0]) - np.random.shuffle(idx) - x_val_subset = x_val_subset[idx].astype(np.float32) - y_val_subset = y_val_subset[idx].astype(np.float32) - - idx = np.arange(x_test_subset.shape[0]) - np.random.shuffle(idx) - x_test_subset = x_test_subset[idx].astype(np.float32) - y_test_subset = y_test_subset[idx].astype(np.float32) - - x_train_subset, y_train_subset, x_val_subset, y_val_subset, x_test_subset, y_test_subset = torch.tensor( - x_train_subset - ), torch.tensor(y_train_subset), torch.tensor(x_val_subset), torch.tensor( - y_val_subset - ), torch.tensor(x_test_subset), torch.tensor(y_test_subset) - - train_set, val_set, test_set = TensorDataset( - x_train_subset, y_train_subset - ), TensorDataset(x_val_subset, - y_val_subset), TensorDataset(x_test_subset, y_test_subset) - - return train_set, val_set, test_set + ratio = 1 - pos_ratio + ratio_test = 0.5 + + # In training, we have 10% 4 and 90% 9. + # In testing, we have 50% 4 and 50% 9. + x_train = mnist_train.train_data.numpy() / 255.0 + y_train = mnist_train.train_labels.numpy() + x_test = mnist_test.test_data.numpy() / 255.0 + y_test = mnist_test.test_labels.numpy() + x_train_0 = x_train[y_train == class_0] + x_test_0 = x_test[y_test == class_0] + + # First shuffle, negative. + idx = np.arange(x_train_0.shape[0]) + np.random.shuffle(idx) + x_train_0 = x_train_0[idx] + + nval_small_neg = int(np.floor(nval * ratio_test)) + ntrain_small_neg = int(np.floor(ntrain * ratio)) - nval_small_neg + + x_val_0 = x_train_0[:nval_small_neg] # 450 4 in validation. + x_train_0 = x_train_0[nval_small_neg:nval_small_neg + ntrain_small_neg] # 500 4 in training. + + print('Number of train negative classes', ntrain_small_neg) + print('Number of val negative classes', nval_small_neg) + + idx = np.arange(x_test_0.shape[0]) + np.random.shuffle(idx) + x_test_0 = x_test_0[:int(np.floor(ntest * ratio_test))] # 450 4 in testing. + + x_train_1 = x_train[y_train == class_1] + x_test_1 = x_test[y_test == class_1] + + # First shuffle, positive. + idx = np.arange(x_train_1.shape[0]) + np.random.shuffle(idx) + x_train_1 = x_train_1[idx] + + nvalsmall_pos = int(np.floor(nval * (1 - ratio_test))) + ntrainsmall_pos = int(np.floor(ntrain * (1 - ratio))) - nvalsmall_pos + + x_val_1 = x_train_1[:nvalsmall_pos] # 50 9 in validation. + x_train_1 = x_train_1[nvalsmall_pos:nvalsmall_pos + ntrainsmall_pos] # 4500 9 in training. + + idx = np.arange(x_test_1.shape[0]) + np.random.shuffle(idx) + x_test_1 = x_test_1[idx] + x_test_1 = x_test_1[:int(np.floor(ntest * (1 - ratio_test)))] # 500 9 in testing. 
+ + print('Number of train positive classes', ntrainsmall_pos) + print('Number of val positive classes', nvalsmall_pos) + + y_train_subset = np.concatenate([np.zeros([x_train_0.shape[0]]), np.ones([x_train_1.shape[0]])]) + y_val_subset = np.concatenate([np.zeros([x_val_0.shape[0]]), np.ones([x_val_1.shape[0]])]) + y_test_subset = np.concatenate([np.zeros([x_test_0.shape[0]]), np.ones([x_test_1.shape[0]])]) + + y_train_pos_subset = np.ones([x_train_1.shape[0]]) + y_train_neg_subset = np.zeros([x_train_0.shape[0]]) + + x_train_subset = np.concatenate([x_train_0, x_train_1], axis=0)[:, None, :, :] + x_val_subset = np.concatenate([x_val_0, x_val_1], axis=0)[:, None, :, :] + x_test_subset = np.concatenate([x_test_0, x_test_1], axis=0)[:, None, :, :] + + x_train_pos_subset = x_train_1[:, None, :, :] + x_train_neg_subset = x_train_0[:, None, :, :] + + # Final shuffle. + idx = np.arange(x_train_subset.shape[0]) + np.random.shuffle(idx) + x_train_subset = x_train_subset[idx].astype(np.float32) + y_train_subset = y_train_subset[idx].astype(np.float32) + + idx = np.arange(x_val_subset.shape[0]) + np.random.shuffle(idx) + x_val_subset = x_val_subset[idx].astype(np.float32) + y_val_subset = y_val_subset[idx].astype(np.float32) + + idx = np.arange(x_test_subset.shape[0]) + np.random.shuffle(idx) + x_test_subset = x_test_subset[idx].astype(np.float32) + y_test_subset = y_test_subset[idx].astype(np.float32) + + x_train_subset, y_train_subset, x_val_subset, y_val_subset, x_test_subset, y_test_subset = ( + torch.tensor(x_train_subset), torch.tensor(y_train_subset), torch.tensor(x_val_subset), + torch.tensor(y_val_subset), torch.tensor(x_test_subset), torch.tensor(y_test_subset) + ) + + train_set, val_set, test_set = ( + TensorDataset(x_train_subset, y_train_subset), TensorDataset(x_val_subset, y_val_subset), + TensorDataset(x_test_subset, y_test_subset) + ) + + return train_set, val_set, test_set def set_seed(seed, cudnn=True): - """ + """ Seed everything we can! Note that gym environments might need additional seeding (env.seed(seed)), and num_workers needs to be set to 1. """ - random.seed(seed) - np.random.seed(seed) - torch.manual_seed(seed) - torch.random.manual_seed(seed) - torch.cuda.manual_seed(seed) - # note: the below slows down the code but makes it reproducible - torch.cuda.manual_seed_all( - seed - ) # Sets the seed for generating random numbers on all GPUs. It’s safe to call this function if CUDA is not available; in that case, it is silently ignored. - if cudnn: - torch.backends.cudnn.deterministic = True - torch.backends.cudnn.benchmark = False + + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + torch.random.manual_seed(seed) + torch.cuda.manual_seed(seed) + # note: the below slows down the code but makes it reproducible + # Sets the seed for generating random numbers on all GPUs. It’s safe to + # call this function if CUDA is not available; in that case, it is + # silently ignored. 
+ torch.cuda.manual_seed_all(seed) + if cudnn: + torch.backends.cudnn.deterministic = True + torch.backends.cudnn.benchmark = False def plot(baseline, l2r): - import matplotlib.pyplot as plt - import numpy as np - import seaborn as sns - sns.set(style='darkgrid') - sns.set_theme(style="darkgrid") - plt.plot(baseline, label='baseline') - plt.plot(l2r, label='l2r') - plt.legend() - plt.ylabel('Test acc') - plt.xlabel('Epoch') - plt.title('Comparison between Baseline and L2R') - plt.savefig('./result.png') + import matplotlib.pyplot as plt + import numpy as np + import seaborn as sns + + sns.set(style='darkgrid') + sns.set_theme(style="darkgrid") + plt.plot(baseline, label='baseline') + plt.plot(l2r, label='l2r') + plt.legend() + plt.ylabel('Test acc') + plt.xlabel('Epoch') + plt.title('Comparison between Baseline and L2R') + plt.savefig('./result.png') diff --git a/examples/L2R/train_l2r.py b/examples/L2R/train_l2r.py index 22deb2ce..c04a90e1 100644 --- a/examples/L2R/train_l2r.py +++ b/examples/L2R/train_l2r.py @@ -28,264 +28,227 @@ # import json -import os -import time import numpy as np import torch -import torch.nn as nn -from helper.argument import parse_args -from helper.model import LeNet5 -from helper.utils import get_imbalance_dataset, plot, set_seed -from torch import device from torch.utils.data import DataLoader from torch.utils.tensorboard import SummaryWriter from torchvision.datasets import MNIST -import TorchOpt +import torchopt + +from .helper.argument import parse_args +from .helper.model import LeNet5 +from .helper.utils import get_imbalance_dataset, plot, set_seed def run_baseline(args, mnist_train, mnist_test): - print('Run Baseline') - set_seed(args.seed) - - pos_ratio = args.pos_ratio - ntrain = args.ntrain - nval = args.nval - ntest = args.ntest - epoch = args.epoch - - folder = './result/baseline/' - writer = SummaryWriter('./result/baseline') - with open('./result/baseline/config.json', 'w') as f: - json.dump(args.__dict__, f) - - args.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") - - train_set, val_set, test_set = get_imbalance_dataset( - mnist_train, - mnist_test, - pos_ratio=pos_ratio, - ntrain=ntrain, - nval=nval, - ntest=ntest - ) - train_loader = DataLoader( - train_set, batch_size=args.batch_size, shuffle=True, num_workers=4 - ) - valid_loader = DataLoader( - val_set, batch_size=args.batch_size, shuffle=True, num_workers=1 - ) - test_loader = DataLoader( - test_set, batch_size=args.batch_size, shuffle=True, num_workers=1 - ) - model = LeNet5(args).to(args.device) - - model_optimiser = torch.optim.Adam(model.parameters(), lr=args.lr) - - step = 0 - running_train_loss = [] - test_acc_result = [] - for _epoch in range(epoch): - model.train() - for idx, (train_x, train_label) in enumerate(train_loader): - train_x, train_label = train_x.to(args.device - ), train_label.to(args.device) - outer_loss = model.outer_loss(train_x, train_label) - - model_optimiser.zero_grad() - outer_loss.backward() - model_optimiser.step() - - running_train_loss.append(outer_loss.item()) - writer.add_scalar('train_loss', outer_loss.item(), step) - - if step % 10 == 0 and step > 0: - running_train_mean = np.mean(np.array(running_train_loss)) - print( - "EPOCH: {}, BATCH: {}, LOSS: {}".format( - _epoch, idx, running_train_mean - ) - ) - writer.add_scalar('running_train_loss', running_train_mean, step) - running_train_loss = [] - - step += 1 - - print('Beginning to Test') - model.eval() - train_acc = evaluate(train_loader, model, args) - test_acc = 
evaluate(test_loader, model, args) - model.train() - - writer.add_scalar('train_acc', train_acc, _epoch) - writer.add_scalar('test_acc', test_acc, _epoch) - test_acc_result.append(test_acc) - print( - "EPOCH: {}, TRAIN_ACC: {}, TEST_ACC: {}".format( - _epoch, train_acc, test_acc - ) + print('Run Baseline') + set_seed(args.seed) + + pos_ratio = args.pos_ratio + ntrain = args.ntrain + nval = args.nval + ntest = args.ntest + epoch = args.epoch + + folder = './result/baseline/' + writer = SummaryWriter('./result/baseline') + with open('./result/baseline/config.json', 'w') as f: + json.dump(args.__dict__, f) + + args.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") + + train_set, val_set, test_set = get_imbalance_dataset( + mnist_train, mnist_test, pos_ratio=pos_ratio, ntrain=ntrain, nval=nval, ntest=ntest ) - return test_acc_result + train_loader = DataLoader(train_set, batch_size=args.batch_size, shuffle=True, num_workers=4) + valid_loader = DataLoader(val_set, batch_size=args.batch_size, shuffle=True, num_workers=1) + test_loader = DataLoader(test_set, batch_size=args.batch_size, shuffle=True, num_workers=1) + model = LeNet5(args).to(args.device) + + model_optimiser = torch.optim.Adam(model.parameters(), lr=args.lr) + + step = 0 + running_train_loss = [] + test_acc_result = [] + for _epoch in range(epoch): + model.train() + for idx, (train_x, train_label) in enumerate(train_loader): + train_x, train_label = train_x.to(args.device), train_label.to(args.device) + outer_loss = model.outer_loss(train_x, train_label) + + model_optimiser.zero_grad() + outer_loss.backward() + model_optimiser.step() + + running_train_loss.append(outer_loss.item()) + writer.add_scalar('train_loss', outer_loss.item(), step) + + if step % 10 == 0 and step > 0: + running_train_mean = np.mean(np.array(running_train_loss)) + print("EPOCH: {}, BATCH: {}, LOSS: {}".format(_epoch, idx, running_train_mean)) + writer.add_scalar('running_train_loss', running_train_mean, step) + running_train_loss = [] + + step += 1 + + print('Beginning to Test') + model.eval() + train_acc = evaluate(train_loader, model, args) + test_acc = evaluate(test_loader, model, args) + model.train() + + writer.add_scalar('train_acc', train_acc, _epoch) + writer.add_scalar('test_acc', test_acc, _epoch) + test_acc_result.append(test_acc) + print("EPOCH: {}, TRAIN_ACC: {}, TEST_ACC: {}".format(_epoch, train_acc, test_acc)) + return test_acc_result def run_L2R(args, mnist_train, mnist_test): - print('Run L2R') - set_seed(args.seed) - - pos_ratio = args.pos_ratio - ntrain = args.ntrain - nval = args.nval - ntest = args.ntest - epoch = args.epoch - - folder = './result/l2r/' - writer = SummaryWriter('./result/l2r/log') - with open('./result/l2r/config.json', 'w') as f: - json.dump(args.__dict__, f) - - args.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") - - train_set, val_set, test_set = get_imbalance_dataset( - mnist_train, - mnist_test, - pos_ratio=pos_ratio, - ntrain=ntrain, - nval=nval, - ntest=ntest - ) - train_loader = DataLoader( - train_set, batch_size=args.batch_size, shuffle=True, num_workers=2 - ) - valid_loader = DataLoader( - val_set, batch_size=args.batch_size, shuffle=True, num_workers=1 - ) - test_loader = DataLoader( - test_set, batch_size=args.batch_size, shuffle=True, num_workers=1 - ) - model = LeNet5(args).to(args.device) - model_optimiser = TorchOpt.MetaSGD(model, lr=args.lr) - real_model_optimiser = torch.optim.Adam(model.parameters(), lr=args.lr) - - step = 0 - time_bp = 0 - 
running_valid_loss = [] - valid = iter(valid_loader) - running_train_loss = [] - test_acc_result = [] - for _epoch in range(epoch): - model.train() - for idx, (train_x, train_label) in enumerate(train_loader): - try: - valid_x, valid_label = valid.next() - except: - valid = iter(valid_loader) - valid_x, valid_label = valid.next() - train_x, train_label, valid_x, valid_label = train_x.to( - args.device - ), train_label.to(args.device), valid_x.to(args.device - ), valid_label.to(args.device) - - # reset meta-parameter weights - model.reset_meta(size=train_x.size(0)) - - net_state_dict = TorchOpt.extract_state_dict(model) - optim_state_dict = TorchOpt.extract_state_dict(model_optimiser) - - for _ in range(1): - inner_loss = model.inner_loss(train_x, train_label) - model_optimiser.step(inner_loss) - - # caclulate outer_loss, deirve meta-gradient and normalise - outer_loss = model.outer_loss(valid_x, valid_label) - model.meta_weights = - \ - torch.autograd.grad(outer_loss, model.meta_weights)[0] - model.meta_weights = torch.nn.ReLU()(model.meta_weights) - model.normalise() - - # log loss - running_valid_loss.append(outer_loss.item()) - writer.add_scalar('validation_loss', outer_loss.item(), step) - - # reset the model and model optimiser - TorchOpt.recover_state_dict(model, net_state_dict) - TorchOpt.recover_state_dict(model_optimiser, optim_state_dict) - - # reuse inner_adapt to conduct real update based on learned meta weights - inner_loss = model.inner_loss(train_x, train_label) - for _ in range(1): - inner_loss = model.inner_loss(train_x, train_label) - real_model_optimiser.zero_grad() - inner_loss.backward() - real_model_optimiser.step() - - running_train_loss.append(inner_loss.item()) - writer.add_scalar('weighted_train_loss', inner_loss.item(), step) - - if step % 10 == 0 and step > 0: - running_valid_mean = np.mean(np.array(running_valid_loss)) - running_train_mean = np.mean(np.array(running_train_loss)) - print( - "EPOCH: {}, BATCH: {}, WEIGHTED_TRAIN_LOSS: {}, VALID_LOSS: {}" - .format(_epoch, idx, running_train_mean, running_valid_mean) - ) - running_valid_loss = [] - running_train_loss = [] - writer.add_scalar('running_valid_loss', running_valid_mean, step) - writer.add_scalar('running_train_loss', running_train_mean, step) - - step += 1 - - print('Beginning to Test') - model.eval() - train_acc = evaluate(train_loader, model, args) - test_acc = evaluate(test_loader, model, args) - model.train() - - writer.add_scalar('train_acc', train_acc, _epoch) - writer.add_scalar('test_acc', test_acc, _epoch) - test_acc_result.append(test_acc) - print( - "EPOCH: {}, TRAIN_ACC: {}, TEST_ACC: {}".format( - _epoch, train_acc, test_acc - ) + print('Run L2R') + set_seed(args.seed) + + pos_ratio = args.pos_ratio + ntrain = args.ntrain + nval = args.nval + ntest = args.ntest + epoch = args.epoch + + folder = './result/l2r/' + writer = SummaryWriter('./result/l2r/log') + with open('./result/l2r/config.json', 'w') as f: + json.dump(args.__dict__, f) + + args.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") + + train_set, val_set, test_set = get_imbalance_dataset( + mnist_train, mnist_test, pos_ratio=pos_ratio, ntrain=ntrain, nval=nval, ntest=ntest ) - return test_acc_result + train_loader = DataLoader(train_set, batch_size=args.batch_size, shuffle=True, num_workers=2) + valid_loader = DataLoader(val_set, batch_size=args.batch_size, shuffle=True, num_workers=1) + test_loader = DataLoader(test_set, batch_size=args.batch_size, shuffle=True, num_workers=1) + model = 
LeNet5(args).to(args.device)
+    model_optimiser = torchopt.MetaSGD(model, lr=args.lr)
+    real_model_optimiser = torch.optim.Adam(model.parameters(), lr=args.lr)
+
+    step = 0
+    time_bp = 0
+    running_valid_loss = []
+    valid = iter(valid_loader)
+    running_train_loss = []
+    test_acc_result = []
+    for _epoch in range(epoch):
+        model.train()
+        for idx, (train_x, train_label) in enumerate(train_loader):
+            try:
+                valid_x, valid_label = next(valid)
+            except StopIteration:
+                valid = iter(valid_loader)
+                valid_x, valid_label = next(valid)
+            train_x, train_label, valid_x, valid_label = (
+                train_x.to(args.device), train_label.to(args.device), valid_x.to(args.device),
+                valid_label.to(args.device)
+            )
+
+            # reset meta-parameter weights
+            model.reset_meta(size=train_x.size(0))
+
+            net_state_dict = torchopt.extract_state_dict(model)
+            optim_state_dict = torchopt.extract_state_dict(model_optimiser)
+
+            for _ in range(1):
+                inner_loss = model.inner_loss(train_x, train_label)
+                model_optimiser.step(inner_loss)
+
+            # calculate outer_loss, derive meta-gradient and normalise
+            outer_loss = model.outer_loss(valid_x, valid_label)
+            model.meta_weights = -torch.autograd.grad(outer_loss, model.meta_weights)[0]
+            model.meta_weights = torch.nn.ReLU()(model.meta_weights)
+            model.normalise()
+
+            # log loss
+            running_valid_loss.append(outer_loss.item())
+            writer.add_scalar('validation_loss', outer_loss.item(), step)
+
+            # reset the model and model optimiser
+            torchopt.recover_state_dict(model, net_state_dict)
+            torchopt.recover_state_dict(model_optimiser, optim_state_dict)
+
+            # reuse inner_adapt to conduct real update based on learned meta weights
+            inner_loss = model.inner_loss(train_x, train_label)
+            for _ in range(1):
+                inner_loss = model.inner_loss(train_x, train_label)
+                real_model_optimiser.zero_grad()
+                inner_loss.backward()
+                real_model_optimiser.step()
+
+            running_train_loss.append(inner_loss.item())
+            writer.add_scalar('weighted_train_loss', inner_loss.item(), step)
+
+            if step % 10 == 0 and step > 0:
+                running_valid_mean = np.mean(np.array(running_valid_loss))
+                running_train_mean = np.mean(np.array(running_train_loss))
+                print(
+                    "EPOCH: {}, BATCH: {}, WEIGHTED_TRAIN_LOSS: {}, VALID_LOSS: {}".format(
+                        _epoch, idx, running_train_mean, running_valid_mean
+                    )
+                )
+                running_valid_loss = []
+                running_train_loss = []
+                writer.add_scalar('running_valid_loss', running_valid_mean, step)
+                writer.add_scalar('running_train_loss', running_train_mean, step)
+
+            step += 1
+
+        print('Beginning to Test')
+        model.eval()
+        train_acc = evaluate(train_loader, model, args)
+        test_acc = evaluate(test_loader, model, args)
+        model.train()
+
+        writer.add_scalar('train_acc', train_acc, _epoch)
+        writer.add_scalar('test_acc', test_acc, _epoch)
+        test_acc_result.append(test_acc)
+        print("EPOCH: {}, TRAIN_ACC: {}, TEST_ACC: {}".format(_epoch, train_acc, test_acc))
+    return test_acc_result


def evaluate(data_loader, model, args):
-  running_accuracy = 0
-  total = 0
-  with torch.no_grad():
-    for data in data_loader:
-      inputs, outputs = data
-      inputs, outputs = inputs.to(args.device), outputs.to(args.device)
-      predicted = model(inputs)
-      predicted[predicted >= 0.5] = 1.0
-      predicted[predicted < 0.5] = 0.0
-      total += outputs.size(0)
-      running_accuracy += (predicted == outputs).sum().item()
-
-  accuracy = running_accuracy / total
-  return accuracy
+    running_accuracy = 0
+    total = 0
+    with torch.no_grad():
+        for data in data_loader:
+            inputs, outputs = data
+            inputs, outputs = inputs.to(args.device), outputs.to(args.device)
+            predicted = model(inputs)
+            predicted[predicted >= 0.5] = 1.0
+            predicted[predicted < 0.5] = 0.0
+            total += outputs.size(0)
+            running_accuracy += (predicted == outputs).sum().item()
+
+    accuracy = running_accuracy / total
+    return accuracy


def main():
-  mnist_train = MNIST(root='./helper/mnist_data', download=True, train=True)
-  mnist_test = MNIST(root='./helper/mnist_data', download=True, train=False)
-  args = parse_args()
-
-  assert args.algo in ['baseline', 'l2r', 'both']
-  if args.algo == 'baseline':
-    run_baseline(args, mnist_train, mnist_test)
-  elif args.algo == 'l2r':
-    run_L2R(args, mnist_train, mnist_test)
-  else:
-    baseline_test_acc = run_baseline(args, mnist_train, mnist_test)
-    args = parse_args()
-    l2r_test_acc = run_L2R(args, mnist_train, mnist_test)
-    plot(baseline_test_acc, l2r_test_acc)
+    mnist_train = MNIST(root='./helper/mnist_data', download=True, train=True)
+    mnist_test = MNIST(root='./helper/mnist_data', download=True, train=False)
+    args = parse_args()
+
+    assert args.algo in ['baseline', 'l2r', 'both']
+    if args.algo == 'baseline':
+        run_baseline(args, mnist_train, mnist_test)
+    elif args.algo == 'l2r':
+        run_L2R(args, mnist_train, mnist_test)
+    else:
+        baseline_test_acc = run_baseline(args, mnist_train, mnist_test)
+        args = parse_args()
+        l2r_test_acc = run_L2R(args, mnist_train, mnist_test)
+        plot(baseline_test_acc, l2r_test_acc)


if __name__ == '__main__':
-  main()
+    main()
diff --git a/examples/LOLA/README.md b/examples/LOLA/README.md
index 8ef37723..1decc337 100755
--- a/examples/LOLA/README.md
+++ b/examples/LOLA/README.md
@@ -1,8 +1,9 @@
# LOLA-examples

-Code On LOLA a in paper [Learning with Opponent-Learning Awareness](https://arxiv.org/abs/1709.04326)] using `TorchOpt`. The LOLA learning rule includes a term that accounts for the impact of one agent's policy on the anticipated parameter update of the other agents. We use `MetaSGD` as the inner-loop optimiser.
+Code on LOLA from the paper [Learning with Opponent-Learning Awareness](https://arxiv.org/abs/1709.04326) using TorchOpt. The LOLA learning rule includes a term that accounts for the impact of one agent's policy on the anticipated parameter update of the other agents. We use `MetaSGD` as the inner-loop optimiser.
+
+## Usage

-# Usage
```bash
### Run LOLA
python3 lola_dice.py
@@ -11,9 +12,10 @@ python3 lola_dice.py
python3 visualise.py
```

-# Results
+## Results
+
The figure illustrates the experimental result.
+
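As background for the helper diffs that follow: the core of the example is the DiCE estimator assembled in `helper/utils.py` from `magic_box` and `Memory.dice_objective`. The sketch below rebuilds that surrogate on random placeholder tensors (the batch size, horizon, and values are made up) purely to show the wiring.

```python
import torch


def magic_box(x):
    # DiCE "magic box": evaluates to 1 in the forward pass, but carries the
    # gradient of the log-probabilities in the backward pass.
    return torch.exp(x - x.detach())


log_probs = torch.randn(4, 3, requires_grad=True)  # per-step action log-probs (batch=4, T=3)
rewards = torch.randn(4, 3)                        # per-step rewards
gamma = 0.96

# discounts 1, gamma, gamma^2, ... as in Memory.dice_objective
cum_discount = torch.cumprod(gamma * torch.ones_like(rewards), dim=1) / gamma
discounted_rewards = rewards * cum_discount

# each reward depends on all stochastic actions taken up to its time step
dependencies = torch.cumsum(log_probs, dim=1)

# the surrogate's value equals the discounted return, while its gradient is
# the REINFORCE-style policy gradient; minimising the negation maximises return
dice_objective = torch.mean(torch.sum(magic_box(dependencies) * discounted_rewards, dim=1))
(-dice_objective).backward()
print(log_probs.grad.shape)  # torch.Size([4, 3])
```

In `lola_dice.py` this objective is handed to the opponent's virtual `MetaSGD` for the lookahead steps, so each agent differentiates through the other's anticipated update.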
diff --git a/examples/LOLA/helper/agent.py b/examples/LOLA/helper/agent.py
index 7676cadd..969a04f7 100755
--- a/examples/LOLA/helper/agent.py
+++ b/examples/LOLA/helper/agent.py
@@ -19,38 +19,36 @@

import torch
import torch.nn as nn

-import TorchOpt
+import torchopt


class theta_model(nn.Module):
-  def __init__(self, theta):
-    super().__init__()
-    self.theta = nn.Parameter(torch.tensor(theta.detach(), requires_grad=True))
+    def __init__(self, theta):
+        super().__init__()
+        self.theta = nn.Parameter(torch.tensor(theta.detach(), requires_grad=True))


class Agent():
-  def __init__(self, args):
+    def __init__(self, args):

-    self.args = args
-    # init theta and its optimizer
-    self.theta = nn.Parameter(torch.zeros(5, requires_grad=True))
-    self.theta_optimizer = torch.optim.Adam((self.theta,), lr=args.lr_out)
+        self.args = args
+        # init theta and its optimizer
+        self.theta = nn.Parameter(torch.zeros(5, requires_grad=True))
+        self.theta_optimizer = torch.optim.Adam((self.theta,), lr=args.lr_out)

-    # init values and its optimizer
-    self.values = nn.Parameter(torch.zeros(5, requires_grad=True))
-    self.value_optimizer = torch.optim.Adam((self.values,), lr=args.lr_v)
+        # init values and its optimizer
+        self.values = nn.Parameter(torch.zeros(5, requires_grad=True))
+        self.value_optimizer = torch.optim.Adam((self.values,), lr=args.lr_v)

-    self.set_virtual()
+        self.set_virtual()

-  def set_virtual(self):
-    self.virtual_theta = theta_model(self.theta)
-    self.virtual_optimiser = TorchOpt.MetaSGD(
-      self.virtual_theta, lr=self.args.lr_in
-    )
+    def set_virtual(self):
+        self.virtual_theta = theta_model(self.theta)
+        self.virtual_optimiser = torchopt.MetaSGD(self.virtual_theta, lr=self.args.lr_in)

-  def value_update(self, loss):
-    self.value_optimizer.zero_grad()
-    loss.backward()
-    self.value_optimizer.step()
+    def value_update(self, loss):
+        self.value_optimizer.zero_grad()
+        loss.backward()
+        self.value_optimizer.step()
diff --git a/examples/LOLA/helper/argument.py b/examples/LOLA/helper/argument.py
index 33a29f38..b8e67cc5 100755
--- a/examples/LOLA/helper/argument.py
+++ b/examples/LOLA/helper/argument.py
@@ -17,35 +17,19 @@


def parse_args():
-  parser = argparse.ArgumentParser([], description='LOLA')
+    parser = argparse.ArgumentParser([], description='LOLA')

-  parser.add_argument('--seed', type=int, default=6666)
-  parser.add_argument(
-    '--lr_in', type=float, default=0.3, help='Inner Learning rate'
-  )
+    parser.add_argument('--seed', type=int, default=6666)
+    parser.add_argument('--lr_in', type=float, default=0.3, help='Inner Learning rate')

-  parser.add_argument(
-    '--lr_out', type=float, default=0.2, help='Outer learning rate'
-  )
-  parser.add_argument(
-    '--lr_v', type=float, default=0.1, help='Learning rate of value function'
-  )
-  parser.add_argument(
-    '--gamma', type=float, default=0.96, help='Discount factor'
-  )
-  parser.add_argument(
-    '--n_update', type=int, default=100, help='Number of updates'
-  )
-  parser.add_argument(
-    '--n_lookaheads', type=int, default=1, help='Number of updates'
-  )
-  parser.add_argument(
-    '--len_rollout', type=int, default=150, help='Length of IPD'
-  )
-  parser.add_argument(
-    '--batch_size', type=int, default=1024, help='Natch size'
-  )
-  parser.add_argument('--use_baseline', action='store_false', default=True)
+    parser.add_argument('--lr_out', type=float, default=0.2, help='Outer learning rate')
+    parser.add_argument('--lr_v', type=float, default=0.1, help='Learning rate of value function')
+    parser.add_argument('--gamma', type=float, default=0.96, help='Discount factor')
+    parser.add_argument('--n_update', type=int, default=100, help='Number of updates')
+    parser.add_argument('--n_lookaheads', type=int, default=1, help='Number of lookaheads')
+    parser.add_argument('--len_rollout', type=int, default=150, help='Length of IPD')
+    parser.add_argument('--batch_size', type=int, default=1024, help='Batch size')
+    parser.add_argument('--use_baseline', action='store_false', default=True)

-  args = parser.parse_args()
-  return args
+    args = parser.parse_args()
+    return args
diff --git a/examples/LOLA/helper/env.py b/examples/LOLA/helper/env.py
index bb72c5b0..df4522f6 100755
--- a/examples/LOLA/helper/env.py
+++ b/examples/LOLA/helper/env.py
@@ -22,79 +22,75 @@


class OneHot(gym.Space):
-  """
+    """
One-hot space. Used as the observation space.
"""

-  def __init__(self, n):
-    self.n = n
+    def __init__(self, n):
+        self.n = n

-  def sample(self):
-    return np.random.multinomial(1, [1. / self.n] * self.n)
+    def sample(self):
+        return np.random.multinomial(1, [1. / self.n] * self.n)

-  def contains(self, x):
-    return isinstance(x, np.ndarray) and \
-      x.shape == (self.n, ) and \
-      np.all(np.logical_or(x == 0, x == 1)) and \
-      np.sum(x) == 1
+    def contains(self, x):
+        return isinstance(x, np.ndarray) and \
+               x.shape == (self.n, ) and \
+               np.all(np.logical_or(x == 0, x == 1)) and \
+               np.sum(x) == 1

-  @property
-  def shape(self):
-    return (self.n,)
+    @property
+    def shape(self):
+        return (self.n,)

-  def __repr__(self):
-    return "OneHot(%d)" % self.n
+    def __repr__(self):
+        return "OneHot(%d)" % self.n

-  def __eq__(self, other):
-    return self.n == other.n
+    def __eq__(self, other):
+        return self.n == other.n


class IPD(gym.Env):
-  """
+    """
A two-agent vectorized environment.
Possible actions for each agent are (C)ooperate and (D)efect.
""" - # Possible actions - NUM_AGENTS = 2 - NUM_ACTIONS = 2 - NUM_STATES = 5 - - def __init__(self, max_steps, batch_size=1): - self.max_steps = max_steps - self.batch_size = batch_size - self.payout_mat = np.array([[-2, 0], [-3, -1]]) - self.states = np.array([[1, 2], [3, 4]]) - - self.action_space = Tuple( - [Discrete(self.NUM_ACTIONS) for _ in range(self.NUM_AGENTS)] - ) - self.observation_space = Tuple( - [OneHot(self.NUM_STATES) for _ in range(self.NUM_AGENTS)] - ) - self.available_actions = [ - np.ones((batch_size, self.NUM_ACTIONS), dtype=int) - for _ in range(self.NUM_AGENTS) - ] - - self.step_count = None - - def reset(self): - self.step_count = 0 - init_state = np.zeros(self.batch_size) - observation = [init_state, init_state] - info = [{'available_actions': aa} for aa in self.available_actions] - return observation, info - - def step(self, action): - ac0, ac1 = action - self.step_count += 1 - - r0 = self.payout_mat[ac0, ac1] - r1 = self.payout_mat[ac1, ac0] - s0 = self.states[ac0, ac1] - s1 = self.states[ac1, ac0] - observation = [s0, s1] - reward = [r0, r1] - done = (self.step_count == self.max_steps) - info = [{'available_actions': aa} for aa in self.available_actions] - return observation, reward, done, info + + # Possible actions + NUM_AGENTS = 2 + NUM_ACTIONS = 2 + NUM_STATES = 5 + + def __init__(self, max_steps, batch_size=1): + self.max_steps = max_steps + self.batch_size = batch_size + self.payout_mat = np.array([[-2, 0], [-3, -1]]) + self.states = np.array([[1, 2], [3, 4]]) + + self.action_space = Tuple([Discrete(self.NUM_ACTIONS) for _ in range(self.NUM_AGENTS)]) + self.observation_space = Tuple([OneHot(self.NUM_STATES) for _ in range(self.NUM_AGENTS)]) + self.available_actions = [ + np.ones((batch_size, self.NUM_ACTIONS), dtype=int) for _ in range(self.NUM_AGENTS) + ] + + self.step_count = None + + def reset(self): + self.step_count = 0 + init_state = np.zeros(self.batch_size) + observation = [init_state, init_state] + info = [{'available_actions': aa} for aa in self.available_actions] + return observation, info + + def step(self, action): + ac0, ac1 = action + self.step_count += 1 + + r0 = self.payout_mat[ac0, ac1] + r1 = self.payout_mat[ac1, ac0] + s0 = self.states[ac0, ac1] + s1 = self.states[ac1, ac0] + observation = [s0, s1] + reward = [r0, r1] + done = (self.step_count == self.max_steps) + info = [{'available_actions': aa} for aa in self.available_actions] + return observation, reward, done, info diff --git a/examples/LOLA/helper/utils.py b/examples/LOLA/helper/utils.py index 30b8cf51..6b487a40 100755 --- a/examples/LOLA/helper/utils.py +++ b/examples/LOLA/helper/utils.py @@ -23,101 +23,97 @@ # evaluate the policy def step(ipd, theta1, theta2, values1, values2, args): - # just to evaluate progress: - (s1, s2), _ = ipd.reset() - score1 = 0 - score2 = 0 - for t in range(args.len_rollout): - a1, lp1, v1 = act(s1, theta1, values1) - a2, lp2, v2 = act(s2, theta2, values2) - (s1, s2), (r1, r2), _, _ = ipd.step((a1, a2)) - # cumulate scores - score1 += np.mean(r1) / float(args.len_rollout) - score2 += np.mean(r2) / float(args.len_rollout) - return (score1, score2) + # just to evaluate progress: + (s1, s2), _ = ipd.reset() + score1 = 0 + score2 = 0 + for t in range(args.len_rollout): + a1, lp1, v1 = act(s1, theta1, values1) + a2, lp2, v2 = act(s2, theta2, values2) + (s1, s2), (r1, r2), _, _ = ipd.step((a1, a2)) + # cumulate scores + score1 += np.mean(r1) / float(args.len_rollout) + score2 += np.mean(r2) / float(args.len_rollout) + return (score1, score2) # dice operator 
def magic_box(x): - return torch.exp(x - x.detach()) + return torch.exp(x - x.detach()) # replay buffer class Memory(): - def __init__(self, args): - self.self_logprobs = [] - self.other_logprobs = [] - self.values = [] - self.rewards = [] - self.args = args - - def add(self, lp, other_lp, v, r): - self.self_logprobs.append(lp) - self.other_logprobs.append(other_lp) - self.values.append(v) - self.rewards.append(r) - - def dice_objective(self, use_baseline=True): - self_logprobs = torch.stack(self.self_logprobs, dim=1) - other_logprobs = torch.stack(self.other_logprobs, dim=1) - values = torch.stack(self.values, dim=1) - rewards = torch.stack(self.rewards, dim=1) - - # apply discount: - cum_discount = torch.cumprod( - self.args.gamma * torch.ones(*rewards.size()), dim=1 - ) / self.args.gamma - discounted_rewards = rewards * cum_discount - discounted_values = values * cum_discount - - # stochastics nodes involved in rewards dependencies: - dependencies = torch.cumsum(self_logprobs + other_logprobs, dim=1) - - # logprob of each stochastic nodes: - stochastic_nodes = self_logprobs + other_logprobs - - # dice objective: - dice_objective = torch.mean( - torch.sum(magic_box(dependencies) * discounted_rewards, dim=1) - ) - - if use_baseline: - # variance_reduction: - baseline_term = torch.mean( - torch.sum( - (1 - magic_box(stochastic_nodes)) * discounted_values, dim=1 - ) - ) - dice_objective = dice_objective + baseline_term - - return -dice_objective # want to minimize -objective - - def value_loss(self): - values = torch.stack(self.values, dim=1) - rewards = torch.stack(self.rewards, dim=1) - return torch.mean((rewards - values)**2) + def __init__(self, args): + self.self_logprobs = [] + self.other_logprobs = [] + self.values = [] + self.rewards = [] + self.args = args + + def add(self, lp, other_lp, v, r): + self.self_logprobs.append(lp) + self.other_logprobs.append(other_lp) + self.values.append(v) + self.rewards.append(r) + + def dice_objective(self, use_baseline=True): + self_logprobs = torch.stack(self.self_logprobs, dim=1) + other_logprobs = torch.stack(self.other_logprobs, dim=1) + values = torch.stack(self.values, dim=1) + rewards = torch.stack(self.rewards, dim=1) + + # apply discount: + cum_discount = torch.cumprod( + self.args.gamma * torch.ones(*rewards.size()), dim=1 + ) / self.args.gamma + discounted_rewards = rewards * cum_discount + discounted_values = values * cum_discount + + # stochastics nodes involved in rewards dependencies: + dependencies = torch.cumsum(self_logprobs + other_logprobs, dim=1) + + # logprob of each stochastic nodes: + stochastic_nodes = self_logprobs + other_logprobs + + # dice objective: + dice_objective = torch.mean(torch.sum(magic_box(dependencies) * discounted_rewards, dim=1)) + + if use_baseline: + # variance_reduction: + baseline_term = torch.mean( + torch.sum((1 - magic_box(stochastic_nodes)) * discounted_values, dim=1) + ) + dice_objective = dice_objective + baseline_term + + return -dice_objective # want to minimize -objective + + def value_loss(self): + values = torch.stack(self.values, dim=1) + rewards = torch.stack(self.rewards, dim=1) + return torch.mean((rewards - values)**2) def act(batch_states, theta, values): - batch_states = torch.from_numpy(batch_states).long() - probs = torch.sigmoid(theta)[batch_states] - m = Bernoulli(1 - probs) - actions = m.sample() - log_probs_actions = m.log_prob(actions) - return actions.numpy().astype(int), log_probs_actions, values[batch_states] + batch_states = torch.from_numpy(batch_states).long() + probs = 
torch.sigmoid(theta)[batch_states] + m = Bernoulli(1 - probs) + actions = m.sample() + log_probs_actions = m.log_prob(actions) + return actions.numpy().astype(int), log_probs_actions, values[batch_states] def sample(ipd, policy, value, args): - theta1, theta2 = policy - value1, value2 = value - (s1, s2), _ = ipd.reset() - memory_agent1 = Memory(args) - memory_agent2 = Memory(args) - for t in range(args.len_rollout): - a1, lp1, v1 = act(s1, theta1, value1) - a2, lp2, v2 = act(s2, theta2, value2) - (s1, s2), (r1, r2), _, _ = ipd.step((a1, a2)) - memory_agent1.add(lp1, lp2, v1, torch.from_numpy(r1).float()) - memory_agent2.add(lp2, lp1, v2, torch.from_numpy(r2).float()) - return memory_agent1, memory_agent2 + theta1, theta2 = policy + value1, value2 = value + (s1, s2), _ = ipd.reset() + memory_agent1 = Memory(args) + memory_agent2 = Memory(args) + for t in range(args.len_rollout): + a1, lp1, v1 = act(s1, theta1, value1) + a2, lp2, v2 = act(s2, theta2, value2) + (s1, s2), (r1, r2), _, _ = ipd.step((a1, a2)) + memory_agent1.add(lp1, lp2, v1, torch.from_numpy(r1).float()) + memory_agent2.add(lp2, lp1, v2, torch.from_numpy(r2).float()) + return memory_agent1, memory_agent2 diff --git a/examples/LOLA/lola_dice.py b/examples/LOLA/lola_dice.py index f5a112da..82d585d6 100755 --- a/examples/LOLA/lola_dice.py +++ b/examples/LOLA/lola_dice.py @@ -16,105 +16,94 @@ # https://github.com/alexis-jacq/LOLA_DiCE # ============================================================================== -from copy import deepcopy - -import matplotlib.pyplot as plt import numpy as np import torch -import torch.nn as nn -from helper.agent import Agent -from helper.argument import parse_args -from helper.env import IPD -from helper.utils import sample, step -from torch.distributions import Bernoulli -import TorchOpt +from .helper.agent import Agent +from .helper.argument import parse_args +from .helper.env import IPD +from .helper.utils import sample, step def main(args): - ipd = IPD(args.len_rollout, args.batch_size) - agent1, agent2 = Agent(args), Agent(args) - agent1_copy, agent2_copy = Agent(args), Agent(args) - n_lookaheads = args.n_lookaheads - joint_scores = [] - print("start iterations with", n_lookaheads, "lookaheads:") - - for update in range(args.n_update): - # reset virtual update - agent1.set_virtual() - agent2.set_virtual() - - # agent 2 assumes that agent 1 conducts n-step lookahead - for _ in range(n_lookaheads): - memory1, memory2 = sample( - ipd, [agent1.virtual_theta.theta, agent2.theta], - [agent1.values, agent2.values], args - ) - inner_loss = memory1.dice_objective(use_baseline=args.use_baseline) - agent1.virtual_optimiser.step(inner_loss) - - # agent 1 assumes that agent 2 conducts n-step lookahead - for _ in range(n_lookaheads): - memory1, memory2 = sample( - ipd, [agent1.theta, agent2.virtual_theta.theta], - [agent1.values, agent2.values], args - ) - inner_loss = memory2.dice_objective(use_baseline=args.use_baseline) - agent2.virtual_optimiser.step(inner_loss) - - # update agent 1 - memory1, memory2 = sample( - ipd, [agent1.theta, agent2.virtual_theta.theta], - [agent1.values, agent2.values], args - ) - outer_loss = memory1.dice_objective(use_baseline=args.use_baseline) - agent1.theta_optimizer.zero_grad() - outer_loss.backward(retain_graph=True) - agent1.theta_optimizer.step() - - # update agent 1 value function - v_loss = memory1.value_loss() - agent1.value_update(v_loss) - - # update agent 2 - memory1, memory2 = sample( - ipd, [agent1.virtual_theta.theta, agent2.theta], - [agent1.values, 
agent2.values], args - ) - outer_loss = memory2.dice_objective(use_baseline=args.use_baseline) - agent2.theta_optimizer.zero_grad() - outer_loss.backward(retain_graph=True) - agent2.theta_optimizer.step() - - # update agent 2 value function - v_loss = memory2.value_loss() - agent2.value_update(v_loss) - - # evaluate progress: - score = step( - ipd, agent1.theta, agent2.theta, agent1.values, agent2.values, args - ) - joint_scores.append(0.5 * (score[0] + score[1])) - - # print - if update % 10 == 0: - p1 = [p.item() for p in torch.sigmoid(agent1.theta)] - p2 = [p.item() for p in torch.sigmoid(agent2.theta)] - print( - 'update', update, 'score (%.3f,%.3f)' % (score[0], score[1]), - 'policy (agent1) = {%.3f, %.3f, %.3f, %.3f, %.3f}' % - (p1[0], p1[1], p1[2], p1[3], p1[4]), - ' (agent2) = {%.3f, %.3f, %.3f, %.3f, %.3f}' % - (p2[0], p2[1], p2[2], p2[3], p2[4]) - ) - - return joint_scores + ipd = IPD(args.len_rollout, args.batch_size) + agent1, agent2 = Agent(args), Agent(args) + agent1_copy, agent2_copy = Agent(args), Agent(args) + n_lookaheads = args.n_lookaheads + joint_scores = [] + print("start iterations with", n_lookaheads, "lookaheads:") + + for update in range(args.n_update): + # reset virtual update + agent1.set_virtual() + agent2.set_virtual() + + # agent 2 assumes that agent 1 conducts n-step lookahead + for _ in range(n_lookaheads): + memory1, memory2 = sample( + ipd, [agent1.virtual_theta.theta, agent2.theta], [agent1.values, agent2.values], + args + ) + inner_loss = memory1.dice_objective(use_baseline=args.use_baseline) + agent1.virtual_optimiser.step(inner_loss) + + # agent 1 assumes that agent 2 conducts n-step lookahead + for _ in range(n_lookaheads): + memory1, memory2 = sample( + ipd, [agent1.theta, agent2.virtual_theta.theta], [agent1.values, agent2.values], + args + ) + inner_loss = memory2.dice_objective(use_baseline=args.use_baseline) + agent2.virtual_optimiser.step(inner_loss) + + # update agent 1 + memory1, memory2 = sample( + ipd, [agent1.theta, agent2.virtual_theta.theta], [agent1.values, agent2.values], args + ) + outer_loss = memory1.dice_objective(use_baseline=args.use_baseline) + agent1.theta_optimizer.zero_grad() + outer_loss.backward(retain_graph=True) + agent1.theta_optimizer.step() + + # update agent 1 value function + v_loss = memory1.value_loss() + agent1.value_update(v_loss) + + # update agent 2 + memory1, memory2 = sample( + ipd, [agent1.virtual_theta.theta, agent2.theta], [agent1.values, agent2.values], args + ) + outer_loss = memory2.dice_objective(use_baseline=args.use_baseline) + agent2.theta_optimizer.zero_grad() + outer_loss.backward(retain_graph=True) + agent2.theta_optimizer.step() + + # update agent 2 value function + v_loss = memory2.value_loss() + agent2.value_update(v_loss) + + # evaluate progress: + score = step(ipd, agent1.theta, agent2.theta, agent1.values, agent2.values, args) + joint_scores.append(0.5 * (score[0] + score[1])) + + # print + if update % 10 == 0: + p1 = [p.item() for p in torch.sigmoid(agent1.theta)] + p2 = [p.item() for p in torch.sigmoid(agent2.theta)] + print( + 'update', update, 'score (%.3f,%.3f)' % (score[0], score[1]), + 'policy (agent1) = {%.3f, %.3f, %.3f, %.3f, %.3f}' % + (p1[0], p1[1], p1[2], p1[3], p1[4]), + ' (agent2) = {%.3f, %.3f, %.3f, %.3f, %.3f}' % (p2[0], p2[1], p2[2], p2[3], p2[4]) + ) + + return joint_scores if __name__ == "__main__": - args = parse_args() - joint_score = dict() - for nla in range(3): - args.n_lookaheads = nla - joint_score[nla] = main(args) - np.save('result.npy', joint_score) + args = 
parse_args()
+    joint_score = dict()
+    for nla in range(3):
+        args.n_lookaheads = nla
+        joint_score[nla] = main(args)
+    np.save('result.npy', joint_score)
diff --git a/examples/LOLA/visualise.py b/examples/LOLA/visualise.py
index de71afef..2640f6a7 100755
--- a/examples/LOLA/visualise.py
+++ b/examples/LOLA/visualise.py
@@ -19,17 +19,17 @@


def plot(file):
-  data = np.load('result.npy', allow_pickle=True).tolist()
-  sns.set(style='darkgrid')
-  sns.set_theme(style="darkgrid")
-  for step in range(3):
-    plt.plot(data[step], label='Step ' + str(step))
-  plt.legend()
-  plt.xlabel('Iteartions', fontsize=20)
-  plt.ylabel('Joint score', fontsize=20)
-  plt.savefig('./result.png')
+    data = np.load('result.npy', allow_pickle=True).tolist()
+    sns.set(style='darkgrid')
+    sns.set_theme(style="darkgrid")
+    for step in range(3):
+        plt.plot(data[step], label='Step ' + str(step))
+    plt.legend()
+    plt.xlabel('Iterations', fontsize=20)
+    plt.ylabel('Joint score', fontsize=20)
+    plt.savefig('./result.png')


# plot progress:
if __name__ == "__main__":
-  plot('result.npy')
+    plot('result.npy')
diff --git a/examples/MAML-RL/README.md b/examples/MAML-RL/README.md
index 26a80200..d99738e3 100755
--- a/examples/MAML-RL/README.md
+++ b/examples/MAML-RL/README.md
@@ -1,16 +1,20 @@
# Reinforcement learning with Model-Agnostic Meta-Learning (MAML)

-Code on Tabular MDP example in paper *Model-Agnostic Meta-Learning* [[MAML](https://arxiv.org/abs/1703.03400)] using `TorchOpt`. The idea of MAML is to learn the initial parameters of an agent's policy so that the agent can rapidly adapt to new environments with a limited number of policy-gradient updates. We use `MetaSGD` as the inner-loop optimiser.
+Code on the Tabular MDP example from the paper *Model-Agnostic Meta-Learning* [[MAML](https://arxiv.org/abs/1703.03400)] using TorchOpt. The idea of MAML is to learn the initial parameters of an agent's policy so that the agent can rapidly adapt to new environments with a limited number of policy-gradient updates. We use `MetaSGD` as the inner-loop optimiser.
+
+## Usage

-# Usage
Specify the seed to train.
+
```bash
### Run MAML
python run_MAML.py --seed 1
```

-# Results
+## Results
+
The training curve and testing curve between initial policy and adapted policy validate the effectiveness of algorithms.
+
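Before the environment and policy diffs, a note on the loop structure: the example follows the usual TorchOpt MAML skeleton of checkpointing the policy, taking a differentiable inner step per task, backpropagating the post-adaptation loss, and rolling back. The sketch below is a toy rendering of that skeleton; `task_loss` is a placeholder for the example's actual policy-gradient loss on sampled trajectories, and the layer sizes are arbitrary.

```python
import torch
import torch.nn as nn

import torchopt

policy = nn.Sequential(nn.Linear(10, 32), nn.ReLU(), nn.Linear(32, 5))
inner_optimiser = torchopt.MetaSGD(policy, lr=0.1)                # differentiable inner loop
outer_optimiser = torch.optim.Adam(policy.parameters(), lr=1e-3)  # meta-optimiser


def task_loss(policy):
    # placeholder: the example computes a policy-gradient loss from trajectories
    return policy(torch.randn(20, 10)).pow(2).mean()


outer_optimiser.zero_grad()
for _ in range(4):  # tasks in the meta-batch
    # checkpoint so every task adapts from the same initialisation
    policy_state = torchopt.extract_state_dict(policy)
    optim_state = torchopt.extract_state_dict(inner_optimiser)

    inner_optimiser.step(task_loss(policy))  # differentiable adaptation step

    # the post-adaptation loss backpropagates through the inner update,
    # accumulating meta-gradients on the shared initial parameters
    task_loss(policy).backward()

    torchopt.recover_state_dict(policy, policy_state)
    torchopt.recover_state_dict(inner_optimiser, optim_state)
outer_optimiser.step()
```

Each `backward` call deposits meta-gradients on the shared initial parameters, which `outer_optimiser.step()` finally applies once all tasks have been processed.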
diff --git a/examples/MAML-RL/helpers/Tabular_mdp.py b/examples/MAML-RL/helpers/Tabular_mdp.py index 32a9d929..1df07599 100644 --- a/examples/MAML-RL/helpers/Tabular_mdp.py +++ b/examples/MAML-RL/helpers/Tabular_mdp.py @@ -20,18 +20,17 @@ import numpy as np from gym import spaces from gym.utils import seeding -from gym.wrappers.time_limit import TimeLimit class TabularMDPEnv(gym.Env): - """Tabular MDP problems, as described in [1]. - - At each time step, the agent chooses one of `num_actions` actions, say `i`, - receives a reward sampled from a Normal distribution with mean `m_i` and - variance 1 (fixed across all tasks), and reaches a new state following the - dynamics of the Markov Decision Process (MDP). The tabular MDP tasks are - generated by sampling the mean rewards from a Normal distribution with mean - 1 and variance 1, and sampling the transition probabilities from a uniform + """Tabular MDP problems, as described in [1]. + + At each time step, the agent chooses one of `num_actions` actions, say `i`, + receives a reward sampled from a Normal distribution with mean `m_i` and + variance 1 (fixed across all tasks), and reaches a new state following the + dynamics of the Markov Decision Process (MDP). The tabular MDP tasks are + generated by sampling the mean rewards from a Normal distribution with mean + 1 and variance 1, and sampling the transition probabilities from a uniform Dirichlet distribution (ie. with parameter 1). [1] Yan Duan, John Schulman, Xi Chen, Peter L. Bartlett, Ilya Sutskever, @@ -39,83 +38,76 @@ class TabularMDPEnv(gym.Env): Learning", 2016 (https://arxiv.org/abs/1611.02779) """ - def __init__( - self, num_states, num_actions, max_episode_steps, seed, task={} - ): - super(TabularMDPEnv, self).__init__() - self.max_episode_steps = max_episode_steps - self.num_states = num_states - self.num_actions = num_actions - - self.action_space = spaces.Discrete(num_actions) - self.observation_space = spaces.Box( - low=0.0, high=1.0, shape=(num_states,), dtype=np.float32 - ) - - self._task = task - self._transitions = task.get( - 'transitions', - np.full( - (num_states, num_actions, num_states), - 1.0 / num_states, - dtype=np.float32 - ) - ) - self._rewards_mean = task.get( - 'rewards_mean', np.zeros((num_states, num_actions), dtype=np.float32) - ) - self._state = 0 - self._elapsed_steps = None - - self.seed(seed) - - def seed(self, seed=None): - self.np_random, seed = seeding.np_random(seed) - return [seed] - - def sample_tasks(self, num_tasks): - transitions = self.np_random.dirichlet( - np.ones(self.num_states), - size=(num_tasks, self.num_states, self.num_actions) - ) - rewards_mean = self.np_random.normal( - 1.0, 1.0, size=(num_tasks, self.num_states, self.num_actions) - ) - tasks = [ - { - 'transitions': transition, - 'rewards_mean': reward_mean - } for (transition, reward_mean) in zip(transitions, rewards_mean) - ] - return tasks - - def reset_task(self, task): - self._task = task - self._transitions = task['transitions'] - self._rewards_mean = task['rewards_mean'] - - def reset(self): - # From [1]: "an episode always starts on the first state" - self._state = 0 - observation = np.zeros(self.num_states, dtype=np.float32) - observation[self._state] = 1.0 - self._elapsed_steps = 0 - - return observation - - def step(self, action): - assert self.action_space.contains(action) - mean = self._rewards_mean[self._state, action] - reward = self.np_random.normal(mean, 1.0) - - self._state = self.np_random.choice( - self.num_states, p=self._transitions[self._state, action] - ) - 
observation = np.zeros(self.num_states, dtype=np.float32) - observation[self._state] = 1.0 - self._elapsed_steps += 1 - if self._elapsed_steps >= self.max_episode_steps: - done = True - else: - done = False - return observation, reward, done, {'task': self._task} + def __init__(self, num_states, num_actions, max_episode_steps, seed, task={}): + super(TabularMDPEnv, self).__init__() + self.max_episode_steps = max_episode_steps + self.num_states = num_states + self.num_actions = num_actions + + self.action_space = spaces.Discrete(num_actions) + self.observation_space = spaces.Box( + low=0.0, high=1.0, shape=(num_states,), dtype=np.float32 + ) + + self._task = task + self._transitions = task.get( + 'transitions', + np.full((num_states, num_actions, num_states), 1.0 / num_states, dtype=np.float32) + ) + self._rewards_mean = task.get( + 'rewards_mean', np.zeros((num_states, num_actions), dtype=np.float32) + ) + self._state = 0 + self._elapsed_steps = None + + self.seed(seed) + + def seed(self, seed=None): + self.np_random, seed = seeding.np_random(seed) + return [seed] + + def sample_tasks(self, num_tasks): + transitions = self.np_random.dirichlet( + np.ones(self.num_states), size=(num_tasks, self.num_states, self.num_actions) + ) + rewards_mean = self.np_random.normal( + 1.0, 1.0, size=(num_tasks, self.num_states, self.num_actions) + ) + tasks = [ + { + 'transitions': transition, + 'rewards_mean': reward_mean + } for (transition, reward_mean) in zip(transitions, rewards_mean) + ] + return tasks + + def reset_task(self, task): + self._task = task + self._transitions = task['transitions'] + self._rewards_mean = task['rewards_mean'] + + def reset(self): + # From [1]: "an episode always starts on the first state" + self._state = 0 + observation = np.zeros(self.num_states, dtype=np.float32) + observation[self._state] = 1.0 + self._elapsed_steps = 0 + + return observation + + def step(self, action): + assert self.action_space.contains(action) + mean = self._rewards_mean[self._state, action] + reward = self.np_random.normal(mean, 1.0) + + self._state = self.np_random.choice( + self.num_states, p=self._transitions[self._state, action] + ) + observation = np.zeros(self.num_states, dtype=np.float32) + observation[self._state] = 1.0 + self._elapsed_steps += 1 + if self._elapsed_steps >= self.max_episode_steps: + done = True + else: + done = False + return observation, reward, done, {'task': self._task} diff --git a/examples/MAML-RL/helpers/__init__.py b/examples/MAML-RL/helpers/__init__.py index c3fee90d..e8761adc 100644 --- a/examples/MAML-RL/helpers/__init__.py +++ b/examples/MAML-RL/helpers/__init__.py @@ -19,12 +19,12 @@ from gym.envs.registration import register register( - 'TabularMDP-v0', - entry_point='helpers.Tabular_mdp:TabularMDPEnv', - kwargs={ - 'num_states': 10, - 'num_actions': 5, - 'max_episode_steps': 10, - 'seed': 1 - } + 'TabularMDP-v0', + entry_point='helpers.Tabular_mdp:TabularMDPEnv', + kwargs={ + 'num_states': 10, + 'num_actions': 5, + 'max_episode_steps': 10, + 'seed': 1 + } ) diff --git a/examples/MAML-RL/helpers/policy.py b/examples/MAML-RL/helpers/policy.py index 54ee3f5c..66ab1fa3 100644 --- a/examples/MAML-RL/helpers/policy.py +++ b/examples/MAML-RL/helpers/policy.py @@ -22,28 +22,28 @@ class CategoricalMLPPolicy(nn.Module): - """Policy network based on a multi-layer perceptron (MLP), with a - `Categorical` distribution output. This policy network can be used on tasks - with discrete action spaces (eg. `TabularMDPEnv`). 
+ """Policy network based on a multi-layer perceptron (MLP), with a + `Categorical` distribution output. This policy network can be used on tasks + with discrete action spaces (eg. `TabularMDPEnv`). """ - def __init__( - self, - input_size, - output_size, - ): - super(CategoricalMLPPolicy, self).__init__() - self.torso = nn.Sequential( - nn.Linear(input_size, 32), - nn.ReLU(), - nn.Linear(32, 32), - nn.ReLU(), - ) - self.policy_head = nn.Linear(32, output_size) - self.value_head = nn.Linear(32, 1) + def __init__( + self, + input_size, + output_size, + ): + super(CategoricalMLPPolicy, self).__init__() + self.torso = nn.Sequential( + nn.Linear(input_size, 32), + nn.ReLU(), + nn.Linear(32, 32), + nn.ReLU(), + ) + self.policy_head = nn.Linear(32, output_size) + self.value_head = nn.Linear(32, 1) - def forward(self, inputs, params=None): - embedding = self.torso(inputs) - logits = self.policy_head(embedding) - values = self.value_head(embedding) - return Categorical(logits=logits), values + def forward(self, inputs, params=None): + embedding = self.torso(inputs) + logits = self.policy_head(embedding) + values = self.value_head(embedding) + return Categorical(logits=logits), values diff --git a/examples/MAML-RL/run_MAML.py b/examples/MAML-RL/run_MAML.py index 1507e8bc..252f25e0 100644 --- a/examples/MAML-RL/run_MAML.py +++ b/examples/MAML-RL/run_MAML.py @@ -20,9 +20,10 @@ import numpy as np import torch import torch.optim as optim -from helpers.policy import CategoricalMLPPolicy -import TorchOpt +import torchopt + +from .helpers.policy import CategoricalMLPPolicy TASK_NUM = 40 TRAJ_NUM = 20 @@ -39,173 +40,161 @@ class Traj(NamedTuple): - obs: np.ndarray - acs: np.ndarray - next_obs: np.ndarray - rews: np.ndarray - gammas: np.ndarray + obs: np.ndarray + acs: np.ndarray + next_obs: np.ndarray + rews: np.ndarray + gammas: np.ndarray def sample_traj(env, task, policy): - env.reset_task(task) - obs_buf = np.zeros(shape=(TRAJ_LEN, TRAJ_NUM, STATE_DIM), dtype=np.float32) - next_obs_buf = np.zeros( - shape=(TRAJ_LEN, TRAJ_NUM, STATE_DIM), dtype=np.float32 - ) - acs_buf = np.zeros(shape=(TRAJ_LEN, TRAJ_NUM), dtype=np.int8) - rews_buf = np.zeros(shape=(TRAJ_LEN, TRAJ_NUM), dtype=np.float32) - gammas_buf = np.zeros(shape=(TRAJ_LEN, TRAJ_NUM), dtype=np.float32) - with torch.no_grad(): - for batch in range(TRAJ_NUM): - ob = env.reset() - for step in range(TRAJ_LEN): - ob_tensor = torch.from_numpy(ob) - pi, _ = policy(ob_tensor) - ac_tensor = pi.sample() - ac = ac_tensor.cpu().numpy() - next_ob, rew, done, info = env.step(ac) - - obs_buf[step][batch] = ob - next_obs_buf[step][batch] = next_ob - acs_buf[step][batch] = ac - rews_buf[step][batch] = rew - gammas_buf[step][batch] = done * GAMMA - ob = next_ob - return Traj( - obs=obs_buf, - acs=acs_buf, - next_obs=next_obs_buf, - rews=rews_buf, - gammas=gammas_buf - ) + env.reset_task(task) + obs_buf = np.zeros(shape=(TRAJ_LEN, TRAJ_NUM, STATE_DIM), dtype=np.float32) + next_obs_buf = np.zeros(shape=(TRAJ_LEN, TRAJ_NUM, STATE_DIM), dtype=np.float32) + acs_buf = np.zeros(shape=(TRAJ_LEN, TRAJ_NUM), dtype=np.int8) + rews_buf = np.zeros(shape=(TRAJ_LEN, TRAJ_NUM), dtype=np.float32) + gammas_buf = np.zeros(shape=(TRAJ_LEN, TRAJ_NUM), dtype=np.float32) + with torch.no_grad(): + for batch in range(TRAJ_NUM): + ob = env.reset() + for step in range(TRAJ_LEN): + ob_tensor = torch.from_numpy(ob) + pi, _ = policy(ob_tensor) + ac_tensor = pi.sample() + ac = ac_tensor.cpu().numpy() + next_ob, rew, done, info = env.step(ac) + + obs_buf[step][batch] = ob + 
next_obs_buf[step][batch] = next_ob + acs_buf[step][batch] = ac + rews_buf[step][batch] = rew + gammas_buf[step][batch] = done * GAMMA + ob = next_ob + return Traj(obs=obs_buf, acs=acs_buf, next_obs=next_obs_buf, rews=rews_buf, gammas=gammas_buf) def a2c_loss(traj, policy, value_coef): - lambdas = np.ones_like(traj.gammas) * LAMBDA - _, next_values = policy(torch.from_numpy(traj.next_obs)) - next_values = torch.squeeze(next_values, -1).detach().numpy() - # Work backwards to compute `G_{T-1}`, ..., `G_0`. - returns = [] - g = next_values[-1, :] - for i in reversed(range(next_values.shape[0])): - g = traj.rews[i, :] + traj.gammas[i, :] * \ - ((1 - lambdas[i, :]) * next_values[i, :] + lambdas[i, :] * g) - returns.insert(0, g) - lambda_returns = torch.from_numpy(np.array(returns)) - pi, values = policy(torch.from_numpy(traj.obs)) - log_probs = pi.log_prob(torch.from_numpy(traj.acs)) - advs = lambda_returns - torch.squeeze(values, -1) - action_loss = -(advs.detach() * log_probs).mean() - value_loss = advs.pow(2).mean() - - a2c_loss = action_loss + value_coef * value_loss - return a2c_loss + lambdas = np.ones_like(traj.gammas) * LAMBDA + _, next_values = policy(torch.from_numpy(traj.next_obs)) + next_values = torch.squeeze(next_values, -1).detach().numpy() + # Work backwards to compute `G_{T-1}`, ..., `G_0`. + returns = [] + g = next_values[-1, :] + for i in reversed(range(next_values.shape[0])): + g = traj.rews[i, :] + traj.gammas[i, :] * \ + ((1 - lambdas[i, :]) * next_values[i, :] + lambdas[i, :] * g) + returns.insert(0, g) + lambda_returns = torch.from_numpy(np.array(returns)) + pi, values = policy(torch.from_numpy(traj.obs)) + log_probs = pi.log_prob(torch.from_numpy(traj.acs)) + advs = lambda_returns - torch.squeeze(values, -1) + action_loss = -(advs.detach() * log_probs).mean() + value_loss = advs.pow(2).mean() + + a2c_loss = action_loss + value_coef * value_loss + return a2c_loss def evaluate(env, seed, task_num, policy): - pre_reward_ls = [] - post_reward_ls = [] - inner_opt = TorchOpt.MetaSGD(policy, lr=0.5) - env = gym.make( - 'TabularMDP-v0', - **dict( - num_states=STATE_DIM, - num_actions=ACTION_DIM, - max_episode_steps=TRAJ_LEN, - seed=args.seed + pre_reward_ls = [] + post_reward_ls = [] + inner_opt = torchopt.MetaSGD(policy, lr=0.5) + env = gym.make( + 'TabularMDP-v0', + **dict( + num_states=STATE_DIM, + num_actions=ACTION_DIM, + max_episode_steps=TRAJ_LEN, + seed=args.seed + ) ) - ) - tasks = env.sample_tasks(num_tasks=task_num) - policy_state_dict = TorchOpt.extract_state_dict(policy) - optim_state_dict = TorchOpt.extract_state_dict(inner_opt) - for idx in range(task_num): - for _ in range(inner_iters): - pre_trajs = sample_traj(env, tasks[idx], policy) + tasks = env.sample_tasks(num_tasks=task_num) + policy_state_dict = torchopt.extract_state_dict(policy) + optim_state_dict = torchopt.extract_state_dict(inner_opt) + for idx in range(task_num): + for _ in range(inner_iters): + pre_trajs = sample_traj(env, tasks[idx], policy) - inner_loss = a2c_loss(pre_trajs, policy, value_coef=0.5) - inner_opt.step(inner_loss) - post_trajs = sample_traj(env, tasks[idx], policy) + inner_loss = a2c_loss(pre_trajs, policy, value_coef=0.5) + inner_opt.step(inner_loss) + post_trajs = sample_traj(env, tasks[idx], policy) - # Logging - pre_reward_ls.append(np.sum(pre_trajs.rews, axis=0).mean()) - post_reward_ls.append(np.sum(post_trajs.rews, axis=0).mean()) + # Logging + pre_reward_ls.append(np.sum(pre_trajs.rews, axis=0).mean()) + post_reward_ls.append(np.sum(post_trajs.rews, axis=0).mean()) - 
TorchOpt.recover_state_dict(policy, policy_state_dict) - TorchOpt.recover_state_dict(inner_opt, optim_state_dict) - return pre_reward_ls, post_reward_ls + torchopt.recover_state_dict(policy, policy_state_dict) + torchopt.recover_state_dict(inner_opt, optim_state_dict) + return pre_reward_ls, post_reward_ls def main(args): - # init training - torch.manual_seed(args.seed) - torch.cuda.manual_seed_all(args.seed) - # Env - env = gym.make( - 'TabularMDP-v0', - **dict( - num_states=STATE_DIM, - num_actions=ACTION_DIM, - max_episode_steps=TRAJ_LEN, - seed=args.seed - ) - ) - # Policy - policy = CategoricalMLPPolicy(input_size=STATE_DIM, output_size=ACTION_DIM) - inner_opt = TorchOpt.MetaSGD(policy, lr=0.5) - outer_opt = optim.Adam(policy.parameters(), lr=1e-3) - train_pre_reward = [] - train_post_reward = [] - test_pre_reward = [] - test_post_reward = [] - - for i in range(outer_iters): - tasks = env.sample_tasks(num_tasks=TASK_NUM) - train_pre_reward_ls = [] - train_post_reward_ls = [] - - outer_opt.zero_grad() - - policy_state_dict = TorchOpt.extract_state_dict(policy) - optim_state_dict = TorchOpt.extract_state_dict(inner_opt) - for idx in range(TASK_NUM): - - for _ in range(inner_iters): - pre_trajs = sample_traj(env, tasks[idx], policy) - inner_loss = a2c_loss(pre_trajs, policy, value_coef=0.5) - inner_opt.step(inner_loss) - post_trajs = sample_traj(env, tasks[idx], policy) - outer_loss = a2c_loss(post_trajs, policy, value_coef=0.5) - outer_loss.backward() - TorchOpt.recover_state_dict(policy, policy_state_dict) - TorchOpt.recover_state_dict(inner_opt, optim_state_dict) - # Logging - train_pre_reward_ls.append(np.sum(pre_trajs.rews, axis=0).mean()) - train_post_reward_ls.append(np.sum(post_trajs.rews, axis=0).mean()) - outer_opt.step() - - test_pre_reward_ls, test_post_reward_ls = evaluate( - env, args.seed, TASK_NUM, policy + # init training + torch.manual_seed(args.seed) + torch.cuda.manual_seed_all(args.seed) + # Env + env = gym.make( + 'TabularMDP-v0', + **dict( + num_states=STATE_DIM, + num_actions=ACTION_DIM, + max_episode_steps=TRAJ_LEN, + seed=args.seed + ) ) - - train_pre_reward.append(sum(train_pre_reward_ls) / TASK_NUM) - train_post_reward.append(sum(train_post_reward_ls) / TASK_NUM) - test_pre_reward.append(sum(test_pre_reward_ls) / TASK_NUM) - test_post_reward.append(sum(test_post_reward_ls) / TASK_NUM) - - print('Train_iters', i) - print("train_pre_reward", sum(train_pre_reward_ls) / TASK_NUM) - print("train_post_reward", sum(train_post_reward_ls) / TASK_NUM) - print("test_pre_reward", sum(test_pre_reward_ls) / TASK_NUM) - print("test_post_reward", sum(test_post_reward_ls) / TASK_NUM) + # Policy + policy = CategoricalMLPPolicy(input_size=STATE_DIM, output_size=ACTION_DIM) + inner_opt = torchopt.MetaSGD(policy, lr=0.5) + outer_opt = optim.Adam(policy.parameters(), lr=1e-3) + train_pre_reward = [] + train_post_reward = [] + test_pre_reward = [] + test_post_reward = [] + + for i in range(outer_iters): + tasks = env.sample_tasks(num_tasks=TASK_NUM) + train_pre_reward_ls = [] + train_post_reward_ls = [] + + outer_opt.zero_grad() + + policy_state_dict = torchopt.extract_state_dict(policy) + optim_state_dict = torchopt.extract_state_dict(inner_opt) + for idx in range(TASK_NUM): + + for _ in range(inner_iters): + pre_trajs = sample_traj(env, tasks[idx], policy) + inner_loss = a2c_loss(pre_trajs, policy, value_coef=0.5) + inner_opt.step(inner_loss) + post_trajs = sample_traj(env, tasks[idx], policy) + outer_loss = a2c_loss(post_trajs, policy, value_coef=0.5) + outer_loss.backward() + 
torchopt.recover_state_dict(policy, policy_state_dict)
+            torchopt.recover_state_dict(inner_opt, optim_state_dict)
+            # Logging
+            train_pre_reward_ls.append(np.sum(pre_trajs.rews, axis=0).mean())
+            train_post_reward_ls.append(np.sum(post_trajs.rews, axis=0).mean())
+        outer_opt.step()
+
+        test_pre_reward_ls, test_post_reward_ls = evaluate(env, args.seed, TASK_NUM, policy)
+
+        train_pre_reward.append(sum(train_pre_reward_ls) / TASK_NUM)
+        train_post_reward.append(sum(train_post_reward_ls) / TASK_NUM)
+        test_pre_reward.append(sum(test_pre_reward_ls) / TASK_NUM)
+        test_post_reward.append(sum(test_post_reward_ls) / TASK_NUM)
+
+        print('Train_iters', i)
+        print("train_pre_reward", sum(train_pre_reward_ls) / TASK_NUM)
+        print("train_post_reward", sum(train_post_reward_ls) / TASK_NUM)
+        print("test_pre_reward", sum(test_pre_reward_ls) / TASK_NUM)
+        print("test_post_reward", sum(test_post_reward_ls) / TASK_NUM)


 if __name__ == "__main__":
-  parser = argparse.ArgumentParser(
-    description='Reinforcement learning with '
-    'Model-Agnostic Meta-Learning (MAML) - Train'
-  )
-  parser.add_argument(
-    '--seed', type=int, default=1, help='random seed (default: 1)'
-  )
-  args = parser.parse_args()
-  main(args)
+    parser = argparse.ArgumentParser(
+        description='Reinforcement learning with '
+        'Model-Agnostic Meta-Learning (MAML) - Train'
+    )
+    parser.add_argument('--seed', type=int, default=1, help='random seed (default: 1)')
+    args = parser.parse_args()
+    main(args)
diff --git a/examples/MGRL/README.md b/examples/MGRL/README.md
index 65299729..e2952d12 100644
--- a/examples/MGRL/README.md
+++ b/examples/MGRL/README.md
@@ -1,8 +1,10 @@
 # MGRL-examples

-Code on toy example of meta-learning the discount factor in paper [Meta-Gradient Reinforcement Learning](https://arxiv.org/abs/1805.09801) using `TorchOpt`. We use `MetaSGD` as the inner-loop optimiser.
+Code on a toy example of meta-learning the discount factor from the paper [Meta-Gradient Reinforcement Learning](https://arxiv.org/abs/1805.09801) using TorchOpt. We use `MetaSGD` as the inner-loop optimiser.
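+
+The pattern, as a minimal sketch with toy tensors and a hypothetical scalar meta-parameter (only the `torchopt` calls already exercised in this repository — `MetaSGD`, `extract_state_dict`, `recover_state_dict` — are assumed):
+
+```python
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
+import torchopt
+
+net = nn.Linear(4, 1)
+inner_opt = torchopt.MetaSGD(net, lr=0.1)  # differentiable inner-loop optimiser
+meta_param = torch.tensor(0.5, requires_grad=True)  # hypothetical meta-parameter
+outer_opt = torch.optim.Adam([meta_param], lr=1e-3)
+
+net_state = torchopt.extract_state_dict(net)  # snapshot before adaptation
+x, y = torch.randn(8, 4), torch.randn(8, 1)
+
+inner_loss = F.mse_loss(net(x) + meta_param, y)  # inner objective depends on the meta-parameter
+inner_opt.step(inner_loss)  # updates `net` in place but keeps the graph
+
+outer_loss = F.mse_loss(net(x), y)
+outer_opt.zero_grad()
+outer_loss.backward()  # gradient flows through the inner update into `meta_param`
+outer_opt.step()
+
+torchopt.recover_state_dict(net, net_state)  # reset before the next outer iteration
+```
+
+The snapshot/restore pair is what lets a single set of network parameters be reused across outer iterations.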
+ +## Usage -# Usage ```bash -### Run +### Run python3 toy.py +``` diff --git a/examples/MGRL/toy.py b/examples/MGRL/toy.py index 5ce5ad1c..4f0feeb3 100644 --- a/examples/MGRL/toy.py +++ b/examples/MGRL/toy.py @@ -14,71 +14,71 @@ # ============================================================================== import torch -from torch import nn -from torch.nn import functional as F +import torch.nn as nn +import torch.nn.functional as F -import TorchOpt +import torchopt def test_gamma(): - class Rollout: - - @staticmethod - def get(): - out = torch.empty(5, 2) - out[:, 0] = torch.randn(5) - out[:, 1] = 0.1 * torch.ones(5) - label = torch.arange(0, 10) - return out.view(10, 1), F.one_hot(label, 10) - - @staticmethod - def rollout(trajectory, gamma): - out = [trajectory[-1]] - for i in reversed(range(9)): - out.append(trajectory[i] + gamma[i] * out[-1].clone().detach_()) - out.reverse() - return torch.hstack(out).view(10, 1) - - class ValueNetwork(nn.Module): - - def __init__(self): - super().__init__() - self.fc = nn.Linear(10, 1) - - def forward(self, x): - return self.fc(x) - - torch.manual_seed(0) - inner_iters = 1 - outer_iters = 10000 - net = ValueNetwork() - inner_optimizer = TorchOpt.MetaSGD(net, lr=5e-1) - gamma = torch.zeros(9, requires_grad=True) - meta_optimizer = TorchOpt.SGD([gamma], lr=5e-1) - net_state = TorchOpt.extract_state_dict(net) - for i in range(outer_iters): - for j in range(inner_iters): - trajectory, state = Rollout.get() - backup = Rollout.rollout(trajectory, torch.sigmoid(gamma)) - pred_value = net(state.float()) - - loss = F.mse_loss(pred_value, backup) - inner_optimizer.step(loss) - - trajectory, state = Rollout.get() - pred_value = net(state.float()) - backup = Rollout.rollout(trajectory, torch.ones_like(gamma)) - - loss = F.mse_loss(pred_value, backup) - meta_optimizer.zero_grad() - loss.backward() - meta_optimizer.step() - TorchOpt.recover_state_dict(net, net_state) - if i % 100 == 0: - with torch.no_grad(): - print(f"epoch {i} | gamma: {torch.sigmoid(gamma)}") + class Rollout: + + @staticmethod + def get(): + out = torch.empty(5, 2) + out[:, 0] = torch.randn(5) + out[:, 1] = 0.1 * torch.ones(5) + label = torch.arange(0, 10) + return out.view(10, 1), F.one_hot(label, 10) + + @staticmethod + def rollout(trajectory, gamma): + out = [trajectory[-1]] + for i in reversed(range(9)): + out.append(trajectory[i] + gamma[i] * out[-1].clone().detach_()) + out.reverse() + return torch.hstack(out).view(10, 1) + + class ValueNetwork(nn.Module): + + def __init__(self): + super().__init__() + self.fc = nn.Linear(10, 1) + + def forward(self, x): + return self.fc(x) + + torch.manual_seed(0) + inner_iters = 1 + outer_iters = 10000 + net = ValueNetwork() + inner_optimizer = torchopt.MetaSGD(net, lr=5e-1) + gamma = torch.zeros(9, requires_grad=True) + meta_optimizer = torchopt.SGD([gamma], lr=5e-1) + net_state = torchopt.extract_state_dict(net) + for i in range(outer_iters): + for j in range(inner_iters): + trajectory, state = Rollout.get() + backup = Rollout.rollout(trajectory, torch.sigmoid(gamma)) + pred_value = net(state.float()) + + loss = F.mse_loss(pred_value, backup) + inner_optimizer.step(loss) + + trajectory, state = Rollout.get() + pred_value = net(state.float()) + backup = Rollout.rollout(trajectory, torch.ones_like(gamma)) + + loss = F.mse_loss(pred_value, backup) + meta_optimizer.zero_grad() + loss.backward() + meta_optimizer.step() + torchopt.recover_state_dict(net, net_state) + if i % 100 == 0: + with torch.no_grad(): + print(f"epoch {i} | gamma: 
{torch.sigmoid(gamma)}") if __name__ == "__main__": - test_gamma() + test_gamma() diff --git a/examples/few-shot/README.md b/examples/few-shot/README.md index d617b62d..0437541a 100644 --- a/examples/few-shot/README.md +++ b/examples/few-shot/README.md @@ -1,15 +1,18 @@ # MAML few-shot Omniglot classification-examples -Code On MAML few-shot Omniglot classification in paper [Model-Agnostic Meta-Learning for Fast Adaptation of Deep Networks](https://arxiv.org/abs/1703.03400) using `TorchOpt`. We use `MetaSGD` as the inner-loop optimiser. +Code On MAML few-shot Omniglot classification in paper [Model-Agnostic Meta-Learning for Fast Adaptation of Deep Networks](https://arxiv.org/abs/1703.03400) using TorchOpt. We use `MetaSGD` as the inner-loop optimiser. + +## Usage -# Usage ```bash -### Run +### Run python3 maml-omniglot.py ``` -# Results +## Results + The figure illustrate the experimental result. +
- +
diff --git a/examples/few-shot/maml-omniglot.py b/examples/few-shot/maml-omniglot.py index 1d942593..856f8f01 100644 --- a/examples/few-shot/maml-omniglot.py +++ b/examples/few-shot/maml-omniglot.py @@ -47,233 +47,224 @@ import numpy as np import pandas as pd import torch +import torch.nn as nn import torch.nn.functional as F import torch.optim as optim -from support.omniglot_loaders import OmniglotNShot -from torch import nn -import TorchOpt +import torchopt + +from .support.omniglot_loaders import OmniglotNShot mpl.use('Agg') plt.style.use('bmh') def main(): - argparser = argparse.ArgumentParser() - argparser.add_argument('--n_way', type=int, help='n way', default=5) - argparser.add_argument( - '--k_spt', type=int, help='k shot for support set', default=5 - ) - argparser.add_argument( - '--k_qry', type=int, help='k shot for query set', default=15 - ) - argparser.add_argument( - '--task_num', - type=int, - help='meta batch size, namely task num', - default=32 - ) - argparser.add_argument('--seed', type=int, help='random seed', default=1) - args = argparser.parse_args() - - torch.manual_seed(args.seed) - if torch.cuda.is_available(): - torch.cuda.manual_seed_all(args.seed) - np.random.seed(args.seed) - rng = np.random.default_rng(args.seed) - - # Set up the Omniglot loader. - device = torch.device('cuda:0') - db = OmniglotNShot( - '/tmp/omniglot-data', - batchsz=args.task_num, - n_way=args.n_way, - k_shot=args.k_spt, - k_query=args.k_qry, - imgsz=28, - rng=rng, - device=device, - ) - - # Create a vanilla PyTorch neural network that will be - # automatically monkey-patched by higher later. - # Before higher, models could *not* be created like this - # and the parameters needed to be manually updated and copied - # for the updates. - net = nn.Sequential( - nn.Conv2d(1, 64, 3), nn.BatchNorm2d(64, momentum=1., affine=True), - nn.ReLU(inplace=False), nn.MaxPool2d(2, 2), nn.Conv2d(64, 64, 3), - nn.BatchNorm2d(64, momentum=1., affine=True), nn.ReLU(inplace=False), - nn.MaxPool2d(2, 2), nn.Conv2d(64, 64, 3), - nn.BatchNorm2d(64, momentum=1., affine=True), nn.ReLU(inplace=False), - nn.MaxPool2d(2, 2), nn.Flatten(), nn.Linear(64, args.n_way) - ).to(device) - - # We will use Adam to (meta-)optimize the initial parameters - # to be adapted. - meta_opt = optim.Adam(net.parameters(), lr=1e-3) - - log = [] - for epoch in range(10): - train(db, net, meta_opt, epoch, log) - test(db, net, epoch, log) - plot(log) - + argparser = argparse.ArgumentParser() + argparser.add_argument('--n_way', type=int, help='n way', default=5) + argparser.add_argument('--k_spt', type=int, help='k shot for support set', default=5) + argparser.add_argument('--k_qry', type=int, help='k shot for query set', default=15) + argparser.add_argument( + '--task_num', type=int, help='meta batch size, namely task num', default=32 + ) + argparser.add_argument('--seed', type=int, help='random seed', default=1) + args = argparser.parse_args() + + torch.manual_seed(args.seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(args.seed) + np.random.seed(args.seed) + rng = np.random.default_rng(args.seed) + + # Set up the Omniglot loader. 
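+    # (Assumes a CUDA device. `db.next()` returns support/query batches of
+    # shape [task_num, n_way * k_shot_or_k_query, 1, imgsz, imgsz], already
+    # moved to `device` by the loader.)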
+ device = torch.device('cuda:0') + db = OmniglotNShot( + '/tmp/omniglot-data', + batchsz=args.task_num, + n_way=args.n_way, + k_shot=args.k_spt, + k_query=args.k_qry, + imgsz=28, + rng=rng, + device=device, + ) -def train(db, net, meta_opt, epoch, log): - net.train() - n_train_iter = db.x_train.shape[0] // db.batchsz - inner_opt = TorchOpt.MetaSGD(net, lr=1e-1) + # Create a vanilla PyTorch neural network that will be + # automatically monkey-patched by higher later. + # Before higher, models could *not* be created like this + # and the parameters needed to be manually updated and copied + # for the updates. + net = nn.Sequential( + nn.Conv2d(1, 64, 3), nn.BatchNorm2d(64, momentum=1., affine=True), nn.ReLU(inplace=False), + nn.MaxPool2d(2, 2), nn.Conv2d(64, 64, 3), nn.BatchNorm2d(64, momentum=1., affine=True), + nn.ReLU(inplace=False), nn.MaxPool2d(2, 2), nn.Conv2d(64, 64, 3), + nn.BatchNorm2d(64, momentum=1., affine=True), nn.ReLU(inplace=False), nn.MaxPool2d(2, 2), + nn.Flatten(), nn.Linear(64, args.n_way) + ).to(device) + + # We will use Adam to (meta-)optimize the initial parameters + # to be adapted. + meta_opt = optim.Adam(net.parameters(), lr=1e-3) + + log = [] + for epoch in range(10): + train(db, net, meta_opt, epoch, log) + test(db, net, epoch, log) + plot(log) - for batch_idx in range(n_train_iter): - start_time = time.time() - # Sample a batch of support and query images and labels. - x_spt, y_spt, x_qry, y_qry = db.next() - task_num, setsz, c_, h, w = x_spt.size() - querysz = x_qry.size(1) +def train(db, net, meta_opt, epoch, log): + net.train() + n_train_iter = db.x_train.shape[0] // db.batchsz + inner_opt = torchopt.MetaSGD(net, lr=1e-1) + + for batch_idx in range(n_train_iter): + start_time = time.time() + # Sample a batch of support and query images and labels. + x_spt, y_spt, x_qry, y_qry = db.next() + + task_num, setsz, c_, h, w = x_spt.size() + querysz = x_qry.size(1) + + # TODO: Maybe pull this out into a separate module so it + # doesn't have to be duplicated between `train` and `test`? + + # Initialize the inner optimizer to adapt the parameters to + # the support set. + n_inner_iter = 5 + + qry_losses = [] + qry_accs = [] + meta_opt.zero_grad() + + net_state_dict = torchopt.extract_state_dict(net) + optim_state_dict = torchopt.extract_state_dict(inner_opt) + for i in range(task_num): + # Optimize the likelihood of the support set by taking + # gradient steps w.r.t. the model's parameters. + # This adapts the model's meta-parameters to the task. + # higher is able to automatically keep copies of + # your network's parameters as they are being updated. + for _ in range(n_inner_iter): + spt_logits = net(x_spt[i]) + spt_loss = F.cross_entropy(spt_logits, y_spt[i]) + inner_opt.step(spt_loss) + + # The final set of adapted parameters will induce some + # final loss and accuracy on the query dataset. + # These will be used to update the model's meta-parameters. + qry_logits = net(x_qry[i]) + qry_loss = F.cross_entropy(qry_logits, y_qry[i]) + qry_losses.append(qry_loss.detach()) + qry_acc = (qry_logits.argmax(dim=1) == y_qry[i]).sum().item() / querysz + qry_accs.append(qry_acc) + + # Update the model's meta-parameters to optimize the query + # losses across all of the tasks sampled in this batch. + # This unrolls through the gradient steps. + qry_loss.backward() + + torchopt.recover_state_dict(net, net_state_dict) + torchopt.recover_state_dict(inner_opt, optim_state_dict) + + meta_opt.step() + qry_losses = sum(qry_losses) / task_num + qry_accs = 100. 
* sum(qry_accs) / task_num + i = epoch + float(batch_idx) / n_train_iter + iter_time = time.time() - start_time + + print( + f'[Epoch {i:.2f}] Train Loss: {qry_losses:.2f} | Acc: {qry_accs:.2f} | Time: {iter_time:.2f}' + ) + + log.append( + { + 'epoch': i, + 'loss': qry_losses, + 'acc': qry_accs, + 'mode': 'train', + 'time': time.time(), + } + ) - # TODO: Maybe pull this out into a separate module so it - # doesn't have to be duplicated between `train` and `test`? - # Initialize the inner optimizer to adapt the parameters to - # the support set. - n_inner_iter = 5 +def test(db, net, epoch, log): + # Crucially in our testing procedure here, we do *not* fine-tune + # the model during testing for simplicity. + # Most research papers using MAML for this task do an extra + # stage of fine-tuning here that should be added if you are + # adapting this code for research. + net.train() + n_test_iter = db.x_test.shape[0] // db.batchsz + inner_opt = torchopt.MetaSGD(net, lr=1e-1) qry_losses = [] qry_accs = [] - meta_opt.zero_grad() - - net_state_dict = TorchOpt.extract_state_dict(net) - optim_state_dict = TorchOpt.extract_state_dict(inner_opt) - for i in range(task_num): - # Optimize the likelihood of the support set by taking - # gradient steps w.r.t. the model's parameters. - # This adapts the model's meta-parameters to the task. - # higher is able to automatically keep copies of - # your network's parameters as they are being updated. - for _ in range(n_inner_iter): - spt_logits = net(x_spt[i]) - spt_loss = F.cross_entropy(spt_logits, y_spt[i]) - inner_opt.step(spt_loss) - - # The final set of adapted parameters will induce some - # final loss and accuracy on the query dataset. - # These will be used to update the model's meta-parameters. - qry_logits = net(x_qry[i]) - qry_loss = F.cross_entropy(qry_logits, y_qry[i]) - qry_losses.append(qry_loss.detach()) - qry_acc = (qry_logits.argmax(dim=1) == y_qry[i]).sum().item() / querysz - qry_accs.append(qry_acc) - - # Update the model's meta-parameters to optimize the query - # losses across all of the tasks sampled in this batch. - # This unrolls through the gradient steps. - qry_loss.backward() - - TorchOpt.recover_state_dict(net, net_state_dict) - TorchOpt.recover_state_dict(inner_opt, optim_state_dict) - - meta_opt.step() - qry_losses = sum(qry_losses) / task_num - qry_accs = 100. * sum(qry_accs) / task_num - i = epoch + float(batch_idx) / n_train_iter - iter_time = time.time() - start_time - - print( - f'[Epoch {i:.2f}] Train Loss: {qry_losses:.2f} | Acc: {qry_accs:.2f} | Time: {iter_time:.2f}' - ) + for batch_idx in range(n_test_iter): + x_spt, y_spt, x_qry, y_qry = db.next('test') + + task_num, setsz, c_, h, w = x_spt.size() + querysz = x_qry.size(1) + + # TODO: Maybe pull this out into a separate module so it + # doesn't have to be duplicated between `train` and `test`? + n_inner_iter = 5 + + net_state_dict = torchopt.extract_state_dict(net) + optim_state_dict = torchopt.extract_state_dict(inner_opt) + for i in range(task_num): + # Optimize the likelihood of the support set by taking + # gradient steps w.r.t. the model's parameters. + # This adapts the model's meta-parameters to the task. + for _ in range(n_inner_iter): + spt_logits = net(x_spt[i]) + spt_loss = F.cross_entropy(spt_logits, y_spt[i]) + inner_opt.step(spt_loss) + + # The query loss and acc induced by these parameters. 
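+            # Unlike in `train`, the logits are detached here: during
+            # evaluation the query loss is only logged, never backpropagated.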
+ qry_logits = net(x_qry[i]).detach() + qry_loss = F.cross_entropy(qry_logits, y_qry[i], reduction='none') + qry_losses.append(qry_loss.detach()) + qry_accs.append((qry_logits.argmax(dim=1) == y_qry[i]).detach()) + + torchopt.recover_state_dict(net, net_state_dict) + torchopt.recover_state_dict(inner_opt, optim_state_dict) + + qry_losses = torch.cat(qry_losses).mean().item() + qry_accs = 100. * torch.cat(qry_accs).float().mean().item() + print(f'[Epoch {epoch+1:.2f}] Test Loss: {qry_losses:.2f} | Acc: {qry_accs:.2f}') log.append( - { - 'epoch': i, - 'loss': qry_losses, - 'acc': qry_accs, - 'mode': 'train', - 'time': time.time(), - } + { + 'epoch': epoch + 1, + 'loss': qry_losses, + 'acc': qry_accs, + 'mode': 'test', + 'time': time.time(), + } ) -def test(db, net, epoch, log): - # Crucially in our testing procedure here, we do *not* fine-tune - # the model during testing for simplicity. - # Most research papers using MAML for this task do an extra - # stage of fine-tuning here that should be added if you are - # adapting this code for research. - net.train() - n_test_iter = db.x_test.shape[0] // db.batchsz - inner_opt = TorchOpt.MetaSGD(net, lr=1e-1) - - qry_losses = [] - qry_accs = [] - - for batch_idx in range(n_test_iter): - x_spt, y_spt, x_qry, y_qry = db.next('test') - - task_num, setsz, c_, h, w = x_spt.size() - querysz = x_qry.size(1) - - # TODO: Maybe pull this out into a separate module so it - # doesn't have to be duplicated between `train` and `test`? - n_inner_iter = 5 - - net_state_dict = TorchOpt.extract_state_dict(net) - optim_state_dict = TorchOpt.extract_state_dict(inner_opt) - for i in range(task_num): - # Optimize the likelihood of the support set by taking - # gradient steps w.r.t. the model's parameters. - # This adapts the model's meta-parameters to the task. - for _ in range(n_inner_iter): - spt_logits = net(x_spt[i]) - spt_loss = F.cross_entropy(spt_logits, y_spt[i]) - inner_opt.step(spt_loss) - - # The query loss and acc induced by these parameters. - qry_logits = net(x_qry[i]).detach() - qry_loss = F.cross_entropy(qry_logits, y_qry[i], reduction='none') - qry_losses.append(qry_loss.detach()) - qry_accs.append((qry_logits.argmax(dim=1) == y_qry[i]).detach()) - - TorchOpt.recover_state_dict(net, net_state_dict) - TorchOpt.recover_state_dict(inner_opt, optim_state_dict) - - qry_losses = torch.cat(qry_losses).mean().item() - qry_accs = 100. * torch.cat(qry_accs).float().mean().item() - print( - f'[Epoch {epoch+1:.2f}] Test Loss: {qry_losses:.2f} | Acc: {qry_accs:.2f}' - ) - log.append( - { - 'epoch': epoch + 1, - 'loss': qry_losses, - 'acc': qry_accs, - 'mode': 'test', - 'time': time.time(), - } - ) - - def plot(log): - # Generally you should pull your plotting code out of your training - # script but we are doing it here for brevity. - df = pd.DataFrame(log) - - fig, ax = plt.subplots(figsize=(6, 4)) - train_df = df[df['mode'] == 'train'] - test_df = df[df['mode'] == 'test'] - ax.plot(train_df['epoch'], train_df['acc'], label='Train') - ax.plot(test_df['epoch'], test_df['acc'], label='Test') - ax.set_xlabel('Epoch') - ax.set_ylabel('Accuracy') - ax.set_ylim(70, 100) - fig.legend(ncol=2, loc='lower right') - fig.tight_layout() - fname = 'maml-accs.png' - print(f'--- Plotting accuracy to {fname}') - fig.savefig(fname) - plt.close(fig) + # Generally you should pull your plotting code out of your training + # script but we are doing it here for brevity. 
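+    # Train entries log a fractional 'epoch' (epoch + batch_idx / n_train_iter),
+    # so the train and test curves share the same x-axis.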
+ df = pd.DataFrame(log) + + fig, ax = plt.subplots(figsize=(6, 4)) + train_df = df[df['mode'] == 'train'] + test_df = df[df['mode'] == 'test'] + ax.plot(train_df['epoch'], train_df['acc'], label='Train') + ax.plot(test_df['epoch'], test_df['acc'], label='Test') + ax.set_xlabel('Epoch') + ax.set_ylabel('Accuracy') + ax.set_ylim(70, 100) + fig.legend(ncol=2, loc='lower right') + fig.tight_layout() + fname = 'maml-accs.png' + print(f'--- Plotting accuracy to {fname}') + fig.savefig(fname) + plt.close(fig) if __name__ == '__main__': - main() + main() diff --git a/examples/few-shot/support/omniglot_loaders.py b/examples/few-shot/support/omniglot_loaders.py index 9aa9f6ed..731c41be 100644 --- a/examples/few-shot/support/omniglot_loaders.py +++ b/examples/few-shot/support/omniglot_loaders.py @@ -20,7 +20,6 @@ import errno import os -import os.path import numpy as np import torch @@ -30,122 +29,115 @@ class Omniglot(data.Dataset): - urls = [ - 'https://github.com/brendenlake/omniglot/raw/master/python/images_background.zip', - 'https://github.com/brendenlake/omniglot/raw/master/python/images_evaluation.zip' - ] - raw_folder = 'raw' - processed_folder = 'processed' - training_file = 'training.pt' - test_file = 'test.pt' - ''' + """ The items are (filename,category). The index of all the categories can be found in self.idx_classes Args: - root: the directory where the dataset will be stored - transform: how to transform the input - target_transform: how to transform the target - download: need to download the dataset - ''' - - def __init__( - self, root, transform=None, target_transform=None, download=False - ): - self.root = root - self.transform = transform - self.target_transform = target_transform - - if not self._check_exists(): - if download: - self.download() - else: - raise RuntimeError( - 'Dataset not found.' 
+ ' You can use download=True to download it' - ) - - self.all_items = find_classes( - os.path.join(self.root, self.processed_folder) - ) - self.idx_classes = index_classes(self.all_items) - - def __getitem__(self, index): - filename = self.all_items[index][0] - img = str.join('/', [self.all_items[index][2], filename]) - - target = self.idx_classes[self.all_items[index][1]] - if self.transform is not None: - img = self.transform(img) - if self.target_transform is not None: - target = self.target_transform(target) - - return img, target - - def __len__(self): - return len(self.all_items) - - def _check_exists(self): - return os.path.exists(os.path.join(self.root, self.processed_folder, "images_evaluation")) and \ - os.path.exists(os.path.join(self.root, self.processed_folder, "images_background")) - - def download(self): - import zipfile - - from six.moves import urllib - - if self._check_exists(): - return - - # download files - try: - os.makedirs(os.path.join(self.root, self.raw_folder)) - os.makedirs(os.path.join(self.root, self.processed_folder)) - except OSError as e: - if e.errno == errno.EEXIST: - pass - else: - raise - - for url in self.urls: - print('== Downloading ' + url) - data = urllib.request.urlopen(url) - filename = url.rpartition('/')[2] - file_path = os.path.join(self.root, self.raw_folder, filename) - with open(file_path, 'wb') as f: - f.write(data.read()) - file_processed = os.path.join(self.root, self.processed_folder) - print("== Unzip from " + file_path + " to " + file_processed) - zip_ref = zipfile.ZipFile(file_path, 'r') - zip_ref.extractall(file_processed) - zip_ref.close() - print("Download finished.") + """ + + urls = [ + 'https://github.com/brendenlake/omniglot/raw/master/python/images_background.zip', + 'https://github.com/brendenlake/omniglot/raw/master/python/images_evaluation.zip' + ] + raw_folder = 'raw' + processed_folder = 'processed' + training_file = 'training.pt' + test_file = 'test.pt' + + def __init__(self, root, transform=None, target_transform=None, download=False): + self.root = root + self.transform = transform + self.target_transform = target_transform + + if not self._check_exists(): + if download: + self.download() + else: + raise RuntimeError('Dataset not found. 
You can use download=True to download it') + + self.all_items = find_classes(os.path.join(self.root, self.processed_folder)) + self.idx_classes = index_classes(self.all_items) + + def __getitem__(self, index): + filename = self.all_items[index][0] + img = str.join('/', [self.all_items[index][2], filename]) + + target = self.idx_classes[self.all_items[index][1]] + if self.transform is not None: + img = self.transform(img) + if self.target_transform is not None: + target = self.target_transform(target) + + return img, target + + def __len__(self): + return len(self.all_items) + + def _check_exists(self): + return os.path.exists(os.path.join(self.root, self.processed_folder, "images_evaluation")) and \ + os.path.exists(os.path.join(self.root, self.processed_folder, "images_background")) + + def download(self): + import zipfile + + from six.moves import urllib + + if self._check_exists(): + return + + # download files + try: + os.makedirs(os.path.join(self.root, self.raw_folder)) + os.makedirs(os.path.join(self.root, self.processed_folder)) + except OSError as e: + if e.errno == errno.EEXIST: + pass + else: + raise + + for url in self.urls: + print('== Downloading ' + url) + data = urllib.request.urlopen(url) + filename = url.rpartition('/')[2] + file_path = os.path.join(self.root, self.raw_folder, filename) + with open(file_path, 'wb') as f: + f.write(data.read()) + file_processed = os.path.join(self.root, self.processed_folder) + print("== Unzip from " + file_path + " to " + file_processed) + zip_ref = zipfile.ZipFile(file_path, 'r') + zip_ref.extractall(file_processed) + zip_ref.close() + print("Download finished.") def find_classes(root_dir): - retour = [] - for (root, dirs, files) in os.walk(root_dir): - for f in files: - if (f.endswith("png")): - r = root.split('/') - lr = len(r) - retour.append((f, r[lr - 2] + "/" + r[lr - 1], root)) - print("== Found %d items " % len(retour)) - return retour + retour = [] + for (root, dirs, files) in os.walk(root_dir): + for f in files: + if (f.endswith("png")): + r = root.split('/') + lr = len(r) + retour.append((f, r[lr - 2] + "/" + r[lr - 1], root)) + print("== Found %d items " % len(retour)) + return retour def index_classes(items): - idx = {} - for i in items: - if i[1] not in idx: - idx[i[1]] = len(idx) - print("== Found %d classes" % len(idx)) - return idx + idx = {} + for i in items: + if i[1] not in idx: + idx[i[1]] = len(idx) + print("== Found %d classes" % len(idx)) + return idx class OmniglotNShot: - def __init__( - self, root, batchsz, n_way, k_shot, k_query, imgsz, rng, device=None - ): - """ + def __init__(self, root, batchsz, n_way, k_shot, k_query, imgsz, rng, device=None): + """ Different from mnistNShot, the :param root: :param batchsz: task num @@ -155,178 +147,168 @@ def __init__( :param imgsz: """ - self.resize = imgsz - self.rng = rng - self.device = device - if not os.path.isfile(os.path.join(root, 'omniglot.npy')): - # if root/data.npy does not exist, just download it - self.x = Omniglot( - root, - download=True, - transform=transforms.Compose( - [ - lambda x: Image.open(x).convert('L'), - lambda x: x.resize((imgsz, imgsz)), - lambda x: np.reshape(x, (imgsz, imgsz, 1)), - lambda x: np.transpose(x, [2, 0, 1]), lambda x: x / 255. - ] - ), - ) - - temp = dict( - ) # {label:img1, img2..., 20 imgs, label2: img1, img2,... 
in total, 1623 label} - for (img, label) in self.x: - if label in temp.keys(): - temp[label].append(img) + self.resize = imgsz + self.rng = rng + self.device = device + if not os.path.isfile(os.path.join(root, 'omniglot.npy')): + # if root/data.npy does not exist, just download it + self.x = Omniglot( + root, + download=True, + transform=transforms.Compose( + [ + lambda x: Image.open(x).convert('L'), lambda x: x.resize((imgsz, imgsz)), + lambda x: np.reshape(x, (imgsz, imgsz, 1)), + lambda x: np.transpose(x, [2, 0, 1]), lambda x: x / 255. + ] + ), + ) + + temp = dict( + ) # {label:img1, img2..., 20 imgs, label2: img1, img2,... in total, 1623 label} + for (img, label) in self.x: + if label in temp.keys(): + temp[label].append(img) + else: + temp[label] = [img] + + self.x = [] + for label, imgs in temp.items(): # labels info deserted , each label contains 20imgs + self.x.append(np.array(imgs)) + + # as different class may have different number of imgs + self.x = np.array(self.x).astype(np.float) # [[20 imgs],..., 1623 classes in total] + # each character contains 20 imgs + print('data shape:', self.x.shape) # [1623, 20, 84, 84, 1] + temp = [] # Free memory + # save all dataset into npy file. + np.save(os.path.join(root, 'omniglot.npy'), self.x) + print('write into omniglot.npy.') else: - temp[label] = [img] - - self.x = [] - for label, imgs in temp.items( - ): # labels info deserted , each label contains 20imgs - self.x.append(np.array(imgs)) - - # as different class may have different number of imgs - self.x = np.array(self.x).astype( - np.float - ) # [[20 imgs],..., 1623 classes in total] - # each character contains 20 imgs - print('data shape:', self.x.shape) # [1623, 20, 84, 84, 1] - temp = [] # Free memory - # save all dataset into npy file. - np.save(os.path.join(root, 'omniglot.npy'), self.x) - print('write into omniglot.npy.') - else: - # if data.npy exists, just load it. - self.x = np.load(os.path.join(root, 'omniglot.npy')) - print('load from omniglot.npy.') - - # [1623, 20, 84, 84, 1] - # TODO: can not shuffle here, we must keep training and test set distinct! - self.x_train, self.x_test = self.x[:1200], self.x[1200:] - - # self.normalization() - - self.batchsz = batchsz - self.n_cls = self.x.shape[0] # 1623 - self.n_way = n_way # n way - self.k_shot = k_shot # k shot - self.k_query = k_query # k query - assert (k_shot + k_query) <= 20 - - # save pointer of current read batch in total cache - self.indexes = {"train": 0, "test": 0} - self.datasets = { - "train": self.x_train, - "test": self.x_test - } # original data cached - print("DB: train", self.x_train.shape, "test", self.x_test.shape) - - self.datasets_cache = { - "train": self.load_data_cache(self.datasets["train"] - ), # current epoch data cached - "test": self.load_data_cache(self.datasets["test"]) - } - - def normalization(self): - """ - Normalizes our data, to have a mean of 0 and sdt of 1 + # if data.npy exists, just load it. + self.x = np.load(os.path.join(root, 'omniglot.npy')) + print('load from omniglot.npy.') + + # [1623, 20, 84, 84, 1] + # TODO: can not shuffle here, we must keep training and test set distinct! 
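+        # The first 1200 character classes form the meta-train split; the
+        # remaining 423 are held out for meta-test.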
+        self.x_train, self.x_test = self.x[:1200], self.x[1200:]
+
+        # self.normalization()
+
+        self.batchsz = batchsz
+        self.n_cls = self.x.shape[0]  # 1623
+        self.n_way = n_way  # n way
+        self.k_shot = k_shot  # k shot
+        self.k_query = k_query  # k query
+        assert (k_shot + k_query) <= 20
+
+        # save pointer of current read batch in total cache
+        self.indexes = {"train": 0, "test": 0}
+        self.datasets = {"train": self.x_train, "test": self.x_test}  # original data cached
+        print("DB: train", self.x_train.shape, "test", self.x_test.shape)
+
+        self.datasets_cache = {
+            "train": self.load_data_cache(self.datasets["train"]),  # current epoch data cached
+            "test": self.load_data_cache(self.datasets["test"])
+        }
+
+    def normalization(self):
+        """
+        Normalizes our data to have a mean of 0 and std of 1
+        """
+        self.mean = np.mean(self.x_train)
+        self.std = np.std(self.x_train)
+        self.max = np.max(self.x_train)
+        self.min = np.min(self.x_train)
+        # print("before norm:", "mean", self.mean, "max", self.max, "min", self.min, "std", self.std)
+        self.x_train = (self.x_train - self.mean) / self.std
+        self.x_test = (self.x_test - self.mean) / self.std
+
+        self.mean = np.mean(self.x_train)
+        self.std = np.std(self.x_train)
+        self.max = np.max(self.x_train)
+        self.min = np.min(self.x_train)
+
+        # print("after norm:", "mean", self.mean, "max", self.max, "min", self.min, "std", self.std)
+
+    def load_data_cache(self, data_pack):
+        """
         Collects several batches of data for N-shot learning
         :param data_pack: [cls_num, 20, 84, 84, 1]
         :return: A list with [support_set_x, support_set_y, target_x, target_y] ready to be fed to our networks
         """
+
+        # take 5-way 1-shot as an example: setsz = 5 * 1
+        setsz = self.k_shot * self.n_way
+        querysz = self.k_query * self.n_way
+        data_cache = []
+
+        # print('preload next 50 caches of batchsz of batch.')
+        for sample in range(10):  # num of episodes
+
+            x_spts, y_spts, x_qrys, y_qrys = [], [], [], []
+            for i in range(self.batchsz):  # one batch means one set
+
+                x_spt, y_spt, x_qry, y_qry = [], [], [], []
+                selected_cls = self.rng.choice(data_pack.shape[0], self.n_way, False)
+
+                for j, cur_class in enumerate(selected_cls):
+
+                    selected_img = self.rng.choice(20, self.k_shot + self.k_query, False)
+
+                    # meta-training and meta-test
+                    x_spt.append(data_pack[cur_class][selected_img[:self.k_shot]])
+                    x_qry.append(data_pack[cur_class][selected_img[self.k_shot:]])
+                    y_spt.append([j for _ in range(self.k_shot)])
+                    y_qry.append([j for _ in range(self.k_query)])
+
+                # shuffle inside a batch
+                perm = self.rng.permutation(self.n_way * self.k_shot)
+                x_spt = np.array(x_spt) \
+                    .reshape(self.n_way * self.k_shot, 1, self.resize, self.resize)[perm]
+                y_spt = np.array(y_spt) \
+                    .reshape(self.n_way * self.k_shot)[perm]
+                perm = self.rng.permutation(self.n_way * self.k_query)
+                x_qry = np.array(x_qry) \
+                    .reshape(self.n_way * self.k_query, 1, self.resize, self.resize)[perm]
+                y_qry =
np.array(y_qry).reshape(self.n_way * self.k_query)[perm] - - # append [sptsz, 1, 84, 84] => [b, setsz, 1, 84, 84] - x_spts.append(x_spt) - y_spts.append(y_spt) - x_qrys.append(x_qry) - y_qrys.append(y_qry) - - # [b, setsz, 1, 84, 84] - x_spts = np.array(x_spts).astype( - np.float32 - ).reshape(self.batchsz, setsz, 1, self.resize, self.resize) - y_spts = np.array(y_spts).astype(np.int).reshape(self.batchsz, setsz) - # [b, qrysz, 1, 84, 84] - x_qrys = np.array(x_qrys).astype( - np.float32 - ).reshape(self.batchsz, querysz, 1, self.resize, self.resize) - y_qrys = np.array(y_qrys).astype(np.int).reshape(self.batchsz, querysz) - - x_spts, y_spts, x_qrys, y_qrys = [ - torch.from_numpy(z).to(self.device) - for z in [x_spts, y_spts, x_qrys, y_qrys] - ] - - data_cache.append([x_spts, y_spts, x_qrys, y_qrys]) - - return data_cache - - def next(self, mode='train'): - """ + + # take 5 way 1 shot as example: 5 * 1 + setsz = self.k_shot * self.n_way + querysz = self.k_query * self.n_way + data_cache = [] + + # print('preload next 50 caches of batchsz of batch.') + for sample in range(10): # num of episodes + + x_spts, y_spts, x_qrys, y_qrys = [], [], [], [] + for i in range(self.batchsz): # one batch means one set + + x_spt, y_spt, x_qry, y_qry = [], [], [], [] + selected_cls = self.rng.choice(data_pack.shape[0], self.n_way, False) + + for j, cur_class in enumerate(selected_cls): + + selected_img = self.rng.choice(20, self.k_shot + self.k_query, False) + + # meta-training and meta-test + x_spt.append(data_pack[cur_class][selected_img[:self.k_shot]]) + x_qry.append(data_pack[cur_class][selected_img[self.k_shot:]]) + y_spt.append([j for _ in range(self.k_shot)]) + y_qry.append([j for _ in range(self.k_query)]) + + # shuffle inside a batch + perm = self.rng.permutation(self.n_way * self.k_shot) + x_spt = np.array(x_spt) \ + .reshape(self.n_way * self.k_shot, 1, self.resize, self.resize)[perm] + y_spt = np.array(y_spt) \ + .reshape(self.n_way * self.k_shot)[perm] + perm = self.rng.permutation(self.n_way * self.k_query) + x_qry = np.array(x_qry) \ + .reshape(self.n_way * self.k_query, 1, self.resize, self.resize)[perm] + y_qry = np.array(y_qry).reshape(self.n_way * self.k_query)[perm] + + # append [sptsz, 1, 84, 84] => [b, setsz, 1, 84, 84] + x_spts.append(x_spt) + y_spts.append(y_spt) + x_qrys.append(x_qry) + y_qrys.append(y_qry) + + # [b, setsz, 1, 84, 84] + x_spts = np.array(x_spts, dtype=np.float32) \ + .reshape(self.batchsz, setsz, 1, self.resize, self.resize) + y_spts = np.array(y_spts, dtype=np.int).reshape(self.batchsz, setsz) + # [b, qrysz, 1, 84, 84] + x_qrys = np.array(x_qrys, dtype=np.float32) \ + .reshape(self.batchsz, querysz, 1, self.resize, self.resize) + y_qrys = np.array(y_qrys, dtype=np.int).reshape(self.batchsz, querysz) + + x_spts, y_spts, x_qrys, y_qrys = [ + torch.from_numpy(z).to(self.device) for z in [x_spts, y_spts, x_qrys, y_qrys] + ] + + data_cache.append([x_spts, y_spts, x_qrys, y_qrys]) + + return data_cache + + def next(self, mode='train'): + """ Gets next batch from the dataset with name. 
:param mode: The name of the splitting (one of "train", "val", "test") :return: """ - # update cache if indexes is larger cached num - if self.indexes[mode] >= len(self.datasets_cache[mode]): - self.indexes[mode] = 0 - self.datasets_cache[mode] = self.load_data_cache(self.datasets[mode]) - next_batch = self.datasets_cache[mode][self.indexes[mode]] - self.indexes[mode] += 1 + # update cache if indexes is larger cached num + if self.indexes[mode] >= len(self.datasets_cache[mode]): + self.indexes[mode] = 0 + self.datasets_cache[mode] = self.load_data_cache(self.datasets[mode]) + + next_batch = self.datasets_cache[mode][self.indexes[mode]] + self.indexes[mode] += 1 - return next_batch + return next_batch diff --git a/examples/visualize.py b/examples/visualize.py index 4e7d2684..028669e9 100644 --- a/examples/visualize.py +++ b/examples/visualize.py @@ -14,73 +14,67 @@ # ============================================================================== import torch +import torch.nn as nn +import torch.nn.functional as F import torchviz -from torch import nn -from torch.nn import functional as F -import TorchOpt +import torchopt class Net(nn.Module): - def __init__(self, dim): - super().__init__() - self.fc = nn.Linear(dim, 1) + def __init__(self, dim): + super().__init__() + self.fc = nn.Linear(dim, 1) - def forward(self, x, meta_param): - return self.fc(x) + meta_param + def forward(self, x, meta_param): + return self.fc(x) + meta_param def draw_torchviz(): - net = Net(dim).cuda() - optimizer = TorchOpt.MetaAdam(net, lr=1e-3, use_accelerated_op=False) - meta_param = torch.tensor(1., requires_grad=True) - - xs = torch.ones(batch_size, dim).cuda() - - pred = net(xs, meta_param) - loss = F.mse_loss(pred, torch.ones_like(pred)) - optimizer.step(loss) - - pred = net(xs, meta_param) - loss = F.mse_loss(pred, torch.ones_like(pred)) - # draw computation graph - torchviz.make_dot(loss).render("torchviz_graph", format="svg") - - -def draw_TorchOpt(): - net = Net(dim).cuda() - optimizer = TorchOpt.MetaAdam(net, lr=1e-3, use_accelerated_op=True) - meta_param = torch.tensor(1., requires_grad=True) - - xs = torch.ones(batch_size, dim).cuda() - - pred = net(xs, meta_param) - loss = F.mse_loss(pred, torch.ones_like(pred)) - # set enable_visual - net_state_0 = TorchOpt.extract_state_dict( - net, enable_visual=True, visual_prefix='step0.' - ) - optimizer.step(loss) - # set enable_visual - net_state_1 = TorchOpt.extract_state_dict( - net, enable_visual=True, visual_prefix='step1.' 
- ) - - pred = net(xs, meta_param) - loss = F.mse_loss(pred, torch.ones_like(pred)) - # draw computation graph - TorchOpt.visual.make_dot( - loss, [net_state_0, net_state_1, { - meta_param: "meta_param" - }] - ).render( - "TorchOpt_graph", format="svg" - ) + net = Net(dim).cuda() + optimizer = torchopt.MetaAdam(net, lr=1e-3, use_accelerated_op=False) + meta_param = torch.tensor(1., requires_grad=True) + + xs = torch.ones(batch_size, dim).cuda() + + pred = net(xs, meta_param) + loss = F.mse_loss(pred, torch.ones_like(pred)) + optimizer.step(loss) + + pred = net(xs, meta_param) + loss = F.mse_loss(pred, torch.ones_like(pred)) + # draw computation graph + torchviz.make_dot(loss).render("torchviz_graph", format="svg") + + +def draw_torchopt(): + net = Net(dim).cuda() + optimizer = torchopt.MetaAdam(net, lr=1e-3, use_accelerated_op=True) + meta_param = torch.tensor(1., requires_grad=True) + + xs = torch.ones(batch_size, dim).cuda() + + pred = net(xs, meta_param) + loss = F.mse_loss(pred, torch.ones_like(pred)) + # set enable_visual + net_state_0 = torchopt.extract_state_dict(net, enable_visual=True, visual_prefix='step0.') + optimizer.step(loss) + # set enable_visual + net_state_1 = torchopt.extract_state_dict(net, enable_visual=True, visual_prefix='step1.') + + pred = net(xs, meta_param) + loss = F.mse_loss(pred, torch.ones_like(pred)) + # draw computation graph + torchopt.visual.make_dot(loss, [net_state_0, net_state_1, { + meta_param: "meta_param" + }]).render( + "torchopt_graph", format="svg" + ) if __name__ == '__main__': - dim = 5 - batch_size = 2 - draw_torchviz() - draw_TorchOpt() + dim = 5 + batch_size = 2 + draw_torchviz() + draw_torchopt() diff --git a/include/adam_op/adam_op.h b/include/adam_op/adam_op.h index 3499a3e9..33aa53b7 100644 --- a/include/adam_op/adam_op.h +++ b/include/adam_op/adam_op.h @@ -18,9 +18,9 @@ #include -#include "adam_op/common.h" +#include "common.h" -namespace TorchOpt { +namespace torchopt { TensorArray<3> adamForwardInplace(const torch::Tensor& updates, const torch::Tensor& mu, const torch::Tensor& nu, const float b1, @@ -51,4 +51,4 @@ TensorArray<2> adamBackwardUpdates(const torch::Tensor& dupdates, const torch::Tensor& new_mu, const torch::Tensor& new_nu, const float b1, const float b2, const int count); -} // namespace TorchOpt +} // namespace torchopt diff --git a/include/adam_op/adam_op_impl.cuh b/include/adam_op/adam_op_impl.cuh index 9e37df1b..bc29171f 100644 --- a/include/adam_op/adam_op_impl.cuh +++ b/include/adam_op/adam_op_impl.cuh @@ -18,9 +18,9 @@ #include -#include "adam_op/common.h" +#include "common.h" -namespace TorchOpt { +namespace torchopt { TensorArray<3> adamForwardInplaceCUDA(const torch::Tensor &updates, const torch::Tensor &mu, const torch::Tensor &nu, const float b1, @@ -53,4 +53,4 @@ TensorArray<2> adamBackwardUpdatesCUDA(const torch::Tensor &dupdates, const torch::Tensor &new_nu, const float b1, const float b2, const int count); -} // namespace TorchOpt +} // namespace torchopt diff --git a/include/adam_op/adam_op_impl.h b/include/adam_op/adam_op_impl.h index 96393d16..2514aa48 100644 --- a/include/adam_op/adam_op_impl.h +++ b/include/adam_op/adam_op_impl.h @@ -18,9 +18,9 @@ #include -#include "adam_op/common.h" +#include "common.h" -namespace TorchOpt { +namespace torchopt { TensorArray<3> adamForwardInplaceCPU(const torch::Tensor& updates, const torch::Tensor& mu, const torch::Tensor& nu, const float b1, @@ -52,4 +52,4 @@ TensorArray<2> adamBackwardUpdatesCPU(const torch::Tensor& dupdates, const torch::Tensor& new_nu, const 
float b1, const float b2, const int count); -} // namespace TorchOpt +} // namespace torchopt diff --git a/include/common.h b/include/common.h index e5c681b6..e4362013 100644 --- a/include/common.h +++ b/include/common.h @@ -18,7 +18,7 @@ #include -namespace TorchOpt { +namespace torchopt { template using TensorArray = std::array; } diff --git a/include/utils.h b/include/utils.h index ddc0a992..92f9bad0 100644 --- a/include/utils.h +++ b/include/utils.h @@ -22,7 +22,7 @@ #define __forceinline__ __inline__ __attribute__((always_inline)) #endif -namespace TorchOpt { +namespace torchopt { __forceinline__ size_t getTensorPlainSize(const torch::Tensor& tensor) { const auto dim = tensor.dim(); size_t n = 1; @@ -31,4 +31,4 @@ __forceinline__ size_t getTensorPlainSize(const torch::Tensor& tensor) { } return n; } -} // namespace TorchOpt +} // namespace torchopt diff --git a/setup.cfg b/setup.cfg index 52dc6283..f43fc9bc 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,14 +1,15 @@ [yapf] based_on_style = yapf +indent_width = 4 +continuation_indent_width = 4 spaces_before_comment = 2 dedent_closing_brackets = true -column_limit = 79 -continuation_indent_width = 2 +column_limit = 100 [flake8] exclude = .git -indent_size = 2 +indent_size = 4 [pydocstyle] convention = google @@ -16,7 +17,7 @@ convention = google [isort] profile = black multi_line_output = 3 -indent = 2 +indent = 4 line_length = 79 [mypy] @@ -39,4 +40,4 @@ warn_unused_configs = True warn_unused_ignores = True [doc8] -max-line-length = 200 \ No newline at end of file +max-line-length = 200 diff --git a/setup.py b/setup.py index 9c201878..1c4df4a0 100644 --- a/setup.py +++ b/setup.py @@ -8,113 +8,103 @@ class MyBuild(build_ext): - - def run(self): - self.build_cmake() - - def copy(self, build_temp): - from distutils.file_util import copy_file - cwd = str(pathlib.Path().absolute()) - src = os.path.join('.', build_temp, 'src') - ops = os.listdir(src) - for op in ops: - op_path = os.path.join(src, op) - if not os.path.isdir(op_path): - continue - files = os.listdir(op_path) - for file in files: - if file.split('.')[-1] == 'so': - copy_file( - os.path.join(op_path, file), os.path.join(cwd, 'TorchOpt', '_lib') - ) - - def build_cmake(self): - cwd = pathlib.Path().absolute() - - build_temp = f"{pathlib.Path(self.build_temp)}" - os.makedirs(build_temp, exist_ok=True) - - config = "Debug" if self.debug else "Release" - - PYTHON_INCLUDE_DIR = "" - for path in self.include_dirs: - PYTHON_INCLUDE_DIR += path + ';' - - TORCH_INCLUDE_PATH = "" - for path in cpp_extension.include_paths(): - TORCH_INCLUDE_PATH += path + ';' - - TORCH_LIBRARY_PATH = "" - for path in cpp_extension.library_paths(): - TORCH_LIBRARY_PATH += path + ';' - - cmake_args = [ - "-DPYTHON_INCLUDE_DIR=" + PYTHON_INCLUDE_DIR, - "-DTORCH_INCLUDE_PATH=" + TORCH_INCLUDE_PATH, - "-DTORCH_LIBRARY_PATH=" + TORCH_LIBRARY_PATH, - "-DCMAKE_BUILD_TYPE=" + config - ] - - build_args = ["--config", config, "--", "-j4"] - - os.chdir(build_temp) - self.spawn(["cmake", f"{str(cwd)}"] + cmake_args) - if not self.dry_run: - self.spawn(["cmake", "--build", "."] + build_args) - os.chdir(str(cwd)) - self.copy(build_temp) + def run(self): + self.build_cmake() + + def copy(self, build_temp): + from distutils.file_util import copy_file + cwd = str(pathlib.Path().absolute()) + src = os.path.join('.', build_temp, 'src') + ops = os.listdir(src) + for op in ops: + op_path = os.path.join(src, op) + if not os.path.isdir(op_path): + continue + files = os.listdir(op_path) + for file in files: + if file.split('.')[-1] 
== 'so':
+                    copy_file(os.path.join(op_path, file),
+                              os.path.join(cwd, 'torchopt', '_lib'))
+
+    def build_cmake(self):
+        cwd = pathlib.Path().absolute()
+
+        build_temp = str(pathlib.Path(self.build_temp))
+        os.makedirs(build_temp, exist_ok=True)
+
+        config = "Debug" if self.debug else "Release"
+
+        PYTHON_INCLUDE_DIR = ";".join(self.include_dirs)
+        TORCH_INCLUDE_PATH = ";".join(cpp_extension.include_paths())
+        TORCH_LIBRARY_PATH = ";".join(cpp_extension.library_paths())
+
+        cmake_args = [
+            f"-DCMAKE_BUILD_TYPE={config}",
+            f"-DPYTHON_EXECUTABLE={sys.executable}",
+            f"-DPYTHON_INCLUDE_DIR={PYTHON_INCLUDE_DIR}",
+            f"-DTORCH_INCLUDE_PATH={TORCH_INCLUDE_PATH}",
+            f"-DTORCH_LIBRARY_PATH={TORCH_LIBRARY_PATH}",
+        ]
+
+        build_args = ["--config", config, "--", "-j4"]
+
+        os.chdir(build_temp)
+        self.spawn(["cmake", f"{str(cwd)}"] + cmake_args)
+        if not self.dry_run:
+            self.spawn(["cmake", "--build", "."] + build_args)
+        os.chdir(str(cwd))
+        self.copy(build_temp)
 
 
 class download_shared():
-
-  def __init__(self):
-    import urllib
-    dir_path = os.path.dirname(os.path.realpath(__file__))
-    print(f"setup.py at {dir_path}")
-    print("downloading shared libraries")
-    op_urls = []
-    if sys.version_info >= (3, 8) and sys.version_info < (3, 9):
-      op_urls.append(
-          "https://torchopt.oss-cn-beijing.aliyuncs.com/torch1_11/adam_op.cpython-38-x86_64-linux-gnu.so"
-      )
-    elif sys.version_info >= (3, 9) and sys.version_info < (3, 10):
-      op_urls.append(
-          "https://torchopt.oss-cn-beijing.aliyuncs.com/torch1_11/adam_op.cpython-39-x86_64-linux-gnu.so"
-      )
-
-    if len(op_urls) == 0:
-      import warnings
-      warnings.warn("no pre-compiled libraries for you python version")
-      return
-
-    for url in op_urls:
-      data = urllib.request.urlopen(url)
-      filename = url.rpartition('/')[-1]
-      file_path = os.path.join(dir_path, 'TorchOpt', '_lib', filename)
-      with open(file_path, 'wb') as f:
-        f.write(data.read())
-    print("shared libraries downloaded")
+    def __init__(self):
+        import urllib.request
+        dir_path = os.path.dirname(os.path.realpath(__file__))
+        print(f"setup.py at {dir_path}")
+        print("downloading shared libraries")
+        op_urls = []
+        if sys.version_info >= (3, 8) and sys.version_info < (3, 9):
+            op_urls.append(
+                "https://torchopt.oss-cn-beijing.aliyuncs.com/torch1_11/adam_op.cpython-38-x86_64-linux-gnu.so"
+            )
+        elif sys.version_info >= (3, 9) and sys.version_info < (3, 10):
+            op_urls.append(
+                "https://torchopt.oss-cn-beijing.aliyuncs.com/torch1_11/adam_op.cpython-39-x86_64-linux-gnu.so"
+            )
+
+        if len(op_urls) == 0:
+            import warnings
+            warnings.warn("no pre-compiled libraries for your Python version")
+            return
+
+        for url in op_urls:
+            data = urllib.request.urlopen(url)
+            filename = url.rpartition('/')[-1]
+            file_path = os.path.join(dir_path, 'torchopt', '_lib', filename)
+            with open(file_path, 'wb') as f:
+                f.write(data.read())
+        print("shared libraries downloaded")
 
 
 if 'build_from_source' not in sys.argv:
     download_shared()
 
 setup(
-    name="TorchOpt",
-    version="0.4.1",
-    author="TorchOpt Contributors",
-    author_email="jieren9806@gmail.com",
-    description="A Jax-style optimizer.",
-    license="Apache License Version 2.0",
-    keywords="meta learning",
-    url="https://github.com/metaopt/TorchOpt",
-    packages=find_packages(),
-    package_data={"": ["_lib/*.so"]},
-    include_package_data=True,
-    cmdclass={'build_from_source': MyBuild},
-    install_requires=[
-        'jax[cpu]',
-        'torch==1.11',
-        'graphviz',
-    ],
+    name="torchopt",
+    version="0.4.1",
+    author="TorchOpt Contributors",
+    author_email="jieren9806@gmail.com, xidong.feng.20@ucl.ac.uk,
benjaminliu.eecs@gmail.com", + description="A Jax-style optimizer.", + license="Apache License Version 2.0", + keywords="meta learning", + url="https://github.com/metaopt/torchopt", + packages=find_packages(), + package_data={"": ["_lib/*.so"]}, + include_package_data=True, + cmdclass={'build_from_source': MyBuild}, + install_requires=[ + 'jax[cpu]', + 'torch==1.11', + 'graphviz', + ], ) diff --git a/src/adam_op/CMakeLists.txt b/src/adam_op/CMakeLists.txt index 88991ad0..cea0371b 100644 --- a/src/adam_op/CMakeLists.txt +++ b/src/adam_op/CMakeLists.txt @@ -47,4 +47,4 @@ pybind11_add_module(adam_op adam_op.cpp adam_op_impl.cpp adam_op_impl.cu) target_link_libraries( adam_op PRIVATE ${TORCH_LIBRARIES} - ) +) diff --git a/src/adam_op/adam_op.cpp b/src/adam_op/adam_op.cpp index f8cfffce..130e3a27 100644 --- a/src/adam_op/adam_op.cpp +++ b/src/adam_op/adam_op.cpp @@ -21,7 +21,7 @@ #include "adam_op/adam_op_impl.cuh" #include "adam_op/adam_op_impl.h" -namespace TorchOpt { +namespace torchopt { TensorArray<3> adamForwardInplace(const torch::Tensor& updates, const torch::Tensor& mu, const torch::Tensor& nu, const float b1, @@ -110,14 +110,14 @@ TensorArray<2> adamBackwardUpdates(const torch::Tensor& dupdates, throw std::runtime_error("Not implemented"); } } -} // namespace TorchOpt +} // namespace torchopt PYBIND11_MODULE(adam_op, m) { - m.def("forward_", &TorchOpt::adamForwardInplace); - m.def("forwardMu", &TorchOpt::adamForwardMu); - m.def("forwardNu", &TorchOpt::adamForwardNu); - m.def("forwardUpdates", &TorchOpt::adamForwardUpdates); - m.def("backwardMu", &TorchOpt::adamBackwardMu); - m.def("backwardNu", &TorchOpt::adamBackwardNu); - m.def("backwardUpdates", &TorchOpt::adamBackwardUpdates); + m.def("forward_", &torchopt::adamForwardInplace); + m.def("forwardMu", &torchopt::adamForwardMu); + m.def("forwardNu", &torchopt::adamForwardNu); + m.def("forwardUpdates", &torchopt::adamForwardUpdates); + m.def("backwardMu", &torchopt::adamBackwardMu); + m.def("backwardNu", &torchopt::adamBackwardNu); + m.def("backwardUpdates", &torchopt::adamBackwardUpdates); } diff --git a/src/adam_op/adam_op_impl.cpp b/src/adam_op/adam_op_impl.cpp index 48427213..71807d09 100644 --- a/src/adam_op/adam_op_impl.cpp +++ b/src/adam_op/adam_op_impl.cpp @@ -13,16 +13,15 @@ // limitations under the License. 
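The `PYBIND11_MODULE` block above is the only Python-visible surface of the fused kernels. As a sanity check on what `forward_` computes, here is a hedged sketch comparing it against a plain PyTorch reference; it assumes the extension has been built and is importable as `torchopt._lib.adam_op`, and that the kernels follow the Optax convention `update = mu_hat / (sqrt(nu_hat + eps_root) + eps)` with bias correction driven by `count`:

import torch

from torchopt._lib import adam_op  # the compiled extension bound above

b1, b2, eps, eps_root, count = 0.9, 0.999, 1e-8, 0.0, 1

g = torch.randn(8)
mu = torch.zeros(8)
nu = torch.zeros(8)

# reference Adam moment update in plain PyTorch
ref_mu = b1 * mu + (1 - b1) * g
ref_nu = b2 * nu + (1 - b2) * g.square()
mu_hat = ref_mu / (1 - b1**count)  # bias correction
nu_hat = ref_nu / (1 - b2**count)
ref_update = mu_hat / (torch.sqrt(nu_hat + eps_root) + eps)

# forward_ works in place, so pass clones; the return order mirrors
# AdamOp.__call__ later in this patch: (updates, mu, nu)
new_updates, new_mu, new_nu = adam_op.forward_(
    g.clone(), mu.clone(), nu.clone(), b1, b2, eps, eps_root, count)

assert torch.allclose(new_mu, ref_mu)
assert torch.allclose(new_nu, ref_nu)
assert torch.allclose(new_updates, ref_update)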
// ============================================================================== -#include "adam_op/adam_op_impl.h" - #include #include #include -#include "include/utils.h" +#include "adam_op/adam_op_impl.h" +#include "utils.h" -namespace TorchOpt { +namespace torchopt { using std::size_t; namespace { template @@ -307,4 +306,4 @@ TensorArray<2> adamBackwardUpdatesCPU(const torch::Tensor& dupdates, })); return TensorArray<2>{std::move(dmu_out), std::move(dnu_out)}; } -} // namespace TorchOpt +} // namespace torchopt diff --git a/src/adam_op/adam_op_impl.cu b/src/adam_op/adam_op_impl.cu index 0b7b4cea..c32f1ad3 100644 --- a/src/adam_op/adam_op_impl.cu +++ b/src/adam_op/adam_op_impl.cu @@ -18,9 +18,9 @@ #include #include "adam_op/adam_op_impl.cuh" -#include "include/utils.h" +#include "utils.h" -namespace TorchOpt { +namespace torchopt { namespace { template @@ -330,4 +330,4 @@ TensorArray<2> adamBackwardUpdatesCUDA(const torch::Tensor &dupdates, })); return TensorArray<2>{std::move(dmu_out), std::move(dnu_out)}; } -} // namespace TorchOpt +} // namespace torchopt diff --git a/tests/unit/high_level/test_high_level_inplace.py b/tests/unit/high_level/test_high_level_inplace.py index 728b0158..04544ecf 100644 --- a/tests/unit/high_level/test_high_level_inplace.py +++ b/tests/unit/high_level/test_high_level_inplace.py @@ -17,182 +17,170 @@ import unittest import torch -from torch.nn import functional as F +import torch.nn.functional as F from torch.utils import data from torchvision import models -from TorchOpt import SGD, Adam, RMSProp +import torchopt class HighLevelInplace(unittest.TestCase): - @classmethod - def setUpClass(cls): - torch.manual_seed(0) - cls.model = models.resnet18() - cls.model_ref = copy.deepcopy(cls.model) - cls.model_backup = copy.deepcopy(cls.model) - - cls.batch_size = 2 - cls.dataset = data.TensorDataset( - torch.randn(2, 3, 224, 224), torch.randint(0, 1000, (2,)) - ) - cls.loader = data.DataLoader(cls.dataset, cls.batch_size, False) - - cls.lr = 1e-3 - - def setUp(self) -> None: - torch.manual_seed(0) - self.model = copy.deepcopy(self.model_backup) - self.model_ref = copy.deepcopy(self.model_backup) - - def test_sgd(self) -> None: - optim = SGD(self.model.parameters(), self.lr) - optim_ref = torch.optim.SGD(self.model_ref.parameters(), self.lr) - for xs, ys in self.loader: - pred = self.model(xs) - pred_ref = self.model_ref(xs) - loss = F.cross_entropy(pred, ys) - loss_ref = F.cross_entropy(pred_ref, ys) - optim.zero_grad() - loss.backward() - optim.step() - optim_ref.zero_grad() - loss_ref.backward() - optim_ref.step() - - with torch.no_grad(): - for p, p_ref in zip( - self.model.parameters(), self.model_ref.parameters() - ): - mse = F.mse_loss(p, p_ref) - self.assertAlmostEqual(float(mse), 0) - for b, b_ref in zip(self.model.buffers(), self.model_ref.buffers()): - b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref - mse = F.mse_loss(b, b_ref) - self.assertAlmostEqual(float(mse), 0) - - def test_adam(self) -> None: - optim = Adam(self.model.parameters(), self.lr) - optim_ref = torch.optim.Adam(self.model_ref.parameters(), self.lr) - for xs, ys in self.loader: - pred = self.model(xs) - pred_ref = self.model_ref(xs) - loss = F.cross_entropy(pred, ys) - loss_ref = F.cross_entropy(pred_ref, ys) - optim.zero_grad() - loss.backward() - optim.step() - optim_ref.zero_grad() - loss_ref.backward() - optim_ref.step() - - with torch.no_grad(): - for p, p_ref in zip( - self.model.parameters(), 
self.model_ref.parameters() - ): - mse = F.mse_loss(p, p_ref) - self.assertAlmostEqual(float(mse), 0) - for b, b_ref in zip(self.model.buffers(), self.model_ref.buffers()): - b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref - mse = F.mse_loss(b, b_ref) - self.assertAlmostEqual(float(mse), 0) - - def test_accelerated_adam_cpu(self) -> None: - self.model - self.model_ref - optim = Adam(self.model.parameters(), self.lr, use_accelerated_op=True) - optim_ref = torch.optim.Adam(self.model_ref.parameters(), self.lr) - for xs, ys in self.loader: - xs = xs - ys = ys - pred = self.model(xs) - pred_ref = self.model_ref(xs) - loss = F.cross_entropy(pred, ys) - loss_ref = F.cross_entropy(pred_ref, ys) - optim.zero_grad() - loss.backward() - optim.step() - optim_ref.zero_grad() - loss_ref.backward() - optim_ref.step() - - with torch.no_grad(): - for p, p_ref in zip( - self.model.parameters(), self.model_ref.parameters() - ): - mse = F.mse_loss(p, p_ref) - self.assertAlmostEqual(float(mse), 0) - for b, b_ref in zip(self.model.buffers(), self.model_ref.buffers()): - b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref - mse = F.mse_loss(b, b_ref) - self.assertAlmostEqual(float(mse), 0) - - def test_accelerated_adam_cuda(self) -> None: - self.model.cuda() - self.model_ref.cuda() - optim = Adam(self.model.parameters(), self.lr, use_accelerated_op=True) - optim_ref = torch.optim.Adam(self.model_ref.parameters(), self.lr) - for xs, ys in self.loader: - xs = xs.cuda() - ys = ys.cuda() - pred = self.model(xs) - pred_ref = self.model_ref(xs) - loss = F.cross_entropy(pred, ys) - loss_ref = F.cross_entropy(pred_ref, ys) - optim.zero_grad() - loss.backward() - optim.step() - optim_ref.zero_grad() - loss_ref.backward() - optim_ref.step() - - with torch.no_grad(): - for p, p_ref in zip( - self.model.parameters(), self.model_ref.parameters() - ): - mse = F.mse_loss(p, p_ref) - self.assertAlmostEqual(float(mse), 0) - for b, b_ref in zip(self.model.buffers(), self.model_ref.buffers()): - b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref - mse = F.mse_loss(b, b_ref) - self.assertAlmostEqual(float(mse), 0) - - def test_rmsprop(self) -> None: - optim = RMSProp( - self.model.parameters(), self.lr, decay=0.99 - ) # pytorch uses 0.99 as the default value - optim_ref = torch.optim.RMSprop(self.model_ref.parameters(), self.lr) - for xs, ys in self.loader: - pred = self.model(xs) - pred_ref = self.model_ref(xs) - loss = F.cross_entropy(pred, ys) - loss_ref = F.cross_entropy(pred_ref, ys) - optim.zero_grad() - loss.backward() - optim.step() - optim_ref.zero_grad() - loss_ref.backward() - optim_ref.step() - - with torch.no_grad(): - for p, p_ref in zip( - self.model.parameters(), self.model_ref.parameters() - ): - mse = F.mse_loss(p, p_ref) - self.assertAlmostEqual( - float(mse), 0, delta=1e-4 - ) # Optax and pytorch have different implementation - for b, b_ref in zip(self.model.buffers(), self.model_ref.buffers()): - b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref - mse = F.mse_loss(b, b_ref) - self.assertAlmostEqual(float(mse), 0) + @classmethod + def setUpClass(cls): + torch.manual_seed(0) + cls.model = models.resnet18() + cls.model_ref = copy.deepcopy(cls.model) + cls.model_backup = copy.deepcopy(cls.model) + + cls.batch_size = 2 + cls.dataset = 
data.TensorDataset(torch.randn(2, 3, 224, 224), torch.randint(0, 1000, (2,))) + cls.loader = data.DataLoader(cls.dataset, cls.batch_size, False) + + cls.lr = 1e-3 + + def setUp(self) -> None: + torch.manual_seed(0) + self.model = copy.deepcopy(self.model_backup) + self.model_ref = copy.deepcopy(self.model_backup) + + def test_sgd(self) -> None: + optim = torchopt.SGD(self.model.parameters(), self.lr) + optim_ref = torch.optim.SGD(self.model_ref.parameters(), self.lr) + for xs, ys in self.loader: + pred = self.model(xs) + pred_ref = self.model_ref(xs) + loss = F.cross_entropy(pred, ys) + loss_ref = F.cross_entropy(pred_ref, ys) + optim.zero_grad() + loss.backward() + optim.step() + optim_ref.zero_grad() + loss_ref.backward() + optim_ref.step() + + with torch.no_grad(): + for p, p_ref in zip(self.model.parameters(), self.model_ref.parameters()): + mse = F.mse_loss(p, p_ref) + self.assertAlmostEqual(float(mse), 0) + for b, b_ref in zip(self.model.buffers(), self.model_ref.buffers()): + b = b.float() if not b.is_floating_point() else b + b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + mse = F.mse_loss(b, b_ref) + self.assertAlmostEqual(float(mse), 0) + + def test_adam(self) -> None: + optim = torchopt.Adam(self.model.parameters(), self.lr) + optim_ref = torch.optim.Adam(self.model_ref.parameters(), self.lr) + for xs, ys in self.loader: + pred = self.model(xs) + pred_ref = self.model_ref(xs) + loss = F.cross_entropy(pred, ys) + loss_ref = F.cross_entropy(pred_ref, ys) + optim.zero_grad() + loss.backward() + optim.step() + optim_ref.zero_grad() + loss_ref.backward() + optim_ref.step() + + with torch.no_grad(): + for p, p_ref in zip(self.model.parameters(), self.model_ref.parameters()): + mse = F.mse_loss(p, p_ref) + self.assertAlmostEqual(float(mse), 0) + for b, b_ref in zip(self.model.buffers(), self.model_ref.buffers()): + b = b.float() if not b.is_floating_point() else b + b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + mse = F.mse_loss(b, b_ref) + self.assertAlmostEqual(float(mse), 0) + + def test_accelerated_adam_cpu(self) -> None: + self.model + self.model_ref + optim = torchopt.Adam(self.model.parameters(), self.lr, use_accelerated_op=True) + optim_ref = torch.optim.Adam(self.model_ref.parameters(), self.lr) + for xs, ys in self.loader: + xs = xs + ys = ys + pred = self.model(xs) + pred_ref = self.model_ref(xs) + loss = F.cross_entropy(pred, ys) + loss_ref = F.cross_entropy(pred_ref, ys) + optim.zero_grad() + loss.backward() + optim.step() + optim_ref.zero_grad() + loss_ref.backward() + optim_ref.step() + + with torch.no_grad(): + for p, p_ref in zip(self.model.parameters(), self.model_ref.parameters()): + mse = F.mse_loss(p, p_ref) + self.assertAlmostEqual(float(mse), 0) + for b, b_ref in zip(self.model.buffers(), self.model_ref.buffers()): + b = b.float() if not b.is_floating_point() else b + b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + mse = F.mse_loss(b, b_ref) + self.assertAlmostEqual(float(mse), 0) + + def test_accelerated_adam_cuda(self) -> None: + self.model.cuda() + self.model_ref.cuda() + optim = torchopt.Adam(self.model.parameters(), self.lr, use_accelerated_op=True) + optim_ref = torch.optim.Adam(self.model_ref.parameters(), self.lr) + for xs, ys in self.loader: + xs = xs.cuda() + ys = ys.cuda() + pred = self.model(xs) + pred_ref = self.model_ref(xs) + loss = F.cross_entropy(pred, ys) + loss_ref = F.cross_entropy(pred_ref, ys) + optim.zero_grad() + loss.backward() + optim.step() + optim_ref.zero_grad() + 
loss_ref.backward() + optim_ref.step() + + with torch.no_grad(): + for p, p_ref in zip(self.model.parameters(), self.model_ref.parameters()): + mse = F.mse_loss(p, p_ref) + self.assertAlmostEqual(float(mse), 0) + for b, b_ref in zip(self.model.buffers(), self.model_ref.buffers()): + b = b.float() if not b.is_floating_point() else b + b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + mse = F.mse_loss(b, b_ref) + self.assertAlmostEqual(float(mse), 0) + + def test_rmsprop(self) -> None: + optim = torchopt.RMSProp( + self.model.parameters(), self.lr, decay=0.99 + ) # pytorch uses 0.99 as the default value + optim_ref = torch.optim.RMSprop(self.model_ref.parameters(), self.lr) + for xs, ys in self.loader: + pred = self.model(xs) + pred_ref = self.model_ref(xs) + loss = F.cross_entropy(pred, ys) + loss_ref = F.cross_entropy(pred_ref, ys) + optim.zero_grad() + loss.backward() + optim.step() + optim_ref.zero_grad() + loss_ref.backward() + optim_ref.step() + + with torch.no_grad(): + for p, p_ref in zip(self.model.parameters(), self.model_ref.parameters()): + mse = F.mse_loss(p, p_ref) + self.assertAlmostEqual( + float(mse), 0, delta=1e-4 + ) # Optax and pytorch have different implementation + for b, b_ref in zip(self.model.buffers(), self.model_ref.buffers()): + b = b.float() if not b.is_floating_point() else b + b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + mse = F.mse_loss(b, b_ref) + self.assertAlmostEqual(float(mse), 0) if __name__ == '__main__': - unittest.main() + unittest.main() diff --git a/tests/unit/low_level/test_low_level_inplace.py b/tests/unit/low_level/test_low_level_inplace.py index e42209c5..c34cd324 100644 --- a/tests/unit/low_level/test_low_level_inplace.py +++ b/tests/unit/low_level/test_low_level_inplace.py @@ -18,190 +18,185 @@ import functorch import torch -from torch.nn import functional as F +import torch.nn.functional as F from torch.utils import data from torchvision import models -import TorchOpt -from TorchOpt import adam, rmsprop, sgd +import torchopt class LowLevelInplace(unittest.TestCase): - @classmethod - def setUpClass(cls): - torch.manual_seed(0) - cls.model = models.resnet18() - cls.model_ref = copy.deepcopy(cls.model) - cls.model_backup = copy.deepcopy(cls.model) - - cls.batch_size = 2 - cls.dataset = data.TensorDataset( - torch.randn(2, 3, 224, 224), torch.randint(0, 1000, (2,)) - ) - cls.loader = data.DataLoader(cls.dataset, cls.batch_size, False) - - cls.lr = 1e-3 - - def setUp(self) -> None: - torch.manual_seed(0) - self.model = copy.deepcopy(self.model_backup) - self.model_ref = copy.deepcopy(self.model_backup) - - def test_sgd(self) -> None: - fun, params, buffers = functorch.make_functional_with_buffers(self.model) - optim = sgd(self.lr) - optim_state = optim.init(params) - optim_ref = torch.optim.SGD(self.model_ref.parameters(), self.lr) - - for xs, ys in self.loader: - pred = fun(params, buffers, xs) - pred_ref = self.model_ref(xs) - loss = F.cross_entropy(pred, ys) - loss_ref = F.cross_entropy(pred_ref, ys) - - grad = torch.autograd.grad(loss, params) - updates, optim_state = optim.update(grad, optim_state) - params = TorchOpt.apply_updates(params, updates) - - optim_ref.zero_grad() - loss_ref.backward() - optim_ref.step() - - with torch.no_grad(): - for p, p_ref in zip(params, self.model_ref.parameters()): - mse = F.mse_loss(p, p_ref) - self.assertAlmostEqual(float(mse), 0) - for b, b_ref in zip(buffers, self.model_ref.buffers()): - b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float() if 
not b_ref.is_floating_point() else b_ref - mse = F.mse_loss(b, b_ref) - self.assertAlmostEqual(float(mse), 0) - - def test_adam(self) -> None: - fun, params, buffers = functorch.make_functional_with_buffers(self.model) - optim = adam(self.lr) - optim_state = optim.init(params) - optim_ref = torch.optim.Adam(self.model_ref.parameters(), self.lr) - for xs, ys in self.loader: - pred = fun(params, buffers, xs) - pred_ref = self.model_ref(xs) - loss = F.cross_entropy(pred, ys) - loss_ref = F.cross_entropy(pred_ref, ys) - - grad = torch.autograd.grad(loss, params) - updates, optim_state = optim.update(grad, optim_state) - params = TorchOpt.apply_updates(params, updates) - - optim_ref.zero_grad() - loss_ref.backward() - optim_ref.step() - with torch.no_grad(): - for p, p_ref in zip(params, self.model_ref.parameters()): - mse = F.mse_loss(p, p_ref) - self.assertAlmostEqual(float(mse), 0) - for b, b_ref in zip(buffers, self.model_ref.buffers()): - b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref - mse = F.mse_loss(b, b_ref) - self.assertAlmostEqual(float(mse), 0) - - def test_accelerated_adam_cpu(self) -> None: - self.model - self.model_ref - fun, params, buffers = functorch.make_functional_with_buffers(self.model) - optim = adam(self.lr, use_accelerated_op=True) - optim_state = optim.init(params) - optim_ref = torch.optim.Adam(self.model_ref.parameters(), self.lr) - for xs, ys in self.loader: - xs = xs - ys = ys - pred = fun(params, buffers, xs) - pred_ref = self.model_ref(xs) - loss = F.cross_entropy(pred, ys) - loss_ref = F.cross_entropy(pred_ref, ys) - - grad = torch.autograd.grad(loss, params) - updates, optim_state = optim.update(grad, optim_state) - params = TorchOpt.apply_updates(params, updates) - - optim_ref.zero_grad() - loss_ref.backward() - optim_ref.step() - with torch.no_grad(): - for p, p_ref in zip(params, self.model_ref.parameters()): - mse = F.mse_loss(p, p_ref) - self.assertAlmostEqual(float(mse), 0) - for b, b_ref in zip(buffers, self.model_ref.buffers()): - b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref - mse = F.mse_loss(b, b_ref) - self.assertAlmostEqual(float(mse), 0) - - def test_accelerated_adam_cuda(self) -> None: - self.model.cuda() - self.model_ref.cuda() - fun, params, buffers = functorch.make_functional_with_buffers(self.model) - optim = adam(self.lr, use_accelerated_op=True) - optim_state = optim.init(params) - optim_ref = torch.optim.Adam(self.model_ref.parameters(), self.lr) - for xs, ys in self.loader: - xs = xs.cuda() - ys = ys.cuda() - pred = fun(params, buffers, xs) - pred_ref = self.model_ref(xs) - loss = F.cross_entropy(pred, ys) - loss_ref = F.cross_entropy(pred_ref, ys) - - grad = torch.autograd.grad(loss, params) - updates, optim_state = optim.update(grad, optim_state) - params = TorchOpt.apply_updates(params, updates) - - optim_ref.zero_grad() - loss_ref.backward() - optim_ref.step() - with torch.no_grad(): - for p, p_ref in zip(params, self.model_ref.parameters()): - mse = F.mse_loss(p, p_ref) - self.assertAlmostEqual(float(mse), 0) - for b, b_ref in zip(buffers, self.model_ref.buffers()): - b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref - mse = F.mse_loss(b, b_ref) - self.assertAlmostEqual(float(mse), 0) - - def test_rmsprop(self) -> None: - fun, params, buffers = functorch.make_functional_with_buffers(self.model) - optim = rmsprop( - self.lr, 
decay=0.99 - ) # pytorch uses 0.99 as the default value - optim_state = optim.init(params) - optim_ref = torch.optim.RMSprop(self.model_ref.parameters(), self.lr) - for xs, ys in self.loader: - pred = fun(params, buffers, xs) - pred_ref = self.model_ref(xs) - loss = F.cross_entropy(pred, ys) - loss_ref = F.cross_entropy(pred_ref, ys) - - grad = torch.autograd.grad(loss, params) - updates, optim_state = optim.update(grad, optim_state) - params = TorchOpt.apply_updates(params, updates) - - optim_ref.zero_grad() - loss_ref.backward() - optim_ref.step() - with torch.no_grad(): - for p, p_ref in zip(params, self.model_ref.parameters()): - mse = F.mse_loss(p, p_ref) - self.assertAlmostEqual( - float(mse), 0, delta=1e-4 - ) # Optax and pytorch have different implementation - for b, b_ref in zip(buffers, self.model_ref.buffers()): - b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref - mse = F.mse_loss(b, b_ref) - self.assertAlmostEqual(float(mse), 0) + @classmethod + def setUpClass(cls): + torch.manual_seed(0) + cls.model = models.resnet18() + cls.model_ref = copy.deepcopy(cls.model) + cls.model_backup = copy.deepcopy(cls.model) + + cls.batch_size = 2 + cls.dataset = data.TensorDataset(torch.randn(2, 3, 224, 224), torch.randint(0, 1000, (2,))) + cls.loader = data.DataLoader(cls.dataset, cls.batch_size, False) + + cls.lr = 1e-3 + + def setUp(self) -> None: + torch.manual_seed(0) + self.model = copy.deepcopy(self.model_backup) + self.model_ref = copy.deepcopy(self.model_backup) + + def test_sgd(self) -> None: + fun, params, buffers = functorch.make_functional_with_buffers(self.model) + optim = torchopt.sgd(self.lr) + optim_state = optim.init(params) + optim_ref = torch.optim.SGD(self.model_ref.parameters(), self.lr) + + for xs, ys in self.loader: + pred = fun(params, buffers, xs) + pred_ref = self.model_ref(xs) + loss = F.cross_entropy(pred, ys) + loss_ref = F.cross_entropy(pred_ref, ys) + + grad = torch.autograd.grad(loss, params) + updates, optim_state = optim.update(grad, optim_state) + params = torchopt.apply_updates(params, updates) + + optim_ref.zero_grad() + loss_ref.backward() + optim_ref.step() + + with torch.no_grad(): + for p, p_ref in zip(params, self.model_ref.parameters()): + mse = F.mse_loss(p, p_ref) + self.assertAlmostEqual(float(mse), 0) + for b, b_ref in zip(buffers, self.model_ref.buffers()): + b = b.float() if not b.is_floating_point() else b + b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + mse = F.mse_loss(b, b_ref) + self.assertAlmostEqual(float(mse), 0) + + def test_adam(self) -> None: + fun, params, buffers = functorch.make_functional_with_buffers(self.model) + optim = torchopt.adam(self.lr) + optim_state = optim.init(params) + optim_ref = torch.optim.Adam(self.model_ref.parameters(), self.lr) + for xs, ys in self.loader: + pred = fun(params, buffers, xs) + pred_ref = self.model_ref(xs) + loss = F.cross_entropy(pred, ys) + loss_ref = F.cross_entropy(pred_ref, ys) + + grad = torch.autograd.grad(loss, params) + updates, optim_state = optim.update(grad, optim_state) + params = torchopt.apply_updates(params, updates) + + optim_ref.zero_grad() + loss_ref.backward() + optim_ref.step() + with torch.no_grad(): + for p, p_ref in zip(params, self.model_ref.parameters()): + mse = F.mse_loss(p, p_ref) + self.assertAlmostEqual(float(mse), 0) + for b, b_ref in zip(buffers, self.model_ref.buffers()): + b = b.float() if not b.is_floating_point() else b + b_ref = b_ref.float() if not 
b_ref.is_floating_point() else b_ref + mse = F.mse_loss(b, b_ref) + self.assertAlmostEqual(float(mse), 0) + + def test_accelerated_adam_cpu(self) -> None: + self.model + self.model_ref + fun, params, buffers = functorch.make_functional_with_buffers(self.model) + optim = torchopt.adam(self.lr, use_accelerated_op=True) + optim_state = optim.init(params) + optim_ref = torch.optim.Adam(self.model_ref.parameters(), self.lr) + for xs, ys in self.loader: + xs = xs + ys = ys + pred = fun(params, buffers, xs) + pred_ref = self.model_ref(xs) + loss = F.cross_entropy(pred, ys) + loss_ref = F.cross_entropy(pred_ref, ys) + + grad = torch.autograd.grad(loss, params) + updates, optim_state = optim.update(grad, optim_state) + params = torchopt.apply_updates(params, updates) + + optim_ref.zero_grad() + loss_ref.backward() + optim_ref.step() + with torch.no_grad(): + for p, p_ref in zip(params, self.model_ref.parameters()): + mse = F.mse_loss(p, p_ref) + self.assertAlmostEqual(float(mse), 0) + for b, b_ref in zip(buffers, self.model_ref.buffers()): + b = b.float() if not b.is_floating_point() else b + b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + mse = F.mse_loss(b, b_ref) + self.assertAlmostEqual(float(mse), 0) + + def test_accelerated_adam_cuda(self) -> None: + self.model.cuda() + self.model_ref.cuda() + fun, params, buffers = functorch.make_functional_with_buffers(self.model) + optim = torchopt.adam(self.lr, use_accelerated_op=True) + optim_state = optim.init(params) + optim_ref = torch.optim.Adam(self.model_ref.parameters(), self.lr) + for xs, ys in self.loader: + xs = xs.cuda() + ys = ys.cuda() + pred = fun(params, buffers, xs) + pred_ref = self.model_ref(xs) + loss = F.cross_entropy(pred, ys) + loss_ref = F.cross_entropy(pred_ref, ys) + + grad = torch.autograd.grad(loss, params) + updates, optim_state = optim.update(grad, optim_state) + params = torchopt.apply_updates(params, updates) + + optim_ref.zero_grad() + loss_ref.backward() + optim_ref.step() + with torch.no_grad(): + for p, p_ref in zip(params, self.model_ref.parameters()): + mse = F.mse_loss(p, p_ref) + self.assertAlmostEqual(float(mse), 0) + for b, b_ref in zip(buffers, self.model_ref.buffers()): + b = b.float() if not b.is_floating_point() else b + b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + mse = F.mse_loss(b, b_ref) + self.assertAlmostEqual(float(mse), 0) + + def test_rmsprop(self) -> None: + fun, params, buffers = functorch.make_functional_with_buffers(self.model) + optim = torchopt.rmsprop(self.lr, decay=0.99) # pytorch uses 0.99 as the default value + optim_state = optim.init(params) + optim_ref = torch.optim.RMSprop(self.model_ref.parameters(), self.lr) + for xs, ys in self.loader: + pred = fun(params, buffers, xs) + pred_ref = self.model_ref(xs) + loss = F.cross_entropy(pred, ys) + loss_ref = F.cross_entropy(pred_ref, ys) + + grad = torch.autograd.grad(loss, params) + updates, optim_state = optim.update(grad, optim_state) + params = torchopt.apply_updates(params, updates) + + optim_ref.zero_grad() + loss_ref.backward() + optim_ref.step() + with torch.no_grad(): + for p, p_ref in zip(params, self.model_ref.parameters()): + mse = F.mse_loss(p, p_ref) + self.assertAlmostEqual( + float(mse), 0, delta=1e-4 + ) # Optax and pytorch have different implementation + for b, b_ref in zip(buffers, self.model_ref.buffers()): + b = b.float() if not b.is_floating_point() else b + b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + mse = F.mse_loss(b, b_ref) + 
self.assertAlmostEqual(float(mse), 0) if __name__ == '__main__': - unittest.main() + unittest.main() diff --git a/tests/unit/test_clip.py b/tests/unit/test_clip.py index c129db6e..5967c9f4 100644 --- a/tests/unit/test_clip.py +++ b/tests/unit/test_clip.py @@ -17,69 +17,64 @@ import unittest import torch -from torch.nn import functional as F +import torch.nn.functional as F from torch.nn.utils import clip_grad_norm_ from torch.utils import data from torchvision import models -import TorchOpt -from TorchOpt import Optimizer, sgd +import torchopt class HighLevelInplace(unittest.TestCase): - @classmethod - def setUpClass(cls): - torch.manual_seed(0) - cls.model = models.resnet18() - cls.model_backup = copy.deepcopy(cls.model) - cls.model_ref = copy.deepcopy(cls.model) + @classmethod + def setUpClass(cls): + torch.manual_seed(0) + cls.model = models.resnet18() + cls.model_backup = copy.deepcopy(cls.model) + cls.model_ref = copy.deepcopy(cls.model) - cls.batch_size = 2 - cls.dataset = data.TensorDataset( - torch.randn(2, 3, 224, 224), torch.randint(0, 1000, (2,)) - ) - cls.loader = data.DataLoader(cls.dataset, cls.batch_size, False) + cls.batch_size = 2 + cls.dataset = data.TensorDataset(torch.randn(2, 3, 224, 224), torch.randint(0, 1000, (2,))) + cls.loader = data.DataLoader(cls.dataset, cls.batch_size, False) - cls.lr = 1e0 - cls.max_norm = 10. + cls.lr = 1e0 + cls.max_norm = 10. - def setUp(self) -> None: - torch.manual_seed(0) - self.model = copy.deepcopy(self.model_backup) - self.model_ref = copy.deepcopy(self.model_backup) + def setUp(self) -> None: + torch.manual_seed(0) + self.model = copy.deepcopy(self.model_backup) + self.model_ref = copy.deepcopy(self.model_backup) - def test_sgd(self) -> None: - chain = TorchOpt.combine.chain( - TorchOpt.clip.clip_grad_norm(max_norm=self.max_norm), sgd(lr=self.lr) - ) - optim = Optimizer(self.model.parameters(), chain) - optim_ref = torch.optim.SGD(self.model_ref.parameters(), self.lr) - for xs, ys in self.loader: - pred = self.model(xs) - pred_ref = self.model_ref(xs) - loss = F.cross_entropy(pred, ys) - loss_ref = F.cross_entropy(pred_ref, ys) - optim.zero_grad() - loss.backward() - optim.step() - optim_ref.zero_grad() - loss_ref.backward() - clip_grad_norm_(self.model_ref.parameters(), max_norm=self.max_norm) - optim_ref.step() + def test_sgd(self) -> None: + chain = torchopt.combine.chain( + torchopt.clip.clip_grad_norm(max_norm=self.max_norm), torchopt.sgd(lr=self.lr) + ) + optim = torchopt.Optimizer(self.model.parameters(), chain) + optim_ref = torch.optim.SGD(self.model_ref.parameters(), self.lr) + for xs, ys in self.loader: + pred = self.model(xs) + pred_ref = self.model_ref(xs) + loss = F.cross_entropy(pred, ys) + loss_ref = F.cross_entropy(pred_ref, ys) + optim.zero_grad() + loss.backward() + optim.step() + optim_ref.zero_grad() + loss_ref.backward() + clip_grad_norm_(self.model_ref.parameters(), max_norm=self.max_norm) + optim_ref.step() - with torch.no_grad(): - for p, p_ref in zip( - self.model.parameters(), self.model_ref.parameters() - ): - mse = F.mse_loss(p, p_ref) - self.assertAlmostEqual(float(mse), 0) - for b, b_ref in zip(self.model.buffers(), self.model_ref.buffers()): - b = b.float() if not b.is_floating_point() else b - b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref - mse = F.mse_loss(b, b_ref) - self.assertAlmostEqual(float(mse), 0) + with torch.no_grad(): + for p, p_ref in zip(self.model.parameters(), self.model_ref.parameters()): + mse = F.mse_loss(p, p_ref) + self.assertAlmostEqual(float(mse), 0) + for 
b, b_ref in zip(self.model.buffers(), self.model_ref.buffers()): + b = b.float() if not b.is_floating_point() else b + b_ref = b_ref.float() if not b_ref.is_floating_point() else b_ref + mse = F.mse_loss(b, b_ref) + self.assertAlmostEqual(float(mse), 0) if __name__ == '__main__': - unittest.main() + unittest.main() diff --git a/tests/unit/test_schedule.py b/tests/unit/test_schedule.py index 1e8f2831..66950050 100644 --- a/tests/unit/test_schedule.py +++ b/tests/unit/test_schedule.py @@ -15,35 +15,35 @@ import unittest -import TorchOpt +import torchopt class TestSchedule(unittest.TestCase): - @classmethod - def setUpClass(cls): - cls.init_value = 1. - cls.end_value = 0. - cls.gap_value = cls.init_value - cls.end_value - cls.transition_steps = 10 - cls.transition_begin = 1 - - def setUp(self) -> None: - pass - - def test_linear(self) -> None: - schedule = TorchOpt.schedule.linear_schedule( - init_value=self.init_value, - end_value=self.end_value, - transition_steps=self.transition_steps, - transition_begin=self.transition_begin - ) - for i in range(self.transition_begin, self.transition_steps): - lr = schedule(i) - lr_gt = self.init_value - self.gap_value * \ - (i - self.transition_begin) / self.transition_steps - self.assertEqual(lr, lr_gt) + @classmethod + def setUpClass(cls): + cls.init_value = 1. + cls.end_value = 0. + cls.gap_value = cls.init_value - cls.end_value + cls.transition_steps = 10 + cls.transition_begin = 1 + + def setUp(self) -> None: + pass + + def test_linear(self) -> None: + schedule = torchopt.schedule.linear_schedule( + init_value=self.init_value, + end_value=self.end_value, + transition_steps=self.transition_steps, + transition_begin=self.transition_begin + ) + for i in range(self.transition_begin, self.transition_steps): + lr = schedule(i) + lr_gt = self.init_value - self.gap_value * \ + (i - self.transition_begin) / self.transition_steps + self.assertEqual(lr, lr_gt) if __name__ == '__main__': - unittest.main() + unittest.main() diff --git a/torchopt/__init__.py b/torchopt/__init__.py new file mode 100644 index 00000000..6672c724 --- /dev/null +++ b/torchopt/__init__.py @@ -0,0 +1,64 @@ +# Copyright 2022 MetaOPT Team. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
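The linear-schedule test above pins the decay rule down exactly. For reference, here is the same rule as a standalone sketch; the behaviour outside the transition window (holding the endpoint values) is an assumption, since the test only probes steps inside it:

def linear_schedule(init_value, end_value, transition_steps, transition_begin=0):
    """Reference implementation of the linear decay rule checked by the test above."""

    def schedule(count):
        # assumed behaviour outside the transition window: hold the endpoint values
        progress = max(0, count - transition_begin)
        frac = min(progress / transition_steps, 1.0)
        return init_value - (init_value - end_value) * frac

    return schedule


lr = linear_schedule(init_value=1., end_value=0., transition_steps=10, transition_begin=1)
assert lr(1) == 1.0 and lr(6) == 0.5

Since `_scale_by_lr` in `alias.py` (later in this patch) accepts callables, a schedule like this can be passed directly as the `lr` argument of `torchopt.adam` or `torchopt.sgd`.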
+# ============================================================================== +"""TorchOpt: a high-performance optimizer library built upon PyTorch.""" + +from torchopt._src import ( + accelerated_op_available, + clip, + combine, + hook, + schedule, + visual, +) +from torchopt._src.alias import adam, rmsprop, sgd +from torchopt._src.optimizer import SGD, Adam, Optimizer, RMSProp, meta +from torchopt._src.optimizer.meta import ( + MetaAdam, + MetaOptimizer, + MetaRMSProp, + MetaSGD, +) +from torchopt._src.update import apply_updates +from torchopt._src.utils import ( + extract_state_dict, + recover_state_dict, + stop_gradient, +) + +__version__ = "0.4.1" + +__all__ = [ + "accelerated_op_available", + "clip", + "combine", + "hook", + "schedule", + "visual", + "adam", + "rmsprop", + "sgd", + "Optimizer", + "SGD", + "Adam", + "RMSProp", + "MetaOptimizer", + "MetaSGD", + "MetaAdam", + "MetaRMSProp", + "apply_updates", + "extract_state_dict", + "recover_state_dict", + "stop_gradient", +] diff --git a/TorchOpt/_lib/__init__.py b/torchopt/_lib/__init__.py similarity index 100% rename from TorchOpt/_lib/__init__.py rename to torchopt/_lib/__init__.py diff --git a/torchopt/_lib/adam_op.py b/torchopt/_lib/adam_op.py new file mode 100644 index 00000000..ca10e621 --- /dev/null +++ b/torchopt/_lib/adam_op.py @@ -0,0 +1,57 @@ +# Copyright 2022 MetaOPT Team. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ==============================================================================\ + +from typing import Tuple + +import torch + + +def forward_( + updates: torch.Tensor, mu: torch.Tensor, nu: torch.Tensor, b1: float, b2: float, eps: float, + eps_root: float, count: int +) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: + ... + + +def forwardMu(updates: torch.Tensor, mu: torch.Tensor, b1: float) -> torch.Tensor: + ... + + +def forwardNu(updates: torch.Tensor, nu: torch.Tensor, b2: float) -> torch.Tensor: + ... + + +def forwardUpdates( + new_mu: torch.Tensor, new_nu: torch.Tensor, b1: float, b2: float, eps: float, eps_root: float, + count: int +) -> torch.Tensor: + ... + + +def backwardMu(dmu: torch.Tensor, updates: torch.Tensor, mu: torch.Tensor, + b1: float) -> Tuple[torch.Tensor, torch.Tensor]: + ... + + +def backwardNu(dnu: torch.Tensor, updates: torch.Tensor, nu: torch.Tensor, + b2: float) -> Tuple[torch.Tensor, torch.Tensor]: + ... + + +def backwardUpdates( + dupdates: torch.Tensor, updates: torch.Tensor, new_mu: torch.Tensor, new_nu: torch.Tensor, + b1: float, b2: float, count: int +) -> Tuple[torch.Tensor, torch.Tensor]: + ... diff --git a/TorchOpt/_src/__init__.py b/torchopt/_src/__init__.py similarity index 91% rename from TorchOpt/_src/__init__.py rename to torchopt/_src/__init__.py index 522a892f..75b3cf8d 100644 --- a/TorchOpt/_src/__init__.py +++ b/torchopt/_src/__init__.py @@ -13,4 +13,4 @@ # limitations under the License. 
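The stub file above only records the type signatures of the compiled extension; the user-facing entry points are the names re-exported in `torchopt/__init__.py`. For orientation, here is a minimal functional-style training loop in the spirit of the low-level tests earlier in this patch (the linear model and random data are placeholders):

import functorch
import torch
import torch.nn.functional as F

import torchopt

model = torch.nn.Linear(4, 2)
fun, params = functorch.make_functional(model)

optim = torchopt.sgd(1e-2)  # a GradientTransformation, not a stateful object
optim_state = optim.init(params)

xs = torch.randn(8, 4)
ys = torch.randint(0, 2, (8,))

for _ in range(5):
    loss = F.cross_entropy(fun(params, xs), ys)
    grad = torch.autograd.grad(loss, params)
    updates, optim_state = optim.update(grad, optim_state)
    params = torchopt.apply_updates(params, updates)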
# ============================================================================== -from TorchOpt._src.accelerated_op import accelerated_op_available +from torchopt._src.accelerated_op import accelerated_op_available diff --git a/TorchOpt/_src/accelerated_op/__init__.py b/torchopt/_src/accelerated_op/__init__.py similarity index 61% rename from TorchOpt/_src/accelerated_op/__init__.py rename to torchopt/_src/accelerated_op/__init__.py index d6fa1792..ab494d23 100644 --- a/TorchOpt/_src/accelerated_op/__init__.py +++ b/torchopt/_src/accelerated_op/__init__.py @@ -13,20 +13,20 @@ # limitations under the License. # ============================================================================== -from TorchOpt._src.accelerated_op.adam_op import AdamOp +from torchopt._src.accelerated_op.adam_op import AdamOp def accelerated_op_available(devices=None): - import torch - op = AdamOp() - if devices is None: - devices = [torch.device("cuda"), torch.device("cpu")] - elif isinstance(devices, torch.device): - devices = [devices] - try: - for device in devices: - updates = torch.tensor(1., device=device) - op(updates, updates, updates, 1) - return True - except: - return False + import torch + op = AdamOp() + if devices is None: + devices = [torch.device("cuda"), torch.device("cpu")] + elif isinstance(devices, torch.device): + devices = [devices] + try: + for device in devices: + updates = torch.tensor(1., device=device) + op(updates, updates, updates, 1) + return True + except BaseException: + return False diff --git a/TorchOpt/_src/accelerated_op/adam_op/__init__.py b/torchopt/_src/accelerated_op/adam_op/__init__.py similarity index 91% rename from TorchOpt/_src/accelerated_op/adam_op/__init__.py rename to torchopt/_src/accelerated_op/adam_op/__init__.py index 95a47453..d1203e92 100644 --- a/TorchOpt/_src/accelerated_op/adam_op/__init__.py +++ b/torchopt/_src/accelerated_op/adam_op/__init__.py @@ -13,4 +13,4 @@ # limitations under the License. # ============================================================================== -from TorchOpt._src.accelerated_op.adam_op.AdamOp import AdamOp +from torchopt._src.accelerated_op.adam_op.adam_op import AdamOp diff --git a/torchopt/_src/accelerated_op/adam_op/adam_op.py b/torchopt/_src/accelerated_op/adam_op/adam_op.py new file mode 100644 index 00000000..94098520 --- /dev/null +++ b/torchopt/_src/accelerated_op/adam_op/adam_op.py @@ -0,0 +1,116 @@ +# Copyright 2022 MetaOPT Team. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
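`accelerated_op_available` above probes the fused op by actually executing it on each requested device, so it doubles as a feature gate. A short sketch (the model is a placeholder):

import torch

import torchopt

model = torch.nn.Linear(4, 2)
# fall back to the pure-Python op when the extension is unusable on this device
use_fused = torchopt.accelerated_op_available(torch.device("cpu"))
optim = torchopt.Adam(model.parameters(), 1e-3, use_accelerated_op=use_fused)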
+# ============================================================================== + +from typing import Any + +import torch + +from torchopt._lib import adam_op + + +class AdamOp(object): + + class MuOp(torch.autograd.Function): + + @staticmethod + def jvp(ctx: Any, *grad_inputs: Any) -> Any: + pass + + @staticmethod + def forward(ctx: Any, *args: Any, **kwargs: Any) -> Any: + updates, mu, b1 = args + new_mu = adam_op.forwardMu(updates, mu, b1) + ctx.save_for_backward(updates, mu) + ctx.b1 = b1 + return new_mu + + @staticmethod + def backward(ctx: Any, *args: Any) -> Any: + dmu = args[0] + updates, mu = ctx.saved_tensors + b1 = ctx.b1 + result = adam_op.backwardMu(dmu, updates, mu, b1) + return result[0], result[1], None + + class NuOp(torch.autograd.Function): + + @staticmethod + def jvp(ctx: Any, *grad_inputs: Any) -> Any: + pass + + @staticmethod + def forward(ctx: Any, *args: Any, **kwargs: Any) -> Any: + updates, nu, b2 = args + new_nu = adam_op.forwardNu(updates, nu, b2) + ctx.save_for_backward(updates, nu) + ctx.b2 = b2 + return new_nu + + @staticmethod + def backward(ctx: Any, *args: Any) -> Any: + dnu = args[0] + updates, nu = ctx.saved_tensors + b2 = ctx.b2 + result = adam_op.backwardNu(dnu, updates, nu, b2) + return result[0], result[1], None + + class UpdatesOp(torch.autograd.Function): + + @staticmethod + def jvp(ctx: Any, *grad_inputs: Any) -> Any: + pass + + @staticmethod + def forward(ctx: Any, *args: Any, **kwargs: Any) -> Any: + new_mu, new_nu, (b1, b2, eps, eps_root, count) = args + new_updates = adam_op.forwardUpdates(new_mu, new_nu, b1, b2, eps, eps_root, count) + ctx.save_for_backward(new_updates, new_mu, new_nu) + ctx.others = (b1, b2, eps, eps_root, count) + return new_updates + + @staticmethod + def backward(ctx: Any, *args: Any) -> Any: + dupdates = args[0] + updates, new_mu, new_nu = ctx.saved_tensors + b1, b2, eps, eps_root, count = ctx.others + result = adam_op.backwardUpdates(dupdates, updates, new_mu, new_nu, b1, b2, count) + return result[0], result[1], None + + def __init__(self, b1=0.9, b2=0.999, eps=1e-8, eps_root=0., inplace=True): + self.b1 = b1 + self.b2 = b2 + self.eps = eps + self.eps_root = eps_root + self.inplace = inplace + + def __call__(self, mu, nu, updates, count): + if updates is None: + return mu, nu, None + if updates.is_cuda: + current_device = torch.cuda.current_device() + torch.cuda.set_device(updates.device) + if self.inplace: + new_updates, new_mu, new_nu = adam_op.forward_( + updates, mu, nu, self.b1, self.b2, self.eps, self.eps_root, count + ) + else: + new_mu = self.MuOp.apply(updates, mu, self.b1) + new_nu = self.NuOp.apply(updates, nu, self.b2) + new_updates = self.UpdatesOp.apply( + new_mu, new_nu, (self.b1, self.b2, self.eps, self.eps_root, count) + ) + if updates.is_cuda: + torch.cuda.set_device(current_device) + return new_mu, new_nu, new_updates diff --git a/torchopt/_src/alias.py b/torchopt/_src/alias.py new file mode 100644 index 00000000..a29adca1 --- /dev/null +++ b/torchopt/_src/alias.py @@ -0,0 +1,205 @@ +# Copyright 2022 MetaOPT Team. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
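Because `MuOp`, `NuOp`, and `UpdatesOp` are `torch.autograd.Function`s, an `AdamOp` constructed with `inplace=False` keeps the transformed update differentiable with respect to the incoming gradients, which is what the meta-learning optimizers rely on. A sketch of calling it directly, assuming the compiled extension is available (end users normally reach it through `use_accelerated_op=True` instead):

import torch

from torchopt._src.accelerated_op.adam_op import AdamOp

op = AdamOp(b1=0.9, b2=0.999, eps=1e-8, eps_root=0., inplace=False)

grad = torch.randn(4, requires_grad=True)
mu = torch.zeros(4)
nu = torch.zeros(4)

new_mu, new_nu, new_updates = op(mu, nu, grad, 1)  # count = 1
# the autograd graph is kept alive, so we can differentiate through the update
new_updates.sum().backward()
print(grad.grad)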
+# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# This file is modified from: +# https://github.com/deepmind/optax/blob/master/optax/_src/alias.py +# ============================================================================== +# Copyright 2019 DeepMind Technologies Limited. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +from typing import Optional + +import jax + +from torchopt._src import base, combine, transform +from torchopt._src.typing import ScalarOrSchedule + + +def _scale_by_lr(lr: ScalarOrSchedule, flip_sign=True): + m = -1 if flip_sign else 1 + if callable(lr): + + def schedule_wrapper(count): + + def f(scaled_lr): + return m * scaled_lr + + return jax.tree_map(f, lr(count)) # type: ignore + + return transform.scale_by_schedule(schedule_wrapper) + return transform.scale(m * lr) + + +def adam( + lr: ScalarOrSchedule, + b1: float = 0.9, + b2: float = 0.999, + eps: float = 1e-8, + eps_root: float = 0.0, + moment_requires_grad: bool = False, + use_accelerated_op: bool = False +) -> base.GradientTransformation: + """The classic Adam optimizer. + + Adam is an SGD variant with learning rate adaptation. The `lr` + used for each weight is computed from estimates of first- and second-order + moments of the gradients (using suitable exponential moving averages). + + References: + Kingma et al, 2014: https://arxiv.org/abs/1412.6980 + + Args: + lr: + This is a fixed global scaling factor. + b1: + The exponential decay rate to track the first moment of past gradients. + b2: + The exponential decay rate to track the second moment of past gradients. + eps: + A small constant applied to denominator outside of the square root + (as in the Adam paper) to avoid dividing by zero when rescaling. + eps_root: (default `0`) + A small constant applied to denominator inside the square root (as + in RMSProp), to avoid dividing by zero when rescaling. This is needed + for example when computing (meta-)gradients through Adam. + moment_requires_grad: (default `False`) + If True the momentums will be created with flag `requires_grad=True`, + this flag is often used in Meta Learning algorithms. + use_accelerated_op: (default `False`) + If True use our implemented fused operator. + + Returns: + The corresponding `GradientTransformation` instance. + """ + + adam_inst = transform.scale_by_accelerated_adam if use_accelerated_op else transform.scale_by_adam + return combine.chain( + adam_inst( + b1=b1, b2=b2, eps=eps, eps_root=eps_root, moment_requires_grad=moment_requires_grad + ), + _scale_by_lr(lr), + ) + + +def sgd( + lr: ScalarOrSchedule, + momentum: Optional[float] = None, + nesterov: bool = False, + moment_requires_grad: bool = False, +) -> base.GradientTransformation: + """A canonical Stochastic Gradient Descent optimiser. 
+ + This implements stochastic gradient descent. It also includes support for + momentum, and nesterov acceleration, as these are standard practice when + using stochastic gradient descent to train deep neural networks. + + References: + Sutskever et al, 2013: http://proceedings.mlr.press/v28/sutskever13.pdf + + Args: + lr: + This is a fixed global scaling factor. + momentum: (default `None`) + The `decay` rate used by the momentum term, when it is set to `None`, + then momentum is not used at all. + nesterov (default `False`): + Whether nesterov momentum is used. + moment_requires_grad: (default `False`) + If True the momentums will be created with flag `requires_grad=True`, + this flag is often used in Meta-Learning algorithms. + + Returns: + A `GradientTransformation` instance. + """ + + return combine.chain( + ( + transform.trace( + decay=momentum, nesterov=nesterov, moment_requires_grad=moment_requires_grad + ) if momentum is not None else base.identity() + ), _scale_by_lr(lr) + ) + + +def rmsprop( + lr: ScalarOrSchedule, + decay: float = 0.9, + eps: float = 1e-8, + initial_scale: float = 0., + centered: bool = False, + momentum: Optional[float] = None, + nesterov: bool = False +) -> base.GradientTransformation: + """A flexible RMSProp optimizer. + RMSProp is an SGD variant with learning rate adaptation. The `learning_rate` + used for each weight is scaled by a suitable estimate of the magnitude of the + gradients on previous steps. Several variants of RMSProp can be found + in the literature. This alias provides an easy to configure RMSProp + optimizer that can be used to switch between several of these variants. + + References: + Tieleman and Hinton, 2012: http://www.cs.toronto.edu/~hinton/coursera/lecture6/lec6.pdf + Graves, 2013: https://arxiv.org/abs/1308.0850 + + Args: + learning_rate: + This is a fixed global scaling factor. + decay: + The decay used to track the magnitude of previous gradients. + eps: + A small numerical constant to avoid dividing by zero when rescaling. + initial_scale: (default `0.`) + Initialization of accumulators tracking the magnitude of previous + updates. PyTorch uses `0`, TF1 uses `1`. When reproducing results + from a paper, verify the value used by the authors. + centered: (default `False`) + Whether the second moment or the variance of the past gradients is + used to rescale the latest gradients. + momentum: (default `None`) + The `decay` rate used by the momentum term, when it is set to `None`, + then momentum is not used at all. + nesterov (default `False`): + Whether nesterov momentum is used. + + Returns: + The corresponding `GradientTransformation` instance. + """ + + if centered: + return combine.chain( + transform.scale_by_stddev(decay=decay, eps=eps, initial_scale=initial_scale), + _scale_by_lr(lr), ( + transform.trace(decay=momentum, nesterov=nesterov) + if momentum is not None else base.identity() + ) + ) + return combine.chain( + transform.scale_by_rms(decay=decay, eps=eps, initial_scale=initial_scale), _scale_by_lr(lr), + ( + transform.trace(decay=momentum, nesterov=nesterov) + if momentum is not None else base.identity() + ) + ) diff --git a/torchopt/_src/base.py b/torchopt/_src/base.py new file mode 100644 index 00000000..03cd0b97 --- /dev/null +++ b/torchopt/_src/base.py @@ -0,0 +1,151 @@ +# Copyright 2022 MetaOPT Team. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
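As the alias bodies above make explicit, each optimizer is just a `chain` of gradient transformations. Under that reading, `adam(lr)` with default arguments should be equivalent to the manual composition below; this is a sketch of the equivalence the code implies, not an additional API:

from torchopt._src import combine, transform

lr = 1e-3

manual_adam = combine.chain(
    transform.scale_by_adam(
        b1=0.9, b2=0.999, eps=1e-8, eps_root=0.0, moment_requires_grad=False
    ),
    transform.scale(-lr),  # _scale_by_lr flips the sign so that updates descend
)
# manual_adam.init(params) / manual_adam.update(grads, state) should behave
# like torchopt.adam(lr) with the default hyper-parameters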
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+# This file is modified from:
+# https://github.com/deepmind/optax/blob/master/optax/_src/base.py
+# ==============================================================================
+# Copyright 2019 DeepMind Technologies Limited. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+from typing import Callable, NamedTuple, Tuple
+
+import typing_extensions
+
+from torchopt._src import typing
+
+OptState = typing.TensorTree  # States are arbitrary nests of `torch.Tensor`.
+# Parameters are arbitrary nests of `torch.Tensor`.
+Params = typing.TensorTree
+Updates = Params  # Gradient updates are of the same type as parameters.
+
+Schedule = Callable[[typing.Numeric], typing.Numeric]
+
+
+class EmptyState(NamedTuple):
+    """An empty state for the simplest stateless transformations."""
+
+
+class TransformInitFn(typing_extensions.Protocol):
+    """A callable type for the `init` step of a `GradientTransformation`.
+
+    The `init` step takes a tree of `params` and uses these to construct an
+    arbitrary structured initial `state` for the gradient transformation. This
+    may hold statistics of the past updates or any other non-static information.
+    """
+
+    def __call__(self, params: Params) -> OptState:
+        """The `init` function.
+
+        Args:
+            params:
+                The initial value of the parameters.
+
+        Returns:
+            The initial state of the gradient transformation.
+        """
+        ...
+
+
+class TransformUpdateFn(typing_extensions.Protocol):
+    """A callable type for the `update` step of a `GradientTransformation`.
+
+    The `update` step takes a tree of candidate parameter `updates` (e.g. their
+    gradient with respect to some loss), an arbitrary structured `state`, and an
+    `inplace` flag indicating whether the updates and state may be modified
+    in place.
+    """
+
+    def __call__(self,
+                 updates: Updates,
+                 state: OptState,
+                 inplace: bool = True) -> Tuple[Updates, OptState]:
+        """The `update` function.
+
+        Args:
+            updates:
+                A tree of candidate updates.
+            state:
+                The state of the gradient transformation.
+            inplace: (optional)
+                If true, modify updates and state using inplace operations.
+
+        Returns:
+            The transformed updates, and the updated state.
+        """
+        ...
+
+
+class GradientTransformation(NamedTuple):
+    """A pair of pure functions implementing a gradient transformation.
+
+    TorchOpt optimizers are all implemented as _gradient transformations_ like
+    Optax. A gradient transformation is defined to be a pair of pure functions,
+    which are combined together in a `NamedTuple` so that they can be referred
+    to by name.
+
+    Since gradient transformations do not contain any internal state, all stateful
+    optimizer properties (such as the current step count when using optimizer
+    schedules, or momentum values) are passed through gradient transformations by
+    using the optimizer _state_ pytree. Each time a gradient transformation is
+    applied, the state is computed and returned, ready to be passed to the next
+    call to the gradient transformation.
+
+    Attributes:
+        init:
+            A pure function which, when called with an example instance of the
+            parameters whose gradients will be transformed, returns a pytree
+            containing the initial value for the optimizer state.
+        update:
+            A pure function which takes as input a pytree of updates (with the
+            same tree structure as the original params pytree passed to init),
+            the previous optimizer state (which may have been initialized using
+            the init function), and optionally the inplace flag. The update
+            function then returns the computed gradient updates and an updated
+            optimizer state. If the inplace flag is true, the output results are
+            the same instance as the input.
+    """
+
+    init: TransformInitFn
+    update: TransformUpdateFn
+
+
+def identity() -> GradientTransformation:
+    """Stateless identity transformation that leaves input gradients untouched.
+
+    This function passes through the *gradient updates* unchanged.
+
+    Returns:
+        An (init_fn, update_fn) tuple.
+    """
+
+    def init_fn(_):
+        return EmptyState()
+
+    def update_fn(updates, state, inplace=False):
+        return updates, state
+
+    return GradientTransformation(init_fn, update_fn)
diff --git a/torchopt/_src/clip.py b/torchopt/_src/clip.py
new file mode 100644
index 00000000..c5da0812
--- /dev/null
+++ b/torchopt/_src/clip.py
@@ -0,0 +1,88 @@
+# Copyright 2022 MetaOPT Team. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+# This file is modified from:
+# https://github.com/pytorch/pytorch/blob/master/torch/nn/utils/clip_grad.py
+# ==============================================================================
+
+import jax
+import torch
+from torch._six import inf
+
+from torchopt._src import base
+
+ClipState = base.EmptyState
+
+
+def clip_grad_norm(
+    max_norm: float,
+    norm_type: float = 2.,
+    error_if_nonfinite: bool = False
+) -> base.GradientTransformation:
+    """Clips gradient norm of an iterable of parameters.
+
+    Args:
+        max_norm:
+            The maximum allowed norm of the gradient updates. Updates with a
+            larger total norm are scaled down so that their norm equals `max_norm`.
+        norm_type: (default `2.`)
+            The type of the used p-norm. Can be `inf` for the infinity norm.
+        error_if_nonfinite: (default `False`)
+            If true, an error is raised if the total norm of the gradients is
+            `nan`, `inf`, or `-inf`.
+
+    Returns:
+        An (init_fn, update_fn) tuple.
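+
+    Example:
+        An illustrative sketch of the init/update cycle (`params` and `grads`
+        are placeholders for matching tuples of parameter tensors and their
+        gradients, not part of this API):
+
+            impl = clip_grad_norm(max_norm=1.0)
+            state = impl.init(params)
+            grads, state = impl.update(grads, state)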
+    """
+
+    def init_fn(params):
+        del params
+        return ClipState()
+
+    def update_fn(updates, state, inplace=True):
+        available_updates = []
+        for g in updates:
+            if g is not None:
+                available_updates.append(g)
+        if len(available_updates) == 0:
+            # Nothing to clip: pass the updates and state through unchanged.
+            return updates, state
+        device = available_updates[0].device
+        with torch.no_grad():
+            if norm_type == inf:
+                norms = [p.abs().max().to(device) for p in available_updates]
+                total_norm = norms[0] if len(norms) == 1 else torch.max(torch.stack(norms))
+            else:
+                total_norm = torch.norm(
+                    torch.stack([torch.norm(p, norm_type).to(device) for p in available_updates]),
+                    norm_type
+                )
+            if error_if_nonfinite and torch.logical_or(total_norm.isnan(), total_norm.isinf()):
+                raise RuntimeError(
+                    f'The total norm of order {norm_type} for gradients from '
+                    '`parameters` is non-finite, so it cannot be clipped. To disable '
+                    'this error and scale the gradients by the non-finite norm anyway, '
+                    'set `error_if_nonfinite=False`'
+                )
+        clip_coef = max_norm / (float(total_norm) + 1e-6)
+        # Note: multiplying by the clamped coefficient is redundant when it is
+        # clamped to 1, but doing so avoids an `if clip_coef < 1:` conditional,
+        # which can require a CPU <=> device synchronization when the gradients
+        # do not reside in CPU memory.
+        clip_coef_clamped = min(clip_coef, 1.)
+        if inplace:
+
+            def f(g):
+                return g.mul_(clip_coef_clamped) if g is not None else None
+        else:
+
+            def f(g):
+                return g.mul(clip_coef_clamped) if g is not None else None
+
+        new_updates = jax.tree_map(f, updates)
+        return new_updates, state
+
+    return base.GradientTransformation(init_fn, update_fn)
diff --git a/TorchOpt/_src/combine.py b/torchopt/_src/combine.py
similarity index 58%
rename from TorchOpt/_src/combine.py
rename to torchopt/_src/combine.py
index 396a2bc4..081421c9 100644
--- a/TorchOpt/_src/combine.py
+++ b/torchopt/_src/combine.py
@@ -30,39 +30,40 @@
 # limitations under the License.
 # ==============================================================================
 
-from TorchOpt._src import base
+from torchopt._src import base
 
 
 def chain(*args: base.GradientTransformation) -> base.GradientTransformation:
-    """Applies a list of chainable update transformations.
+    """Applies a list of chainable update transformations.
 
-    Given a sequence of chainable transforms, `chain` returns an `init_fn`
-    that constructs a `state` by concatenating the states of the individual
-    transforms, and returns an `update_fn` which chains the update transformations
-    feeding the appropriate state to each.
+    Given a sequence of chainable transforms, `chain` returns an `init_fn`
+    that constructs a `state` by concatenating the states of the individual
+    transforms, and returns an `update_fn` which chains the update transformations
+    feeding the appropriate state to each.
 
-    Args:
-        *args: a sequence of chainable (init_fn, update_fn) tuples.
+    Args:
+        *args:
+            A sequence of chainable (init_fn, update_fn) tuples.
 
-    Returns:
-        A single (init_fn, update_fn) tuple.
-    """
+    Returns:
+        A single (init_fn, update_fn) tuple.
+    """
 
-    init_fns, update_fns = zip(*args)
+    init_fns, update_fns = zip(*args)
 
-    def init_fn(params):
-        return tuple(fn(params) for fn in init_fns)
+    def init_fn(params):
+        return tuple(fn(params) for fn in init_fns)
 
-    def update_fn(updates, state, inplace=True):
-        if len(update_fns) != len(state):
-            raise ValueError(
-                'The number of updates and states has to be the same in '
-                'chain! Make sure you have called init first!'
-            )
-        new_state = []
-        for s, fn in zip(state, update_fns):
-            updates, new_s = fn(updates, s, inplace)
-            new_state.append(new_s)
-        return updates, tuple(new_state)
+    def update_fn(updates, state, inplace=True):
+        if len(update_fns) != len(state):
+            raise ValueError(
+                'The number of updates and states has to be the same in '
+                'chain! Make sure you have called init first!'
+            )
+        new_state = []
+        for s, fn in zip(state, update_fns):
+            updates, new_s = fn(updates, s, inplace)
+            new_state.append(new_s)
+        return updates, tuple(new_state)
 
-    return base.GradientTransformation(init_fn, update_fn)
+    return base.GradientTransformation(init_fn, update_fn)
diff --git a/TorchOpt/_src/hook.py b/torchopt/_src/hook.py
similarity index 56%
rename from TorchOpt/_src/hook.py
rename to torchopt/_src/hook.py
index 93ca980b..77ae1bd0 100644
--- a/TorchOpt/_src/hook.py
+++ b/torchopt/_src/hook.py
@@ -16,31 +16,31 @@
 import jax
 import torch
 
-from TorchOpt._src.base import EmptyState, GradientTransformation
+from torchopt._src.base import EmptyState, GradientTransformation
 
 
 def zero_nan_hook(g: torch.Tensor) -> torch.Tensor:
-    return torch.where(torch.isnan(g), torch.zeros_like(g), g)
+    return torch.where(torch.isnan(g), torch.zeros_like(g), g)
 
 
 def register_hook(hook) -> GradientTransformation:
-    """Stateless identity transformation that leaves input gradients untouched.
+    """Stateless transformation that registers the given hook on the gradient updates.
 
-    This function passes through the *gradient updates* unchanged.
+    This function passes through the *gradient updates* unchanged, but the
+    registered hook fires whenever gradients of the updates are computed
+    during back-propagation.
 
-    Returns:
-        An (init_fn, update_fn) tuple.
-    """
+    Returns:
+        An (init_fn, update_fn) tuple.
+    """
 
-    def init_fn(_):
-        return EmptyState()
+    def init_fn(_):
+        return EmptyState()
 
-    def update_fn(updates, state, inplace=False):
+    def update_fn(updates, state, inplace=False):
 
-        def f(g):
-            return g.register_hook(hook) if g is not None else None
+        def f(g):
+            return g.register_hook(hook) if g is not None else None
 
-        jax.tree_map(f, updates)
-        return updates, state
+        jax.tree_map(f, updates)
+        return updates, state
 
-    return GradientTransformation(init_fn, update_fn)
+    return GradientTransformation(init_fn, update_fn)
diff --git a/torchopt/_src/optimizer/__init__.py b/torchopt/_src/optimizer/__init__.py
new file mode 100644
index 00000000..3d07bcdd
--- /dev/null
+++ b/torchopt/_src/optimizer/__init__.py
@@ -0,0 +1,20 @@
+# Copyright 2022 MetaOPT Team. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+from torchopt._src.optimizer import meta
+from torchopt._src.optimizer.adam import Adam
+from torchopt._src.optimizer.base import Optimizer
+from torchopt._src.optimizer.rmsprop import RMSProp
+from torchopt._src.optimizer.sgd import SGD
diff --git a/torchopt/_src/optimizer/adam.py b/torchopt/_src/optimizer/adam.py
new file mode 100644
index 00000000..1b0ce395
--- /dev/null
+++ b/torchopt/_src/optimizer/adam.py
@@ -0,0 +1,55 @@
+# Copyright 2022 MetaOPT Team. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+from torchopt._src.alias import adam
+from torchopt._src.optimizer.base import Optimizer
+from torchopt._src.typing import ScalarOrSchedule
+
+
+class Adam(Optimizer):
+    """The classic Adam optimizer."""
+
+    def __init__(
+        self,
+        params,
+        lr: ScalarOrSchedule,
+        b1: float = 0.9,
+        b2: float = 0.999,
+        eps: float = 1e-8,
+        eps_root: float = 0.0,
+        use_accelerated_op: bool = False
+    ):
+        """The `init` function.
+
+        Args:
+            params (iterable):
+                An iterable of `torch.Tensor`s. Specifies what Tensors should be
+                optimized.
+            args:
+                Other arguments see `alias.adam`.
+        """
+
+        super().__init__(
+            params,
+            adam(
+                lr=lr,
+                b1=b1,
+                b2=b2,
+                eps=eps,
+                eps_root=eps_root,
+                moment_requires_grad=False,
+                use_accelerated_op=use_accelerated_op
+            )
+        )
diff --git a/torchopt/_src/optimizer/base.py b/torchopt/_src/optimizer/base.py
new file mode 100644
index 00000000..82f5284b
--- /dev/null
+++ b/torchopt/_src/optimizer/base.py
@@ -0,0 +1,127 @@
+# Copyright 2022 MetaOPT Team. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+from typing import Iterable
+
+import jax
+import torch
+
+from torchopt._src.base import GradientTransformation
+from torchopt._src.update import apply_updates
+
+
+class Optimizer(object):
+    """A high-level base class with an API similar to `torch.optim.Optimizer`."""
+
+    def __init__(self, params: Iterable, impl: GradientTransformation):
+        """The `init` function.
+
+        Args:
+            params (iterable):
+                An iterable of `torch.Tensor`s. Specifies what Tensors should be
+                optimized.
+            impl (GradientTransformation):
+                A low-level optimizer function; it could be an optimizer function
+                provided by `alias.py` or a customized `chain` provided by
+                `combine.py`.
+                Note that using `Optimizer(sgd())` or `Optimizer(chain(sgd()))`
+                is equivalent to `SGD`.
+        """
+
+        if not isinstance(params, list):
+            params = list(params)
+        self.impl = impl
+        self.param_groups = []  # type: ignore
+        self.param_tree_groups = []  # type: ignore
+        self.state_groups = []  # type: ignore
+        self.add_param_group(params)
+
+    def zero_grad(self, set_to_none: bool = False):
+        """Sets the gradients of all optimized `torch.Tensor`s to zero.
+
+        The behavior is similar to `torch.optim.Optimizer.zero_grad`.
+
+        Args:
+            set_to_none (bool):
+                Instead of setting to zero, set the grads to None.
+        """
+
+        for group in self.param_groups:
+            if set_to_none:

+                def f(p):
+                    p.grad = None
+                    return None
+
+            else:
+
+                def f(p):
+                    if p.grad is None:
+                        return None
+                    if p.grad.grad_fn is not None:
+                        p.grad.detach_()
+                    else:
+                        p.grad.requires_grad_(False)
+                    p.grad.zero_()
+                    return None
+
+            jax.tree_map(f, group)
+
+    def state_dict(self):
+        """Returns the state of the optimizer."""
+
+        return self.state_groups
+
+    def load_state_dict(self, state_dict):
+        """Loads the optimizer state.
+
+        Args:
+            state_dict (dict):
+                Optimizer state. Should be an object returned from a call to :meth:`state_dict`.
+        """
+
+        self.state_groups = state_dict
+
+    def step(self, closure=None):
+        """Performs a single optimization step (parameter update).
+
+        The behavior is similar to `torch.optim.Optimizer.step`.
+
+        Args:
+            closure (callable, optional):
+                A closure that reevaluates the model and returns the loss.
+        """
+
+        loss = None
+        if closure is not None:
+            with torch.enable_grad():
+                loss = closure()
+
+        def f(p):
+            return p.grad
+
+        for idx, (param, state) in enumerate(zip(self.param_groups, self.state_groups)):
+            grad = jax.tree_map(f, param)
+            # Keep the returned state: transformations such as Adam return a new
+            # state object (e.g. with incremented step counts) rather than
+            # mutating the old one in place.
+            updates, new_state = self.impl.update(grad, state)
+            self.state_groups[idx] = new_state
+            apply_updates(param, updates)
+
+        return loss
+
+    def add_param_group(self, params):
+        params, tree = jax.tree_flatten(params)
+        params = tuple(params)
+        self.param_groups.append(params)
+        self.param_tree_groups.append(tree)
+        self.state_groups.append(self.impl.init(params))
diff --git a/torchopt/_src/optimizer/meta/__init__.py b/torchopt/_src/optimizer/meta/__init__.py
new file mode 100644
index 00000000..86fcb3b3
--- /dev/null
+++ b/torchopt/_src/optimizer/meta/__init__.py
@@ -0,0 +1,19 @@
+# Copyright 2022 MetaOPT Team. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+from torchopt._src.optimizer.meta.adam import MetaAdam
+from torchopt._src.optimizer.meta.base import MetaOptimizer
+from torchopt._src.optimizer.meta.rmsprop import MetaRMSProp
+from torchopt._src.optimizer.meta.sgd import MetaSGD
diff --git a/torchopt/_src/optimizer/meta/adam.py b/torchopt/_src/optimizer/meta/adam.py
new file mode 100644
index 00000000..d699b3b5
--- /dev/null
+++ b/torchopt/_src/optimizer/meta/adam.py
@@ -0,0 +1,56 @@
+# Copyright 2022 MetaOPT Team. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+from torchopt._src.alias import adam
+from torchopt._src.optimizer.meta.base import MetaOptimizer
+from torchopt._src.typing import ScalarOrSchedule
+
+
+class MetaAdam(MetaOptimizer):
+    """The classic Adam optimizer."""
+
+    def __init__(
+        self,
+        net,
+        lr: ScalarOrSchedule,
+        b1: float = 0.9,
+        b2: float = 0.999,
+        eps: float = 1e-8,
+        eps_root: float = 0.0,
+        moment_requires_grad: bool = True,
+        use_accelerated_op: bool = False
+    ):
+        """The `init` function.
+
+        Args:
+            net (nn.Module):
+                A network whose parameters should be optimized.
+            args:
+                Other arguments see `alias.adam`, here we set `moment_requires_grad=True`
+                to make tensors like momentum differentiable.
+        """
+
+        super().__init__(
+            net,
+            adam(
+                lr=lr,
+                b1=b1,
+                b2=b2,
+                eps=eps,
+                eps_root=eps_root,
+                moment_requires_grad=moment_requires_grad,
+                use_accelerated_op=use_accelerated_op
+            )
+        )
diff --git a/torchopt/_src/optimizer/meta/base.py b/torchopt/_src/optimizer/meta/base.py
new file mode 100644
index 00000000..486ff15d
--- /dev/null
+++ b/torchopt/_src/optimizer/meta/base.py
@@ -0,0 +1,94 @@
+# Copyright 2022 MetaOPT Team. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+import jax
+import torch
+import torch.nn as nn
+
+from torchopt._src.base import GradientTransformation
+from torchopt._src.update import apply_updates
+
+
+class MetaOptimizer(object):
+    """A high-level optimizer base class for meta-learning."""
+
+    def __init__(self, net: nn.Module, impl: GradientTransformation):
+        """
+        Args:
+            net (nn.Module):
+                A network whose parameters should be optimized.
+            impl (GradientTransformation):
+                A low-level optimizer function; it could be an optimizer function
+                provided by `alias.py` or a customized `chain` provided by
+                `combine.py`.
+                Note that using `MetaOptimizer(sgd(moment_requires_grad=True))`
+                or `MetaOptimizer(chain(sgd(moment_requires_grad=True)))` is
+                equivalent to `MetaSGD`.
+        """
+
+        self.impl = impl
+        self.param_containers_groups = []  # type: ignore
+        self.state_groups = []  # type: ignore
+
+        self.add_param_group(net)
+
+    def step(self, loss: torch.Tensor):
+        """Compute the gradients of the loss w.r.t. the network parameters and update them.
+
+        The graph of the derivative will be constructed, allowing computation of
+        higher-order derivative products.
+        We use the differentiable optimizer (pass argument `inplace=False`) to scale
+        the gradients and update the network parameters without modifying tensors
+        in place.
+
+        Args:
+            loss (torch.Tensor):
+                The loss that is used to compute the gradients to the network parameters.
+        """
+
+        # step parameter only
+        for idx, (state, param_containers) in enumerate(
+            zip(self.state_groups, self.param_containers_groups)
+        ):
+            flatten_params, containers_tree = jax.tree_util.tree_flatten(param_containers)
+            flatten_params = tuple(flatten_params)
+            grad = torch.autograd.grad(loss, flatten_params, create_graph=True, allow_unused=True)
+            updates, state = self.impl.update(grad, state, False)
+            self.state_groups[idx] = state
+            new_params = apply_updates(flatten_params, updates, inplace=False)
+            unflatten_new_params = containers_tree.unflatten(new_params)
+            for container, unflatten_param in zip(param_containers, unflatten_new_params):
+                container.update(unflatten_param)
+
+    def add_param_group(self, net):
+        from torchopt._src.utils import _extract_container
+
+        net_container = _extract_container(net, with_buffer=False)
+        flatten_param, _ = jax.tree_util.tree_flatten(net_container)
+        flatten_param = tuple(flatten_param)
+        optim_state = self.impl.init(flatten_param)
+        self.state_groups.append(optim_state)
+        self.param_containers_groups.append(net_container)
+
+    def state_dict(self):
+        """Extract the references of the optimizer states.
+
+        Note that the states are references, so any in-place operations will
+        change the states inside `MetaOptimizer` at the same time.
+        """
+
+        out_groups = tuple(group for group in self.state_groups)
+        return out_groups
+
+    def load_state_dict(self, state_dict):
+        self.state_groups = list(group for group in state_dict)
diff --git a/torchopt/_src/optimizer/meta/rmsprop.py b/torchopt/_src/optimizer/meta/rmsprop.py
new file mode 100644
index 00000000..eb742b04
--- /dev/null
+++ b/torchopt/_src/optimizer/meta/rmsprop.py
@@ -0,0 +1,58 @@
+# Copyright 2022 MetaOPT Team. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+from typing import Union
+
+from torchopt._src.alias import rmsprop
+from torchopt._src.optimizer.meta.base import MetaOptimizer
+from torchopt._src.typing import ScalarOrSchedule
+
+
+class MetaRMSProp(MetaOptimizer):
+    """The classic RMSProp optimizer."""
+
+    def __init__(
+        self,
+        net,
+        lr: ScalarOrSchedule,
+        decay: float = 0.9,
+        eps: float = 1e-8,
+        initial_scale: float = 0.,
+        centered: bool = False,
+        momentum: Union[float, None] = None,
+        nesterov: bool = False
+    ):
+        """The `init` function.
+
+        Args:
+            net (nn.Module):
+                A network whose parameters should be optimized.
+            args:
+                Other arguments see `alias.rmsprop`.
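+
+        Example:
+            An illustrative sketch of the inner-loop usage (`net` stands for
+            any `nn.Module` and `compute_loss` for a hypothetical function
+            returning a differentiable scalar loss):
+
+                optim = MetaRMSProp(net, lr=0.1)
+                for _ in range(3):
+                    optim.step(compute_loss(net))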
+        """
+
+        super().__init__(
+            net,
+            rmsprop(
+                lr=lr,
+                decay=decay,
+                eps=eps,
+                initial_scale=initial_scale,
+                centered=centered,
+                momentum=momentum,
+                nesterov=nesterov
+            )
+        )
diff --git a/torchopt/_src/optimizer/meta/sgd.py b/torchopt/_src/optimizer/meta/sgd.py
new file mode 100644
index 00000000..bbd57b46
--- /dev/null
+++ b/torchopt/_src/optimizer/meta/sgd.py
@@ -0,0 +1,54 @@
+# Copyright 2022 MetaOPT Team. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+from typing import Union
+
+import torch.nn as nn
+
+from torchopt._src.alias import sgd
+from torchopt._src.optimizer.meta.base import MetaOptimizer
+from torchopt._src.typing import ScalarOrSchedule
+
+
+class MetaSGD(MetaOptimizer):
+    """A canonical Stochastic Gradient Descent optimizer."""
+
+    def __init__(
+        self,
+        net: nn.Module,
+        lr: ScalarOrSchedule,
+        momentum: Union[float, None] = None,
+        nesterov: bool = False,
+        moment_requires_grad: bool = True
+    ):
+        """The `init` function.
+
+        Args:
+            net (nn.Module):
+                A network whose parameters should be optimized.
+            args:
+                Other arguments see `alias.sgd`, here we set `moment_requires_grad=True`
+                to make tensors like momentum differentiable.
+        """
+
+        super().__init__(
+            net,
+            sgd(
+                lr=lr,
+                momentum=momentum,
+                nesterov=nesterov,
+                moment_requires_grad=moment_requires_grad
+            )
+        )
diff --git a/torchopt/_src/optimizer/rmsprop.py b/torchopt/_src/optimizer/rmsprop.py
new file mode 100644
index 00000000..d1aaf278
--- /dev/null
+++ b/torchopt/_src/optimizer/rmsprop.py
@@ -0,0 +1,58 @@
+# Copyright 2022 MetaOPT Team. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+from typing import Union
+
+from torchopt._src.alias import rmsprop
+from torchopt._src.optimizer.base import Optimizer
+from torchopt._src.typing import ScalarOrSchedule
+
+
+class RMSProp(Optimizer):
+    """An RMSProp optimizer."""
+
+    def __init__(
+        self,
+        params,
+        lr: ScalarOrSchedule,
+        decay: float = 0.9,
+        eps: float = 1e-8,
+        initial_scale: float = 0.,
+        centered: bool = False,
+        momentum: Union[float, None] = None,
+        nesterov: bool = False
+    ):
+        """The `init` function.
+
+        Args:
+            params (iterable):
+                An iterable of `torch.Tensor`s. Specifies what Tensors should be
+                optimized.
+            args:
+                Other arguments see `alias.rmsprop`.
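+
+        Example:
+            A minimal sketch of the standard training-loop usage (`net` and
+            `batch` are placeholders for any model and input):
+
+                optim = RMSProp(net.parameters(), lr=0.01)
+                loss = net(batch).mean()  # placeholder forward pass and loss
+                optim.zero_grad()
+                loss.backward()
+                optim.step()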
+        """
+
+        super().__init__(
+            params,
+            rmsprop(
+                lr=lr,
+                decay=decay,
+                eps=eps,
+                initial_scale=initial_scale,
+                centered=centered,
+                momentum=momentum,
+                nesterov=nesterov
+            )
+        )
diff --git a/torchopt/_src/optimizer/sgd.py b/torchopt/_src/optimizer/sgd.py
new file mode 100644
index 00000000..9e3e1c98
--- /dev/null
+++ b/torchopt/_src/optimizer/sgd.py
@@ -0,0 +1,45 @@
+# Copyright 2022 MetaOPT Team. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+from typing import Union
+
+from torchopt._src.alias import sgd
+from torchopt._src.optimizer.base import Optimizer
+from torchopt._src.typing import ScalarOrSchedule
+
+
+class SGD(Optimizer):
+    """The classic SGD optimizer."""
+
+    def __init__(
+        self,
+        params,
+        lr: ScalarOrSchedule,
+        momentum: Union[float, None] = None,
+        nesterov: bool = False
+    ):
+        """The `init` function.
+
+        Args:
+            params (iterable):
+                An iterable of `torch.Tensor`s. Specifies what Tensors should be
+                optimized.
+            args:
+                Other arguments see `alias.sgd`.
+        """
+
+        super().__init__(
+            params, sgd(lr=lr, momentum=momentum, nesterov=nesterov, moment_requires_grad=False)
+        )
diff --git a/torchopt/_src/schedule.py b/torchopt/_src/schedule.py
new file mode 100644
index 00000000..864afb69
--- /dev/null
+++ b/torchopt/_src/schedule.py
@@ -0,0 +1,111 @@
+# Copyright 2022 MetaOPT Team. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+# This file is modified from:
+# https://github.com/deepmind/optax/blob/master/optax/_src/schedule.py
+# ==============================================================================
+# Copyright 2019 DeepMind Technologies Limited. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+import jax
+import numpy as np
+from absl import logging
+
+from torchopt._src import base, typing
+
+
+def polynomial_schedule(
+    init_value: typing.Scalar,
+    end_value: typing.Scalar,
+    power: typing.Scalar,
+    transition_steps: int,
+    transition_begin: int = 0
+) -> base.Schedule:
+    """Constructs a schedule with polynomial transition from init to end value.
+
+    Args:
+        init_value:
+            Initial value for the scalar to be annealed.
+        end_value:
+            End value of the scalar to be annealed.
+        power:
+            The power of the polynomial used to transition from init to end.
+        transition_steps:
+            Number of steps over which annealing takes place, the scalar starts
+            changing at `transition_begin` steps and completes the transition
+            by `transition_begin + transition_steps` steps.
+            If `transition_steps <= 0`, then the entire annealing process is
+            disabled and the value is held fixed at `init_value`.
+        transition_begin:
+            Must be non-negative. After how many steps to start annealing (before
+            this many steps the scalar value is held fixed at `init_value`).
+
+    Returns:
+        schedule:
+            A function that maps step counts to values.
+    """
+
+    if transition_steps <= 0:
+        logging.info(
+            'A polynomial schedule was set with a non-positive `transition_steps` '
+            'value; this results in a constant schedule with value `init_value`.'
+        )
+        return lambda count: init_value
+
+    if transition_begin < 0:
+        logging.info(
+            'A polynomial schedule was set with a negative `transition_begin` '
+            'value; this will result in `transition_begin` falling back to `0`.'
+        )
+        transition_begin = 0
+
+    def schedule(count):
+
+        def impl(count):
+            count = np.clip(count - transition_begin, 0, transition_steps)
+            frac = 1 - count / transition_steps
+            return (init_value - end_value) * (frac**power) + end_value
+
+        return jax.tree_map(impl, count)
+
+    return schedule
+
+
+# Alias polynomial schedule to linear schedule for convenience.
+def linear_schedule(
+    init_value: typing.Scalar,
+    end_value: typing.Scalar,
+    transition_steps: int,
+    transition_begin: int = 0
+) -> base.Schedule:
+
+    return polynomial_schedule(
+        init_value=init_value,
+        end_value=end_value,
+        power=1,
+        transition_steps=transition_steps,
+        transition_begin=transition_begin
+    )
diff --git a/torchopt/_src/transform.py b/torchopt/_src/transform.py
new file mode 100644
index 00000000..7aef0c84
--- /dev/null
+++ b/torchopt/_src/transform.py
@@ -0,0 +1,472 @@
+# Copyright 2022 MetaOPT Team. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+# This file is modified from:
+# https://github.com/deepmind/optax/blob/master/optax/_src/transform.py
+# ==============================================================================
+# Copyright 2019 DeepMind Technologies Limited. All Rights Reserved.
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +from typing import List, NamedTuple, Tuple, Union + +import jax +import torch + +from torchopt._src import base +from torchopt._src.typing import ScalarOrSchedule, Schedule + +ScaleState = base.EmptyState + + +def inc_count(updates, count: Tuple[int]) -> Tuple[int]: + + def f(c, g): + return c + 1 if g is not None else c + + return jax.tree_map(f, count, updates) + + +def scale(step_size: float) -> base.GradientTransformation: + """Scale updates by some fixed scalar `step_size`. + + Args: + step_size: + A scalar corresponding to a fixed scaling factor for updates. + + Returns: + An (init_fn, update_fn) tuple. + """ + + def init_fn(params): + del params + return ScaleState() + + def update_fn(updates, state, inplace=True): + if inplace: + + def f(g): + return g.mul_(step_size) if g is not None else None + else: + + def f(g): + return g.mul(step_size) if g is not None else None + + updates = jax.tree_map(f, updates) + return updates, state + + return base.GradientTransformation(init_fn, update_fn) + + +class ScaleByScheduleState(NamedTuple): + """Maintains count for scale scheduling.""" + + count: Tuple[int, ...] # type: ignore + + +def scale_by_schedule(step_size_fn: Schedule) -> base.GradientTransformation: + """Scale updates using a custom schedule for the `step_size`. + + Args: + step_size_fn: + A function that takes an update count as input and proposes the + step_size to multiply the updates by. + + Returns: + An (init_fn, update_fn) tuple. 
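+
+    Example:
+        An illustrative sketch, pairing this transformation with
+        `polynomial_schedule` from `torchopt._src.schedule` (`params` and
+        `grads` are placeholders for matching tuples of tensors; the step
+        sizes are negative so the updates descend):
+
+            step_size_fn = polynomial_schedule(
+                init_value=-1e-2, end_value=-1e-3, power=1, transition_steps=100
+            )
+            impl = scale_by_schedule(step_size_fn)
+            state = impl.init(params)
+            updates, state = impl.update(grads, state, inplace=False)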
+ """ + + def init_fn(params): + return ScaleByScheduleState(count=tuple(0 for _ in range(len(params)))) + + def update_fn(updates, state, inplace=True): + step_size = step_size_fn(state.count) + if inplace: + updates = jax.tree_map(lambda g, step_size: g.mul_(step_size), updates, step_size) + else: + updates = jax.tree_map(lambda g, step_size: g.mul(step_size), updates, step_size) + return updates, ScaleByScheduleState(count=inc_count(updates, state.count)) + + return base.GradientTransformation(init_fn, update_fn) + + +def _update_moment(updates, moments, decay, order, inplace=True): + """Compute the exponential moving average of the `order`-th moment.""" + + if inplace: + + def f(g, t): + return t.mul_(decay).add_(g**order, alpha=1 - decay) if g is not None else t + else: + + def f(g, t): + return t.mul(decay).add(g**order, alpha=1 - decay) if g is not None else t + + return jax.tree_map(f, updates, moments) + + +def _update_moment_per_elem_norm(updates, moments, decay, order, inplace=True): + """Compute the EMA of the `order`-th moment of the element-wise norm.""" + + if inplace: + + def f(g, t): + return t.mul_(decay).add_(g**order, alpha=1 - decay) if g is not None else t + else: + + def f(g, t): + return t.mul(decay).add(g**order, alpha=1 - decay) if g is not None else t + + return jax.tree_map(f, updates, moments) + + +class ScaleByAdamState(NamedTuple): + """State for the Adam algorithm.""" + + count: Tuple[int, ...] # type: ignore + mu: base.Updates + nu: base.Updates + + +def _bias_correction(moment, decay, count, inplace=True): + """Perform bias correction. This becomes a no-op as count goes to infinity.""" + + if inplace: + + def f(t, c): + return t.div_(1 - decay**c) + else: + + def f(t, c): + return t.div(1 - decay**c) + + return jax.tree_map(f, moment, count) + + +def scale_by_adam( + b1: float = 0.9, + b2: float = 0.999, + eps: float = 1e-8, + eps_root: float = 0.0, + moment_requires_grad: bool = False, +) -> base.GradientTransformation: + """Rescale updates according to the Adam algorithm. + + References: + [Kingma et al, 2014](https://arxiv.org/abs/1412.6980) + + Args: + b1: + Decay rate for the exponentially weighted average of grads. + b2: + Decay rate for the exponentially weighted average of squared grads. + eps: + Term added to the denominator to improve numerical stability. + eps_root: + Term added to the denominator inside the square-root to improve + numerical stability when backpropagating gradients through the rescaling. + moment_requires_grad: + If true, states will be created with flag `requires_grad = True`. + + Returns: + An (init_fn, update_fn) tuple. 
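+
+    Example:
+        A minimal sketch of the init/update cycle (`params` and `grads` are
+        assumed to be matching tuples of tensors):
+
+            impl = scale_by_adam()
+            state = impl.init(params)
+            updates, state = impl.update(grads, state, inplace=False)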
+ """ + + def init_fn(params): + mu = jax.tree_map( # First moment + lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), + params) + nu = jax.tree_map( # Second moment + lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), + params) + return ScaleByAdamState(count=tuple(0 for _ in range(len(mu))), mu=tuple(mu), nu=tuple(nu)) + + def update_fn(updates, state, inplace=True): + mu = _update_moment(updates, state.mu, b1, 1, inplace) + nu = _update_moment_per_elem_norm(updates, state.nu, b2, 2, inplace) + count_inc = inc_count(updates, state.count) + mu_hat = _bias_correction(mu, b1, count_inc, False) + nu_hat = _bias_correction(nu, b2, count_inc, False) + if inplace: + + def f(g, m, v): + return m.div_(torch.sqrt_(v.add_(eps_root)).add_(eps)) if g is not None else None + else: + + def f(g, m, v): + return m.div(torch.sqrt(v.add(eps_root)).add(eps)) if g is not None else None + + updates = jax.tree_map(f, updates, mu_hat, nu_hat) + return updates, ScaleByAdamState(count=count_inc, mu=mu, nu=nu) + + return base.GradientTransformation(init_fn, update_fn) + + +def scale_by_accelerated_adam( + b1: float = 0.9, + b2: float = 0.999, + eps: float = 1e-8, + eps_root: float = 0.0, + moment_requires_grad: bool = False, +) -> base.GradientTransformation: + """Rescale updates according to the Adam algorithm. + + This function is accelerated by using some fused accelerated operators. + + References: + [Kingma et al, 2014](https://arxiv.org/abs/1412.6980) + + Args: + b1: + Decay rate for the exponentially weighted average of grads. + b2: + Decay rate for the exponentially weighted average of squared grads. + eps: + Term added to the denominator to improve numerical stability. + eps_root: + Term added to the denominator inside the square-root to improve + numerical stability when backpropagating gradients through the rescaling. + moment_requires_grad: + If true, states will be created with flag `requires_grad = True`. + + Returns: + An (init_fn, update_fn) tuple. + """ + + from .accelerated_op import AdamOp + + def init_fn(params): + mu = jax.tree_map( # First moment + lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), + params) + nu = jax.tree_map( # Second moment + lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), + params) + return ScaleByAdamState(count=tuple(0 for _ in range(len(params))), mu=mu, nu=nu) + + def update_fn(updates, state, inplace=True): + count_inc = inc_count(updates, state.count) + op = AdamOp(b1, b2, eps, eps_root, inplace) + out = jax.tree_map(op, state.mu, state.nu, updates, count_inc) + new_mus, new_nus, new_updates = [], [], [] + for new_mu, new_nu, new_update in out: + new_mus.append(new_mu) + new_nus.append(new_nu) + new_updates.append(new_update) + return tuple(new_updates), ScaleByAdamState( + count=count_inc, mu=tuple(new_mus), nu=tuple(new_nus) + ) + + return base.GradientTransformation(init_fn, update_fn) + + +class TraceState(NamedTuple): + """Holds an aggregation of past updates.""" + + trace: base.Params + + +def trace( + decay: float, + nesterov: bool = False, + moment_requires_grad: bool = False, +) -> base.GradientTransformation: + """Compute a trace of past updates. + + Note: `trace` and `ema` have very similar but distinct updates; + `trace = decay * trace + t`, while `ema = decay * ema + (1-decay) * t`. + Both are frequently found in the optimisation literature. + + Args: + decay: + The decay rate for the trace of past updates. + nesterov: + Whether to use Nesterov momentum. 
+        moment_requires_grad:
+            If true, states will be created with flag `requires_grad = True`.
+
+    Returns:
+        An (init_fn, update_fn) tuple.
+    """
+
+    def init_fn(params):
+        if decay == 0.:
+            return TraceState(trace=())
+        else:
+            return TraceState(
+                trace=jax.tree_map(
+                    lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), params
+                )
+            )
+
+    def update_fn(updates, state, inplace=True):
+        if nesterov:
+            if inplace:
+
+                def f1(g, t):
+                    return t.copy_(g.add(t, alpha=decay))
+
+                def f2(g, t):
+                    return g.add_(t, alpha=decay)
+
+                new_trace = jax.tree_map(f1, updates, state.trace)
+                updates = jax.tree_map(f2, updates, new_trace)
+            else:
+
+                def f(g, t):
+                    return g.add(t, alpha=decay)
+
+                new_trace = jax.tree_map(f, updates, state.trace)
+                updates = jax.tree_map(f, updates, new_trace)
+        else:
+            if inplace:
+
+                def f(g, t):
+                    return g.add_(t, alpha=decay)
+
+                updates = jax.tree_map(f, updates, state.trace)
+                # `state.trace` is a pytree (tuple of tensors), so the copy must
+                # be applied leaf-wise rather than on the container itself.
+                new_trace = jax.tree_map(lambda t, g: t.copy_(g), state.trace, updates)
+            else:
+
+                def f(g, t):
+                    return g.add(t, alpha=decay)
+
+                updates = jax.tree_map(f, updates, state.trace)
+                new_trace = updates
+
+        return updates, TraceState(trace=new_trace)
+
+    return base.GradientTransformation(init_fn, update_fn)
+
+
+class ScaleByRmsState(NamedTuple):
+    """State for exponential root mean-squared (RMS)-normalized updates."""
+
+    nu: base.Updates
+
+
+def scale_by_rms(
+    decay: float = 0.9,
+    eps: float = 1e-8,
+    initial_scale: float = 0.
+) -> base.GradientTransformation:
+    """Rescale updates by the root of the exp. moving avg of the square.
+
+    References:
+        [Hinton](www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf)
+
+    Args:
+        decay:
+            Decay rate for the exponentially weighted average of squared grads.
+        eps:
+            Term added to the denominator to improve numerical stability.
+        initial_scale:
+            Initial value for the second moment.
+
+    Returns:
+        An (init_fn, update_fn) tuple.
+    """
+
+    def init_fn(params):
+        nu = jax.tree_map(lambda n: torch.full_like(n, initial_scale), params)  # second moment
+        return ScaleByRmsState(nu=nu)
+
+    def update_fn(updates, state, inplace=True):
+        nu = _update_moment_per_elem_norm(updates, state.nu, decay, 2, inplace)
+        if inplace:
+
+            def f(g, n):
+                return g.mul_(torch.rsqrt(n.add(eps)))
+        else:
+
+            def f(g, n):
+                return g.mul(torch.rsqrt(n.add(eps)))
+
+        # The following is the PyTorch-style implementation (eps outside the sqrt):
+        # if inplace:
+        #     def f(g, n): return g.div_(torch.sqrt_(n).add_(eps))
+        # else:
+        #     def f(g, n): return g.div(torch.sqrt(n).add(eps))
+        updates = jax.tree_map(f, updates, nu)
+        return updates, ScaleByRmsState(nu=nu)
+
+    return base.GradientTransformation(init_fn, update_fn)
+
+
+class ScaleByRStdDevState(NamedTuple):
+    """State for centered exponential moving average of squares of updates."""
+
+    mu: base.Updates
+    nu: base.Updates
+
+
+def scale_by_stddev(
+    decay: float = 0.9,
+    eps: float = 1e-8,
+    initial_scale: float = 0.
+) -> base.GradientTransformation:
+    """Rescale updates by the root of the centered exp. moving average of squares.
+
+    References:
+        [Hinton](www.cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf)
+
+    Args:
+        decay:
+            Decay rate for the exponentially weighted average of squared grads.
+        eps:
+            Term added to the denominator to improve numerical stability.
+        initial_scale:
+            Initial value for the second moment.
+
+    Returns:
+        An (init_fn, update_fn) tuple.
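+
+    Example:
+        An illustrative sketch of a centered-RMSProp step built from this
+        transformation via `combine.chain` (`params` and `grads` are assumed
+        to be matching tuples of tensors):
+
+            impl = chain(scale_by_stddev(decay=0.9), scale(-0.01))
+            state = impl.init(params)
+            updates, state = impl.update(grads, state, inplace=False)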
+    """
+
+    def init_fn(params):
+        mu = jax.tree_map(torch.zeros_like, params)  # First moment
+        nu = jax.tree_map(lambda n: torch.full_like(n, initial_scale), params)  # second moment
+        return ScaleByRStdDevState(mu=mu, nu=nu)
+
+    def update_fn(updates, state, inplace=True):
+        mu = _update_moment(updates, state.mu, decay, 1, inplace)
+        nu = _update_moment_per_elem_norm(updates, state.nu, decay, 2, inplace)
+        if inplace:
+
+            def f(g, m, n):
+                return g.mul_(torch.rsqrt(n.sub(m**2).add(eps)))
+        else:
+
+            def f(g, m, n):
+                return g.mul(torch.rsqrt(n.sub(m**2).add(eps)))
+
+        # The following is the PyTorch-style implementation (eps outside the sqrt):
+        # if inplace:
+        #     def f(g, m, n): return g.div_(torch.sqrt_(n.sub_(m ** 2)).add(eps))
+        # else:
+        #     def f(g, m, n): return g.div(torch.sqrt(n.sub(m ** 2)).add(eps))
+        updates = jax.tree_map(f, updates, mu, nu)
+        return updates, ScaleByRStdDevState(mu=mu, nu=nu)
+
+    return base.GradientTransformation(init_fn, update_fn)
diff --git a/TorchOpt/_src/pytypes.py b/torchopt/_src/typing.py
similarity index 100%
rename from TorchOpt/_src/pytypes.py
rename to torchopt/_src/typing.py
diff --git a/TorchOpt/_src/update.py b/torchopt/_src/update.py
similarity index 54%
rename from TorchOpt/_src/update.py
rename to torchopt/_src/update.py
index a77adf7e..2d17adb7 100644
--- a/TorchOpt/_src/update.py
+++ b/torchopt/_src/update.py
@@ -32,41 +32,42 @@
 
 import jax
 
-from TorchOpt._src import base
+from torchopt._src import base
 
 
-def apply_updates(
-    params: base.Params,
-    updates: base.Updates,
-    inplace: bool = True
-) -> base.Params:
-    """Applies an update to the corresponding parameters.
+def apply_updates(params: base.Params, updates: base.Updates, inplace: bool = True) -> base.Params:
+    """Applies an update to the corresponding parameters.
 
-    This is a utility functions that applies an update to a set of parameters, and
-    then returns the updated parameters to the caller. As an example, the update
-    may be a gradient transformed by a sequence of`GradientTransformations`. This
-    function is exposed for convenience, but it just adds updates and parameters;
-    you may also apply updates to parameters manually, using `tree_map`
-    (e.g. if you want to manipulate updates in custom ways before applying them).
+    This is a utility function that applies an update to a set of parameters,
+    and then returns the updated parameters to the caller. As an example, the
+    update may be a gradient transformed by a sequence of `GradientTransformation`s.
+    This function is exposed for convenience, but it just adds updates and parameters;
+    you may also apply updates to parameters manually, using `tree_map` (e.g. if
+    you want to manipulate updates in custom ways before applying them).
 
-    Args:
-        params: a tree of parameters.
-        updates: a tree of updates, the tree structure and the shape of the leaf
-            nodes must match that of `params`.
-        inplace: if True, will update params in a inplace manner.
+    Args:
+        params:
+            A tree of parameters.
+        updates:
+            A tree of updates, the tree structure and the shape of the leaf
+            nodes must match that of `params`.
+        inplace:
+            If true, will update params in an in-place manner.
 
-    Returns:
-        Updated parameters, with same structure, shape and type as `params`.
-    """
-    if inplace:
+    Returns:
+        Updated parameters, with same structure, shape and type as `params`.
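+
+    Example:
+        A minimal sketch (assumes `params` and `updates` are matching tuples
+        of tensors, e.g. produced by a `GradientTransformation`):
+
+            new_params = apply_updates(params, updates, inplace=False)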
+    """
+
+    if inplace:
 
-    def f(p, u):
-        if u is not None:
-            p.data.add_(u)
-        return p
-    else:
+        def f(p, u):
+            if u is not None:
+                p.data.add_(u)
+            return p
 
-    def f(p, u):
-        return p.add(u) if u is not None else p
+    else:
+
+        def f(p, u):
+            return p.add(u) if u is not None else p
 
-    return jax.tree_map(f, params, updates)
+    return jax.tree_map(f, params, updates)
diff --git a/torchopt/_src/utils.py b/torchopt/_src/utils.py
new file mode 100644
index 00000000..79921916
--- /dev/null
+++ b/torchopt/_src/utils.py
@@ -0,0 +1,197 @@
+# Copyright 2022 MetaOPT Team. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+from typing import Dict, List, NamedTuple, Union
+
+import jax
+import torch
+import torch.nn as nn
+
+from torchopt._src.optimizer.meta import MetaOptimizer
+
+
+class _ModuleState(NamedTuple):
+    params: List[Dict]
+    visual_contents: Union[None, Dict] = None
+
+
+# mypy: ignore-errors
+def stop_gradient(target):
+    """Stop the gradient for the input object.
+
+    Since a tensor uses `grad_fn` to connect itself with the previous computation
+    graph, the back-propagated gradient will flow over the tensor and continue to
+    flow to the tensors that are connected by `grad_fn`. Some algorithms require
+    manually detaching tensors from the computation graph.
+
+    Note that the `stop_gradient` operation is in-place.
+
+    Args:
+        target:
+            The target to be detached from the computation graph; it could be an
+            `nn.Module`, a `torchopt.MetaOptimizer`, the state of a
+            `torchopt.MetaOptimizer`, or just a plain list of tensors.
+    """
+
+    def f(obj):
+        if isinstance(obj, torch.Tensor):
+            requires_grad = obj.requires_grad
+            obj.detach_().requires_grad_(requires_grad)
+        return None
+
+    if isinstance(target, _ModuleState):
+        true_target = target.params
+    elif isinstance(target, nn.Module):
+        true_target = tuple(target.parameters())
+    elif isinstance(target, MetaOptimizer):
+        true_target, _ = jax.tree_flatten(target.state_dict())
+    else:
+        true_target = target
+
+    jax.tree_map(f, true_target)
+
+
+def extract_state_dict(mod, copy=False, *, with_buffer=True, enable_visual=False, visual_prefix=''):
+    """Extract target state.
+
+    Note that the extracted state is a reference, which means any in-place operation
+    will affect the target that the state is extracted from.
+
+    Args:
+        mod:
+            It could be an `nn.Module` or a `torchopt.MetaOptimizer`.
+        copy: (default `False`)
+            If true, the extracted state is a detached clone of the original
+            tensors instead of a reference.
+        with_buffer:
+            Extract buffers together with parameters; this argument is only used
+            if the input target is an `nn.Module`.
+        enable_visual:
+            Add additional annotations, which could be used in computation graph
+            visualization. Currently, this flag only has effect on `nn.Module` but
+            we will support `torchopt.MetaOptimizer` later.
+        visual_prefix:
+            Prefix for the visualization annotations.
+
+    Returns:
+        State extracted from the input object.
+    """
+
+    if isinstance(mod, nn.Module):
+        if enable_visual:
+            visual_contents = {}
+
+            for k, v in mod.named_parameters():
+                if v.grad_fn is not None:
+                    visual_contents.update({v.grad_fn: (visual_prefix + k, v)})
+                else:
+                    visual_contents.update({v: visual_prefix + k})
+        else:
+            visual_contents = None
+
+        params = []
+
+        def get_v(v):
+            if copy:
+                requires_grad = v.requires_grad
+                return v.clone().detach_().requires_grad_(requires_grad)
+            else:
+                return v
+
+        def _update(term):
+            if len(term) != 0:
+                params.append({k: get_v(v) for k, v in term.items()})
+
+        _update(mod._parameters)
+        if with_buffer:
+            _update(mod._buffers)
+        for module in mod.modules():
+            if module is mod:
+                continue
+            _update(module._parameters)
+            if with_buffer:
+                _update(module._buffers)
+        return _ModuleState(params=tuple(params), visual_contents=visual_contents)
+    elif isinstance(mod, MetaOptimizer):
+        state = mod.state_dict()
+        if copy:
+            flatten_state, state_tree = jax.tree_flatten(state)
+
+            def get_v(v):
+                if not isinstance(v, torch.Tensor):
+                    return v
+                requires_grad = v.requires_grad
+                return v.clone().detach_().requires_grad_(requires_grad)
+
+            flatten_state = jax.tree_map(get_v, flatten_state)
+            return state_tree.unflatten(flatten_state)
+        else:
+            return state
+
+    else:
+        raise RuntimeError(f"Unexpected class of {mod}")
+
+
+def _extract_container(mod, with_buffer=True):
+    if isinstance(mod, nn.Module):
+        containers = []
+
+        def _update(term):
+            if len(term) != 0:
+                containers.append(term)
+
+        _update(mod._parameters)
+        if with_buffer:
+            _update(mod._buffers)
+        for module in mod.modules():
+            if module is mod:
+                continue
+            _update(module._parameters)
+            if with_buffer:
+                _update(module._buffers)
+        return tuple(containers)
+    else:
+        raise RuntimeError(f"Unexpected class of {mod}")
+
+
+def recover_state_dict(mod, state):
+    """Recover state.
+
+    This function is the counterpart of `extract_state_dict`.
+
+    Note that the recovering process is not in-place, so the tensors of the object
+    will not be modified.
+
+    Args:
+        mod:
+            The target to be recovered.
+        state:
+            The state to recover from, as returned by `extract_state_dict`.
+    """
+
+    if isinstance(mod, nn.Module):
+        target_container = _extract_container(mod)
+        for target, source in zip(target_container, state.params):
+            target.update(source)
+    elif isinstance(mod, MetaOptimizer):
+        mod.load_state_dict(state)
+    else:
+        raise RuntimeError(f"Unexpected class of {mod}")
diff --git a/torchopt/_src/visual.py b/torchopt/_src/visual.py
new file mode 100644
index 00000000..696a1f77
--- /dev/null
+++ b/torchopt/_src/visual.py
@@ -0,0 +1,238 @@
+# Copyright 2022 MetaOPT Team. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+# This file is modified from:
+# https://github.com/szagoruyko/pytorchviz/blob/master/torchviz/dot.py
+# ==============================================================================
+
+import warnings
+from collections import namedtuple
+from distutils.version import LooseVersion
+from typing import Dict, Generator
+
+import torch
+from graphviz import Digraph
+
+Node = namedtuple('Node', ('name', 'inputs', 'attr', 'op'))
+
+# Saved attrs for grad_fn (incl. saved variables) begin with `._saved_*`
+SAVED_PREFIX = "_saved_"
+
+
+def get_fn_name(fn, show_attrs, max_attr_chars):
+    name = str(type(fn).__name__)
+    if not show_attrs:
+        return name
+    attrs = dict()
+    for attr in dir(fn):
+        if not attr.startswith(SAVED_PREFIX):
+            continue
+        val = getattr(fn, attr)
+        attr = attr[len(SAVED_PREFIX):]
+        if torch.is_tensor(val):
+            attrs[attr] = "[saved tensor]"
+        elif isinstance(val, tuple) and any(torch.is_tensor(t) for t in val):
+            attrs[attr] = "[saved tensors]"
+        else:
+            attrs[attr] = str(val)
+    if not attrs:
+        return name
+    max_attr_chars = max(max_attr_chars, 3)
+    col1width = max(len(k) for k in attrs.keys())
+    col2width = min(max(len(str(v)) for v in attrs.values()), max_attr_chars)
+    sep = "-" * max(col1width + col2width + 2, len(name))
+    attrstr = '%-' + str(col1width) + 's: %' + str(col2width) + 's'
+
+    def truncate(s):
+        return s[:col2width - 3] + "..." if len(s) > col2width else s
+
+    params = '\n'.join(attrstr % (k, truncate(str(v))) for (k, v) in attrs.items())
+    return name + '\n' + sep + '\n' + params
+
+
+# mypy: ignore-errors
+def make_dot(var, params=None, show_attrs=False, show_saved=False, max_attr_chars=50):
+    """Produces a Graphviz representation of the PyTorch autograd graph.
+
+    If a node represents a backward function, it is gray. Otherwise, the node
+    represents a tensor and is either blue, orange, or green:
+    - Blue: reachable leaf tensors that require grad (tensors whose `.grad`
+        fields will be populated during `.backward()`)
+    - Orange: saved tensors of custom autograd functions as well as those
+        saved by built-in backward nodes
+    - Green: tensors passed in as outputs
+    - Dark green: if any output is a view, we represent its base tensor with
+        a dark green node.
+
+    Args:
+        var:
+            Output tensor.
+        params: ([dict of (name, tensor) or state_dict])
+            Parameters to add names to nodes that require grad.
+        show_attrs:
+            Whether to display non-tensor attributes of backward nodes
+            (Requires PyTorch version >= 1.9)
+        show_saved:
+            Whether to display saved tensor nodes that are not saved by custom
+            autograd functions. Saved tensor nodes for custom functions, if
+            present, are always displayed. (Requires PyTorch version >= 1.9)
+        max_attr_chars:
+            If show_attrs is `True`, sets max number of characters
+            to display for any given attribute.
+    """
+
+    if LooseVersion(torch.__version__) < LooseVersion("1.9") and \
+            (show_attrs or show_saved):
+        warnings.warn(
+            "make_dot: showing grad_fn attributes and saved variables"
+            " requires PyTorch version >= 1.9. (This does NOT apply to"
+            " saved tensors saved by custom autograd functions.)"
+        )
+
+    param_map = {}
+
+    if params is not None:
+        from torchopt._src.utils import _ModuleState
+
+        if isinstance(params, _ModuleState):
+            param_map.update(params.visual_contents)
+        elif isinstance(params, Dict):
+            param_map.update({v: k for k, v in params.items()})
+        elif isinstance(params, Generator):
+            param_map.update({v: k for k, v in params})
+        else:
+            for param in params:
+                if isinstance(param, _ModuleState):
+                    param_map.update(param.visual_contents)
+                elif isinstance(param, Generator):
+                    param_map.update({v: k for k, v in param})
+                else:
+                    param_map.update({v: k for k, v in param.items()})
+
+    node_attr = dict(
+        style='filled',
+        shape='box',
+        align='left',
+        fontsize='10',
+        ranksep='0.1',
+        height='0.2',
+        fontname='monospace'
+    )
+    dot = Digraph(node_attr=node_attr, graph_attr=dict(size="12,12"))
+    seen = set()
+
+    def size_to_str(size):
+        return '(' + (', ').join(['%d' % v for v in size]) + ')'
+
+    def get_var_name(var, name=None):
+        if not name:
+            name = param_map[var] if var in param_map else ''
+        return '%s\n %s' % (name, size_to_str(var.size()))
+
+    def get_var_name_with_flag(var):
+        if var in param_map:
+            return '%s\n %s' % (param_map[var][0], size_to_str(param_map[var][1].size()))
+        else:
+            return None
+
+    def add_nodes(fn):
+        assert not torch.is_tensor(fn)
+        if fn in seen:
+            return
+        seen.add(fn)
+
+        if show_saved:
+            for attr in dir(fn):
+                if not attr.startswith(SAVED_PREFIX):
+                    continue
+                val = getattr(fn, attr)
+                seen.add(val)
+                attr = attr[len(SAVED_PREFIX):]
+                if torch.is_tensor(val):
+                    dot.edge(str(id(fn)), str(id(val)), dir="none")
+                    dot.node(str(id(val)), get_var_name(val, attr), fillcolor='orange')
+                if isinstance(val, tuple):
+                    for i, t in enumerate(val):
+                        if torch.is_tensor(t):
+                            name = attr + '[%s]' % str(i)
+                            dot.edge(str(id(fn)), str(id(t)), dir="none")
+                            dot.node(str(id(t)), get_var_name(t, name), fillcolor='orange')
+
+        if hasattr(fn, 'variable'):
+            # if grad_accumulator, add the node for `.variable`
+            var = fn.variable
+            seen.add(var)
+            dot.node(str(id(var)), get_var_name(var), fillcolor='lightblue')
+            dot.edge(str(id(var)), str(id(fn)))
+
+        fn_name = get_fn_name(fn, show_attrs, max_attr_chars)
+        fn_fillcolor = None
+        var_name = get_var_name_with_flag(fn)
+        if var_name is not None:
+            fn_name = '%s\n %s' % (fn_name, var_name)
+            fn_fillcolor = 'lightblue'
+
+        # add the node for this grad_fn
+        dot.node(str(id(fn)), fn_name, fillcolor=fn_fillcolor)
+
+        # recurse
+        if hasattr(fn, 'next_functions'):
+            for u in fn.next_functions:
+                if u[0] is not None:
+                    dot.edge(str(id(u[0])), str(id(fn)))
+                    add_nodes(u[0])
+
+        # note: this used to show .saved_tensors in pytorch0.2, but stopped
+        # working as it was moved to ATen and Variable-Tensor merged
+        # also note that this still works for custom autograd functions
+        if hasattr(fn, 'saved_tensors'):
+            for t in fn.saved_tensors:
+                dot.edge(str(id(t)), str(id(fn)))
+                dot.node(str(id(t)), get_var_name(t), fillcolor='orange')
+
+    def add_base_tensor(var, color='darkolivegreen1'):
+        if var in seen:
+            return
+        seen.add(var)
+        dot.node(str(id(var)), get_var_name(var), fillcolor=color)
+        if var.grad_fn:
+            add_nodes(var.grad_fn)
+            dot.edge(str(id(var.grad_fn)), str(id(var)))
+        if var._is_view():
+            add_base_tensor(var._base, color='darkolivegreen3')
+            dot.edge(str(id(var._base)), str(id(var)), style="dotted")
+
+    # handle multiple outputs
+    if isinstance(var, tuple):
+        for v in var:
+            add_base_tensor(v)
+    else:
+        add_base_tensor(var)
+
+    resize_graph(dot)
+
+    return dot
+
+
+def resize_graph(dot, size_per_element=0.15, min_size=12):
+    """Resize the graph according to how much content it contains.
+    Modify the graph in place.
+    """
+
+    # Get the approximate number of nodes and edges
+    num_rows = len(dot.body)
+    content_size = num_rows * size_per_element
+    size = max(min_size, content_size)
+    size_str = str(size) + "," + str(size)
+    dot.graph_attr.update(size=size_str)
diff --git a/tutorials/1_Functional_Optimizer.ipynb b/tutorials/1_Functional_Optimizer.ipynb
old mode 100755
new mode 100644
index 868bb00d..2dff7be4
--- a/tutorials/1_Functional_Optimizer.ipynb
+++ b/tutorials/1_Functional_Optimizer.ipynb
@@ -37,12 +37,12 @@
     "import torch\n",
     "import functorch\n",
     "import torch.autograd\n",
-    "from torch import nn\n",
+    "import torch.nn as nn\n",
     "import optax\n",
     "import jax\n",
     "from jax import numpy as jnp\n",
     "\n",
-    "import TorchOpt\n",
+    "import torchopt\n",
     "\n",
     "\n",
     "class Net(nn.Module):\n",
@@ -138,7 +138,7 @@
     "    func, params = functorch.make_functional(net)\n",
     "\n",
     "    lr = 1.\n",
-    "    optimizer = TorchOpt.adam(lr)\n",
+    "    optimizer = torchopt.adam(lr)\n",
     "\n",
     "    opt_state = optimizer.init(params)\n",
@@ -150,7 +150,7 @@
     "        grad = torch.autograd.grad(loss, params)\n",
     "        updates, opt_state = optimizer.update(grad, opt_state)\n",
     "        print(params)\n",
-    "        params = TorchOpt.apply_updates(params, updates)\n",
+    "        params = torchopt.apply_updates(params, updates)\n",
     "        print(params)"
    ]
   },
@@ -181,7 +181,7 @@
     "- Full TorchOpt\n",
     "\n",
     "The third example illustrates that TorchOpt can also directly replace torch.optim with exactly the same usage. Note the API \n",
-    "difference happens between TorchOpt.adam() and TorchOpt.Adam(). "
+    "difference happens between torchopt.adam() and torchopt.Adam(). "
    ]
   },
@@ -196,7 +196,7 @@
     "    net = Net(dim)\n",
     "\n",
     "    lr = 1.\n",
-    "    optim = TorchOpt.Adam(net.parameters(), lr=lr)\n",
+    "    optim = torchopt.Adam(net.parameters(), lr=lr)\n",
     "\n",
     "    xs = 2 * torch.ones(batch_size, dim)\n",
     "    ys = torch.ones(batch_size)\n",
@@ -294,7 +294,7 @@
     "## 2. Differentiable Optimization with functional optimizer\n",
     "Coupled with a functional optimizer, you can conduct differentiable optimization by setting the inplace flag as False in the update and apply_updates functions (which might be helpful for meta-learning algorithm implementation with a functional programming style). \n",
     "\n",
-    "Note that TorchOpt.SGD, TorchOpt.Adam do not support differentiable optimization. Refer to the Meta Optimizer notebook for pytorch-like differentiable optimizers."
+    "Note that torchopt.SGD, torchopt.Adam do not support differentiable optimization. Refer to the Meta Optimizer notebook for pytorch-like differentiable optimizers."
    ]
   },
   {
@@ -311,7 +311,7 @@
     "\n",
     "    lr = 1.\n",
     "    # sgd example\n",
-    "    optimizer = TorchOpt.sgd(lr)\n",
+    "    optimizer = torchopt.sgd(lr)\n",
     "    meta_param = torch.tensor(1., requires_grad=True)\n",
     "\n",
     "    opt_state = optimizer.init(params)\n",
@@ -325,7 +325,7 @@
     "        loss = ((pred - ys) ** 2).sum()\n",
     "        grad = torch.autograd.grad(loss, params, create_graph=True)\n",
     "        updates, opt_state = optimizer.update(grad, opt_state, inplace=False)\n",
-    "        params = TorchOpt.apply_updates(params, updates, inplace=False)\n",
+    "        params = torchopt.apply_updates(params, updates, inplace=False)\n",
     "\n",
     "    pred = func(params, xs)\n",
     "    loss = ((pred - ys) ** 2).sum()\n",
@@ -365,7 +365,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "optim = TorchOpt.adam(lr=1., moment_requires_grad=False)"
+    "optim = torchopt.adam(lr=1., moment_requires_grad=False)"
    ]
   },
@@ -374,7 +374,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "optim = TorchOpt.adam(lr=1., moment_requires_grad=True)"
+    "optim = torchopt.adam(lr=1., moment_requires_grad=True)"
    ]
   },
@@ -383,7 +383,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "optim = TorchOpt.sgd(lr=1., momentum=0.8, moment_requires_grad=True)"
+    "optim = torchopt.sgd(lr=1., momentum=0.8, moment_requires_grad=True)"
    ]
   },
@@ -418,7 +418,7 @@
    }
   ],
   "source": [
-    "TorchOpt.accelerated_op_available(torch.device(\"cpu\"))"
+    "torchopt.accelerated_op_available(torch.device(\"cpu\"))"
   ]
  },
@@ -438,7 +438,7 @@
    }
   ],
   "source": [
-    "TorchOpt.accelerated_op_available(torch.device(\"cuda\"))"
+    "torchopt.accelerated_op_available(torch.device(\"cuda\"))"
   ]
  },
@@ -448,7 +448,7 @@
   "outputs": [],
   "source": [
    "net = Net(1).cuda()\n",
-    "optim = TorchOpt.Adam(net.parameters(), lr=1., use_accelerated_op=True)"
+    "optim = torchopt.Adam(net.parameters(), lr=1., use_accelerated_op=True)"
   ]
  },
@@ -457,7 +457,7 @@
   "metadata": {},
   "outputs": [],
   "source": [
-    "optim = TorchOpt.adam(lr=1., use_accelerated_op=True)"
+    "optim = torchopt.adam(lr=1., use_accelerated_op=True)"
   ]
  }
 ],
diff --git a/tutorials/2_Visualization.ipynb b/tutorials/2_Visualization.ipynb
index f1ce0aa6..c8593b94 100644
--- a/tutorials/2_Visualization.ipynb
+++ b/tutorials/2_Visualization.ipynb
@@ -98,12 +98,12 @@
    ],
    "source": [
     "import torch\n",
-    "import TorchOpt\n",
+    "import torchopt\n",
     "\n",
     "\n",
     "x = torch.tensor(1., requires_grad=True)\n",
     "y = 2 * x\n",
-    "TorchOpt.visual.make_dot(y, params={'x': x, 'y': y})"
+    "torchopt.visual.make_dot(y, params={'x': x, 'y': y})"
    ]
   },
@@ -245,8 +245,8 @@
    }
   ],
   "source": [
-    "from torch import nn\n",
-    "from torch.nn import functional as F\n",
+    "import torch.nn as nn\n",
+    "import torch.nn.functional as F\n",
     "\n",
     "\n",
     "class Net(nn.Module):\n",
@@ -264,7 +264,7 @@
     "xs = torch.ones(batch_size, dim)\n",
     "pred = net(xs)\n",
     "loss = F.mse_loss(pred, torch.ones_like(pred))\n",
-    "TorchOpt.visual.make_dot(loss, params=(net.named_parameters(), {\"loss\": loss}))"
+    "torchopt.visual.make_dot(loss, params=(net.named_parameters(), {\"loss\": loss}))"
    ]
   },
@@ -317,7 +317,7 @@
     "dim = 5\n",
     "batch_size = 2\n",
     "net = MetaNet(dim).cuda()\n",
-    "optimizer = TorchOpt.MetaSGD(net, lr=1e-3)\n",
+    "optimizer = torchopt.MetaSGD(net, lr=1e-3)\n",
     "meta_param = torch.tensor(1., requires_grad=True)\n",
     "\n",
     "xs = torch.ones(batch_size, dim).cuda()\n",
@@ -325,17 +325,17 @@
     "pred = net(xs, meta_param)\n",
     "loss = F.mse_loss(pred, torch.ones_like(pred))\n",
     "# set enable_visual\n",
-    "net_state_0 = TorchOpt.extract_state_dict(\n",
+    "net_state_0 = torchopt.extract_state_dict(\n",
     "    net, enable_visual=True, visual_prefix='step0.')\n",
     "optimizer.step(loss)\n",
     "# set enable_visual\n",
-    "net_state_1 = TorchOpt.extract_state_dict(\n",
+    "net_state_1 = torchopt.extract_state_dict(\n",
     "    net, enable_visual=True, visual_prefix='step1.')\n",
     "\n",
     "pred = net(xs, meta_param)\n",
     "loss = F.mse_loss(pred, torch.ones_like(pred))\n",
     "# draw computation graph\n",
-    "TorchOpt.visual.make_dot(loss,\n",
+    "torchopt.visual.make_dot(loss,\n",
     "                         [net_state_0, net_state_1,\n",
     "                          {\"meta_param\": meta_param, 'loss': loss}]\n",
     "                         ).render(\"meta_graph\", format=\"png\")\n",
diff --git a/tutorials/3_Meta_Optimizer.ipynb b/tutorials/3_Meta_Optimizer.ipynb
index b76114f4..a846c81c 100644
--- a/tutorials/3_Meta_Optimizer.ipynb
+++ b/tutorials/3_Meta_Optimizer.ipynb
@@ -4,7 +4,7 @@
  "cell_type": "markdown",
  "metadata": {},
  "source": [
-    "# TorchOpt as MetaOptimizer"
+    "# torchopt as Meta-Optimizer"
   ]
  },
@@ -20,7 +20,7 @@
  "source": [
    "## 1. Basic API for differentiable optimizer\n",
    "\n",
-    "`MetaOptimizer` is the main class for our differnetiabl optimzier. Combined with the functional optimizer `TorchOpt.sgd` and `TorchOpt.adam` mentioned in the tutorial 1, we can define our high-level API `TorchOpt.MetaSGD` and `TorchOpt.MetaAdam`. We will discuss how this combination happens with `TorchOpt.chain` in Section 3. Let us consider the problem below."
+    "`MetaOptimizer` is the main class for our differentiable optimizer. Combined with the functional optimizers `torchopt.sgd` and `torchopt.adam` mentioned in tutorial 1, we can define our high-level API `torchopt.MetaSGD` and `torchopt.MetaAdam`. We will discuss how this combination happens with `torchopt.chain` in Section 3. Let us consider the problem below."
   ]
  },
@@ -56,7 +56,7 @@
   "outputs": [],
   "source": [
    "import torch\n",
-    "from torch import nn\n",
+    "import torch.nn as nn\n",
    "\n",
    "class Net(nn.Module):\n",
    "    def __init__(self):\n",
@@ -105,9 +105,9 @@
    }
   ],
   "source": [
-    "import TorchOpt\n",
+    "import torchopt\n",
    "\n",
-    "optim = TorchOpt.MetaSGD(net, lr=1.)\n",
+    "optim = torchopt.MetaSGD(net, lr=1.)\n",
    "inner_loss = net(x)\n",
    "optim.step(inner_loss)\n",
    "outer_loss = net(x)\n",
@@ -160,21 +160,21 @@
   "source": [
    "import matplotlib.pyplot as plt\n",
    "from matplotlib import image as imgplt\n",
-    "from torch.nn import functional as F\n",
+    "import torch.nn.functional as F\n",
    "\n",
    "net = Net()\n",
    "x = torch.tensor(2., requires_grad=True)\n",
    "y = torch.tensor(1.)\n",
    "\n",
-    "optim = TorchOpt.MetaAdam(net, lr=1., moment_requires_grad=False)\n",
+    "optim = torchopt.MetaAdam(net, lr=1., moment_requires_grad=False)\n",
    "inner_loss = F.mse_loss(net(x), y)\n",
-    "net_state_0 = TorchOpt.extract_state_dict(\n",
+    "net_state_0 = torchopt.extract_state_dict(\n",
    "    net, enable_visual=True, visual_prefix='step0.')\n",
    "optim.step(inner_loss)\n",
-    "net_state_1 = TorchOpt.extract_state_dict(\n",
+    "net_state_1 = torchopt.extract_state_dict(\n",
    "    net, enable_visual=True, visual_prefix='step1.')\n",
    "outer_loss = F.mse_loss(net(x), y)\n",
-    "TorchOpt.visual.make_dot(outer_loss, params=[net_state_0, net_state_1,{'x': x, 'outer_loss': outer_loss}]).render(\"graph\", format=\"png\")\n",
+    "torchopt.visual.make_dot(outer_loss, params=[net_state_0, net_state_1,{'x': x, 'outer_loss': outer_loss}]).render(\"graph\", format=\"png\")\n",
    "plt.figure(figsize=(15,15))\n",
    "plt.imshow(imgplt.imread('graph.png'))"
   ]
@@ -219,16 +219,16 @@
    "x = torch.tensor(2., requires_grad=True)\n",
    "y = torch.tensor(1.)\n",
    "\n",
-    "optim = TorchOpt.MetaAdam(net, lr=1.)\n",
+    "optim = torchopt.MetaAdam(net, lr=1.)\n",
    "inner_loss = F.mse_loss(net(x), y)\n",
-    "net_state_0 = TorchOpt.extract_state_dict(\n",
+    "net_state_0 = torchopt.extract_state_dict(\n",
    "    net, enable_visual=True, visual_prefix='step0.')\n",
    "optim.step(inner_loss)\n",
-    "net_state_1 = TorchOpt.extract_state_dict(\n",
+    "net_state_1 = torchopt.extract_state_dict(\n",
    "    net, enable_visual=True, visual_prefix='step1.')\n",
    "\n",
    "outer_loss = F.mse_loss(net(x), y)\n",
-    "TorchOpt.visual.make_dot(outer_loss, params=[net_state_0, net_state_1, {'x': x, 'outer_loss': outer_loss}]).render(\"graph\", format=\"png\")\n",
+    "torchopt.visual.make_dot(outer_loss, params=[net_state_0, net_state_1, {'x': x, 'outer_loss': outer_loss}]).render(\"graph\", format=\"png\")\n",
    "plt.figure(figsize=(15,15))\n",
    "plt.imshow(imgplt.imread('graph.png'))"
   ]
@@ -255,7 +255,7 @@
    "\n",
    "We observe that how to reinitialize the inner-loop parameters in a new bi-level process varies in different Meta-Learning algorithms. For instance, in algorithms like MAML, every time a new task comes, we need to reset the parameters to the initial ones. In other cases such as Meta-gradient reinforcement learning, the inner-loop network parameters just inherit the previously updated parameters to continue the new bi-level process.\n",
    "\n",
-    "We provide the `TorchOpt.extract_state_dict` and `TorchOpt.recover_state_dict` function to extract and restore the state of network and optimizer. By default, the extracted state dictionary is a reference (this design is for accumulating gradient of multi-task batch training, MAML for example). You can also set `copy=True` to extract the copy of state dictionary."
+    "We provide the `torchopt.extract_state_dict` and `torchopt.recover_state_dict` functions to extract and restore the state of the network and optimizer. By default, the extracted state dictionary is a reference (this design is for accumulating gradients in multi-task batch training, MAML for example). You can also set `copy=True` to extract a copy of the state dictionary."
   ]
  },
@@ -275,13 +275,13 @@
  "source": [
   "net = Net()\n",
   "x = torch.tensor(2., requires_grad=True)\n",
-    "optim = TorchOpt.MetaAdam(net, lr=1.)\n",
-    "init_net_state = TorchOpt.extract_state_dict(net)\n",
-    "init_optim_state = TorchOpt.extract_state_dict(optim)\n",
+    "optim = torchopt.MetaAdam(net, lr=1.)\n",
+    "init_net_state = torchopt.extract_state_dict(net)\n",
+    "init_optim_state = torchopt.extract_state_dict(optim)\n",
   "\n",
   "# get the copy of state dictionary\n",
-    "init_net_state_copy = TorchOpt.extract_state_dict(net, copy=True)\n",
-    "init_optim_state_copy = TorchOpt.extract_state_dict(optim, copy=True)\n",
+    "init_net_state_copy = torchopt.extract_state_dict(net, copy=True)\n",
+    "init_optim_state_copy = torchopt.extract_state_dict(optim, copy=True)\n",
   "\n",
   "# Conduct 2 inner-loop optimization \n",
   "inner_loss = net(x)\n",
@@ -291,8 +291,8 @@
   "print(net.a)\n",
   "\n",
   "# Recover and reconduct 2 inner-loop optimization \n",
-    "TorchOpt.recover_state_dict(net, init_net_state)\n",
-    "TorchOpt.recover_state_dict(optim, init_optim_state)\n",
+    "torchopt.recover_state_dict(net, init_net_state)\n",
+    "torchopt.recover_state_dict(optim, init_optim_state)\n",
   "inner_loss = net(x)\n",
   "optim.step(inner_loss)\n",
   "inner_loss = net(x)\n",
@@ -352,14 +352,14 @@
   "\n",
   "net = Net2Tasks()\n",
   "x = torch.tensor(2., requires_grad=True)\n",
-    "optim = TorchOpt.MetaSGD(net, lr=1.)"
+    "optim = torchopt.MetaSGD(net, lr=1.)"
  ]
 },
 {
  "cell_type": "markdown",
  "metadata": {},
  "source": [
-    "Once we call `step` method of `MetaOptimizer`, the parameters of the network would be changed. We should use `TorchOpt.extract_state_dict` to extract state and use `TorchOpt.recover_state_dict` to recover the state. Note that if we use optimizers that have momentum buffers, we should also extract and recover them, vanilla SGD does not have momentum buffers so codes `init_optim_state = TorchOpt.extract_state_dict(optim)` and `TorchOpt.recover_state_dict(optim, init_optim_state)` have no effect."
+    "Once we call the `step` method of `MetaOptimizer`, the parameters of the network will be changed. We should use `torchopt.extract_state_dict` to extract the state and use `torchopt.recover_state_dict` to recover the state. Note that if we use optimizers that have momentum buffers, we should also extract and recover them; vanilla SGD does not have momentum buffers, so the lines `init_optim_state = torchopt.extract_state_dict(optim)` and `torchopt.recover_state_dict(optim, init_optim_state)` have no effect."
   ]
  },
@@ -378,8 +378,8 @@
    }
   ],
   "source": [
-    "init_net_state = TorchOpt.extract_state_dict(net)\n",
-    "init_optim_state = TorchOpt.extract_state_dict(optim)\n",
+    "init_net_state = torchopt.extract_state_dict(net)\n",
+    "init_optim_state = torchopt.extract_state_dict(optim)\n",
    "# it's SGD so state_dict is empty\n",
    "print(init_optim_state)\n",
    "\n",
@@ -389,8 +389,8 @@
    "lo1.backward()\n",
    "print(x.grad)\n",
    "\n",
-    "TorchOpt.recover_state_dict(net, init_net_state)\n",
-    "TorchOpt.recover_state_dict(optim, init_optim_state)\n",
+    "torchopt.recover_state_dict(net, init_net_state)\n",
+    "torchopt.recover_state_dict(optim, init_optim_state)\n",
    "li2 = net.task2(x)\n",
    "optim.step(li2)\n",
    "lo2 = net.task2(x)\n",
@@ -451,8 +451,8 @@
    "net = Net()\n",
    "x = torch.tensor(2., requires_grad=True)\n",
    "\n",
-    "impl = TorchOpt.combine.chain(TorchOpt.clip.clip_grad_norm(max_norm=2.), TorchOpt.sgd(lr=1., moment_requires_grad=True))\n",
-    "optim = TorchOpt.MetaOptimizer(net, impl)\n",
+    "impl = torchopt.combine.chain(torchopt.clip.clip_grad_norm(max_norm=2.), torchopt.sgd(lr=1., moment_requires_grad=True))\n",
+    "optim = torchopt.MetaOptimizer(net, impl)\n",
    "li = net(x)\n",
    "optim.step(li)\n",
    "lo = net(x)\n",
@@ -496,7 +496,7 @@
    }
   ],
   "source": [
-    "TorchOpt.accelerated_op_available(torch.device(\"cpu\"))"
+    "torchopt.accelerated_op_available(torch.device(\"cpu\"))"
   ]
  },
@@ -516,7 +516,7 @@
    }
   ],
   "source": [
-    "TorchOpt.accelerated_op_available(torch.device(\"cuda\"))"
+    "torchopt.accelerated_op_available(torch.device(\"cuda\"))"
   ]
  },
@@ -552,16 +552,16 @@
    "x = torch.tensor(2., requires_grad=True, device=torch.device(\"cuda\"))\n",
    "y = torch.tensor(1., device=torch.device(\"cuda\"))\n",
    "\n",
-    "optim = TorchOpt.MetaAdam(net, lr=1., use_accelerated_op=True)\n",
+    "optim = torchopt.MetaAdam(net, lr=1., use_accelerated_op=True)\n",
    "\n",
    "inner_loss = F.mse_loss(net(x), y)\n",
-    "net_state_0 = TorchOpt.extract_state_dict(\n",
+    "net_state_0 = torchopt.extract_state_dict(\n",
    "    net, enable_visual=True, visual_prefix='step0.')\n",
    "optim.step(inner_loss)\n",
-    "net_state_1 = TorchOpt.extract_state_dict(\n",
+    "net_state_1 = torchopt.extract_state_dict(\n",
    "    net, enable_visual=True, visual_prefix='step1.')\n",
    "outer_loss = F.mse_loss(net(x), y)\n",
-    "TorchOpt.visual.make_dot(outer_loss, params=[net_state_0, net_state_1,{'x': x, 'outer_loss': outer_loss}]).render(\"graph\", format=\"png\")\n",
+    "torchopt.visual.make_dot(outer_loss, params=[net_state_0, net_state_1,{'x': x, 'outer_loss': outer_loss}]).render(\"graph\", format=\"png\")\n",
    "plt.figure(figsize=(15,15))\n",
    "plt.imshow(imgplt.imread('graph.png'))"
   ]
diff --git a/tutorials/4_Stop_Gradient.ipynb b/tutorials/4_Stop_Gradient.ipynb
old mode 100755
new mode 100644
index 4c13f420..21492fc5
--- a/tutorials/4_Stop_Gradient.ipynb
+++ b/tutorials/4_Stop_Gradient.ipynb
@@ -4,14 +4,14 @@
  "cell_type": "markdown",
  "metadata": {},
  "source": [
-    "# TorchOpt.stop_gradient in meta learning"
+    "# `torchopt.stop_gradient` in Meta-Learning"
   ]
  },
 {
  "cell_type": "markdown",
  "metadata": {},
  "source": [
-    "In this tutoial, we will illustrate the usage of TorchOpt.stop_gradient with a meta-learning example. We use TorchOpt.visual to help us visualize what is going on in automatic differentiation. Firstly, we define a simple network and the objective function for inner, outer optimization."
+    "In this tutorial, we will illustrate the usage of torchopt.stop_gradient with a meta-learning example. We use torchopt.visual to help us visualize what is going on in automatic differentiation. First, we define a simple network and the objective functions for the inner and outer optimization."
   ]
  },
@@ -21,8 +21,8 @@
   "outputs": [],
   "source": [
    "import torch\n",
-    "from torch import nn\n",
-    "from torch.nn import functional as F\n",
+    "import torch.nn as nn\n",
+    "import torch.nn.functional as F\n",
    "\n",
    "class Net(nn.Module):\n",
    "    def __init__(self):\n",
@@ -69,8 +69,8 @@
   "metadata": {},
   "outputs": [],
   "source": [
-    "import TorchOpt\n",
-    "from TorchOpt import MetaSGD\n",
+    "import torchopt\n",
+    "from torchopt import MetaSGD\n",
    "from matplotlib import image as imgplt\n",
    "from matplotlib import pyplot as plt\n",
    "\n",
@@ -125,7 +125,7 @@
    "# inner loss\n",
    "loss = loss_fn(net(x), y)\n",
    "print(f\"inner loss: {loss:.4f}\")\n",
-    "TorchOpt.visual.make_dot(loss).render(\"full_graph\", format=\"png\")\n",
+    "torchopt.visual.make_dot(loss).render(\"full_graph\", format=\"png\")\n",
    "plt.figure(figsize=(10,10))\n",
    "plt.imshow(imgplt.imread('full_graph.png'))"
   ]
@@ -195,12 +195,12 @@
   ],
   "source": [
    "# extract state_dict for updated network\n",
-    "one_step_net_state = TorchOpt.extract_state_dict(net)\n",
-    "one_step_optim_state = TorchOpt.extract_state_dict(optim)\n",
+    "one_step_net_state = torchopt.extract_state_dict(net)\n",
+    "one_step_optim_state = torchopt.extract_state_dict(optim)\n",
    "# calculate outer loss\n",
    "outer_loss = loss_fn(net(x), y)\n",
    "print(f\"outer loss: {outer_loss:.4f}\")\n",
-    "TorchOpt.visual.make_dot(outer_loss).render(\"full_graph\", format=\"png\")\n",
+    "torchopt.visual.make_dot(outer_loss).render(\"full_graph\", format=\"png\")\n",
    "plt.figure(figsize=(10,10))\n",
    "plt.imshow(imgplt.imread('full_graph.png'))"
   ]
@@ -294,7 +294,7 @@
    "loss = inner_loss * meta_parameter\n",
    "optim.step(loss)\n",
    "outer_loss = loss_fn(net(x), y)\n",
-    "TorchOpt.visual.make_dot(outer_loss).render(\"full_graph\", format=\"png\")\n",
+    "torchopt.visual.make_dot(outer_loss).render(\"full_graph\", format=\"png\")\n",
    "plt.figure(figsize=(10,10))\n",
    "plt.imshow(imgplt.imread('full_graph.png'))\n",
    "meta_optim.zero_grad()\n",
@@ -306,7 +306,7 @@
  "cell_type": "markdown",
  "metadata": {},
  "source": [
-    "From the graph we can see, directly conducting the second bi-level process links the graph of first and second bi-level process together. We should manually stop gradient with `TorchOpt.stop_gradient`. `TorchOpt.stop_gradient` will detach the node of gradient graph and make it become a leaf node. It allows the input of network, optimizer, or state dictionary and the gradient operation happens in an inplace manner.\n",
+    "From the graph we can see that directly conducting the second bi-level process links the graphs of the first and second bi-level processes together. We should manually stop the gradient with `torchopt.stop_gradient`, which detaches a node from the gradient graph and makes it a leaf node. It accepts a network, an optimizer, or a state dictionary as input, and the detach operation happens in an in-place manner.\n",
    "\n",
    "Let's use recover_state_dict to go back to the one-step updated states."
   ]
  },
@@ -318,8 +318,8 @@
   "outputs": [],
   "source": [
    "# Reset to previous one-step updated states\n",
-    "TorchOpt.recover_state_dict(net, one_step_net_state)\n",
-    "TorchOpt.recover_state_dict(optim, one_step_optim_state)"
+    "torchopt.recover_state_dict(net, one_step_net_state)\n",
+    "torchopt.recover_state_dict(optim, one_step_optim_state)"
   ]
  },
 {
@@ -356,14 +356,14 @@
   ],
   "source": [
    "# stop gradient and make them become the leaf node\n",
-    "TorchOpt.stop_gradient(net)\n",
-    "TorchOpt.stop_gradient(optim)\n",
+    "torchopt.stop_gradient(net)\n",
+    "torchopt.stop_gradient(optim)\n",
    "\n",
    "inner_loss = loss_fn(net(x), y)\n",
    "loss = inner_loss * meta_parameter\n",
    "optim.step(loss)\n",
    "outer_loss = loss_fn(net(x), y)\n",
-    "TorchOpt.visual.make_dot(outer_loss).render(\"full_graph\", format=\"png\")\n",
+    "torchopt.visual.make_dot(outer_loss).render(\"full_graph\", format=\"png\")\n",
    "plt.figure(figsize=(10,10))\n",
    "plt.imshow(imgplt.imread('full_graph.png'))\n",
    "meta_optim.zero_grad()\n",

From 103ff1a808ec7f513aafeb4cb889fb425600a7de Mon Sep 17 00:00:00 2001
From: Xuehai Pan
Date: Sun, 10 Jul 2022 16:43:11 +0800
Subject: [PATCH 08/19] chore(setup): rewrite packaging (#24)

---
 .editorconfig                                 |  11 +-
 .github/workflows/lint.yml                    |  98 ++++++---
 .github/workflows/release.yml                 | 111 +++++++---
 .github/workflows/test.yml                    |  31 ---
 .github/workflows/tests.yml                   |  60 ++++++
 .gitmodules                                   |   4 -
 CITATION.cff                                  |   2 +-
 CMakeLists.txt                                |  49 +++--
 MANIFEST.in                                   |   7 +
 Makefile                                      | 130 +++++++-----
 README.md                                     |  94 +++++++--
 conda-recipe.yaml                             |  69 ++++++
 docker/dev.dockerfile                         |   2 +-
 docs/conf.py                                  |  14 +-
 docs/index.rst                                |  10 +-
 examples/L2R/helper/argument.py               |   2 +-
 examples/L2R/result.png                       | Bin
 examples/LOLA/README.md                       |   0
 examples/LOLA/helper/agent.py                 |   2 +-
 examples/LOLA/helper/argument.py              |   2 +-
 examples/LOLA/helper/env.py                   |   2 +-
 examples/LOLA/helper/utils.py                 |   2 +-
 examples/LOLA/lola_dice.py                    |   0
 examples/LOLA/result.png                      | Bin
 examples/LOLA/visualise.py                    |   2 +-
 examples/MAML-RL/README.md                    |   0
 examples/MAML-RL/helpers/__init__.py          |   3 +-
 examples/MAML-RL/helpers/policy.py            |   2 +-
 examples/MAML-RL/run_MAML.py                  |   1 +
 examples/few-shot/maml-omniglot.py            |   1 +
 examples/visualize.py                         |   2 +-
 image/time.png                                | Bin
 image/torchviz_torchopt.jpg                   | Bin
 requirements.txt                              |   4 +-
 setup.cfg                                     |   8 +-
 setup.py                                      | 198 +++++++++---------
 src/adam_op/adam_op_impl.cpp                  |   3 +-
 tests/requirements.txt                        |  13 +-
 .../high_level/test_high_level_inplace.py     |   2 +
 .../unit/low_level/test_low_level_inplace.py  |   2 +
 third_party/pybind11                          |   1 -
 torchopt/__init__.py                          |  24 +--
 torchopt/_lib/{adam_op.py => adam_op.pyi}     |   0
 torchopt/_src/accelerated_op/__init__.py      |   5 +-
 torchopt/_src/base.py                         |  12 +-
 torchopt/_src/clip.py                         |   1 +
 torchopt/_src/transform.py                    |   1 +
 torchopt/_src/typing.py                       |   3 +-
 torchopt/_src/visual.py                       |  12 +-
 torchopt/version.py                           |  17 ++
 50 files changed, 665 insertions(+), 354 deletions(-)
 delete mode 100644 .github/workflows/test.yml
 create mode 100644 .github/workflows/tests.yml
 delete mode 100644 .gitmodules
 mode change 100755 => 100644 CMakeLists.txt
 create mode 100644 MANIFEST.in
 create mode 100644 conda-recipe.yaml
 mode change 100755 => 100644 examples/L2R/result.png
 mode change 100755 => 100644 examples/LOLA/README.md
 mode change 100755 => 100644 examples/LOLA/helper/agent.py
 mode change 100755 => 100644 examples/LOLA/helper/argument.py
 mode change 100755 => 100644 examples/LOLA/helper/env.py
 mode change 100755 => 100644 examples/LOLA/helper/utils.py
 mode change 100755 => 100644 examples/LOLA/lola_dice.py
 mode change 100755 => 100644 examples/LOLA/result.png
 mode change 100755 => 100644 examples/LOLA/visualise.py
 mode change 100755 => 100644 examples/MAML-RL/README.md
 mode change 100755 => 100644 image/time.png
 mode change 100755 => 100644 image/torchviz_torchopt.jpg
 delete mode 160000 third_party/pybind11
 rename torchopt/_lib/{adam_op.py => adam_op.pyi} (100%)
 create mode 100644 torchopt/version.py

diff --git a/.editorconfig b/.editorconfig
index 1ee2f625..96ef7342 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -14,6 +14,9 @@ insert_final_newline = true
 indent_size = 4
 src_paths=torchopt,tests,examples
 
+[*.{yaml,yml}]
+indent_size = 2
+
 [*.md]
 indent_size = 2
 x-soft-wrap-text = true
@@ -25,11 +28,5 @@ x-soft-wrap-text = true
 [Makefile]
 indent_style = tab
 
-[*.cpp]
-indent_size = 2
-
-[*.h]
-indent_size = 2
-
-[*.cuh?]
+[*.{cpp,h,cu,cuh}]
 indent_size = 2
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index aeb9496c..deb23ab6 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -1,44 +1,78 @@
 name: Lint
 
-on: [push, pull_request]
+on:
+  push:
+  pull_request:
+
+permissions:
+  contents: read
 
 jobs:
   lint:
     runs-on: ubuntu-latest
+    timeout-minutes: 30
     steps:
-      - name: Cancel previous run
-        uses: styfle/cancel-workflow-action@0.9.1
-        with:
-          access_token: ${{ github.token }}
-      - uses: actions/checkout@v2
-      - name: Set up Python 3.8
-        uses: actions/setup-python@v2
-        with:
-          python-version: 3.8
-      - name: Upgrade pip
-        run: |
-          python -m pip install --upgrade pip
-      - name: Install requirements
-        run: |
-          python -m pip install -r requirements.txt
-      - name: Install dependencies
-        run: |
-          python -m pip install -e .
-      - name: flake8
-        run: |
-          make flake8
-      - name: isort and yapf
-        run: |
-          make py-format
-      - name: addlicense
-        run: |
-          make addlicense
-      - name: mypy
-        run: |
-          make mypy
+      - name: Cancel previous run
+        uses: styfle/cancel-workflow-action@0.10.0
+        with:
+          access_token: ${{ github.token }}
+
+      - name: Checkout
+        uses: actions/checkout@v3
+        with:
+          submodules: "recursive"
+          fetch-depth: 1
+
+      - name: Set up Python 3.7 # the lowest version we support
+        uses: actions/setup-python@v4
+        with:
+          python-version: "3.7"
+          update-environment: true
+
+      - name: Setup CUDA Toolkit
+        uses: Jimver/cuda-toolkit@v0.2.7
+        id: cuda-toolkit
+        with:
+          cuda: "11.3.1"
+          method: network
+          sub-packages: '["nvcc"]'
+      - run: |
+          echo "Installed CUDA version is: ${{steps.cuda-toolkit.outputs.cuda}}"
+          echo "CUDA install location: ${{steps.cuda-toolkit.outputs.CUDA_PATH}}"
+          nvcc -V
+
+      - name: Upgrade pip
+        run: |
+          python -m pip install --upgrade pip setuptools
+
+      - name: Install dependencies
+        run: |
+          python -m pip install -r tests/requirements.txt
+
+      - name: Install TorchOpt
+        run: |
+          python -m pip install -e .
+
+      - name: flake8
+        run: |
+          make flake8
+
+      - name: isort and yapf
+        run: |
+          make py-format
+
+      - name: addlicense
+        run: |
+          make addlicense
+
+      - name: mypy
+        run: |
+          make mypy
+
       # - name: docstyle
       #   run: |
       #     make docstyle
+
       # - name: spelling
       #   run: |
-      #     make spelling
\ No newline at end of file
+      #     make spelling
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 016dd0ef..974b8f49 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -7,28 +7,91 @@ on:
 jobs:
   deploy:
     runs-on: ubuntu-latest
+    timeout-minutes: 30
     steps:
-      - uses: actions/checkout@v2
-      - name: Set up Python
-        uses: actions/setup-python@v1
-        with:
-          python-version: '3.x'
-      - name: Check consistency between the package version and release tag
-        run: |
-          RELEASE_VER=${GITHUB_REF#refs/*/}
-          PACKAGE_VER="v`python setup.py --version`"
-          if [ $RELEASE_VER != $PACKAGE_VER ]
-          then
-            echo "package ver. ($PACKAGE_VER) != release ver. ($RELEASE_VER)"; exit 1
-          fi
-      - name: Install dependencies
-        run: |
-          python -m pip install --upgrade pip
-          pip install setuptools wheel twine
-      - name: Build and publish
-        env:
-          TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
-          TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
-        run: |
-          python setup.py sdist bdist_wheel
-          twine upload dist/*
\ No newline at end of file
+      - name: Checkout
+        uses: actions/checkout@v3
+        with:
+          submodules: "recursive"
+          fetch-depth: 1
+
+      - name: Set up Python 3.7
+        id: py37
+        uses: actions/setup-python@v4
+        with:
+          python-version: "3.7"
+          update-environment: false
+
+      - name: Set up Python 3.8
+        id: py38
+        uses: actions/setup-python@v4
+        with:
+          python-version: "3.8"
+          update-environment: false
+
+      - name: Set up Python 3.9
+        id: py39
+        uses: actions/setup-python@v4
+        with:
+          python-version: "3.9"
+          update-environment: false
+
+      - name: Set up Python 3.10
+        id: py310
+        uses: actions/setup-python@v4
+        with:
+          python-version: "3.10"
+          update-environment: false
+
+      - name: Set up Python executable paths
+        run: |
+          DEFAULT_PYTHON="${{ steps.py37.outputs.python-path }}"
+          echo "DEFAULT_PYTHON='${DEFAULT_PYTHON}'" >> "${GITHUB_ENV}"
+
+          PYTHON_EXECUTABLES="${{ steps.py37.outputs.python-path }}"
+          PYTHON_EXECUTABLES="${PYTHON_EXECUTABLES}:${{ steps.py38.outputs.python-path }}"
+          PYTHON_EXECUTABLES="${PYTHON_EXECUTABLES}:${{ steps.py39.outputs.python-path }}"
+          PYTHON_EXECUTABLES="${PYTHON_EXECUTABLES}:${{ steps.py310.outputs.python-path }}"
+          echo "PYTHON_EXECUTABLES='${PYTHON_EXECUTABLES}'" >> "${GITHUB_ENV}"
+
+      - name: Check consistency between the package version and release tag
+        run: |
+          RELEASE_VER="${GITHUB_REF#refs/*/}"
+          PACKAGE_VER="v$(python setup.py --version)"
+          if [[ "${RELEASE_VER}" != "${PACKAGE_VER}" ]]; then
+            echo "package ver. (${PACKAGE_VER}) != release ver. (${RELEASE_VER})"
+            exit 1
+          fi
+
+      - name: Setup CUDA Toolkit
+        uses: Jimver/cuda-toolkit@v0.2.7
+        id: cuda-toolkit
+        with:
+          cuda: "11.3.1"
+          method: network
+          sub-packages: '["nvcc"]'
+      - run: |
+          echo "Installed CUDA version is: ${{steps.cuda-toolkit.outputs.cuda}}"
+          echo "CUDA install location: ${{steps.cuda-toolkit.outputs.CUDA_PATH}}"
+          nvcc -V
+
+      - name: Build sdist and wheels
+        run: |
+          while IFS='' read -rd':' PYTHON || [[ -n "${PYTHON}" ]]; do
+            [[ -z "${PYTHON}" ]] && continue
+            echo "Building wheel with Python: ${PYTHON} ($("${PYTHON}" --version))"
+            "${PYTHON}" -m pip install --upgrade pip setuptools wheel build
+            if [[ "${PYTHON}" == "${DEFAULT_PYTHON}" ]]; then
+              "${PYTHON}" -m build
+            else
+              "${PYTHON}" -m build --wheel
+            fi
+          done <<< "${PYTHON_EXECUTABLES}"
+
+      - name: Publish to PyPI
+        env:
+          TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
+          TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
+        run: |
+          "${DEFAULT_PYTHON}" -m pip install --upgrade twine
+          "${DEFAULT_PYTHON}" -m twine upload dist/*
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
deleted file mode 100644
index 413f9415..00000000
--- a/.github/workflows/test.yml
+++ /dev/null
@@ -1,31 +0,0 @@
-name: Test
-
-on:
-  release:
-    types: [created]
-
-jobs:
-  test:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Cancel previous run
-        uses: styfle/cancel-workflow-action@0.9.1
-        with:
-          access_token: ${{ github.token }}
-      - uses: actions/checkout@v2
-      - name: Set up Python 3.8
-        uses: actions/setup-python@v2
-        with:
-          python-version: 3.8
-      - name: Upgrade pip
-        run: |
-          python -m pip install --upgrade pip
-      - name: Install dependencies
-        run: |
-          python -m pip install -r tests/requirements.txt
-      - name: Install dependencies
-        run: |
-          python -m pip install -e .
-      - name: Test with pytest
-        run: |
-          make pytest
\ No newline at end of file
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
new file mode 100644
index 00000000..ffeeced6
--- /dev/null
+++ b/.github/workflows/tests.yml
@@ -0,0 +1,60 @@
+name: Tests
+
+on:
+  push:
+    branches:
+      - main
+  pull_request:
+
+permissions:
+  contents: read
+
+jobs:
+  test:
+    runs-on: ubuntu-latest
+    timeout-minutes: 30
+    steps:
+      - name: Cancel previous run
+        uses: styfle/cancel-workflow-action@0.10.0
+        with:
+          access_token: ${{ github.token }}
+
+      - name: Checkout
+        uses: actions/checkout@v3
+        with:
+          submodules: "recursive"
+          fetch-depth: 1
+
+      - name: Set up Python 3.7 # the lowest version we support
+        uses: actions/setup-python@v4
+        with:
+          python-version: "3.7"
+          update-environment: true
+
+      - name: Setup CUDA Toolkit
+        uses: Jimver/cuda-toolkit@v0.2.7
+        id: cuda-toolkit
+        with:
+          cuda: "11.3.1"
+          method: network
+          sub-packages: '["nvcc"]'
+      - run: |
+          echo "Installed CUDA version is: ${{steps.cuda-toolkit.outputs.cuda}}"
+          echo "CUDA install location: ${{steps.cuda-toolkit.outputs.CUDA_PATH}}"
+          nvcc -V
+
+      - name: Upgrade pip
+        run: |
+          python -m pip install --upgrade pip setuptools
+
+      - name: Install dependencies
+        run: |
+          python -m pip install -r tests/requirements.txt
+
+      - name: Install TorchOpt
+        run: |
+          python -m pip install -e .
+
+      - name: Test with pytest
+        run: |
+          make pytest
diff --git a/.gitmodules b/.gitmodules
deleted file mode 100644
index 111a2bef..00000000
--- a/.gitmodules
+++ /dev/null
@@ -1,4 +0,0 @@
-[submodule "third_party/pybind11"]
-	path = third_party/pybind11
-	url = https://github.com/pybind/pybind11.git
-	shallow = true
\ No newline at end of file
diff --git a/CITATION.cff b/CITATION.cff
index fdfacfc4..5c239556 100644
--- a/CITATION.cff
+++ b/CITATION.cff
@@ -27,4 +27,4 @@ authors:
 version: 0.4.1
 date-released: "2022-04-09"
 license: Apache-2.0
-repository-code: "https://github.com/metaopt/torchopt"
+repository-code: "https://github.com/metaopt/TorchOpt"
diff --git a/CMakeLists.txt b/CMakeLists.txt
old mode 100755
new mode 100644
index 808d40c5..d97cac96
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -13,25 +13,17 @@
 # limitations under the License.
 # ==============================================================================
 
-cmake_minimum_required(VERSION 3.1)
+cmake_minimum_required(VERSION 3.4)
 project(torchopt LANGUAGES CXX CUDA)
 
-find_package(CUDA REQUIRED)
-
-# include(FindCUDA/select_compute_arch)
-# CUDA_DETECT_INSTALLED_GPUS(INSTALLED_GPU_CCS_1)
-# string(STRIP "${INSTALLED_GPU_CCS_1}" INSTALLED_GPU_CCS_2)
-# string(REPLACE " " ";" INSTALLED_GPU_CCS_3 "${INSTALLED_GPU_CCS_2}")
-# string(REPLACE "." "" CUDA_ARCH_LIST "${INSTALLED_GPU_CCS_3}")
-# message("-- nvcc generates code for arch ${CUDA_ARCH_LIST}")
-# SET(CMAKE_CUDA_ARCHITECTURES ${CUDA_ARCH_LIST})
-SET(CMAKE_CUDA_ARCHITECTURES 53;60;61;70;75;80;86)
-
 if(NOT CMAKE_BUILD_TYPE)
-  set(CMAKE_BUILD_TYPE Release)
+    set(CMAKE_BUILD_TYPE Release)
 endif()
 
-set(CMAKE_INCLUDE_CURRENT_DIR ON)
+find_package(CUDA REQUIRED)
+cuda_select_nvcc_arch_flags(CUDA_ARCH_FLAGS All)
+list(APPEND CUDA_NVCC_FLAGS ${CUDA_ARCH_FLAGS})
+
 set(CMAKE_CXX_STANDARD 14)
 set(CMAKE_CUDA_STANDARD 14)
 set(CMAKE_CXX_STANDARD_REQUIRED ON)
@@ -49,7 +41,7 @@ function(system)
     "${multiValueArgs}"
     "${ARGN}")
 
-  if (NOT DEFINED SYSTEM_WORKING_DIRECTORY)
+  if(NOT DEFINED SYSTEM_WORKING_DIRECTORY)
     set(SYSTEM_WORKING_DIRECTORY "${PROJECT_SOURCE_DIR}")
   endif()
 
@@ -65,12 +57,13 @@ function(system)
   endif()
 
   set("${SYSTEM_OUTPUT_VARIABLE}" "${STDOUT}" PARENT_SCOPE)
-  if (DEFINED SYSTEM_ERROR_VARIABLE)
+
+  if(DEFINED SYSTEM_ERROR_VARIABLE)
     set("${SYSTEM_ERROR_VARIABLE}" "${STDERR}" PARENT_SCOPE)
   endif()
 endfunction()
 
-if (NOT DEFINED PYTHON_EXECUTABLE)
+if(NOT DEFINED PYTHON_EXECUTABLE)
   set(PYTHON_EXECUTABLE python3)
 endif()
 
@@ -102,6 +95,23 @@ else()
   include_directories(${PYTHON_INCLUDE_DIR})
 endif()
 
+set(PYBIND11_PYTHON_VERSION "${PYTHON_VERSION}")
+
+if(NOT DEFINED PYBIND11_CMAKE_DIR)
+  message("-- Auto detecting pybind11 CMake directory...")
+  system(
+    STRIP OUTPUT_VARIABLE PYBIND11_CMAKE_DIR
+    COMMAND "${PYTHON_EXECUTABLE}" -m pybind11 --cmakedir
+  )
+endif()
+
+if("${PYBIND11_CMAKE_DIR}" STREQUAL "")
+  message(FATAL_ERROR "-- Pybind11 CMake directory not found")
+else()
+  message("-- Detected Pybind11 CMake directory: \"${PYBIND11_CMAKE_DIR}\"")
+  find_package(pybind11 CONFIG PATHS "${PYBIND11_CMAKE_DIR}")
+endif()
+
 if(NOT DEFINED TORCH_INCLUDE_PATH)
   message("-- Auto detecting PyTorch include directory...")
   system(
@@ -132,16 +142,15 @@ else()
 endif()
 
 unset(TORCH_LIBRARIES)
+
 foreach(VAR_PATH ${TORCH_LIBRARY_PATH})
   file(GLOB TORCH_LIBRARY "${VAR_PATH}/*.so")
   list(APPEND TORCH_LIBRARIES "${TORCH_LIBRARY}")
 endforeach()
+
 message("-- Detected Torch libraries: \"${TORCH_LIBRARIES}\"")
 
 add_definitions(-D_GLIBCXX_USE_CXX11_ABI=0)
 
-set(PYBIND11_PYTHON_VERSION "${PYTHON_VERSION}")
-add_subdirectory("third_party/pybind11")
 
 include_directories(include)
-
 add_subdirectory(src)
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 00000000..08cf6257
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,7 @@
+recursive-include torchopt *.pyi
+include LICENSE
+
+# Include source files in sdist
+include CMakeLists.txt
+recursive-include src *
+recursive-include include *
diff --git a/Makefile b/Makefile
index 6e07d1a1..ebac34fd 100644
--- a/Makefile
+++ b/Makefile
@@ -1,36 +1,54 @@
 print-% : ; @echo $* = $($*)
-SHELL = /bin/bash
 PROJECT_NAME = torchopt
-PROJECT_PATH = ${PROJECT_NAME}/
-PROJECT_FOLDER = $(PROJECT_NAME) examples include src tests
-PYTHON_FILES = $(shell find examples torchopt tests -type f -name "*.py" -o -name "*.pyi")
-CPP_FILES = $(shell find . -type f -name "*.h" -o -name "*.cpp" -o -name "*.cuh" -o -name "*.cu")
-COMMIT_HASH = $(shell git log -1 --format=%h)
 COPYRIGHT = "MetaOPT Team. All Rights Reserved."
+PROJECT_PATH = ${PROJECT_NAME}
+SHELL = /bin/bash
+SOURCE_FOLDERS = $(PROJECT_PATH) examples include src tests
+PYTHON_FILES = $(shell find $(SOURCE_FOLDERS) -type f -name "*.py" -o -name "*.pyi")
+CXX_FILES = $(shell find $(SOURCE_FOLDERS) -type f -name "*.h" -o -name "*.cpp" -o -name "*.cuh" -o -name "*.cu")
+COMMIT_HASH = $(shell git log -1 --format=%h)
 PATH := $(HOME)/go/bin:$(PATH)
+PYTHON ?= $(shell command -v python3 || command -v python)
+
+.PHONY: default
+default: install
 
-# installation
+install:
+	$(PYTHON) -m pip install .
 
-check_install = python3 -c "import $(1)" || (cd && pip3 install $(1) --upgrade && cd -)
-check_install_extra = python3 -c "import $(1)" || (cd && pip3 install $(2) --upgrade && cd -)
+# Tools Installation
+check_pip_install = $(PYTHON) -m pip show $(1) &>/dev/null || (cd && $(PYTHON) -m pip install $(1) --upgrade)
+check_pip_install_extra = $(PYTHON) -m pip show $(1) &>/dev/null || (cd && $(PYTHON) -m pip install $(2) --upgrade)
 
 flake8-install:
-	$(call check_install, flake8)
-	$(call check_install_extra, bugbear, flake8_bugbear)
+	$(call check_pip_install,flake8)
+	$(call check_pip_install_extra,bugbear,flake8_bugbear)
 
 py-format-install:
-	$(call check_install, isort)
-	$(call check_install, yapf)
+	$(call check_pip_install,isort)
+	$(call check_pip_install,yapf)
 
 mypy-install:
-	$(call check_install, mypy)
+	$(call check_pip_install,mypy)
+
+docs-install:
+	$(call check_pip_install,pydocstyle)
+	$(call check_pip_install,doc8)
+	$(call check_pip_install,sphinx)
+	$(call check_pip_install,sphinx_rtd_theme)
+	$(call check_pip_install_extra,sphinxcontrib.spelling,sphinxcontrib.spelling pyenchant)
+
+pytest-install:
+	$(call check_pip_install,pytest)
+	$(call check_pip_install,pytest_cov)
+	$(call check_pip_install,pytest_xdist)
 
 cpplint-install:
-	$(call check_install, cpplint)
+	$(call check_pip_install,cpplint)
 
 clang-format-install:
-	command -v clang-format-11 || sudo apt-get install -y clang-format-11
+	command -v clang-format || sudo apt-get install -y clang-format
 
 clang-tidy-install:
 	command -v clang-tidy || sudo apt-get install -y clang-tidy
@@ -42,65 +60,67 @@ go-install:
 addlicense-install: go-install
 	command -v addlicense || go install github.com/google/addlicense@latest
 
-doc-install:
-	$(call check_install, pydocstyle)
-	$(call check_install, doc8)
-	$(call check_install, sphinx)
-	$(call check_install, sphinx_rtd_theme)
-	$(call check_install_extra, sphinxcontrib.spelling, sphinxcontrib.spelling pyenchant)
-
-pytest-install:
-	$(call check_install, pytest)
-	$(call check_install, pytest_cov)
-	$(call check_install, pytest_xdist)
-
-
-# test
 
+# Tests
 pytest: pytest-install
-	pytest tests --cov ${PROJECT_PATH} --durations 0 -v --cov-report term-missing --color=yes
+	cd tests && $(PYTHON) -m pytest unit --cov ${PROJECT_PATH} --durations 0 -v --cov-report term-missing --color=yes
+
+test: pytest
 
-# python linter
+# Python linters
 
 flake8: flake8-install
-	flake8 $(PYTHON_FILES) --count --select=E9,F63,F7,F82,E225,E251 --show-source --statistics
+	$(PYTHON) -m flake8 $(PYTHON_FILES) --count --select=E9,F63,F7,F82,E225,E251 --show-source --statistics
 
 py-format: py-format-install
-	isort --project torchopt --check $(PYTHON_FILES) && \
-	yapf --in-place --recursive $(PYTHON_FILES)
+	$(PYTHON) -m isort --project torchopt --check $(PYTHON_FILES) && \
+	$(PYTHON) -m yapf --in-place --recursive $(PYTHON_FILES)
 
 mypy: mypy-install
-	mypy $(PROJECT_NAME)
+	$(PYTHON) -m mypy $(PROJECT_NAME)
 
-# c++ linter
+# C++ linters
 
 cpplint: cpplint-install
-	cpplint $(CPP_FILES)
+	$(PYTHON) -m cpplint $(CXX_FILES)
 
 clang-format: clang-format-install
-	clang-format-11 --style=file -i $(CPP_FILES) -n --Werror
+	clang-format --style=file -i $(CXX_FILES) -n --Werror
 
-# documentation
+# Documentation
 
 addlicense: addlicense-install
-	addlicense -c $(COPYRIGHT) -l apache -y 2022 -check $(PROJECT_FOLDER)
+	addlicense -c $(COPYRIGHT) -l apache -y 2022 -check $(SOURCE_FOLDERS)
 
-docstyle: doc-install
-	pydocstyle $(PROJECT_NAME) && doc8 docs && cd docs && make html SPHINXOPTS="-W"
+docstyle: docs-install
+	$(PYTHON) -m pydocstyle $(PROJECT_NAME) && doc8 docs && make -C docs html SPHINXOPTS="-W"
 
-doc: doc-install
-	cd docs && make html && cd _build/html && python3 -m http.server
+docs: docs-install
+	make -C docs html && cd _build/html && $(PYTHON) -m http.server
 
-spelling: doc-install
-	cd docs && make spelling SPHINXOPTS="-W"
+spelling: docs-install
+	make -C docs spelling SPHINXOPTS="-W"
 
-doc-clean:
-	cd docs && make clean
+clean-docs:
+	make -C docs clean
 
-lint: flake8 py-format clang-format cpplint mypy docstyle spelling
+# Utility functions
+
+lint: flake8 py-format mypy clang-format cpplint addlicense
 
 format: py-format-install clang-format-install addlicense-install
-	isort $(PYTHON_FILES)
-	yapf -ir $(PYTHON_FILES)
-	clang-format-11 -style=file -i $(CPP_FILES)
+	$(PYTHON) -m isort --project torchopt $(PYTHON_FILES)
+	$(PYTHON) -m yapf --in-place --recursive $(PYTHON_FILES)
+	clang-format -style=file -i $(CXX_FILES)
 	addlicense -c $(COPYRIGHT) -l apache -y 2022 $(PROJECT_FOLDER)
+
+clean-py:
+	find . -type f -name '*.py[co]' -delete
+	find . -depth -type d -name ".mypy_cache" -exec rm -r "{}" +
+	find . -depth -type d -name ".pytest_cache" -exec rm -r "{}" +
+
+clean-build:
+	rm -rf build/ dist/
+	rm -rf *.egg-info .eggs
+
+clean: clean-py clean-build clean-docs
diff --git a/README.md b/README.md
index 24f53664..dab7ff49 100644
--- a/README.md
+++ b/README.md
@@ -1,14 +1,19 @@
+
+
- +
**TorchOpt** is a high-performance optimizer library built upon [PyTorch](https://pytorch.org/) for easy implementation of functional optimization and gradient-based meta-learning. It consists of two main features: + - TorchOpt provides functional optimizer which enables [JAX-like](https://github.com/google/jax) composable functional optimizer for PyTorch. With TorchOpt, one can easily conduct neural network optimization in PyTorch with functional style optimizer, similar to [Optax](https://github.com/deepmind/optax) in JAX. -- With the desgin of functional programing, TorchOpt provides efficient, flexible, and easy-to-implement differentiable optimizer for gradient-based meta-learning research. It largely reduces the efforts required to implement sophisticated meta-learning algorithms. +- With the design of functional programing, TorchOpt provides efficient, flexible, and easy-to-implement differentiable optimizer for gradient-based meta-learning research. It largely reduces the efforts required to implement sophisticated meta-learning algorithms. -------------------------------------------------------------------------------- + The README is organized as follows: + - [TorchOpt as Functional Optimizer](#torchopt-as-functional-optimizer) - [Optax-Like API](#optax-like-api) - [PyTorch-Like API](#pytorch-like-api) @@ -23,11 +28,16 @@ The README is organized as follows: - [The Team](#the-team) - [Citing TorchOpt](#citing-torchopt) +-------------------------------------------------------------------------------- ## TorchOpt as Functional Optimizer -The desgin of TorchOpt follows the philosophy of functional programming. Aligned with [functorch](https://github.com/pytorch/functorch), users can conduct functional style programing with models, optimizers and training in PyTorch. We use the Adam optimizer as an example in the following illustration. You can also check out the tutorial notebook [Functional Optimizer](./tutorials/1_Functional_Optimizer.ipynb) for more details. + +The design of TorchOpt follows the philosophy of functional programming. Aligned with [`functorch`](https://github.com/pytorch/functorch), users can conduct functional style programing with models, optimizers and training in PyTorch. We use the Adam optimizer as an example in the following illustration. You can also check out the tutorial notebook [Functional Optimizer](./tutorials/1_Functional_Optimizer.ipynb) for more details. + ### Optax-Like API -For those users who prefer fully functional programing, we offer Optax-Like API by passing gradients and optimizers states to the optimizer function. We design base class `torchopt.Optimizer` that has the same interface as `torch.optim.Optimizer`. Here is an example coupled with functorch: + +For those users who prefer fully functional programing, we offer Optax-Like API by passing gradients and optimizers states to the optimizer function. We design base class `torchopt.Optimizer` that has the same interface as `torch.optim.Optimizer`. Here is an example coupled with `functorch`: + ```python import functorch import torch @@ -52,9 +62,12 @@ grad = torch.autograd.grad(loss, params) # compute gradients updates, opt_state = optimizer.update(grad, opt_state) # get updates params = torchopt.apply_updates(params, updates) # update network parameters ``` + ### PyTorch-Like API + We also offer origin PyTorch APIs (e.g. 
`zero_grad()` or `step()`) by warpping our Optax-Like API for traditional PyTorch user: + ```python net = Net() # init loader = Loader() @@ -66,21 +79,29 @@ optimizer.zero_grad() # zero gradients loss.backward() # backward optimizer.step() # step updates ``` + ### Differentiable + On top of the same optimization function as `torch.optim`, an important benefit of functional optimizer is that one can implement differentiable optimization easily. This is particularly helpful when the algorithm requires to differentiate through optimization update (such as meta learning practices). We take as the inputs the gradients and optimizer states, use non-in-place operators to compute and output the updates. The processes can be automatically implemented, with the only need from users being to pass the argument `inplace=False` to the functions: + ```python # get updates updates, opt_state = optimizer.update(grad, opt_state, inplace=False) # update network parameters params = torchopt.apply_updates(params, updates, inplace=False) ``` + +-------------------------------------------------------------------------------- + ## TorchOpt as Differentiable Optimizer for Meta-Learning + Meta-Learning has gained enormous attention in both Supervised Learning and Reinforcement Learning. Meta-Learning algorithms often contain a bi-level optimisation process with *inner loop* updating the network parameters and *outer loop* updating meta parameters. The figure below illustrates the basic formulation for meta-optimization in Meta-Learning. The main feature is that the gradients of *outer loss* will back-propagate through all `inner.step` operations. +
- +
-Since network parameters become a node of computation graph, a flexible Meta-Learning library should enable users manually control the gradient graph connection which means that users should have access to the network parameters and optimizer states for manually detaching or connecting the computation graph. In PyTorch designing, the network parameters or optimizer states are members of network (a.k.a. `nn.Module`) or optimizer (a.k.a. `optim.Optimizer`), this design significantly introducing difficulty for user control network parameters or optimizer states. Previous differentiable optimizer Repo [higher](https://github.com/facebookresearch/higher), [learn2learn](https://github.com/learnables/learn2learn) follows the PyTorch designing which leads to inflexible API. +Since network parameters become a node of computation graph, a flexible Meta-Learning library should enable users manually control the gradient graph connection which means that users should have access to the network parameters and optimizer states for manually detaching or connecting the computation graph. In PyTorch designing, the network parameters or optimizer states are members of network (a.k.a. `nn.Module`) or optimizer (a.k.a. `optim.Optimizer`), this design significantly introducing difficulty for user control network parameters or optimizer states. Previous differentiable optimizer Repo [`higher`](https://github.com/facebookresearch/higher), [`learn2learn`](https://github.com/learnables/learn2learn) follows the PyTorch designing which leads to inflexible API. In contrast to them, TorchOpt realizes differentiable optimizer with functional programing, where Meta-Learning researchers could control the network parameters or optimizer states as normal variables (a.k.a. `torch.Tensor`). This functional optimizer design of TorchOpt is beneficial for implementing complex gradient flow Meta-Learning algorithms and allow us to improve computational efficiency by using techniques like operator fusion. @@ -88,9 +109,10 @@ In contrast to them, TorchOpt realizes differentiable optimizer with functional Since network parameters become a node of computation graph, a flexible meta-learning library should enable users manually control the gradient graph connection which means that users should have access to the network parameters and optimizer states for manually detaching or connecting the computation graph. In the PyTorch design, the network parameters or optimizer states are members of network (a.k.a. `nn.Module`) or optimizer (a.k.a. `optim.Optimizer`), this design incurs difficulties for user to control network parameters or optimizer states. -We hope meta-learning researchers could control the network parameters or optimizer states as normal variables (a.k.a. `torch.Tensor`). Inspired by [Optax](https://github.com/deepmind/optax), we think designing a functional style optimizer that treat network parameters or optimizer states as variables instead of class members, which mathces our demond of making network parameters or optimizer states. This design would be beneficial for implementing complex gradient flow meta-learning algorithms and allow us to dig potential performance by using techniques like operator fusion. --> +We hope meta-learning researchers could control the network parameters or optimizer states as normal variables (a.k.a. `torch.Tensor`). 
Inspired by [Optax](https://github.com/deepmind/optax), we think that designing a functional-style optimizer, which treats network parameters or optimizer states as variables instead of class members, matches our demand of making network parameters or optimizer states directly accessible. This design would be beneficial for implementing complex gradient flow meta-learning algorithms and allows us to unlock potential performance by using techniques like operator fusion. -->

### Meta-Learning API
+
- We design a base class `torchopt.MetaOptimizer` for managing network updates in Meta-Learning. The constructor of `MetaOptimizer` takes as input the network rather than network parameters. `MetaOptimizer` exposes the interface `step(loss)`, which takes the loss as input and steps the network parameters. Refer to the tutorial notebook [Meta Optimizer](./tutorials/2_Meta_Optimizer.ipynb) for more details.
- We offer `torchopt.chain`, which can apply a list of chainable update transformations. Combined with `MetaOptimizer`, it can help you conduct gradient transformations such as gradient clipping before the meta-optimizer steps. Refer to the tutorial notebook [Meta Optimizer](./tutorials/2_Meta_Optimizer.ipynb) for more details.
@@ -100,7 +122,8 @@ We hope meta-learning researchers could control the network parameters or optimi
We give an example of [MAML](https://arxiv.org/abs/1703.03400) with an inner-loop Adam optimizer to illustrate TorchOpt APIs:

```python
-net = Net()  # init
+net = Net()  # init
+
# the constructor `MetaOptimizer` takes as input the network
inner_optim = torchopt.MetaAdam(net)
outer_optim = torchopt.Adam(net.parameters())
@@ -137,66 +160,93 @@ for train_iter in range(train_iters):
    torchopt.stop_gradient(net)
    torchopt.stop_gradient(inner_optim)
```
+
+--------------------------------------------------------------------------------
+
## Examples
-In *examples/*, we offer serveral examples of functional optimizer and 5 light-weight meta-learning examples with TorchOpt. The meta-learning examples covers 2 Supervised Learning and 3 Reinforcement Learning algorithms.
+
+In [`examples`](examples), we offer several examples of functional optimizers and 5 light-weight meta-learning examples with TorchOpt. The meta-learning examples cover 2 Supervised Learning and 3 Reinforcement Learning algorithms.
+
- [Model Agnostic Meta Learning (MAML)-Supervised Learning](https://arxiv.org/abs/1703.03400) (ICML2017)
- [Learning to Reweight Examples for Robust Deep Learning](https://arxiv.org/pdf/1803.09050.pdf) (ICML2018)
- [Model Agnostic Meta Learning (MAML)-Reinforcement Learning](https://arxiv.org/abs/1703.03400) (ICML2017)
- [Meta Gradient Reinforcement Learning (MGRL)](https://proceedings.neurips.cc/paper/2018/file/2715518c875999308842e3455eda2fe3-Paper.pdf) (NeurIPS 2018)
- [Learning through opponent learning process (LOLA)](https://arxiv.org/abs/1709.04326) (AAMAS 2018)

+--------------------------------------------------------------------------------
+
## High-Performance
+
One can think of the gradient-scaling procedures of optimizer algorithms as a combination of several basic operations. For example, the implementation of the Adam algorithm often includes addition, multiplication, power and square operations; one can fuse these operations into several compound functions. The operator fusion could greatly simplify the computation graph and reduce GPU function launch stalls. In addition, one can also implement the optimizer backward function and manually reuse some intermediate tensors to improve the backward performance.
Users can pass the argument `use_accelerated_op=True` to `adam`, `Adam` and `MetaAdam` to enable the fused accelerated operator. The arguments are the same between the two kinds of implementations.

-Here we evaluate the performance using the maml-omniglot code with the inner-loop Adam optimizer on GPU. We comparble the run time of the overall algorithm and the meta-optimization (outer-loop optimization) under different network architecture/inner-step numbers. We choose [higher](https://github.com/facebookresearch/higher) as our baseline. The figure below illustrate that our accelerated Adam can achieve at least 1/3 efficiency improvement over the baseline.
+Here we evaluate the performance using the maml-omniglot code with the inner-loop Adam optimizer on GPU. We compare the run time of the overall algorithm and the meta-optimization (outer-loop optimization) under different network architectures and numbers of inner steps. We choose [`higher`](https://github.com/facebookresearch/higher) as our baseline. The figure below illustrates that our accelerated Adam can achieve an efficiency improvement of at least 1/3 over the baseline.
+
- +
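As a concrete illustration, here is a minimal sketch of enabling the fused operator (the network shape and the availability check are assumptions for this example, not part of the benchmark above):

```python
import torch
import torch.nn as nn

import torchopt

# Optionally probe whether the fused operator is available on the target device
assert torchopt.accelerated_op_available(torch.device('cuda'))

net = nn.Linear(8, 1).cuda()  # illustrative network
# Same constructor usage as the default implementation; only the extra flag differs
inner_optim = torchopt.MetaAdam(net, use_accelerated_op=True)
```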
Notably, the operator fusion not only increases performance but also helps simplify the computation graph, which will be discussed in the next section.

+--------------------------------------------------------------------------------
+
## Visualization
-Complex gradient flow in meta-learning brings in a great challenge for managing the gradient flow and verifying the correctness of it. TorchOpt provides a visualization tool that draw variable (e.g. network parameters or meta parameters) names on the gradient graph for better analyzing. The visualization tool is modified from [torchviz](https://github.com/szagoruyko/pytorchviz). We provide an example using the [visualization code](./examples/visualize.py). Also refer to the notebook [Visualization](./tutorials/3_Visualization.ipynb) for more details.
-The figure below show the visulization result. Compared with torchviz, TorchOpt fuses the operations within the Adam together (orange) to reduce the complexity and provide simpler visualization.
+The complex gradient flow in meta-learning poses a great challenge for managing the gradient flow and verifying its correctness. TorchOpt provides a visualization tool that draws variable names (e.g. network parameters or meta-parameters) on the gradient graph for easier analysis. The visualization tool is modified from [`torchviz`](https://github.com/szagoruyko/pytorchviz). We provide an example using the [visualization code](./examples/visualize.py). Also refer to the notebook [Visualization](./tutorials/3_Visualization.ipynb) for more details.
+
+The figure below shows the visualization result. Compared with [`torchviz`](https://github.com/szagoruyko/pytorchviz), TorchOpt fuses the operations within Adam together (orange) to reduce the complexity and provide a simpler visualization.
- +
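A minimal usage sketch, mirroring the tutorial notebook (rendering to a file assumes the returned object is a `graphviz.Digraph`, as in `torchviz`, and that Graphviz is installed):

```python
import torch

import torchopt

x = torch.tensor(1., requires_grad=True)
y = 2 * x

# `make_dot` labels the nodes of the backward graph with the names passed in `params`
dot = torchopt.visual.make_dot(y, params={'x': x, 'y': y})
dot.render('graph', format='svg')  # writes `graph.svg`
```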
+--------------------------------------------------------------------------------
+
## Installation
+
Requirements
- - (Optional) For visualizing computation graphs
-   - [Graphviz](https://graphviz.org/download/) (for Linux users use `apt/yum install graphviz` or `conda install -c anaconda python-graphviz`)
+
+- PyTorch
+- JAX
+- (Optional) For visualizing computation graphs
+  - [Graphviz](https://graphviz.org/download/) (for Linux users use `apt/yum install graphviz` or `conda install -c anaconda python-graphviz`)
+
```bash
-pip install torchopt
+pip3 install torchopt
```

You can also build shared libraries from source, use:
+
```bash
-git clone git@github.com:metaopt/torchopt.git
-cd torchopt
-python setup.py build_from_source
+git clone https://github.com/metaopt/TorchOpt.git
+cd TorchOpt
+pip3 install .
```
+
+--------------------------------------------------------------------------------
+
## Future Plan
+
- [ ] Support general implicit differentiation with functional programming.
- [ ] Support more optimizers such as AdamW, RMSProp
- [ ] CPU-accelerated optimizer

+--------------------------------------------------------------------------------
+
## The Team
+
TorchOpt is a work by Jie Ren, Xidong Feng, [Bo Liu](https://github.com/Benjamin-eecs/), [Luo Mai](https://luomai.github.io/) and [Yaodong Yang](https://www.yangyaodong.com/).

## Citing TorchOpt

If you find TorchOpt useful, please cite it in your publications.

-```
+```bibtex
@software{TorchOpt,
  author = {Jie Ren and Xidong Feng and Bo Liu and Luo Mai and Yaodong Yang},
  title = {TorchOpt},
  year = {2022},
  publisher = {GitHub},
  journal = {GitHub repository},
-  howpublished = {\url{https://github.com/metaopt/torchopt}},
+  howpublished = {\url{https://github.com/metaopt/TorchOpt}},
}
```
diff --git a/conda-recipe.yaml b/conda-recipe.yaml
new file mode 100644
index 00000000..ba90aa56
--- /dev/null
+++ b/conda-recipe.yaml
@@ -0,0 +1,69 @@
+# Create virtual environment with command:
+#
+#   conda env create --file conda-recipe.yaml
+#
+
+name: torchopt
+
+channels:
+  - pytorch
+  - defaults
+  - nvidia
+  - conda-forge
+
+dependencies:
+  - python = 3.8
+
+  # Learning
+  - pytorch::pytorch = 1.11
+  - pytorch::torchvision
+  - jax
+  - jaxlib
+  - tensorboard
+  - wandb
+  - pip:
+      - functorch
+
+  # Device select
+  - nvidia::cudatoolkit = 11.3.1
+  - cudnn
+
+  # Build toolkit
+  - cmake >= 3.4
+  - make
+  - cxx-compiler
+  - gxx >= 6.0, < 12.0
+  - nvidia/label/cuda-11.3.1::cuda-minimal-build
+  - pybind11
+
+  # Misc
+  - pip
+  - typing-extensions
+  - numpy
+  - matplotlib-base
+  - seaborn
+  - python-graphviz
+  - pillow
+
+  # Documentation
+  - sphinx
+  - sphinxcontrib-spelling
+  - sphinx-autobuild
+  - sphinx-copybutton
+  - sphinx_rtd_theme
+
+  # Testing
+  - pytest
+  - pytest-cov
+  - pytest-xdist
+  - isort
+  - yapf
+  - mypy
+  - flake8
+  - flake8-bugbear
+  - doc8
+  - pydocstyle
+  - pyenchant
+  - clang-format
+  - clang-tools  # clang-tidy
+  - cpplint
diff --git a/docker/dev.dockerfile b/docker/dev.dockerfile
index 01c00a0e..f8d26b24 100644
--- a/docker/dev.dockerfile
+++ b/docker/dev.dockerfile
@@ -3,7 +3,7 @@
CPU_PARENT=ubuntu:18.04
GPU_PARENT=nvidia/cuda:10.1-cudnn7-runtime-ubuntu18.04

-TAG=metaopt/torchopt
+TAG=metaopt/TorchOpt
VERSION=$(shell git log -1 --format=%h)

if [[ ${USE_GPU} == "True" ]]; then
diff --git a/docs/conf.py b/docs/conf.py
index 4b42352a..5b69ee0e 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -14,16 +14,20 @@
# import sys
# sys.path.insert(0, os.path.abspath('.'))

-import os
+import pathlib
+import sys

import sphinx_rtd_theme

+HERE =
pathlib.Path(__file__).absolute().parent +PROJECT_ROOT = HERE.parent + + def get_version() -> str: - # https://packaging.python.org/guides/single-sourcing-package-version/ - with open(os.path.join("..", "torchopt", "__init__.py"), "r") as f: - init = f.read().split() - return init[init.index("__version__") + 2][1:-1] + sys.path.insert(0, str(PROJECT_ROOT / 'torchopt')) + import version # noqa + return version.__version__ # -- Project information ----------------------------------------------------- diff --git a/docs/index.rst b/docs/index.rst index 90bf6a38..e4ffd624 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,4 +1,4 @@ -:github_url: https://github.com/metaopt/torchopt/tree/main/docs +:github_url: https://github.com/metaopt/TorchOpt/tree/main/docs TorchOpt -------- @@ -24,9 +24,9 @@ You can also build shared libraries from source, use: .. code-block:: bash - git clone git@github.com:metaopt/torchopt.git - cd torchopt - python setup.py build_from_source + git clone https://github.com/metaopt/TorchOpt.git + cd TorchOpt + pip3 install . The Team -------- @@ -37,7 +37,7 @@ Support ------- If you are having issues, please let us know by filing an issue on our -`issue tracker `_. +`issue tracker `_. License ------- diff --git a/examples/L2R/helper/argument.py b/examples/L2R/helper/argument.py index 1440f27a..34bd8502 100644 --- a/examples/L2R/helper/argument.py +++ b/examples/L2R/helper/argument.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/examples/L2R/result.png b/examples/L2R/result.png old mode 100755 new mode 100644 diff --git a/examples/LOLA/README.md b/examples/LOLA/README.md old mode 100755 new mode 100644 diff --git a/examples/LOLA/helper/agent.py b/examples/LOLA/helper/agent.py old mode 100755 new mode 100644 index 969a04f7..58fdae7c --- a/examples/LOLA/helper/agent.py +++ b/examples/LOLA/helper/agent.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/examples/LOLA/helper/argument.py b/examples/LOLA/helper/argument.py old mode 100755 new mode 100644 index b8e67cc5..39618134 --- a/examples/LOLA/helper/argument.py +++ b/examples/LOLA/helper/argument.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/examples/LOLA/helper/env.py b/examples/LOLA/helper/env.py old mode 100755 new mode 100644 index df4522f6..1367d845 --- a/examples/LOLA/helper/env.py +++ b/examples/LOLA/helper/env.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/examples/LOLA/helper/utils.py b/examples/LOLA/helper/utils.py old mode 100755 new mode 100644 index 6b487a40..8cdd3396 --- a/examples/LOLA/helper/utils.py +++ b/examples/LOLA/helper/utils.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/examples/LOLA/lola_dice.py b/examples/LOLA/lola_dice.py old mode 100755 new mode 100644 diff --git a/examples/LOLA/result.png b/examples/LOLA/result.png old mode 100755 new mode 100644 diff --git a/examples/LOLA/visualise.py b/examples/LOLA/visualise.py old mode 100755 new mode 100644 index 2640f6a7..26b53f1e --- a/examples/LOLA/visualise.py +++ b/examples/LOLA/visualise.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/examples/MAML-RL/README.md b/examples/MAML-RL/README.md old mode 100755 new mode 100644 diff --git a/examples/MAML-RL/helpers/__init__.py b/examples/MAML-RL/helpers/__init__.py index e8761adc..d7b9a7f0 100644 --- a/examples/MAML-RL/helpers/__init__.py +++ b/examples/MAML-RL/helpers/__init__.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -18,6 +18,7 @@ from gym.envs.registration import register + register( 'TabularMDP-v0', entry_point='helpers.Tabular_mdp:TabularMDPEnv', diff --git a/examples/MAML-RL/helpers/policy.py b/examples/MAML-RL/helpers/policy.py index 66ab1fa3..9b32b8c8 100644 --- a/examples/MAML-RL/helpers/policy.py +++ b/examples/MAML-RL/helpers/policy.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/examples/MAML-RL/run_MAML.py b/examples/MAML-RL/run_MAML.py index 252f25e0..3e7571d2 100644 --- a/examples/MAML-RL/run_MAML.py +++ b/examples/MAML-RL/run_MAML.py @@ -25,6 +25,7 @@ from .helpers.policy import CategoricalMLPPolicy + TASK_NUM = 40 TRAJ_NUM = 20 TRAJ_LEN = 10 diff --git a/examples/few-shot/maml-omniglot.py b/examples/few-shot/maml-omniglot.py index 856f8f01..f651f127 100644 --- a/examples/few-shot/maml-omniglot.py +++ b/examples/few-shot/maml-omniglot.py @@ -55,6 +55,7 @@ from .support.omniglot_loaders import OmniglotNShot + mpl.use('Agg') plt.style.use('bmh') diff --git a/examples/visualize.py b/examples/visualize.py index 028669e9..7360dc3b 100644 --- a/examples/visualize.py +++ b/examples/visualize.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/image/time.png b/image/time.png old mode 100755 new mode 100644 diff --git a/image/torchviz_torchopt.jpg b/image/torchviz_torchopt.jpg old mode 100755 new mode 100644 diff --git a/requirements.txt b/requirements.txt index cdff8c3e..41bb1b8a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,5 @@ +torch==1.11 jax[cpu] +numpy graphviz -torch \ No newline at end of file +typing-extensions diff --git a/setup.cfg b/setup.cfg index f43fc9bc..4f82d935 100644 --- a/setup.cfg +++ b/setup.cfg @@ -2,9 +2,10 @@ based_on_style = yapf indent_width = 4 continuation_indent_width = 4 +column_limit = 100 spaces_before_comment = 2 dedent_closing_brackets = true -column_limit = 100 +blank_lines_between_top_level_imports_and_variables = 2 [flake8] exclude = @@ -16,9 +17,10 @@ convention = google [isort] profile = black -multi_line_output = 3 indent = 4 -line_length = 79 +line_length = 100 +lines_after_imports = 2 +multi_line_output = 3 [mypy] allow_redefinition = True diff --git a/setup.py b/setup.py index 1c4df4a0..cbc52b43 100644 --- a/setup.py +++ b/setup.py @@ -1,110 +1,116 @@ import os import pathlib +import shutil import sys from setuptools import find_packages, setup -from setuptools.command.build_ext import build_ext -from torch.utils import cpp_extension - - -class MyBuild(build_ext): - def run(self): - self.build_cmake() - - def copy(self, build_temp): - from distutils.file_util import copy_file - cwd = str(pathlib.Path().absolute()) - src = os.path.join('.', build_temp, 'src') - ops = os.listdir(src) - for op in ops: - op_path = os.path.join(src, op) - if not os.path.isdir(op_path): + + +try: + from pybind11.setup_helpers import Pybind11Extension as Extension + from pybind11.setup_helpers import build_ext +except ImportError: + from setuptools import Extension + from setuptools.command.build_ext import build_ext + +HERE = pathlib.Path(__file__).absolute().parent + +sys.path.insert(0, str(HERE / 'torchopt')) +import version # noqa + + +class CMakeExtension(Extension): + def __init__(self, name, source_dir='.', **kwargs): + super().__init__(name, sources=[], **kwargs) + self.source_dir = os.path.abspath(source_dir) + + +class 
cmake_build_ext(build_ext): + def copy(self, extdir): + for op_path in pathlib.Path(extdir).iterdir(): + if not op_path.is_dir(): continue - files = os.listdir(op_path) - for file in files: - if file.split('.')[-1] == 'so': - copy_file(os.path.join(op_path, file), - os.path.join(cwd, 'torchopt', '_lib')) - - def build_cmake(self): - cwd = pathlib.Path().absolute() - - build_temp = str(pathlib.Path(self.build_temp)) - os.makedirs(build_temp, exist_ok=True) - - config = "Debug" if self.debug else "Release" - - PYTHON_INCLUDE_DIR = ";".join(self.include_dirs) - TORCH_INCLUDE_PATH = ";".join(cpp_extension.include_paths()) - TORCH_LIBRARY_PATH = ";".join(cpp_extension.library_paths()) - - cmake_args = [ - f"-DCMAKE_BUILD_TYPE={config}", - f"-DPYTHON_EXECUTABLE={sys.executable}", - f"-DPYTHON_INCLUDE_DIR={PYTHON_INCLUDE_DIR}", - f"-DTORCH_INCLUDE_PATH={TORCH_INCLUDE_PATH}", - f"-DTORCH_LIBRARY_PATH={TORCH_LIBRARY_PATH}", - ] - - build_args = ["--config", config, "--", "-j4"] - - os.chdir(build_temp) - self.spawn(["cmake", f"{str(cwd)}"] + cmake_args) - if not self.dry_run: - self.spawn(["cmake", "--build", "."] + build_args) - os.chdir(str(cwd)) - self.copy(build_temp) - - -class download_shared(): - def __init__(self): - import urllib - dir_path = os.path.dirname(os.path.realpath(__file__)) - print(f"setup.py at {dir_path}") - print("downloading shared libraries") - op_urls = [] - if sys.version_info >= (3, 8) and sys.version_info < (3, 9): - op_urls.append( - "https://torchopt.oss-cn-beijing.aliyuncs.com/torch1_11/adam_op.cpython-38-x86_64-linux-gnu.so" - ) - elif sys.version_info >= (3, 9) and sys.version_info < (3, 10): - op_urls.append( - "https://torchopt.oss-cn-beijing.aliyuncs.com/torch1_11/adam_op.cpython-39-x86_64-linux-gnu.so" - ) - - if len(op_urls) == 0: - import warnings - warnings.warn("no pre-compiled libraries for you python version") - return - - for url in op_urls: - data = urllib.request.urlopen(url) - filename = url.rpartition('/')[-1] - file_path = os.path.join(dir_path, 'torchopt', '_lib', filename) - with open(file_path, 'wb') as f: - f.write(data.read()) - print("shared libraries downloaded") - - -if 'build_from_source' not in sys.argv: - download_shared() + for file in op_path.iterdir(): + if str(file).rpartition('.')[-1] == 'so': + shutil.copy(file, HERE / 'torchopt' / '_lib') + + def build_extensions(self): + import pybind11 + from torch.utils import cpp_extension + + cmake = shutil.which('cmake') + if cmake is None: + raise RuntimeError('Cannot find CMake executable.') + + build_temp = pathlib.Path(self.build_temp) + build_temp.mkdir(parents=True, exist_ok=True) + + config = 'Debug' if self.debug else 'Release' + + for ext in self.extensions: + extdir = os.path.abspath(os.path.dirname(self.get_ext_fullpath(ext.name))) + print(self.get_ext_fullpath(ext.name)) + + PYTHON_INCLUDE_DIR = ';'.join(self.include_dirs) + TORCH_INCLUDE_PATH = ';'.join(cpp_extension.include_paths()) + TORCH_LIBRARY_PATH = ';'.join(cpp_extension.library_paths()) + + cmake_args = [ + f'-DCMAKE_BUILD_TYPE={config}', + f'-DCMAKE_LIBRARY_OUTPUT_DIRECTORY_{config.upper()}={extdir}', + f'-DCMAKE_ARCHIVE_OUTPUT_DIRECTORY_{config.upper()}={self.build_temp}', + f'-DPYTHON_EXECUTABLE={sys.executable}', + f'-DPYBIND11_CMAKE_DIR={pybind11.get_cmake_dir()}', + f'-DPYTHON_INCLUDE_DIR={PYTHON_INCLUDE_DIR}', + f'-DTORCH_INCLUDE_PATH={TORCH_INCLUDE_PATH}', + f'-DTORCH_LIBRARY_PATH={TORCH_LIBRARY_PATH}', + ] + + build_args = ['--config', config] + + if ( + 'CMAKE_BUILD_PARALLEL_LEVEL' not in os.environ + and 
hasattr(self, 'parallel') and self.parallel + ): + build_args.append(f'-j{self.parallel}') + + try: + os.chdir(build_temp) + self.spawn(['cmake', ext.source_dir] + cmake_args) + if not self.dry_run: + self.spawn(['cmake', '--build', '.'] + build_args) + self.copy(extdir) + finally: + os.chdir(HERE) + setup( - name="torchopt", - version="0.4.1", - author="TorchOpt Contributors", - author_email="jieren9806@gmail.com, xidong.feng.20@ucl.ac.uk, benjaminliu.eecs@gmail.com", - description="A Jax-style optimizer.", - license="Apache License Version 2.0", - keywords="meta learning", - url="https://github.com/metaopt/torchopt", - packages=find_packages(), - package_data={"": ["_lib/*.so"]}, + name='torchopt', + version=version.__version__, + author='TorchOpt Contributors', + author_email='jieren9806@gmail.com, xidong.feng.20@ucl.ac.uk, benjaminliu.eecs@gmail.com', + description='A Jax-style optimizer for PyTorch.', + license='Apache License Version 2.0', + keywords='Meta-Learning, PyTorch, Optimizer', + url='https://github.com/metaopt/TorchOpt', + packages=find_packages(include=['torchopt', 'torchopt.*']), + package_data={'sharedlib': ['_lib/*.so']}, include_package_data=True, - cmdclass={'build_from_source': MyBuild}, + cmdclass={'build_ext': cmake_build_ext}, + ext_modules=[ + CMakeExtension('torchopt._lib.adam_op', source_dir=HERE) + ], + setup_requires=[ # for `torch.utils.cpp_extension` + 'torch==1.11', + 'numpy', + 'pybind11', + ], install_requires=[ - 'jax[cpu]', 'torch==1.11', + 'jax[cpu]', + 'numpy', 'graphviz', + 'typing-extensions', ], + python_requires='>=3.7' ) diff --git a/src/adam_op/adam_op_impl.cpp b/src/adam_op/adam_op_impl.cpp index 71807d09..ba3e4c7a 100644 --- a/src/adam_op/adam_op_impl.cpp +++ b/src/adam_op/adam_op_impl.cpp @@ -13,12 +13,13 @@ // limitations under the License. 
// ============================================================================== +#include "adam_op/adam_op_impl.h" + #include #include #include -#include "adam_op/adam_op_impl.h" #include "utils.h" namespace torchopt { diff --git a/tests/requirements.txt b/tests/requirements.txt index cdff8c3e..17d36433 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -1,3 +1,10 @@ -jax[cpu] -graphviz -torch \ No newline at end of file +--extra-index-url https://download.pytorch.org/whl/cu113 +torch==1.11 +torchvision +functorch + +--requirement ../requirements.txt + +pytest +pytest_cov +pytest_xdist diff --git a/tests/unit/high_level/test_high_level_inplace.py b/tests/unit/high_level/test_high_level_inplace.py index 04544ecf..69a7ff18 100644 --- a/tests/unit/high_level/test_high_level_inplace.py +++ b/tests/unit/high_level/test_high_level_inplace.py @@ -16,6 +16,7 @@ import copy import unittest +import pytest import torch import torch.nn.functional as F from torch.utils import data @@ -123,6 +124,7 @@ def test_accelerated_adam_cpu(self) -> None: mse = F.mse_loss(b, b_ref) self.assertAlmostEqual(float(mse), 0) + @pytest.mark.skipif(not torch.cuda.is_available(), reason='No CUDA device available.') def test_accelerated_adam_cuda(self) -> None: self.model.cuda() self.model_ref.cuda() diff --git a/tests/unit/low_level/test_low_level_inplace.py b/tests/unit/low_level/test_low_level_inplace.py index c34cd324..538642cc 100644 --- a/tests/unit/low_level/test_low_level_inplace.py +++ b/tests/unit/low_level/test_low_level_inplace.py @@ -17,6 +17,7 @@ import unittest import functorch +import pytest import torch import torch.nn.functional as F from torch.utils import data @@ -135,6 +136,7 @@ def test_accelerated_adam_cpu(self) -> None: mse = F.mse_loss(b, b_ref) self.assertAlmostEqual(float(mse), 0) + @pytest.mark.skipif(not torch.cuda.is_available(), reason='No CUDA device available.') def test_accelerated_adam_cuda(self) -> None: self.model.cuda() self.model_ref.cuda() diff --git a/third_party/pybind11 b/third_party/pybind11 deleted file mode 160000 index ad0de0f5..00000000 --- a/third_party/pybind11 +++ /dev/null @@ -1 +0,0 @@ -Subproject commit ad0de0f5a6bebbebbeb7f8f2f15c0c1430f34268 diff --git a/torchopt/__init__.py b/torchopt/__init__.py index 6672c724..b9ac2730 100644 --- a/torchopt/__init__.py +++ b/torchopt/__init__.py @@ -14,30 +14,14 @@ # ============================================================================== """TorchOpt: a high-performance optimizer library built upon PyTorch.""" -from torchopt._src import ( - accelerated_op_available, - clip, - combine, - hook, - schedule, - visual, -) +from torchopt._src import accelerated_op_available, clip, combine, hook, schedule, visual from torchopt._src.alias import adam, rmsprop, sgd from torchopt._src.optimizer import SGD, Adam, Optimizer, RMSProp, meta -from torchopt._src.optimizer.meta import ( - MetaAdam, - MetaOptimizer, - MetaRMSProp, - MetaSGD, -) +from torchopt._src.optimizer.meta import MetaAdam, MetaOptimizer, MetaRMSProp, MetaSGD from torchopt._src.update import apply_updates -from torchopt._src.utils import ( - extract_state_dict, - recover_state_dict, - stop_gradient, -) +from torchopt._src.utils import extract_state_dict, recover_state_dict, stop_gradient +from torchopt.version import __version__ -__version__ = "0.4.1" __all__ = [ "accelerated_op_available", diff --git a/torchopt/_lib/adam_op.py b/torchopt/_lib/adam_op.pyi similarity index 100% rename from torchopt/_lib/adam_op.py rename to torchopt/_lib/adam_op.pyi diff 
--git a/torchopt/_src/accelerated_op/__init__.py b/torchopt/_src/accelerated_op/__init__.py index ab494d23..70a22322 100644 --- a/torchopt/_src/accelerated_op/__init__.py +++ b/torchopt/_src/accelerated_op/__init__.py @@ -13,16 +13,19 @@ # limitations under the License. # ============================================================================== +import torch + from torchopt._src.accelerated_op.adam_op import AdamOp def accelerated_op_available(devices=None): - import torch op = AdamOp() + if devices is None: devices = [torch.device("cuda"), torch.device("cpu")] elif isinstance(devices, torch.device): devices = [devices] + try: for device in devices: updates = torch.tensor(1., device=device) diff --git a/torchopt/_src/base.py b/torchopt/_src/base.py index 03cd0b97..24d3c8a0 100644 --- a/torchopt/_src/base.py +++ b/torchopt/_src/base.py @@ -30,12 +30,14 @@ # limitations under the License. # ============================================================================== +from abc import abstractmethod from typing import Callable, NamedTuple, Tuple -import typing_extensions +from typing_extensions import Protocol from torchopt._src import typing + OptState = typing.TensorTree # States are arbitrary nests of `torch.Tensor`. # Parameters are arbitrary nests of `torch.Tensor`. Params = typing.TensorTree @@ -48,7 +50,7 @@ class EmptyState(NamedTuple): """An empty state for the simplest stateless transformations.""" -class TransformInitFn(typing_extensions.Protocol): +class TransformInitFn(Protocol): """A callable type for the `init` step of a `GradientTransformation`. The `init` step takes a tree of `params` and uses these to construct an @@ -56,6 +58,7 @@ class TransformInitFn(typing_extensions.Protocol): may hold statistics of the past updates or any other non static information. """ + @abstractmethod def __call__(self, params: Params) -> OptState: """The `init` function. @@ -66,10 +69,9 @@ def __call__(self, params: Params) -> OptState: Returns: The initial state of the gradient transformation. """ - ... -class TransformUpdateFn(typing_extensions.Protocol): +class TransformUpdateFn(Protocol): """A callable type for the `update` step of a `GradientTransformation`. The `update` step takes a tree of candidate parameter `updates` (e.g. their @@ -79,6 +81,7 @@ class TransformUpdateFn(typing_extensions.Protocol): access to the current values of the parameters. """ + @abstractmethod def __call__(self, updates: Updates, state: OptState, @@ -96,7 +99,6 @@ def __call__(self, Returns: The transformed updates, and the updated state. """ - ... class GradientTransformation(NamedTuple): diff --git a/torchopt/_src/clip.py b/torchopt/_src/clip.py index c5da0812..52e164f0 100644 --- a/torchopt/_src/clip.py +++ b/torchopt/_src/clip.py @@ -22,6 +22,7 @@ from torchopt._src import base + ClipState = base.EmptyState diff --git a/torchopt/_src/transform.py b/torchopt/_src/transform.py index 7aef0c84..290c8000 100644 --- a/torchopt/_src/transform.py +++ b/torchopt/_src/transform.py @@ -38,6 +38,7 @@ from torchopt._src import base from torchopt._src.typing import ScalarOrSchedule, Schedule + ScaleState = base.EmptyState diff --git a/torchopt/_src/typing.py b/torchopt/_src/typing.py index 07b0e9e5..69096c99 100644 --- a/torchopt/_src/typing.py +++ b/torchopt/_src/typing.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -17,6 +17,7 @@ from torch import Tensor + Scalar = Union[float, int] Numeric = Union[Tensor, Scalar] diff --git a/torchopt/_src/visual.py b/torchopt/_src/visual.py index 696a1f77..898cab9c 100644 --- a/torchopt/_src/visual.py +++ b/torchopt/_src/visual.py @@ -18,11 +18,12 @@ import warnings from collections import namedtuple -from distutils.version import LooseVersion from typing import Dict, Generator import torch from graphviz import Digraph +from pkg_resources import parse_version + Node = namedtuple('Node', ('name', 'inputs', 'attr', 'op')) @@ -92,12 +93,11 @@ def make_dot(var, params=None, show_attrs=False, show_saved=False, max_attr_char to display for any given attribute. """ - if LooseVersion(torch.__version__) < LooseVersion("1.9") and \ - (show_attrs or show_saved): + if (parse_version(torch.__version__) < parse_version("1.9") and (show_attrs or show_saved)): warnings.warn( - "make_dot: showing grad_fn attributes and saved variables" - " requires PyTorch version >= 1.9. (This does NOT apply to" - " saved tensors saved by custom autograd functions.)" + "make_dot: showing grad_fn attributes and saved variables " + "requires PyTorch version >= 1.9. (This does NOT apply to " + "saved tensors saved by custom autograd functions.)" ) param_map = {} diff --git a/torchopt/version.py b/torchopt/version.py new file mode 100644 index 00000000..4359b2e3 --- /dev/null +++ b/torchopt/version.py @@ -0,0 +1,17 @@ +# Copyright 2022 MetaOPT Team. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""TorchOpt: a high-performance optimizer library built upon PyTorch.""" + +__version__ = "0.4.1" From b82b2fe4d6f62072b090cb99bc616cd2cc7cb7fc Mon Sep 17 00:00:00 2001 From: Xuehai Pan Date: Mon, 11 Jul 2022 15:04:38 +0800 Subject: [PATCH 09/19] deps: bump PyTorch version to 1.12 --- .github/workflows/lint.yml | 2 +- .github/workflows/release.yml | 2 +- .github/workflows/tests.yml | 2 +- CMakeLists.txt | 30 +++++++++++++++--------------- conda-recipe.yaml | 22 +++++++++++++--------- requirements.txt | 4 ++-- setup.py | 8 ++++---- tests/requirements.txt | 8 ++++---- 8 files changed, 41 insertions(+), 37 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index deb23ab6..1e91dde2 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -33,7 +33,7 @@ jobs: uses: Jimver/cuda-toolkit@v0.2.7 id: cuda-toolkit with: - cuda: "11.3.1" + cuda: "11.6.2" method: network sub-packages: '["nvcc"]' - run: | diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 974b8f49..45e24779 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -67,7 +67,7 @@ jobs: uses: Jimver/cuda-toolkit@v0.2.7 id: cuda-toolkit with: - cuda: "11.3.1" + cuda: "11.6.2" method: network sub-packages: '["nvcc"]' - run: | diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index ffeeced6..ab319544 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -35,7 +35,7 @@ jobs: uses: Jimver/cuda-toolkit@v0.2.7 id: cuda-toolkit with: - cuda: "11.3.1" + cuda: "11.6.2" method: network sub-packages: '["nvcc"]' - run: | diff --git a/CMakeLists.txt b/CMakeLists.txt index d97cac96..f5aaa5f8 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -77,11 +77,11 @@ system( COMMAND "${PYTHON_EXECUTABLE}" -c "print(__import__('platform').python_version())" ) -message("-- Use Python version: ${PYTHON_VERSION}") -message("-- Use Python executable: \"${PYTHON_EXECUTABLE}\"") +message(STATUS "Use Python version: ${PYTHON_VERSION}") +message(STATUS "Use Python executable: \"${PYTHON_EXECUTABLE}\"") if(NOT DEFINED PYTHON_INCLUDE_DIR) - message("-- Auto detecting Python include directory...") + message(STATUS "Auto detecting Python include directory...") system( STRIP OUTPUT_VARIABLE PYTHON_INCLUDE_DIR COMMAND "${PYTHON_EXECUTABLE}" -c "print(__import__('sysconfig').get_path('include'))" @@ -89,16 +89,16 @@ if(NOT DEFINED PYTHON_INCLUDE_DIR) endif() if("${PYTHON_INCLUDE_DIR}" STREQUAL "") - message(FATAL_ERROR "-- Python include directory not found") + message(FATAL_ERROR "Python include directory not found") else() - message("-- Detected Python include directory: \"${PYTHON_INCLUDE_DIR}\"") + message(STATUS "Detected Python include directory: \"${PYTHON_INCLUDE_DIR}\"") include_directories(${PYTHON_INCLUDE_DIR}) endif() set(PYBIND11_PYTHON_VERSION "${PYTHON_VERSION}") if(NOT DEFINED PYBIND11_CMAKE_DIR) - message("-- Auto detecting pybind11 CMake directory...") + message(STATUS "Auto detecting pybind11 CMake directory...") system( STRIP OUTPUT_VARIABLE PYBIND11_CMAKE_DIR COMMAND "${PYTHON_EXECUTABLE}" -m pybind11 --cmakedir @@ -106,14 +106,14 @@ if(NOT DEFINED PYBIND11_CMAKE_DIR) endif() if("${PYBIND11_CMAKE_DIR}" STREQUAL "") - message(FATAL_ERROR "-- Pybind11 CMake directory not found") + message(FATAL_ERROR "Pybind11 CMake directory not found") else() - message("-- Detected Pybind11 CMake directory: \"${PYBIND11_CMAKE_DIR}\"") + 
message(STATUS "Detected Pybind11 CMake directory: \"${PYBIND11_CMAKE_DIR}\"") find_package(pybind11 CONFIG PATHS "${PYBIND11_CMAKE_DIR}") endif() if(NOT DEFINED TORCH_INCLUDE_PATH) - message("-- Auto detecting PyTorch include directory...") + message(STATUS "Auto detecting PyTorch include directory...") system( STRIP OUTPUT_VARIABLE TORCH_INCLUDE_PATH COMMAND "${PYTHON_EXECUTABLE}" -c "print('\\\;'.join(__import__('torch.utils.cpp_extension', fromlist=[None]).include_paths()))" @@ -121,14 +121,14 @@ if(NOT DEFINED TORCH_INCLUDE_PATH) endif() if("${TORCH_INCLUDE_PATH}" STREQUAL "") - message(FATAL_ERROR "-- Torch include directory not found") + message(FATAL_ERROR "Torch include directory not found") else() - message("-- Detected Torch include directory: \"${TORCH_INCLUDE_PATH}\"") + message(STATUS "Detected Torch include directory: \"${TORCH_INCLUDE_PATH}\"") include_directories(${TORCH_INCLUDE_PATH}) endif() if(NOT DEFINED TORCH_LIBRARY_PATH) - message("-- Auto detecting PyTorch library directory...") + message(STATUS "Auto detecting PyTorch library directory...") system( STRIP OUTPUT_VARIABLE TORCH_LIBRARY_PATH COMMAND "${PYTHON_EXECUTABLE}" -c "print('\\\;'.join(__import__('torch.utils.cpp_extension', fromlist=[None]).library_paths()))" @@ -136,9 +136,9 @@ if(NOT DEFINED TORCH_LIBRARY_PATH) endif() if("${TORCH_LIBRARY_PATH}" STREQUAL "") - message(FATAL_ERROR "-- Torch library directory not found") + message(FATAL_ERROR "Torch library directory not found") else() - message("-- Detected Torch library directory: \"${TORCH_LIBRARY_PATH}\"") + message(STATUS "Detected Torch library directory: \"${TORCH_LIBRARY_PATH}\"") endif() unset(TORCH_LIBRARIES) @@ -148,7 +148,7 @@ foreach(VAR_PATH ${TORCH_LIBRARY_PATH}) list(APPEND TORCH_LIBRARIES "${TORCH_LIBRARY}") endforeach() -message("-- Detected Torch libraries: \"${TORCH_LIBRARIES}\"") +message(STATUS "Detected Torch libraries: \"${TORCH_LIBRARIES}\"") add_definitions(-D_GLIBCXX_USE_CXX11_ABI=0) diff --git a/conda-recipe.yaml b/conda-recipe.yaml index ba90aa56..98e37eda 100644 --- a/conda-recipe.yaml +++ b/conda-recipe.yaml @@ -1,6 +1,6 @@ # Create virtual environment with command: # -# conda env create --file conda-recipe.yaml +# $ CONDA_OVERRIDE_CUDA=11.7 conda env create --file conda-recipe.yaml # name: torchopt @@ -8,36 +8,40 @@ name: torchopt channels: - pytorch - defaults + - nvidia/label/cuda-11.6.2 - nvidia - conda-forge dependencies: - python = 3.8 + - pip # Learning - - pytorch::pytorch = 1.11 + - pytorch::pytorch = 1.12 - pytorch::torchvision + - pytorch::pytorch-mutex = *=*cuda* + - pip: + - functorch - jax - - jaxlib + - jaxlib >= 0.3=*cuda* + - optax - tensorboard - wandb - - pip: - - functorch # Device select - - nvidia::cudatoolkit = 11.3.1 + - nvidia::cudatoolkit = 11.6 - cudnn # Build toolkit - cmake >= 3.4 - make - cxx-compiler - - gxx >= 6.0, < 12.0 - - nvidia/label/cuda-11.3.1::cuda-minimal-build + - gxx = 10 + - nvidia/label/cuda-11.6.2::cuda-nvcc + - nvidia/label/cuda-11.6.2::cuda-cudart-dev - pybind11 # Misc - - pip - typing-extensions - numpy - matplotlib-base diff --git a/requirements.txt b/requirements.txt index 41bb1b8a..21fb120c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ -torch==1.11 -jax[cpu] +torch == 1.12 +jax[cpu] >= 0.3 numpy graphviz typing-extensions diff --git a/setup.py b/setup.py index cbc52b43..67f83c37 100644 --- a/setup.py +++ b/setup.py @@ -101,16 +101,16 @@ def build_extensions(self): CMakeExtension('torchopt._lib.adam_op', source_dir=HERE) ], setup_requires=[ # for 
`torch.utils.cpp_extension` - 'torch==1.11', + 'torch == 1.12', 'numpy', 'pybind11', ], install_requires=[ - 'torch==1.11', - 'jax[cpu]', + 'torch == 1.12', + 'jax[cpu] >= 0.3', 'numpy', 'graphviz', 'typing-extensions', ], - python_requires='>=3.7' + python_requires='>= 3.7' ) diff --git a/tests/requirements.txt b/tests/requirements.txt index 17d36433..d1b782e4 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -1,10 +1,10 @@ ---extra-index-url https://download.pytorch.org/whl/cu113 -torch==1.11 +--extra-index-url https://download.pytorch.org/whl/cu116 +torch == 1.12 torchvision functorch --requirement ../requirements.txt pytest -pytest_cov -pytest_xdist +pytest-cov +pytest-xdist From f677fb1dd908de9bbf98e0a91172023c0656956e Mon Sep 17 00:00:00 2001 From: Xuehai Pan Date: Tue, 12 Jul 2022 13:53:43 +0800 Subject: [PATCH 10/19] chore(workflow): use API token for PyPI upload --- .github/workflows/release.yml | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 45e24779..ceb45f06 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,4 +1,4 @@ -name: pypi +name: PyPI on: release: @@ -90,8 +90,9 @@ jobs: - name: Publish to PyPI env: - TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} - TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} + TWINE_USERNAME: "__token__" + TWINE_PASSWORD: ${{ secrets.PYPI_UPLOAD_TOKEN }} run: | "${DEFAULT_PYTHON}" -m pip install --upgrade twine - "${DEFAULT_PYTHON}" -m twine upload dist/* + "${DEFAULT_PYTHON}" -m twine upload --repository testpypi dist/* + "${DEFAULT_PYTHON}" -m twine upload --repository pypi dist/* From b8a8b2ea7e46976c096833075b34ff809eb18442 Mon Sep 17 00:00:00 2001 From: Xuehai Pan Date: Wed, 13 Jul 2022 15:11:16 +0800 Subject: [PATCH 11/19] deps: add linters to test requirements --- tests/requirements.txt | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/requirements.txt b/tests/requirements.txt index d1b782e4..2bbfedbe 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -8,3 +8,12 @@ functorch pytest pytest-cov pytest-xdist +isort +yapf +mypy +flake8 +flake8-bugbear +doc8 +pydocstyle +pyenchant +cpplint From 1ffa5822421a2260b04d9646798614faae2c95fe Mon Sep 17 00:00:00 2001 From: Xuehai Pan Date: Wed, 13 Jul 2022 15:08:54 +0800 Subject: [PATCH 12/19] docs(tutorials): update tutorial notebooks --- tutorials/1_Functional_Optimizer.ipynb | 298 +++++++++-------- tutorials/2_Visualization.ipynb | 307 ++++-------------- tutorials/3_Meta_Optimizer.ipynb | 426 ++++++++++++------------- tutorials/4_Stop_Gradient.ipynb | 304 +++++++++++------- 4 files changed, 610 insertions(+), 725 deletions(-) diff --git a/tutorials/1_Functional_Optimizer.ipynb b/tutorials/1_Functional_Optimizer.ipynb index 2dff7be4..467791c1 100644 --- a/tutorials/1_Functional_Optimizer.ipynb +++ b/tutorials/1_Functional_Optimizer.ipynb @@ -18,29 +18,26 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## 1. Basic API" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "In this first part, we will illustrate how TorchOpt can be used as a functional optimizer. We compare it with different api in Jax and PyTorch to help understand the similarity and dissimilarity. We use simple network, adam optimizer and MSE loss objective." + "## 1. Basic API\n", + "\n", + "In this first part, we will illustrate how TorchOpt can be used as a functional optimizer. 
We compare it with different APIs in [JAX](https://github.com/google/jax) and [PyTorch](https://pytorch.org) to help understand the similarity and dissimilarity. We use a simple network, the Adam optimizer, and an MSE loss objective."
   ]
  },
  {
   "cell_type": "code",
-   "execution_count": 2,
+   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
-    "import torch\n",
+    "from collections import OrderedDict\n",
+    "\n",
    "import functorch\n",
+    "import jax\n",
+    "import jax.numpy as jnp\n",
+    "import optax\n",
+    "import torch\n",
    "import torch.autograd\n",
    "import torch.nn as nn\n",
-    "import optax\n",
-    "import jax\n",
-    "from jax import numpy as jnp\n",
    "\n",
    "import torchopt\n",
    "\n",
    "\n",
    "class Net(nn.Module):\n",
    "    def __init__(self, dim):\n",
    "        super().__init__()\n",
-    "        self.fc = nn.Linear(dim, 1, bias=False)\n",
-    "        self.fc.weight.data = torch.ones_like(self.fc.weight.data)\n",
+    "        self.fc = nn.Linear(dim, 1, bias=True)\n",
+    "        nn.init.ones_(self.fc.weight)\n",
+    "        nn.init.zeros_(self.fc.bias)\n",
    "\n",
    "    def forward(self, x):\n",
-    "        return self.fc(x)"
+    "        return self.fc(x)\n",
+    "\n",
+    "\n",
+    "def mse(inputs, targets):\n",
+    "    return ((inputs - targets) ** 2).sum()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
-    "- Original JAX implementation\n",
+    "### 1.1 Original JAX implementation\n",
    "\n",
-    "The first example is jax implementation coupled with optax, which belongs to functional programing style."
+    "The first example is a JAX implementation coupled with [Optax](https://github.com/deepmind/optax), which follows the functional programming style."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "def origin_jax():\n",
-    "    learning_rate = 1.\n",
    "    batch_size = 1\n",
    "    dim = 1\n",
+    "    params = OrderedDict([('weight', jnp.ones((dim, 1))), ('bias', jnp.zeros((1,)))])\n",
+    "\n",
+    "    def model(params, x):\n",
+    "        return jnp.matmul(x, params['weight']) + params['bias']\n",
+    "\n",
+    "    # Obtain the `opt_state` that contains statistics for the optimizer\n",
+    "    learning_rate = 1.\n",
    "    optimizer = optax.adam(learning_rate)\n",
-    "    # Obtain the `opt_state` that contains statistics for the optimizer.\n",
-    "    params = {'w': jnp.ones((dim, 1))}\n",
    "    opt_state = optimizer.init(params)\n",
    "\n",
-    "    def compute_loss(params, x, y): return (\n",
-    "        (jnp.matmul(x, params['w']) - y) ** 2).sum()\n",
+    "    def compute_loss(params, x, y):\n",
+    "        pred = model(params, x)\n",
+    "        return mse(pred, y)\n",
    "\n",
    "    xs = 2 * jnp.ones((batch_size, dim))\n",
-    "    ys = jnp.ones((batch_size, ))\n",
+    "    ys = jnp.ones((batch_size, 1))\n",
+    "\n",
    "    grads = jax.grad(compute_loss)(params, xs, ys)\n",
    "    updates, opt_state = optimizer.update(grads, opt_state)\n",
-    "    print(params)\n",
+    "\n",
+    "    print('Parameters before update:', params)\n",
    "    params = optax.apply_updates(params, updates)\n",
-    "    print(params)"
+    "    print('Parameters after update:', params)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
-    {
-     "name": "stderr",
-     "output_type": "stream",
-     "text": [
-      "WARNING:absl:No GPU/TPU found, falling back to CPU. 
(Set TF_CPP_MIN_LOG_LEVEL=0 and rerun for more info.)\n"
-     ]
-    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
-      "{'w': DeviceArray([[1.]], dtype=float32)}\n",
-      "{'w': DeviceArray([[6.67572e-06]], dtype=float32)}\n"
+      "Parameters before update: {\n",
+      "    'weight': DeviceArray([[1.]], dtype=float32)),\n",
+      "    'bias': DeviceArray([0.], dtype=float32)\n",
+      "}\n",
+      "Parameters after update: {\n",
+      "    'weight': DeviceArray([[6.735325e-06]], dtype=float32),\n",
+      "    'bias': DeviceArray([-0.99999326], dtype=float32)\n",
+      "}"
     ]
    }
   ],
   "source": [
    "origin_jax()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
-    "- Functorch with TorchOpt\n",
+    "### 1.2 `functorch` with TorchOpt\n",
    "\n",
-    "The Second example is functorch coupled with TorchOpt. It basically follows the same structure with the jax example."
+    "The second example is [`functorch`](https://pytorch.org/functorch) coupled with TorchOpt. It basically follows the same structure as the JAX example."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "def func_torchopt():\n",
    "    batch_size = 1\n",
    "    dim = 1\n",
    "    net = Net(dim)\n",
-    "    func, params = functorch.make_functional(net)\n",
-    "\n",
-    "    lr = 1.\n",
-    "    optimizer = torchopt.adam(lr)\n",
+    "    model, params = functorch.make_functional(net)  # get the functional version of the model\n",
    "\n",
+    "    # Obtain the `opt_state` that contains statistics for the optimizer\n",
+    "    learning_rate = 1.\n",
+    "    optimizer = torchopt.adam(learning_rate)\n",
    "    opt_state = optimizer.init(params)\n",
    "\n",
-    "    xs = 2 * torch.ones(batch_size, dim)\n",
-    "    ys = torch.ones(batch_size)\n",
+    "    xs = 2 * torch.ones((batch_size, dim))\n",
+    "    ys = torch.ones((batch_size, 1))\n",
    "\n",
-    "    pred = func(params, xs)\n",
-    "    loss = ((pred - ys) ** 2).sum()\n",
-    "    grad = torch.autograd.grad(loss, params)\n",
-    "    updates, opt_state = optimizer.update(grad, opt_state)\n",
-    "    print(params)\n",
+    "    pred = model(params, xs)\n",
+    "    loss = mse(pred, ys)\n",
+    "\n",
+    "    grads = torch.autograd.grad(loss, params)\n",
+    "    updates, opt_state = optimizer.update(grads, opt_state)\n",
+    "    \n",
+    "    print('Parameters before update:', params)\n",
    "    params = torchopt.apply_updates(params, updates)\n",
-    "    print(params)"
+    "    print('Parameters after update:', params)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
-      "(Parameter containing:\n",
-      "tensor([[1.]], requires_grad=True),)\n",
-      "(Parameter containing:\n",
-      "tensor([[0.]], requires_grad=True),)\n"
+      "Parameters before update: (\n",
+      "    Parameter containing: tensor([[1.]], requires_grad=True),\n",
+      "    Parameter containing: tensor([0.], requires_grad=True)\n",
+      ")\n",
+      "Parameters after update: (\n",
+      "    Parameter containing: tensor([[0.]], requires_grad=True),\n",
+      "    Parameter containing: tensor([-1.], requires_grad=True)\n",
+      ")"
     ]
    }
   ],
   "source": [
    "func_torchopt()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
-    "- Full TorchOpt\n",
+    "### 1.3 Full TorchOpt\n",
    "\n",
-    "The Third example is to illustrate that TorchOpt can also directly replace torch.optim with exactly the same usage. Note the API \n",
-    "difference happens between torchopt.adam() and torchopt.Adam(). "
+    "The third example illustrates that TorchOpt can also directly replace `torch.optim` with exactly the same usage. Note that the API difference lies between `torchopt.adam()` and `torchopt.Adam()`."
] },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "def full_torchopt():\n",
    "    batch_size = 1\n",
    "    dim = 1\n",
    "    net = Net(dim)\n",
    "\n",
-    "    lr = 1.\n",
-    "    optim = torchopt.Adam(net.parameters(), lr=lr)\n",
+    "    learning_rate = 1.\n",
+    "    optim = torchopt.Adam(net.parameters(), lr=learning_rate)\n",
    "\n",
-    "    xs = 2 * torch.ones(batch_size, dim)\n",
-    "    ys = torch.ones(batch_size)\n",
+    "    xs = 2 * torch.ones((batch_size, dim))\n",
+    "    ys = torch.ones((batch_size, 1))\n",
    "\n",
    "    pred = net(xs)\n",
-    "    loss = ((pred - ys) ** 2).sum()\n",
+    "    loss = mse(pred, ys)\n",
    "\n",
-    "    print(net.fc.weight)\n",
+    "    print('Parameters before update:', dict(net.named_parameters()))\n",
    "    optim.zero_grad()\n",
    "    loss.backward()\n",
    "    optim.step()\n",
-    "    print(net.fc.weight)"
+    "    print('Parameters after update:', dict(net.named_parameters()))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
-      "Parameter containing:\n",
-      "tensor([[1.]], requires_grad=True)\n",
-      "Parameter containing:\n",
-      "tensor([[0.]], requires_grad=True)\n"
+      "Parameters before update: {\n",
+      "    'fc.weight': Parameter containing: tensor([[1.]], requires_grad=True),\n",
+      "    'fc.bias': Parameter containing: tensor([0.], requires_grad=True)\n",
+      "}\n",
+      "Parameters after update: {\n",
+      "    'fc.weight': Parameter containing: tensor([[0.]], requires_grad=True),\n",
+      "    'fc.bias': Parameter containing: tensor([-1.], requires_grad=True)\n",
+      "}"
     ]
    }
   ],
   "source": [
    "full_torchopt()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
-    "- Original PyTorch\n",
+    "### 1.4 Original PyTorch\n",
    "\n",
-    "The final example is to original PyTorch example with torch.optim."
+    "The final example is the original PyTorch example with `torch.optim`."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "def original_torch():\n",
    "    batch_size = 1\n",
    "    dim = 1\n",
    "    net = Net(dim)\n",
    "\n",
-    "    lr = 1.\n",
-    "    optim = torch.optim.Adam(net.parameters(), lr=lr)\n",
+    "    learning_rate = 1.\n",
+    "    optim = torch.optim.Adam(net.parameters(), lr=learning_rate)\n",
    "\n",
-    "    xs = 2 * torch.ones(batch_size, dim)\n",
-    "    ys = torch.ones(batch_size)\n",
+    "    xs = 2 * torch.ones((batch_size, dim))\n",
+    "    ys = torch.ones((batch_size, 1))\n",
    "\n",
    "    pred = net(xs)\n",
-    "    loss = ((pred - ys) ** 2).sum()\n",
+    "    loss = mse(pred, ys)\n",
    "\n",
-    "    print(net.fc.weight)\n",
+    "    print('Parameters before update:', dict(net.named_parameters()))\n",
    "    optim.zero_grad()\n",
    "    loss.backward()\n",
    "    optim.step()\n",
-    "    print(net.fc.weight)"
+    "    print('Parameters after update:', dict(net.named_parameters()))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
-      "Parameter containing:\n",
-      "tensor([[1.]], requires_grad=True)\n",
-      "Parameter containing:\n",
-      "tensor([[1.1921e-07]], requires_grad=True)\n"
+      "Parameters before update: {\n",
+      "    'fc.weight': Parameter containing: tensor([[1.]], requires_grad=True),\n",
+      "    'fc.bias': Parameter containing: tensor([0.], requires_grad=True)\n",
+      "}\n",
+      "Parameters after update: {\n",
+      "    'fc.weight': Parameter containing: tensor([[1.1921e-07]], requires_grad=True),\n",
+      "    'fc.bias': Parameter containing: tensor([-1.0000], requires_grad=True)\n",
+      "}"
     ]
    }
   ],
   "source": [
    "original_torch()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
-    "## 2. Differentiable Optimization with functional optimizor\n",
-    "Coupled with functional optimizer, you can conduct differentiable optimization by setting the inplce flag as False in update and apply_updates function. (which might be helpful for meta-learning algorithm implementation with functional programing style). \n",
    "\n",
-    "Note that torchopt.SGD, torchopt.Adam do not support differentiable optimization. Refer to the Meta Optimizer notebook for pytorch-like differentiable optimizers."
+    "## 2. 
Differentiable Optimization with Functional Optimizer\n",
+    "\n",
+    "Coupled with a functional optimizer, you can conduct differentiable optimization by setting the `inplace` flag to `False` in the `update` and `apply_updates` functions (which might be helpful for implementing meta-learning algorithms in a functional programming style).\n",
    "\n",
+    "Note that `torchopt.SGD` and `torchopt.Adam` do not support differentiable optimization. Refer to the Meta-Optimizer notebook for PyTorch-like differentiable optimizers."
   ]
  },
  {
   "cell_type": "code",
-   "execution_count": 28,
+   "execution_count": 10,
   "metadata": {},
   "outputs": [],
   "source": [
    "def differentiable():\n",
    "    batch_size = 1\n",
    "    dim = 1\n",
    "    net = Net(dim)\n",
-    "    func, params = functorch.make_functional(net)\n",
+    "    model, params = functorch.make_functional(net)  # get the functional version of the model\n",
    "\n",
-    "    lr = 1.\n",
-    "    # sgd example\n",
-    "    optimizer = torchopt.sgd(lr)\n",
-    "    meta_param = torch.tensor(1., requires_grad=True)\n",
+    "    # Meta-parameter\n",
+    "    meta_param = nn.Parameter(torch.ones(1))\n",
    "\n",
+    "    # SGD example\n",
+    "    learning_rate = 1.\n",
+    "    optimizer = torchopt.sgd(learning_rate)\n",
    "    opt_state = optimizer.init(params)\n",
    "\n",
-    "    xs = torch.ones(batch_size, dim)\n",
-    "    ys = torch.ones(batch_size)\n",
+    "    xs = torch.ones((batch_size, dim))\n",
+    "    ys = torch.ones((batch_size, 1))\n",
    "\n",
-    "    pred = func(params, xs)\n",
-    "    # where meta_param is used\n",
+    "    pred = model(params, xs)\n",
+    "    # Where meta_param is used\n",
    "    pred = pred + meta_param\n",
-    "    loss = ((pred - ys) ** 2).sum()\n",
-    "    grad = torch.autograd.grad(loss, params, create_graph=True)\n",
-    "    updates, opt_state = optimizer.update(grad, opt_state, inplace=False)\n",
-    "    params = torchopt.apply_updates(params, updates, inplace=False)\n",
+    "    loss = mse(pred, ys)\n",
    "\n",
-    "    pred = func(params, xs)\n",
-    "    loss = ((pred - ys) ** 2).sum()\n",
+    "    grads = torch.autograd.grad(loss, params, create_graph=True)\n",
+    "    updates, opt_state = optimizer.update(grads, opt_state, inplace=False)\n",
+    "    params = torchopt.apply_updates(params, updates, inplace=False)  # update parameters with single step SGD update\n",
+    "\n",
+    "    pred = model(params, xs)\n",
+    "    loss = mse(pred, ys)\n",
    "    loss.backward()\n",
    "\n",
-    "    print(meta_param.grad)"
+    "    print('Gradient for the meta-parameter:', meta_param.grad)"
   ]
  },
  {
   "cell_type": "code",
-   "execution_count": 29,
+   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
-      "tensor(8.)\n"
+      "Gradient for the meta-parameter: tensor([32.])\n"
     ]
    }
   ],
   "source": [
    "differentiable()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
-    "## 2.1. Track the gradient of moment\n",
-    "Note that most modern optimizers involve moment term in the gradient update (basically only SGD with momentum = 0 does not involve). We provide an option for user to choose whether to also track the meta-gradient through moment term. The default option is `moment_requires_grad=True`."
+    "### 2.1 Track the Gradient of Momentum\n",
+    "\n",
+    "Note that most modern optimizers involve a momentum term in the gradient update (basically, only SGD with `momentum = 0` does not). We provide an option for users to choose whether to also track the meta-gradient through the momentum term. The default option is `moment_requires_grad=True`."
] }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 12, "metadata": {}, "outputs": [], "source": [ @@ -370,7 +395,7 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": 13, "metadata": {}, "outputs": [], "source": [ @@ -379,7 +404,7 @@ }, { "cell_type": "code", - "execution_count": 27, + "execution_count": 14, "metadata": {}, "outputs": [], "source": [ @@ -390,60 +415,55 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## 3. Accletated Optimizer\n", - "Users can use acclerated optimizer by seeting the `use_accelerated_op` as True. Currently we only support the Adam optimizer." + "## 3. Accelerated Optimizer\n", + "\n", + "Users can use accelerated optimizer by setting the `use_accelerated_op` as `True`. Currently we only support the Adam optimizer." ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Check whether the accelerated_op is avariable:" + "Check whether the `accelerated_op` is available:" ] }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 15, "metadata": {}, "outputs": [ { - "data": { - "text/plain": [ - "True" - ] - }, - "execution_count": 3, - "metadata": {}, - "output_type": "execute_result" + "name": "stdout", + "output_type": "stream", + "text": [ + "True\n" + ] } ], "source": [ - "torchopt.accelerated_op_available(torch.device(\"cpu\"))" + "torchopt.accelerated_op_available(torch.device('cpu'))" ] }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 16, "metadata": {}, "outputs": [ { - "data": { - "text/plain": [ - "True" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" + "name": "stdout", + "output_type": "stream", + "text": [ + "True\n" + ] } ], "source": [ - "torchopt.accelerated_op_available(torch.device(\"cuda\"))" + "torchopt.accelerated_op_available(torch.device('cuda'))" ] }, { "cell_type": "code", - "execution_count": 24, + "execution_count": 17, "metadata": {}, "outputs": [], "source": [ @@ -453,7 +473,7 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": 18, "metadata": {}, "outputs": [], "source": [ @@ -462,13 +482,10 @@ } ], "metadata": { - "interpreter": { - "hash": "238ad0feaa04228775e5e27229169b0e3e76c0e018d5a6d65c4906ccad5c5a9e" - }, "kernelspec": { - "display_name": "OpTorch", + "display_name": "Python 3.8.13 64-bit", "language": "python", - "name": "optorch" + "name": "python3" }, "language_info": { "codemirror_mode": { @@ -480,7 +497,12 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.7" + "version": "3.8.13" + }, + "vscode": { + "interpreter": { + "hash": "2a8cc1ff2cbc47027bf9993941710d9ab9175f14080903d9c7c432ee63d681da" + } } }, "nbformat": 4, diff --git a/tutorials/2_Visualization.ipynb b/tutorials/2_Visualization.ipynb index c8593b94..61f2b489 100644 --- a/tutorials/2_Visualization.ipynb +++ b/tutorials/2_Visualization.ipynb @@ -4,14 +4,14 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Visualizatoin in TorchOpt" + "# Visualization in TorchOpt" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "In PyTorch, if the attribute `requires_grad` a tensor is `True`, the computation graph will be created if we use the tensor to do any operations. The computation graph is implemented likes link-list -- `Tensor`s are nodes and they are linked by their attribute `gran_fn`. PyTorchViz is a Python package that uses Graphviz as a backend for plotting computation graphs. 
TorchOpt use PyTorchViz as the blueprint and provide more easy-to-use visualization functions on the premise of supporting all its functions." + "In [PyTorch](https://pytorch.org), if the attribute `requires_grad` a tensor is `True`, the computation graph will be created if we use the tensor to do any operations. The computation graph is implemented likes link-list -- `Tensor`s are nodes and they are linked by their attribute `gran_fn`. [PyTorchViz](https://github.com/szagoruyko/pytorchviz) is a Python package that uses [Graphviz](https://graphviz.org) as a backend for plotting computation graphs. TorchOpt use PyTorchViz as the blueprint and provide more easy-to-use visualization functions on the premise of supporting all its functions." ] }, { @@ -23,94 +23,44 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 1, "metadata": {}, "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + }, { "data": { - "image/svg+xml": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "%3\n", - "\n", - "\n", - "\n", - "140558415956464\n", - "\n", - "y\n", - " ()\n", - "\n", - "\n", - "\n", - "140558415963712\n", - "\n", - "MulBackward0\n", - "\n", - "\n", - "\n", - "140558415963712->140558415956464\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "140558415963664\n", - "\n", - "AccumulateGrad\n", - "\n", - "\n", - "\n", - "140558415963664->140558415963712\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "140558415956064\n", - "\n", - "x\n", - " ()\n", - "\n", - "\n", - "\n", - "140558415956064->140558415963664\n", - "\n", - "\n", - "\n", - "\n", - "\n" - ], - "text/plain": [ - "" - ] + "image/svg+xml": "\n\n\n\n\n\n%3\n\n\n\n139996637621680\n\ny\n ()\n\n\n\n139993377217744\n\nMulBackward0\n\n\n\n139993377217744->139996637621680\n\n\n\n\n\n139993377217840\n\nAccumulateGrad\n\n\n\n139993377217840->139993377217744\n\n\n\n\n\n139996637619360\n\nx\n ()\n\n\n\n139996637619360->139993377217840\n\n\n\n\n\n" }, - "execution_count": 4, "metadata": {}, - "output_type": "execute_result" + "output_type": "display_data" } ], "source": [ + "from IPython.display import display\n", + "\n", "import torch\n", + "import torch.nn as nn\n", + "import torch.nn.functional as F\n", + "\n", "import torchopt\n", "\n", "\n", "x = torch.tensor(1., requires_grad=True)\n", "y = 2 * x\n", - "torchopt.visual.make_dot(y, params={'x': x, 'y': y})" + "display(torchopt.visual.make_dot(y, params={'x': x, 'y': y}))" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "The figure shows `y` is connected by the multiplication edge. The gradient of `y` will flow through the multiplication backward function then accumulated on x. Note that we pass a dictionary for adding node labels." + "The figure shows `y` is connected by the multiplication edge. The gradient of `y` will flow through the multiplication backward function then accumulated on `x`. Note that we pass a dictionary for adding node labels." 
] }, { @@ -122,137 +72,29 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 2, "metadata": {}, "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + }, { "data": { - "image/svg+xml": [ - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "%3\n", - "\n", - "\n", - "\n", - "140562207781168\n", - "\n", - "loss\n", - " ()\n", - "\n", - "\n", - "\n", - "140558416955520\n", - "\n", - "MseLossBackward0\n", - "\n", - "\n", - "\n", - "140558416955520->140562207781168\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "140558416954944\n", - "\n", - "AddmmBackward0\n", - "\n", - "\n", - "\n", - "140558416954944->140558416955520\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "140552833283456\n", - "\n", - "AccumulateGrad\n", - "\n", - "\n", - "\n", - "140552833283456->140558416954944\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "140562207783728\n", - "\n", - "fc.bias\n", - " (1)\n", - "\n", - "\n", - "\n", - "140562207783728->140552833283456\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "140552833283792\n", - "\n", - "TBackward0\n", - "\n", - "\n", - "\n", - "140552833283792->140558416954944\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "140558416606736\n", - "\n", - "AccumulateGrad\n", - "\n", - "\n", - "\n", - "140558416606736->140552833283792\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "140562207782928\n", - "\n", - "fc.weight\n", - " (1, 5)\n", - "\n", - "\n", - "\n", - "140562207782928->140558416606736\n", - "\n", - "\n", - "\n", - "\n", - "\n" - ], - "text/plain": [ - "" - ] + "image/svg+xml": "\n\n\n\n\n\n%3\n\n\n\n139993376880096\n\nloss\n ()\n\n\n\n139996875678480\n\nMseLossBackward0\n\n\n\n139996875678480->139993376880096\n\n\n\n\n\n139996875677952\n\nAddmmBackward0\n\n\n\n139996875677952->139996875678480\n\n\n\n\n\n139996875678336\n\nAccumulateGrad\n\n\n\n139996875678336->139996875677952\n\n\n\n\n\n139993376879696\n\nfc.bias\n (1)\n\n\n\n139993376879696->139996875678336\n\n\n\n\n\n139996875678912\n\nTBackward0\n\n\n\n139996875678912->139996875677952\n\n\n\n\n\n139996875679152\n\nAccumulateGrad\n\n\n\n139996875679152->139996875678912\n\n\n\n\n\n139993376879616\n\nfc.weight\n (1, 5)\n\n\n\n139993376879616->139996875679152\n\n\n\n\n\n" }, - "execution_count": 5, "metadata": {}, - "output_type": "execute_result" + "output_type": "display_data" } ], "source": [ - "import torch.nn as nn\n", - "import torch.nn.functional as F\n", - "\n", - "\n", "class Net(nn.Module):\n", " def __init__(self, dim):\n", " super().__init__()\n", - " self.fc = nn.Linear(dim, 1)\n", + " self.fc = nn.Linear(dim, 1, bias=True)\n", "\n", " def forward(self, x):\n", " return self.fc(x)\n", @@ -261,10 +103,12 @@ "dim = 5\n", "batch_size = 2\n", "net = Net(dim)\n", - "xs = torch.ones(batch_size, dim)\n", + "xs = torch.ones((batch_size, dim))\n", + "ys = torch.ones((batch_size, 1))\n", "pred = net(xs)\n", - "loss = F.mse_loss(pred, torch.ones_like(pred))\n", - "torchopt.visual.make_dot(loss, params=(net.named_parameters(), {\"loss\": loss}))" + "loss = F.mse_loss(pred, ys)\n", + "\n", + "display(torchopt.visual.make_dot(loss, params=(net.named_parameters(), {'loss': loss})))" ] }, { @@ -276,89 +120,67 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 3, "metadata": {}, "outputs": [ { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] }, { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAeUAAARiCAYAAACave+lAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOzddXwUd/748desZiXubgSIQJDgUooXCi3UoEqNuructr2212uv3+q1/dGrQalRpFiLu1uAAEkgARIS4rpZnd8fIXsEgifZTfJ5Ph69C7s7M+/ZmZ33fD7zEUmWZQRBEARBcD2FqwMQBEEQBKGeSMqCIAiC4CZEUhYEQRAENyGSsiAIgiC4CZGUBUEQBMFNiKQsCIIgCG6i1ZOyJEljJUk6KElSliRJL7b29gVBEATBXUmt2U9ZkiQlcAgYBRwHtgJTZVne32pBCIIgCIKbau2Scl8gS5blw7IsW4DZwHWtHIMgCIIguKXWTsrhwLHT/n381GuCIAiC0OGpXB3AmSRJmg5MBzAYDL27du3q4ogEQRAEofls3769WJblwKbea+2knAdEnvbviFOvOcmy/DnwOUBaWpq8bdu21otOEARBEFqYJEm553qvtZPyViBBkqRY6pPxFODWVo5BEM7LbDZTUlKCyWRydShCM9JoNPj5+WEwGFwdiiCcU6smZVmWbZIkPQosBZTAl7Is72vNGAThQg4cOMCf//pXdu7Zi4deXMDbA6vFQmigP39+9RXGjh3r6nAE4Zxa/ZmyLMuLgEWtvV1BuFh2ux2jjx/j7ryP5L4DXR2O0AxO5Bxm74rF2O12V4ciCOfldg29BMEdqDUa/IJDCY/r5OpQmiYD0vleP9cHLvReK2qNME5tw26zovHwaOGNCcKVE8NsCsKlar3xds7tXMlMutAHLvReM7jY76c17gvc4N5DEC6FSMqCcKna2IW+9GQhaxbM4ceP3yNj22asFnPLbrCJ7+dY1iF2rl1JTVXlBRe322ws/f5ryooKWyA4QXBvovpaEC5Vo2rXc9fBnjx+lCMZe4nqnERodMwVbdJirmPXulUc3LkNrYeOAWMnEB7XCYVCcd5qYFmW2bdlA/u2bCQivhNavR5JauLDV1yVfNoKmliXWqNBZzDWx3uB5e02G7//8B0JPXrhGxh81iePZR3ieHYmXXulNfm+ILRloqQsCJdKOuc/GikvKebgzm3NUuJL37SOzX8swcsvgOqKcn7+9H3qamuQZfm8ydRmtVBwNIfgiEiuue1u4pK6oVJrzv7gZSfkhrrq01bQxLpComLo2qsPOoPxHOu5+ACKT+RxYMcWqisqLjJEd3jeIAgXR5SUBeEyybJMSUE+8778lJPHjxPVuStjptyJwcuLjUt+Y8n3X1NRUkz65vUYPL25/ZmXiUvujqmmmt9nf0vm7u146I2MnnIHXXv1obqinK/e/hsRcQlkpe8iMqELV028gcCwCPZsWEtgeARDJ07Gajbz93unknswg64906Cpki9weH86P33yb3Iy9qFUqdizcR0jb7qVtOGj8dDp2b1hDesXz6eytJiUvoO45ra7UaqaviRsXraEutoa+o0c6+wmJssyH738FFMffx4PvZ5F337JkYx0vPz8GXXz7XTq1oOyopMs+2km+7ZspFv/wYy86Va8/QMAKCk4weKZX3Is6xB9ho9m8x+LefSt/0OnN+Cw29m/ZRPzvvgEg5c3I26cSlBEFCvn/MDq+b9QXV7GwV3b8dDpefDv/yQ4MrrpGgA45/cjCO5IJGVBuAILv52Bpa6OSdMfobSwgIztWxh4zbX0GDIMhVLJno1rSe4zgNikFEKiYpAkiZ8/eZ+aygquvet+Sk8W8tXbf+PFT75CdjjYt3UjkqRg+ORb2LV+NWsW/MLwyVMpLSygT0oqXj5+KJRKfAODOJZ1iC49ep+zjBkaHcvk6Y+x7MeZ6Dw9GXrtZPyCQ9BotBzYuY1f/vMBw66/idDoGHIPZVBwNOecrc1tFjMFuUeoKi9zJuWyokIO70vHQ6/nm3deRwIm3vMQBblHmP3BOzz+zw8xevswdOINaLRaik+cwGKuf54tyzLzvvwUWZYZf+d97F6/hl3rV2M1m9HpDdhsVo4eOsDwG6eSvnEtq+b9zB3PvsqAsdei1mo5uHMb/UZdQ3BENL5BogpbaD9EUhaEK1BZWoqnry+dUlJxJHbDZrOiVKrwCwohLDaeo4cyCI2JIy6pG1CfyLat+p1n/+//ERmfgM1qZdMfi9m3eT1JfQagNxjpN3Is3QcOxWI2s2fjWvKOZJ1ar5Iv//Fn+p4qrdZWVSKfs6mzjM5gILJTF3wCgzB6+xCX3I2G6uZNS3+jS4/e9Bt1DTqDgZiuyU1Xa5/iHxJK7qEMyouL+OWzD+g5+GqUKhURcZ2oLCth8x+L+NfcP/APDiE8rhPpm9dzaPd20oaNIjgiiqCIKMqKi5zrKysqJGvPLu5+5W/EJXYjMDSCBf/9zPm+Sq0hbfgoUvoORHY4+OOnmVjMdQSGRRASFUPB0Rwi4jsT2anzFR9DQXAnIikLwhW48cEnmPff//D6/bcREBbB+DvuIzYx+ZyfLy0qpLaqir9OuwlJoQBZpq62hrjkbiT1GYBKrSEgLByVWo3R2xuFpMBiMqGQFEiSgluffAGNh455Mz5BrdVyEX2jmny98PhR+g4fg9ZDh0KhPM+z3nr+wWHYbTYy03dSV1tL9r7dyLJMTGIypScLMdVU88JN40AGWXYgSRJdeqadc33V5WUo1So8vX1RKJUER0UjKf/XxEWhUOAfEoZSpULjoUNCwmaxnDfGs7hJd2xBuBQiKQvChZzn4h4SHcO9r7yGxVzHhiULmPX+W7zy2bcAp55xSpzecdfb1x+jtw9/mjEbo7e383WVSk1VeRkOu52ayvoGTBazGYfDjl9wCHovL0qLClCqVEgKBUV5eQRHRJ37OeoFePv5U1ZchCw7LurzvkHByDLs37qZPsNHk7FtMwVHc5gw7QH8g0Iwevvw8dL19Tcap/Za2VDybqIwrzN6Yq4zYbPWJ9qq8lJkR+NYJMUZ+3aqwdb/9vkCDbhEQhbaINH6WhDO6/ytm7/+5985tHs7Docdc21toySn1ekx1VRTUnACm9WKLMv4h4QRmdCFRd9+gbm2lqqyUuZ+8THZe3cDUF1ZztoFv1JwNJf92zahUKkIi4knPjmVA9u3cjTzIMt/molCIdG5Ry8kSUKWZY5nH+L2tC5sXfF7o9Cb2h2AviPHsmb+z2Rs30pNZQVrf/uVnIz9QH2XpB1rVvDktVeTsX0LAEqVCh//AI5lHsQ3MAitXk/ekSwiO3UmJDqWyIQuzPn8Q6xmM6WFBfz21RccPbgfWZb/V8Uu1/9Pw/cQFB7Jijk/UFZUyG9ffVHfkvwi6I1eVJQUU15UhN1uq9+GaGEttBMiKQvCeZ2/uDV0wmR++vjfPDpqENtXLeOWR591vhcWE0tsYgqzP3iH23snsGfDGmRZZvqf/0F1RSXPTBrFX+66EQ+9nk7dUgHqq2oVCv50+/UU5B5hxA
1T8TAYGDpxMqExsfzz0XtY+O2XTHv57xi8vE8rNUqnlcwbh65QKP7XP/jUaz2HDOf6+x7hm3/+jUfGDCI7fTeRCQmN97xh3XL938GR0cQmpWDw9CK2azIRcQl46PUAPPWvjzl5/BhPXjuMtx+ZhsHbi+guiRzYvoXbe3fm3888zKLvZvDI6IF8+qdnKS0s4LanXuJY5kGenjgSlVqNUqVCpVYD9TcB0qlgJUlCoVQ6W1HHJCYTHteJD196glt7dOLooYyLPZiC4PYkd77DFPMpC66wY8cOPvjsC8J6DiDt6lEXv+AVDjddVnSStx+5m0fffJ+I+ITzf9jFmuNxrd1mA0ChVFJZVsrDI/vz7dYD9Qm4mR09lMG6H7/h7ik3MX78+GZfvyBcCkmStsuy3GSjC/FMWRCaiyuHm5ZlHA4HpuqqM0rQLePMtcuyTE1VJR463XlbcZ9u++pl2KxWojsnsnLuj/QZMdr5TFoQOirxCxAEN6FUKvEPCXNW4V6q3IP7+b/nH3OWQK0WCyWFJziRe4TiE/lYzHVXFF9NZQWFx3I5kXuEE7lHKD1ZgOO0qRBnvP4q21Ytu+j1de3Vh8zdO/m/5x+jtOAEdzz76hXFJwjtgSgpC0JruIj6Xi8/f5774ItLWll92ykZm9XK4pn/Zei1k1Gp63/WJQX5LPjqc/ZsWIN/SChTn3jhvN2ULhTiyl9/ZNnPs+qf/SpVJKb145ZHn0VnrO9ONeGu+/nitVfoNWT4RU2T6OXrz10v/PkidlcWo3IJHYZIyoLQGpo1p0iN/pKBipJiMrZt5s7n/uR8PyQqhvv//A92r1/N5j8WX3mIEgy5dhIjbpyKj3/gWW9Hd03GZrWStXc3SWn9LnpvLhzYpX55ooOy0HaJ6mtBaIvOaJ+Zlb4L38BgPL19m/7A+Ra+BAW5R9i24g92rl1FefFJHKf6FktSfevvzqm92Lt5w8WtrMXamJ6WkN23HasgNEkkZUG4VO5woT+jIFh4LBf/0LDTXm/+VmcxXZMJCA0n91AGK36ZzdLvv/nfTFWn1hoQFk7hsZyLW2FrFGZFgVloY0T1tSBcKje80MvILf7YNaXvQFL6DsRqMXNw13b+32uv0HfkNcR0SXRWMUsgBvIQhCsgSsqCcKnkc/7DZYLCIykpLLj8FVzCbqg1Wrr27APyqeExZRlOjdtVfCKfoIiolg1AENoxkZQF4VJJ5/yHy3Tq1oOywgKqKyouehmzycSHLz7B9tXLzzPbVL262hr2bd1IzoH91FZXsX318vouXMEhSAoFslxfQs7cs5PkPgMuYw9a8HsUJXehDRHV14LQDvgGBtM5tTd7Nqxh4DUTgPoRwlbMmc3GJQuoKC0hK30XQ6+7kasm3oCnjy8lhSfYsmwpV0+6+YLrV6k11NXUsHrez5QVFaFUKrnm9nsICI1wfib3UAaSpCChe88W28/LIrpTCW2ISMqC4AoyyFLzlA8lSUKlUjH21mn88tkH9BkxBrVGg9HbhyHXTqLn4Kudn/Xy83P2K96/dSN9RowmPiX1gttQqlQkpfUnLDYec50JjUaLb1AIGg8P56QYi7/7kvF33uscD1sQhEsnkrIgXLYr6A/bTAn5f+uTiE1OYfpf3jw1IpiMWqMmKDySoPDI0z74v6rcAWMn0G/kNXjoDRccllOSJHRGozOhN+XWJ1/E4Ol1hTtykURXZKGdEklZEC6b+2SF+tKyGr/gkAt90vlXcyZQSZLwDQxqtvVdeIMX8RmRuIU2SDT0EoQLEe2Eroyrvj+RkIU2SCRlQTgv+ZIu7heXfy41S8lN/HUpi8lNvHZx22sWLZYcxd2S0P6IpCwI53VpGeXiPn2pWUpy5p9LWrKp0b0uagVnfMhtc58oCgvtj3imLAhNkUGWHY2mJhTaLofDccG+2ILgDkRSFoQzSJJEnamWIxn7UKkub25jwb0U5R+ntqra1WEIwgWJpCwIZ9DpdHjrPdi0ZhkZa5a5OpwWVVhYCEBwcLCLI2l5CZ3i8fJqpS5bgnCZJHcePD4tLU3etm2bq8MQhHbrqaeeAuC99967YF9lQRCahyRJ22VZTmvqPdHQSxAEQRDchEjKgiAIguAmRFIWBEEQBDchkrIgCIIguAmRlAVBEATBTYikLAiCIAhuQiRlQRAEQXATIikLgiAIgpsQSVkQBEEQ3IRIyoIgCILgJkRSFgRBEAQ3IZKyIAiCILgJkZQFQRAEwU2IpCwIgiAIbkIkZUEQBEFwEyIpC4IgCIKbEElZEARBENyESMqCIAiC4CZEUhYEQRAENyGSsiAIgiC4CZGUBUEQBMFNiKQsCIIgCG5CJGVBEARBcBMiKQuCIAiCmxBJWRAEQRDchEjKgiAIguAmRFIWBEEQBDchkrIgCIIguAmRlAVBEATBTYikLAiCIAhuQuXqAARBaD2yLCPLcqN/S5KEw+FAkiTn6wqFuF8XBFcQSVkQOpBjx45x8uRJ7HY7AIWFhQBs27YNAEmS8PPzo1OnTi6LURA6MpGUBaEDmTt3Lv/4xz+cybjB7NmzAfDw8OD555/nb3/7myvCE4QOT9RRCUIHMmzYMMLDwxtVVZ/O09OT66+/vnWDEgTBSSRlQehAkpKSiIuLQ61Wn/WeQqEgNjaW7t27uyAyQRBAJGVB6FBUKhVjx47F39//rPe0Wi133HEHSqXSBZEJggBt9JlybW0tubm5lJaWujoU4Rw8PT2Jj4/HYDC4OhThDCNHjuT999+noKCgUUtsnU7HhAkTXBiZIAhtMikXFhby3cyZZGRmYfD0dnU4whlMNdUE+fvx1BOPk5CQ4OpwhDNERUXRu3dvsrOzMZlMQH0JevDgwURFRbk4OkHo2NpkUq6pqeH4iQLUAWF06jPA1eEIZ8jeu5ujRw5QXVPj6lCEJkiSxOTJk1m4cKEzKSsUCu65555zNgATBKF1tMmkDKDRehDUuSvdBgxuxa3KQFu7aLV+zHa7jdrCY626TeHSDB06lKCgIEpKSpBlmYCAAIYNG+bqsAShwxMNvS5JW0vI0DZjFlqaj48PY8eORa/Xo9FomDx5Mt7e4lGQILiaSMrnIteXMVvD8cNZfPmPv/DizeNZMecH6movr9p32U+zWDX3J0w11U2+73A4eP+5R8jeu+dKwhXaiVtuuQWlUonNZmPatGmuDkcQBNpw9XVzOJKxl51rV9J9wBA6devR+E2poYx5cdW/pppqln7/DRuXLEBn9OTmR56mS68+5+leUp/yZRnSN60DSeaBv71NUHgkGg/dZe1PefFJ1BotDrujyfclSeKOZ17F08f3stbfETkcDgoKCqhph8/HjUYjISEh2O12dDodmZmZrg6p2anVamJiYlwdhiBctA6dlK0WC9XlZVjM5vN86uKqfzcu+Y3svXuY8sQL5B7K4Ou3/86rM2Zi9PI5R+OZ+tcsZhNVpSX4B4US0yUJqQUnApAkCf+Q0HO82xafl7e8uro6nn32WZYuXerqUFpEZWUlAEOGDHFxJC0jOjqazZs3NzlYiiC4o3adlGVZ5kTuET776wscz8qkS
8807njmZTx9/Vk880t++ewDHHY7i777Lwqlghc++pJuA4ZQXVHOzPfeZNfalei9vLjtqZdIu3oU5SXFvH7/bXTqlsru9WvonNqLSfc/SkR8Agd3badT91S69OxNUp9+LPtpJlm7d5I6eNg5W7Tu3bKBj158ktKTBYDEDx/9iymPP8/Im25Fb/Rk3cK5LPj6C0oL8uk/+lrueuHPqC5wcTl66ABvPnQn5cUnGXfHvYy48Va0HjrmfPERP3/yPiqVipc+/ZrEtH4A5B7K4JNXn+FY1iG8fP0ZeeOtjLr5Nrz9AygtLODbd99gz8Z1BEdEct+rbxCblNKhWujKskxVVRUvvPACQ4YMQaVqXz+ZwsJCVCpVk4OJtGWyLJORkSHG8BbanHZyhTl3Ke/nT98nqlNXnvu/L8g5uJ+9Wzcy8sZbueHBJ+jWfzCb/lhE2tWjSOzVF8Wpqub/9/qr6HR63v55EYVHc3n36QeJTUxBqVJRXVGOw27nT1/MZMWc2Sz/ZRbX3nk/1RVlBIaG46HTIykUBIVHcOJoDt3lcz+ZTu4zgPfmL2fe//sEvacn1941HUmhQJIkdq1bxU+fvs+0F/5CfEp3dqxezvGsTGISk86710ezDjL18efQGT35+JWn6dIjjdikblx/78NMnDadl6ZMwOH4X/V2VEJX/vH9AmSHnbzDWcz57EP2bdnAgLHXsuznWWi0Hvzz58WUF59k26o/iE1KOc8x4JzHoa3TaDTodLo2l5QbpmY8l+joaOff7elmy+FwoNVqXR2GIFyytnWFOadzX0yUKjUqtQqNh5bE3n1J7tMfSZKQJAmFUolCklAolChPXWyLT+RzaOc2/v7tLxi9fTF28yG570B2b1hNr6EjMHr7MHTiDYTGxJHcdyBbli/l+OEsHA4Hsizz/nOPMuTaSajUGswmE+drLiad2rakUCApFM4YADb9voirJkwmpe9AVBoNV11307n3+rTsnDpoKLFJ3fAJCKRLjzQydmwhuksiKrUGUCE18V1JgCxJhMclEBwZTVnRSepqTSiUSlQqFSqVkpguScQlnishn/8YdDQXSoStSZIkZFnG4XA450xWKpXO+C4nzivdvytZvuGGsuE3LAjtTbtvfX3rky9gs1r5291TeP+ZRzm4Y9t5P19RWozVYubp60YybUAK0/qnsG3l71SVlwGgVCrx9PZFkiTUWi1KhRKH3Y5CoQQknnznI9KuHoXVYsZDr+dyk1VFSTG+gSHOkvN5L0Knvaw3eqHWaJAkCU8fX2oqKxoNpXg6WZYpO1nIzH+/yePXDOXuASnM+/JTaqurcDjsjJ06DZ+gYN598gFen34721Yuu6x96UhkWcZms2EymZxzFrua1Wrl/fffZ+DAgdxyyy1kZWVd9LIOhwOLxYLNZnO+dr5kKMsysixjNpupra3FZDJhs9kanYMNNwqXY8aMGcyYMYO6urrLWl4Q3F07KSmfm4dez10v/BmHw8HaBb/yw8fv8pcvfwBAUiiQZXDY7c67d9/AILz9A3n5P1/jFxwKsoy5rg61RkN1RTk2m5XSkwVEdOqMqboKu8NOYGg4Xr5+FOUfw1RTjaRQUJSfT2h03P9KKnY7VRVlGIxeqC+iWi0wLJzC47lYLWaUSiVWqwWlUoVSpaq/8FstmGpq0Bs9Gz1nriwtoa62Fg+9gfLik4RE90WSmr73stttZKbv5Mj+vbz6+XcEhIXz/fv/dFbRqjVqrr/nIW584HF2b1jD/z3/GF8OF92pzqeqqopZs2axfPlynnzySQYNGuTqkNBoNDzzzDOMGTOG77///pKWzc3NZd68eXTr1o0RI0Zc1DJ5eXl8/PHHpKenExgYyM0338zo0aMb9UQQpVxBaFq7T8rzZnxKTGIyQeGRlBWfxNsvwPmewdMLm83K0cwD+AYGEhAajm9gMIm9+7Dgq88Zcu0kbDYru9asos+IMfiHhFJbXcWm3xeh0Xqwf+smjN7ehETH0rV3X3asXs7ezRvIO5yJT0AAnbp1d1588nMO88KN1/Dku5/Qd8SYC8bdf/R4vvv3m0TEJxARl0D2/j10Se1NRKfOOOx20jeu479v/ZWHXvsXSacabQEc3LmNuORuaHU6TuQcYfL0x1AqlRQcy8Vhs2G1mCkpPEHhsVwMXt4oJAVqrQcVpSXkHjpAZvpOegweBsDq+XPw0OuITOhK4bGj+AWFNO/BaYcqKyvJyclxdjEaMGAAilMt6mtra6msrMRms6HT6fD19XWeH9XV1VRVVSHLMkajES8vL0wmE3V1dXh5eaFSqSguLkan0yFJElVVVdhsNrRaLbIsY7FYCAwMRKlUUlxcjK+vLxqNhpqaGsxm81kDg5xehXzy5ElMJhOSJKHRaPD19UWtVlNTU8OJEyeoqKigqKiI3Nxc/P39MRgMOBwOKioqqK2tRaFQ4OPjg06nw2w289577+Hp6ckXX3xBXV0dOTk5VFZW4uvrS0VFBVarFZvNhsViQa1WO0cWayj9qtVq/Pz80Gq1OBwOTCYTFRUVKBQK6urq8PDwaMUjKgitq/0kZVmGJu6+h1w7id++/oK8w1mExyUw+YHHnO8FRUSSlNafVfN+Ys38X7j7pb+RkNqLqU++wKJv/8us999GrVHTf/S1xCQmUVVWhodOj09gEL989gExXRIZdv3NeOj19Bs5lpqqCpZ+/zUeBiO3P/MKOqNXfUyyjMbDg7jk7hi8Gl8cFQoFfsEheOgbz6aU1Kc/N0x/jDXzf+GPH2fS+6oRhMbE1b8pSeg9vYjunIjutOV8g4LpPmgou9etorK8jAl3Tyc0OhZJoeDTPz1HXU0VGq0HC7/+ghVzfmDS/Y/SqXsPDu9P54cP3yGyUxfikroRFB6JUqmk19Cr+f2H71j+0/d4+fkz/S9vXtJ339HIskxpaSk1NTX069eP48ePU1VVhbe3N7W1tSxcuJD169dTUVFBly5deOCBB/Dx8aGsrIzvvvuOffv2ATBo0CBuuukm1q9fz7Zt27j33nsJCgri3Xff5aqrrkKpVDJr1ixUKhVKpZKQkBD27t3LCy+8QExMDC+88AKvvvoqnTp1YtWqVaSnp/Pggw82ivX0kuqMGTPYs2dPfU2Rry8TJ07kqquuYsOGDXz22WeUlZWxZcsW5s6dy/3338+wYcPIzs7mxx9/5MiRIyiVSgYNGsTUqVM5duwYO3bsYPbs2YSE1N/ExcbGOrf1/fffs2/fPiRJorS0lMjISF5++WW++uordu7cCdSPNnbttdcybtw4qqurmTdvHitWrMDX15eysjIGDBDj3QvtV/tJyk0kBUmSCI/rxAN/e7vJRVQqNQPGjGfAmPH1L5xqMGX08uHmR55qchmth44h469n6uPPNXrdQ29g3G33MO62e5qMLTgiitdn/nrWWxoPD0bfckeT2+o9bCS9h40863WlUknXXn3o2qtPo9dH3DC1yfUA/O2rH5t+Q4ZbHnu2ybc89AZuffKFc67TSSRkAMxmM4cPH0av19OjRw+WLVtGbm4u3bt3Z8uWLaxZs4YpU6aQmprK/v37
qaysxMfHhwULFrBnzx5eeeUVfHx8yM7Oprr67FHZTk+kHh4e/PnPf+aWW27htttuIzo6mrVr117WQBkvvfSS81n4pk2b+OCDDxgyZAhjxoyhc+fOZ1VfWywW/vvf/xIZGcnDDz9MZWUlTz31FH369OH48eN4e3sTEhJCbW0te/bsQavVEhMTg69v/aA11dXV3HffffTp0we1Wo0kSTz//PPIsozdbmfr1q28+eabXHXVVeTm5rJq1SoeffRRPDw8+Oijjxr1HhCE9qb9JOWLdb4xMpozt1xwLI7zfeACCzfnOB8inzYLWZapra1l//79dOnShdDQUJRKJTk5OXTr1o309HRSUlJISUnBaDTSt29f53KrV69m4sSJzhJlWlraObfRICgoCB8fH7y9vYmKiqKqqorc3NzLij0vL4+srCxqa2udjbOKiooadZc6XXFxMdnZ2XTq1IlNmzYB9aXb7du34+Pj4xyoo7KyktmzZ1NZWck999zD4MH1k8f07duXpKQkNBpNoxiys7OpqanBYrFgMpnIz88nPz8fvV5Pz549sdvtJCUliefRQrvW8ZLyFfyetR46Bl4zAaO3z+Vt57Rk6nDInMw7SnnRSTr36O187njWwqeW2bayvo+wf3DolSfS5kjqYgCws1RWVrJ9+3YSExOxWq1kZWVhNBqpra3FZrOh1+ubTChWq7VRgmrQ0OK+oUvT6a2YGxpNKRQKlEolCoXCWYJs+Luh5NnQCrxh26cnd5PJxOeff45arUalUmE2mxu1tm4qXpvNhiRJZGdnU1RUhCzLdO7cmejoaIxGIxUVFVRXVxMSEsJjjz3Gd99912h5vV7fqL+3yWRixowZ9T0a1GqsVisWiwWr1Yrdbnd24arvQtjuO4wIHVzHS8pXQGc0MvHuBy/8wXM57fpWU1HOjtXLUSiUdO7RG4Dcgxns2bSWmooKohK6knb1KDSnGrUczTxIVvouppxRbX6lcbh0He2IzWYjOzsbi8VCWFgYDoeDoKAgiouLOXHiBAkJCaxfv57c3Fw6depEXl4eXl5eBAYG0rt3b9auXUtaWhp6vZ7CwkICAgLQarWYzWaKioqoqamhqKjoglW3SqUSvV7P0aNH8fPz4/Dhw84EazQaqauro6qqCofDgUKhoKKigj/++IMvvviCuLg4tmzZwsaNGxslf1mWqaysdCbIgIAAIiIiGDBggHO6x/379xMZGYlOpyMoKIjFixczduxY6urqMJ93GNv6m5lly5bx0Ucf0blzZ7Zv386KFStQKpUEBQVRXV1NdnY2KpWKnJwc4uPjr/yACYKbEkm5NThLlfV/yLJM0Yl8juzfy3X3PuS8+1drtXj5+HFg+xaKT+TTfdBQNB5aQOKqiTfwxgO3M2bqnfgGBl9BLKeCueSkekbRWJSUG7FYLOzcuZOBAwfy0EMPIUkSO3fu5KeffiIvL49+/fpx9OhRfv31V+ewltdffz0A1113Hf/973/57LPP0Gg0xMfHM3r0aCIjI9Hr9XzzzTdERUU5Wzqfj4eHB71792bu3Lns3r2bgoICfHx8gPoq75iYGObMmcPixYt58MEHMRqN9O/fnx9//BGdToder280EpaPjw9hYWGsXr2affv2MX78eFJTU7n++utZt24d6enpKBQKvLy8iI6OxsvLi7vvvpulS5eSkZGBLMt4e3sTHh5+zpgbYvjll1/Q6XR4enqi1WpRKBTExMTQrVs3ZsyYga+vLyUlJSQkJFzx8RIEdyWS8iW5zEwkNf7DZrFwIvcwDoedkKj/PbcLi4kjLCaOytIS8nMON1rGNygYv+BQ9m3ZyODx119+zJf9PE467z87qoauRRqNhjFjxmA0Gp1VvgkJCUydOpWAgAACAwO54YYbyMnJoaamhpCQEAIC6rvnxcTEcM8993Ds2DEcDgfR0dF4enri4eHB5MmTOXbsGL6+vqSlpRETE1PfgDE8HI1Gw/PPP4+fnx+pqalERETg4eHB+PHjiY+PR61WYzAY0Gg0jeZNPnz4MCaTCQ8PDwwGA48++ig5OTkAREZG0qdPH2fLaYPBwLBhwwgPD6e2tpbAwEAkSaJfv34EBARQWFiIJElEREQQFBSEJEkMGTKEoKAgioqK0Gg0hIWFERERAcCoUaNQKpXObk2yLGMwGHjooYfIyclBlmWioqLo0aMHoaGh6PV6brrpJrKysvDw8GD48OH4+fk1Wd1/+vEQhLZKJOVL0jw/dou5jqK8Y/gFh5wa/vIitixJxCYmc2j3jktMyuIC1ZIaEoBaraZbt26N3jMajY1eCw4OJji46VqOmJiYs1pON5Sam6qubVjPoEGDkGUZvV5PaGj9DGBBQUEEBQU1uZ3w8PCzSq1xcXHExsY2uplooFAomoxbo9GQmJhIYmLiWdvQarV07969ye2fuS8N24yNjW3UdapTp07Ov8PCwggLC2tyfWcSCVlo60SriXORzzdq9ZWx2+3UVlejN3pe0nKevn5UFBe1UFRCW9UciUgkM0FwDyIpn4vzsWvzp2ZJklCp1dis1ibePff2rHV1aHS6Zo9HaNsudxzp5l6HIAhXTiTlC2r+EoRao8U3MPjUPMoXuz2ZEzlHiIzv3OzxNKxfcL3LSY6uKimLRC4IzU8k5QateH3ReHgQFhtPZUkJ1RUVF7WMwyGTsWMLqYOuaqGoRPWlO2hL1citEatI/EJH004aejVD/5xGi1/++i5mSUmSCAgNIzQmnkO7ttFz6HAkSSJ903q+euuv5B7KAFlm2c+zuPP5PzP+9ns4sGMLfkEhRHXu2gxRNt++NP40l7REWySSROsT37nQkbSTpNzcieAy1ncJ3X8lScI/OJTOPXqxf/sWug8cikqtJqXfQP716+9Nfn7BV18w5fFnG5VOnH+10Ahdl7bK9p2M8/PzOXjwYKPpBwX3JcvyZQ87Kgiu1E6S8kVqpqJlk6u5xPWq1Gr6jhjTaBrH81UHvvjxl+demRgys8VIkoTBYOCzzz5zdSjNxmQyoVKpnGNUX4zTp1q8lOVcraFftyC0FR0rKTfTb7Pd/cRPG21MaEyv1zN79mxXh9Fs7HY7N910E2PGjOHOO+9Ed5Gt+ZcvX86//vUvxo8fzx133IGXl5dIdoLQAjpWUu5ILjnHigtsR5CVlUVBQQFpaWmNhtO8kOHDh1NZWcknn3yCwWDgxhtvbDR6mSAIzaP9JGVZdst5fZuz/HlJ67qUjV5pkG763Qtn27RpE3FxcQQFBV3SjEuSJDFx4kSqq6v5+uuv8fDwYOLEieec+UoQhMvTfpJyc1wYWqAGtzlX1yKXvubYZ3FRbhNsNhtbtmwhJSUFT89LG00O6meMmjJlCrW1tXz55ZdotVrGjRuHVqsViVkQmkmbTcrmOhM5GfvQGYyuDkU4Q/a+PZhqalwdhnCGgoICcnNzGT9+PEbj5f1u1Go106ZNo66ujk8//RSlUsm4ceNQqVQiMQtCM2iTSdloNBIfHcXe/RkcXveHq8O5YuXl5Zw4cYLg4GD8/PxcHc4Vs5rNdI6JwusySmNCy9mxY4dzikW
V6vJ/+lqtlunTp2O32/nwww+RJInx48ejUChEYhaEK9Qmk3JISAh33XUXVVVVrg6lWezevZsFCxYwatQo+vXr5+pwmoVOp7vomX2ElifLMjt37iQ2NhZ/f/8rXp9Op+OBBx4A4P333wdgwoQJV7xeQejo2mRS9vDwOGuau7asuroaf39/4uLizpr+TxCaQ1FREZmZmYwePRpfX99mWafBYGD69OkolUreeecdVCoV48aNa5Z1C0JH1SaTsiAIlyYjIwNJkoiLi0Ojubg5vC+G0WjknnvuQZIk/vrXv6JWqxk1alSzrV8QOhqRlAWhA9i7dy/+/v6EhoY2+3NfT09Ppk2bhizLvPjiixiNRgYMGNCs2xCEjkLMEiUI7VxVVRUHDhwgMjKSkJCQFtmGp6cnd999N5MnT+bJJ58kIyOjRbYjCO2dSMqC0M5lZ2dTU1NDp06d0Ov1LbINSZLw9PTk4Ycfpn///jz88MMcP368RbYlCO2ZSMqC0M5lZmai0WiIiopq0S5LkiTh4+PDK6+8QlRUFI8//jjl5eUttj1BaI9EUhaEdsxqtZKVlYWXlxdRUVEtvj1JkggMDOS1117DYrHw8ssvU1dX1+LbFYT2QiRlQWjH8vLyOH78OPHx8a06ME1ERARvvPEGe/fu5YMPPsDhcLTatgWhLRNJWRDaKVmWOXr0KFarlU6dOrXaaFuSJCFJEklJSbz44ossWLCAOXPmiMQsCBdBdIkShHaqISnb7Xbi4+NbdduSJKFWq7nqqqs4duwYM2bMICwsjP79+1/S7FSC0NGIX4cgtFNlZWUcOnSI0NBQIiIiXBKDwWBg0qRJ9OzZk88++4ysrCxRYhaE8xBJWRDaqYKCAoqKikhOTr6iCSiuVFBQEHfeeScAX331FYWFhciy7LJ4BMGdiaQsCO2Qw+EgLy+P8vJykpOTXR0OXbt2Zfr06Rw6dIiffvqJiooKkZgFoQnimbIgtEMmk4msrCwMBkOrP08+lwEDBlBQUMA333xDaGgo1157LTqdztVhCYJbEUlZENqhkpISDh48SLdu3TAYDK4OBwCFQsG1115LYWEhP/zwA4GBgQwcOLBZJ8gQhLZOVF8LQjsjyzJFRUUcO3aM3r17uzqcRrRaLVOnTqVLly7MmDGDzMxM7Ha7q8MSBLchkrIgtDNms5nMzEwcDgdJSUmuDucsvr6+PPDAA6hUKv7zn/9QXFwsni8LwikiKQtCO1NdXc3u3btJSUnB19fX1eE0KSoqiieffJKcnBw+++wzzGazSMyCgEjKgtDuVFZWsm/fPgYMGNBqo3hdju7du/Pss8+ydOlSZs2aJZKyICCSsiC0KzabjaysLMrLy+nTp4+rwzkvSZIYNGgQTz75JO+//z6rVq0SiVno8ERSFoR2xGQysWXLFhITEwkICHB1OBekUqm49tprueuuu3jyySc5evSoq0MSBJcSSVkQ2hGTycTWrVsZPHhwmxljWqfT8cADD5Camsr06dPFVI9Ch9Y2frWCIFyQw+GgsLCQrKwsBg8e7OpwLonRaOTf//43xcXF/P3vf3d1OILgMiIpC0I7YbVaWbt2LXFxcURHR7s6nEvm7+/Pl19+yZdffsmcOXNcHY4guIRIyoLQDsiyjNVqZd26dQwbNqzNVF2fTpIkunXrxltvvcVTTz1Fdna2aPgldDht75crCEKTamtr2bJlC8OGDXN1KJdNoVAwdepURo0axSOPPILJZHJ1SILQqkRSFoR2wOFwsG7dOgwGA6mpqa4O54poNBreeOMNTCYT77zzDjabzdUhCUKrEUlZENoBh8PBsmXLGDlypEvnTm4OkiQREBDA66+/zvz581myZAkOh8PVYQlCqxBJWRDaOFmWMZvNrFq1itGjR7s6nGahVCrp1asX06ZN46OPPuLw4cPi+bLQIYikLAjtwM6dO6mpqWHAgAGuDqXZGAwGbrjhBqKjo/n444+prKx0dUiC0OJEUhaENsZisZCbm0tFRYWz9Pjbb78xbNgwvLy8XBxd8woODubuu+/m2LFjzJ07F7PZ7OqQBKFFte2HT22U2Wxm//79FBcXA5CRkUF+fj47duxwzi2r1+tJS0tDq9W6MlTBDZWXl/PGG28QFRVF165dCQsLY+nSpbzwwguuDq3ZKZVKUlJSGDt2LIsXL6ZLly707du3TXb5EoSLIZKyC9TU1PDzzz8ze/ZsZFnGYrFQW1vL1q1b0Wq1SJLEwIED+eCDD0RSFs6iVCpZuHAhJ06cQK/Xk5KSwoEDB9i3bx/z5s0jISGBmJgY9Hq9q0NtFkajkTFjxnDo0CF+/vlnIiIiiIiIcHVYgtAixO2mC+j1ehISEsjNzeXIkSPk5eVRVlZGfn4+R44cIScnh5SUFDQajatDFdyQwWBAkiRkWaampoYtW7ZgtVp5++23efrpp/nTn/7Ejz/+6Kx1aQ/Cw8O54YYbKCoqYuHChVRXV7s6JEFoESIpu4BWq6V79+7Ex8c3Od+t0Whk2LBheHh4uCA6wd1pNJpG3Z5kWcbhcGCz2Th8+DCbNm1CrVa79VzKl0qhUNC9e3eGDx/OmjVrGj3qEYT2RCRlF5AkidDQUEaOHHnWszGlUsmQIUOIjIxEqVS6KELBnSkUinM+1vD29uauu+5i4sSJ7e65q06nY9y4cURGRvLrr79y/PhxV4ckCM1OPFN2EX9/fwYPHsyMGTMa3fE7HA7Gjx+Pj4+P64IT3J6Hh4ezCvv01/r168djjz2Gp6enC6NrOQEBAdx666387W9/Y9myZUyZMgWDweDqsNxWaWkpVqvV1WG0K0ajsUXPOZGUXUStVpOQkEBqairbtm1zjlgUEhJCjx49RNW1cF6nP1eG+toXPz8/3nzzTUJCQlwcXcuRJInExETGjx/PnDlz6N27N927d293tQLN5b777uPQoUOuDqNdefzxx5k+fXqLrV8kZReRJInIyEgGDRrEtm3bgPpEPXLkSCIiIsRFRjivM+/UtVot7733Hqmpqe3qWXJT1Go1N910Exs3buTbb7/llVdewc/Pz9VhuaVDWdn0nXgT3Qde5epQ2oVv//l3ioqKWnQbIim7UGBgID169ECn01FTU4PNZmPo0KEEBga6OjTBzRmNRmfy1Wq13HfffUyaNKnDtEMwGo088cQT3H333QwZMoTx48ejVqtdHZZb8g8OJbJTZ1eH0S7oW+GxkCiOuZAkSXTp0oW0tDSUSiVxcXF06dJF9E0WLsjLywtJklAqlcTHx/Pmm292qC50kiSRlJTEbbfdxj//+U+Ki4vF2NjnJCFJ7fi/Vty/1iCSsgtJkkR8fDw9evRAoVBw1VVXERUV1e6rH4Ur5+3tjSRJqFQqfv755w7Z2EmhUPDggw/i6enJRx99hNVqFYn5Cl3st9fwOVmWnf8151Yu6ShKl7yEW2sz1deyLLfLfok+Pj506dLFOQ9uYGBgu5w/tqFa1d1uOBouJm3t3DIajSgUCj744AM6derklvErlcoWP95arZa33nqLCRMmMGHCBPr164csy253nrUVF/utNXwu58A+1v
02l+4Dh5A66GKfW5+xFfnsly796EnnXNdla851XYI2k5Tz8/OZNm0aVqu13f3gKisrMRgMfPnll/z666+uDqfZWa1WPvvsMxITE93u2MmyzO7du3nsscfa1DPJY8eO4efnx8yZM/n+++9dHU4jsizj4+PDF1980eLtIyRJomfPntx99908/fTTLFu2DJ1O16LbbG8cdjs2mxWH3eFsYOpw2FFrPU79W8ZqtuBw2OtrZzRaFAoFNpsVs8mEzWrFXFeHqaYajdYDhVKJ1WLGbrMDMpJCgVqjbbq9w2mJz2qxIDscyLIDWZZRqTUoVSokScJcZ8Jx6sZToVCi1mhQnFqf3WbDZrMiIWG318eo0WpPvW5rFINCocBus2G3WZFlUCgV9TdxSKg1GqTTG9i66FLVZpJyXV0dmZmZTJo0iYCAAFeH06zMZjPFxcV4enq2u1l+ZFnm/fffd9tp92RZpqKigkOHDvHEE0+4OpyLlpmZSUREhFsmoKqqKr777rtWndHp+eefZ+7cuXzxxRc89thjbnfz585yDuxn4TdfcOTAPvyCQzF4epG9bw/T//wmKf0HcSL3MN+//09yD+zHJzCICdOmk9xvIKvm/sSPH72L1WJm5dwfUanVPPz6v+g1dATfvfsPNv2xCIfNjn9IKDc++CRpw0edfVycVc8Scz7/kNyD+6ksK6W6opyrJt7I2Fun4aHX8/HLT7F380YUSiVh0bHc+NATdBswBEmS2L1hDXO/+Bijtzc5BzPwCQjk3ldfZ+PS31i3cC42iwW/4DBueOBx+owYzdoFc1j03ZeATFBkNJY6Mzq9ntuefongyOjWPwBnaDNJGcDT05Phw4cTGxvr6lCuSFPVa6f3Nz3XZ9oih8PBF1984eowLshoNHLddde5OoxW01LnlyzLFBcX89133zX7us/H09OT1157jSeeeIKJEye2+WtEi2qiWlahVHLbUy8y78v/0HvYSGITU9i+ejlde/Xho5eepsegoTz02jtk79vDT5/8m9ikFMbfcS9Jaf3Orr6W4Z6X/849L/8du81K+qb1fPTSU3QfNAStR1M3kf8Lpry4iFuffAGd0ZMPX3yC5L796dStJ0+/9x9kWcZmtbBtxR989+4/eH3WXDTa+vEcqsrL6H31SB75x7/Re9Y3goxP7s7tT7+M3WZj7+b1fPTik6QOGgKAl68ftz79Eh++8AQ3PPg4eYezyN63xy2Ssmjo5QJNXQzPbN3XHhKy4L7a4/l13XXXMXjwYP7yl79gMplcHY77auLQe/n54xMYjJevP+Gx8YRExVBdWU7BsRyO7N9Dt4FDOJF7BACfgECy9+4+7/qrK8o5lnWQnAP7MXj7YLfZOHn82NmfPaN9VnK/gUR06kx8cnfik1PJ3L0Th91OZVkpRw8dIPfQAQze3ljq6igtLHAuF901ibSrR2Hw8nae2zWVFRzLPkTOgX0YvLyx2+0UHj+GJEkERUTh5euHb2AQ4XGdMHh5U1dbc8lfZUtoUyXltk6WZUwmEzt37kSpVBIaGkp0tOvvzC6V3W5nx44dpKamdqhuOK2lrq6O/Px8vL298ff3b7XtOhwOtm3bRo8ePdrkcZUkib/97W+MGTOGxYsXM3nyZFeH1GZIivpGeQqlAoVSiUKpRHbYqa2qQgZ++c8H9R+UZfSenmh1554W1Ga1svyX78ncswtzbQ12u53a6iqsliYeZ5xxg6DReqBU1qclrU6Hua4Oq8XMgq8+I+9wFhZzHVazBVNtNVaLxbmc1sMDD/3/eiDYrFZWz/uZAzu2Yqqpxm63U1NVgdVcB4BCqQIk575KEsinRlU8p1Zq4N1uk3JJSQmbN2+mS5cuxMfHt2p18Pm2VVVVxS+//EJxcTG9e/e+6OeYWVlZmM1mkpOTL7gNqE+c2dnZHD9+HIfDQVRUFAkJCVf8HciyjNVq5bnnnuOnn37qUAOd7N271/l9nk6j0TBw4EBWrVoF1Lc6DggIID4+/rLGMC8qKuL777+nf//+jBgxohkivzgNx/Xnn3/G39+fsrIyMjIyqKurIyIigtjYWLfvQx8fH8+TTz7Jv/71L/r160d4eLirQ2qzZFkmKDySgNBwbn/mZSLiErCa6ygpPIFvYP1QrkqlCrvDQZ2pFoejvqFYTWUFq379iduffYWUvgM4efwYL0+dCBfRbaogN4eK0hJkh4PC47kk9xlAdUU5S2Z+xZ9nfE90l0QO79/Lhy8+wfmyZG1VJWt/+5Xr7n2YHoOvoig/j1emTjz3vp53bae0UuVSu03K69at47333mPkyJH86U9/atXqunNtS5IkgoOD+fvf/868efMoLi6+6HUuX76c4uJiZ1K+0P7s3buXX3/9FUmS8PDwIDs7m6CgoCua6KK9POe+XHl5eaSnp2Oz2fjpp58YNmwYgYGBGAwGEhMT+fvf/87IkSPR6/VYLBZ69OjBNddc4/aJrCk1NTUsXbqUbdu2odPp0Gq1TJw4sU0M43nXXXexZMkSPv30U/7yl7+0qVb17sbLz58RN0zhjx++IywmDofdhqRQMGDMtRi9vfH09cPg6cWutasoys+j91Uj8PL1I6pzVw7u2ErB0SPUVlU1btV8HiUF+axfOBeHw4GHTk9Cak+0Oj1xyd3YtW41h/bsoLy4yNny+lxUGg0RnTqTlb6LkoJ8TDXV4ObnbYN2m5RXrlzJuHHjWL16NSaTydlKtba2lt27d5OZmYleryctLY2oqCgUCgVHjhxh586dlJWVERMTw9ChQ1GpVMyaNYtRo0YRFBREeXk5K1euZMCAAeTk5HD48GE8PDwwGAyUlJTQu3dv4uPjmT9/Pn379nXeqX/11VdMmjTpvElx586d7NixA7PZjJeXF4MGDUKr1TJv3jy2bt1KbW0tH3/8MUajkVtvvRWFQkF+fj5btmyhqqqKTp06kZqailqt5ocffiA0NJSbbroJLy8vMjMznY3JzGYzv/zyC2FhYRw5cgSHw0HPnj0xGAxs2rSJ6upqPD096du3L/Hx8ajVaoqLi1mxYgXl5eV07dq1xY+fOxozZgxjxowBYMGCBdx5552kpqYCOKubH3roIfz9/Zk7dy7bt2+nf//+FBQUsGPHDurq6vDy8mLgwIHExMSgUNR3x9i7dy+7d+/GarWSnJxMcHCwc5s1NTVs374dg8HAyZMnSUhIIDY21tm9RJZlZsyYwc0338zy5cvJz89HpVIRGRlJ//79nWNCb9++neLiYqqrqykqKsLPz48JEyZQU1PjPK6JiYnO7RYVFbFq1SqmTJlC165d+eabb9iyZQudO3dGrz931aWrSZKEp6cnzz//PE8//TRjxoxhyJAhrg7LrfkGBZE6YAi+gcEMGDOBoPBIrBYL/UeNR6FQcM1td7Nz7SqK8o6i1elJSO2FX3AoAN5+/vQZPop9WzdRW1WJzWpFb/RkwrTpHNixFYvZTFJaP/RGL3wDg88fCBCTmIynjy92u53r73sY/+BQlCoVtzz2LId27cBmsdJ9wBCCI6Lw8qvvhRMaHUuf4WPQG4zO9eiNnlxz291kb
N+CxVxH11590Bk88QsKQaVW4+UXgMHTk6uuuxEf/0C69OiNSu0ej2zaZVIuKytj7969PPbYY6xdu5b9+/fTu3dvzGYzW7Zs4ffffyc+Pp7a2lqOHTtGYGAgZWVlzJ49G71eT0BAALm5uRQUFBAeHs7s2bNJTU11JuX58+cTHx/P5s2bKSwsxOFwoNFokCQJk8lESEgI8+bNIywsjLCwMAC++eYbhg8fft6k7OHhgZ+fH3V1dRQVFTFjxgymT5+Ov78/er0eh8OBv78/Op0OSZIoKipizpw5WK1WAgICWLVqFQ6Hg7CwMHJzc7n11lsJCgpCoVA4kwfUJ+WZM2eSlJREcnIyvr6+ztKQr68vGo2G0tJSZs2axSOPPEJgYCBfffUVVVVVJCQksGHDBrccrMJd2O12Z396hUKBTqfD19fXeVy//PJLXnzxRQwGAxkZGXzxxRckJydjNBo5cuSIs2RnMplYu3Yt6enpDBs2jPT0dGpqaggLC3MmxrKyMmbNmsXUqVPx8vLCbDZjsVhIT0+nuLiY22+/HYVCwY4dO1i7di09e/YkKCgIb29vAL7++msqKysbHVe73U5hYSG1tbX07t0bLy8voqOjOXLkCOXl5W6dlBv079+f8ePH8/7779O9e3fn/gpn8w0MdibMAWPGO18Pj40DJHR6IwPHXtvksgqlkpiuycR0TW70eqduPejUrYfz38l9B15U9bVvQCBXXX8TBs/GXUOT0vqRlNa//h8ydOs/2PleaHQsodFnt7aPS+pGXFI3579T+g0CwC84xBnv0An17Q58AtznMVy7TMrbt2/H39+fqKgoevXqxerVq+nduzfV1dWsXbuWLl26cMMNN+BwOLBYLKjVajZt2oTVamXcuHFER0dTUlKCXq8/b1Wdh4cHPXv2pLi4GKVSSVRUFLt376auru6y4k5MTCQxMRGHw0FpaSnTp0/HarVy0003UVZWRnFxMVOmTAHAZrORlZVFVlYWzz77LIGBgcyZM4esrCxMJhNqtRovLy8UCgWvvfYa5eXljB8/nuHDhwP1z0HT0tKYOHGic25egJiYGGRZpqysjOeff568vDxkWeb333/no48+IjY2lq1bt7JgwYLL2sf2rLS0lDfeeAOlUondbmf06NH4+voSFBREly5dnMf1wQcfpKCggPj4eH799VeCgoK45ZZb0Ol0lJaWUlFRgdlsZt26deh0OkaNGkX37t05ceIER48epaamhvfff59bbrnFuR69Xs+IESOcz/w3btzIvHnzuO6665wJKSIigmuuuYaEhAQUCgVFRUX8/vvvfPDBB8TFxbFt2zbmzq2vOqysrEShUHDs2DEyMjKw2Ww4HI5zntvu9GhDkiQ0Gg133nknDz30EPPmzePOO+90dVhtkNTo/66UzWbl/ecebfK95D4DqKmswOOc/e6lJv+8MBcNy3UF2mVSXr16Nb169UKpVNK/f3+++OILnnjiCcxmMwUFBYwdOxaj0dhomdzcXCIjIwkODkaj0RAaWl89c74xXdVqNQaDgerqanQ6HV5eXtjt9ssefzczM5PFixdz6NAh6urqyMvLo7y8vMn12Ww2Tpw4we7du3nttdeQJIni4mK6du1KUFAQDofDudz48eP55JNPOHLkiHN5jUZD9+7dGw0+cezYMZYuXUp6ejq1tbUcOHCA8vJyFAoFSqWS2NhYVCoV3bp1c5sLsDsxGAyMHz8eg8HA7t27OXLkCMXFxdTU1LB48WIOHjxIXV0dx44dcx7XvXv3ctdddzlrUEJDQ7HZbJSVlZGTk0NcXBwKhQKtVkt8fDxbt27l0KFDZGRksHXrVkpLS+nRoweyLLN69WqWLl3qTOxGo5Ha2lpnUo6NjSU0NNRZ9X3y5MlGx7VhXmJJkpyfCQ4ORq/XO6cXPdeUou52PkhS/dSot99+O99++y2jR49u1/NMtwUKpYrRt9xx1us2q5Ws9F0EhIbTf9Q1eJxq2b3gq8/p3KMXCd16XvAZ8rldzHnpXom73SVlk8nEtm3bqKurY9u2bdTU1FBaWsrBgwfx8/NDq9VSU3N2fzSdTofJZGqyWlatVmOz2eo7r9tszj6Qp/ctbvi7IRE2LAP11Zm1tbXO9TVUa57eire2tpa5c+ei1+u5++670el0PPPMM87PnHkxVCgUeHp6kpKSwqOPPuqMo2FKv1mzZlFYWEhERAS9evU6axQ0SZIaNYAxm80sX76coqIibr31Vjw9PXn99ddxOBzodDrq6uqcsZhMJjHwfxO0Wi09e/Z0JrLFixdz4MABduzYgUajcR7X5557znmeeXp6UlFRcda6/Pz86N27N3V1dWzYsIG4uDhiYmIoLi5m8+bNXH/99ezYsYOSkhLuuOMOiouL+fDDD7n//vuJjIwkOzubhQsXNjrHVCpVo/PozONaW1uLLMsolUrnGOx2u52YmBjWrl2LSqVqE1XXDTQaDcOGDeO3337jxx9/5PHHH3d1SB2aQqGge/8hZ+W/nAP7kR0O4pO7Exge6byWRcQnsPi7r4h5PRmtsiVHrnOfhAztcPCQ3bt3I8syn3/+Oe+99x6ffPIJ3bp1Y/369RgMBrp27cqiRYs4evQo2dnZ7Nixg4qKCnr16sXu3bvZuXMnlZWVrF271tk4KjQ0lA0bNlBeXs7vv/9+VpeYpoSEhLB582Zqa2uZM2dOo2SvUqnw8vLi+PHjVFdXA/XdUUpKSpxdabKzs6murnYmPz8/Pw4fPuxM7mq1mqioKNRqNYWFhQQHB3Py5Emys7MxGo307duXH3/8kSNHjlBdXU1JScl547XZbJSXl2M0GunUqRMnTpxwluYiIyPx8fFh/vz51NTU8MMPP1zu4Wn3ZFnGYrFQWFhIdXU1CoWC0tJS53E9fPgwVVVVzs+PHTuWH3/8kYMHD1JWVsbatWs5fvw4Go2GiIgIRowYQWlpKRs2bEClUuHn58eGDRvo27cvdXV1HDp0iJSUFGpqaigpKaFz584EBARw8OBBrFZrk/E1aOq4NlRD+/v7ExgYyLJly8jIyGD//v1ERUU5G461BZIkERISwk033cT8+fMb1RQJLnJG/nM4HOQeysBiNhPVuWujGpfkvgM5kXuYvMNZp87bjlEQaHdJeePGjaSlpREdHU1ERARRUVEMGzaMdevWYTQaufbaawkICOCJJ57g7bffpra2FqPRSO/evZk4cSKzZs1iypQprF27lrCwMCRJYtq0aSxdupR77rmH2traiyot3HLLLWzdupWbbrrJWZXYQKPRkJKSgizLTJw4kQ8++AAPDw+uvvpqFi5cyNSpUzl48CC+vr7Ok/Tqq69GlmUmTJjAjTfeiNlsJi4ujuuvv55Zs2Zx9913s2TJEgICAvD09GTatGmEh4fz1FNPccstt2A0Ghk9evQ549Xr9fTv3599+/Zx++23s379ejw9PVEqlajVal599VXmzp3LDTfcACC6mTShoKCAW265hYkTJ/Lrr78ydOhQ+vXrx7Bhw1iyZAlTp04lIyOj0XEdP34848aN409/+hPTpk3j0KFDBAUFAfVJ
JS4ujv79+7Nx40aysrLo1KkTYWFhGI1GEhMTCQwMxGg0Eh4ezvjx43nkkUd4/PHHsVgsFxxHXa1W88orrzB//nznQBsNxzUoKIibbrqJRYsW8dxzzxEQEMCwYcMaTSrQFmpLNBoNffr0ITw8XNxMuqGaygqK8/Pw9PXF6O3T6D21RkNsUjfSN647NbCHe5VoW4rkzj+stLQ0ueFZVnZ2Ntdffz3/+Mc/zjuubcOcqmq12lmd3NCiVKvVOqugG2YTaajSk6T6GUYaGrQ0JCP4X+kH6geGcDgcqNVq5zoaShcN61Cp6p8KWCyW+tlOVCrsdjsajea0WVgczjgatuVwOJzxNyyjVqudy1gsFhwOB5IkOfu+NqxHlmUUCgWqU7OqAM71N1RJnv6dNDRwO70688z9b4ijoevOmfvTEMP5nic6HA4mTJjA7Nmz6dev3zmfSbqK3W5n7dq13HPPPcyfP/+ilzOZTGi1Wud3I8tyowkYGo5Fw2OK07/XhvOn4bto+N6h/vxSKpXYbDbn3w3nr0qlwuFwNDr/Gs4rqD/eNpvN+XgE/ldl3VBqPv38AJyNHRuOq81mw8PDw/ne6efjmcs2pWHs6zvuuINNmzYRERFx0d9pS7FYLCxYsIAvv/ySd999t0N16Uvpnso19z7KgHO0nna1wmO5/P7jd0TEJXD1pJvPev+3r78ge98eHv3Hv1GqzvG0tRUfCf/76QcZO7Avr7zyyhWtR5Kk7bIspzX1Xrt7pnxmCa4h8TYkyoZnqU2V9BougmdqGIDjTE0lmNOXP32ZM7enUCjQaDSNWq0qFIrzLtPUIBTnirlh+ab28/SkfrHrOvM7aGq97tQCt7mca59ObyDXcEN2rhmbzve9Ao3OzwanD3N5+vKnn3NnLtfU8T69jUNTznfOXSjutkKtVtOjRw/CwsKYN28eXbp0aXfnaXO52PzW+HOXmhX/93mb1YrdZkV9xrCuDZ/QGYzUVl1ghjnp9CXaPvcqtrQid6khcMdJKC73u3GX+JvT5eyTu5xb0DLHxJ3272JIkkR0dDQjR45k69at7Nu3r83tQ2u52LPF+Tn5UpY6eytKtRqlUt1oHOvTP2GqqUZv9Lz4dTbnYXXRKdJhk3JLJpDW+MG35DbaY3K9WM3xvbb3768t7p9KpaJnz574+vryxx9/iMFvmssVngoGoyc6g5HK0pKzfnuyLJN3JIuozokXf84156npotO8wyblltQaF622eGFsC8T32n7Fx8czePBgdu3axYEDB0Rp2Q0YvH0ICA2jsqz0rGpqm81K9t7ddB8w+KLHzj5b2zvGIim7wMVcDMQFQ2hJHfH8UiqV9O3bF6VSyaZNm5yN6zqclj70l7B+hUJBdOdENFotRw81vlHK2LaZoPAoIuI7X8HNctu7yW5TDb0sFgvHjx93dRhXpKUbQ8my7JxCzR1KfQ0teN2d1Wp1236sDRcqdzieF9IwRKu7SkhIoGfPnmzbto2hQ4c2y3Smbc4Fd/cKG01dYpuviE6dOZZ9iMryUmSHA+lU48LMPTsZd8c9ZzUCa+/aTFLWaDT4+fnx22+/tckJ2KG+68vRo0edIz61BJPJRG5uLkqlkqCgILy8vFx+0QkKCjpny2RXkyQJvV6Pv78/X375pavDaVJlZSU1NTX4+vo22QvA3dhsNhISEs5qUe4OVCoVQ4cOZfPmzezYsYOYmJg2ez1pOa14vZDq+yM3TAxxuhsecLMR2Fqpcsn9fjXn4O/vz5tvvtlmG2g4HA727t3L559/zm233dZik6+Xl5ezbt06Dh48iFKpJD4+npSUFCIjI12anGNjY11+c9AUSZJISEjg3XffdXUo57Rs2TIOHjzIhAkTiIqKcnU4F6VhxjF31DA72qZNmxgwYADR0dGuDsn9nKuw3JZ6Hl0o1kvdl1ba7zaTlPV6PcOGDXN1GJdFlmVMJhMzZ87kqquuYurUqS06ItaECRPYuXMn8+bNIycnh7q6OgwGAwMGDCAyMrLFttsWSZKEr68vI0aMcHUoTZJlmR07dtClSxfGjBnjnApUuHxqtZoxY8bw5ptvsn//fsLCwsQIdWeSaDppuUNCvthkeqHPuMO+NKHNJOW2bvny5ezcuZPvv/++xS8AGo2Gfv36kZqayo4dO1i2bBnz5s1j6dKlDB48mBEjRrSZEldHZzKZyM/PJzw8XMwJ3IySk5OJjY1l3bp19O7d2zm0qXAaN01abhtXMxFJuRWUl5fz9ttvc9ttt5GYmNhq2/Xw8GDgwIH06tWLXbt2sX79elauXMkff/zB8OHDGTdunCh5ubmioiKqq6sJCQlx2+fybZFWq2XChAm8+eabZGdn4+/v3y5GL2tep4qkzVVlLctw2Y+wTguiLVWhXwaRlFtBQwOiadOmueS5qoeHB/369aNHjx4cOHCALVu2sHHjRpYsWcKoUaO47rrrxFyzbqqgoACFQkFgYKDbjRve1vXu3ZvQ0FBWrlxJcnLyBSfwaKuOZh7AeGq+buHKVJQUt/g2RFJuYUeOHOGjjz7igw8+cGkVWcPY1ampqXTt2pXBgwezceNGli9fzuLFi5k0aRLXXnst/v7+LotROFthYSFKpfKs+bCFK6fX67nuuuv4v//7P2655RY8PT3dsjHilUjo1Inj+3ZTkpPl6lDOyeFwUFNdjUqlQufm83WrHTYCAwNbdBsiKbewN998kz59+jBy5EhXhwL8LzknJiYSExPDoEGDWLt2LT///DM///wzd955J2PGjGm3pYa2prCwEJVKJZJyC7n66qv55JNPWL16NREREU1O1NKW/fu9d6k7bfYyd1RcVMSMGV8SFxfLTTefPVOUO5GgxX+LIim3oPXr1/Pbb7/x+++/u13/0ob+uZ07d3bOOb106VI+/PBDZs2axaOPPsqgQYPcLu6OpK6ujoKCAoxGo6jBaCENc6zPnTuXyZMnt7ukHBMT4+oQLuiElxfe3l4EBQXRtUsXV4fjcuIhVQtxOBy89tpr3H777XTt2tVtq8UUCgV6vZ74+Hjuu+8+vvjiC3r16sXjjz/ObbfdxtatWzvucIQuVlpaSmlpKaGhoRgMBleH0y5JksSNN95IZmYmu3btwuFwuDokoYMTSbmFLF26lAMHDvDUU0+1iVadDfPqdu7cmRdffJE5c+YQFxfHpEmTmDZtmhjA3wVKSkqwWq0EBwe77U1dexAcHMzIkSOZPXt2mxgSVmjfLjspS5IUKUnSSkmS9kuStE+SpCdOve4nSdIfkiRlnvp/31OvS5IkfSBJUpYkSXskSerVXDvhbhwOB++//z733XdfizcKaG6SJKFWq+ncuTNvvfUWf/zxBxqNhr59+3LXXXdx+PBhZFkWCboVnJ6UhZYjSRK33norixYtorKyUpzbgktdSUnZBjwjy3IS0B94RJKkJOBFYLksywnA8lP/BrgGSDj133Tg0yvYttuSZZkFCxZw7Ngx7r33XpRKZZss5UiShFKppGvXrsyYMYONGzdit9vp3r07jz76KMeOHcNut4sLWAuRZZnS0lJsNptIyq2gf//+hIW
FMW/ePHFOCy512UlZluUTsizvOPV3FZABhAPXAV+f+tjXwPWn/r4O+EautwnwkSQp9HK3766sViuff/4506ZNw9fXt00m5NNJkoQkSSQnJ/Pdd9+xYsUKCgoK6NOnDy+88AJHjx7FYrGIC1kzs9vtnDx5EqVSKZJyK5AkiSlTpjBr1ixxsym4VLM8U5YkKQboCWwGgmVZPnHqrQKg4YoSDhw7bbHjp15rNxpKyaWlpdx0003triWnJEn07duXn376iZ9//pmcnByuueYa3nnnHY4cOYLJZBIXs2ZSUVHBiRMnCA4OxkcM/NAqbr75Znbt2kVGRoarQxE6sCtOypIkGYFfgCdlWa48/T25/gp9SVdpSZKmS5K0TZKkbUVFRVcaXquqqqpi5syZ3HzzzQQFBbX5UvK5KBQKhgwZwsyZM3nvvffYsmUL06ZN47///S/Z2dnU1taK5HyFKioqqK6uJjw8vN2eR+4mLCyMUaNG8c0337g6FKEDu6KkLEmSmvqEPFOW5TmnXi5sqJY+9f8nT72eB5w+RVHEqdcakWX5c1mW02RZTmtLjaRkWWbhwoXU1dUxfvz4Fpsv2Z1otVrGjh3Ld999x5NPPsmyZct48sknmTlzJgcOHKCmpkYk58tUUVFBVVUVERERrg6lQ7n99tv59ddfMZlMrg5F6KCupPW1BMwAMmRZfu+0t+YDd536+y5g3mmv33mqFXZ/oOK0au42r7i4mIULFzJu3DhCQ0M7VOnG09OTyZMn8/nnn3PjjTeyZMkS/vrXv/LDDz+wb98+Ua19iWRZdpaURVJuXVdffTVWq5V169a5OhShg7qSEb0GAXcA6ZIk7Tr12svAW8CPkiTdC+QCDeOmLQLGAVlALXD3FWzbrTgcDpYsWYLNZmPYsGEYjUZXh+QSAQEBTJs2jdGjR/Pbb7+xfPly1qxZw6BBg+jbty+JiYmo1eoOdcNyORoaedntdsLD21WzC7dnNBq57rrrmDVrFqNHj3Z1OEIHdNlJWZbldZx7Aq2zZow/9Xz5kcvdnjvLy8tj5cqVDB48mKioqA6fdMLCwpg+fTrXXHMNixcvZt26dWzcuJGePXsyYMAAevTogUolRng9l9raWo4fP05AQAC+vr6uDqfDmTJlCrfccgvFxcVizHGh1Ykr4xWy2+2sXLkSu93OoEGD8PT0dHVIbiMyMpL777+fUaNGsXLlSjZs2MD69evp3r07w4YNo2/fviI5N6G6upqTJ08SExMjvh8X6N27N76+vqxYsYKb3XyCBKH9Eb/4K3T8+HE2bdpE7969SUhI6PCl5DNJkkRsbCwxMTEMGzaMjRs3smnTJv75z3/SvXt3rrnmGvr06SOSz2lqamooKiqid+/erg6lQ9Jqtc5JKkRSFlqbuBJeAYfDwebNm6mtraV///5i0oDzkCSJuLg4YmNjGTJkCDt37mT16tW88cYbpKSkcNNNN9GzZ08UCjEce3V1NcXFxW1ihp/26vrrr+eGG26gtLQUPz8/V4cjdCAiKV+B/Px8Nm3aROfOnUlKShKl5IsgSRKRkZGEh4fTu3dv9u3bx++//84rr7xCjx49uPXWW0lJSemwydlms3Hy5Enq6upEUnahlJQUfHx8WL9+PRMmTHB1OEIH0jGvfM1AlmXS09MpLCxk0KBBopR8CRrG1Q4PD2fEiBE8++yzPPPMM5SXl/Poo4/y5z//mczMzA7ZjcpsNpObm0tAQIAoobmIJEl4eHgwYsQIFi1a5OpwhA5GlJQv08mTJ1m3bh0RERGkpaWJUvJlkCQJlUpFSEgIAQEBpKamsnPnTr755humTZvG9ddfz5133tmuR0c7U11dHUePHiUuLq5NTPnZXkmSxNixY3nyySexWq2o1WpXhyR0EKKkfJmys7PJzMxk+PDhHWL0rpbUMF1kYGAgI0aM4IMPPuCZZ57hjz/+YOLEify///f/qK6u7hAl57q6Oo4fP06nTp1cHUqH1jDOu9lsZu/eva4OR+hARFK+DOXl5axduxZPT08GDx7cYUpxLa2h5Ozn58d1113HTz/9xBNPPMF//vMfrrrqKmbOnInVam3Xybmuro68vDzi4+NdHUqHp9fr6dGjBxs2bHB1KEIHIpLyJZJlmfz8fLZu3cro0aNFKbkFNDxz9vLyYsqUKaxatYp7772Xl19+maFDhzJ//nxkWW53ydnhcFBeXk5paSlxcXGuDqdDazgH+/Xrx8aNG9vduSa4L5GUL5HZbGbHjh3U1NQwatQoUUpuQZIkoVAoMBqNPPTQQ+zfv5+bb76Z6dOnM3ToUH777TdsNhsOh8PVoTYLq9XKoUOH8PPzIzS03U013uYolUoGDBjA9u3bxQQVQqsRSfkSyLJMcXExS5Ys4ZprrhGtY1uJJElIkoTRaOSpp54iIyODMWPG8OCDDzJhwgRWrVqFyWTCbre7OtQrYrVaOXz4sBiExk0oFAqSkpJQq9Wkp6e7OhyhgxBJ+RLY7Xb2799PdnY21113navD6bD8/Px45ZVX2L59O7169WL69OlMmzaNtWvXUlVV1WaTs9Vq5ciRI3Tp0sXVoQjU3wxqNBoSExNFUhZajUjKF0mWZSorK/nxxx8ZMWIE0dHRrg6pQ5MkiZCQEF577TWWLVtGTEwMzzzzDK+88gqbNm2irKwMm83m6jDPy+FwOJ9VyrKMxWIhOztbJGU3olarSUxMZM+ePa4OReggRD/liyTLMllZWezYsYNvvvnG1eEIpygUCmJiYnjjjTe46667mDFjBq+88gr9+/dn3LhxJCUl4evr65Z9fgsKCsjNzcVgMGA0GikuLqagoICEhARXhyacolarSU5O5tNPPxX9lZuJw+GgoqKCiooKoH7Mh+rqakpLS8nJyXF+LiQkBK1W2+Ee5YikfBFkWcZsNjNz5kz69OlDcnKyq0MSzqBSqUhKSuKNN95g586dzJo1i7feeou0tDSuvvpqUlJS8Pf3d6vhOw8ePMjo0aMJDg6ma9euGAwGampq2L17NydOnMDb25vQ0FD8/f073IXJXahUKrp27UpVVRXHjh0TreKbgdVqZdWqVXzxxRfOfx8/fpydO3eydu1aAHQ6HW+//TZxcXEd7twXSfkiHTt2jFWrVvHJJ590uJOkLfHw8GDAgAGkpqayceNG5syZwyeffEJSUhJDhgyhV69e+Pr6nvMYNnS1ao3k3blzZ2RZJi8vj7y8PJRKJbIsc/PNNxMUFESPHj2YPn06EyZMcMuSfkegUCjw8fEhODiYgwcPiqTcDJRKJb6+vqxevZra2tqz3pckif79+6PVat3qJrq1dLw9vgwOh4Mff/yR6Oho+vTp4+pwhIug1+sZMWIEb731Fvfffz8VFRV8/fXXvPfeeyxatIiysrIm+55WVlaydetWDh8+3OIxhoSEYDQanf+22+04HA7sdjsnTpxwzlAkErJrabVaIiIiWuWc6AhUKhWxsbH06dOnyaSrUCi47rrr8PHxaf3g3IBIyhfQ0A3qp59+4r777kOj0bg6JOESeHp6Mnr0aP7+979z1113UVdXx8yZM/
nb3/7G3LlzKS0tdX7W4XBw4MAB/vSnP/Hpp5+Sm5vborEplcpzjtwVFBTE+PHjxU2gG9BoNISHh4uk3Iz8/PyYOHFik0nZ29ub/v37d9iBmUT19UWYP38+BoOBq6++2tWhCJfJy8uLkSNH0rt3b3bu3MmaNWv48ccfWbBgAaNHj2bMmDFIksQff/zBmjVr2Lt3L1qtlgceeIDIyMgWiys1NZVdu3Y1GgBFqVTSvXt3brnlFnQ6XYttW7g4Wq2WyMhIdu3ahc1mQ6USl80rZTAY6N27N35+fpw8edL5ulKpZPDgwURFRXXYGiJxdl2A2Wzms88+48EHH8TT09PV4QhXyNfXl+HDhzvncm5IzvPnzyc5OZlvv/0Wi8XCiRMn+O9//4tGo+GBBx4gODi4ReLp1q0bGo2Guro652vh4eFMmTJFjH/tJjQaDdHR0ZSWllJSUtJi50JHolAoiIqKYsiQIcydO9c5toAsy4wcOZKgoCAXR+g6ovr6ApYuXUpVVRWTJ092dShCM/L29mbAgAE89NBDvPTSS/To0YP58+eTm5vrfNacn5/Pf/7zH7766itKSkpaJI6uXbs2erat1Wrp1asXkyZN6rAlBXejUCjw9fXFw8ODEydOuDqcdiM4OJhhw4Y1Ov8jIyNJSUnp0DVEIilfwKeffsqUKVPw9fV1dShCM5MkCW9vb3r16sWwYcM4dOgQFoul0WdOnDjBe++9x6xZs6isrGz2GBITExttMzg4mMcee0ycb25EkiQ8PDzw8fGhsLDQ1eG0GzqdjqSkJOdATCqViqFDhxITE9MhW1036Lh7fhH27t3Lpk2buPvuu0U3qHasrq6OuXPnUltb22SL7JMnT/L3v/+dX375pdknJggPD0er1QJgNBoZN26cmA7UDWm1Wry9vRs9/xSujCRJxMTEMGTIEGd3wL59+3b4yVhEUj6Pr776ilGjRhEVFeXqUIQW0tBP+NNPPz2rlHy64uJinn76aRYuXIjVam227SuVSmJiYgDw8fHhhRdeEC383VBDUi4qKnJ1KO1KREQEaWlpSJJEQkICXbt2dd6kdlQiKZ9DbW0ts2bNYvr06aLU0o45HA6OHz/O4MGDSUhIcHbDUKvV6HS6Ri1ty8vLufvuu1mxYkWzTXohSRLJyclotVqefPJJMaa6mxIl5ZahVqvp0qULCQkJ9OvXT8yQhmh9fU6//vorXl5eDBs2zNWhdGiffvopy5cvb9Q6ubk1jOIVHx9PXFwcZrMZk8lETU0NNTU1VFdXU1NTQ11dHdXV1UycOJG0tLRme+6bmZmJSqVi2bJlrFy5slnW6Q5CQkJ47rnn2sUEGxqNBi8vL3JycpBludUTR1VVFW+88QZ79+5t1e22BpPJRFVVFVu3buXhhx9ud0lZkiSmTZvGhAkTLqoWTCTlJsiyzNdff820adNQKpXt7iRpS3bv3k1lZaVzXltXkmWZmpoaysvLqauro1OnTs2yXpVKRUpKSrsawrG6upoNGzY0GpylLVOpVOh0OqqqqnA4HK3eMt5isbBjxw48PDzaxU3OmQICAlAqlYSHh7s6lGa3ZMkScnJyLrp2TSTlJuzbt4/du3fz+eefi4TsBnr27MnkyZMxGAwtvq3WLgU1JPrTh9tsDwoLC9m9e7erw2g2arUab29vzGYztbW1LhmzwMPDg8GDBzNu3LhzfqY1zt+W2IbNZkOSpGa72XFFbca5ZGZmXtJ+iWfKZ5BlmZkzZzJ69GjCwsLc5sAKraO1j7ckSe0uIbdHSqUST09PbDYb1dXVrg7nnFrj/G2JbahUqmatfWjL122RlM9QXV3N4sWLufnmm8XgDe3UsWPHsNlsZ3V/MpvNnDx5stkacV0ps9lMWVlZizxPb6rrF9RPipGXl+c234G7kCQJjUaDSqVy66R8Iec67q21bZvNRm5uLseOHWuzjzZkWXZeQ1qCSMpnWL58OVqtloEDB3boDuxtkc1mo7CwkOzsbGpqas75uSlTpjTZijYrK4tXX331si66sixTV1fH3r172bt3L/v37+fYsWNXlFD379/P559/zv79+y97HedyrpJEdXU19957L+Xl5ciyTFVVFZmZmRw4cICTJ0+22IWoLVCr1ajV6mbvq94Sampq2Ldv31lduFxdgiwtLeX555/nkUce4bPPPrvo5WRZ5uTJk+Tn51/0Mg6Hg8LCQg4cOEBGRgaFhYXNclNit9uZMmVKi91UiGfKp7HZbMyfP59x48ah1+tdfgILl6a8vJxvvvmGXbt28fDDDzNo0KBW3f7Ro0e59957SUpKQqfT4efnxzXXXEO/fv3a5CQG1dXVLF26lGXLluFwOOjcuTM33ngj0dHRHfK3oVAoUCgUbeLGZOPGjfz1r39l4sSJPPfcc25xvCRJIigoiK+++ooFCxaQnZ19Scv/+uuvlJSU8PLLL1/U53Nzc/n+++/Jy8tDpVKRmJjIHXfc0SptU65E27tStKCjR4+yb98+7r//fjGAQxtUWlpKUVERBoOBI0eO0L9/f5RKJTU1NWRkZFBaWkpERESjGZnKyspIT0/HZrM5S7W1tbXk5+dz8uRJtFotGo2GyspKunTpgizL5ObmYjKZ8PX1xeFwUFlZSe/evYH6wRDefPNNDAYD33zzDevWrSMxMZHi4mJycnKw2WwYDAaSkpIICAhAkiTnOnNycnA4HERHRxMbG9to32pra8nIyCA4OJijR4/StWtXfH19ncs7HA5Wr15Nnz59SE9Pp7y8HJVKRWhoKPHx8ej1ejIyMjCbzdTU1FBZWYmXlxf9+vWjtraW3bt3YzKZCAgIAP5XRbd69WomT55MdHQ0M2bMYPv27YSFhXXI34dCoUCpVLp1Um5o4LR69WqGDBnCunXrePLJJ53Hy2q1cuTIEfLy8lCr1cTHxxMSEoIkSRQVFXH48GEqKysJCgoiKSkJlUrFmjVrSE1NxcfHh7q6OrZv305ycjJFRUWcOHHC2TK9urqaqKgowsPD2bFjBzExMQQGBgL1NZD9+/c/b/uJ3NxcsrKyMJvN6PV6unTpQnBwMBUVFWzatIlDhw5RXV3NokWL0Ol0DB06FEmSKCkpITMzE5PJRFhYGDExMSiVSr799luUSiUvvvgiPj4+7Ny5k5qaGgwGAxaLhU2bNuHr6+scOtVoNGIymTCZTHh4eNCpUyfCwsJQqVRUVlaSnp5OdXV1i84aByIpN7J48WJiY2Pp1KmTeJ7sps7VqtJut1NQUIAsy/Ts2ZOCggLKy8vx8/Nj1apVbNiwAZ1OR3p6unNELofDwY8//khWVhb+/v5UVlbicDgoKytj4cKF5OfnY7fb8fX1pby8nP79++Pp6ckPP/xASkoKe/bsIS0tjUOHDuFwOBr9WJVKJVqtlqqqKux2O4WFhezdu5e6ujrq6urYsWMHjzzyCGq1mpycHP773/86bwCKioqcyRHq+3GuWbOGHTt2MHbsWGbPns3111/P0KFDnSXw8vJy3nnnHb7++msyMzOdz4UVCgVXXXUVgwYNYuHChWRmZhIREeFM2Glpafz888/s3
78ff39/5/fSELPVaqVPnz54e3sTHR3N8ePHqaysbBRfR9EWSsqSJFFdXc2uXbt44403eOaZZ8jMzCQ5ORmbzcb+/fuZM2cOGo0GnU5HXV0d3t7emEwm5s2bx4kTJ1Cr1RiNRgIDAwkNDeWdd97hzTffxMfHh8rKSj788ENeffVV1q5dy969e52D7NjtdhITE5k0aRL/7//9P+644w78/f1RKBS89tprfP311+dNyiUlJezbt4/q6mrMZjPbt29n+vTp1NTUsGfPHgoKCqirq2P37t14eXkxePBgKioq+O2338jNzUWr1eJwOLjmmmvw9/dn//79vPTSS4SHh6NQKBg6dKhzWyaTiXfeeYeUlBS8vLzQ6XQEBARQVVVFWVkZFouF7du3c8cddxAcHMz8+fPZvXs3vr6+pKent2ibC5GUT6mpqWHFihWMGTNGtIZ1Y+eqhqutreXw4cP4+/uTnJzM6tWrycvLQ6PRsGTJEkaNGsXw4cPZtm0bM2fOBOqHzpwzZw7vvPMOUVFR/PLLL85ZgLRaLcOGDXOWTn19fTl48CCdOnXC29ubBx54gOuvv56nnnqKoKAgdu3aRWRkJCUlJXz99ddIkkRBQQFDhw7F29ubIUOGMGTIEBwOB8eOHePhhx9mypQphISEMG/ePKxWK4888ggGg6HRpAd1dXWsXr0aWZYZMWIEKSkpdO/enaysLAYMGMB///tfrrvuOg4fPkxkZCRBQUHcddddzq5Wv/zyC9u2bXOW5D09PbnxxhtJSEhAqVRSVlbG999/z9tvv01SUpKzutput1NeXo5araagoID9+/ejUqmoqanBbDa38FF2Tw1JuTmHWW0J6enpaDQaOnXqRM+ePVm5ciXJycmYTCb++OMPvLy8mDZtGoCz/cSOHTs4cuQIkyZNIikpifz8/As+clGr1fTu3RuFQkFxcTGJiYkcOHDgshvC9erVi169euFwOMjPz+fZZ5/luuuuIy4ujueff57PP/+ckpISXnrpJaD+RjwzM5Ndu3bxwAMPEB4ezuzZs9m7dy8RERGo1WrnTcFXX31FTU0NAwYMoFevXkD98YyPj+eWW27BaDQ6ry0Oh8M53v3Ro0dRq9X8/PPPvPjii/Ts2ZNVq1bxww8/XNY+XgyRlE/ZtWsXNTU19OnTp8OPvdoWVVZWkp2dTY8ePQgPD8dms3Hs2DG8vLyoq6ujS5cuGAwGBgwY4LzY5OfnI0kSKSkpQH1/6PXr1wP1fUK9vLzQ6/X4+fnh5+eHxWJBlmX8/PwwGAwYDAaCgoIoLCzk8OHDQH0J2dfX1zlHclVVFTU1NeTn57N+/Xry8vKwWq2UlZVRXFxMcHAw27dv59Zbb3XO03v6TWFBQQEFBQUkJSURFBSERqMhKSmJefPmkZuby/z58wkLC+PEiROkpqY6B5nYunUrVVVV5OTkEB4e7myc1L17d2dJGepnwXI4HKSmpqJQKBgwYAAffvhho+9Wo9FgMBicFy13eD7pCpIkoVAoGj3+cEfr1q0jNTUVtVrNwIEDmTlzJg899BBWq5Xs7GxuvfVWZ61Iw/8fOXKEiIgIYmNj0ev1FzUwTsN50VCb5O/vj91uv+xSZH5+Phs2bCAnJwez2UxhYSElJSXExsY2ec5ZrVby8vLIzMzkl19+QaFQkJOTQ0RExFmj7fn6+rJkyRJ0Op0zKet0OmftF9SX1Ddt2sTBgwcxmUzk5ORQUlKCTqfDZrPRvXt3NBpNi08YI5LyKStWrCAxMZHQ0FDR6rqNcTgczh/UkSNHWLFiBdnZ2QQEBBAXF+ccRhPqf8gNf6tUqkZVkaf/LUmS87+GElLDcg3nR8N7Dc91oX5SiYkTJ+Lv78/KlSvZvHkzWVlZrFmzBrVa7RyZ7Pfff3duT61Wn7P06ePjQ1paGkePHmXbtm0EBQXRtWtX/u///o+NGzdyww03sHnzZiorK5k8eTInTpzg66+/ZujQoc5xhM1mszOReHh4NHo00/AdNMTfEJNKpcLPzw+r1YqPjw+xsbGsXbsWDw+PJm9a3Wmwhpbmym5FF2I2m9m6dStms5kXXniB0tJScnJynLVIKpWqyZK+UqnEYrE0ecNx+jlis9kanasNNyln/kYaqrOh/vfZ0F6j4Tw58+bGbDazdOlSTp48SWJiIlqtlvXr15/1mzxdQze14OBg+vbtiyRJpKWlERQUhKenJ1arleLiYsLCwrjuuuvYsGFDo+UVCoVz3mar1cqmTZvYsmULPXr0QK/Xs3fvXux2O0qlErvd3uQ1pCWI7EN9NeauXbvo27cvPj4+rg5HOIemfgiyLGOxWDh48CARERE88sgj3HHHHVxzzTUUFRUhyzLe3t5s2bKF8vJyFi1a5PyhR0VFodVqWbVqFcXFxc5q4kvZ/rnes9lsVFVVUVdXh8ViIScnh5iYGIYOHdqosZAkSQwbNowFCxZw5MgRysrK2LFjh7PLlre3N6mpqYwaNYrNmzdz6NAhPD09MRgMrFixgmuuuYaSkhIOHTpEcnIyFRUVFBQUMGDAAJKTk7Farect2UVGRmIwGFi2bBk1NTUsXLgQWZZRKBQEBwej1WrZtGkTGRkZ5OTkEBkZiZeX11nr6SgJ2d0dOHCA2tpaHn/8cW6++WYefPBBunTp4ryhSk5O5o8//uD48ePk5eWxd+9e5zC2hw8fJj09ncrKSrZv3+4c5zskJIRt27ZRU1PDqlWrLuqZemBgIOnp6dTU1PD77787Z2BrSMg+Pj4UFBQ4q7otFgvHjx8nICCAwYMHO0unp/Px8eHo0aPOWh+1Wk1kZCS+vr74+PjQr18/fHx8sFqtBAYG0r17d+bOncvx48epra2lvLz8nPHabDYKCgqcJWE/Pz/njUhYWBhGo5EVK1ZQW1vL/PnzL/PoXBxRUqZ+fGWVSkVCQoKounZjTV34JUnCZDKRkZHBgAED6N+/v3NS+pkzZ1JeXs7UqVP5/vvvWbp0KX379nWOoe3p6ckjjzzCt99+i0KhICoq6oLP0S50h5ybm8tDDz3krPYePXo03bt3Jy8vjwULFvDzzz+TlpbWaJjGCRMmUFZWxhtvvIEkSQwfPpykpCTy8vKcF7G0tDT27dvH/PnziYyMJDExkezsbHx9fencuTPFxcXOKruUlBReffVVAgICnLMbnYtOp+Ppp59m1qxZfPfdd/Tu3RuVSoUkSURGRjJy5Eh+/vlnzGYzffv2pU+fPm2ye1dHsWXLFpKTkxkwYICzFHjgwAFWrlzJtGnTGDduHN9//z1/+ctf8PT0ZOLEiXTu3JkePXpw8uRJ5s+fz1dffUVKSgr33XcfALfeeisfffQRGzZsoEePHs71ns+ECRP4+OOP2bJlC0OGDGnUDalhZiitVst9993H4MGDuffee+nbty/z5s1j1apVpKamotPpGv3mBw8ezOLFi7nnnnvw9fXl3//+NwkJCQwfPpxZs2ZRVVVFdHQ0kyZNwmAwcNddd/Hdd9/x6quvIkkSPj4+DBkypMl4tVot3bp1Iz09naeffpouXbqgUqmcLcsfffRRZs6cyezZ
sxs9AmsJkjtXxaSlpcnbtm1r8e289dZbFBUV8dRTTxEREdHi2xMu3oMPPoi3t/d5x7622+1UVFSgVqudyc5isVBVVYVer0elUlFRUYHZbMZoNFJVVUVISIizyq6srAyHw+G8O/f29qaurg61Wo3FYnEOAVhbW4tGo8FiseDr60tBQQGBgYFYLBbMZnOjRloNVWuenp54eHhgMpmorKzEbrdjMBgwmUz4+fk5u6pUV1dTXV2NLMt4enpiNBqxWCyYTCZ0Op2zJbfZbMbHx4eamhqsViv+/v5UVVVhMpkIDg7G4XBQUVFBbW2t86KiUCjw8vKiqqoKlUqFXq9v9IjGYrFQWlqK3W7HaDRSW1tLUFAQCoWCuro6KioqcDgcGI1GDAbDRfVMKCws5E9/+hPvvvsuAwYMuNLTwC1s376djz/+mNtvv53hw4e36rZLSkq4++67Lzj2dUVFBXa7HR8fH2d1csNMZyEhITgcDmc7h4ahQxuSX8NsTVarFZ1Oh4+PD5IkYbVaKSkpQZZlDAYDdXV1+Pr6YjabndtwOBzOdhQNv9PS0lJsNhuenp5UVVURFBTkvCFuaEhoMpnQ6/XO9VVWVmK1WtHr9c5zvaGgJMsyxcXFWCwWFAqFsytXw3I2mw2tVounpydqtdo5+E1NTQ2yLKPVavH19UWpVOJwOCgqKsLPz88Zk8VioaKiAovFgoeHh/P3oNPpnO1ArFYrnp6ezu/zYn4Lr776KsOGDeOBBx5w3tBIkrRdluW0pj7f4W95S0pK2Lt3L8OGDeuQ3TzaA6VSiZ+fX6PXNBqNsxEL0OjYnl5y1Gq1hISEnLXOhmR5eqnAw8Oj0WdCQ0MBnIkOOGcfRr1e7/wMcNZjEk9PTzw9PRs9m9VqtY1qbho+c+Y+eHl5OauUFQoFvr6+TU4rea5HMxqNptF3cPq6dTrdOUtGHek5cltxZq1Iw9jqDY0HFQoF3t7eTdaenOtYazQa57l++jaa6qve8BuRZdnZcBE465GHUqls9PtsWPbM39iZ+xIUFNTkNptaTpKkc+5rw+OZ02k0Gme/6jM1PLtucL7apyvV4Z8p7927F0mSRNW14BbaUpJrS7EKrUucG5evQydlWZZJT0/H19eXiIgIcSIJZ3HnxzsXo63HL7SOtn6etPX4T9ehk3JFRQUZGRnOoeEE4UyXc6PmTheIlrjRdKf964ha4vt3pwLJ5eyfO8V/pTp0Uj506BB1dXV07dr1vM8yhI6jOS547ekC0ZT2vn/u7nzff2vcMLX0Nprj/GrLN44dtqGXLMtkZ2ejVquJiYlxdTjCeZjNZucY0kLbUF1d7fYjX7U1siw7W/E393obGu2JG67md6mDjXTYpGwymcjOzsbHx6fFZ/0QLp/RaGT9+vUcOHCg3Y20ZrVaMZlMTQ7G0dZZrVYkSXJ2NxGujCRJqFQqFi9ezObNm694fQ6HA4vFgsVioba2lurqavz8/JxdqVqa3W53dllsbzOOnT7CH9QPYapWqy/6hqfDJuW8vDxOnDhBr169WrR5u3Blbr75ZtLS0tx6Zp7LtXv3bhYtWsSDDz7o6lBahNFoFLVQzcRgMPDwww83mqzkUlksFsrLyykrK6OgoID8/Hxqa2uRJIlOnTrRt29fYmJiWmVwmBMnTvD9998zfPhwunbt2uLbc7VevXpd9A1qh0zKDXPF1tXV0blzZ1eHI5xH37596du3r6vDaBE6nY4//viD2267TVQbCuel1WoZNWrUJS0jyzKVlZUUFRVRWFjIkSNHKCkpITs7m9LSUjp16sTUqVMZOHDgRY1m11xsNhsrVqxg0aJFPPbYY2f1F+7oOmRSbpjoW6FQXNRsKIIgCG2B3W7n5MmT5OXlcezYMQ4ePMjRo0c5efIkkiTRtWtXbr31Vnr16kV4eLhLbgZra2vZsGEDSUlJIiE3oUMm5ZKSEg4fPkx0dHSTI8QIgiC0FSaTifz8fA4fPkxmZia5ubmUl5dTXV1NQEAAKSkpdOvWjdTUVJe3X2gY9nPr1q3cfffdLo3FXXXIpFxYWEhBQQETJky4qLFLBUEQ3IUsy9TW1nLkyBH279/P/v37KSwsxG63o1KpiIuLY+DAgXTp0oX4+HjnBCPuwOFwkJ2dTWFhYbsZE725dbikbLfbycvLo6Kiwjm5vSAIgjtrmGhk37597Nq1i4yMDKqqqjAYDAQGBtKvXz/i4+OJi4sjJCTEOc+3u7FaraxZs4akpKRG42kL/9PhknJtbS1ZWVn4+/uLrlCCILgtWZYpLCxk69atbN26lX379iFJEsHBwcTHx9O1a1fCw8OJiIjAx8enTdT6mc1mVq5cyR133NHuujg2lw6XlCsqKsjNzaVr165iAgpBENyKw+HgwIEDbNy4kfXr15OZmUlwcDCdO3fm2muvJTk5GX9/fwIDA/H09HTL0vC5OBwODh8+TG5uLldffbWrw3FbHS4pV1ZWcvToUcaOHevqUARBEKipqSE9PZ1Vq1axfPlyKioqSEhIIDU1lbvuuovQ0FB8fHyccx+3VQ6Hg9WrV5OYmEhYWJirw3FbHSop2+12CgsLKSsrIzk52dXhCILQQZWVlbF582ZWrFjBqlWrsFqt9OzZkylTpjBw4EB8fX0xGAzo9fo2US19MRwOB8uXL2fUqFHtZp9aQodKyiaTiX379hEaGioaGQiC0GocDgdlZWWsWrWKBQsWsH79ejw8PBg8eDAvv/wyvXv3xtvbG61W6xx2si1VTV+ILMtUVFSwdetW3nrrLVeH49Y6VFKura3l0KFDpKamikYGgiC0qIaGWosXL2bBggVs2LCBgIAARo4cyX/+8x969eqFXq9HpVK5bWvp5rRq1SoCAwNJTEx0dShurcMk5Ya+fYcOHWL8+PGuDkcQhHbizBmAMjIyWLhwIfPnz2fr1q2kpKQwceJEXnnlFbp3745CoXAWCtp7Im4gyzJ//PEHo0aNEgWiC+hQSbmoqIiTJ0/SvXt3V4cjCEIb1jDdYcN0ijt37mThwoX88ssvnDx5kgEDBnDLLbfw/fffEx4e3mjZjpKITyfLMsuXL+fDDz90dShur8MkZYvFwu7du4mIiBDPkwVBuCQNpeGGKQ/z8/PZsmULixYtYtmyZTgcDoYOHcrrr7/OmDFj8Pb27pDJtymyLLN7925KSkoYPHiwq8Nxex0mKVutVg4ePChG8RIE4aI0lIQtFgt1dXWcPHmSzMxM3n77bQoKClCr1Vx11VV88sknDBkyBIPBIBLxOSxatIihQ4e6fOzttqDDJGWLxcKhQ4e4+eabXR2KIAhuzGq1UltbS3V1NYWFhWzatIn169ezdetWTCYT48eP56233qJbt27o9XpXh+v2ZFlmyZIl3Hbbba4OpU3oEEn59AHcRUlZEITTNVwfysvLqaioIDMzk/T0dHbv3k12djYJCQkMHjyY0aNHs2rVKm6++Wb69evn6rDbjMLCQvbs2cPXX3/t6lDahA6RlBtmJgHE/MmCIGC326moqODkyZMUFBSQkZHBgQMHOHr0KJWVlSQlJTFp0iT69+9PbGw
skiSxfft2Vq9e7erQ25wlS5Y4J8sQLqxDJGW73c6ePXvo3LkzBoPB1eEIguACVquVkpISjh07Rk5ODpmZmRw9epT8/Hy8vLxITk7m2muvJS0tDR8fH/F8uJksXryY8ePHi+/zInWYpJyenk5qaqqrQxEEoZXIsozNZuP48eNkZ2eTlZVFTk4OZWVlmM1mAgMD6dOnD927dyc5ORmdTicSRzOrqalhw4YNPProo64Opc1o90lZlmVnUp4wYYKrwxEEoQU1PB8+fPgwGRkZZGRkUFhYiM1mQ6vVEhkZycCBA0lMTCQyMhIPDw9Xh9yu7d69G1mW6dmzp6tDaTPafVIGKCkpIS8vT5SUBaGdKiwsJD09nT179rBv3z7MZjNGoxFfX1/69u1LbGwscXFxhIaGolJ1iMueW1i2bBkDBw7EaDS6OpQ2o92fnbIsk52djcFgENOFCUI7YbPZyM3NZdu2bWzatImsrCw8PDwIDQ2le/fudO7cmbCwMMLDw/Hz8xNDO7rIypUrmTJliqvDaFM6RFI+dOgQcXFxqNVqV4cjCMJlMpvNZGVlsX79etatW0d+fj7BwcF07tyZe+65h4iICAIDAwkMDBQNOt1Afn4+GRkZXH311a4OpU1p90kZIDMzk4SEBFeHIQjCJTKbzezdu5cVK1awZs0aioqKSEhIoHfv3tx///2EhYXh4+ODt7c3SqVSNNRyI5s2bSIiIoLIyEhXh9KmtPukLMsymZmZjBkzxtWhCIJwAQ0NtdauXcsff/zB2rVrMZvN9OzZk8mTJ9OnTx9nSVin03WIKQ/bqtWrVzNo0CDUarU4RpegQyTlrKws0SRfcLkVK1bw1VdfUVpaCkBRUREnTpxw9gqQJIlOnTrxzjvvdLjGSMePH2flypUsWbKE1atXYzQaueqqq3juuecYMGAAXl5eaLVa1Gq1eD7cBsiyzLp163jppZfE8bpE7fqX3zCtWl5enhjJS3C50NBQNm3a5BxdrmHCg8WLFwM4GyoplUpXhtkqHA4HmZmZLF68mAULFrBt2za6du3KVVddxTfffEO/fv1QqVSoVCpRGm6DMjMzKSwspH///uLYXaJ2nZQBsrKyUKvV4rmG4HJdu3YlJiaGw4cPY7fbna87HA4AvL29uf7669vVRaxhykOoH1Fr//79/Pbbb8yZM4cjR47Qu3dvJk6cyBdffEFUVBSSJDlLVu3pe+hoNm7cSHJyMj4+Pq4Opc1p90n54MGDxMXFdbjqQMH9SJLEpEmT2LNnD4WFhWe97+HhwbBhw1o/sPNoKM1LknRRSbIhCTcsV1tby+7du1m0aBELFiygpKSEfv368dhjjzF+/HgCAgKc6xVJuH2QZZmNGzfSt29fVCqVOK6XqN1nquzsbOLj48WJIbiFa6+9ljfffPOs1z08PLj22mvdZipAWZZxOBxkZWWRl5fHVVdddd5q9YYhLW02G7W1taxfv57FixezcuVKamtrGTx4MK+++ipXX301QUFBrbgnQmuz2Wxs27aNv/zlL6IwdBna/TeWn59PRESEq8MQBAAiIyPp1q0bJ06cwGazOV/X6/VuM9e3LMvU1NSwZs0aXnnlFcLCwoiJiTlrlh+73Y7ZbMZsNlNYWMiGDRtYuXIla9euxcPDg6uvvpp3332XwYMH4+3t7aK9EVrbgQMHMJvNdO/evUO0j2hu7T4pnzhxgsGDB7s6DEFwuvXWW9m8eTMlJSVAfbVtYGAg/fv3d2lcDVXO+fn5zJ49m9dff53a2lpsNht79+4lNjYWq9VKTU0N1dXV5ObmsmHDBjZu3Eh6ejrR0dGMGDGCRx99lF69eonBejqoXbt2OWfkEzWUl67dJ+WCggJCQ0NdHYYgOI0dOxadTuf8t1arZerUqS6t6pNlGbPZTHp6Ou+99x4//vijswHa4cOHWbt2LVFRURw5coTdu3ezfft2Tp48SWJiIhMmTODdd991zjssdGy7du0iKSkJrVbr6lDapHaflPPz8wkJCXF1GILg5O/vz/Dhw/n++++xWq1oNBpuvPFGl8ZUWFjI77//zuuvv86RI0ecCRnAZDIxd+5cdu7ciU6nIzU1lYceeoh+/frh6+sr+qEKTmazmX379nH33Xej0WhcHU6b1K6TssVioaSkRExEIbidO+64g19//RWbzUa3bt1ITEx0SRx2u52dO3cya9YsPvnkE2w2W6PuWoCzm9Itt9zCzTffjJeXlygRC03Kzc2lqqqKLl26iMcXl6ldJ+Xi4mKsVqsoKbdDDa2D26q+ffsSEhLC4cOHueuuu5zzfremkpISFi5cyLfffsuaNWvOuX2Hw0FxcTFQ3yDtzO9d9CsWGuzdu5ewsDAxM9cVaNdJOT8/H4PBgJeXl6tDEZqZLMu8//77ziEr26KGRLx582ZycnJaffsnT57k119/pby8/II3BGVlZcyePZvDhw+fdbGdPHkyPXv2FElZID09nbi4OLfp2tcWteukXFhYSHBwsLhja4ccDgf/+c9/SExMJCoqytXhXJYRI0YQEhKCh4cHFRUVrb59lUrFpEmTMJvNWK1W538WiwWbzYbFYsFsNmOz2bBarc4ha0+/yZ0/fz4JCQn06NGj1eMX3IvFYuHAgQOMGDFCJOUr0K6Tcnl5uegf2Y4pFApGjRpFv379XB3KZXE4HC7tR99QUrfZbDgcDux2+1n/bvjbZrOh/f/s3Xd8FHX++PHXbMsmu+m9hyQQQi+hNylSRRCQooBiPxt3p2LveJb7Yjmsh4eeXVA8UBEQpEpNaKEFEtIL6XU3W+f3B2R/REIRkmzK5/l48CDZnZ15z8xm3vMp8/m4uODn54dWq3WsIzExUZSQBQByc3MpKysjNja23ndE+HPadFI2GAzijq0NkyQJjUbTqs9xx44drzmp1Q2D6Yx1iMEhhDopKSl4e3vj5+cnaievQZs+ckajsd7zoEL7df7ECBeTlpbG4sWLqa6uvuC9xYsXc/jw4SbpjPXhhx/yww8/XPXnryaZPvbYY5w+fdrRaUuUdoVrlZKSQnBwsKidvEZtuqRcW1srkrIAwMmTJ3nppZcYO3Yst956a4MDddTW1pKVldVg4s3KyqKmpgaz2cxtt91GVVUVCoWCkJAQbrnlFkaOHHnVseXn51/TwCFXU8pNT0/HaDQCZztx/d///Z9j1K6HH374giE1BeFS7HY7J0+eJDo6WnSsvUZtuqRcW1sr2jYE4Gzbp91uZ/PmzddU2q0bhvKFF17gv//9L7fccguvvvoqubm5jRLnlZTo/+haS7nvvPMOBoOBJUuWEBERwWuvvXZFx+hqYhXapoKCAgoKCujQoQN6vd7Z4bRqbbqkLKqvhTqJiYlMmzaNpUuXUlpa6nh2PTk5mX/84x9UVVXRv39/x/J1jwD9+OOP9OrVi7y8vHrr8/T0xNfXl8GDB6NWqzl9+jQqlYpFixZx/Phx3N3dmThxIrfeeqtjW9u2beOzzz4jIyOD4cOH89hjj9W7abRYLOzYsYOVK1cyatQojh07xoMPPoiPjw9wNgkuXLiQ22+/nRUrVrB161
bsdjsdOnTgiSeecPSAzsrK4plnniEqKopdu3bh4uLCrFmzyM/PZ9OmTQwZMoSysjLgbI/Z9evX8+GHHxITE8Ott97KbbfdxsmTJy87oImo8hbqZGRk4OrqKp52aQRt+uiJpCzA2UfjcnNz6du3L507d2bXrl3IsozFYuH5559n8uTJvP/++1RXV2O1WpFlmW3btpGRkcErr7xCr169qK2trVcyrOu5nJiYSFVVFTExMQQEBLB8+XJ+//13li9fjtFo5Ndff8Vut3Pw4EGWL1/O1KlT+eabb+jbty/p6emOddpsNpKSkvjPf/7DwoUL6dy5My4uLhQUFDgmiigqKsJiseDj48Mrr7zC9u3b2bZtG7fffjuPP/64o3RbN9iHLMt89tln/PDDD5hMJmpqanjnnXeIiIjAZDIBZ5/lr62tpVOnTsDZwUH8/f3Jyclp5rMktGaZmZl4enri7e0tbtauUZtOyqJNWQA4cOAAwcHBuLu7k5CQwO+//44sy6Snp1NWVsbs2bMJCwvjpptuQpZlqquryc7OplOnTvTs2ZMJEyZcMKnJnDlz6Nevn6PkWlcaliQJSZIICQkhNjaWsrIyqqur2b17NwkJCQwZMgRfX18mTpxIfHw8kiQhyzJHjhzh5Zdf5sEHH6RTp054enri7u5Obm4u77zzDh9++CFHjx4lJCQErVbruPAplUpGjRqFzWYjOzvbEV9QUBDTp08nKCiI6upq8vLy6Nu3L3FxcUybNg0vLy/g7I2rQqHAarUyffp0UlNT0Wq1jvbmhohqa+F8siyTlZXlSMrCtWnT1ddms1mMv9rOybLMoUOHCAkJwWazER8fz+eff47JZKK8vBwPDw9HdZu7uzsKhQKz2YzNZnMkP71ef8Hg+t988w0dO3YkLS2NJ554gqioKBISEvjyyy/5+eefKS0txWQyMW3aNGw2GxUVFQQHB6NWq+uVJOpKwQcPHqRnz55s27aNfv364eHhgYeHB4cPH0ahUFBSUsLBgwcJCQnBxcWFtWvX8uWXX5Kdne0Y9KO2ttaxXrVa7Si11L1eN2uPu7u7o2NZ3bCZSqWS77//nurqaoxG4yUfMxMlIeF8tbW15Obm0qFDB9HzuhG06ZIyiAtIe1dTU8PJkyf56aefmD17Ns8++ywGg4G9e/fi6+tLVVWVY87goqIi7HY7Li4uaDQaqqursVgslJaW1kt4dRQKBVFRUfTq1Yv9+/eTmprK5s2befvtt9m8eTMLFy5ErVYjyzJ+fn6UlJRgMBiw2+2OkbLq1nPrrbeyaNEikpOT2b59OzqdDnd3d5KTk4mIiHD8HBwcjNFo5L333uPBBx/kt99+47fffkOv119Qgq377ru5uaFQKBzV83XV4ADBwcHo9XqOHz+OLMtUVlZSUlJCeHh4E58Zoa0oKCigurqa0NDQVj1mQEvRpkvKgnDs2DEkSWL58uXExsYiSRJPPfUUmzdvdnSG+vrrr+nduzcbNmxAkiR0Oh3R0dFs27aN7du3k5+fT2FhYb315ufno9VqKSws5OTJk9x8881IkoSbmxvl5eXk5eWxZ88ex2hdgwYN4v3332fLli1069aNU6dOER8fT2xsLHB2yMugoCDmzJnD119/TceOHfH29sZoNCLLMgEBAZSUlBAYGIharcbd3R2TyURGRga//fabo424IR4eHsTExHDkyBECAwNJSUmhsrISAI1Gw8SJE/niiy9Qq9WsX7+emJgYR1yCcDl5eXmo1Wr8/PycHUqbIJKy0KaVlZXRv3//eh1QJkyYwPfff49KpeKJJ55g2bJlHDhwgD59+uDh4YFKpWLgwIGUl5fz3Xff0a1bN7p37+6o3u7cuTMfffQRCoUCb29vxo0bx8SJE6mtrWXYsGF8+OGHhISE0KlTJwIDA1GpVHTp0oXbbruNH3/8kQ0bNjBw4EDGjx+PJEmEhobi7++PRqNh0KBBpKens3v3bnr06MGIESMIDw9HrVYzdOhQfH198ff35/bbb2flypWYzWYmTpzIoEGDHP0nXFxciIqKclS5S5LE2LFjMRqNfP755/Tr14/u3bs7lr///vt59913+ec//0lkZCSPPvqoGKlLuGJ1Sdnf39/ZobQJUkvutJGQkCAnJiZe9ecXLFhAWFgYL7/8ciNGJbQEVquVbt268be//Y0hQ4Y4O5x269577+Wee+5h7ty5bT6RJyUl8d577zF37lxGjRrl7HBaBJvNxtKlS8nOzuaxxx4T0+ReIUmSkmRZTmjovTbfpiwIzaEl39wKQlOprq7mzJkz+Pr6ip7XjUQkZUFoBKJDodAeFRcXU1NT43gqQLh2IikLgiAIV6W4uBir1UpgYKCzQ2kzRFIWBCdzdtW3s7cvtF7FxcXYbDbRltyIRFIWhGvwx6E3r+Zzzq76dvb2hdbp/OFcRUm58YhHooRWzW63OwbCEJqfKGW3XzU1NeTl5eHl5SWeUW5EIikLrZbFYuHVV18Vowg5UU5ODna73dlhCE5QUVFBeXm5Y/hYoXGIpCy0Skqlkq+++qrB4S9but9++40ffviBJUuWXDCmdmsUFxcnputrhyoqKhw9r0UTSOMRSVlotRISGnz2vsXbsmULCQkJDB06tE0kZXFBbp/OT8pC4xFJWWiVJElqtSNIHT16lAEDBqBWq1vtPghCVVUVRqNR9LxuZKLOSRCakSzLHDt2jK5du4oSptBqWSwWioqKUCgUoud1IxNJWRCaUVZWFhUVFXTp0kW0wwqtlsFgID8/Hz8/P9zd3Z0dTpsirgqC0IyOHz9OaGgoHh4eoqQstFo1NTWUl5cTGhoqvseNTCRlQWhGx48fp1OnTqhUojuH0HoZDAZHUhYal0jKgtCMjh8/TlxcnOjgJbRqBoOB0tJSkZSbgEjKgtBMrFYrJ06cIC4uTpSUhVZLlmWqqqqoqKggLCzM2eG0OSIpC0IzOXPmDKWlpaKkLLRqVquV4uJiJEkSj0M1AZGUBaGZpKSk4Ovri6+vr+h5LbRaJpOJvLw8fH190ev1zg6nzRFXBkFoJidOnCA6OrpNjOIltF9ms5ni4mLR87qJiKQsCM0kJSVFJGWh1TOZTBQVFYlOXk1EJGVBaAZWq5XU1FSRlIVWz2QyOUrKQuMTSVkQmkFBQQHl5eXExMSIpCy0amazmaKiIjERRRMRSVkQmsHp06fx9PTEx8dHtMMJrZbdbqeqqoqqqipRUm4iIikLQjNIS0sjNDQUNzc3kZSFVstms5Gbm4tWqyUgIMDZ4bRJIikLQjNIS0sjJCQErVbr7FAE4apZrVYKCgoIDAxErVY7O5w2SSRlQWhidrudjIwMQkNDcXV1dXY4gnDV6gYOCQoKEjU+TUQkZUFoYoWFhRQVFREVFSVKykKrZrPZHElZaBoiKQtCE8vKykKn0+Hj4yNG8hJatbqkHBgY6OxQ2ixxhRCEJpadne0YklBU+QmtWV31tUjKTUckZUFoYllZWXh7e4txgoVWTZblem3KQtMQSVkQmpDdbic7Oxs/Pz+RlIVWz2g0UllZKZJyExJJWRCaUHl5OYWFh
YSEhKDT6ZwdjiBcNbvdTl5eHhqNBn9/f2eH02aJpCwITSgvLw+AgIAA8Vyn0KrZ7XaKiorw8fERQ8U2IZWzAxCEtsRoNJKVlYVer8fDw4OsrCzc3Nzw9vZ2dmiCcE1kWaa4uBg/Pz9nh9KmiaQsCI0oOzub5557Dl9fXzp06EB6ejplZWXk5ubi7++Pj48Per1ePBoltDp2u53S0lKRlJuYSMqC0IgsFgvJycmkpKQgyzJKpRIfHx8KCwuJiYkhNDSUm2++ma5du6JUKp0driBcMbvdTklJiUjKTUwkZUFoRG5ubkRHRzuSstVqpbCwkOLiYnbu3Imvry+jR492dpiC8KfJskxJSQk9e/Z0dihtmqhDE4RG5OrqSocOHS4oBdvtdux2O/PmzaN3796i+lpodepKyr6+vs4OpU0TJWVBaERubm6EhYVht9vrva5UKunZsye33nqrGNlLaJVkWRZtys1A3K4LQiNydXUlLCwMm81W73WFQsFdd91Fx44dRVuy0CrVdfQSJeWmJZKyIDQilUqFr69vvSkalUolEyZMYPTo0bi5uTkxOkG4OrIsY7fbqaqqwtPT09nhtGkiKQtCI5IkCZ1OR0BAgOM1vV7P7NmziYqKEtXWQqtlNBqx2Wy4u7s7O5Q2TSRlQWhkHh4eREZGIkkSCoWCWbNmMXToUDGil9CqVVVVIUmSSMpNTHT0EoRG5u7uTnh4OBqNhuDgYMaPH09oaKgoJQutlizLVFRUoFQqRRNMExNJuZ2RZdnZIbR5er2eoKAgbDYbU6dOZcSIEUiSJI59IxM3Oc2rqqoKnU4nHudrYiIptyOyLFNVVcXq1avFBa0Jmc1m8vLyCAkJQaPRsHbtWmeH1KaEh4fTq1cv0eGomVVXV+Pu7i6uHU1MJOV25syZMyxYsIDu3buLO94mVFFRgc1mY8OGDWzcuNHZ4bQZpaWl9O/fnxdeeEEk5WZUd0Pv4eHh7FDaPJGU2yG9Xs+yZcvQarXODqXNKikpwWg0EhYW5uxQ2pRffvmFzMxMZ4fRLlVXV6PX650dRpsnkrIgNIGWPsCCLMuiGlK4YrIsYzKZcHFxcXYobZ6ovxSESygvL6egoACz2dys262qqiI7O/uCkcEaQ1FREYWFhU2y7ivZ7h+HIBVaB6vVKh7rawaipCw0qKioiIyMDCIjI/Hz88Nut5OYmIinpyfx8fFXtc5jx47h6elJSEhIk5TSZFmmqKiIU6dOAaBWqwkICHB0uLoa27dv59SpU8yaNYvQ0NDGDPeSDh48yE8//cTTTz9NSUkJeXl5SJLkeMwqKCjoqofrXL16NQaDgTvuuKNZqyNXr16NzWZj/vz5qNVqMjMzKS4uRq1WExMTg4eHhyi9t2AWi0Uk5WYgkrLQoD179vDBBx9w8803M3v2bPLz83nsscfo2bMn77777lWt87333iMhIYHbb7+9cYM9R5Zldu/ezXvvvUeXLl1wcXFBq9Uya9YsOnXq1KRjTjdVdbDdbuf7779n165dREVFoVarcXNz45577iEwMLDVJrHjx4/z7bffYjQaqa2tJSEhgXnz5qFUKlvtPrV1Iik3D5GUhYuKioriwIEDTJs2jY0bN9K5c2fgbAIqLy9n7969VFVVERISQo8ePdDr9VitVk6fPs3JkyexWq3Ex8cTHR19yT9mm81GQUEBhw4dora2lg4dOhAXF4ckSSQnJ5OdnY2rqyt9+/Z1DF9ZVVXF7t27qaiowM/Pj759+zpKfX369GHRokXIsswrr7zC4cOHiYyM5ODBg2RmZiLLMp6envTr18+xPrPZTEpKCqdPn0atVtOlSxciIiLqxVleXs6uXbvo3bs3e/bsYcSIEXh6ejqSSGVlJXv27KFLly4cOHCAyspKtFotcXFxxMbGOtrjNmzYgK+vL7m5uVRXVxMQEMCoUaMoLCwkMTERu91OeXl5vW2PHTuWW2+9lZqaGh5++GGOHz+On58f27dvJz8/H0mS8Pb2JiEhwTGLT01NDcePHyc7Oxt3d3d69OiBv79/vfWWlJSwY8cOhgwZwu+//87IkSMdPWxlWSYvL4/s7Gx8fX05evQoBoMBnU5H9+7diYiIQKU6ewlZs2YN4eHhZGZmOjq4DRo0iJycHA4fPoyLiwsFBQX4+/tjt9tZs2YN3t7ePPjgg6Snp/Pcc89x/fXXN2tthPDnWK1Wx/kWmo44wsJFBQcHk5ubS0FBAUlJSfTr14/Dhw9js9lYv349SUlJxMTEUFVVhVarpU+fPpw4cYL169cjSRJKpZKDBw9yxx13XLIXcnl5OatWraKkpAQPDw+OHDlCbW0tAOvXr8fLywutVovNZmPChAkolUo2bNjAzp076dSpExUVFajVagYPHlxvvZIk1St1GQwGKisrsdlsZGRkkJyczKJFi7Db7SQnJ7Nq1SoCAgJwcXFBrVbXm6KuvLycdevWUVJSQrdu3RzLDhw4EDibwLKzs1m5ciVPP/001dXVVFRUkJ+fz/Hjx5k+fbrjRuPzzz/H29ubmJgYNBoNer0es9nMV199RXl5OQEBAaSlpTV4AWxonyoqKpBlmfT0dE6dOsUDDzyAyWRi9+7dbNu2DT8/P9RqNR4eHvUeaSktLWXVqlWYzWYGDBjAd999R2hoKAkJCcDZm6Xk5GSSkpKYOnUqVVVVVFdXk5OTw5EjR7jnnnvw8/NDkiQ++ugjOnXqRHR0NBqNBoPBQFlZGStWrMBkMuHp6Ulqaiq+vr7U1NSQnp7O3Llz8fPzw9/fH1dXV44ePSqScgsmSsrNQyRl4aIUCgW9e/fm+++/x9PTE29vb8fIVMnJyURERHD77bdTVlaGxWLBZDKxZ88eFAoFc+bMQavV8uabb3LkyBGCgoIa3Ibdbic3N5eDBw/yyCOPEBwczBdffMHhw4cdCWjatGl4eHiQm5vreLb6wIEDBAYGsmDBAiorK6moqHCsMzk5mTfffBOz2YzFYqFz5864uLgwZswYxowZg91uJycnhwULFnDnnXfi6urKr7/+SkBAAPPnz0ehUFBdXe2o7q6srOTbb79Fp9Mxc+ZMQkND6d27N8nJyfTt25d3332XmTNnkp2dTVhYGJGRkURGRjpqFP71r3+RmprqSFhwdgCMefPm4e3tDUBmZiZbtmxhyZIl+Pr68s0335CWlubYp61bt5KXl0d1dTWBgYGO6vhJkyY5ZvA5deoUTz/9NLfeeisGg4Hdu3fTqVMnJk+ejMlkwmazOY5faWkpy5Ytw8PDg9tuuw1/f3969uzJkSNHiImJYcWKFYwbN46ysjKCgoLo2rUrXbt2xW63U1xczFNPPUVOTg4+Pj6O4xQVFcX8+fPx8PDAbrezf/9+Tp06xTPPPINSqaSgoMDxvKvJZEKv1/PNN98wdOhQfHx8KCoqauRvsNCYRFJuHiIpC5c0ePBgnnzySe677z5H6VWhUDBu3Di2b9/O+++/T0hICCNGjKCmpob8/HwOHjxITk4OkiSRmpqKj48P1113XYPrt9lslJSUoFAo6NSpEyqVitDQUDIyMujTpw9Wq5Vv
v/0WNze3euuYNGkSmzdv5t1338XX15dRo0Y53vP19aVXr16OxJCTk0PHjh1JTU1l48aNnDlzBqvVSlVVFWVlZajVak6fPs38+fMdSfL8gSlOnTpFbW0tvXv3xsfHB4VCQa9evfjuu+84ffo027dvp0OHDpw5c4a4uDjKy8vZtm0b+/fvd1Qhx8TEYLVaHUl54MCBeHl5IUkSdrudgoICNBoNsbGxyLJMXFwcp0+fdsQQGhpKnz59HMk2IyODwMBADh48yKZNmygpKcFkMjluUMxmM2VlZdxwww0NDvhw+PBhlEol/fv3x9fXF0mS6NmzJ+vWraNLly78/vvv+Pr6UlZWRqdOncjPz2fz5s0cP34co9FIWloaZWVl9YYOHTJkiOO42Ww2ioqK0Ol0REREYLfbCQ8Pd3x/6kr7cXFxuLu7Y7fbxWA2LZgsy9hsNvFIVDMQSVm4pICAAF566SWio6PZtGkTcPaiOmDAAMLCwigpKSExMZFVq1Yxb9483Nzc6N+/PyNGjADO/jEHBQVd9I9ZkiTUajU2m83RZmW1WpEkiY4dO9KpUyfKyso4deoU//rXv3jnnXdwcXEhISGB4OBgysvLOXToEJ988gnPPvssACEhIYwaNQqdTofBYCA5OZnOnTvzwQcfMHToUAYOHIhCoWDXrl3YbDYkScLFxQWj0dhgjFFRUQwbNowffviBffv2cd1119GjRw8WL17M3r17ufnmm9m7dy+yLJOQkEBycjLbt29n9OjReHl58e233yLLcr0E9sfjodFoMJvNjuUsFku992NjYxkzZgxKpZLc3Fx27txJx44defPNN7npppsYPnw4tbW1nDx50lEiVqlUmEymBvepc+fOTJw4kXfffZekpCT69+9P9+7dWbJkCSdOnGDUqFHs27cPT09PgoKC2LJlC+np6Vx33XXodDqys7Ox2+319un8wWjqzqvFYnFc0K1WK0qlEk9PT1xdXSktLWXkyJFIksSZM2dE1XUrIDrhNT1xaypcUl0V9vklR5vNxooVKzCbzURHR6NQKDhz5oyjVGQ2m/H09KRjx46UlJRcUKI6n1KpJDAwEJVKxfbt28nMzOT48eP4+Phw5MgR0tPTCQ8Px9vbm6ysLMd6vv76awwGg6MDVU5OTr31yrJMbW0tZ86cAcBkMpGdnU3nzp3p2bMnubm5jmePNRoNvXr1Yu3atWRlZZGTk8OBAwccna18fX3p2rUr06ZN44svvqCgoAAvLy88PDz49ddfGTNmDCUlJeTm5hIdHU1ZWRk2m42ePXui1WodQ25ejCRJREZGolKp2Lx5M7m5uezYseOCYybLMgaDgfz8fEe7bWZmJj169KBHjx719snLy4ugoCB+++038vPzOXnyJEeOHMFgMAAQGBhIjx49mDVrFm+99Rbl5eV4e3vj6urKnj17GDx4MIWFhVRVVREUFERRURF6vZ6ePXtiNpupqam55PPGSqWSsLAwjEYje/fu5cSJExw5cgQ4m7zrSuP5+fn88ssvAPTo0eOi6xOE9kKUlIU/TaFQEBsby3vvvedIRLNnz0apVDJy5Ehqamp4/fXXqa2tpU+fPvTt29fR7rh8+XJWrFgBnH2OeNy4cSxYsIDJkyfz+eefU1tbS9++fRk+fDiyLPPdd9/xzjvvoNFouPPOOx1tWl27dmXZsmVkZGQQEhLC/PnzHfH99ttv7N+/H51OR2RkJLfddhsxMTFMnjyZ119/HZvNxpgxY/Dx8QHOJolJkyZRXl7OE088gbu7OzNmzKBbt2719vm6664jMTGRDz/8kOeff54ePXpQXFyMp6cncXFxZGZm4uXlRXx8PNu3b+eBBx6gY8eOKJXKyz4n7enpyT333MPy5ctRqVSEhITU6+j12Wef8b///Q+9Xk90dDRTp04lJCSE6dOn8+STT6JUKhk+fLij+t3Hx4eJEyfy7bff8re//Y2QkBDmzZvnKKHXzfU8YcIE9uzZw1tvvcVzzz1Ht27dsNls+Pj4EBMTg1qtdvTq/vLLL7nnnnvo06cPbm5ul3zETJIkwsPDGTduHO+88w6+vr7Isuyour755pv56KOPeOihh3Bzc+ORRx4RY1kLAiC15OnkEhIS5MTExKv+/IIFCwgLC+Pll19uxKhaL1mWSU1NpV+/fmzcuPGSY1+bTCasViuurq6Otj6TyYTZbHY8+mQ0GrHZbKhUKlxdXVGpVI7h+EwmE7Iso1arcXV1RZIkqqurL6iWdXFxwc3NDYvFgtFoxG63O54vrivtms1mJEnCzc3NkZRtNhsGgwGbzeaY41WpVDpKcXA2MdTFplQqMZlM1NbWIssyWq3W0dlIpVJht9vrbcvV1RW1Wo3ZbHa0pSmVSsc29Xo9BoMBu92OXq93xK7X67HZbBiNRiwWiyOxqtVqXFxckCSJyspKx/Gqqw6UZRmr1YrBYECWZcfn3NzcqK2tdbTn11ULu7m5IUlSvfdcXFywWCzodDpUKhU2m43a2losFgsKhcKxzbpjUHde6kq9dfsE4Orq6ljv+efHZrM5mhvqjpEkSZSXlzuO5fnfN7PZjNFodCRjlUrlOLdGoxGz2YxCocDNza3e8biYurGvH3jggasexOZqJSUl8d577zF37tx6fRjaA4vFwqJFi9Bqtbz66qvODqfVkyQpSZblhIbeEyVloUEuLi4XtHue/5parUatVl8waIYkSWi12gYTvru7+0W3p9FoGixN6nQ6dDrdBa+rVKoGOzA1FHedP8bl6urq+LkuMfxxAvc/ruv898+P6/zXlUrlJUfKaijuumTbUGmxobjO34fz9+N8SqWywWP3x+XPX+Zi+1R3vi/Gy8vrgtfq2uobOh+SJF303J5PjNEttDeiTVm4JudfMFtyrcvVau371NrjFwlZaG9EUhYucLUX8rZ4Ab2afWpJibCpxhh35ucFoS0TSVm4QFtMrleqMRJGWz9+17p/dQPQCIJwIZGUBeE8bT2hthTiOAtCw0RHr3ZIlmVqamqafT7d1kh0NGpZTCaTmI9ZaNNEUm5nFAoFSqWSNWvWiBlfLqPukSyFQoFGo7nm5Fx3E3T+MJPCn3Pq1Cn8/PzEkJxCmyWuyu2Mh4cHs2fPdjyPKjSspqaGlJQUzpw5Q3x8POHh4dc8H/OpU6fIzs5mwIABl30USGhYYGAgvXr1EgONCG2WSMrtiCRJ+Pv78/777zs7lBZLlmXKyspYv349FouFkSNHcvfddxMdHX3NpdutW7fy/PPPM23aNCZNmiRm3BEE4QIiKQvCOXVTOq5cuZKkpCRGjhzJtGnT6s2rfC2GDRvG9OnT+eCDD+jYsSPx8fGiGlYQhHrEFUEQAKvVypEjR3jnnXdITk7mlltu4dZbb220hAxn25Lnzp1LcHAw77zzDtXV1eLRIEEQ6hFJWWj3zGYzmzZt4rXXXsNisfDggw8yfvz4Jmn39fb25rHHHuPo0aN8+eWXjb5+QRBaN1F9LbRbdZMiLFu2jF9++YXRo0czY8YMoqKimrR3dJcuXXjsscd49tln6dWrFwMHDhS9sQVBAERSFto
pWZYpLCzk+eef5+TJkzzwwAOMGjXKMfVhU5IkiXHjxrFr1y4ef/xxVq9e3SzbFQSh5RPV10K7dPDgQWbPnk15eTlvvfUWN9xwQ7MmRldXVx599FGUSiUvvfRSs21XEISWTSRlod1ZuXIlN998M4MHD+add96he/fuF53usSn5+fnx5ptv8v3337N27dpm374gCC2PqL4W2g2TycQLL7zAihUreO2115g0aRKurq5Oa8+VJIkePXrwyCOP8Mgjj9C3b18CAwOdEosgCC2DKCkL7UJBQQFz5szh119/5fvvv+emm25yakKuo1AouOeee4iLi+Pxxx8Xj0gJQjsnkrLQZsmyjCzL7N27lxtuuAGFQsGPP/5Iz549USqVTk/IcLa0rNVq+b//+z927tzJqlWrRGIWhHZMJGWhTZJlGbvdzqpVq7jjjjsYPnw4H3/8MUFBQUDLmjpQkiQiIyN56qmneOmll8jOzhaJWRDaqWtOypIkKSVJOiBJ0k/nfu8gSdIeSZJSJUn6VpIkzbnXXc79nnru/ahr3bYgNESWZaqrq3n33Xd59dVX+fvf/87LL7+Ml5cXkiS1qIRcR61WM2XKFLp168Zrr72G1Wp1dkiCIDhBY5SUFwLHz/v9deAtWZZjgTLgznOv3wmUnXv9rXPLCUKjstls5OTk8Oqrr/K///2PF198kblz57aKWZk8PT1ZtGgRR48eZdWqVWK+a0Foh64pKUuSFAZMAj4+97sEjAK+O7fIf4Gp536ecu53zr0/WmqJRRahVZJlGbPZzJEjR3jllVc4fvw4r7zyChMmTECj0Tg7vCuiUCiIjY3llltu4b///S+pqamiGlsQ2plrLSm/DSwC7Od+9wXKZVmuq3vLAULP/RwKZAOce7/i3PKCcE1kWaampobNmzfzj3/8A0mSeP311xk8eHCrm4VJp9MxceJEoqKiWL58OZWVlc4OSRCEZnTVVyxJkm4ACmVZTmrEeJAk6R5JkhIlSUosKipqzFULbZDdbqeoqIgVK1bw9ttv061bN1588UU6derk7NCuWkhICHPmzOH06dP8/PPPmM1mZ4ckCEIzuZbBQ4YAN0qSNBHQAh7AO4CXJEmqc6XhMCD33PK5QDiQI0mSCvAESv64UlmW/w38GyAhIUHU3QkXZbfbOXnyJN988w1JSUnMmjWLmTNn4ubm5uzQrolSqaRXr15cd911rFu3jvj4eHr16tUiO6gJgtC4rrqkLMvyk7Ish8myHAXMBn6TZflWYDMw49xitwGrz/285tzvnHv/N1k0mAlXQZZlbDYb27dv54033iAtLY1HH32U+fPnt/qEXMfd3Z0JEybg7e3NDz/8QGFhobNDEgShGTRFg9vjwN8lSUrlbJvxf869/h/A99zrfweeaIJtC+2AwWDgk08+4a233iI4OJjnn3+eESNGtLr248uJiopi6tSppKWlsWnTJoxGo7NDEgShiTXK2NeyLG8Btpz7+TTQv4FlaoGbG2N7QvuVlZXF0qVLOXLkCLNmzWLKlCltdtpDhUJB//79SU5OZt26dXTu3JlevXq1uZsPQRD+PzEhhdBqbNu2jSVLlqDT6XjqqacYMGBAq3nc6WrpdDqmTZtGSkoKP/zwA6GhoWLSCkFow8Qtt9DiWSwWPv74Y5588kn69OnDiy++yNChQ9t8Qq4TGhrK3LlzOXjwIFu2bKG2ttbZIQmC0ERESVlo0crKynj99dfZtWsXCxcuZMyYMXh7e7ernsiSJNG3b1/GjRvHZ599Rs+ePenUqZOoxhaENkj8VQstVmpqKg8++CDHjh3jtdde48Ybb2x3CbmORqNh3rx5+Pr6smzZMqqqqpwdkiAITUAkZaFF2rNnDw8++CCurq4sWbKE/v374+Li0i4Tch0PDw8ef/xxNm3axIYNG7BYLM4OSRCERiaSstCiWCwWVqxYwUMPPcTAgQNZvHgxsbGxLWb+Y2eSJIn4+HjuvfdeXn75ZQoKChxzRh8/fpwFCxaIsbIFoZUTSVloVrIsY7Va2bBhQ70EIssyVVVVfPTRR7z22mvcfvvtPPLIIwQGBrb7ZHw+hULB3XffTceOHXnhhRcwmUwsXbqUPn368MMPP3D8+PHLr0QQhBZLJGWh2f3rX/9i+vTp7N6921HSKyws5I033uDTTz/lySef5J577kGv14uE3AClUskbb7zBTz/9RN++fVm0aBG1tbUolUq2bdvm7PAEQbgGIikLzSotLY2nn36ampoabr31VrKyssjIyOCZZ55h586dLF26lJtvvhmVSiUScgNkWcZkMrFr1y7MZjOnTp3CZDIBUFlZya+//iqqsAWhFROPRAnNpra2lvvvvx+73Y4sy+Tn5zN16lR8fX0JCAjg448/pkOHDs4Os8WSZZmMjAyefvppfvzxR6qrq+u9b7Va2b9/PwaDAZ1O56QoBUG4FqKkLDQLu93O559/ztatWx1TEdbW1pKamoqHhwdLliwRCfkyTCYTr7/+OuvWraOmpuaiyyQlNepsqoIgNCORlIVmcfLkSV544YUL5gaurq5m7969/PLLL2KkqsvQarX84x//4K677qJjx44NDh5iNBrZvn27E6ITBKExiKQsNLmKigpeeOEFSktLG3w/NzeXt956i+3bt2O1Wps5utbFx8eHV199lVdeeYWhQ4de8Ox2VVUV27ZtE8dREFopkZSFJmW321m5ciXr1q1rsCSsUCjQarUoFAqSkpLEgBhXQKlUMmPGDD788EPmzJlDSEiIo9Rss9nIzMwkLy/PyVEKgnA1REcvoUklJyfz3nvv1euUpFQqAYiIiKBbt2506dKFgQMHkpCQgIuLi7NCbXXi4+P517/+xfLly1m1apWj2tpoNLJnzx4iIiKcHKEgCH+WSMpCk6moqGDp0qWkpqYCZ0ekio6Opl+/fsTFxREXF0evXr3o2LEjKpX4Kl4Nd3d3Hn74YRISEvjoo4/47rvvKCsrY8eOHdx8s5i+XBBaG3ElbCY2m43ly5c7O4xmlZKSwo8//ojdbic0NBRPT0+ioqIIDw8nMDCQ6upqduzYwY4dO5o1rhtvvJGAgIBGew7aYrGwZs2ai7aZNwdZlomIiCAuLo60tDQ2bdrEBx980GpvdlQqFf369aNbt27ODkUQmlXr/IttZWRZxmKx8MorrzBkyBBcXV2dHVKTMxqNpKenExcXh6enJ3q9HldXVxQKBaWlpU5LYFu2bKF3794EBAQ02jrNZjPLli3DxcUFf3//Rlvv1ejZsydBQUHk5+ezfv16/Pz8nBrP1bDb7Zw8eRK73S6SstDuiKTcTOqGkrz++usJDg52djhNzmq1YjKZ0Gg0LWp0rjVr1jhGwGossixTVlbGpEmTGDBgQKOu+2rY7XZqampQKBStchARi8XCO++8I6anFNolkZSbkUKhIDAwkNDQUGeH0qLIstzkSbtuG3WdzBqbJEn4+vq2mHNbN9RmS7kZ+jPMZnO7qE0ShIaIR6JaibY8nnFzJI7WmJyuVEPfDUmS/tQ+t+XvlyC0JiIptxJXk1Sa4kJrt9t54403+O
qrrxp93Zcyf/58UlNT23zyuJr9a+i7kZOTw9tvv33Fnega6/tlMpmYOnUqJSUlf3p9giCI6usWKTMzkw8++IB9+/YRFBTEX/7yF4YMGfKnL5ySJGEymRg3bpyjjTEiIoK77rqLcePGXXV8NTU1GI1GoHmqnuHs41U2mw1Zlnn77bf56quvUKlUzJ49m4ULFzb59hvDO++8w6pVqzAYDPVe9/DwYMWKFYwfPx5JknBxcaFPnz4sWLCAXr16XdW26tqV/zisaWNq6LzLskx5eTl2ux2r1cqhQ4d46623KCoqYvLkycyZMwdfX98mi0kQWjtRUnay80sbsixjs9l48803CQ4O5ptvvuGFF15gw4YNjvfr/v3x94v9g7MTP3z++eds376dv//977z22mvk5ORc8jMXW39DsV/pZ69muYa2u3DhQn7//XcmTZrUqoaTfOihh/jtt9/YtWsXer2e9957j127drF+/XpkWcbNzY2VK1fy448/0qtXL7788ktKS0sb5Txdyzm40uX+uN3CwkK++uorhg0bxssvv0xaWhpbtmzBbrc30REWhNZPlJSd7PzShiRJlJaWcubMGR544AH8/Pzw8/PjxRdfRJIkzGYzc+bMYfHixcTHx3P48GH+85//cPvtt7NhwwZ+//13AgMD8fLy4sSJE/ztb39j4MCBwNlOZmq1mt69e+Pu7k5mZiYAjz32GCdOnMDDw4PJkyczd+5cAgMDAdi4cSOfffYZWVlZXHfddTzxxBP1Rtwym81s2rSJN954g7i4OBYvXux4JEiWZf72t78xc+ZMfv75ZzZu3IjNZiMyMpInnniCfv36IUkSBQUFPPTQQ8TFxbFz505UKhX3338/AwYMYNmyZWzZsoURI0ZQUVHh2K5CoWhRPbr/6GK1B3+cQEKhUFzQ8UypVOLp6Unnzp1JTk6mpKSELVu28OGHH1JcXExwcDAPPPAAo0ePRqPRIMsyK1eu5KuvvqKqqoobb7yRBx54oN46CwsL+fjjjwkJCeHgwYNMnDiRkSNHotFoHPEOHz6cFStW8Oyzz7J//35cXFwYMGAADz74ILGxsUiSxL///W9OnTrFmTNnOHnyJNHR0bz//vvk5+fz0ksvkZWVxYwZMxzrLC0tpaCggL/+9a+EhoZy+PBh0tLSKCsrE6VlQbgIUVJuYby8vAgMDGTlypWcOnUKo9HoqLY9X0PtefPmzcPf35/IyEhmzJjBwYMHHeNN19bWOgbrMBqNxMTEEBYWxtdff01SUhJffPEF1dXVbN26FVmWSUxM5PPPP2f+/PmsWbOGQYMGORI5nH1s5ffff2fVqlU8+eSTREVFkZubi8ViwWw2U1RUhCRJ+Pn58Y9//IO9e/eyZ88eHnzwQZ588sl6JVyDwYAkSXzzzTds2LCBKVOm8MMPP6BSqVi+fDkdOnTAbDa3mvbkq71ZkGUZg8HAmTNnOHToEFqtloCAAKZNm8aGDRvYt28fL774Ip988omjzXbDhg188sknvPzyy6xatYrOnTuTk5PjWGdxcTHffPMNnp6eTJkyhR49enDmzBlqa2sxGAyOsbL1ej0hISH85z//Yf/+/axfv55evXrxySef1CvZnj59mltuuYUtW7bw1Vdf4e7uznPPPceECRNYv349FosFi8WCzWajoKAAtVqNv78/FosFT09PzGYz1dXVreZcCkJzEyXlFkatVvP444/z1Vdf8eijj6LX67nxxhuZPn16veX+eOH39PQkKCgIHx8foqKicHd3Jysry3Hxu//++1EqlSgUCm655RZ8fHyw2+3U1tZSW1uLm5sbQUFBlJaWUlNTw65duxg0aBD9+/fH09OT8ePHA2fbKmVZ5tChQ/zyyy88/vjjREdHk5GRQVZWFnv37qWmpoYePXoQGBiIm5sbJpPJcXPRo0cP7HY72dnZREdHA+Dn58f06dMdJfTS0lLy8/MZNmwYUVFR+Pr68vXXXzf1oXe68vJy7rzzThQKBUFBQdx5553odDqsVitGoxGLxUJUVBQajYbc3FwCAwP58ccfmTt3Lt27d0eSJMd5ysrKorq6mtWrVxMeHs69996Lj48P0dHRJCYmUlFRwX333cezzz5LYWEhvXr1QpZlx7my2+3ExMRw6NAhSkpKHIOtXHfddfTr18/xyFJ2djbFxcXMmjULFxcXbrvtNtasWYMsy4727MTERPbu3YunpydwdnS7llrLIQjOJpKyE1yuc1RISAiPPPIIf//73zl48CCPPPIIcXFxdO3aFUmSGmzHUygUjn8qlQqlUulIoADLly8nJiaG9PR0nn32WSIjI+nTpw+ff/45+/bto6qqivLycqZMmYLNZqOqqoqIiIgGh2m02+2cPn2aXr16sX37duLi4vDx8eHQoUP4+flRXV3N/v37CQgIQKvV8tNPP7Fu3ToKCwuxWq2Ul5fXG8BDpVLh7e3t+N1sNiNJEmq1GgCdTtdkzxe3JN7e3nzxxRf4+fmxefNmNm7cSHBwMEajkVWrVpGamkptbS35+fmOGpDS0tKLjiJWVVWF1WrFarWSkpJCeHg40dHR/Pzzzxw4cIBOnTqRlJREeXk5vXr1wmKx8Nlnn7F9+3YqKiowGAxERkbWq9Xw8PCo14RRVVVV7/x4eHggSRIKhQI3NzdkWaZv374MGTKElStXOkrPgiA0TFRfO8GlErLNZqOwsBCDwYAsy3Tq1ImwsDByc3MBcHNzo6ysDIvFQnFxcYPTIdb5YxWhQqEgJCSEmJgYjh8/TmpqKomJiSxevJivv/6aWbNmOZJwaGgoeXl5VFRUYLVaqaiocPS4ViqVTJ8+nQceeIDMzEz27NmDp6cnJ06cwMfHB39/f44cOUJgYCBms5nPPvuM+fPn8+233/LJJ584LtYXo9PpUKlUlJWVYTKZyM7ObvRRuFqqut7XERERuLq6kp6ezubNmwkLC+Ojjz7is88+IyYmxrF8bGwsR48exWg0Os5T3XciKCiIBQsWMHjwYPbs2UNaWhrBwcEYDAZ27tzJrFmzOH36NEeOHKFnz54UFhby5Zdf8uSTT/Ltt9/y9NNPO9qtL8bPz4/a2loKCwux2+2kpaUhyzJKpZKgoCAkSeL06dNUV1dTUFCAXq/Hw8OjyY+jILRWoqTcwpjNZn799Vfc3Nzw8fEhLy8Pq9VKly5dUCgUdOvWjQ0bNmAwGNi9e/cFnYcu5uDBgxQWFlJeXs6ZM2cYOXIkLi4uuLu7c+LECZKTkzly5AhxcXEADBkyhGXLlrFhwwYiIiIoKipiwIABjukA6y66U6dO5aeffmLQoEEoFApsNpvjwu/n54erqytBQUHk5uZit9s5ceIENpvtkrG6u7vTtWtXjh49ikqlIi8vz3FDAJCfn09aWpqj1LV3715iYmJafechi8VCUlISaWlppKamIkkS4eHh5OfnU1lZyYEDB8jLy6O8vNzxmZtuuol//etfrF27Fh8fH8rLyx2d+yRJQqPR0LdvX9LS0ti6dSsBAQGEh4dz8OBBYmNj0ev1FBcXEx0dTWlpKUFBQaSlpZGTk8POnTsv2
1M6KCiI/v378+WXX5KQkEBiYqLjptPPz4+uXbuyZs0aIiMjyc/PZ8yYMbi7uzfZMRSE1k4k5RbG1dWV/v37s2XLFvbv34+7uzsPPfQQHTp0AODmm29mxYoV7N+/n+7duxMREYG3tzexsbH4+/vTuXNngoODcXV1pXv37ri4uDB27FgSExNRKBR4eHgwdepURowYgdFo5Prrr2fv3r0EBgYyZMgQgoKCUKvVdOzYkXnz5rF161a2bt1Kr169HENIdu/eHT8/PzQaDf369aO0tBSbzcbkyZMdF/obbriB4OBgfHx8uO2229iyZQvHjx9n6NCh3HTTTY72Ra1Wy4ABA+qN0SxJEmPGjAHO3kz07NmT8ePHO6pG09PT+fHHHx3tmuvWreOmm25qVUl59OjR+Pj4OH53cXFh6NChjh7oAQEB3HDDDfTo0QO9Xs/GjRvZvn07PXv2ZOLEiY5Zrvr06cM999zDjh07OHbsGAMHDiQwMJCysjK6du1KYGAg/v7+jB07lsOHD1NVVcXQoUMJCwvDzc2NUaNGOc5lQEAACxYsYPfu3bi5udGrVy9iY2Mdxzk2Nha1Wl2vSUOhUPDQQw+xcuVKNm/ezLBhwzCZTLi4uKDT6Zg2bRo//vgjqampDB8+nP79+1/xjaQgtEdSS+4FmZCQICcmJl715xcsWEBYWBgvv/xyI0b158myTG1tLf7+/qxcuZLw8HCnxnO1mmugkKaM4frrr+e7775j8ODBjbYv1dXVjBkzhnnz5jFixIhGWWd7VHduzWYzL7zwAqNGjeKvf/2rs8NySEpK4r333mPu3LmMGjXK2eE0K4vFwqJFi9Bqtbz66qvODqfVkyQpSZblhIbeE7eswhVzdkJuKTEITUOcW0EQSVkQnM7ZtVXO3r4gCP+fSMrCFWmsC7dIAGedfxycXUJsjO1f7Xk9f6hWQRBEUm41nH3RaqzEcS3raegYOPu4XK3GTMSNcQyudR1Xuz91n3P2jYkgtBQiKbcSbfmidaUJoaFjcKXHpbUm7yvRGN+Ntvz9EoTWRDwS1YxMJhMffPABer3e2aG0W5WVlU2yXpPJxIoVK/j999+bZP3tic1mIyUlpd31cBYEEEm52ajVah599NFWPcTg1q1b8fHxoXv37s4O5ao9/PDDhIWFNeo6NRoNt99+O8XFxS2+xHns2DEMBgPdunVDq9U6O5yLio+Pp1+/fs4OQxCanUjKzUCSJJRKJS+++KKzQ7kmhw8fZtCgQS3q2dGr0dB43tdCrVZz//33t4oq8mXLlnHs2DEeeughxwQgLVHd+NmC0N6IpNxM6oY8bM2sVisuLi6tfj8a2/mTZ7R0er0es9mMQqEQ51EQWiBxKypcMbPZLC7krZybmxu1tbWXHdNaEATnEElZuGIWi6XVlAiFhul0Ompray87KYggCM4hkrJwxSwWS6O3xwrNS5SUBaFlE0lZuGJWq1WUlFs5nU6H0WgUJWVBaKFEUhaumKi+bv3qqq9FSVkQWiaRlIUrJpJy66fT6TCZTFit1lbxCJcgtDciKQtXTPS+bv3c3NwAMBqNIikLQgskkrJwxaxWq+jo1crpdDpUKhVVVVUiKQtCCySSsnDFRPV166fVatFoNFRVVYl2ZUFogURSFq6YSMqtn0KhwM3NDYPBIJKyILRAIikLV0y0KbcNer2empoaUX0tCC2QSMrCFRMl5bahLimLkrIgtDwiKQtXTIzo1Tbo9XpRfS0ILZRIysIVkWVZjOjVRtQlZVF9LQgtj0jKwhWxWq0Aok25DdDpdKKkLAgtlEjKwhWxWCwAoqTcBoiSsiC0XCIpC1dEJOW2Q7QpC0LLJZKycEVEUm476qqvRUlZEFoekZSFK1LXpix6X7d+Op1OPBIlCC2USMrCFbFYLMiyLErKbYBer8doNIqkLAgtkEjKwhWpKymLpNz6iRG9BKHlEklZuCIWiwVJkkT1dRtw/iNRIjELQssikrJwRcxmMyqVCkmSnB2KcI30ej0mk8nReU8QhJZDJGXhiohxr9sOvV4PIKqwBaEFEnWRQoMKCwvZv38/1dXVSJJEVlYWSqWSn3/+GY1Gg1KpRKvVkpCQgIuLi7PDFS7BYrGQn59PcXExJpOJyspKrFYra9eu5dChQ1gsFmw2G6NHjyYiIsLZ4QpCuyaSstCgqqoq3nzzTU6cOIEkSciyjNFo5NFHH0Wj0aBWq4mPj+fjjz92dqjCZdjtdtavX88PP/xATU0NBoOB4uJi3nzzTWRZxmQyAbBz504nRyoIgkjKQoNCQ0NxcXEhPz/f0fMaICUlBThbBTpq1Ci0Wq2zQhSukEajQaVSkZKSQmZmJjabDTh741WnU6dOxMTEOCtEQRDOEW3KQoO0Wi2jR4/Gx8enwfc1Gg233nprM0clXA1JkhgxYgRdunRp8NlkjUbD5MmTRZ8BQWgBRFIWLmr8+PGOTkHnUygU9OjRgy5dujghKuFqRERE0K9fP7y9vS94z2azMXXq1OYPShCEC4ikLFxUTEwMsbGxKJXKeq+7uroyb948MY1jK6JSqRg7diwxMTEXPNbm5eVF3759nRSZIAjnE0lZuCi1Ws0NN9yAl5dXvdddXFxEyaoV6t27N3FxcfWqqZVKJePHjxc96AWhhRBJWbikSZMmoVD8/6+JWq1m6tSpDVaDCi2bRqPhxhtvJDIy0lFaVqlUTJkyRQwKIwgthEjKwiVFRUXRuXNnR2KWJIk777xTXMRbIUmSGDduHAEBAY7XbDYbo0aNcmJUgiCcTyRl4ZIUCgU333wzHh4eAAQFBTFw4EAnRyVcLXd3dyZMmICvry8AAwYMwN3d3clRCYJQRyRl4bImT56MzWZDrVZz7733ilJyKyZJEjNnzsTb2xuNRsOUKVNQKpXinApCCyEGD2kiJpMJs9ns7DAaha+vL9HR0Rw+fJipU6dSXV3t7JAahUajaZQOTrIsO2Zdag2CgoKIj48nPT2dQYMGUVNT06qSskqlwsXFpV5fB0FoK0RSbiJPPPEEy5cvbzMXDrvdjqurK0OGDHF2KI3CaDTy5JNP8vzzz1/zumpqahg8eDDp6emtampLtVrN5MmTnR3Gn2KxWBg/fjyLFy+mc+fOzg5HEBpd67mCtDI2m43x48czd+7cNpGYi4qKSE9Pp3///s4OpVG89dZbjVqy1Wq1/P3vf281x8dms7Ft2zaGDRvWqm4ktm/fTnl5uZjdSmizWs9fYyvk4eFBeHh4q7roXUxkZCS9evVqM+2Pbm5ujbo+hUJBQEAAkZGRjbrephQZGYlCoWhV5/PYsWNtpvlEEBrS+otw7cChQ4coLy93dhioVCrHBdxms5GWlkZBQUGzx7F7927HzEbCn5eTk0NmZiayLDdrQs7JySE7O9sxIYYgCBdq/UW4Vqy6upqMjAy8vLwICwsDoKCggNzcXOLj4x2luW3btjF+/Ph6I2v92QuqzWZj/fr1wNkeuB4eHsTGxhIYGPinYq7brslk4osvviA+Pp6ZM2f+qXVcq4cffpgff/yRgIAADAYDhw8fpqqqCn9/fzp16oRO
p2vWeK5UdnY2KSkpdO3alcDAQGw2G5s2bcLf3/+iw1za7XZSUlJIT08HzlaTR0VFERERcdU1MOvWrcNgMHDHHXc0OLZ5U1m3bh02m4358+c7Zq3Ky8vDxcWF7t274+3t3apK7YLQFERSdqKKigp+/vlnQkNDufXWW5Ekie3bt5OcnEx4eLgjKT/00EMXfPbPXrysVitPPPEEkyZNwsPDA7PZzL59+7j77ruvOIk1d8nqYjHUqUtqGzduxNvbG4vFwnXXXcf111/v9DgbkpiYyL///W9mz57NnDlzyMjI4JVXXqF3796XTMo///wzBw4cIC4uDkmS+O233/jLX/5CSEhIi9zPK3Ho0CF++OEHVCoVlZWVHDlyhHvvvbfNNI8IwtUSSdmJfHx8CAkJIS8vD4PBgKurKydOnKBjx47odDoOHjzI7t27sdls3HjjjYSHhwNnL9QlJSXs3LmTkpISwsLCSEhIIC0tDY1GQ3x8vGOyCFmW+eyzz5g4cSKSJHHvvfcSHh7O0aNHWbx4MRMnTkSv17NlyxbKyspwc3OjV69exMfHO+ZKTklJYf/+/dTU1BAbG8uwYcPq7YfZbOb48eOUlJSg1WrRaDR0797d8biRLMt8+eWX3HDDDWzbto3s7GwUCgX+/v4MGzbMUVrPyMjg0KFDKJVK8vPzcXNz4/rrr0etVrN9+3bOnDlD165dkWUZWZapra3l66+/ZtasWQwfPpy1a9eyadMm+vfvf8F43S1FTEwM+/btY8aMGWzYsIHu3bsjyzLHjh0jOTkZi8XiqAHQ6XSOjmPDhw/n1ltvpba2loceeogTJ04QGBjIL7/8QlZWFpIk4e/vz/Dhwx3Hs6qqigMHDpCeno6HhwcDBgwgODi4XjyFhYWsW7eOsWPHsmHDBiZPnuwYQlWWZTIzMzl9+jQBAQEkJSVRXV2Nu7s7/fv3JyYmxjGO9hdffEF0dDSpqanU1NTQoUMHxowZw+nTp9m7dy9arZacnByCg4Ox2+2sXbuWoKAgZs+eTXZ2No8//jg33HADERERzXg2BKHlEW3KTqTVagkLC8NkMlFQUEBpaSnFxcXExsai1WrRarX4+PiwevVqcnJyHJ8zGAysXLmSo0ePolKpOHjwIFu2bOHo0aOOC2ed8vJyvvnmm3rblWXZ0SarUqlQq9V4eXnh4+OD2WxmzZo1ZGRkIMsyaWlpLF++3JGwT58+zZkzZxzrslgsJCcns3btWiwWC1lZWSQmJlJZWelYpqKigpUrV2Kz2fDw8MDX1xcPDw8yMjL4/PPPHb2gMzMz+fLLLzl06BCurq54eXmhVCpZtWoV+/fvR6PRsGvXLqxWK3C2R3heXh7Dhg3D29ubzp07YzQayc/Pb/yT1UiCgoKQZZnc3FwOHjxI3759kWWZEydOsHbtWqqrq/nqq69IT08nMTGRtLS0ep83m82OgVzqmiF8fX3x8vIiKyuLTz/9FLvdjsFgYMeOHWzcuBG73U5lZSU5OTn12uKLior4+OOPqaqqQqlUsnnzZlJSUhzvW61Wjh8/zu7du9FqtXh7e+Pj40NVVRVff/01paWljpqLr7/+mq+++gqr1YqPjw96vZ7i4mJWrFhBfn4+ZWVlpKenY7fbqaqqIjMzk549e+Lt7U2vXr3Q6XQcO3aseU6CILRgoqTsRJIkERwcjFarJTc3l5KSEjw9PQkICECpVNK5c2c6d+7MqlWrHJ+x2+3k5uayY8cOXnjhBSIiIti2bRvHjh3D39+f0tJSRyIeNmwY1dXVREZG4ubmhizLvPbaa7i6ulJVVcWgQYMICQlBq9UyceJEZFmmqqqKN954g4yMDKKjo9mwYQM6nY7p06fj4+NDYWGhox3SZrNx7Ngxjh49yqBBgxg2bBjJycns3buXkpISVq9eTZ8+fbBYLISHh6PT6RgxYoRjP9LS0njmmWcoLS3Fz88PODtQyciRI0lISECtVlNWVsa2bdu477776NOnD0ePHuWzzz4DcHR+M5vNLFu2jI4dO6LRaKipqWnGs/jnSJJEv379+OKLLwgMDMTT09PxelhYGKNHj2bPnj0MGjSI5ORkR0e6devWcezYMYxGIxEREXTq1AmFQsGIESMcNQfp6ek89thjlJaWYjQaSUxMpHv37kycOBGz2YzdbndMw1lcXMy//vUvoqOjmT17Nt7e3vTo0YOjR48SGxvLJ598wk033URpaSkhISHExsYSGxuLLMuUl5ezaNEicnJy8PPzc6yze/fuzJw5E51Oh91uJzExkczMTF566SUkSSInJwdZlqmpqcFsNuPm5sbHH3/M6NGj8fb2pri42DknRRBaEJGUm8Gl2mKDg4NxdXUlNzcXm81GaGio40LdELvdTl5eHmlpabzxxhtIkkRlZSV+fn5ERUWRk5PDiRMnOH78OK6urphMJrp3744kSUiSxIQJE/Dz8yM9PZ39+/eTmZmJn58fv/76K3v37qWqqor09HS6deuGzWbj5MmTDB06FF9fX1QqFaGhocDZ0rrJZOLw4cOEh4djs9lwc3MjKCgIi8VCSkoKJ06cwG63I8sy8fHxKBQKdu3axc8//0xxcTEmk4nKykqqqqocSTkgIICoqChH9XtdaSw8PNxRLV6XBOr+1+l0DB8+nIqKCux2e4t/LnzIkCF88803PPLII1RUVABnk7K7uzsuLi64ubnh6emJWq121Ap069aN0aNHU11dzbp16zhx4gR+fn7s27ePn3/+maKiIsfxrKysxGw2U1FRQVxcHDqd7oJ+A4cOHUKr1eLv74+npyeSJNGzZ09Wr15Nly5dOHLkCOHh4ZSXlxMfH09OTg7r1q0jOTkZg8HAiRMnKC8vr5fo+/XrV++GraSkBL1e76gdCA4ORpZlx2NYkiQxfPhwAgICsNlsbeLRQUG4VuKvoBlcquOKu7s7gYGBnDlzhqysLEaMGOGY/OFi63J3dyc8PJy///3vjnW7urri4eHBoUOHSExMZPjw4Rw9epQzZ84wa9Ysx3I9e/YkMjKS2NhYUlJS2Lt3L0FBQSQlJTFt2jS8vb35+OOPgbM3Ezqdjurq6gYHa9BqtYwbN46YmBi2bdtGjx49CAkJcZSS+vfvT0pKCvn5+UyfPt1RCr/77ruJjIzEYDDw8ssv1xvEQ6lU1rs4u7i4YLFYHMsYjUbHz0FBQUiSRHl5OXFxcY5HpRqaVrIldFKr4+fnx9KlSwkJCWHdunWO1+viq0tY8P87toWFhdGvXz/UajWpqans3r2bzp07889//pM77riDqKgoamtrefbZZx2JUqVSYTQaG4yhV69eTJkyhVdffZXdu3czZMgQunfvzmuvvUZKSgpjx44lMTERb29vAgMD+e233ygsLOSWW27B3d2dV1555YLvRN2NVN0+aDQaTCYTsixjtVoxm82oVCq8vLxwdXWlqKiIUaNGoVAoyM/PdzyBIAjtWcsuUjSCljDyz6ViUCgUhIeHk5OTQ3FxMUFBQfUmob/Y8j4+PqSmphIcHOwoudhsNry9vTl8+DCdOnUCIDU1lfj4+HoJqa4Uk5+fj6u
rK5WVlSgUCmJjY6murqawsNDxLOnw4cP55ZdfOHr0KNXV1WzevNnRzqlQKAgODmbAgAGEhYWxevVq7HY7Xl5eHD9+nMjISFQqFampqXTq1Ina2lrOnDlDp06diIyMJDk5+bLPGwcFBeHj48OmTZsoLy/n22+/dSRYDw8PunXrxqpVq8jOzmbHjh34+/s7SvPnaykJGc7GUteZr6H36vzxe1PXvJCRkYGbm1u94xkVFUVycrJjvHUfHx+Cg4PZsGEDubm5HD9+nEOHDjmq9n18fOjYsSMLFixgyZIllJaW4uHhgV6vZ/fu3QwYMIDi4mKqqqoICgqivLwcvV5PTEwMBQUFlJWVXXJENJVKRXh4OEajkZ07d3Ls2DEOHz4MnL2Z6969O9u3bycvL481a9agVCrp0aPHNR9bQWjt2nRSliSpRSTlyyWEuurfuk47dcvfeuutjBgxghMnTvDII48watQofvnlF3x9fbn33nvZsGED8+fP57///S8eHh54e3sTERFBeHg4er2eTp06ERgYiF6vdxyLuXPncv311/PCCy/Qu3dvJkyYQPfu3bHZbCxYsIDvv//e0YMa4LrrrmPq1Km88cYb3HzzzSQlJdUr0UiShJ+fH8OHD6egoIDt27cTFhZGREQE7u7uxMbGEhwcjLu7u6O37cMPP8wtt9xCTU0NPj4+lzw2KpWKv/zlL+zdu5fZs2djMpkcNy0ajYYHHniAQ4cOcddddzlKcpe6qWnpLvZ9/c9//sOkSZO47bbb0Gq1TJ8+nbCwMGbPns1f//pXZs+eTXV1teN4ent7O8a1fuCBB3j//fex2+2OHvV1pfHrr7+eLl268MYbbzgSo5+fH97e3nTq1AkfHx+8vb0ZMGAAx44dY968eWzfvh13d/fLVjeHh4dzww038NZbb/H++++jUCgcVd0zZ850zDr29ddf8+STT4opJAUBkFpC0rqYhIQEOTEx8ao/f/fdd+Pr68trr73WiFFdmYcffhij0cgDDzxw2YuX3W53tB2qVCpHm2htbe0FF2m1Wo1SqUSWZUe1rkKhcHzOZrNht9tRqVTY7XZsNpsjwdbW1jrWI0mSo4qzrnrRZrM5tq1UKh3PjFqtVqxWK7Iso1KpHPtjsVgc267bh7r2wro2wvNjkCQJi8VSb1/tdjtqtdoR+/k9i88/PnX7qlKpsFqtjuRy/nGo258raVN+6qmn6N27Ny+++OJll72c6upqxowZw7x58xwd2RpitVod+1u3f3XHve6cKpVKLBYLarUam83muJmqO2Zw9tzU3XjUnRs4ezzrjrVCoXCcE7vdjiRJju2ev7wkSZjNZmRZRqPRYLFYAOq1Z9fFUreuuljrjrUkSdTW1jq2W0eWZcfn6va37vsCZ78/dftY9x344w3sH5sdfvnlFzIzM3nggQeIj4+/irN19ZKSknjvvfeYO3cuo0aNatZtO5vFYmHRokVotVpeffVVZ4fT6kmSlCTLckJD77XpNuW6i1RLVtfxRaPRXHABqks8DZEkqcFpB8+/ATj/Aghn250vtq7z2wMbWmdDNxbnf0ahUNRLNnUloj/GoFarL1qSrbsR+COFQlFvX8///MWOQ0vU0DFs6NjW7c/5Ce5ix+yPx/P8n/94Ti62rvPP4/k/n7/cxc5NnYa+q5IkXfS788dtXUxLanYQhObQpquvlUplvRJGS3T+RaelXICutvakpcTvbC2p9qkpzsm17l9LOj6C0NKIpNxKNeWFTSTXa9PWj9+17l9L6eshCC1Rm07K57eLtTVt/cIvtG3i+ysIDWvTSdnV1bVe56aW4kpKCaIkITQl8f0ShJapTSfluoEvWporKSWIkoTQlMT3SxBapjbd+9rd3b3exAjNSZIk8vLy2LVrV4sf9rE9KioqatT12Ww2jh8/LoaKbGInT54UpXyhTWvTVxC9Xu+0knLXrl3JzMzk559/dsr2G1tycjIajYa4uDhnh9IofHx8HKOeXSuVSkW/fv3IyMggIyOjUdbZHJKSkvD19SUyMrLVlJxlWWbQoEGOMbYFoa1p00nZw8ODqqoqp2x79uzZ3HjjjW3mrv6xxx7Dz8+Pxx9/3NmhNJrGGkFKo9Hw0ksvOQbeaC3uuusuhgwZwm233daqanO0Wq0Y/Utos9p0Uvb29qa0tNQp2/bw8LjkxBKtjaurq2PGn9ZSqmouCoXCMctVa6LVavHw8CAoKOiSA4MIgtB8Ws/t8VXw9/enrKys1ZVgBEEQhPapTSflurl9y8rKnB2KIAiCIFxWm07Knp6euLi4UFhY6OxQBEEQBOGy2nRSViqV+Pn5iaQsCIIgtAptOikDhIaGkp2d7ewwBEEQBOGy2nxS7tChQ6t6dlQQBEFov9pFUk5PT3d2GIIgCIJwWSIpC4IgCEIL0eaTckxMDOnp6W1mZC1BEASh7WrzSTk+Pp6Kigpyc3OdHYogCIIgXFKbTsqSJOHl5UVoaChHjx51djiCIAiCcEltOinD2cTcrVs3kpOTnR2KIAiCIFxSu0jK3bt3F0lZEARBaPHaRVLu06cPhw4dwmQyOTscQRAEQbiodpGUe/XqRUVFBadPn3Z2OIIgCIJwUe0iKXt5edG5c2f27Nnj7HAEQRAE4aLafFIGUKlU9O/fn507dzo7FEEQBEG4qHaTlIcNG8b+/fspLS11djiCIAiC0KB2kZQVCgXx8fFotVqSkpKcHY4gCIIgNKhdJGVJknB3d6dv375s2bLF2eEIgiAIQoPaRVIG0Gq1DB06lJ07d1JdXe3scARBEAThAu0mKavVanr27IlCoWD37t3ODkcQBEEQLtBukrIkSfj6+tK3b182bNjg7HAEQRAE4QLtJikDeHh4MGLECJKSkigoKHB2OIIgCIJQT7tKyiqVipiYGHx9fdm2bZuYY/kSZFmu9+9yrwst2x/PmzingtAytaukLEkSgYGB9OrVi40bN2KxWJwdUov1xBNP4OnpiUKhQKFQ8PHHH/PKK6+gVCpRKBS4urqyaNEiZ4cp/AmLFi1Cr9c7zumqVat46KGHUKvVKBQKNBoNr776qrPDFIR2rV0lZQAvLy8GDx5MUVGReGb5EkaOHElISAiSJDX4vk6nY8qUKc0clXC1JEli0KBBREZGXvScqtVqpk+f3syRCYJwvnaXlCVJokOHDkRERLB161ZsNpuzQ2qRhg4diq+vb4PvSZKEv78/AwcObOaohGsxePBgwsLCGnyvbt7xuLi4Zo5KEITztbukDBASEkJCQgJHjhwhNTXV2eG0SHq9nmHDhuHu7n7Be1qtljlz5qBSqZwQmXC1goKC6NKlCzqd7oL3XFxcuOOOO5wQlSAI52uXSVmtVtOnTx90Oh3btm0TbcsXMXnyZPz8/C54XaPRMGPGDCdEJFyr8ePHEx4efkEVtkqlEs0RgtACtMukDBAbG0v37t1JSkoiMzPT2eG0SAkJCYSGhqJUKh2v1Y0j3qVLFydGJlytgQMHEhgYWC8pKxQKRo0aRXBwsBMjEwQB2nFSdnFxYciQIZjNZnbv3o3JZHJ2SC2Oi4sLkyZNql
fdqVarueOOOy7aWUho2eo6Onp4eDheU6lU3HnnnU6MShCEOu02KQN07tyZrl27snPnTrKyspwdTot044031mtXdnNzY9KkSU6MSLhWN954Y71OfAEBAVx33XXOC0gQBId2nZRdXV2ZMGEC1dXV7NixA4PB4OyQWpxOnTrRo0cP1Go1KpWKUaNGERIS4uywhGvQs2dPoqKiUCqVqFQqpk+f3mCHPkEQml+7TspwNukMHjyYzZs3k5mZKUY0+gOlUsmsWbNwcXEB4K677nJyRMK10mq1TJgwAZ1Oh0KhYO7cuc4OSRCEc9r9My0qlYrJkyezfft2Nm3aRFhYWKOXGoxGI8nJyY26zubk7++Pi4sLrq6uaDQa9u7d6+yQrlrPnj3RaDRN1iZutVopKCggLy+vSdbfWMLDw9FoNAQEBGAwGNi3b5+zQ7ooSZLw8fEhJibG2aEIQpNr90kZIDg4mMmTJ/PZZ59x3XXX0aVLFxSKxqtEyMvL4+677yYoKKjR1tnc7HY7SqWS119/3dmhXBVZlikqKmLt2rVNeh6qq6tZtWoV33zzTYuuErbb7VgsFsxmM4sXL27RHfdqa2sZNGgQr732mrNDEYQmJ5IyZx8JmThxIj/99BP/+9//iIyMbNQLam1tLcePH+ell15qtHU2t9TUVLy9vS86yldLV1ZWxmOPPdbkz6TbbDbKy8sJDQ1t8dXCR44cISYmBldXV2eHclFms5lt27aJjphCuyGS8jnu7u7ccccdPP7449x4441069atUUvLkiRdU/WbLMtOLc2Eh4c7JqNoyaWqiykqKmq2EcgUCgXe3t7NVt16td+NsLAw1Gq147Mt8bzW3dCWlJQ4OxRBaBbtvqNXHUmSGDZsGD179uS9997DaDQ26rqdsY7G7LSm0WhQKpUt8sLdllzNObvac+Li4uK4yXL290sQhLNEUj6PWq3mySefZNOmTWzbtq1VTFZhtVr59ttv6devHzfddBO7du1yvNfYCfTVV19l2bJl9V5rjgvzHXfcwaFDh7Db7U2+LWdripuenTt38vbbb5OSktKo671crKmpqdx7771UVFQ06nYFoS0T1dd/EB0dzd13381zzz3HsGHD0Ol013yhvFTiOr/qcfPmzbz99tvMmTOHKVOmYLfbueGGG+jSpQtLly5tsDpdqVRy880307dvX7777rt6NxJms5mJEydSXFyMQqEgLCyMe+65h0mTJl3VPpnNZqxWa73Xzl9PU1Wxm0wm7HY7sixTUlLCs88+S1JSEtHR0Tz//PPExcW1yhL8woUL2bZt2wXfD4VCgd1uR5IkPDw8GDNmDPPnzyciIuJP76csy9hsNiwWS7OXbO12OyaTCVmWqa2t5aeffuLTTz/FZrNx7733MnbsWFxdXVvluROEpiJKyn8gSRILFy7EZrPx4YcfNsqF7FIXnT8mNW9vbw4dOkRNTQ07d+7Ew8PDEYPVanX8LMuyI0HWTVrfEJvNxhdffMG+fft4/vnneeWVV8jKysJqtWKxWLBardhstnr7abfbsdls9d47/31ZlrHb7VitVsey5yfkukRw/nrqtlWXXM/fh7plzn///O2f75///Cdubm6sXbuWYcOG8eyzz7aKGo2GvP322+zfv5+9e/ei1+v56quv2L9/Px999BG9e/dm/fr1fPvtt9TW1vLrr79iMBguezzrzkvde39Ud8zrljv/3Nb9XPf6xb4jdefnj7Gc//nzv6sABw4cYOfOnTz00EP89a9/5fvvvyc9Pb0ZjrIgtC4iKTdAq9Xyyiuv8M9//pOMjIxmLWF07NiR3Nxcampq2LZtG0OGDEGSJGRZZtq0aRw7dgyA9PR07r33XgoLC69ovQqFgh49euDl5cWxY8f4y1/+wtChQxkzZgzPPPMMaWlpjov6rl27ePjhh5k4cSLPPvssZWVl9dZls9nYsWMHt99+Oxs3buThhx+moKDA8b4sy7z44ov8+uuvvP7664wePZphw4Yxe/Zsdu7c6ViusrKSGTNm8PLLL3PDDTcwadIkvv76a86cOcOSJUuYNGkSzz33nGMfTSYT27ZtY8GCBfj4+DB37lzKysocx6S1uVxbriRJBAQEEBkZSVVVFQaDgXfeeYcxY8Y4jueOHTuAs8fcaDSyfPlyZsyYwcyZM/n222/r1WzY7XZSU1N58MEH2bp1K7Nnz77g+fmKigqmTZvG0aNHufPOOxk6dCjjx49nyZIl5OfnO26+HnnkERYvXszs2bMZPnw49913H7Isk5iYyJw5c5g8eTJff/01cPb7kpmZiaenJ/369WP06NF4e3tz7NgxMea8IPyBSMoXMWHCBMaNG8eTTz55zY/R/JmkrlQqiYqK4tixYxQUFDQ4zd6fVVFRQVFREb/99hsmk4nevXuzbNkydu/ezffff09ERATff/89NpuNY8eOsXLlSkaOHMnXX3/NuHHjOHPmTL3S+u7du/nggw949tln6dq1K8HBwWRlZVFbW0t1dTXFxcXY7Xb8/Px46qmn2Lp1Kzt27GDhwoU899xzmM1mR2xmsxmDwcCnn37KunXrmD17tqNUuHTpUnr16uWoes3Pz8dsNhMVFUV5eTkKhYLAwEBycnKu6fi0RFarldLSUo4dO0ZaWhr+/v64u7vz97//nS1btrBjxw7++te/8uKLLzqq93/++Wd+++03Xn/9dd59911CQ0MdvZbtdjunT5/mo48+YvLkyQwaNIiePXuSkpKC3W6npKQEq9VKVlYWXl5edOvWjf/+97/s2rWL//73v5hMJrZu3Vqv9H3ixAmee+45duzYwbJly7BYLLz00kvMnj2bTz75xFFqNhqNlJaW4uHhgVqtxmKx4O/vT0lJiZg2VRD+QLQpX8LixYsZPHgwa9asYfr06VedHP/4ucu1vfbv358VK1bQs2fPRnmM5/nnn0ej0WC325k6dSru7u5UVlZSVVWFzWYjICCA3NxcKioqOHr0KBEREQwePBhfX19GjBhRb13Hjh3jxx9/5NFHHyUuLo7S0lKCgoI4ffo06enppKSkMHToULy8vHB3d8doNFJRUYHZbCYiIgK73U5OTo7jcSF3d3dmzpzpGNCjqqqK/Px8unTpQseOHQkJCeHbb78FziZwhUKBzWbjiSeeYOHChbi4uNRL8m3F8ePHWbhwIZIkERsbS5cuXVCpVI7jabFYCA8PR5ZlsrOziYqK4rfffmPmzJnExcUBOMYoP3XqFIWFhezevZuRI0cyaNAgXFxc6NatG0ePHqW6upobb7yRpUuXcvr0abp27Yrdbqeqqorq6mpsNhve3t6UlZVhMBgcz/BPnz6duLg4lEolsiyTnp5OZWUlU6dORZIkbrzxRt566y1HVTnA6tWrcXd3d1R9ix7cglCfSMqXEBERwdNPP81LL73E4MGDG20ihssl9/j4eAICAhg+fHi9uZ4lSXK0IdZVNZ//3sXW+/bbbxMfH09ubi6PP/44/v7+FBYWcuTIEQwGA+Xl5XTq1AmbzYbRaESj0aBWqxtcV2ZmJoMHD2bjxo0MGjQINzc3goOD2b59u
yMZJCUl4ePjg06n4+eff2bbtm2UlZVhtVopLy+vV2WpVCrx9vZ2/F5XKq7bvqurq+NRLL1eD5wtRX744YcYjUaqqqpa9MhZV6t79+68/vrraLVavvrqKzZt2oS/vz/79u1j69atlJaWYrVaKSsrcxzPqqoqvLy8GlxfaWkpoaGhZGRkkJOTg6enJ926dWPVqlUkJiaSkJDAzp07KS8vp1evXhQVFfHZZ5+RkpKCwWCgqKiIcePG1Wu/9/HxqdeXoaamBjc3N8drbm5uSJKEWq1Gq9VSWVnJzTffjEajYf/+/bi6ujbqWACC0BaIv4jLWLBgAaGhobz55ptX3f71Z0sDarWa119/nT59+qBUKh2vu7u7U1hYiMViIS8vj9raWsd7dSXh2traBjv4SJKEv78/ERER7Nixg19++YVHH32Ujz76iAULFqBSqZBlmcDAQMrLyzlz5gxms5mSkhKqqqoc+zBx4kT+9re/UV1dzerVq1EoFAQEBJCWloZCoaBDhw4cPXrUkRxWrVrF5MmT+eijjxydtC51PNzc3NBqtZSUlFBTU0NGRgYGgwFZlgkICCAgIIB9+/Y5qlqrqqqIjo7+U8e3tZAkCTc3N0JDQzEYDOTn57N69WomTJjAhx9+yP/93/+h0+kcNS8xMTEkJydTXV1NbW0tpaWlju9I586dufvuu4mMjGTDhg2UlJQQGRlJWVkZW7Zs4a677uLEiROcOnWKzp07k5GRwcGDB3nqqaf44IMPmDx5cr3vYkMCAwOpra11fDfr+im4uLgQGBhIRUUFeXl5FBUVcebMGUJCQtBoNM1xKAWh1RAl5ctwcXHhhRde4K677mLs2LGMHTv2T6+jsQYP6d+/P+vXr6e6upqUlJR6F0kvLy98fHw4cOAABoOBQYMGOSay37lzJ5mZmRgMBkpLS7nuuuswGAzs37+fI0eOkJSU5FhPz549OXbsGJs2beLUqVPU1tYyYMAAdDodcLZk6+7uzl133cXSpUuJi4vD3d0dvV6PyWQiNjYWm82Gp6cner2e8PBwTp8+jdFopKCg4LI3KG5ubnTr1o0dO3awatUqDAYDNTU1SJKESqVi5syZ/Pzzz1RVVZGcnMzgwYOJiIi45uPb0pSWlrJp0yZcXV1JTk4mODgYPz8/wsLCyMjIwGw2U1BQ4LgBUygUTJo0ic8//5zVq1fj5uaGq6sr/fv3d7zv4uLC9OnTefPNN9m+fTs33HADMTExnDx5ks6dO+Pu7k5eXh7h4eEYjUY8PT0dE1WcOHGCjh07XjLm4OBghgwZwueff07Hjh05ePAgkiShUCiIi4vj6NGjrFmzxvGUQXx8/EVrZAShvRJJ+TIkSSIhIYG5c+eyZMkSunTpQlhYWJNsKywsDJvNVq86tkOHDowYMQJJkpgxYwarV68mOzubvn37EhYW5hi3WKfTMWrUKLZv386xY8fo1q0bXl5e3HTTTZSUlFBaWoper2fmzJkMGzaMoKAgDh8+jF6vZ9iwYcDZhOju7s60adPYvXs3mZmZxMbGEhwc7Lgp0Ov1KBQKunXrxrRp0ygvLycmJoYZM2YQFBREQEAAM2bMICYmBk9PT+bMmcOuXbtITU0lISGBefPm4ePjA5wt3Y8dOxZPT896x3vAgAHIsszRo0eJjY1lxowZ+Pv7I0kS06dPR6PRcPr0aaKiopgwYcJlS3AtnUKhYPr06fj4+Dh6XA8cOJD09HQ0Gg0xMTEMGTKEsLAwZs2axc6dO0lNTaVv377MmzcPX19fJEmiT58+2Gw2Dh48SFVVFQMGDMDLy4uQkBD69OmDt7c3gYGBzJgxg7y8POx2O1OmTKG0tBSlUsmkSZPo3r07Li4uREdHM3nyZE6cOIGfnx9jxozB39/fUbIdNmwYYWFhjurnuuT7l7/8hbVr15KXl8ewYcOIi4vDxcWFmJgYJkyYwK5du5BlmdmzZxMSEtIsz7kLQmsiteSOFgkJCXJiYqKzw0CWZc6cOcN9991Hz549eeaZZ/7UHf6xY8fo3bt3vRKp0LyKioq45ZZb2LNnT6P0aL+YkpISPvjgA7Kysnj44YebZBvtSW1tLWvXruXEiRN89dVXTo0lKSmJ9957j7lz5zJq1CinxtLcLBYLixYtQqvV8uqrrzo7nFZPkqQkWZYTGnpPtClfgbr22IceeojNmzc7ng29Ui35xkdwPmd/P5y9fUEQ/j+RlK+QQqFgwIABjB8/nvfff58zZ844O6Rm1VgXbpEAzjr/ODi7yrYxtn+15/X8UcIEQRBJ+YrV9YSdNm0asizzzTffXPGFxJkXvcbSWInjWtbT0DFw9nG5Wo2ZiBvjGFzrOq71GX5n35gIQkshkvKfUPfIzy233MKGDRvYv3//FX2uqcfPbu2u5eamLR+XK+WsqUEFQWh8Iin/SRqNhoEDBxIXF8dnn31GTU1Ns2y3tZYIr4RICNemJZSUBUFoHOKRqD9JkiSCgoKYMmUKb731FuvXr2fatGmX/Yzdbmfbtm3NFKXwR5WVlc02+YHNZiMrK0uc70ZgsVg4efKks8MQhGYjkvJVUCgU9OnTh8GDB/PDDz/Qt29fIiMjL7q8RqOhQ4cOvPfee80YZeOqrKxEoVA4hrpsbWRZxs/Pr1HGEr+UusFVMjIyWvz5rqioQKPROJ51b6lkWWbChAnODkMQmoVIyldJr9czfvx4Dhw4wP/+9z/uv//+iz67HBMT02qnF6xz3333ERAQwEsvveTsUK5JUw804unpyV//+tdW8YzyzJkzue6667jvvvta/BjUoolDaC9EUr5KkiTRrVs3Jk6cyJo1axgwYAADBgy4aGekpi6hNbW6EZvqJocQGlY3MUhLT3JQ/5y29lHRBKGtaPlXjhZMoVAwfvx4/P39WbNmDcXFxaLDjCAIgnDVRFK+Rv7+/kyfPp0jR46wc+dOx7yxgiAIgvBniaTcCEaMGMGwYcP49ttvSUlJaXDqREEQBEG4HJGUG4FKpWLevHkoFApWrlxJSUmJqMYWBEEQ/jSRlBtJUFAQ9913Hzt37uS3337DbDY7OyRBEAShlRFJuRENGTKE6dOn8+mnn3Lo0CFsNpuzQxIEQRBaEZGUG5EkSdxyyy107tyZf/3rX+Tk5IhqbEEQBOGKiaTcyDw8PFi4cCE1NTW8++67GAwGkZgFQRCEKyKSchOIioriiSeeYNeuXXzyySfODkcQBEFoJURSbiL9+vXj0Ucf5d///jfr1q0TpWVBEAThslr32I8tWN1oX9nZ2TzxxBOEhobSo0cPZ4clCIIgtGCipNyEtFott99+O2PGjOH++++nsLDQ2SEJgiAILZhIyk3M3d2dZ555Bj8/P+6//35qa2udHZIgCILQQomk3Ay8vLxYunQp6enpvPTSS6J9WRAEQWiQSMrNJDQ0lOXLl/O///2Pjz76yNnhCIIgCC2QSMrNoG6O3a5du7JkyRKWLFnC6tWrRYlZEARBqEck5WYiSRJKpZKRI0fy+OOP8/LLL7N//36RmAVBEAQHkZSbkSRJaLVaZsyYwaRJk3j22Wc5ffq0SMyC
IAgCcI1JWZIkL0mSvpMk6YQkScclSRokSZKPJEm/SpJ06tz/3ueWlSRJ+pckSamSJB2WJKlP4+xC6+Pl5cWdd95JcHAwr732GgUFBSIxC4IgCNdcUn4HWCfLcmegJ3AceALYJMtyR2DTud8BJgAdz/27B/jgGrfdqoWHh/PXv/6V6upqPvroI8rKykRiFgRBaOeuOilLkuQJDAf+AyDLslmW5XJgCvDfc4v9F5h67ucpwGfyWbsBL0mSgq92+62dJEl06dKFu+++m8OHD/P1119TVVUlErMgCEI7di3DbHYAioBPJEnqCSQBC4FAWZbzzy1TAASe+zkUyD7v8znnXsunnVIqlQwdOpSioiJWrFiBp6cnU6dORa/XOzs0QRAEwQmuJSmrgD7AQ7Is75Ek6R3+f1U1ALIsy5Ik/aminyRJ93C2epuIiIhrCK910Gg0TJo0icrKSlavXo1Wq2XChAnodDpnhyYIgiA0s2tpU84BcmRZ3nPu9+84m6TP1FVLn/u/bsDnXCD8vM+HnXutHlmW/y3LcoIsywn+/v7XEF7rodfrmTVrFiNHjmTlypVs3LgRo9Ho7LAEQRCEZnbVJWVZlgskScqWJClOluUUYDRw7Ny/24DXzv2/+txH1gAPSpL0DTAAqDivmrvd8/Dw4NZbb8VisfDZZ5+hVqsZM2YMGo3GsYwsy5hMJrRabZPHU1ZWVu/GwGAwUFVVRV5eHpIkAWcn3PDx8WnyWARBENqLa5268SHgS0mSNMBpYAFnS98rJEm6E8gEZp5bdi0wEUgFDOeWFc7j6enJ/PnzsdlsfPDBB7i4uDBixAhUKhWyLHPw4EF++uknnnrqKZRKZZPG8uuvv/LLL79gNBqRJInExERcXFwcM10pFAqmTJnCzJkzL7MmQRAE4UpdU1KWZfkgkNDAW6MbWFYGHriW7bUH3t7e3H777dhsNv7xj3+gUCgYMWIEBw8eZOHChZw+fZrRo0czePDgJo1DrVazbds2MjIyABy9wo8fPw6cnf3qtttua9IYBEEQ2hsxolcL5OPjw5133smYMWN48skn+eKLL7jnnnv4/fffKSoq4s0338RmszVpDMOGDSMgIABZlrHb7ciy7PjZbrcTGBjY5DcGgiAI7Y1Iyi2Uj48Pf/nLX+jduzfPPfecY5xsi8XCvn372L59e5Nvf/Dgwbi7u1/wnpubm3h0SxAEoQmIpNyC5eTksGHDBrKzs+sNKlJQUMA777zTpKVlhULBpEmT8Pb2bvD9OXPmNNm2BUEQ2iuRlFuokydPcuutt5KVlYXdbq/3nsVi4ciRI2zdurVJYxgyZAh+fn6O3tZ1YmNj6datW5NuWxAEoT0SSbkFqqqq4t577+XEiRNYrdYL3pdlmaysLD766KMLEnZj0mg0TJw4sV4VtpubG/Pnz0elutaO+4IgCMIfiaTcAun1elasWMFLL72En58frq6uFyxjsVg4duwY27Zta7LxsiVJYsqUKfWeizabzeIxKEEQhCYiknILJEkSfn5+LFq0iPT0dP75z38SGhpar2OVLMucPHmSTz/9tEnblvv06UNwcLCjCnvAgAGEhYU12fYEQRDaM5GUWyhJkpAkCb1ezwMPPEBaWhrvvfceXbp0wd3dHYVCgdVq5dixY+zatatJS8tz5sxBr9ej1Wq54447LmhjFgRBEBqHaBhsJLIsYzAYGmwDbixTpkxh/PjxrF69mqVLl3IqNZWjR4+yYsUKunbt2mSjfI0ePZrFixdjtdkYOXIkFRUVTbIdZ1IoFA0+/iUIgtCcRFJuJDU1NSxZsoRjKadQNkMnqPjuPfDy8yf11El+WL2G1NMZePv6Ntn2tG46PH18efr5F5tsG85it1nx8/Hm3aVLnR1Kk5FlmdLSUioqKhy1KjU1NRQXF5OWlua4ofPx8bnoY3CCIDQ9kZQbidlsZteuXSj8QgmNjIFmqOL1iOlK7MARlBcXoVSp0PkFNNm2eg4z46rTo4vs0GTbcApZpiA7g0MbNzo7kib34YcfsnTp0nqJecuWLbz22mvA2aFVlyxZwj333OPMMAWhXRNJuZEljLqeHoOGoVC0kuZ6GbiC+4eK0mJc3XSoXbSN0KZ8hRtt5I82xG63cyxxN6l7mnaENGeTJIm+ffsSEBBAYWFhg30QtFotY8eOdUJ0giDUaSWZoxW6bL+rSy1wmQ83Zp+uK0xwnj5+aLSujdTJ67x1/Kl9kRuIt2k6uLVFCQkJREZGNnjDqFQqGTRoEFFRUc0fmCAIDiIpN5WGcpd8uQUaeK+hnCNBVXkZu9b/hLm29oK3k7Zs5Ez2hSOBXXKdVyB59++cOnzgmtZxgSvI8TvX/UhZ4Rka7mAueoJfKT8/PxISEvDy8rrgPaVSyZ133tn8QQmCUI+ovm5OV5g/Ss8U8Pva1UTExdOt/+AGO46VFxXy03+X0bX/YDTnDe4BsPG7rxh10yy8AwL55YvlGGuqkRQK9B6edO0/iKjOXa8q/N/X/g/vwGA69ujdrLnwx08+wicgCE9fP0ymWvb8+guFudl4+vjRb/RYvP0Dmy+YVm7s2LF89913lJSU1Hvd19eXUaNGOSkqQRDqiJJyc5Av+OGSTuzfy5b/rWTD159hsZj/zEfrsdtsrPn0IyxmEx7ePlRXVPDV269RUVpy+Q9f4A+NuU6qNd6y+jt2rf8JjYuWlAOJrP7Ph9jtTTuNZVvSo0cPIiMj6w2TqlarmTJlCj4+Pk6MTBAEECXl5iFd8MMlHd23mz4jRvP7L2uoqSjHRXt2mM2CzAzWf/MZxupqQqKiHcsba6o5uGMrh3ZuIzgiirLCgnrrGzTuBqK79qCqrIRHp33J6aOHObF/L3npp1G7uBDTrSeDx92Ad8DZEmfWyRPs/W09hTlZRHaKZ9S02Wh1OuoysWy3k5WawrY139Nv9HiO7dvF9TPn4u519lEaWZbZ8M3nRHXuQmbKcY4n7cFqteITEMjoGbcQHtsJSZIw1tTw7dJ/EtIhhrQjh5HtdvqPGU/nvv1J/G09x5P2EhnXheqKcgCsVgu/fLGcm+5+kMETbiTjxFHeeuQBxs2eR/B5x0O4OJ1Ox5gxY9i3bx+FhYXA2fN1++23OzcwQRAAUVJucSrLSslOPUnvYSPxDQzi5MEkZFlGttv5/P8Wo1Qq6TX0OrJOHsdmtYIMJw/uJ2nLr3To3AVZlqksK7ugd61st1OUl4fFbMLbP4D+o8dz/ay59B8znuK8HLb/9AN2u538rLOJ32axkHDd9ajUagpzc/7/+mSZnNOn+OadN4iM64LW1ZXc06kUZGU4tlVTVcnJQ/upNdTQpd9ARs+4hTEz5uATGMyn/3jeUbK1Wszs+20Dh3dtp9fQEVw39WYi4+I5tGMLxxP30KlXXwyVFRiqq5BlmfLCQorycuk19DrUGg0RHeNw0+vITjt1maMqOoOdb9KkSXh6ejp+j4uLo0ePHk6MSBCEOqKk3MKcPnYYV50On8Bg4hMGcGDHFvqNGU9xbg7Hk/Zx93P/QOfhiSRBzrL3MNUayTx5HHdvH4ZNnkZNZQV
JW36tt853Fj2Ei1ZLrbGWEVNuJjS6I2qNBlmWsdms2K02Du3chqGqklMHk1AoFPQfM4Hw2E6YjAZUarWj13V+ZjofPPsYo6bNZsCYCRiqq/APCSUvPY3i/DzyMtLo3Kcfrno9ei9vwmI6EhbTEVmWie3ei58+/TdFebkEhUcC4OLqxsCxk0gYORaVWo2xupr0Y0cIiY5lyIQplBcXsv3n/wFQXXV2JDG1i4Y3HrqTmQ88gs7Dk5pzJemLE53BztehQwe6du1KRkYGCoWCuXPn1pt0RBAE5xFJ2WnqSm/1E8bJg/sJDI/EVaejS79BfPTcIqwWM6WFBWjd3PA6N0BIUEQHJEnCYqql1lCDu6c3rjo9Wjcdrvr6w0XOe+RpwmM7kZ+dwTfv/JPkXdupKC3mt1XfUlZUiLG6ms59EjCbaikrKkTn4YWXnz8qtRqV2rPeug7t2EpodCxZp1JwcXXFbrPhFxzKqeSD+AWFkJeehizLeHh5o/fw5Mie3/nly0/JSTuJxWymvKQIQ1WlY31qjYbwmE6oNRoAao01mM0m3L18cHF1JSAsAs256nu1WoMs21EolNz59Mu46vRYzCZUGpfGPDFtnkqlYurUqWzbto3KykpmzZrl7JAEQThHVF87jcQfE7K5tpbTx46wceWXLJw0gjf/ei9FeTmk7E9C5+GJsboau92GbLc7qnSVKjUqlQqTyYgsy5iMBqwWS731+gQGERQRRY+Bw4ju0o31337O/z5+n1kPPMLLn3/PbYueO5fwZLQ6HRZTLRazqcGoR0yZzv2Ll3Do9y0c2rkNF1fXs0n58AE0Wi2B4ZGc2L8Pd28fZGS+e/9t+o4YzbPLvuTVb35E5+6B/IdHtRR1Y3bLoNa4IEkSZlMtIGOsqT5bTQ/4BYeiUmvIz0zHNygEGSgpKCAgNLwxTki7MnHiRFxdXRk2bBghISHODkcQhHNESbkFyTqVQm1NNU9/+Dkx3XsBsPSJh9m/dSPzHn0GvacXG1d+RcJ1Y/ht1TcAuOp0BIRFsGfjL5w6tJ8z2Zn12nfryLJMcX4upw4dJLJzPBaLBd+gEIzV1Rz8fQtm89le3rHde3Fw+2YO7NjCwNHjOZV8EG//QDrEn32MylXvQWB4BNPvfZgv33yVZz/+Ep2HJ5baWipKigmNjmHnLz+i9/RCrXbBaDTgHRCIm7sHm777GmNN9cUPgAR6D0/8Q8JIO3KIzJQEUg4kUl5cBICLqyu9h41k9X8+4M5nXmbDN5/j7uVFTNdujXcSrlF5eTnLli3jgw8+cHYol1VaWsrBgweJj493diiXpFAomDx5Mm+99ZazQxGEJieSsjP94SmjjJSjBIRF4BMUgovr2dGz+o+ZwHcfvM1tT7zAg6++xUcvPM53H7zDwOsnoFKrQZLoPmgYeRmnWfL3++jcux8u2v8/FKaExJOzJyMpFHj7+dN/zARmPvAIqz58m6fmTMYnMIjorj3Qe5ytpo7u0p1xt9zOj598xLdL/48+w0Zx2+PPIikUIElI0tmSbb/R4zj0+1a+e/8dxs2ZT3S3Hnj5B+AfEk54x07oPb3wCQxizIw5/GfxM1RXljNu9m34BATVOwQXjBAmSQwaP5mivBwW3zOXPsNHo9G6OJZb8NSLvPvEQu4b2Z+wmE78ZfESlGpNk52iP8tms2EwGIiPj2fq1KnODueSTpw4QWRkJK6urs4O5aLMZjP79u3jzJkzzg5FEJqF1FTz8DaGhIQEOTEx0dlhXJHS0lJuueUW+s+8/arHvj7/XNQlobrXJElqsjmTz3cl27nSWC613B/fOz85/9n9lJDO3txcxbjYdWNff/XK05w4ceLPfbgBJSUlfPDBB2RlZfHQQw9d8/raO5PJxNq1azlx4gRfffWVU2NJSkrivffeY+7cue1uoBWLxcKiRYvQarW8+uqrzg6n1ZMkKUmW5YSG3hMl5RakoXGlz3+tccadbsAfktmVbOdKY7nUchd776r38xoOT1Mc2yY7X22ALMvi+AhCA0RHL+FcMruy0mldZ7KKkmIqSkuwnuuEdZlPNbieqrJSRyeuhhhrqjEZjRd0DBP+HLPZTE1NzRWeq8ZjtVqpqKhocAx2kZAFoWGipNyKyLJMdUX52UeOvC8zJOIlqnLtdjuVpSUYa6pRKJXoPb3QuXtc0fZlWWbVv99l78Z1uOp0/GXxEsJjO13mkxcGYjbV8tz8Gfz1/94jMq7hjkbrv/kMn4AgZKefXQAAnZtJREFU+o8eh9ZNd9n4WpuioiIMBsMFr59fta9QKHBzc8PDwwO1Wn1V29mzZw+7d+9m8uTJdO7cGWiekurp06f5xz/+wdtvv42Hhwc1NTVUVp59HM7T0xM3N7fWM8WpIDQTkZSbWGNO/2u32fjly08w19Yy95GnLr3wJTZqNZv5dcUX7N7wM1aLlYnz7mDc7PlXFIO5tpZfvlzOP1etJzDs7AAgyDI0wQV+6p33N/o6W5JPP/2U/fv3Y7fbSUlJISoqCq1Wi9Fo5MyZM4SHh+Pm5kaHDh2YPn06Xbp0QVn3+Ng1au6Sanl5OatXr2br1q3YbDZGjhzJTTfdhJeXlyg1C8J5RFJuYn/2cmMxm8jPTKeipAS1iwvhMR1xc/egprKCkweTKMjKwGo2c3DHFtzcPYjt3hNkKC8poig3B6vFgl9wCL5BIajUalIOJqL38KKsqBCFQkFIhxi8/Py5+f6/MWr6bFZ9tPSKYzuxfx9FeTnYLFbyM9Ipysslukt33PTumGqNFOVmU1FSgourK8GRHXBz97jIBffcGNqyTGlhAVXlZWjd3AiKiELv6UV1RTl56WkYqqsIjorGLyjEMVPW6aPJVJaVgCTh6qYjLKYjOg9P7HY7VWWl5GemY7Na8Q0Kxi84BFUL6pn9R4899hhwtnp51KhR/OMf/6Bz584kJSXx/vvv8/rrr6NSqXj//ff5/fffiYyMpKSkhDNnzmC329FqtURHR+Pp6YkkSWeHSc3Pp6Dg7NjnISEhBAXV7+1uNBo5ffo0QUFBZGVl0bFjR3Q6naN0brVaSU5OpmPHjpw+fZrq6mpUKhUBAQGEhITg4nJ2oJYTJ06gVqupqKigtrYWvV5P9+7dqa6uJjU1FavV6qgFkGWZQ4cOcfz4ce655x7UajXvv/8+3bp1o2/fvo12oyEIbYFIyk3lskXkhhdIP3aEX1d+hdlUi5venZ6Dh9N/zHjKS4rZsvo7slNTsNvsbP5hBcFR0UR36U5pYQHbf/ofuWknQZJw9/Rm7Jz5BEd2YNlLTxHRsTOSQoHJaCAgJJzZCx9D4/Lnh1Xcu3Ed+ZnpWCwWNv+wAp2HJwGh4ajUGk7s38eudT9hNtWi9/Bk0PjJdOzR6yJJ8ex+Wy1m9m5ah8loxG630a3/YEZMuZny4iKStm5i94afGTPjFkbffAtuendA5vDu7WQcP4qMDLJExx69mDT/TgzVVexc9yPJu3eg8/AkomNnBoyZQEBY6x1YRJIktFoter0eq9WKzWYjOTmZvXv3YjabkWWZjh07ctdddwGQnZ3N559/jsFgQK
PR0KNHD8aNG+dYn9FoZNu2bezcuZNp06axdOlS7r77bgYMGOC4eSouLmbx4sW8/fbbbNmyhdzcXAC8vb2ZNGkS3bt3R5IkPvzwQxQKBa6urlitViIjI+natSurV69m37596PV6FAqFI9FnZ2fj5eVFt27d0Ol0+Pn5cerUKbp164abm1vzH1xBaKFEUm4qFysgXmbGqJSDSSiVSm792xOoVGoyT559TCcsOpaFbyzl+4/+Va/62mq1cDxxDwWZ6dx451/w8vXnm6X/JPXwAfyCzo7U5OLqyty/P01pYQGvPbCAwRNvJLZbzz+9S/MXPUtxXi7H9u3ib//3vmMXCnOySNr8K/6hYYyZcQvGmmrsdjt2++U7j3n5+TNp3l0c3LGFvZvW0W3AEMJiOjJn4dnhRdX1bh4kpt55P7IsY7fbSE0+yPtPPcJ1U2+mpqqStCOH6T1sJIPG30BBViaSovVWixYWFrJq1SrsdjtZWVmMHTsWnU7HjTfeyI033ojNZiMtLY3777+fGTNm4OnpyY8//kh1dTULFy7E1dWVM2fOODp31dbWsmXLFiorK7nhhhvo0qULPXv25Pjx4yQkJLBy5UrGjBlDeno6ISEhREREsHDhwrMTgZSX89///pcDBw7QuXNnNOeGRFWr1dxzzz2Eh4cjSRJlZWV8/vnnvPzyy3Tq1ImVK1eSnp5ObW0tlZWVuLq6cvr0abRaLe7u7lRUVNTrfCZ6ZAuCSMrN6wquN9FdulNRUsyeDT/j5uFJfJ8Bl7xQmYxGCrIyyMtIY9f6n1BICgpzstG5e9BnxGgAeg8bhateT6g+ltAOMaQfO3JVSfli+1JVUU5VeRlDJkzB09cPT1+/K1qFSq2h+6Dh6D29CI3uiHbndorycgiKiLroZzJTjnF0326qK8oxm2oxGqopLynG08eX2O49OZOTxc5ffiQ4Mrre9JatjdVqpaqqCqVSiaurKxaLBbPZTH5+PomJiRQXF2O1WqmpqaGwsBB3d3f279/PrFmzCA4OBsDLy8uxvpycHAoLC+nbty/h4eGo1Wq6d+/OunXryM3N5ZNPPsHb25ucnBy6dft/7N13eFRV/sfx95lMeu89IUAIBAgtdBCQovSiIgiCri72tqu7uq7+bGvZXTsWLGsvKAgiKkWq9N4CoZMG6b1NptzfHyEjJZRAkpkk39fz8JBMOfc7N8l85px77rmdqKysZO/evezfv5+ysjKSk5Pp0KEDlZWV1lAeMGAAoaGh1h5xeno6ZrOZxMTq0y979+7N2rVrz3pdVVVV1sefSwJZCDklqnFo531xQe269qDv9WMIDI+kuCCf7999zXqJRjj/jUsphYNej7uXN74BwXj7B9Br6HV0HTDYeh3mM98ALRYLuit689Mu+K06vY63RbuCU5esp8toaFy8p1ReWsK3b/2H0sICPH188fYLQLNomE1G3L286T18FJ169UMpxdYVSzm8e0fd67ETYWFhzJw5k3vuuYcuXbqwb98+UlNT+fbbb0lLS8PT0xN/f3/r8DBUz9Q2m821tufp6cnIkSMpKipi9+7dGAwGOnbsyOHDh9m0aRPjx49n06ZNJCcnEx8fT1paGnPnzq2+DrafH66urpjN5rNOb6o5Fl3j3LCtma3v7OyMp6cnlZWVxMfH06lTJ8rKyvDy8kKvb3r9AvnwIBqShHJjuMSQ9ZkObN+MyWikS/9BRLZtx/5tm9E0y+nrVyhcPTzJOZmB0VAJVA9Nh0TF4BccQly3RAaMnkBoq9a4untWL40JbP1tCeUlxaQeOkD60UPWdbUvpKykmLlv/5eNS3/GUFlRe+1nfOvp64u3vz+7N6ylICebjONHSTt80Prc4vw83nnyL+zbvOGsN21jVRXb16yguCCf1EPJGMor/pjRXYvykhIObN9Kj8HDGTJxMt7+AdYPAqVFhRzcuY2wmDZ0SOxDWUkRuZmnLrm/beVyVi2rHqa3YDAYMBgMlJeXs3//fjp37sy4ceMIDg62Bp9Op6NXr14sW7aM9PR08vPz2bdvHwUFBQAEBQXRr18/hgwZwtKlS0lNTcXf3x9HR0eWL1/OxIkTyczM5OjRo7Rv357c3FyysrIYPnw4ffr0wc3N7ZI11/TA169fT2FhIb///jtQPcwdERFBYWEh+/fvZ/fu3eTm5tKmTRvrxDF7XlnwTDLELhpa0/uY2sy5e3mz/pdFzH//TTSLxqjpt6NzqP4x6XQ6OvXux5blv/LiPTOJaB3LjL89TXxibwpzs5n//puYjFWERscwZOLN1qsvObq48NbfH8RsMnLtpCkER0ax+sd5LJ/7BdkZ6ezZ+DvpRw9z3dSZRLRui9FgYN+WDegc9HTpf80la/YNDKL3sJGsWTSP959+DN/AYIZMmmztBRkqK9i6YikdevQmXuvzx7rcqnqBkNmPP4STqyu9h43CLziE3xcvYNWC7zh5/AiOzi6s//VHht14C4nXjqDf9WP46rUX0Ts5ERPf6fQEMHDQ66koK+Wr116irLiI0OgY2nTsfLrC+jwxrX5c6o394MGDPPbYY7i6uuLm5saIESOIjY1lwIABzJ8/n/nz59OxY0c8PDxQSlkv2lBYWMgrr7yCk5MTAwcOpE2bNtbt6fV6rrnmGpKSkpg/fz73338/HTt2JC0tjcDAQGJiYiguLsbf3x+DwUB4eDj/+te/CAwMpLS09LyZ3Ofy9PTkrrvu4ssvv8TJyYmgoCAcHBxQSpGQkMDx48f58MMPsVgs9O7dm3bt2lnPU24KQVc9T8Iis8VFg5K1r+tJfax9DdW9x8LcbMpKinFw0OMXHIKbh6f1lBWzyUR2eiqGygqcXKpPPQIoKy6iKD8Xs8mEm4eX9XrIj91wHVMeeIyA0HB0Djr8gkJw8/SiKC+Hguzs6o0qcHF1wy8kFBdXN8wmE1npqbi6e+Dt528Nd03TMBmNZBw7Qqv28efUbaAoL5eykhKcnJ3xDQrG2aX6ohomYxXpx47gHxyKh7eP9fSdjGNHcPPwpLSo0PocFzd3CnOzyc/Orj7/+TTfoOqh+eL8XArzcgHw8vWnvKS4ega4kxPlJSUU5mZjNpnw8PbB29//kqdE1ax9/c2L/+TAgQNX9DM705lrXz/44IOX9RyLxcLhw4eJiorCxcWF8vJyUlJSrGHr6uqKv78/bm5uFBUVkZubi9lsxtfXl+LiYsLDw3F1dcVisZCfn09hYSEA/v7++Pj4UFZWRllZGd7e3ri4uJCbm0tFRQWhoaHk5eVRVVVFREQEubm5lJeXEx0djclkIicnh+LiYpydndHr9Tg7O+Pr64teryc9PR1vb2/rhwKo/v2oqKjg1KlTWCwWPD09reGu0+koKioiLy8PTdMIDAzEy8vrkn8nlZWVdrP29ZYtW5g9ezZ33303/fr1s2ktjU3Wvq5fsva1XaoJnLN7CI5OjgSGhRNIxHnPUEqhd3QkLKbNefd5+vji6eNb65bcPDyIbtfeOpwN4BsYjG9gcK2Pd9Dra50kpZTC0cnpvECurtuZgNBwAkLPeHmnX5re0YlWcWc/R6fTW
VcC8w8JPes+n4AgfAKCaq3t3Pv8gv54DR7e3nh4e9f6PHum0+mIi4uzfu/u7k58/Pn7GKonb505gSsoKOis1b8CAgIICDh7op2HhwceHh7W78+8Pzi4ev/VBGUNvV5PaGioddLYuSIiav/9dHNzs/bOz+Xr64uvb+2/o02B9JRFY5BjyjajqH1I9UK3N7SzR0zKS0t4+4mHOXXi2GUd7zu0ewfz3n+TnIz06hvqYS5ZS3QlI1f1MfR7JW3Y8yhbQ6gJ5aY4OU00HRLKzdyLXy+iffdel7EM5tn3b1z2M8HhUbh7+1hv+/LVf3FLtzZMT2zHl6++eNbjgyOi0Osd2b3h7FNg6sT+Dys2uKZwbLVGU6q1PtRMqpOesmhIEsq2VFtHow6dj8t5qJOLC7rTk20ul8lkYtfaVbTp3AU3T0/rc2955Ak+35LMNWNvwGQ0nvUcLz9/fAODOHniKMUF+XWoUDSkltabbUgyfC0ag4SyLV1o9LquT6+P990z2shMOY7JZMQvKAQHhz+G6nQ63UUD3i84BLPJTHZ62rkV1mt9F7zvKvZDcw2vhujNXu2+aqr7uuYUNQll0ZDk4EhzcLXvu+ecMZSXeRI3Dy+cnJ0v+01dKYW7lzeaZqm+YER9u1gZ6jIec6nm6zG8NE3DaDTWellGUTcGg4Gqqipbl2H9mRqNRlxdXW1djmjGJJTF6TD7I5mrDJU46B3qfFpXzZWczOcMbdvjecINpSbcly1bxrp162xczYUZjUZKS0vt/tKJNWE4YsQIW5eCyWTCYrFIKIsGJaHcXNU5B/94sKu7B8Yq4wWXbLwQo8EAVB/HvlDbzZ2XlxczZ85k+PDhti7lovbs2cMrr7zCnDlzcHd3t3U5l3TuaV6NzWKxUF5ebl2LXIiGIqHcwOqzj1intuqy0XMaDgqPxFBeRkVZ2eUtK6hpaEBJYQFK6S54jnFLoNfriYqKIioqytalXFRJSQkBAQH06tULT09PW5dj90wmE6WlpTg5OeFy3odOIeqPTPRqYPXZR2yQ/mYtSe8fGo53QCBZaSnW3i9A7qkMdq9fQ+6pk+RkpLF303oKc7NBKUxGI3mZp3B1dycw7PyFJc7f6FXUK65aeXm5XMe4DsxmM+Xl5Tg7O1uvkiVEQ5BQbiiXDI/LmU58pW3XQS1J7+DgQO9h13Nkz05Kiwqts2Uzjh9l9cLvcXFzQ+/kxLpfFpJ9erGQvKxTFORm07pjAm6X7HmdsdE6vRatlnolpa9EWVkZbm5udn082Z6YTCYqKytln4kGJ8PXDaW2v9uzeqWXM5343OdcxlMvx2WMg8cn9iH31MmzJnt16XcNXfpdc8E2WsXFE9et1uVcL+wKj3tfYQPiNAnlujGbzRgMhiZx/F00bRLKjelK3v8a4j3zMtp0cnFh2E23XHYbIZHRhERe+LKLwr6Ul5efdz1kcWE1p7h5eXnZuhTRzMnwdWPQzvui7k+ur1FaTbvCtrSLfiuaFukp143RaKSsrKxJX1BDNA0Syo3hsoasL/HkK37vPCc9lbrCttRFv61TDXXVRFeAsmc1oSwuT8153RLKoqFJKDd79tATusoarro3J6F+rprZ19JTvjxVVVWUlZWdddlMIRqChLJoASR4ziXD13Vz5gpoQjQkmehVzwqyMslMOY5S8nmnKdA0C/lZmbYuo9FJKNeNHFMWjUVCuZ7UvLl9/K+nqi+CLm92TYOmYTabiY681IInzYssHnL5NE3DYDBQWVkpPWXR4CSU64mPjw8LFy6s83rRTV1paSmff/4527ZtY86cOTg6Otq6pCtS14tvNHXSU758ZrOZvLw89Hq99JRFg5NQridKqRa5Jq6bmxt9+vRhzZo1pKamkpCQYOuSxGUoLy/H1dVVQvkyGI1GsrKy8PX1ldEF0eBaVvdANIjg4GAiIiLYunVrk72AfUuiaZrMvq4Do9FIXl4eISEhsr9Eg5NQFldFKUVwcDAxMTHs2LHDLi5ILy7OYrFY13EWl2YymcjNzSU4ONjWpYgWQEJZXDUvLy86d+5MXl4eycnJti5HXEJ5eTkWiwUPDw/p+V2Gmp5yaGiorUsRLYCEsrhqSimio6Px9fVl9+7dMoRt52pCWda+vjxGo1F6yqLRSCiLehEREUFYWBj79u2jrKzM1uWIi6ioqADA1dXVxpXYP03TpKcsGpWEsqgXXl5edOzYkby8PI4cOWLrcsRFlJeX4+TkhIODg/SUL0NpaSmlpaUSyqJRSCiLeqGUIjY2Fr1ez4EDB7BYLLYuSVxAzelQLe3c7CthMplITU3Fzc2NgIAAW5cjWgD5qxT1plWrVgQHB5OcnExRUZGtyxEXIOcoXz6z2cyJEyeIjIzE2dnZ1uWIFkBCWdQbT09POnXqRGZmJqmpqbYuR1yAhPLlq+kpt27d2taliBZCQlnUq86dO1NZWcmRI0da3JKjTUVFRQUuLi4yfH0ZTCYTKSkpEsqi0chfpahXMTExhIaGcuDAAQoLC21djqiF9JQvn4SyaGwSyqJeubi40K1bN44fP86pU6dsXY6oRUVFhYTyZdA0jdLSUnJzc4mJibF1OaKFkFAW9a5Hjx7k5eVx4sQJTCaTrcsR55DZ15fHYrFw4sQJnJ2dCQsLs3U5ooWQv0pR71q1akWrVq3YtWuXzMK2Q+Xl5bi4uEhP+RIsFgsHDx6kdevWTfaSpKLpkVAW9U6v19OnTx/2799PXl6ercsR55BQvjwWi4Xk5GTi4+NlX4lGI6EsGkT//v05efIkKSkpMoRtZ2T29eWxWCwcOHCADh062LoU0YLIX6VoEBEREcTHx7Nu3TpKS0ttXY44g8y+vjRN0zCbzRw+fJj4+HhblyNaEAll0SCUUgwbNozNmzdTVFQkV46yIzU9ZQnli0tLS6O0tJS4uDhblyJaEAll0WCuvfZaCgoKSEpKkoVE7IgcU740TdPYt28fUVFReHl52boc0YJIKIsG4+Pjw8CBA1m2bBkVFRXSW7YTNecpyzHlC6sJ5c6dO9u6FNHCyF+laFBjxoxh7dq1FBQU2LoUQXXYSE/50jRNY+/evRLKotFJKIsG1bdvX9zd3Vm7dq0MYdsBi8VCZWWlTPS6CE3TMJlM7N69m+7du9u6HNHCSCiLBuXs7MzYsWNZuHAhBoNBhrBtzGAwYDKZcHNzk+Hri0hLS6OgoEB6yqLRyV+laHDjxo1jz549nDhxwtaltHgVFRWYzWbc3d2lp3wRmzdvpk2bNvj5+dm6FNHC6G1dgGj+4uLi6NSpEwsWLKB9+/Y4ODjYuqQWo6ysjAMHDnDq1CmcnZ0pLS2luLiY48eP4+LigqOjI3q9nqioKJydnW1drt3YvHkzvXv3tnUZogWSUBYNTinFlClTePnll3nwwQfx9PSUXlojMRgM/Pzzz3z11VfWVbxOnjzJ7Nmz8fPzw8HBAXd3d9577z0J5TNs3ryZ+++/39ZliBZIQlk0iqFDh/LUU0+xceNGRowYYetyWgwvLy+8vLw4efIkZWVlAOh0OjZu3IhSCp1Ox9ChQ3FxcbFxpfajsLCQ
gwcP0rNnT1uXIlogOaYsGoW/vz8jR47kyy+/tHUpLYper6d9+/bExsZaJ3ZZLBbMZjMmkwmz2cy9994rhxTOsGfPHjw8PGjdurWtSxEtkISyaDQzZ85k6dKlnDp1ytaltCjt27enXbt2tc58j4uLY8CAATaoyn6tW7eOnj17ynC+sAkJZdFoOnXqROvWrVm4cKGtS2lRIiIiar0msKOjI3fccYcc4z/H77//zsCBA21dhmihJJRFo3F0dOSWW27hk08+sXUpLYper6djx45ERkaeFb6urq7cdNNNcr7yGaqqqtiyZYuMHgibkb9G0aimTJnCoUOH2L17t61LaTGUUiQkJNC6dWtrKDs5OXHzzTcTEBBg4+rsy65du9A0ja5du9q6FNFCSSiLRhUYGMjw4cP55ptvbF1KixIXF0dYWJj1uLLRaGTGjBk4OTnJ0PUZNm7cSM+ePXFycrJ1KaKFklAWjW7atGl8++23GI1GW5fSYjg5OREfH4+/vz9KKXr27ElcXJwMXZ9B0zQ2btxI3759bV2KaMHkL1I0upEjR6JpGqtWrbJ1KS2GUopevXrRqlUrnJycmDFjBh4eHtJLPoOmaWzatIn+/fvbuhTRgsniIaJRKaVwdHRkwoQJzJ07l+HDhzeZYDCZTFgsliZ7UY34+Hh8fHxwcXGhf//+6HQ6DAaDrcuqM51Oh16vr/ffm4MHD1JUVESvXr3qtV0h6kJCWTQ6nU7H5MmTmTp1Knl5eU1mstHChQvZu3dvkw1lgOLiYuLi4vjyyy9xc3OzdTl1ZrFYaN++PSNGjCAoKKhe2161ahU9evSQU8SETUkoi0anlKJ79+5ERkayePFibrvtNluXdFl+/vln9u/fT0xMTJNdAcvLywsXFxcyMjKa5PHklJQUTpw4QY8ePeo1lDVNY/Xq1QwePFgCWdiUhLKwCb1ez4033sj8+fOZOnVqk1k9adCgQdx8881Npt5zVVVVodfrm2QgA/z666+kpKTUe7tlZWVs376dBx98UEJZ2FTT/MsUTZ5er2f06NGkpKSwb98+W5fTYjg5OTXZQG5I27dvx9HRkc6dO9u6FNHCSU9Z2IRSitDQUPr27csPP/xA9+7dm3QPRdO0K65f0zQqKytZtWoVDg4OhIeH06lTp3qu8A9Hjx7FZDLRpk0b9PrGews4evQomqbZ5fD/2rVr6d27Ny4uLk3691A0fRLKwmZcXFwYPXo0r776KllZWYSEhNi6pCtW80ZeXl7Ozp07MRgM9OzZEzc3N7Zt20ZBQQF9+vTBx8en1udXVVWxfft20tPTiYyMtIayxWJh165dHDhwAKUUrq6uxMXF0a5duysO1FWrVlFeXs6f/vQnPDw8rqiNK92u2WxmxowZODg4sGvXLlJSUnB1daVXr14EBgbaJBDNZjPr16/nxhtvtLsPC6LlkXEsYTN6vZ6uXbvi5eXF6tWrbV1OvSguLuaXX37hq6++4ujRo+Tk5LBw4UK+/PJLcnJyan2OUgpvb28ee+wxhg0bdtZ9FouFlStXsmrVKnJzczlx4gSffPIJGRkZWCyWxnhJDWLbtm0sXLiQlJQU1q1bx2effYbZbLbJzPaMjAxSU1Pp3bu3DO0Lm5OesrAZpRS+vr5cc801/Prrr0yYMAEXFxdbl3XVPD09ycvL48iRI5SUlKCUsvaQv/32W3r06EGbNm3Q6XS8/vrrzJgxA39//4u22bNnT6ZPn05VVRX3338/hw4dIjw8nLlz53Ls2DGUUoSEhHDdddcRHh4OQGFhIZs3b+bgwYP4+voyaNAgIiMjz2r31KlTzJ8/n4kTJ7JgwQKmTp1qrUXTNI4cOUJycjLh4eGsX7+e4uJivLy8GDRoEB06dLBeeerdd98lLi6O/fv3U1paSrt27Rg3bhwHDx5kzZo1uLm5cfz4caKiorBYLCxZsoSoqChuuukmMjMzeeihh5g0aZJNrmG8efNmwsLCCAsLk6FrYXPysVDYlJubG/379ycrK6tJTvg6s2dX87WzszM+Pj4UFxeTnJyMh4eHNZTXrl3LyZMnrb3chQsXUlxcfNnbKi0txWAw4ObmhlKKdu3akZiYSNeuXSkvL+eDDz7AYrFQVlbG6tWr+f333wkLC8PZ2ZnMzEwqKyut7WVmZjJ79mw8PDxwc3Nj165dJCcnW+83Go0cOnSIffv2ERgYSEJCgvU83q+++orc3Fzra/7555/5/vvvCQkJsX7oyMnJYe7cuUD1FcLS0tKwWCwUFxeTnp5Op06d8PHxoUOHDnh5eXHgwIEr/0FchQ0bNpCYmCjHk4VdkJ6ysCkHBweio6Np06YNK1asIDEx0dYl1cmZb+I1X9f0WktKSsjMzCQmJoaMjIyr2s7ChQvZtGkTlZWVxMbG0q5dO3Q6HT169EDTNDRNIz09nYcffpjc3FwqKirYvXs3PXv2ZMSIEdbVyGp6tllZWfznP/8hMTGRiRMn4uXlRUJCAklJScTGxjJ79mymTZtGQUEB4eHhREZGEhkZiaZplJSU8Oijj5Kenk5QUJD1OGzfvn0ZM2YMLi4umM1mtmzZQlZWFg899BAAx44dQ9M0ysvLMRqNuLi48MYbbzBq1Ch8fHwoKCi4qn10JSorK9mxYwcPPfSQXIRC2AUJZWFz/v7+9O3bl4ULF5KVlUVwcLCtS7pqISEhlJWV4erqSkRExFWHcv/+/Rk5ciQlJSXMnTuXXbt2MWTIEDZs2MAPP/xAVlYWVVVVFBUVUVpaSlVVFcXFxbRu3RpXV9fz2tu1axdeXl6cOnUKd3d3ALp06cK8efPo2LEjqampbNu2jcLCQjp16kRKSgqLFi1i586dlJWVkZqaSlFRERaLxRrKXbp0sW7LbDZTUFCAh4cH/v7+aJpGcHCw9fFKKZRS3HDDDQQGBmI0Gq0fGBpTcnIyBoOBjh07NupMdCEuRIavhc05OzvToUMHnJ2d2bp1q63LqRfOzs5MnjyZBx54gMDAQOvtjo6OmEwmoHot7bKyMut9Sin0en2tE7gCAwNp3749AwYMID4+nq1bt5KTk8PLL7/M6NGjeemll/jXv/6Fs7MzFosFvV6Pk5PTWe2fqW/fvjz11FPs2rWLdevWAdCpUycOHDhAcnIyo0aNsoZycHAwK1asoLS0lEceeYT//Oc/tGvX7rw2zww1nU6Hi4sLFRUVaJqG0WjEYDCgaRo+Pj64ubmRnZ1tHVo/efLkece7G8O2bdto27Yt3t7eMnQt7IKEsrA5pRQRERFER0ezbt06a2g1ZUopPD098fX1Pes0m+DgYLZu3UpJSQnff/89ZrPZep+DgwP+/v6kpKRQVFR0XpuaplFQUMChQ4fw9PSksrKSoqIiWrVqRUBAAGvXrrUeM/b19SUsLIxff/2VtLQ0kpKS2LFjB6WlpUD1ZLTIyEjuvfde/vOf/5CXl4enpyfe3t5s2rSJnj17kp+fT2lpKcHBwZSWluLu7k5YWBjJyclkZ2dfdPa3Xq8nKiqKyspKfv/9d/bt28fOnTuB6g8sCQkJrFmzhoyMDObPn4+joyNdunSpl31
fF9u3byc+Pr5JrgMumicJZWEXAgMD6dGjBydOnODw4cO2LqfB3HjjjSQlJTFp0iSKiorw9PS03ufg4ECHDh0IDAxk1KhRPPXUU1RVVQHw/vvvc+2113LLLbfg4eHB5MmTiY6OZvr06dx9991MmDABg8FgvbiHn58f48ePx9HRkVmzZvH222+jlLKGT83w8ZAhQ+jevTsvvvgiOp2OLl26EBISgpeXFx06dMDf3x9fX1/69evHvn37uOmmm1i/fj3e3t6XHO6NiIhgwoQJvPrqq7zzzjs4OTlZP6BMmTIFV1dXbr/9dr799luefvrpWoOxIU+RysvL48CBA3Tt2rXWIX4hbEHZ8xVvEhMTtW3bttm6DNEINE1jz549zJ49m0GDBjFt2jS7G068/fbbCQwMvOja1zWTrgDrOa81t9W8npoeplIKTdPQ6XTW+2oea7FYUEqh0+ms39c8p+bfuds7t72a+2qr58y2ai5HWbOtM9uqed65NdQ8vuZ7s9l81us4s7Yz32MuVPuZbV1MzdrX9913Hx06dLjoYy9l2bJlvPPOO7z88su0b9/e7n7f7InRaORvf/sbLi4uvPTSS7Yup8lTSm3XNK3WWa0ys0HYBaUUbdq0ITo6mu3btzNu3Di8vLxsXVadnTkD+8zbzvz+YqtG1Tz2zEUszv2+tu1drK1LPefcbV3o65q6zwzkGrW9pgttv+YDii1DUNM06/FkHx8fCWRhN2T4WtgNDw8PunTpQkFBAUlJSU32usXN/Q3+al/fmb1wW6msrGTPnj20b98eb29vm9YixJkklIVd6dixI05OTuzdu/esSVCiebH1B5f9+/dTUVFB+/bt5XiysCsSysKuREdHEx8fT1JS0lWf2ysuzNY9VVvSNI29e/cSFhZGcHCwzT8gCHEmCWVhVxwcHEhMTKSgoICDBw826Ysu2LOWHERGo5F9+/YRFRV11jnkQtgDmegl7E5CQgJeXl7s2bOH3r1729Uxv6KiIjIyMmyy+pSoPo3pas9jP3HiBFlZWfTr18+ufreEAAllYYe8vLzo06cPK1euJD09HS8vL7vo2fn5+bF//34WLlzY7C7xl52djZeXl91fpSszM5PWrVtf1Yei5ORkXF1diYqKanY/R9H0SSgLu9SvXz8WLVrEgQMHaNu27QXPC25MEyZMoHPnzs1yAtrcuXM5deoUf/7zny96ypY9iI6Oti6SUldGo5EDBw7g6+trvcSlEPZEQlnYpejoaDp27MjmzZsZMGAAISEhti6JgQMHMnDgQFuXUe80TaN169Y88MADtGvXjsGDB9u6pAaTnZ1Neno68fHxVxzsQjQkGbsRdsnBwYHhw4eTlJREamqqTPhqQEopBg4cyI033shLL71Ebm6urUtqMMeOHcNgMBAbGyvzAoRdklAWdqtr1674+/uzYcMGysvLbV1Os+bg4MCsWbMwmUx89NFHti6nQZjNZo4cOYKDgwOtW7e2dTlC1EpCWdgtNzc3rrvuOlasWEFhYWGLPre2oSmlCAkJ4bHHHuO7775jx44dti6p3uXn53PkyBHCwsLkeLKwWxLKwq5df/31FBcXs3Xr1mZxSUd7ppSiX79+DBo0iFdffdV6GcjmIjU1lezsbOuqcULYIwllYdf8/f0ZOnQoP/74IxUVFbYup1lTSuHh4cGsWbNIS0tj3rx5ti6p3lgsFk6cOEF5eTmdO3e2i1PshKiNhLKwa0opJk+ezIYNGzh27JhM+GpgSilatWrF7bffzkcffURaWpqtS6oX+fn57Nu3j7CwMGJiYmxdjhAXJKEs7F67du1ITExk/vz5MoTdwJRSuLi4MHToUMLDw/nggw+axbH8rKwsUlJS6NGjh92fhy1aNgllYfd0Oh3Tp0/nu+++o7y8vFmEhD1TShEWFsatt97K+vXr2bRpU5Pe5xaLhZMnT5KdnU337t1l6FrYNQll0SSMGDECNzc3Fi1a1KQDoqlwcHCga9eu9OjRgy+++KJJn5JWXFzM9u3bCQsLo23btrYuR4iLklAWTYJer+fWW2/l008/xWQySTA3MKUUwcHBTJgwgaysLJYsWdIkj+drmkZ+fj4HDhygX79+sta1sHvyGyqajOnTp7Nv375meQ6tPVJKkZCQQJ8+fVi8eDFpaWlN7sOQpmlkZmaSlpZG3759bV2OEJckoSyajKCgICZOnMicOXOaXDg0VZ6engwfPhyTycTSpUupqqqydUl1Ul5ezsaNGwkPDyc2NtbW5QhxSRLKokm54447+PXXX0lPT7d1KS1Gp06dGDRoEL///jt79uxpMsPYmqZRUlLCli1bGDJkiAxdiyZBfktFk9KzZ08SEhL43//+Z+tSWgy9Xm+daLd8+XIKCgpsXdJlsVgspKWlkZKSwjXXXGPrcoS4LBLKoklRSnHvvffyzTffkJeXZ+tyWoyoqCgmTJjA7t272bp1a5MYxq6qqmLVqlW0adNGLkAhmgwJZdHk1PTafvrpJ1uX0qIMHDiQ2NhYFi9ezKlTp+z6uL6maZSWlrJixQpGjx4tQ9eiyZDfVNHkuLq68uc//5mPPvqIsrIyW5fTYnh4eDB16lQyMzNZtWqVXa9FbrFY2LNnD7m5uQwZMsTW5Qhx2SSURZM0adIkCgoKWLVqla1LaVE6dOjA2LFjmT9/PocPH7bbSV8mk4mFCxfSv39/QkJCbF2OEJdNQlk0OUopAgMDufHGG5kzZ46sh92IdDod48aNIyAggK+//touRyo0TaOgoIDffvuNyZMny7KaokmRUBZNkk6n49Zbb+Xo0aNs2bLF1uW0KL6+vtx3332sWbOGdevW2d2HIk3TWLZsGb6+vvTo0cPW5QhRJxLKosmKiIhg6NChfPbZZ7YupcXp0qUL48eP57XXXqOkpMSuJn1pmsbXX3/NhAkTcHNzs3U5QtSJhLJokpRSODk5MWPGDH7//XcOHTpk65JaFEdHR+6++26MRiMffvih3Rxb1jSNo0ePsm3bNiZPnmzrcoSoMwll0WQppWjbti3dunXj+++/t3U5LY6Pjw9PP/00b775JocOHbKb3vK3335Lnz59iIqKsnUpQtSZhLJospRSeHh4cOONN/Ljjz/KYiKNTCnFoEGDGDlyJI899pitywH+GLq+5ZZb5Nxk0STJb61o0vR6PT179iQgIIBffvnFbnprLYWDgwP/+te/2LZtG/Pnz0fTNOs/i8WC0Whs0O1///33LFu2jMrKSjRNY/369WRmZjJ27NgG3a4QDUVCWTRpSil8fHwYNmwYP/74Y4OHgDhfUFAQzz33HI888giFhYVYLBYyMjJ47LHHeOaZZxp024sXL2bcuHHExMTw9NNP8/777zNx4kTc3d3lA5pokiSURZPn7u7OkCFDKCwsZMOGDbYup8VRSnHnnXfSunVrnn/+eb7++msSExOZPXs2O3fubNBJYBUVFdZrJr/66qt888037Nmzh2+//ZaioiIMBoOEs2hSJJRFk6eUIiwsjO7du/Pzzz9jNpttXVKLomkalZWV3HXXXcybN4/bb7+drKwsqqqqyM/P59
      "image/png": "iVBORw0KGgoAAAANSUhEUg... (base64-encoded PNG output omitted; cell output shows the rendered computation graph)",
" - ] - }, - "metadata": { - "needs_background": "light" + "image/svg+xml": "\n\n\n\n\n\n%3\n\n\n\n139993376892384\n\nloss\n ()\n\n\n\n139993376862752\n\nMseLossBackward0\n\n\n\n139993376862752->139993376892384\n\n\n\n\n\n139993376862800\n\nAddBackward0\n\n\n\n139993376862800->139993376862752\n\n\n\n\n\n139993376862896\n\nAddmmBackward0\n\n\n\n139993376862896->139993376862800\n\n\n\n\n\n139993377217840\n\nAddBackward0\n step1.fc.bias\n (1)\n\n\n\n139993377217840->139993376862896\n\n\n\n\n\n139993376863136\n\nAccumulateGrad\n\n\n\n139993376863136->139993377217840\n\n\n\n\n\n139993376863664\n\nAddmmBackward0\n\n\n\n139993376863136->139993376863664\n\n\n\n\n\n139993376891904\n\nstep0.fc.bias\n (1)\n\n\n\n139993376891904->139993376863136\n\n\n\n\n\n139993376863088\n\nMulBackward0\n\n\n\n139993376863088->139993377217840\n\n\n\n\n\n139993376863184\n\nViewBackward0\n\n\n\n139993376863184->139993376863088\n\n\n\n\n\n139993376863376\n\nSumBackward1\n\n\n\n139993376863376->139993376863184\n\n\n\n\n\n139993376863472\n\nMseLossBackwardBackward0\n\n\n\n139993376863472->139993376863376\n\n\n\n\n\n139993376864000\n\nTBackward0\n\n\n\n139993376863472->139993376864000\n\n\n\n\n\n139993376863568\n\nAddBackward0\n\n\n\n139993376863568->139993376863472\n\n\n\n\n\n139993376863664->139993376863568\n\n\n\n\n\n139993376863760\n\nTBackward0\n\n\n\n139993376863760->139993376863664\n\n\n\n\n\n139993376863856\n\nAccumulateGrad\n\n\n\n139993376863856->139993376863760\n\n\n\n\n\n139993377218464\n\nAddBackward0\n step1.fc.weight\n (1, 5)\n\n\n\n139993376863856->139993377218464\n\n\n\n\n\n139993376891664\n\nstep0.fc.weight\n (1, 5)\n\n\n\n139993376891664->139993376863856\n\n\n\n\n\n139993376862848\n\nAccumulateGrad\n\n\n\n139993376862848->139993376862800\n\n\n\n\n\n139993376862848->139993376863568\n\n\n\n\n\n139996637619600\n\nmeta_param\n ()\n\n\n\n139996637619600->139993376862848\n\n\n\n\n\n139993376863040\n\nTBackward0\n\n\n\n139993376863040->139993376862896\n\n\n\n\n\n139993377218464->139993376863040\n\n\n\n\n\n139993376863424\n\nMulBackward0\n\n\n\n139993376863424->139993377218464\n\n\n\n\n\n139993376863616\n\nTBackward0\n\n\n\n139993376863616->139993376863424\n\n\n\n\n\n139993376863808\n\nTBackward0\n\n\n\n139993376863808->139993376863616\n\n\n\n\n\n139993376863904\n\nMmBackward0\n\n\n\n139993376863904->139993376863808\n\n\n\n\n\n139993376864000->139993376863904\n\n\n\n\n\n" }, + "metadata": {}, "output_type": "display_data" } ], "source": [ - "from matplotlib import pyplot as plt\n", - "from matplotlib import image as imgplt\n", - "\n", "class MetaNet(nn.Module):\n", " def __init__(self, dim):\n", " super().__init__()\n", - " self.fc = nn.Linear(dim, 1)\n", + " self.fc = nn.Linear(dim, 1, bias=True)\n", "\n", " def forward(self, x, meta_param):\n", " return self.fc(x) + meta_param\n", "\n", + "\n", "dim = 5\n", "batch_size = 2\n", - "net = MetaNet(dim).cuda()\n", + "net = MetaNet(dim)\n", + "\n", + "xs = torch.ones((batch_size, dim))\n", + "ys = torch.ones((batch_size, 1))\n", + "\n", "optimizer = torchopt.MetaSGD(net, lr=1e-3)\n", "meta_param = torch.tensor(1., requires_grad=True)\n", "\n", - "xs = torch.ones(batch_size, dim).cuda()\n", + "# Set enable_visual\n", + "net_state_0 = torchopt.extract_state_dict(net, enable_visual=True, visual_prefix='step0.')\n", "\n", "pred = net(xs, meta_param)\n", - "loss = F.mse_loss(pred, torch.ones_like(pred))\n", - "# set enable_visual\n", - "net_state_0 = torchopt.extract_state_dict(\n", - " net, enable_visual=True, visual_prefix='step0.')\n", + "loss = F.mse_loss(pred, 
ys)\n", "optimizer.step(loss)\n", - "# set enable_visual\n", - "net_state_1 = torchopt.extract_state_dict(\n", - " net, enable_visual=True, visual_prefix='step1.')\n", + "\n", + "# Set enable_visual\n", + "net_state_1 = torchopt.extract_state_dict(net, enable_visual=True, visual_prefix='step1.')\n", "\n", "pred = net(xs, meta_param)\n", "loss = F.mse_loss(pred, torch.ones_like(pred))\n", - "# draw computation graph\n", - "torchopt.visual.make_dot(loss,\n", - " [net_state_0, net_state_1,\n", - " {\"meta_param\": meta_param, 'loss': loss}]\n", - " ).render(\"meta_graph\", format=\"png\")\n", - "plt.figure(figsize=(20,20))\n", - "plt.imshow(imgplt.imread('meta_graph.png'))" + "\n", + "# Draw computation graph\n", + "display(torchopt.visual.make_dot(loss, [net_state_0, net_state_1, {'meta_param': meta_param, 'loss': loss}]))" ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { - "interpreter": { - "hash": "238ad0feaa04228775e5e27229169b0e3e76c0e018d5a6d65c4906ccad5c5a9e" - }, "kernelspec": { - "display_name": "OpTorch", + "display_name": "Python 3.8.13 ('torchopt')", "language": "python", - "name": "optorch" + "name": "python3" }, "language_info": { "codemirror_mode": { @@ -370,7 +192,12 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.7" + "version": "3.8.13" + }, + "vscode": { + "interpreter": { + "hash": "2a8cc1ff2cbc47027bf9993941710d9ab9175f14080903d9c7c432ee63d681da" + } } }, "nbformat": 4, diff --git a/tutorials/3_Meta_Optimizer.ipynb b/tutorials/3_Meta_Optimizer.ipynb index a846c81c..76d43132 100644 --- a/tutorials/3_Meta_Optimizer.ipynb +++ b/tutorials/3_Meta_Optimizer.ipynb @@ -4,42 +4,41 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# torchopt as Meta-Optimizer" + "# TorchOpt as Meta-Optimizer" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "In this tutorial, we will show how to treat TorchOpt as a differentiable optimizor with traditional PyTorch optimization API. In addition, we also provide many other API for easy meta-learning algorithm implementations." + "In this tutorial, we will show how to treat TorchOpt as a differentiable optimizer with traditional PyTorch optimization API. In addition, we also provide many other API for easy meta-learning algorithm implementations." ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## 1. Basic API for differentiable optimizer\n", + "## 1. Basic API for Differentiable Optimizer\n", "\n", - "`MetaOptimizer` is the main class for our differnetiabl optimzier. Combined with the functional optimizer `torchopt.sgd` and `torchopt.adam` mentioned in the tutorial 1, we can define our high-level API `torchopt.MetaSGD` and `torchopt.MetaAdam`. We will discuss how this combination happens with `torchopt.chain` in Section 3. Let us consider the problem below." + "`MetaOptimizer` is the main class for our differentiable optimizer. Combined with the functional optimizer `torchopt.sgd` and `torchopt.adam` mentioned in the tutorial 1, we can define our high-level API `torchopt.MetaSGD` and `torchopt.MetaAdam`. We will discuss how this combination happens with `torchopt.chain` in Section 3. Let us consider the problem below." ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Assume a tensor `x` is a meta parameter and `a` is a normal parameters (such as network parameters). 
We have inner loss li = `a0` * x^2 and we update `a` use the gradient dl/d`a0` = x^2 and `a1` = `a0` - dl/d`a0` = `a0` - x^2. Then we compute the outer loss lo = `a1` * x^2. So the gradient of outer loss to x would be:\n", + "Assume a tensor $x$ is a meta parameter and $a$ is a normal parameters (such as network parameters). We have inner loss $\\mathcal{L}^{\\textrm{in}} = a_0 \\cdot x^2$ and we update $a$ use the gradient $\\frac{\\partial \\mathcal{L}^{\\textrm{in}}}{\\partial a_0} = x^2$ and $a_1 = a_0 - \\eta \\, \\frac{\\partial \\mathcal{L}^{\\textrm{in}}}{\\partial a_0} = a_0 - \\eta \\, x^2$. Then we compute the outer loss $\\mathcal{L}^{\\textrm{out}} = a_1 \\cdot x^2$. So the gradient of outer loss to $x$ would be:\n", "\n", - "dlo/dx\n", - "\n", - "= da1/dx * x^2 + a1 * d(x^2)/dx\n", - "\n", - "= d(a0 - x^2)/dx * x^2 + 2 * a1 * x\n", - "\n", - "= -2 * x * x^2 + 2 * (a0 - x^2) * x\n", - "\n", - "= -2 * x^3 + 2 * a0 * x - 2 * x^3\n", - "\n", - "= -4 * x^3 + 2 * a0 * x" + "$$\n", + "\\begin{split}\n", + " \\frac{\\partial \\mathcal{L}^{\\textrm{out}}}{\\partial x}\n", + " & = \\frac{\\partial (a_1 \\cdot x^2)}{\\partial x} \\\\\n", + " & = \\frac{\\partial a_1}{\\partial x} \\cdot x^2 + a_1 \\cdot \\frac{\\partial (x^2)}{\\partial x} \\\\\n", + " & = \\frac{\\partial (a_0 - \\eta \\, x^2)}{\\partial x} \\cdot x^2 + (a_0 - \\eta \\, x^2) \\cdot 2 x \\\\\n", + " & = (- \\eta \\cdot 2 x) \\cdot x^2 + (a_0 - \\eta \\, x^2) \\cdot 2 x \\\\\n", + " & = - 4 \\, \\eta \\, x^3 + 2 \\, a_0 \\, x\n", + "\\end{split}\n", + "$$" ] }, { @@ -55,23 +54,29 @@ "metadata": {}, "outputs": [], "source": [ + "from IPython.display import display\n", + "\n", "import torch\n", "import torch.nn as nn\n", + "import torch.nn.functional as F\n", + "\n", + "import torchopt\n", + "\n", "\n", "class Net(nn.Module):\n", " def __init__(self):\n", " super().__init__()\n", - " self.a = nn.Parameter(torch.tensor(1., requires_grad=True))\n", + " self.a = nn.Parameter(torch.tensor(1.), requires_grad=True)\n", " \n", " def forward(self, x):\n", - " return self.a * x ** 2" + " return self.a * (x ** 2)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Then we declear network and x. Do not forget to set flag `requires_grad=True` for x." + "Then we declare the network (parameterized by `a`) and the meta parameter `x`. Do not forget to set flag `requires_grad=True` for `x`." ] }, { @@ -81,14 +86,14 @@ "outputs": [], "source": [ "net = Net()\n", - "x = torch.tensor(2., requires_grad=True)" + "x = nn.Parameter(torch.tensor(2.), requires_grad=True)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Next we declear meta optimizer. The meta optimizer takes as input the network and use method `step` to update the network." + "Next we declare the meta optimizer. The meta optimizer takes as input the network and use method `step` to update the network (parameterized by `a`)." 
] }, { @@ -100,33 +105,39 @@ "name": "stdout", "output_type": "stream", "text": [ - "tensor(-28.)\n" + "x.grad = tensor(-28.)\n" ] } ], "source": [ - "import torchopt\n", - "\n", "optim = torchopt.MetaSGD(net, lr=1.)\n", + "\n", "inner_loss = net(x)\n", "optim.step(inner_loss)\n", + "\n", "outer_loss = net(x)\n", "outer_loss.backward()\n", - "# x.grad should be:\n", - "# = -4 * x^3 + 2 * a0 * x \n", - "# = -4 * 2^3 + 2 * 1 * 2 \n", - "# = -32 + 4 \n", - "# = -28\n", - "print(x.grad)" + "# x.grad = - 4 * lr * x^3 + 2 * a_0 * x\n", + "# = - 4 * 1 * 2^3 + 2 * 1 * 2\n", + "# = -32 + 4\n", + "# = -28\n", + "print(f'x.grad = {x.grad!r}')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "### 1.1 Track the gradient of moment\n", - "Note that most modern optimizers involve moment term in the gradient update (basically only SGD with momentum = 0 does not involve). We provide an option for user to choose whether to also track the meta-gradient through moment term. The default option is `moment_requires_grad=True`.\n", - "- When you do not track the meta-gradient through moment" + "### 1.1 Track the Gradient of Momentum\n", + "\n", + "Note that most modern optimizers involve moment term in the gradient update (basically only SGD with `momentum = 0` does not involve). We provide an option for user to choose whether to also track the meta-gradient through moment term. The default option is `moment_requires_grad=True`." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "- When you do not track the meta-gradient through moment (`moment_requires_grad=False`)" ] }, { @@ -135,55 +146,41 @@ "metadata": {}, "outputs": [ { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] }, { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAUIAAANSCAYAAAAQ5McuAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAADsPklEQVR4nOzdd3wUZf7A8c9szW6y6b2RkISQ0HsXDCJNEAULimCX07O3s53o2fWsZxfPE2wgFqRI7xA6CSUJpJEQEtJ7Ntvm9wfu/oigBkiym+R5v168IMPOzDezs9995qmSLMsIgiB0ZgpnByAIguBsIhEKgtDpiUQoCEKnJxKhIAidnkiEgiB0eiIRCoLQ6bV5IpQkaYIkSRmSJGVKkvSPtj6/IAjC70lt2Y9QkiQlcBQYB5wAdgMzZVk+0mZBCIIg/E5blwgHA5myLGfLsmwCvgWubOMYBEEQmlC18fnCgPwzfj4BDPmzHfz9/eWoqKjWjEkQhE4gNzeX0tJS6Vz/19aJ8FxBnPVsLknSncCdAJGRkezZs6e142px7XHooiSd8x4RhA5h4MCBf/h/bZ0ITwARZ/wcDpz8/YtkWf4E+ARg4MCB7S6jyLJMeXk5x48fd3Yozebm5kZiYqKzwxAEp2jrRLgbiJMkKRooAK4HbmjjGNrEL7/8wmvvfUCXhB7ODuUvmY1GKvOy2LNrl7NDEQSnaNNEKMuyRZKkvwOrACXwuSzLh9syhraiVKoYPHY8E2ff7uxQ/lJNeRn/e/YRZ4chCE7T1iVCZFleAaxo6/M6g0KpRK3WODuMv6RUqZ0dgiA4lRhZIghCpycSYUfR7pqUBMF1iETYDsiyTH7mUY6m7KOhrpYDWzdSU1nR9EV/1vNFJElB+FNtXkcoXBg3nZ7NvyzhRNYx6murSRw4tPk7i+6BgvCnRImwHZAkCf/QMCJi4ln3/dcMGTsRtVbr7LAEocMQidAl/PmzqyzLVJSc4kTWUYaOv4J9m9djMZvb5egVQXBFIhG6hD9/dpVlmZwjh+jefxBjp1+PQqGg9GRBG8UmCB2fqCNsBxQKBQPGXOb4edx1s5wYjSB0PKJEKAhCpycSYTsiyzIF2ZmUFp50/GzvUiMIwoUTibAdsdmsrFm0EIVS6dhWWljA/s0bRMOJIFwEkQjbWrPzlfy7vyHrUAp6gyc+AYHA6W41fUeOYdf6VVjM5hYNUxA6E5EI21qzOzdLv/sbCrIzCQqPbDKBqpveHZVaTWVpSYuFKAidjUiE7YjFYkGpPLuhX6FQYLNazrGHeFwWhOYQibAdCY7oQmlR0/6DZlMjjQ0NePn5n2MPMbZOEJpDJMJ2JLZXX4pP5GNsqAdOtxpnHUwhtmcftDq9k6MThPZLJMJ2xE3vzsjJ0yg+kefYVllWwugrZ4iFlwThIoiRJe2IJEkkDhra5OfhE6Y4MSJB6BhEibCjEO0ignDBRCLsKMTErIJwwcSjcSuRkamprKAoL9fZofyl2qpKbFars8MQBKcRibCVREZEoK1Zxfb/vd+ix929ezfR0dH4+5+ru8yFGzV8WIseTxDaE5EIW8moUaMYNqzlk8t9993HjTfeyJAhQ1r0uKLVWejMRCJsBZIkoVQqUZ4xOUJLkGUZpVKJWq1GK6bqF4QWIxpLBEHo9EQiFASh0xOJUBCETk8kQkEQOj2RCAVB6PREIhQEodMTiVAQhE5PJEJBEDo9kQgFQej0RCIUBKHTE4lQEIROTyRCQRA6PZEIBUHo9EQiFASh0xOJUBCETk8kQkEQOj2RCAVB6PREIhQEodMTiVAQhE5PJEJBEDo9kQgFQej0xCp27UBOTg4nT55ElmWKi4s5ePAgVqsVDw8PunXrhl6vd3aIgtCuiUTYDqxdu5Z58+ZhsVior69nw4YNqFQqhg8fzkcffSQSoSBcJPFo3A5ceumlVFdXU1xcTG1tLeXl5ZSXlxMbG0tAQICzwxOEdk8kwnYgPDyckSNHIkmSY5u7uzszZsxAoRBvoSBcLPFo3A5oNBquuuoq1qxZg9VqBSAqKorevXs7OTLBFTU2NnLkyBGqqqqcHcoFi4+PJyQkpM3OJxJhOyBJEoMGDSIuLo709HSUSiWzZ89Gq9U6OzTBBZWWlvKvF1+iTqFF5+7u7HDOW97RNO69/RZuueWWNjunSITtgCRJxMTE0K9fP9LT0/H29mb48OFNHpUFwU6WZbz8/Jkw8zb8Q8KcHc55W/Hlp9DG97aoYGonDAYDY8eOxWAwMGbMGGJjY0UiFFqX7OwA2o4oEZ7BZrNhsVicHcYfSkpKIiAggAEDBuDh4YHJZHJ2SOekUChQKpUiUbsCGbjQt+Gv9ruYY7sYkQjPsGXLFp555hlUKte8LLIsU19fz3fffceaNWucHc45WSwWBg0axGuvvYZSqXR2OMI5E9W5M9iqb74kcfBQImK6NfvYddVVrF60kKyDKQyfOJUhl01A6aKfnz/T/iJuRVVVVSQmJnLjjTe6bDIsLS3F19fXZbvNpKamsn//fmS5Ez1XtTPG+noy9u+htKiQLt26E9ktgWOp+9i3eR3lJUUk9B9Cr6EjMJsaydi/h5rKCrr26E1Il2iyDqVgMhqpKCmmW98BFGRnUltZyXX3PszKr74gunsiIVFdzzpnaWEBh3btQKlU0jWxFyFRXV3qHnbNT7sTabVaPDw8UKvVzg7lnAwGg7ND+FN6vd6lbnDhbEdT9pG6fQtde/RCrdUiSRJaNx0arRad3h03vR6bzcbu9aspP1WEzsPAmkVfMfO+R9mxejlWsxmfwGDyFqfjFxxKaFQ0fsGheHh5U1tVec5zKpQq3HQ6aior2LL8JybeeAtevn5t+4v/CZEIXVhNTQ1paWkEBQURGRnp9Do3+1hnNzc3vLy8nBqLcOHcDV4YG+opKyokKDIKpUpJbK++hMXE0XPoSGJ79qG+tobDu5M5mZN1uguOJGGTbQAMvXwy4TFxfPavp/D2D6C2uo41i75C4tztK7IsU1ZYwN5N66goKcbcaOTSq651qUQovrrP086dO1m+fDlms7nVz1VXV8fq1atZtmwZNpvtT1+blpZGWlranz6Sms1mdu7cyQcffMCWLVvOu7FFlmXWr1/P4cOHz2s/wbV07dGLGX+7H41Ox/YVP2MyGgFQqtTUVVchyzIqtRof/wCm3HwXj//nc5744AvcdKfHtBfkZFJZWoxSrcY3KARjbS2Dki6nrqYad8/TX5DFJ/JZ9c3/ALDZrGQeSqFb7/7MeugJfAODwcWqTkSJ8DzYbDaWLl2Ku7s7ffr0ITw8HFmWqaqqor6+HrVajZ+fHw0NDVRXVyNJEn5+flgsFmw2GzqdjtraWnQ6HfX19TQ0NKDVarFYLPj6+mI0GlEoFGg0Gurq6ggICGDChAkcOnTIEYPFYqGkpARZlvHw8MDDw4P6+nr27t0LgK+vL76+viiVSmpqaqivr0ev12MwGDhy5AirVq1i4sSJ1NXVUV1djbu7O/X19ZhMJtzc3PD09KSkpMQRr5eXF5IkOX7HtvgCEFpX6vbNrPz6C1QqNa
OmXIVa6wZAr6Ej+fGT90jbs5Or77yXEZOuZPV3C1j93ZcEhkUw5/F/AnA8I419m9ZzydTpdO83kNy0Qyx4/V8MuWwiQWERANRUlnNkTzLjZ85BoVAS27MPy778jGMHD+DtH+hyDSquFY2LO3nyJBqNhujoaPLz8wkLCyMnJ4dvvvkGhUJBREQE48eP5/vvv6eiogIfHx9mzJhBSkoKtbW1jBkzhm+++YZx48axbNky8vPz0Wg0WCwW7r33Xvbs2YO3tze9e/fmm2++4YYbbjgrhurqar777juMRiN6vZ5rr72WgoICtmzZAkBDQwNTp07FZDLxww8/YDQacXNz46qrriI1NZV+/foxcOBAx/FWrVrF8uXLCQsLo2fPnowePZrvv/+euro6FAoF11xzDZ6enrz77rtotVoqKyuJjY1ts2sutLzewy+h9/BLztoe33cA//jgC8fPYV1jueWJ58563dgZM4mM6+74+dq/P3zWa7r26M0Db3wAnB4QENu7Hw+88X4LRN86RCJsJlmWOXjwIPHx8fj4+JCTk8PgwYPZunUr/fv3Z9y4cahUKvbt24fVauW+++7Dw8PjD4/n6enJlClT2Lx5M+Hh4dTW1jYrBpVKhYeHB8ePH6e0tJQRI0YwYMAArrzySgAmTpwIwNKlS1m/fj3h4eEUFxczePBgGhsb0el0pKSk8MUXX3D33XcjyzI9e/bklltuQaPRYDKZ8Pb25vjx4xQUFNCvXz98fHzw8fHh9ttv59NPP22ZCyo4zR/VNTenDnrs1dfjExiE1WpBoTjdV9T+7zMbyX5/LGfXb/8VkQibyWQyceTIEY4dO4ZKpcJqtXL11VcDnFUv9/uflUolRqOR+vp6qqurAVCpVLi7u+Pm5oZGo8Fms6FSqaivr6e2tpaamhrH64xGo+OYhw8fpqamhkceeYQFCxY4JmFQKBTU19c7zqnVarnqqquYPn06arUalUpFaWkp+fn5DB48mJiYGKqqqlAoFAQHBztayXNzczl69CgPP/ww33//PRaLpclNLLrFdG6h0THUVVex4sv5jJw8DZ/AIDb9/D2hUTEkDBjs7PAumGgsaaby8nKsVivPP/88b775JgEBARQWFjJq1Cj279/Pq6++ypdffklkZCQqlYq3336b//znP5SUlBAWFsahQ4f48ssvqaqq+sNvx6ioKLZv3863337rKCGGhYVRWlrKRx99REFBAX5+fhQUFPDll19SWVnp2DcyMpI9e/bw8ccfU1xcTGJiIsXFxbzzzjt8/PHHFBUVMWDAAMrKynjrrbcoLy/Hz+//W+3sMXl5eVFfX8/nn39OYWEhANHR0VRUVPDWW29RVFTUSldYaC8KsjOxyTa8AwKRJImeg4ezfeUv7fpLUpQImykgIIC5c+fi4eGBQqHgkUcewc3NDbVazb333utoLPH19WXOnDnU1tYiSRLe3t74+vry0EMPYbPZUKvV6PV6RyksPj4epVKJWq12HPfMR2CFQsG9996LyWTCy8sLpVLJE088gc1mQ6vVOmagiY+P5+GHHz494P631/3tb3+jvr7eEYdarWbu3LnU1dWh1WoxGAyEhIQ0uYEDAwN58sknsVgsjt/Pzc2Nhx56CKPRiFarxc3NzVlvg+AC0vfvoUu3BMejcEBYBDVVFVSVleLt3z4nChaJsJlUKhWenp6On3//7zN/1uv1Z02f7+vr2+Rn+6OoRqP509cBZ/XZO7MkZ6dUKs/abjAYzuqAbW9pttPpdE3+X5Kkc8bw+99R6LyqyktJ8Pr/x2BJktC5e1BbVdluE6F4NBYE4bwYvLypr6luss1YX4fe0H6/KEUiFAThvMT3G8Txo+mOTv6lhQXo3D3wCQh0cmQXTjwa/44sy44/giCcLSK2G+n7dlNVWoJ3QCAHd2xl2PjJLt9F5s+IRHgGrVZLRkYGn3/+uUu9qSaTCYVCcc4ZcSwWC42Njbi7yJTspaWleHt7OzsMoSX8wXyD7p5eTLrpVlTq0/XbIyZdiUrjmpOUNJdIhGfo06cP9913n6NvniuQZZn58+c7ZqX+vYqKCj7++GNuu+02l1naMyQkRMxA4ypaYWJW+2w19mNrf9fg1h6JRHiG4OBgJk+e7OwwmsjJyUGj0XDLLbfg4+Nz1v9bLBZOnDhBbW0ts2fPdtnpwwQnOY+JWVvm2O2T+Np2YbIss23bNvr16/eHXVfsK9qlpKRw8OBBUbcpNEMHymAtRJQIXVhdXR0pKSmMHz/+Dx81JUkiNDSUpKQkvvzyS3r37u2ys2sLbcdkNJKbdpiKkuLWOUErrldSWngSYiNa5+B/QHxiXFhRURFlZWX06NHjTxtvlEolM2bMYNeuXfzwww9Mnz5drBfSien1enomxJOxZwuFrdDoV1dXx9GjGfTu0xdlK9QF+0gy0V3Pnu6/NYlE6MJ27NhBTEwMgYF/3T/Lzc2NuXPn8uKLLzJw4EC6tvGNJLgOX19fnvjHP1rt+JmZmfzrX//i4w/e7zDDLUUdoYsymUz8+uuvTJw4sdmlu7i4OAYPHsx3333nskt9CoIrEonQRe3evRu1Wk2fPn2avY9KpeKaa67h8OHDpKSktGJ0gtCxiETogqxWK2vXrmXChAnn1R9PkiRCQkK45ZZb+OCDD6iqqmrFKAWh4xCJ0AWdOHGC48ePM2LEiPPeV5IkRo8eTUREBN98841YY0QQmkEkQhcjyzJHjx4lICAAPz+/Cxrqp1Qquf7669m2bRt5eXmib6Eg/AWRCF2M1Wpl48aNDBky5Ky5AptLkiTi4+NJSkpi/vz5olQoCH9BJEIXU15eTnp6OkOHDr2oiR/sfQuLiopYt26dKBUKwp8QidCFyLLM2rVr6dGjB8HBwRd9PIPBwN/+9jcWLVpEQUGBSIaC8AdEInQhdXV1rFixgmuuuabFZm/p06cPCQkJLF682DGRpiAITYlE6CJkWebIkSO4ubm16ALqGo2GGTNmkJycTGZmpigVCsI5iEToImw2G8nJyQwZMuSsBZ0uVnR0NLNmzeKtt95yrJcsCML/E4nQRdTW1pKenk7//v1bfMIESZIYP3487u7uLFu2zKUmnhUEVyASoYvIz8+noaGBmJiYVjm+Wq1m5syZrF692rFwuyAIp4lE6CKWLVvGyJEjz1rDuKVIkkTfvn0ZOHAg8+fPx2KxtMp5BKE9EonQBdTU1LBjxw4uu+yyVl00SqVSMXPmTI4dO8bOnTtFw4kg/EYkQhewefNmunbtSkRE68/K6+vry9y5c/n8888pLi4WyVAQEInQ6RobG9m6dStJSUltsoSoJEn079+fkJAQVq5cKRKhICASoVPJskxhYSFlZWX07t27zc6r0+mYNWsWy5cvJy8vDzjdfaehoUG0KAvnJMsyZrMZs9mMxWJBlmUsFgtmsxmr1druv1DFVP1OlpaWhp+fH4GBgW22qLx9Uobp06fzzjvv8Oyzz7Jjxw62bt3K3Llz2+QRXWhfjEYj7777LvX19ZSXl5Oamsorr7yCSqXi0ksvZdSoUW12/7YGkQidSJZlduzYQb9+/
[base64-encoded binary file data omitted — this span contains only the inlined binary payloads of the new image assets added by this patch, with no human-readable content]
pKSnhuuuuw9fXF6vVSkZGBgkJCYSGhnbK/qSU7Nmzh9mzZzNt2jSuvfZa3njjDe69916SkpJUl1g5Y6prrJwVIQTJyck8++yz7N+/n3//+99UVVWxa9cu7rrrLv7973932jlEKSVZWVkUFBSwYMECrr32WiIiItR5QeWsqSBUOkV0dDSPP/447u7uPPjggzz00ENkZmby6aefsm7duk4JQ5PJxPfff4+UEpPJRFFREV988QWvvvoqtbW1TjedR+k5VBAqnSYoKIi5c+dSWFjI+vXrsVgsFBQU8O9//5uKioqzfv3q6urfhWpRUREvv/wyP//8swpC5YypIFQ6jdls5osvvmDr1q0dRpE3bNjAkiVLzrpVuHHjRpqbm+1fu7m5kZSUxH/+8x/OO+881T1WzpgKQqVTSCkpLCzk008/xWg0dri8rbGxkWeeeYYDBw6c8etbLBYWL16M0WhECEFgYCC33HILq1at4rLLLsPX11cFoXLGVBAqnSYqKop58+bx+uuvM2vWLIKDg+3z+srLy3nllVfOeAWbwsJCDh48iBCCpKQkXnrpJf71r38RGxurrilWzpqaPqNgMBioqanplNcKCgpiypQpjBkzhptvvplly5axePFiqqur+eKLLxgxYgR/+MMfTqv1JqVk1apVHDlyhAkTJvCXv/yFAQMGUFNT02l1n4i7uzuhoaFqsnYvJpz9BHN6errMyMhwdBm92scff8yXX37ZZff0sNls1NbWUlpaislkYvDgwad1c3WbzUZ+fj4AcXFx3d4CbG5u5uWXXyYiIqJb96t0rvT0dDIyMo75F1j9iVOoqqoiKSmJadOmdel+bDYbzc3N6HS60wqz9iW2PDw8uj0EbTYbL730Uo9aoVs5fSoIFQB8fX0JDw93dBlOx2q1qi6xC1BnmZUTklJis9mQUnb4/LcaGxt54YUXePfdd6mrq7M/tv2j/fmnavXq1Rw+fLgzfxTKyspYvXo1Vqu1Q32nW5vS+6ggVE4oIyOD22+/nYqKClasWMH999+PwWD43eN0Oh1XXXUVDQ0NNDc3YzKZ+Pvf/84dd9zBgw8+yIoVK05rv9nZ2Z0+EGIwGNi3bx82m439+/fz6KOP8tBDD7Fjx45O3Y/S85y0zS+E+C8wHSiXUg5p2xYEfA7EAfnA1VLKmrbvPQrMBazAvVLK79u2jwDeB3TAd8B9Uv0Zdnpms5nw8HA2bNhAVVUV0HrebPny5VxwwQWUlJRQUVHBsGHD8Pf3t3cjpZQ0NTXx7LPPcvjwYZYuXcoFF1zApk2b2LVrFwEBAUyePJmwsDDMZjPr1q0jPz+fuLg4zj//fPv+s7OzaWhoQKfT4ePjQ0xMDAC7d++muLiYgoIC9Ho95557LvHx8fY72Lm7u3P48GFmzJiBlJJ169bR0NCA2WzGYrGwfPlyLr30UgDWrFlDWloaWq22m4+u4ixOpUX4PnDBb7Y9AvwgpUwCfmj7GiHEIGAWMLjtOW8IIdrfXfOBW4Gkto/fvqbipCIiIti9ezfBwcH2JfBXrlyJzWajuLiYzMzMYz5PSsljjz3G/PnzSU1NRQhBTEwMqampGI1Gli1bBrR2g7OzsznnnHMYPHiw/flZWVl8//33hIWFUV5eTmZmJk1NTRiNRtauXUtQUBBpaWl4enqydOlSjEYjVquVxYsXYzKZOO+88/Dy8uLrr7/Gzc0NNzc36urqaG5upqGhgZiYGPr27UtVVZXqGru4k7YIpZQ/CiHifrP5MmBi2+cfAOuBv7Rt/0xKaQTyhBC5wCghRD7gJ6XcDCCEWAhcDpxef0lxCL1ez4QJE4iIiGDbtm3A/9YYPNFlc0IInnvuOUwmE//+97+58MIL2bhxI/v376e2tha9Xg/Azp07ueaaa+jXr5/9da1WK+vXr2fUqFGEhYVhs9lYsmQJ77//PlFRUZSVlVFZWcmvv/5KY2Mjzc3N9oENX19fLrnkEjQaDXV1dRgMBi699FJqa2uprq6276OgoIDy8nI1IVs543OE4VLKEoC2/4e1bY8Ejhz1uMK2bZFtn/92u9IDaLVaJk+eTP/+/e3bdDod+fn5ZGdn28NQq9VitVoxGAz2bVar1T5tprm5mezsbK655homTpxo70ZHRkaSkZFBc3MzFovF/lqzZs3CZDKxfft2IiIiKC8vx2g0cuTIEYKCgjh06BATJ07kkksu6bAoq1arRaPRIITAy8sLIQSHDx+moKCA+vp69Ho9wcHBNDQ02EfL1eV5rq2z/xQe690kT7D92C8ixK1CiAwhREZnrFqinDl/f3/7tBohBAkJCbi5uXHppZfy3//+F4vFYp9o7OPjw9ChQ1mwYAEZGRnEx8fzz3/+k3feeYfrr7+eoKAgJk2axMKFCykpKSExMRGAGTNmUFdXx5NPPsmHH36IlJKoqCji4uKYNWsWO3fuxGKxkJaWxpgxYwgLC2PChAmMGzeOH374ge3btzNo0CB7+CUlJdnr9/T05LLLLmPNmjVkZWWRmJiIVqvlqquuYseOHWzatEnd+lM5tStL2rrGy44aLNkPTJRSlgghIoD1UsoBbQMlSCn/0fa474G/0jqgsk5KObBt+7Vtz7/tZPtWV5Z0vVdffZW6ujquuOIKR5fidKxWK48//jjz5s0jNjbW0eUoZ+FEV5ac6Z/BJcBNbZ/fBHx71PZZQghPIUQ8rYMiv7Z1nw1CiDGitQ8y+6jnKIqiONSpTJ/5lNaBkRAhRCHwNPBPYJEQYi5wGJgJIKXcI4RYBOwFLMBdUsr2henu4H/TZ1agBkoURXESpzJqfO1xvjXlOI9/DnjuGNszgCGnVZ2iKEo3UBdRKkDr/UAaGxsdXYbTab8MT+ndVBAqaDQavv/+e3bu3OnoUoDWidjOMp1FSkltba3T1KN0DRWECjfddBOXX365U1xd0dLSwquvvsrEiRMZNWqUo8sBWhdmDQkJcXQZShdSQajg6+uLr6+vo8sAoKmpCV9fX8LCwoiOjnZ0OYqLULNIFUVxeSoIFUVxeSoIFUVxeSoIFUVxeSoIFUVxeSoIFUVxeSoIFUVxeSoIFUVxeSoIFUVxeSoIFUVxeSoIFUVxeSoIFUVxeSoIFUVxeSoIFUVxeSoIFUVxeSoIFUVxeSoIFUVxeWqFasXhpJSYTCZaWlpobm6230iqrq4OIQQ+Pj5oNOpvttJ1VBAqDiel5Msvv+Sxxx7DarXS2NjIhx9+iIeHB3FxcSxevJiwsDBHl6n0YioIFYcTQpCYmEhLSwvl5eUdtk+dOhW9Xu/A6hRXoPobisO1B+GECRM63DbT29ubSZMm4e3t7cDqFFegglBxCkFBQYwePRovLy/7tv79+zN69Gh1T2Gly6kgVJyCEII//OEP9nOBQgiGDBlCv379HFyZ4gpUECpOY8iQIQwePBghBDqdjuuvvx43N3UaW+l6KggVp6HRaLj66qvRaDSEhYUxatQoR5ekuAj157YXs1gsVFdXY7VaHV3KKRs0aBCBgYFMmzaNhoYGmpubHV3SKdPr9fj7+zu6DOUMqCDsxUpKSnjmmWd61G
CD2WwmICCAQ4cO8cwzzzi6nFNmMpkYMWIE9957r6NLUc6ACsJerKGhgeLiYu6//35Hl3JaLrzwQjw9PXvUtJk9e/aQnZ3t6DKUM6SCsJfz8PCgT58+PapV2KdPH4AeVXNJSYmjS1DOghosUZyOEOKUQtBisWCxWJBSdtq+pZSYzWZsNlunvabi/FQQuqi6ujoyMjIwmUxUVVWxfft2TCbTKT8/Ozubpqams67DarWye/dufvzxR3799Vdqa2tPOdhWrVrFqlWrzrqGo9XV1fH+++9TWlqK2Wxm//79bNmyhYqKik4NXMW5qCB0Ubm5ubz22mvs27ePdevW8fLLL9PQ0ICUssMHcMxt77//fofrgo/1uFP5MJlMvPXWW+Tn5/PLL7/w1VdfHXefv91ms9nsLbdTefypfrS3Mg8cOMCiRYv49ddfWbx4cacEv+Kc1DlCF5aWlsaaNWsIDw/Hx8cHgC1btvD5559jMpn485//TEJCAps2bWL58uUIIZg1axapqam/ey0pJcuXL2ft2rX4+flx2223UVtby8KFC6mtrWX27NmMGTOGPXv2sHDhQgwGA3PnzmXQoEFoNBrS0tI4cOAAxcXFAHz00Uds2bIFf39/rr/+egYPHkxRUZG9tXbOOefg7++P1Wpl8+bN5ObmMnLkSHbu3MlVV12Fu7s769evB1pHdJctW4Zer2f69Omce+65mM1mHnroIfr27UtpaSnXXXcda9asob6+Hr1ej81mIysri6FDhzJmzBgWLFhATU1NjxrAUU6dCkIX5u3tTW1tLXq93r7Cy6+//srQoUOZOHEiMTEx1NXV8d577zFo0CCam5v55JNPjhmE9fX1rFmzhkcffZSMjAyWL1+Oj48PERER/PGPfyQuLg4hBDt27CA2NpZp06YRGxuLxWLBZrOxcuVKGhsb6d+/PwBjx45Fo9Gwd+9eVqxYweDBg/nyyy8ZPXo0kyZNws3NjWXLlrF161bc3Ny48847sVgsNDY2cuDAAZqamsjKymL69Ol4eXlhMBjYt28fK1asYMyYMUgpqa2t5d577yUuLo5vvvmGpKQkhg0bxgcffIDNZqOpqQmdTkdmZiYajQaLxdKtvx+l+6iusQtzc3PjxhtvZMKECWi1WgBmzJiBTqfj7bffZvv27TQ2NhIQEMD48eO58MILmTt37jFfq7m5GZ1OR0hIiD1Ap06dSmxsLJ9++ik//PADUkouuOAC+vbty0cffcSGDRuQUqLVarnzzju5//77+fnnn6mpqeGzzz4jPDycIUOG2AOopqaGxMREe63QOiqu0+moqqrC398fIQQrVqxg69atVFRU4O/vz5IlS9BqtaSkpKDRaOxdZ39/f/u1zEajkZCQEPz9/QkKCkKj0eDl5YXVaiU0NNRep9I7qRahCxNCEB4e3mH154qKCnx9fQkICKC5uZnw8HAiIyPZv38/oaGh6HQ6oLW7+dNPP5GdnU1gYCBpaWl4e3vz2WefceTIEc455xyqq6vx8PAgKCiIlpYWAKqqqvDy8uqwzWaz8cMPP2AwGIiMjMTd3R0hBFVVVRQVFdmDcNy4cXzxxRcMHjyY2NhYAIYNG0Z0dDTfffcdc+fOxcvLi4aGBmJjY6mpqbG/Vm1tLc3NzfZ9tv/8Qgi0Wi1xcXH8/PPPFBYWUlRUhEajYdCgQSxbtoyqqip8fHwIDAzslt+L0v1UELqouLg4/Pz87CF49dVX4+3tjcViwWq1MmbMGNLT03F3d+emm25i586dNDU12S/XmzlzJhUVFZhMJiwWC1qtlj/+8Y9kZWURHx9Peno6Bw8exGKxMGjQIIYPHw78b8pLSkoKI0aMwMPDg+uvv57y8nL69OnD1KlT8fHx4ZprriEnJ4d+/frZl+aaMGECfn5+VFRUYLVaGTp0KFJK+vTpg81mQ6vVct5555Geno63tzcpKSl4e3szffp0srKy8PPzY+zYsbi5uSGl5IYbbgBaA3HYsGG4u7vT0NBAWloagYGBhIaGYrPZqKioYPDgwer8YC8mnH1KQHp6uszIyHB0GT3Svn37ePTRR/nb3/7WoyYn90Tbt29ny5YtvPHGG44uRTmO9PR0MjIyjvkPQZ0jVLpNV/7RdfY/6IpzU0GodJuubJWqFq9yNlQQKl2uO1trqmWonAk1WNKLCSEoLi7m+eefd3Qpp8VqtaLRaHpUK6+yspKEhARHl6GcIRWEvVhMTAyvvfbaaV1D7GhGo5GFCxcyevToY07cdmbtq+YoPY8Kwl5Mr9czevRoR5dxWpqamli7di2pqamMHz/e0eUoLkKdI1QUxeWpIFQUxeWpIFQUxeWpIFQUxeWpIFQUxeWpIFQUxeWpIFQUxeWpIFQUxeWpIFQUxeWpIFQUxeWpIFQUxeWpIFQUxeWpIFQUxeWpIFQUxeWpIFQUxeWpIFQUxeWpIFQUxeWpFaoVh5NS0tzcTEtLC83NzRiNRgwGA9XV1Qgh8PPzQ6vVOrpMpRdTQag4hTVr1vDaa69hs9k4cuQIGzduxM/Pj9DQUF599VVCQkIcXaLSi6kgVJxCYGAg27Zto6amxr5NCME111yDh4eHAytTXIE6R6g4nBCCAQMGMGHChA638NTpdEyZMgUfHx8HVqe4AhWEilMIDg4mPT0dd3d3+7Z+/foxatQoNBr1NlW6lnqHKU5Bq9Uybdo0+72BhRAMGTKE5ORkB1emuAIVhIrTSEtLo1+/fgB4eXkxc+bMDi1ERekqKggVp+Hu7s7MmTNxc3MjICCACRMmOLokxUWoUWPlmCwWC1artdv3O3nyZAICArjyyivx9PTEaDR2ew3u7u7qvKSLUUGoHNPChQtZs2Y1Uspu3a/RaERKG7/++gtzb56LOPlTOlVAQCB/+tOfGDBgQDfvWXEkFYTKMW3fmUHqFQYGjQzv5j17c1HhVDz1bvgHeXXrnqWEj/6+h8rKShWELkYFoXJcQX30RMT5dft+HbFPaL3Uz0uvBmdckQpCpUew2ST11S0gwSfAEzd3dQ5P6Tzq3aR0GSkle34pxVB79gMexhYLi+fv4tk/rqJgf83Jn6Aop0EFoXJGbDaJxWTFYrbZB1QsltbPbTaJ1WrDarGx6rP9lB8xYG37npQSi9mGxWzFZpO/2WbDbLIec4BGp3fnhodG0H9oqH3biV7LbLJitdp+t81mtXX7AJDi/FTXWDltNptk6Xt72PJ9AR6eWuY8Ooq45ECevmElz3w4jR0/FbHnlzJMLRZ+WnqInF2VTLoikRl3pLBzYzErP86mudHMuOkJnD+rP431Jh64dAkJg4IwGq3c869xBIbqT1iDlJK9v5bx9Tu7aTKYOe/SBP5wTX9ydlWy6LWd1Ne0cPHsQUy4vB/Z28pZ9NpOGuqNXH7zEMZeFI/o7uFoxampIFROm6Gmhc0r8nno9Ulk/VLKqs/2c+szY2hpsgBgtUi89G7c9Eg6zQ1mLrtlCHEDgzDUGln85i40GoGbu4av5u/i/Fn9aW+gzf7LSPrE+nZYeOF4LCYbm1fmM+nKRPrG+fHpyzsYf0kCBftr8PJ258rbU
khMDUGjEeTtrcI30JOZd6eRmNK6TVGOpoJQOW0moxU3Dy2+gV6ERHjT3GgGQKMVmIxWGutN/3uwAJu1tXtqNlnR+3hw0exkAkN0uHn878xMULj+pCEoAKERWK22tu63JCBEh0+AJ9DaShw3PR5vPw9WfJxN9C8BXHVXGhOu6IdvoBfL3t9LvyEhXHHbELRadVZI+R8VhMppCwrTExblw8J/baWqtInxlyQAEDsgkI9e2EZNeRMx/QMBCI30YdkHezlnWhzDJ0QycHgYW74vIChcT0SsL5EJ/kDrIgsnawlq3DSE9NHz/cfZTJqRRGJKMKs/P4CnTkvfeD88PLXk7aumtKAevY8H7p6tq1ofOVBL2REDel8P3D1VACq/p4JQOW1aNw03PDCCvL1VeOrdGTCsdQDj6ruHcjinhoBgL7z9Wltp064bwMGsKoLCdWi0gotmJ5O7q5KGeiMBwToAdN7uzHl05Mn3qxVMnpHEwawqAoK9SEwJpk+sH8ZmC3HJQbh7avEN8CQqMYCEIcEkDApGqxX4BXoSnRhAYkoI/YYEq66x8jsqCJUzEhSuJyi844BGWJQPYVEdF1ENDvcmONzb/rXO252UcyI6PMbNXXQYDS7Jr+dgVqX9a627hsSUEEL7+hAYpid98v/2O3hUnw6v1Tfen77x/h22RfYLILJfwOn9gIpLUUGoOBUpJU0GE2VHGuzb3Dw0RKkgU7qQCkLFqTQaTGRuKmb8pQmERfpgs0nee+5XfNsGRBSlK6gzx0oXO73Jy0cO1GKoMRLSp7X7KwQkpYXw45JDqHnQSldRQah0sdMbmNi1uaR1QOOo6S2xA4Io2F+NqcXS2cUpCqCCUDmp7m2GlRc2EBj2v8EQIQSeOi02G/YJ24rS2VQQKifRvVNN9L7utDSZO2yzWSVIqVacUbqMemcpTmXwqD7k7a1G2lpbolJKyo4YCA73Ru+j1gpUuoYKQsWpDBgWiqHWSG1Vc+sGCdvWFXLu9HiEmgitdBE1fUZxKv5BXlx+8xC8/TxaNwiY/v8G/26itqJ0JhWEilPRaDW/GyzpE+PrwIoUV6C6xoqiuDzVIlSOy2q2YTJ2/72NHUZK+yCN4lpUECrHJuHz1zJZ88UBR1fSfSQcybLCdY4uROluKgiVY3r0kcepqrrd5e7v4e7uTmxsrKPLULqZCkLlmPr27Uvfvn0dXYaidAsVhMoxncp9QxSltxDO3vURQhiA/Y6uo00IUHnSR3UfZ6rHmWoB56pH1XJ83VlPrJQy9Fjf6Aktwv1SynRHFwEghMhwllrAuepxplrAuepRtRyfs9Sj5hEqiuLyVBAqiuLyekIQvu3oAo7iTLWAc9XjTLWAc9Wjajk+p6jH6QdLFEVRulpPaBEqiqJ0KacNQiHEBUKI/UKIXCHEIw6qIV8IsVsIsVMIkdG2LUgIsVoIkdP2/8Au2vd/hRDlQoiso7Ydd99CiEfbjtV+IcS0bqrnr0KIorbjs1MIcVF31COEiBZCrBNC7BNC7BFC3Ne2vduPzwlqcdSx8RJC/CqEyGyr55m27Y44NserxSHH5oSklE73AWiBg0AC4AFkAoMcUEc+EPKbbc8Dj7R9/gjwry7a93nAcCDrZPsGBrUdI08gvu3Yabuhnr8CDx7jsV1aDxABDG/73Bc40LbPbj8+J6jFUcdGAD5tn7sDvwBjHHRsjleLQ47NiT6ctUU4CsiVUh6SUpqAz4DLHFxTu8uAD9o+/wC4vCt2IqX8Eag+xX1fBnwmpTRKKfOAXFqPYVfXczxdWo+UskRKub3tcwOwD4jEAcfnBLUcT1cfGymlbGj70r3tQ+KYY3O8Wo6ny9/Hx+OsQRgJHDnq60JO/ObqKhJYJYTYJoS4tW1buJSyBFr/EQBh3VjP8fbtyON1txBiV1vXub271W31CCHigGG0tjYcenx+Uws46NgIIbRCiJ1AObBaSumwY3OcWsDB75vfctYgPNaFro4Y3j5XSjkcuBC4SwhxngNqOBWOOl7zgX7AUKAEeKk76xFC+ACLgT9JKetP9NCurucYtTjs2EgprVLKoUAUMEoIMeQED+/Seo5Ti0PfN8firEFYCEQf9XUUUNzdRUgpi9v+Xw58TWszvUwIEQHQ9v/ybizpePt2yPGSUpa1vdFtwDv8rxvT5fUIIdxpDZ6PpZRftW12yPE5Vi2OPDbtpJS1wHrgAhz83jm6Fmc4Nr/lrEG4FUgSQsQLITyAWcCS7ixACOEthPBt/xw4H8hqq+OmtofdBHzbjWUdb99LgFlCCE8hRDyQBPza1cW0/8NqcwWtx6fL6xFCCGABsE9K+e+jvtXtx+d4tTjw2IQKIQLaPtcBU4FsHHNsjlmLo47NCXXHiMyZfAAX0ToCdxB43AH7T6B1BCsT2NNeAxAM/ADktP0/qIv2/ymt3QYzrX8p555o38DjbcdqP3BhN9XzIbAb2EXrmziiO+oBxtHaZdoF7Gz7uMgRx+cEtTjq2KQCO9r2mwU8dbL3bRcem+PV4pBjc6IPdWWJoiguz1m7xoqiKN1GBaGiKC5PBaGiKC5PBaGiKC5PBaGiKC5PBaGiKC5PBaGiKC5PBaGiKC7v/wPTw3l4vNFIBgAAAABJRU5ErkJggg==", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" + "image/svg+xml": "\n\n\n\n\n\n%3\n\n\n\n140393111569088\n\nouter_loss\n ()\n\n\n\n140393111544592\n\nMseLossBackward0\n\n\n\n140393111544592->140393111569088\n\n\n\n\n\n140393111544736\n\nMulBackward0\n\n\n\n140393111544736->140393111544592\n\n\n\n\n\n140396237940576\n\nAddBackward0\n step1.a\n ()\n\n\n\n140396237940576->140393111544736\n\n\n\n\n\n140393111545216\n\nAccumulateGrad\n\n\n\n140393111545216->140396237940576\n\n\n\n\n\n140393111545984\n\nMulBackward0\n\n\n\n140393111545216->140393111545984\n\n\n\n\n\n140393111534464\n\nstep0.a\n ()\n\n\n\n140393111534464->140393111545216\n\n\n\n\n\n140393111544112\n\nMulBackward0\n\n\n\n140393111544112->140396237940576\n\n\n\n\n\n140393111545168\n\nDivBackward0\n\n\n\n140393111545168->140393111544112\n\n\n\n\n\n140393111545408\n\nDivBackward0\n\n\n\n140393111545408->140393111545168\n\n\n\n\n\n140393111545552\n\nAddBackward0\n\n\n\n140393111545552->140393111545408\n\n\n\n\n\n140393111545648\n\nPowBackward0\n\n\n\n140393111545648->140393111545552\n\n\n\n\n\n140393111545744\n\nMulBackward0\n\n\n\n140393111545744->140393111545648\n\n\n\n\n\n140393111546272\n\nPowBackward0\n\n\n\n140393111545744->140393111546272\n\n\n\n\n\n140393111545840\n\nMseLossBackwardBackward0\n\n\n\n140393111545840->140393111545744\n\n\n\n\n\n140393111545984->140393111545840\n\n\n\n\n\n140393111545792\n\nPowBackward0\n\n\n\n140393111545792->140393111545744\n\n\n\n\n\n140393111545792->140393111545984\n\n\n\n\n\n140393111546128\n\nAccumulateGrad\n\n\n\n140393111546128->140393111545792\n\n\n\n\n\n140393111545024\n\nPowBackward0\n\n\n\n140393111546128->140393111545024\n\n\n\n\n\n140393111534624\n\nx\n ()\n\n\n\n140393111534624->140393111546128\n\n\n\n\n\n140393111545360\n\nAddBackward0\n\n\n\n140393111545360->140393111545168\n\n\n\n\n\n140393111545696\n\nSqrtBackward0\n\n\n\n140393111545696->140393111545360\n\n\n\n\n\n140393111545936\n\nAddBackward0\n\n\n\n140393111545936->140393111545696\n\n\n\n\n\n140393111545888\n\nDivBackward0\n\n\n\n140393111545888->140393111545936\n\n\n\n\n\n140393111546176\n\nAddBackward0\n\n\n\n140393111546176->140393111545888\n\n\n\n\n\n140393111546272->140393111546176\n\n\n\n\n\n140393111545024->140393111544736\n\n\n\n\n\n" }, + "metadata": {}, "output_type": "display_data" } ], "source": [ - "import matplotlib.pyplot as plt\n", - "from matplotlib import image as imgplt\n", - "import torch.nn.functional as F\n", - "\n", "net = Net()\n", - "x = torch.tensor(2., requires_grad=True)\n", + "x = nn.Parameter(torch.tensor(2.), requires_grad=True)\n", "y = torch.tensor(1.)\n", "\n", "optim = torchopt.MetaAdam(net, lr=1., moment_requires_grad=False)\n", + "\n", + "net_state_0 = torchopt.extract_state_dict(net, enable_visual=True, visual_prefix='step0.')\n", "inner_loss = F.mse_loss(net(x), y)\n", - "net_state_0 = torchopt.extract_state_dict(\n", - " net, enable_visual=True, visual_prefix='step0.')\n", "optim.step(inner_loss)\n", - "net_state_1 = torchopt.extract_state_dict(\n", - " net, enable_visual=True, visual_prefix='step1.')\n", + "net_state_1 = torchopt.extract_state_dict(net, enable_visual=True, visual_prefix='step1.')\n", + "\n", "outer_loss = F.mse_loss(net(x), y)\n", - "torchopt.visual.make_dot(outer_loss, params=[net_state_0, net_state_1,{'x': x, 'outer_loss': outer_loss}]).render(\"graph\", format=\"png\")\n", - "plt.figure(figsize=(15,15))\n", - "plt.imshow(imgplt.imread('graph.png'))" + "display(torchopt.visual.make_dot(outer_loss, params=[net_state_0, net_state_1, {'x': x, 'outer_loss': 
outer_loss}]))" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "- When you track the meta-gradient through moment" + "- When you track the meta-gradient through moment (`moment_requires_grad=True`, default for `torchopt.MetaAdam`)" ] }, { @@ -192,45 +189,34 @@ "metadata": {}, "outputs": [ { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] }, { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAa0AAANSCAYAAADWDZHSAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOzddXxUV/r48c+dmczE3YUkxAgJJEBwggSnWIEC3Rpshcpuu223bvv9VbZbl63LdttCixYoFHeX4AnEPRDXiYzd3x80s6WKJJmZ5Lxfr77aTCf3Prlz5z73nHvOcyRZlhEEQRAEW6CwdACCIAiCcLlE0hIEQRBshkhagiAIgs0QSUsQBEGwGSJpCYIgCDZDJC1BEATBZnR60pIkaZIkSRmSJGVLkvR4Z+9fEARBsF1SZ87TkiRJCWQC44Fi4AhwoyzL6Z0WhCAIgmCzOrulNQjIlmU5V5ZlHfAtMKOTYxAEQRBslKqT9xcEFP3k52Jg8O/9gre3txwWFtaRMQmCIAhWJjU1tVKWZZ+fv97ZSUv6ldd+0T8pSdJdwF0APXr04OjRox0dlyAIXZCtlqmTpF+7VHYvkiQV/NrrnZ20ioGQn/wcDJT+/E2yLH8MfAyQlJRkm2edIAgW19LSwrlz52hqarJ0KJfF39+fsLAwlEqlpUOxWp2dtI4AUZIkhQMlwHzgT50cgyAI3URxcTH3P/gQ7mFRKBTWPcOnpryMfpGh/POll3B0dLR0OFarU5OWLMsGSZL+AmwClMDnsiyndWYMgiB0L15+/tz48JMoVXaWDuV3ZZ1IpfrEfkuHYfU6u6WFLMs/AD909n4FQeimJAlJUlh9S0uy8vishThKgiAIgs0QSUsQBEGwGSJpCYIgXCZdawsHNq2ntaWZnDOnyE07bemQuh2RtARBEC6TUqWiubGBTUv+y551q/Dw9bV0SN2OSFqCIAiXSaFQ0mfoCI5s30xkn0TcvUXS6mwiaQmCIFwuWebs0UPEDx5G1qnjNNbV2mzVDVslkpYgCMJlqqkop7r8AjPvuI+hE6/j+J4dIml1sk6fpyUIgmCrPP38mXnHfQD06j+IXv0HWTii7ke0tARBEH7XL1tSep2OpsYGZFlGlmUaamtEi6uTiKQlCILwuy6tuC7LMif37eLY7u0AGI0G1v33EyrPl1giuG5HJC1BEIQr0NrcREFGOmExvQFQKlWEx8aRdviAaG11ApG0BEEQrkBjXS261lac3dzN6155BwRRUVqCXqezcHRdn0hagiAIV8BgMGAyGVGqLo5jkyQJpcoOo9GAyWiwcHRdn0hagiAIV0Ct1qBQKM2tKlmW0bU0Y6dWW/3yJ12BSFqCIAhXwNndHXtHJxpqqy8+w5JlyooLCQgNR2UnklZHE0lLEAThCtipNfTql0RhxlkAjEYjpXk5xA8eZn7GJXQcMblYEAThCkiSRK8Bg8wjBZUqFXPve0gs4thJRNISBEG4QpIkXdKqkpRKC0bTvYhbA0EQBMFmiKQlCIIg2AzRPSgIQpclSRItjQ1sW/YVCkX7deEZDAYUSgUKqf3u+ytKiwl3d2q37XVVImkJgtBl+fr6ctef/0xNTXW7bdNoMvHD+vX0TUggNKRHu21XDvEhNjYWtVrdbtvsikTSEgShy3J1dWXWrOvbdZt6vZ7MjAymTJ7M4MGDxTD3TiaeaQmCIAg2QyQtQRAEwWaIpCUIgiDYDJG0BEEQBJshkpYgCIJgM0TSEgRBEGyGSFqCIAiCzRBJSxAEQbAZImkJgiAINkMkLUEQBMFmiKQlCIIg2AyRtARBEASbIZKWIAiCYDNE0hIEQRBshkhagiAIgs0QSUsQBEGwGSJpCYIgCDZDJC1BEATBZoikJQiCINgMkbQEQRAEmyGSliAIgmAzVJYOQBAEwdrJsozJZMJgMGAwGDCZTOj1enQ6HQBqtRpJkiwcZfcgkpYgCMJl2L17N2+88QZ6vZ7c3FwOHTqEq6srbm5ufPjhh7i7u1s6xG5BJC1BEIQ/IEkSDg4OnD17lpycHACysrJQKBRMmjQJtVpt4Qi7D/FMSxAE4TL07t2bpKSkS15TKpXMnDkTe3t7C0XV/YiWlpVqamqioaHB0mFcFYVCgaurKxqNxtKhCEK7cXV1ZeTIkaxbtw6tVgtAVFQUAwYMQKEQ9/+dRSQtK7V+/Xre//Aj7GztDk4Ge7UdTz7+GEOGDLF0NILQrqZOncrzzz9vTloJCQlER0dbOKruRSQtK1VdU0N08jjG3XCTpUO5IkajkXWfvEN9vW22EgXh9wQHBzNy5EiWL1+Ok5MTkyZNwsnJydJhdSsiaVkxhUKBUmV7H5Ekia4SoWuSJIkbbriBlStX4urqyvjx48VQ905me1dEQRC6PJPJZOkQflNCQgJRUVEMHDgQHx8fq41VkqQumVBF0upC6qoqKMnLIapvf+yuYAhuRWkJBzZ+j7O7B8nXzcRODKAQLGzt2rUcPnzY0mH8qpaWFmRZpqKigmeffdbS4fwqe3t7Zs+eTVxcnKVDaXciadkgWZbRt7bS1HjxuZGLhweySaY4J5vje3fg5ReAh68/CqWCpvp6ZMDJxRWVnR0NdbUoJAmjwYCTmxuSpGDdfz8mJDKGwsyzHHdzZ9DYib/YZ7O2kZamJpRKJY4uLihVdl3yLk6wDnv27AEgPj7ewpH8uuDgYBwdHa12hOzmzZvJzMwUSUuwHhsWf05B5jmc3dyZveh+Wpq0bF76FecLctHW1zFuzp84X5BHxvGj6PU6YgcMYtT0Obz18L2ExsRSXXaBUdPn0DOuD+UlRdz66DPknDnF3vWrfzVpnT6wl+N7dmA0GkkYlsywydMt8FcL3YVKpaJnz56/mBclXJ5Tp05ZOoQOI5KWLZJlykuKCQgNJyllAq4enrh5eTN70f0c2bGJKbfcga65mcVvvoxCqbzYlVH6HaOmz0ahVDJ29o3odTo2fvMFwZFRKFV2nNizk9bmZlqbm39ldzLeAUGo1Hacz8rj6M6tDJ00TbS0hA5lLc9
kZFnGaDQCFycTWzqmtjqICoXiV2ORZdkCUXUeMczLFkkS8//6d3pE92L5e2+SffrEjy9LGA1GZJMJWTbh6efP7U89z0NvfMAjb38MXLwIqOzsUNnZYTIacXZ1x2gwENqrN5JCwss/ALg4dL0oO5PGulpamrRsX/UtQydMZfrCRdg7OABd+4shWLempibOnDlD86/cZLW35uZm3njjDR588EHy8vJ+9716vZ6ioiL0ev3vvicjI4MTJ05QV1d3xUmmpKSETz75pFP+dmskkpaN2rF6OeeOHcHJ1RVHV1cAnNzcaKyrY8UHb1FbWUFMYhLrv/qMFR+8xZHtmwEw6PVsXb6Y1Z+9T9Lo8Tg4O5M4fCTfvv0KBzf/wMCUi12DupZmVn30Djlpp1DZ2eHlH8jeH9ZwbPd29K2tFvu7BQHg7NmzvPLKK5w8ebLD9+Xo6Mj9999PdHT0H44UrK+vZ9WqVb9ZzUaWZXbt2sWKFSs4evQoR44cueJ4ZFk2V5fvjkT3oI0aMn4yep0Oe0cnPHz9AHD39mXWXX+hWduIu7cPQeGRVFeUYdDpcHB2BsBOrWbc3JtBBi9/fwBGXz+XhOGjUKrszK9pHBy55ZFncHRxwU6tYeL8W6mrrkStsUehVIq5WILFGI1GCgsLGTFiBIcOHWLw4MGYTCYOHDjAhg0b0Ol03HzzzTg7O7NkyRLKy8uZPHkykydP5qWXXuKxxx7j+PHjVFZW4uvry8qVK3FxcaG5uZlhw4aRlpbG3Xffzfnz5zl06BA33fTrE/w/+OADzpw5g4+PD3/+85+xs7Pj9ddfJy0tjbS0NG6++WaCgoJYunQptbW1TJkyhYSEBHbu3Mldd91FQEAArT/eADY2NvLss8/i7e1NQ0MDycnJbNq0CY1Gw4QJExg9ejTZ2dksXboUvV7frWsdiqRlgyRJwje4xy9eVygUuHv74O7tY37N72fvSxozHu+AQJTK/330GnsH/HuE/WJbnj8mQwBHFxccXZyB3+rPl3/n/wlC+9HpdBQVFTFx4kS++uorqqurUSgUbNmyhdtuu42oqCgAXnrpJVJSUhgyZIi5NmBZWRnwv9qeLi4uhIWFUVdXx8SJEzl8+DDnz5/HZDLR2tpKbW3tb8Yxbtw4vL29OXnyJJs2beKOO+7giSee4Ouvv+aWW27BwcGBV199FTs7O0JCQvjqq68ICwtDpVLh5OTEkSNHqKysZNq0aZhMJiorK/nrX/9KaGgotbW1KJVKzp07x8aNG0lISGD79u2MGTMGtVrNDz/80OHH2VqJ2+VuZtwNN1FXWcnBzevR6y7e5Wnr69i5ejkG/R91OfxeUhIJS+gc9fX1bN26lbfeeousrCzOnTuHwWBApVIREBBgfl9dXR2RkZGXDFhQKBSYTCZz95okSXh6euLh4YGTkxNKpRK42JrT6/WXPG+SJAlZlpFlmYaGBr755htcXV0JCwujpaXlkkERsiybW1F9+vShX79+LFq0CCcnJ/R6PUajEScnJ9asWWPeh6+vL2FhYZhMJrZt20ZxcTGhoaHY2dmh0+kwmUz4+fnh4+ODm5tbxx5kKyZaWrainRoysixz9thhJIUClcoOADu1hvrqKgoyz9Gzdx+Lj44ShN+TmprK7NmzmTdvHgcPHiQ9PZ0+ffrg6urK999/T1hYGNHR0fTv358VK1YQFxdHeHg4PXr0wMfHhzVr1pCRkWFukf2UJEmEh4ezZs0aGhoaaGlpAS6OGnR1deXAgQOo1Wrc3d0xGAzo9XoqKipwdHQEwM7ODoPBwJ49exgwYAA9e/akrKwMZ2dnXFxc8PLyIjExkWXLlhEaGoq/v/8lCbXtvw0GA0ajkYaGBrRaLfb29vj7+7Nz505UKhWVlZWddLStj0hatqKd8ohBr+d8QR6Jw0cj/dhlYqfR4OrpRWl+Lj1j40EkLcGKhYaGkpCQgL29PQkJCbi7u+Pg4MDcuXM5efIk5eXlhIaGMnXqVA4fPkxlZSU+Phe7zOfNm0d6ejoTJkzA09MTJycn/P390el0+Pj4MHr0aLy9vTl69CiRkZG4uLgAF+eNjRs3jiNHjqDVagkNDWXu3LlkZ2eTkpJiTlptRXQzMjJobW1l5syZHDlyhIqKCvMgjqlTp3Lo0CFqa2u57bbbgIsVLK6//nrzvlJSUkhNTcXe3p6YmBicnZ0ZN24cqampyLJMYmIidnZ2nX3orYJIWt2MyWigobYG5590L0iShJObGxUlxZhMJpRibSDBiv20yoOHhwceHh4ABAQEXNI9CJCSknLJz5GRkURGRv7mtr29vQGYNm3aJa9LkkRgYCAzZswwvxYfH/+Lih1KpZK4uLhLYhwzZswl73FwcGD06NGXvKZWqxk6dKj5Zz8/P6ZMmXLJezQaDWPHjv3N2LsLcXXqNtr65iWUSiWmHydLtjEZjCiUStHKEgTBqomk1W1cTEYqOxWePn7UVlWYHwCbjEbqa6rw8vUXK7AKgmDVxBWqm1Gq7Ajv3Yfz+Xnm0jTNTVrqqqoIi+16xTUFQehaxDOtbkaSJHrG9cHVw9M8tsPOTs2AUWPx8g8UIwcFq9Da2srGjRtJT0+3aBxtdf7ahsL/lp/WJrQGZ86cYeTIkZYOo0OIpNUNOTg5ExIVY/5ZbW9PeG/rXAJC6J5uueUW8vPzLXoTJcsyO3bsoLy8nFmzZqH+jTXqZFnmhx9+QKFQMGHCBKtIXGPGjOmyFfJF0hIEweoMGDCAAQMGWDSG1NRUKioqeP7554mJifnN98myTK9evXjttdcYNmwYgYGix6IjiWdagiAIPyHLMvn5+fz73//mrrvuIjo6+nffL0kSUVFR9O7dm9WrV3f5pUEsTbS0rJUsU1ZUwNlU61xy/LeYTEbqqiosHYYgXLWWlhY+/PBDBgwYQHJy8mW1muzs7Lj55pu5//77GTdu3O+2zIRrI5KWlerZsyc+J09Rlrq33bddX19PUVERMTExqFTtewrIskyEvw/+/n5//GZBsDImk4lly5bR3NzMTTfd9JvPsX5OkiQCAgKYOXMm//3vf3n22We7dSX2jiSSlpVKSUkhOTm5Q7Z9+vRpvvrqK5579mmcnJzaffuSJHXbEjOC7TKZTGzevJktW7bw+uuv4+7ufsXbmDRpEnv37uXw4cOX3UoTroxIWlZKqVR22CgktVqNUqlErVaLu0FB+FFxcTHffPMNd9xxB76+vleVcDw8PBg3bhyrVq1iyJAhl91SEy6fGIghCEK319jYyOuvv87o0aMZPnz4VbeQ2oa9NzU1sXv3bjEoowOIpCUIQrfW3NzMBx98gJubG7fccss1d207Oztzzz338MUXX1BRIQYltTeRtARB6LZkWWbPnj1kZGRw5513tkuXvCRJxMfHExYWxsaNG83VMoT2IZKWIAjdVmZmJp999hn33XcfwcHB7TZwQqVSMW/ePHbs2MH58+fbZZvCRSJpCYLQ7ciyTFlZGW+88Qbz588nMTGxXUf6SZJETEwMUVFRfP/99+LZVjsSSUsQhG6npaWFTz75hPDwcKZMmdIhQ9PVaj
V33HEH27Zt49y5cyJxtRORtARB6FZkWWb9+vWUlJTw5z//GY1G02H78vHxYfbs2Xz99dc0Nzd32H66E5G0BEHoNmRZ5siRIyxbtoyHHnoIHx+fDt/nmDFjKC8vJy0tTbS22oFIWoIgdAuyLFNUVMRHH33EokWLiIyM7PCKFZIk4efnx9SpU/nyyy/R6XQdur/uQCQtQRC6BaPRyCeffEJcXNw1TSC+UpIkkZKSQnNzM/v27ROtrWskkpYgCF2eTqfjiy++QKvVsmjRok4vX+bi4sJ9993Hf/7zH8rLyzt1312NSFqCIHRpsiyzf/9+9uzZw9/+9rcOKRJ9OXr37k3Pnj3FhONrJJKWIAhd2oULF/jPf/7DrbfeSnBwsMXiUKvVTJ8+ne3bt1NdXW2xOGydSFqCIHRZVVVVvPTSS4wbN44xY8agUFjukidJEomJicTHx7N48WJMJpPFYrFlImkJgtAl6XQ6lixZgouLC3PmzLFowmqjVCq55ZZbOHDgAJmZmWJQxlWw/KcoCILQzmRZZseOHZw+fZp7773XqtaN8/PzY/r06WLC8VUSSUsQBJsny/IlrZaMjAz+85//sGjRIoKCgqxqBWFJkhg3bhxlZWWkp6eL1tYVEisXdxN1dXX861//4tChQzQ1NVFZWcnx48dRqVTMnj2bu+++26q+2IJwJfbt28fBgwf585//jE6n4+233+bGG2+kX79+Vnle+/j4MGnSJJYsWULfvn3FCsdXQCStbsLBwQFJkti5c6f5AXB2djZKpZL777/fwtEJwtWTZZnPP/+cxYsXc/ToUXr37k1YWBgTJ060iudYv0ahUHDdddexadMmtm3bxqRJk6wyuVoj6/xEhXanVqsZNmwY/v7+l7weGxtLfHy8haIShGtXXl7OkSNH0Ol0LFu2jMWLF9OzZ09UKuu+J7e3t+eOO+5gxYoVVFZWim7CyySSVjcyfPhwIiIizD8rFArGjx9PQECAuMsTbNauXbsoLS0FLra6srOzuffee/nkk09obW21cHS/r2/fvgQGBrJp0yaRtC6TSFrdiLu7O6NGjTLfgXp4eDBkyBCrGlklCFeitbWVgwcPUltba35NlmWam5spKiqy+rlQGo2GGTNmsHHjxkv+BuG3iaTVzdx4443mh76hoaGMHDlStLIEm1VaWsr+/fvNyUmtVjN48GC++uornnrqKRwcHCwc4e9rm3Dct29fFi9eLMo7XQaRtLqZ0NBQBg8ejJ2dHcOHD//FMy5BsBWyLJOVlcWJEyeQJImAgAD+8pe/sHLlSq6//nqL1Ri8UiqVigULFrB3717OnTtn6XCsnnU/qbRRsixjMBisso9aqVQyZcoUDhw4wOzZs612fR+FQoFSqRStQAuRZRmTyWTVd/6yLLNp0yZza+XFF19kyJAhODk5dfh53d7np7e3N5MnT2bFihVERESILvvfIZJWB2hqauLVV19FpVJZ5UU3NzcXf39/tmzZwt69ey0dzi/IsoyPjw+33Xab+PJaiCzL7Nq1i927d2NnZ2fpcH6V0Wjkm2++ISoqivHjx5OamsqxY8c6fL8mk4mgoCBuuOEGXFxc2mWbkiQxceJEnn32WdLT0612fpk1EEmrAzQ3N7NmzRoWLFiAUqm0dDi/0KdPH8LDw3Fzc7PKL4bBYGDFihXceOONImlZiCzLpKamUlhYSP/+/S0dzq9qbm7mb3/7m3kOYmepqqpi586dTJkypV2Tlp+fH+PHj+err74iISHBKq8d1kAkrQ7i4eHByJEjrfIuta3kjSRJVpm0Wltb2bhxo6XD6PZUKhWxsbGMHj3a0qH8KpPJZJFzuLi4mAsXLrT7dhUKBTNmzGDHjh1s3rxZTDj+DWIgRjckSRIKhUJ8IQSb1hXPYY1Gw2233cbq1auprKy0dDhWSSQtCzCZTNTW1qLT6TplsEZDQwOPPvoo//znP3938TlZlmlsbPzDh+8Gg4Gamhqqq6sxGAxXHE9eXh5Lly61yoEqwuUxGAxUVlZSWVlJQ0PDZc+HMhqN5t+rr6+/onlUZWVlfPPNN2i12qsN+xdOnz7NunXrgItLmVRVVVFXV2fRc7Nv3754eXldUnJN+B+RtCzgwoULPPzww2zdurVT9ufs7Mzf//53XF1dfzfJGI1GPvjgA86fP/+b72lb8uGVV17hww8/5OTJk1ccT0tLiyhbY+MyMzO57777eOutt3j99dfJy8u7rM+zuLiYhQsX8t577/Hmm2+SkZFx2fvU6XSUl5e364hGrVZrvpFbu3YtL7/8Ms8//zx5eXm/+Tsd3bpzcHBg7ty5rFmzRqxw/CvEM61OJssyFRUVBAYGkpGRwaRJk4CLX+bvvvuO8vJypkyZwtChQ8nPz2ft2rVUVlYyffp04uPjWbVqFfPnzyc1NZXy8nJKSkrMiSgiIoKJEyfy+eefs2DBAnJycigtLWX06NG/+KKtX7+evXv34uXlxZw5c3Bzc+Prr79m9+7dFBQUMHnyZKZMmUJRURHLli3DaDQyb948XF1d2bRpEw888ACenp40Njai1+v55JNPcHBwIDs7m/vuu49z586xZcsWPDw8mDFjBtHR0eTl5bFixQokSbL6unDCHxs8eDCLFi1i8eLFZGRk4Ofnx4YNGzh37hyjRo0iKSmJtWvXMmvWLNRqNXl5eezcuZPevXvz6KOP8umnn1JYWEhwcDBvvfUWLS0tJCUlMXnyZNRqNWlpaWzYsIGWlhbmzZuHo6MjcHF07qZNmygvL2f+/Pl4eXnR0tLC7t27iY+P5+uvv6a2tpbY2Fhmz56Ng4MDJ06c4Pjx4xQWFhIXF0ePHj3YsmULDg4OeHl50dTUxN69e3nyySc5cuQIO3fupGfPnr/6d3f0zZYkScTHxxMbG8vy5cvFCgw/I1paFnDixAkmTpxIVVUVNTU1mEwmvv76a+Li4nj00UcZOHAgBoOBxYsXk5iYyKOPPkpiYiIGg4G0tDRkWaa0tJSsrCwaGxtRKpWEhISwd+9ejEYjJ0+exGQyUVNTQ2Fh4a/GkJSUxHXXXYezszPfffcdbm5uLFiwgCFDhvDXv/6VcePG0dzczEcffURwcDCBgYF8+umnVFRU4OTkhI+PD0VFReb4Dx06RExMDI899hh+fn707duXadOm4eXlxXfffYdWq+Xbb79lzJgxhIWF0dLS0slHXWhvZWVlHD58mNLSUry9vTl58iTFxcXMmzePvXv3kp+fT2VlJfn5+WRnZ5ORkYHRaOTYsWM88sgjFBQUEBERgYODAzNnzmTMmDEcP36cU6dOUVNTw6pVq5gwYQIPP/ywuWZmbW0t77//PkFBQfj4+HD69Gny8/Oprq7m3LlzODo6Mn36dMaPH09eXh779+8HoKSkhNzcXO6++25Gjx7NihUrmDt3Lg4ODubvip2dHUajET8/P3MtQ0tRqVQsWrSIPXv2cPr0adEr8RMiaXUyvV7Pzp072bRpE4WFhaSmpiLLMtXV1fTv3x9XV1fs7OwwmUw0NjaSkJCAi4uLeRRi2x1XWxeJj48P/v7+eHl5odFo0
Ov1v3jPz7W2tvL9999z+vRp6uvrqampQZIkHBwcUKvVODo6otFoqKuro6GhgaamJoxGI0OGDEGj0dDS0oLBYODs2bO8++67ALi4uDBkyBBcXV0xGo1s2rSJY8eOUVtbS21tLVqtFkmSiIqKIjo62maqFQi/rbq6moKCAiZMmEB8fDy1tbWEh4cTGhqKt7c3zc3NBAYGsnnzZr766iuKiooICAggKSmJt99+m3HjxrFnzx5KSkpYsWIFBQUFtLS00NTUREtLC2q12nyutLXMKyoqaGxsxN7envj4eA4dOsRXX33F6dOnUSqV5rlbubm5tLS00NDQAFwctJGUlISfnx/Nzc04OzsTGRlJXFwcSqUSjUZDa2srlZWVVFdXo9FoLHlogYsjkMeOHcvatWuttgiAJYik1ckyMjIICwvjz3/+M3fccQepqamYTCYCAgLYvXs3xcXFNDY2olAo8PT0ZN++fZSUlKDVas1rA508eZJTp04BvxwJKMsy7u7uHDlyxLxtuFiTzWg0UlBQQG1tLWVlZSQkJODv729+T1u3XW5uLnV1dXh4eODj40NERARjx45l2LBhBAUF4e7uzq5duwgODsbNzQ3gkuoABoOBsrIyevfuTXBwMEajEWdnZ1QqFceOHePkyZM0NjZ26nEX2l9sbCx/+tOfGDZsGA4ODvj4+JCVlcWZM2eoqqrCw8MDf39/0tLSSEpKIisrC3d3d5qbmykpKeH8+fPY29tTX1+PSqUiKSkJk8mELMs4OjpiMplITU3l/Pnz5pZ5REQEd955J5s2bcLOzo7s7GwiIiLYu3cvPXv2RKvVotPpGDRo0CXznH76PWnrTjx27BjHjh3DaDTi4eGBt7c3Fy5cID09ncTERAsd1f9RKBRMnDiR7OxssrKyLB2O1RBJq5MZDAZmzJhBeHg4AwcOJD4+Hr1ez0033URtbS3ffvstGRkZqFQqbrrpJsrLy/n222/Jzs7G3t6e8ePHc+DAAYYOHUpiYiIxMTGEh4fj7+/PwIEDUalU3HDDDRw9epTQ0FBiY2OBiy2h5ORk9u/fT3l5OdOmTePo0aM4OTkxYsQI4OKXZOzYsaSmppKamopGo+HOO+8kKyuLxYsXs3fvXlQqFXfeeScXLlzg4MGD3HLLLSiVSkaOHGn+Gx0cHJg8eTJpaWlIksSYMWOwt7fnxhtvJCsrC5VKxeDBg0U/vQ3z9PQkMTHRfCMlSRJ9+vQhPj6effv2MW7cOEJDQwkLC2PGjBkMGDCAkSNHEhYWhr+/P9988w1OTk5MnjyZqKgoIiMj2bt3LxMmTCA4OBh3d3fmzp1Leno6y5cvp6SkBGdnZ/r160doaCgTJkygvr6e6dOnM2LECPr160diYiJBQUEMGjSInTt3MmLECPNzqdDQUMLDwwFwdHTk1ltv5eDBg4SEhNC7d2+USiW33XYb586dw9fXl1GjRlns2LaRJImgoCBmz57NRx99ZPXLrHQWydr7SpOSkuSjR49aOowrUllZydy5c3n99detcnKxtWttbeWpp55i2bJluLq6WjqcbsloNPLOO+9gMBiYPHmypcOxKiUlJaxevZrnnnuuUwpONzc388ADD3DDDTcwbty4bnOzJ0lSqizLST9/XbS0BEEQrkBn3+i3rXC8fPlyMeEYkbQEQRCsmiRJxMXF4enpyZ49e7r9SEIxWaYDtS3vIFyZ7v6ltCbiHLYOjo6O3HDDDbz++uuMGTMGDw8PS4dkMSJpdZCSkhI+++wz84Nq4fIZjUb0er2lw+j2dDodO3bsoKioyNKhWJX6+vpOHxQhSZJ5sMmXX37JX/7yl25bBV4krQ7g6urKW2+91SEL6MmyTGtrK3v37uWHH35g0qRJjB8/vlMezhoMBo4ePcqyZcvo06cPEydOxNfXt0OqW9x8883mCghC51MoFMyaNYu4uLhr2k7b+ZqWlsby5cvx9/fn1ltvxdPT84q3tW3bNlpaWrjuuuuuKab24O3tjbu7e6fuU6FQcNNNN/Hkk0+SkZFBbGxstxmU8VNi9GAH6Khj2trayr59+9i0aRMNDQ3cfvvtJCYmduodlyzLVFVV8cUXX5CamkpSUhLjx48nNja2Q0ZKdscvpTVoj3O4oaGB/fv3s2XLFqqqqrjxxhsZOXLkVa+RtnLlSrKzs3nssceuObb20tnnp9Fo5L///S8lJSU89thjqNXqTt1/Z/qt0YOipdUB2vtElmWZ/Px8PvnkE2pqapg4cSLjxo3D2dm5XfdzOSRJwsfHh4ceeoiCggI2bdrE66+/TlRUFLfccgs9evSw2nW6hMt3tZ+fLMvodDp27tzJihUrcHFxYeTIkYwaNeqansPIsoybm5u5wkV3Pb+USiUTJkzgiSeeICcnxzwPszsRScuKmUwmKisr2bBhAxs2bGDixInMmTMHJycniz8rUyqVhIeHc+edd3LDDTewceNGHnnkEWJjY5k/fz7h4eHY29t324tLd9NWiuzYsWOsXr0avV7PggULSExMxNHRsV3OAw8PD3OB5q7cwvgjQUFB3HDDDXzwwQe88sor3W51b5G0rJTBYGDDhg2sWbOGoKAgXnjhBSIiIqwqCbSVffL29uamm24iJSWFDRs28M477+Dl5cXMmTMZMGCAxROs0HFkWaauro5t27axfft2NBoNM2fOZNSoUe2aWNpqYxqNRnQ6XbdOWpIkMWnSJH744Qd27tzJxIkTreq60NFE0rIyBoOB3NxcPvvsMxobG1m4cCH9+/fHwcHB0qH9LkmSCAwMZOHChUyZMoUDBw7w1ltvERQUxM0330x0dDQajaZbfbm6KlmWkWWZ+vp69u/fz9dff42Pjw8LFy4kJiYGZ2fnDvmc7ezsUCgUongsF4/FTTfdxNKlSxk4cCCenp7d5rslkpYVqa6u5rvvvmPHjh2MHz+eG2+8ETs7O5s6GRUKBf7+/lx//fVMmjSJtWvX8vbbbxMYGMioUaMYPny41Sdg4bfJskxZWRlbt25l586duLm58fjjjxMXF3dJ4eaO0PZdEEnrov79+/P999+zc+dOZs2aZelwOo1IWhbWNnnz5MmTfPjhh3h5efHMM88QERFhswsltl24HB0dmTdvHmPHjmXnzp0sXbqU1atXc9ttt5GQkGBzCbm7amtZNTc3s2rVKjZu3EhUVBR33HEHCQkJnfbsUqVSIUmSmMPHxe+Yk5MT8+bN49VXX2XcuHHmFRe6Otu8KnYRJpOJ4uJiVq5cydGjR7nzzjvNa1Z1lYu5JEl4e3sze/Zspk6dyoEDB3jvvfdwd3dn9uzZxMfH4+7u3mX+3q7GZDJRWFjIzp072bhxI7169eK5554jLCys02862vYnktb/9OvXjwEDBvDFF19w7733dosC3SJpWYjJZGLfvn18+umn9OrVi9dff71TKkZbQtsQeAcHB1JSUkhKSmLPnj188803mEwmZsyYYV6+RLAOsixz4cIFvvvuO06cOEFoaCgPP/ywRQfWtHU/GgwGi+zfGikUCubNm8dTTz1FTk4OvXr1snRIHU4krU7W9kxg8eLFnDp1
invuuYfExESrWCm1s7i6ujJlyhSGDx9Oeno633zzDd9++y233norSUlJuLi4iBGHFqLX6zl//jxbtmzhhx9+YODAgTz66KOEhISgVqst2iJum0QvktalAgMDGT58OKtXr+bhhx/u8q0tkbQ6UWtrKwcOHOCbb74hLCyMN954o1uN+vkpSZJwd3dn6NChJCUlcfDgQVasWMGaNWsYNmwYkyZN6vQyOd2ZwWAgOzubDRs2cOrUKXr27Mmbb75JSEgIYB2TedueaYmkdSmlUsn06dN55JFHyMzMvObSW9ZOJK1OIMsyjY2NvPfee2RkZLBw4UIGDRokusO4eDFUq9WMHDmS/v37c/ToUbZu3cq6deuYPXs248ePx9HRUbS8OoAsyxiNRioqKvjyyy85duwYI0eO5KmnniI0NNTq7tiVSiWSJHVITU9bJkkSAQEBzJo1i08++YR//etfXbrnRiStDmYwGEhNTeXTTz8lKiqK1157rdu2rv6Is7OzeVh8WloaX3zxBevWrWPixIkMHTqU4OBgcdzaSWtrKxkZGWzatImjR4+SkpLCe++9h4eHR4cPXb9abXGJpPVLkiQxdepUtm/fzpYtW5gyZUqXvdETSauDtN3FLl++nPXr1zNz5kxmzJhhdXev1kaSJOzs7EhMTOSVV17h2LFjrF+/nnXr1jFu3DimTp1qrmFnjRdWa9ZWBPfcuXN8/fXXlJaWMnToUF5++WXCwsKs/ni2XYTF+l6/Tq1Wc/3117NmzRpGjBjRZbvXRdLqALIsk5ubyyeffIJer+fFF18kODi4265/c7XUajWDBw8mMTGR4uJivvzyS+666y6mT5/O2LFj8fPzM3cZCb+vubmZjIwM1qxZQ1paGnPmzOGvf/0rPj4+NnNe/rSlJcuy+Nx/RpIkhgwZwoYNG9ixYwczZ87sksfoqpOWJEkhwJeAP2ACPpZl+W1JkjyBpUAYkA/MlWW55sffeQK4HTAC98uyvOmaordCBoOBXbt2sWTJEgYMGMBtt92Gk5OTpcOyWZIkYW9vT2RkJM899xxZWVksXbqUZ555hv79+zNjxgzRbfg79Ho9hw4dYsOGDVRVVZGUlMT9999vkyvfSpKEUqkU87R+h7OzM7fddhsvvvgiycnJeHt7WzqkdnctLS0D8LAsy8ckSXIBUiVJ2gIsALbJsvyyJEmPA48Dj0mS1BuYD8QBgcBWSZKiZVnuMh3UWq2Wb7/9lu3bt/PAAw+QmJjYrQt7tjelUklMTAxPPPEE2dnZbN26lYcffpgRI0bwpz/9CXd3d9Hy4n8LL2ZmZvL5559TU1PDtGnTGDFiBL6+vjb7rEOhUGBnZ9fpqwbbmri4OPr27cuyZctYtGiRzbSkL9dVJy1Zls8D53/87wZJks4CQcAMYPSPb/svsBN47MfXv5VluRXIkyQpGxgEHLjaGKyFLMsUFRXx4Ycf0trayiuvvEJgYGC3v3h2hLbRhrGxsfTq1Yvrr7+epUuX8sADDzBw4EDGjRtHbGxsl/uiXo62UarHjh3jhx9+4MKFC8yaNYtJkyaZb55s+ZxsW1VAtLR+n0Kh4LbbbuOpp55i9OjRXW6F43Z5piVJUhjQDzgE+P2Y0JBl+bwkSb4/vi0IOPiTXyv+8bVf295dwF0APXr0aI8QO4wsy5w4cYJ33nmHAQMGcMstt3SbGmCW1FZlIyQkhIceeohz586xbds2XnnlFXr16sWf/vSnbrEgZdvgCqPRyIEDB1iyZAkmk4nrrruOYcOG4eXl1WX+/q7+WbYXSZLw8/MjKSmJDRs2EBMTQ2trK3v27CE5ORlHR0dLh3hNrjlpSZLkDKwE/ibLcv3vnFS/9j9+dU1vWZY/Bj4GSEpK6pi166+RLMs0NTWxZcsWlixZwh133MHo0aPF6EALUCgUxMbGEh0dzbx58/juu+945JFHGDZsGOPHjycyMrJLzomTZZna2lqOHz/O8uXLaWlpYcGCBfTv37/DlgexNEmSzIla+G12dnbMmTOH+++/n//+9798+eWXVFVVsWzZMptf7fiakpYkSXZcTFiLZVle9ePLZZIkBfzYygoAyn98vRgI+cmvBwOl17J/S2pqauLdd9+loKCA559/npiYGEuH1K21dR35+vqyaNEiZsyYwcqVK3njjTcICQlh9uzZ9O7d+xeV8/Py8ggJCbHKivptJb+cnJxwcXG55P81NTWxefNmtm3bhkKhYNq0aeb6jV0xWf2USFp/TK/Xk5WVRV1dHffddx+tra34+vqSn5/ffZOWdPGb8RlwVpblN37yv9YCtwEv//jvNT95fYkkSW9wcSBGFHD4avdvKbIsU1FRwRtvvIFKpeLZZ5/tsoVubZm/vz+LFi2ivLyc3bt3m+ci/fnPfyY4OBiNRkNjYyMPPfQQw4cP595778XBwcFqLviyLJOdnc1f//pXFi5cyA033ABAfX29uRvQ39+fuXPn0rdvX1xdXa0m9o4kWlp/zGg08tJLL/Hpp59y4cIFc9mruro6cnJybH66wLXcXg4HbgFOS5J04sfXnuRislomSdLtQCFwA4Asy2mSJC0D0rk48vA+Wxs5KMsy6enpvPPOO8TFxXHPPfeY66EJ1kelUhEYGMi8efOYMmUKGzZs4B//+Ac9evTguuuuIyMjg71797Jr1y60Wi2PPvqoVSxQKcsyGRkZ3HPPPezdu5fq6moGDBhAamoqu3btws7OjkWLFjFs2DDzuSfOQaGNQqEgJSWF1atXU1r6v86slpYWioqKMBqNVtmzcLmuZfTgXn79ORXA2N/4nReBF692n5bStgjeyZMnee2117juuuu4/vrrxfMrGyFJEq6ursydO5cRI0aYl4Q5cOAAVVVVyLLM22+/jbOzM3/9618tujilLMvk5OTw2GOPsX//fgwGA6dOneKhhx4iMTGRm2++mcTERJt/mH61ZFm22SH7nUWSJIYNG8aHH37I3/72N44cOYLRaESSJMrLy9FqtTY9WMx2020nMhgMrF27lpUrV/LAAw+QlJTULYdU2zpJkggKCmLOnDkolUoWL15s7mqqqanhpZdeorm5mQcffBBHR8dOT1xtlVTuvPNO9u7da+7WaW1tpb6+nrvuuougoKBu26qSZRmDwSDmPl4GpVLJoEGDzHO1du7cSUtLCzk5OdTX19t00hK3LH9Ar9ezdOlS1q1bx6OPPsrAgQNFwrJxRqORzz777BeFV6urq3nzzTf58MMPO72+nSzLZGZm8vDDD1+SsNqcPn2affv2devnOSaTCYPBYNNdW51JkiSCg4P54IMPuPHGG9FoNOTk5FBXV2fp0K6J+PR/R1NTE4sXL+bgwYM8//zz3fou11oZjUZqa2vR6XSX/Tu5ubnk5+ej0WhQKBQoFAp0Oh16vZ6amhpefvllJEli7ty5nXaDUl1dzd/+9jd2796N0WhEqVSi0WhQKpUYDAZMJhN79+5l8ODBV7TshKura5cpI9bW0rLlbnmDwUBdXd0Vna/XSq1W8/e//52mpiZWrVpFXl4eXl5enbb/K+Hq6vqHvRyStd+5JSU
lyUePHu30/TY1NfHxxx+TnZ3NE088ISpcWKnS0lKeeuopdDrdFSWYtueUOp3O/E9LSwutra3odDqMRiOBgYF4eHh0+OduMBgoLCykqakJtVqNWq1Go9GY/7GzszNfqK8kloqKCv70pz9xyy23dFTonUqn0/H0009z/fXXM2TIEJv8PhYXF/Pss8/S0tLS6S1Gg8Fgvlmzxnqd5eXl3Hrrrdx4441tE8lTZVlO+vn7REvrZ2RZpqGhgQ8//JDz58/z9NNP4+fnZ3UfsHCRXq+nqamJm2++ud1G/rUls7aSUR3NZDKh0+lQqVTteiHbuHEjjY2N7bY9SzOZTOaBGLb6fdTr9eh0OubNm2eRFnDb/q2x9b1+/Xq0Wu0fvk8krZ+pqanhrbfewmg08txzz3XZNWm6krZJxdb4RbQkV1dXS4fQrtqSlq0/U247X38+YbyztPWuWVviv9y5hmIgxo9kWaa6uprXXnsNjUbDww8/LBKWjWvrAvz5Pz/X2trKkiVL+Pe//8358+cv+/d+zfnz59mzZ0+7PrMwGo0cPHiQoqKiq46rK2gbONOVB2Jczed7pefrT2s4VlRUsGvXrnatnG8ymTh06BD5+fkdcr6KpPWjtoTl6OjIAw88gKenp6VDEq5RcXExDzzwAGlpaZw6dcpcIePn7OzsmDBhAq2trVRXV2MymVi8eDELFizgwQcfZOnSpZedhGpra0lPT2/XJeFNJhMZGRlUVFSg1Wr55JNPeOCBB/j222+71TIdbYs/duWk9Z///Ifbb7+dBx98kGXLll3WKFaTycTy5cu57bbbePDBB1myZMllnxd1dXWkpaX9YrTqtWgbCVteXk5zczOff/45999/P19//XW7nK9d99O/ArW1tbz11ls4Ojryl7/8BWdnZ0uHJLQDg8GAh4cHu3btokePHmi1WkwmE/v27SMxMZHW1lZyc3MZMGAAHh4e5mdisiyj1Wq56667CAgI4LPPPmPChAnk5eVx6NAhHBwcGDRoEKGhoZhMJlJTUzl37hzu7u6Eh4cDFwfypKamEhsbS3p6OkOHDkWlUlFWVkZZWRlubm7m6hb9+/cnKioKWZbZtWsXkiSRn5/PqFGjKC0tpaioiPr6egDy8/Opqqri0Ucf5auvviInJ4e4uDiLHePOZDAYukT34O9pbGzk3nvvJSgoiH/84x9cd9116PV686jS5ORkmpqaaG1tJSoqCkmSyMzM5OzZsyxYsICIiAg+/fRT6uvrKSoqYv/+/djb2zNw4EDCwsKQZZnjx4+Tnp6Om5sb0dHRwMVqGcePH6dXr16kpaUxfPhwVCoVFRUVlJaW4uXlxY4dO1AqlfTr18/8e3v37kWWZfLy8khOTqa8vJyCggLzsPqioiJKS0t5/PHH+eqrr8jIyCAhIeGajlG3bmnJskx9fT3//ve/UalU3H///aJLsIvx8/OjoqKCqqoq8yqu27Zto7GxkaqqKvbt2/ebv/v+++/z8ssvExAQgKOjIx4eHkRERACwbt06WltbOXbsGNu2bSM6Opro6Gjz8PmlS5dSX1+PSqXi8OHDVFRU0NzcTFpaGgUFBTg7OxMZGYmjoyMbNmygtrYWo9HImjVrKCwsJD4+npqaGrZu3Yq3tze5ubkAlJWV4e/vj5ubG35+ftTU1HT8QbQS3aGlBRdveKqqqsyVP1auXEljYyNarZbvvvuO8vJyUlNTaW5uRqfTsX37dpqamvjkk0946aWX8Pb2xtnZGTc3NyIjI1Eqlaxbt47m5mZOnjzJli1biIqKIiYmBqVSiU6nY/ny5dTW1qJWqzly5AhlZWU0NTVx9uxZ8vLycHR0JCIiAhcXFzZu3Gjukfj+++/JyckhPj6exsZGNm7ciI+PDzk5OcDF89XPzw83NzcCAwOprKy85uPTtT/9P9Da2sonn3yCVqvlscces9iDUaHjSJJESkoKer3efOFv0/Zg/7fcfffdREVF8e6771JVVUVGRgY7duygvr4evV6PwWAgPT2d4cOHM3jwYCRJ4uzZs5w6dQp7e3umTp2Kk5MTwcHBbN68mZycHMLDw+nfvz+5ubnm0X2NjY20tLTg7OyMnZ0ds2bNwtnZmR07dhAdHc2oUaPMsUuSRH19PT/88EOnT4C2tO7wTAtg8eLFBAcHc8stt2A0GikrK+Ouu+7CZDLxzjvvMGXKFI4ePco333yDg4MDRUVFREREMH78ePr27cu///1vKioqyM/PZ+vWrTQ0NNDS0oJer+fcuXMMGjTIPGUgOzubtLQ07OzsmDRpEk5OToSFhbFlyxZycnKIiYmhV69eFBQUsHHjRrRaLfX19TQ1NeHu7o5KpWL27Nm4ubmxZ88eIiIiGD16NHl5ecDFOoht52tbKalr1S1bWrIso9fr+frrr8nPz+eBBx7A3d3d6kbTCO2jf//+jB071vz5Ojk5UVRUREFBAU1NTeb32dnZUVdXd8noqrbl3VtbWzl79ixjx45l3rx55tp/Xl5enD17lpaWFnMSiY+PZ9asWaxdu5ampib8/Pw4fPgwMTEx5OTk4OLiQlZWFn379mXhwoWXjPJTKpXm7i83NzdKS0upqqqioKAAuFi9vrGxkcGDB1NWVmZuPXYH3aWldeedd/L0008zbNgw7O3tUSqVFBQUUFhYiIODA97e3mi1Wi5cuEBjY6P5ZqdteZ62OYfnzp0jOTmZP/3pT+ZHHl5eXpw7d47m5mbz+RobG8u8efNYvXo1jY2NBAQEcPjwYXr16sW5c+dwdXUlJyeH2NhYbr/99kt6o9rOV0mScHd35/z581RVVZGfnw9cPF+bmpoYNGgQpaWl+Pj4XPPx6dqf/m8wGo0sX76cgwcP8s9//rNdDqRgfTQaDb6+vuZulqCgIOzs7Jg8eTJfffUVvr6+BAVdXDxboVAwZMgQvvzyS/OX68svv8TV1ZVBgwYRGBjIkCFDWLFiBZ6envTs2dNcTXvVqlU8+eSTBAYGMmPGDEJCQkhKSqK6upqMjAwCAgJISEhg4MCB1NfX4+3tTf/+/Vm8eDGZmZlERkaaLzohISHmeUgxMTEcO3aMd999l7CwMJycnOjRowc9e/bk3XffZfTo0ebuyu5Ar9djMpm6dO1BPz+/S6ZuqNVq5syZw+LFizEYDMyfPx+NRkN0dDR2dnbo9XpCQ0PR6XR8++23/PDDDwwcOJAePXowePBgli1bhru7Oz179kSpVJKcnEx1dTVPPfUUfn5+zJkzhx49etC/f39qamo4e/Ys/v7+JCQkMGjQIGpqavD19SUxMdH8DDUyMhK1Wm0uE9X2/YqMjMTHx4e33nqL8PBwcy9DTEwM77zzDsnJyfTq1euaj1G3rIixc+dOPv30Ux5//HHi4uJEC8uGFRQU8OSTT3L//feLeVo/s2rVKnx8fLjnnnssHUq7yMjI4N///jf/7//9Pzw8PCwdzlXJy8vj//7v/1i0aJF4HPEzy5cvJyQkhNtvv/13K2J0q+5Bk8nE8ePH+fDDD3n44Yfp3bu3SFiCYAPaqpTIsnxFtReFrqfbdA/KskxRURH//v
e/WbhwIYmJiSJhCYINaWhoQKPRdOnuQeGPdZuWll6v57333qNv376MGjVKJCxBsDE1NTW4uLh06Xlawh/rFi2tlpYWPvjgAzQaDXfffbfoXuhiDAYDZWVl7VYwt6uoq6vrUqMLa2tru8QoX4PBYB75J/xPXV0dwcHBf/i+Lp+0TCYTu3bt4syZM7z44osiYXUxarUad3d3Vq1aZfE7cFmWqaurQ6PRWEUCraysZMCAAZYOo91UV1fb/OR/tVqNs7Mzq1evtrqh+0ajkerqary8vMwjAjtTRUUFQ4cO/cP3WddR6wDFxcV89dVX3HPPPfj6+lo6HKGd+fj48MILL3Tqonq/xWg08s4775CUlERycrKlwwGw6WXVf66iosLmh/j7+fnx/PPPW8X5+nN6vZ5nnnmGqVOnMmTIEIvEcDnna5dOWtXV1bz88svMmDGDYcOG2Xy3gvBLKpXKaubZGQwGHB0d8fT0NM//EtpPWVmZzd94WtP5+nOyLHPnnXfy9ddfm6tjWKMuOxDDYDCwatUqPDw8mDZtmkhYgmDjKioqutQzOmsjSRIDBw5EoVBw+PBhq132pssmrZMnT7Jnzx5uv/128RxLEGxcU1MTWq3W5lta1s7e3p4ZM2awfv36S0qcWZMumbQaGhp4//33mTt3LmFhYaKVJQg2rqysDBcXF6sY4NKVSZLEgAEDqK2t/UWBaWvR5ZJWWyHc0NBQJkyYYJFRMIIgtK/S0lICAwPFDWgn8PHxYcKECSxZsqRdFzNtL13qii7LMmlpaRw8eJAFCxZY3ZBSQRCuTlFRESEhIZYOo1uQJIkpU6aQn59PWlqapcP5hS6VtPR6PUuWLGH8+PEEBQWJuzJB6AJkWSY/P5+wsDBLh9JtODk5MX36dFatWoVer7d0OJfoUklr+/bt1NfXM2vWLItPNBUEoX0YjUYKCgqIjIy0dCjdhiRJjB49mqKiInJycqxqJGGXSFqyLFNeXs6SJUu48847zQv0CYJg+xoaGmhubhYjBzuZl5cXMTExHDhwQCSt9mYymVi/fj3R0dH06dPH0uEIgtCOqqurcXR0xN7e3tKhdCt2dnZMmjSJrVu3otVqLR2OWZdIWlVVVezcuZPp06djZ2dn6XAEQWhHtbW1ODk5iSVJOpkkScTFxREYGMiWLVssHY6ZzSctWZZZt24dERERxMbGisEXgtCFyLJMTU0Nzs7O4obUApRKJbfeeiurV6+mrq7O0uEAXSBplZSUsGHDBhYuXCiGuAtCFyPLMoWFhfj5+YmWloVER0fj5+fH3r17reLZlk0nLZPJxPfff8+gQYMIDg4WrSxB6GKMRiPFxcWEhISIQgEWolarSU5OZt++fbS0tFg6HNtOWhcuXODYsWNMnz7d0qEIgtABDAYDJSUl9OjRw9KhdFuSJDFkyBBKS0spLi62dDi2m7RkWebEiRN4e3sTEhIiWlmC0AW1trZSVVUllnqxMB8fHwYPHszatWstHYrtJi2j0ci6desYN26cKKIpCF1UVlYWnp6eeHh4WDqUbk2SJGbNmsW+ffu4cOGCRWOx2aSVnp5ObW0tw4cPF60sQeiiTp48SZ8+fcTzLCvg6+tLUlISGzdutOiADJs8E2RZZv369UycOFFMOBSELkqWZdLT0+ndu7elQxF+NG7cOI4ePUptba3FYrDJpFVcXMyZM2eYMGGCpUMRBKGD1NbWkpeXR2JioqVDEbjYRRgdHY1CoSAzM9NirS2bS1qyLHP27FlCQkJwd3e3dDiCIHSQ7Oxs/Pz8cHFxsXQowo9cXV3p168fu3btslgMNpe0DAYDx44dIyEhQXQNCkIX1XZzGhMTI1ZssCIKhYJx48Zx6NAhampqLBODRfZ6DVpbW8nJySEuLk4MwBCELkqn05GTk0Pv3r3FIAwrExISQt++ffnhhx8ssn+bOxuKi4tpaWkhIiLC0qEIAgBr1qxh/PjxTJo0iW+++YYnnniC8ePHc88991BUVGTp8GxSY2MjNTU1otKNlbr++uvZvn27ReoR2lyxvv3795OUlCTWzBKsRkREBPv27aO5udn8miRJBAYGiueuV6m2tpaWlhb8/f0tHYrwK3r27ImzszOnTp1ixIgRnXpjYVMtLVmWSU1NpX///lZ59yXLss39I1y74OBghgwZcsk56eTkxLBhw3B2drZgZLYrMzMTHx8fiyd9S38/rfU77eDgQEJCAseOHcNoNHbKPtvYVEursrKS4uJiqx0C29rayvbt29HpdJYO5bKEh4eTkJBg6TBsnouLC9OnT2fPnj0YDAYA/Pz8mDx5slXeXFk7WZbZvn07KSkpFh+EodVq2b59OyaTyaJxXB6JXr1i6NWrV4fvSaFQMGjQIN588020Wi1ubm4dvs82NpW0srOzCQkJsdquwZqaGp74x/NEDRiMxt66S0uVFeXTO8iPd95+y9Kh2DylUklSUhJhYWFkZ2cjSRIDBw4kJCTE0qHZpOrqajIyMnj88cctHQqlpaU88X8vED80GZWddS+NUpSdwfjBA3jm6ac6fF9tc7Y0Gg2nT59mxIgRHb7PNjaVtAoLCwkNDbXqu9eA0HCm3nYnrh6elg7ld53av4e6tCOWDqPLSEhIIDY2luzsbJRKJX/6058sHZLNOnToEDExMXh6Wsd3qEd0LNP/fA8OTk6WDuV3Hdi0DoW2stP2p9FouO6661i9ejXDhg3rtFGeNvNMy2QyUVBQQFhYmFUnLZshDmG7cnZ2ZuLEiTg4OBAdHW21z12tnV6vZ8+ePYwdO9bSoQiXISUlhcLCQnJycjptnzbT0mpboqAzm6Fdj0xHZyuTydRtB3hMnjyZZ555hpSUFNzc3Dr9AbW1UCgUV52wKyoqqKqqolevXiLp2wB7e3v69evHoUOHiIyM7JTPzGaSVnNzM3q9Hnd3d3EyX7WOP2579uxh+/btqFQ2c2q1G6PRSFBQEBcuXODNN9+0dDidzmQy4ejoyJ133nlVo/5kWSYvLw9XV1e8vLzaP0Ch3UmSxKhRo1ixYgUzZszolJJbNnNlaW1txWQyidJNVu748ePk5+eTlJRk6VAsYv78+ahUKqsdLNSR6uvr2bVrF/PmzbvqpHXy5EkiIyNxsvLnR8JFkiQRFhZGY2MjFRUVODs7d3ijwmaSlk6nw2g0igUf/1DHdwH+HkmSiImJYcyYMZ2+b1mWf/ML83v/rz3339Y1aoulh671GFVUVJCZmXnVv9/a2srhw4f529/+ZpPHz9LfPUvx8/OjR48eHD16lPDw8A7fn82cGTqdDlmWRdL6Q93vS9Pm9y64ndGlLEkSCoXCRi+4nXOMfs+pU6dQKpU2vH5W9/zuKRQKxowZw+bNmzvlebbNfLuMRiMmk6lbPivpKoxGI62trciyjMFgMN+IXA6dTtcuEzxlWUan09Hc3ExLSwtGo/GyYzhy5AgHDx5s1y9mfX0969ato6amBoPBYI7LYDBc9n5KSkpYt24dLS0t7RaX0Whk27ZtZGZmYjQaaWlpuaLP60qZTCa+//57Jk+ejFpt3fOhhEtJkkS/fv2oq
6sjNze3w/dnExlAlmXzSKxum7S6QM/Dzp07Wbp0KS+++CJr164lKyuLZ5555rKeX7z66qvMnTuXqKioa4qhtbWVf/zjH2i1WjQaDcnJyUyfPv2yfreiosJc8aK96PV6CgsLGTBgAGvXrmX79u34+voSGRnJzTfffFnHpqmpiaKionat2iDLMqWlpbi6urJmzRp27dqFo6MjDzzwQIfUA8zNzSU3N5eHH3643bctdDwHBwdGjx7N5s2biYiI6NBWu81kAJ1Oh0qlstmul2vW3ueABUalG41G/P39OXDgAFVVVdjb2yPLMidOnGDbtm0oFAoWLFiATqdj9erV6PV6ZsyYQUhICHq9/pKLcttIs3Xr1uHq6srs2bPR6/WsXbuW4uJixo8fz8CBA6murmb16tVcuHCB6667jl69emE0GnnuuefIzs5m48aNTJ8+nY0bN3Ls2DG8vb2ZMWMGXl5eHDp0iAMHDuDi4sLNN99s3ndRUZF5GfjCwkKGDh2KUqnk7Nmz5OXlkZ6ejiRJDB8+nKSkJCRJYtOmTeh0OtLT05k6dSohISFs2rSJ8vJyWltbzcdnwYIFhIeH89lnn3H+/HnWrl2LwWAgKSmJESNGoFKpOHfuHNu3b8doNHL99deb49JqtezYsYP+/ftz+PBhpkyZgp2dHSUlJWRmZnLq1Cmam5uJi4tjwoQJqNVqjh8/Tm5uLnl5efTt25fk5GR27dplXpm2ubmZ3bt389hjj7Fnzx527tzJ/Pnz2/W8MJlMHD58mPj4eFGr0UZJkkRycjLvvfceWq22Qz9Hm8kAra2tqFQqi9ci6zIs0GqTJAl/f38OHz5MdHQ0kiSZu4Xi4uKYNWsWarWazz77DC8vL/z8/PjPf/7zq9vS6XR8+umnDB48GJVKxZo1a9i7dy+tra3ccMMN9O7dG0mS2LlzJ5IkccMNNxAVFWXe52OPPcbHH39srr3Yt29fxowZg0KhYM2aNRw/fpw9e/Ywc+ZMZs+ebX6WWlJSwpIlSwgJCUGWZc6dO0dlZSWVlZUcOHAALy8vxo0bR1xcHFu3bqWkpARZljlw4AD19fXceuutREREsHv3brRaLX379qW8vBy4mIg/+OADXnjhBXx8fPDx8WHcuHEMHDiQ/fv3k5GRQXl5OatWrWLQoEHceOON5lZPQ0MDn376KS4uLri7u5Ofn09RUREXLlzg3LlzNDY2MnbsWIYPH86ZM2dITU1FlmWysrI4deoUc+bMYejQoRQWFnL48GHGjBlDUVERVVVVqNVq7O3tCQ8P75ClVrRaLUeOHCE5Obn79qR0AcHBwahUKvLy8jr02ZbNJC2TydQtW1myLFOYlUF5caH557Oph2lqbLjSLbV/cFfB09OTsWPHmhfxlCSJ/v37k5GRwYYNG8jPz+fChQvk5+dTXFxMz549f3U7dXV1yLLMwIEDGT58OHl5ecTGxmIwGNi0aRPp6enIskx8fDxNTU1s2rTJ3HpQKBS88sorPP/88+zcuROtVsuGDRvYt28f+fn5nD9/npKSEuLi4ujZsyfe3t7mc+/MmTPo9Xo8PT3NcwbXrl3LN998Q1FRETqdjrVr13Lq1CnKysrMy5WoVComTJhAcHAwarWampoaevfuTe/evQkKCgIuJvV77rmH1157jaqqKk6cOMHKlSs5duwYZWVlNDY20tjYiJOTE3379sXX19f8/CcvL4/y8nLzaz169GDz5s289957FBUV4eDgwPLlyzly5AgXLlwwr4OkUChITk4mLCwMV1dXysrK6NmzJ71796ZXr17Y2dnR2tpKZmYmlZWV2NnZtfs5kZOTQ0NDQ7dZ2FWWZQoyzlJ5vsT8c9rh/bQ0NVk4smvj4uJCSEgIZ8+e7dD9dL8sYGNMRiPbVixB/ZMCvPU11RzdseUK72as42KgVqsZM2YMvr6+wMUL9YgRI5gyZQoVFRXU1NTg5eVFcnIyCxcuZNasWebf1Wq11NfX09jYiLOzM0ajkfT0dI4dO0ZQUBA9evRg5syZ+Pj4cOrUKUwmE2FhYcycORN3d3dOnTplPmYNDQ3U1taaB4QUFBQwceJEYmNjgYvDeM+ePWtOFm1dkyNHjmTAgAFs2rQJtVqNUqkkLy8Pd3d3DAYDVVVV+Pv7M3HixEtaDT8dVahUKnF1dSU7O5u8vDzOnz9vfl9zczN1dXW0tLRQVVWFs7Mz06ZNQ6PRAODo6EhjYyPZ2dk0NDSYn7HFxsZy0003sW7dOqqrq/H39+fEiRMkJiZy7tw583D8mTNn4uLiYj4ObSMe25KFj48PBQUF5Ofnk5WVhZeXFw4ODigUCrKzs9t9ZJ/JZGLVqlWkpKTg4eHRrtu2Vga9nu2rvsVO8785p7VVlRzfvf0yv9PWcQP6cyqVir59+3LixIl2f/Z7yX46bMtCu8g+cwJnN3fcvLyBixeZvkNH8NFzjzNkwnWof7yYXTELnPd+fn7mJQzs7OyIi4tDoVDw8ccfU1VVRUxMDAMHDiQ8PJwVK1awZs0a4uPj+dOf/kR0dDTLli0zb+fuu+/mz3/+M9999x2urq7cdNNNHDhwgK1bt+Lq6sqsWbNQKpXs2LGDnTt34uHhwZw5c1CpVPTp04cPP/wQV1dXbr/9djw8PJg0aRKrV68mLCyMhIQEkpKSaGpq4uOPP8bNzY0777yTgIAAfH19iYuLY+3atdTX1xMfH09gYCDe3t74+fnRp08fli9fzvbt2xkxYgSurq5IkkRcXJw58UiSxMiRI1m7di379+9n0KBB5u63jRs3cuTIEVJSUkhKSqKsrIzVq1czfPhwvLy88Pf3Z9q0afzwww8YjUbmz5+Pi4sLvXr1onfv3jQ0NFBUVERwcDDjxo1j0KBB6HQ6BgwYQFFREd9++y0DBgwgICAASZIICQm5ZLBHREQEsbGxLF++nEGDBuHr68utt97KqlWrCAwMZPTo0e16TmRmZnL27FkeffTRbtHKAsg8dQx3Lx9zUW1JkkgYPpL/vPQcSSkTsPvD0ZPWeZzaRhH+5z//oaGhocMKHkvWXicuKSlJPnLkCAcOHGDVqlX861//strnWufPn2fh/Q8z/+Gn2q3K+87Vyy+WSpkxx/yayWTig2f+ztz7HsInMPiqtnvqwB7qzhzhzddebZc427z99ttotdrLHpEndB0VFRV88cUXPP/88/To0eMP328wGHjzzTfNNwXWmLQyMzN54LkXufmRZ9qtyvuWZV/j6OLK8Mn/+44Y9Ho++sfjzP/r3/HyD7iq7R7YtA53bSVPPfFEu8R5NUwmEw899BDTpk275qLHkiSlyrL8i9I6onvQypmMxl99lidJCkxGW1iYThB+SZZl8vPzyczMZMKECZYOp1MZf+s7rZBsZLHJ36ZQKBg3bhxbtmzpsL9FJC0r5xcSSuX50kte07e20trchJu3t4WiEoRrt2vXLiIjIwkMDLTKVlZHCQgNp6K05JLXdK0t6Ft1uFrJGmLXYsiQIWRnZ1NdXd0h2xfPtKxcZJ8E9m/8niZtI45OzsiyTObJY8QkJl3b6sgd2CtsMpk69EGs
YJ2upLpIdXU1Gzdu5Pnnn+92FTCiE/pzZNsmmrVaHJyckGWZjONHie0/ELXG9guCu7q6EhwcTHp6OsnJye1+QyKSlpVT2zswavpsqs6X4hgZDUCztoHkqTOv7WTooBtbSZLMk4e7u5+O0OsOtFotFy5c+MO/12Aw8PnnnzN8+HBiYmI6KTrrYe/oxIgpM6gpv4BDeAQALU1aRlw3o0ucK22DnU6ePMnw4cPbfQyCSFpWTpIkohMHXPLzoLGTrnJrHV8L6rrrriMsLMzm++bbw44dO6iurub666/vNnMMnZ2d8f6dbuu2Cc2nT5/mpZde6hIX6SslSRK9Bgy65OehE6daMKL2JUkSvXr14rvvvqOpqand19gSSatb6fgLREREBBERER2+H2snyzIxMTH8v//3/0hJSbmq9aW6Ir1ez3fffUdycjIBAVc3Sk6wbpIkERoaSl1dHQ0NDe2etLrH7Z8gdDJJkoiOjiYsLIzNmzdbOhyrkZGRQXp6OpMnT7baqSvCtfP19cXBwYGCgoJ237ZoabUzo9FAs7bR6muotTY1WevE+i5DqVRy66238sILLzB+/PhuU/HhtzQ1NfHhhx8yf/58AgMDLR3OZTPo9TRrG5BNRkuH8rtam5ut5jttZ2dHYmIihw8fZujQoe26beu+stoYpVIJLU1sX/J5u9Zoq6urQ2Nvj/3VVr/4FfU11QyMi2237Qm/LiIigsDAQPbs2cO0adO65TMcuDiy8LvvvsPe3p5JkybZzDM+tVqNrq6KLV9+3G4tQxmoq63F0ckJdTteJ2oqyolOGdVu27sWkiQxaNAg/vnPf2I0Gtu1VS2SVjvy8PDgjX/9E307D/d+79//ZvDggSQNHNhu25QAb2+fdtue8Ovs7OwYPXo0e/bsYdy4cTg6Olo6pE4nyzIlJSVs3bqVv/zlLzbVLRgQEMA7r72CsZ3XKnvt1VeZOnaUudZle5DAqp4TRkVF0dTURGlpKSEhIe22XZG02pGdnR19+vRp123Ksoy3tzcRERH079evXbctdLy2O84VK1aQn59vw0vJX72WlhY++OADkpOTSUxMtKnWpkajMS9f015MJhOenp5ER0V16e+0Wq0mMTGRo0ePtmvSso02uiDYMG9vb0aMGMF3333XoesMWSOTycTatWtpaGhgzpw5NtXKEq5dYmIip0+fbtcpMCJpCUIHkySJWbNmcezYMXJzcy0dTqeRZZn9+/fz/fff8/DDD+Pq6mrpkIRO1qtXLy5cuEB9fX27bVMkLUHoBO7u7owdO5bvv/8eo9G6R6G1l8LCQj766CNuvfVWQkNDLR2O0MkkScLV1RWlUtmuFXJE0hKETpKSkkJ6ejplZWVduptQlmWqqqp45ZVXmDhxIqNHj7aZ0YJC+3J2dkatVlNZWdlu57w4kwShE7RVCfD19eXw4cOWDqdDNTY28tZbbxEWFsb8+fO7XUFc4X+cnJwIDg4mOzu73bYpkpYgdBIHBwdSUlLYsGFDl+wilGUZg8HAp59+SlNTEwsWLBADL7o5hUJBeHg4OTk57TYYQyQtQehEw4cPx2AwcOjQIUuH0q5kWaapqYn33nuP/Px8nnnmGby9vW1qeLvQMaKjo8nKyhJJSxBskUajYf78+SxdupTm5mZLh9NuTCYT//3vf8nOzuaJJ57Azc1NJCwBgNDQUGpra2loaGiX7YmkJQidbODAgTQ3N5OWlmbzAzLaWlifffYZp0+f5rHHHsPPz08kLMHM0dGRoKAgMjMz22V7ImkJQidzcXFh6NChbNu2zeZXeG5oaODNN9/kzJkzvPjiiwQFBYmEJVxCkiTi4uJIT09vl+2JpCUInUypVDJs2DBOnTpFXV2dpcO5KrIsU1dXx2uvvUZjYyNPPPEEHh4eImEJvyo6OprMzMx26VkQSUsQLCA6OprevXvz/fffWzqUK9a2+vCzzz6Ls7Mzzz33HP7+/iJhCb8pNjaWvLy8dnmOK5KWIFiAQqFg7ty5bNy4kZqaGkuHc9lkWebEiRO8+OKLxMXFcd9992Fvby8SlvC7PDw8UKvVlJeXX/O2RNISBAvp2bMnPXv2ZMeOHTYxIKOlpYXVq1fz8ssvs2DBAhYsWICTk5OlwxJsgEqlwtvbWyQtQbBlCoWCqVOnsmvXrnYtKNreZFmmrKyMf//732zatIl//OMfjB49WlS6EC6bUqnEw8ODysrKa96WSFqCYCGSJNGrVy90Oh25ublW2doymUxkZmby1FNP0dLSwnPPPUevXr1Ed6BwRZRKJW5ublRXV1/zeS4WgRQEC/L09GTUqFGsWLHCvNhgU1MTKpUKjUZjsbhkWaahoYGNGzeyevVqbrrpJsaNG4darRYJS7hiCoWCgIAACgsLMRqNqFRXn3pE0rJSp0+f5vjx48iyTHZ2NiqViuLiYtzd3UlOTsbT09PSIQrtQJIkxo0bx4oVKzhz5gwZGRkcOHCAO+64wyKrHLfdBZeUlPDWW2+h0+l48skniY+P7/RYuprDhw9z9uxZZFmmoKCAjRs3kp6ejo+PDyNGjOjS641JkoSHhwdpaWkYDAaRtLqi8vJyHnzwQVpaWjAYDPzwww8olUpSUlIYPny4pcMT2oEsy+j1ehobG3F2dmbmzJnU1NTg7OzMdddd16FJy2g0IkmS+Z82jY2NbN26lW+//ZapU6cyY8YMXFxcOiyO7iQvL48HHngAvV6PwWBg+/btKBQKZs2aRXJysqXD63AeHh40NjZe84R6kbSs1MCBA819wG3UajX9+/fHy8vLgpEJ7cVoNPLOO+/wxRdfkJWVhU6nAy52pXTkpGOj0ciOHTtwc3MjKSnJ/Nq5c+f48ssv0ev1PPXUU/Tp00d0Bbaj5ORk7OzsLvlsHR0dGTx4cLe4MXB3d0er1aLX669pO2IghpVydHRk5syZlyzt4OjoyPXXXy8uJF2EJEk4OTlRWFhoTlhwcWh5Y2Njh+xTlmVSU1P529/+xpNPPmne1yeffMLzzz9Pv379ePbZZ0XC6gCenp5Mnjz5kgUxXV1dmTJligWj6jwuLi60tLRc87I8oqVlpZRKJZMnT+azzz6jvr4eSZLo06ePRZ5zCB1DoVCwcOFC9Ho9zz77rPkOvLW1Fa1W2+77k2WZM2fOcOedd3L27FmKior4v//7P2pqavDx8eHll18mNDRUJKsOotFomDp1KitWrKC5uRlJkhg+fDihoaGWDq1TODk5odPpREurq5IkiaioKAYMGGD+ee7cuWJRvS5EkiTs7e25/fbbefDBB82frdFopKmpqd3WH4KLCSsnJ4cnnniC9PR0TCYT9fX1LF++nOTkZJ599lnCwsJEwupAbYVj2248FQoFN9xwQ7c55g4ODmg0Gmpra69pOyJpWbGQkBCGDBmCSqUiKCiI4cOHd5sTvDtxcnLioYce4tFHH8XZ2RmAurq6dl3duLS0lL/97W9s3rz5kgfhJSUl1NbWipuhTtJ2I6pQKIiOjjbflHYHCoUCT09PKioqrm077RSP0AGUSiUTJ07E09OTESNGiDvhLszFxYWHH36YO+6
4A41GQ319fbstW1JWVsZzzz3Hxo0bL+maUSgUyLLMxx9/fMmAH6HjqNVqJk2ahLOzM6NHjyYgIKDbfKclScLT05Oqqqpr2k63f6ZlMpkoKipql/IiHUGlUuHu7o6vry85OTlWe4LHxsbi4OBgtfFdifr6erKysiyy7ylTpnD06FFycnI4evQojo6O17S91tZW/vOf/7BkyRIkSUKhUJhb7r179yY6OpqwsDCys7MpLCz8w+3Z29sTFhZm1TUHDQYDhYWFVluI2MPDA3d3d7y9vTl37pylw/lVbV2Z7T3BXSStdtDS0sLLL79MXl6e1U7u69mzJ4WFhbzyyiuWDuUXTCYTp06dYunSpSQmJlo6nHZx4MABHnzwQYtNqPX390er1fL2229fMtLsarR1MU6ePNl8Q/HTfxcXF1NcXMzevXsva1vV1dW8+uqr5qHy1qihoYFnnnmGurq6a076HaVXr16kp6dbZdIyGAycOXOGzZs3ExYW1q7b9vDwoKio6Jq20e2TFlxszSxatIioqChLh/ILsizT3NyMRqOxyucOer2e1157zdJhtCtJkhgxYgT333+/RfYvyzItLS1oNJprSlqyLGMymdrtvGlpaeGjjz5ql211NHt7exYsWEBAQIClQ/kFWZZpamrCwcHhmm9KOkJTUxMvv/xyh9TCtLe3p7W19Zq2IZKWlZMkyWrvFoWOIUkSDg4O7bIda7zR6e7a5ud1RxqN5pqTlvWl+W7OZDKxf/9+3njjDTIyMiwdDkajkaNHj1JcXGzpUGxKcXExr732Gtu2bbvmoet6vZ5t27bR0NDQTtFdHJyxb98+q6ws39UYjUZ27drFW2+9RU5OjqXDwWAwcPjwYc6fP9/p+xZJq5OsW7eOF154ocOqFPyUJEnExsbi5OR0Sd/vr11cdu/eza5du37zwtNWqXvx4sU89NBDLF++/IqXu5ZlmfT0dMrKyq7sD+mi1qxZwz//+U+0Wi0ffPABn3766SXVLNp4e3sTExNDXl6e+XO4/fbbuffee3nuuefIy8u77H0aDAYOHTrUrhOOq6qqOHHiBACZmZk89thjPPnkk+Tn53eLRLZy5Ur+9a9/dcgk7p9TKBT06dMHOzu7P7z527p1KwcOHPjNz8BgMLBp0yb+/ve/884771zVoAaj0ciZM2eueUDE1VCr1aJ7sKPp9XoOHz6Mp6cnOTk5JCQkmD/0zMxMXFxcGDt2LAqFgpMnT5KdnY27uztxcXE0NjYSHh5OZmYmQUFBFBQUUFhYiJ+fH3V1dQwbNozKykokScLPz4/MzEwiIyPx8PC4pL5gQ0MDmzdvxmQyER0dTVxcHLm5uWzevBlZlmlsbGTYsGE4OTmRlpZGbm4uERER9O7dm0OHDlFeXs7TTz9NZWUlBoOBvLw8iouLKSsro0ePHsTFxbFp0yb0ej2RkZH07dsXgOPHj1NcXNyud/i2TqvV0tDQwOnTp8nIyCAqKorm5mZOnjzJgAEDyMnJwcnJicDAQPz9/c0XBpPJhJ2dHe+++y4rVqxg586dhIWFsWHDBurr6wkMDGTIkCHY2dlRXV3N0aNHqa+vp1+/fgQFBQH/K8FUX19P79698fX1BeDcuXN4eXlx5swZysvL8fLyYvjw4Tg4OFBTU8OxY8dobm7G0dGRmJgYTpw4QX19PUajEb1ez9KlS5kzZw5VVVWsX7+ee++912LHtzPodDqOHj2Kv78/hYWFxMbGYjQaOXHiBDk5Obi7uzN27FgAUlNTyc/Px8vLi6ioKPR6PcHBwWRmZhISEkJubi7FxcX4+vrS0NDA8OHDKSkpwdHREQ8PD7KysujVqxeenp54eHiYY6itrWXr1q3AxZG3MTExZGZmsmXLFuzt7amtrWX48OFoNBpOnz5NQUEB0dHRuLi4sG3bNvPz1paWFvNKEGVlZZw/f56IiAgiIyPZsmULRqOR6Oho4uPjMZlMHDt2jNLS0k5J1r9GtLQ6QXFxMa6ursTExJCfn4/JZOLMmTOsX78eLy8v/P39USgUnDhxgo0bN+Lj44O/vz8FBQUcPXqU1tZW9u/fT1lZGQcOHCAnJ4fly5dz7NgxiouLSUtLIz09naamJvbs2fOrCUKlUuHr62s+YfPz83FxccHLywtvb28CAgJQq9VkZWWxa9cuNBoNO3bsIDc3l4KCAuLj43F1dSUkJARHR0dOnjzJqlWr8PDwwNPTE6VSia+vLx4eHuzatYvMzEyKi4tZv349arWa7OxsCxx566RUKvH29mbr1q307dsXSZJobGxk48aNGI1Gjh8//ptdQCUlJTz88MPs2LGD/v37AxdbZF5eXhw/fpzU1FRMJhMrV66kpKSEwMBA82TjtiK3p06dorS0lJMnT2IwGDAYDGzZsgWtVms+HzIzM82jAS9cuMCXX35pnti5cuVKGhoaqKioQKvV0tjYSFNTE9HR0cTExFBSUtI5B9KCCgsL8fLyIjQ0lKKiImRZ5vjx42zatMn8/ZUkiSNHjrB9+3Z8fX3NN5WnTp2iubmZvXv3UllZyb59+8jLy+O7777j4MGDVFRUmM+BxsZGdu3aRVNT0y9iUKlU+Pn54ejoyObNmykpKcHV1RUvLy98fHwICAhApVJx5swZ9u/fj1qtZvPmzezevZvQ0FCCg4PNccmyzJEjR/j+++/x9vbGw8PDvH1XV1e2b99Obm4uubm5bNmyBZVKRW5urgWO/MWk9Ws9E1dCtLR+hyzLpKWl0drayoULFygrK2P8+PGkpaUxaNAgxowZYx4+fOrUKZKTkxkxYgSSJHHw4MFLtgPg7OxMQkICO3fuJDw8/JKuOlmWf7VLQJZlqqur2bZtG/X19VRXVzNixAgiIyOJiYkBoF+/fua6cocOHaKgoAD433BnSZI4fvw4r776Kk8//TSSJDF69GhSUlKQJInKykq2b99OfX09FRUV5gtqZGQkEyZMEM+zfiY8PJzq6mr69u3L4cOHL/l/P31+9fM5awEBAbz00kscPnyYrVu3EhUVxe7duykrK6O8vBx/f39za+2xxx7Dw8MDSZJobm6mpKSErKws7r//flxdXdmyZQvZ2dlERkZSWVmJn58fmzZtIjc3l/LychQKBePHjwcgISGByZMnU11djVarZdKkSeTn55sTmyzLnDp16porFdgCWZY5ceIERqORiooK6uvrGT16NCdPnmTkyJGXVJ05duwY48aNY8CAAUiSRHl5+SXbAcw3tLt27cLPz4+mpibz7//8O/3T1ysqKti2bRt1dXXU1taSkpJCQkICUVFRuLq6kpCQgMlk4vTp0xw6dMjcndzWA3PhwgVeeOEFhg4dyk033YQkSYwfP57Ro0cjSRLnz583XzOqqqoYOnQodXV19OrVi/Hjx1/zsPOrpVQqr3nSvGhp/Q6dTkdhYSHOzs6oVCpKSkpoamrCw8OD7Oxsc9NclmU8PDzIzMxEp9MhyzJ2dnbmJNN299o2mkuhUCBJErIso1arqaqqorKy8pLnRo6OjualqQsKCnBzc+Phhx8mKCjI/EVo24csy0iShI+PD8nJyb
z44ou88cYb9O7dm9DQUNLS0ujZsydDhw7FZDKhUCguWYG2tLQUpVLJI488Qnh4OLIs4+rqSllZGbW1tSJp/Yybmxu33367uRWkUCgwGAxUV1dfcger0Wioq6szJzKFQoG9vT2urq7U1tZSWVlJeXk5jzzyCP379zd/jhqNxvxsqe2zDggI4M4772Tjxo1oNBqKiopobW0lMzMTPz8/tFotWVlZPPTQQ4waNcocgyRJqNVq874NBgPl5eWUlJTQ3NyMi4sLzs7O2Nvb4+DgQHBwcCceyc7X0tJCUVERzs7OaDQa8vLyaGlpwdPT85LvryzLeHl5cfbs2Uu+0zU1NVRVVZkHMfz0O91WYUSj0VBRUUFFRcUlRQscHBzM3+m8vDz8/Px48MEH8ff3N7/np0uXtH2nU1JS+Oc//8kbb7zB2LFjKSwsxGQyMXPmTPON6U+/07Ism//Gv//97wQHB//iO11aWtqJR719iaT1O9rmR82bN4+bbrqJQYMGcf78eZKTk3F0dOTxxx/n5ZdfprW1lbFjxyJJEo8++iivvfYaQUFBNDU18dlnn+Hv749Go8Hd3R0HBwe8vb1xc3PDwcGB3r17U1hYyLfffou/v795Rc9BgwZx4sQJ3n33XQIDA6mrq+O9994jKCjIPBw6Pj6ezMxMXnzxRUpKSujfvz9OTk48+eSTPPnkkxQUFDB48GC8vb154YUXUKlUBAcHmy9UbUJDQzGZTLz55pv4+fnh5OREdHQ0Tk5OvP766wQEBLTLEOyuwM3NzXzsNBoNnp6eODs7ExwczBtvvIGHh4d5ikJERAQajYYXXnjBfKH561//yurVq7nxxhsJCAggKCiIV155BTs7O9zc3FCpVMyZM4e1a9fy0EMPsW/fPhQKBYGBgcTGxpKSksLhw4cJCwtjwIABaDQakpKS8PT0JD4+nldffRWtVmte2botRrhY43Dq1Kl88cUXnDx5El9fX1QqFfPnz2flypXs3buX6667zjIHtpM0Njbi5ubG/Pnzufnmm0lMTOT8+fOMGzcOWZZ59NFHef311zEYDEyePBmtVssjjzzCO++8Q0REBBUVFXz11Vf4+/ujVqsv+U67u7uj0WhITEwkPT2d1atX4+fnZ552MGTIEA4ePMgHH3xAWFgYFy5c4OOPPyYkJAR7e3vgYq/J8ePH+ec//0lFRQVDhgwB4NFHH+Xpp58GICUlhbfeeovDhw8zdOhQ4OJ52XbeSZJEz549aWxs5N133yUoKAhHR0d69+6NLMu8/fbbBAYGtnu1i84iWftIoaSkJPnIkSMcOHCAVatW8a9//atd5540NTXx2GOPkZKSYpWTi61d2+Tiv//97yQmJnaJMk6bN29mxYoVFptcbK3aJhcvWrTIqiti1NTU8Pe//5358+db5eRia9c2ufj1118nPDy8XbedlpbGv/71L7788ss/fK8kSamyLP/iRBMtLUEQBMFmiKQlCIIg2AyRtARBEASbIYa8c3GYctscLOHKGAyGa16J1NrIskxVVVWnldFqGzVo7XQ6ncUmpV4pk8lEbm4u9fX1lg7F5rS0tFj1cev2SUulUjFo0CCOHj1Kfn6+pcP5hVOnTuHm5kZoaKilQ/lNISEhl1TwsHWBgYH4+Piwe/fuDt/XiRMnqK2tZdiwYajV6sv+vbbqBs7OzsTExHRa0ouMjMTb27tT9nW11Go1gwYNIi0tzWpvBrRaLWfOnCE+Pt4qi+fGxMTg4uJi6TB+VbdPWnZ2dsybN4/Zs2dbOpRfkGWZZ599lnHjxl0y98YadZUFIOFiWZ2OXG5FlmX0ej2bN2+mtLSUl19+md69e1/RqFhZljl06BAffPABCxYsICoqqlOOvyRJ5uHZ1srR0ZGFCxe228rPHaG8vJzXXnuNe+65p91H6LUXa11dotsnLWv+Era2ttLQ0EDPnj0vmVcldCyVStWhx7u5uZkvvviC48eP88orrxAWFnZVCWfs2LHIssybb77J008/TXR0dJe5cbgW1vydbmM0GrGzs0OhUIjv9hUSAzGsWFNTE7IsW20zXbgysiyj1Wp59913yc7O5umnn77qhAUXL84pKSmMHz+eN954g4qKim5Rob0rsLe3R6VS0djYKD6zKySSlhVraGhArVZjZ2dn6VCEayTLMufPnzcvcfPSSy8RHh5+zS0jhULB3Llz6du3L6+88kqXGxTTVanVajw9Pblw4YKlQ7E5ImlZMa1Wi0ajEUmrCygpKeGll17C09OThx9+GEdHx3bpymvrCluwYAGurq68++67YikZGyBJEt7e3pfUJhQuj0haVqyxsVEkLRsnyzKnT5/m6aefZujQofzlL3/Bzc2t3Z89OTk58Ze//IXGxka+/PLLa16zSOh4Pj4+l1SOFy6PSFpWTCQt22YymThw4ACvv/46M2bMYN68eR1aeNjDw4MHH3yQU6dOsXLlSvR6fYftS7h2ImldHZG0rJQsy9TV1eHk5CSSlg1qWxb93Xff5Y477mDq1KnmCv4dRZIkAgICeOyxx9i4caN55VrBOrV1D4qBGFdGJC0rJcsy5eXleHt7o1CIj8mWtLS0sHz5cr799lueeuophg8f3qk3HuHh4TzyyCMsWbLkksVIBevi6+tLVVWVqMRzhcTV0EqZTCbq6upwd3cXc29sRNuQ9s8++4ydO3fy9NNPExcX1+mfnyRJxMXFsWjRIt5//33Onj0r7uatUNuzzZqaGkuHYlNE0rJSsixTU1NjXsBPsH51dXW89tpr5Ofn849//KPTqlT8GoVCwfDhw7nlllt49dVXyc7OFonLyqhUKvz8/MyrIAuXRyQtK2UymaitrcXDw8PSoQh/oG1585dffhmVSsUTTzxhFYsPKhQKUlJSGDlyJG+++aYYXm2FAgICRNK6QiJpWam2gRgiaVk3WZYpLi7m//7v/wgKCuKBBx6wqtaxWq1m7ty5REVF8eqrr1JTUyNaXFbE399fTDC+QiJpWSmdTkdjY6NVXQCFSxmNRo4dO8bTTz/NpEmTuPvuu62yYrejoyP33nsv9vb2vP322zQ1NVk6JOFHQUFBlJaWWjoMmyKSlpWqqKjAwcHBKi+CwsUW1v79+3n99deZPXs2119/PXZ2dlY7aEatVnPfffdRVVXF119/LUasWQk/Pz/KyspE6/cKiKRlpUpKSggMDBTD3a1QS0sL69at4/333+fBBx/kuuuus/rPSZIkfH19+b//+z9SU1P55ptv0Ol0lg6r2wsKCqK6ulpUMLkC1/xNkyRJKUnScUmS1v34s6ckSVskScr68d8eP3nvE5IkZUuSlCFJ0sRr3XdXVlpaahUP84VL6fV6vv76a1avXs0zzzzDwIEDUSqVVtvC+ilJkvDw8OCRRx5h+/bt7NixQ9zhW1hbDUpRL/Lytcft4QPA2Z/8/DiwTZblKGDbjz8jSVJvYD4QB0wC3pck6fJXvetmSktLCQwMtHQYwo9kWaaxsZF3332XU6dO8dRTTxEbG2vpsK6YJElERERw77338sUXX3Dy5EnRVWhBKpUKBwcHGhsbLR2Kz
bimpCVJUjBwHfDpT16eAfz3x//+LzDzJ69/K8tyqyzLeUA2MOha9t+VyLJMfX09dXV1mEwmSkpKCAoKsnRYAv+rTvLSSy9RWVnZbsuKWIpCoaB///7cf//9vPrqq5w6dUq0uCxEpVLh6OhIfX09LS0tVFZWWvWKy9bgWouhvQU8Cvx0lUI/WZbPA8iyfF6SJN8fXw8CflpTpvjH14QfrV69mm+//RYnJyfOnTuHLMuUlJQQHh5OUlISarXa0iF2eW3rXvn5+aFUKpFlmcrKSl544QWCgoK46667cHJystmE1UaSJAYOHMjUqVN59913efnll/H29qa6upr9+/eTkpIiBgF1kLZ5fSdOnCA3N5cjR45w9uxZVCoVvr6+/Otf/8Lb29vSYVqtq05akiRNBcplWU6VJGn05fzKr7z2q7d3kiTdBdwF0KNHj6sN0aa0rYt08OBBc1mXs2fPotFouO6661iyZImFI+weqqurWbBgAdOmTeP2228nKyuLt99+m5EjR3LTTTehUqlsPmG1UalU3HDDDej1ep5++mkWLlzISy+9xNmzZ1m8eDEDBw7sMn+rtcnMzOT222+nvr4evV6PLMsoFArmz58vCmT/gWtpaQ0HpkuSNAWwB1wlSfoaKJMkKeDHVlYA0FZ7vxgI+cnvBwO/OkFBluWPgY8BkpKSuk2/RZ8+ffDw8DAnLaPRiMFg4Pbbb+/wCuHCxeO9dOlSDh48yLlz56ivrycjI4MpU6Ywa9asLnkxUalUzJ8/n4yMDG699VYKCgowGo3s3r2bAQMGoFSKx87tTZIk+vTpw+DBg1m/fr35dVmWiYmJES3cP3DVz7RkWX5CluVgWZbDuDjAYrssyzcDa4HbfnzbbcCaH/97LTBfkiSNJEnhQBRw+Koj74IiIiJwc3Mz/6xQKJg4cSKDBolHfx1NlmWys7P58ssvaWxspKioiFdffZWBAwdy/fXXd9muWYPBwMmTJ1m9ejW5ubnodDqMRiOff/45Wq3W0uF1WT4+Ptxwww2XVLxxdXUlMjJS3Cj8gY6YXPIyMF6SpCxg/I8/I8tyGrAMSAc2AvfJsiwW+/kJOzu7S7pk3N3dufHGGy9JZELHMJlMfP311xw5csQ8KKGuro4PP/yQ1NTULjnCTpZlcnNzuf/++zl37twla2/l5eWxefNmC0bXtSkUCqZPn06fPn3MrwUHB9O7d2/RJfsH2iVpybK8U5blqT/+d5Usy2NlWY768d/VP3nfi7IsR8iyHCPL8ob22HdXIkkSycnJ5q7AhIQExo4da/UTV22dLMtkZWXxxRdf/CI5FRQU8Nlnn3XZ0kd+fn4888wzjBo1Co1GY369tbWVZcuW0dzcbMHoujYPDw/+/Oc/m7/vHh4ehIeHWzgq6yeuhlZm0KBBSJKEg4MDd999Nz4+PpYOqcvTarW88sor5mrbCoWCkJAQpk2bxn//+19ef/31LvmcQZIk3NzcmDx5Mt999x3vv/8+w4cPR61WI8syqampHDp0SAyH70DTpk1jwIABKBQKevXqhaurq6VDsnrd/um+tX0h3dzciIyMxN7enokTLxYNsYYYr6TLwhrivRJ79+5l8+bNKBQKIiIiuOGGGxg1ahSDBg265CLS3n+XNR1TV1dXFi5cyKhRo/j666/5/PPPKS4uZvv27QwbNswmBqFcabeaNZynrq6u3HLLLaSlpTF06FDAOuJqY41dld0+aen1elauXMmpU6es4gGoTqdDpVLh4+PDK6+8YtGTxmAwEBMTw6233npFx6a+vp7FixdTXFxs9V2ber2e7du3YzKZGDNmDD169ECn07Fr1y527drV7vvT6XSMHTuWCRMmXNHvlZSU8PXXX9PQ0NDh54ROp2P06NEcOXLEvE9rbmm2tLSwcOHCK34elJaWxtKlSy2eJKqqqnB3d+fgwYPk5uZaNBa4mDR1Oh2PP/44Xl5elg7nF7p90jIYDOzduxcPDw+rKJskyzLXX389jo6OODs7WzSWnJwcDh48yM0333xFSauxsZH9+/cTFxdn9YNI9Ho9U6dOxdPTs1NuWo4cOcKZM2cYP378FV1gKyoqOHr0KMOGDcPe3r4DI7woLCyMgQMHUlFRgYuLC46Ojh2+z6u1fv16CgsL6d279xX9Xl5eHufOnWPkyJEWvWENDAzE2dmZHj16WMWNc2trKytXrqS+vl4kLWulUCgYMGAAUVFRlg4FWZYxmUxIkmTxVoqLiwtHjx69qt91dHRk8ODB+Pr6/vGbu5GGhoarnnPn6enJ8OHDO7XV09YKscZuojZnzpy56t8NDg5mxIgRFu/+HDZsmNUUXm5qamLPnj2WDuM3WXffjY0wGo2kpaWxdetWqqqqrmlbkiRRVVVFcXFxu3Zb6PV6srOzu+wouN/T3NxMRkYGer2+3bYpyzKFhYVUVFS02zatkSRJv3ohbWxsJCsrq13r5MmyTEFBwTV/hzpL25SBjRs3UlJSck3bUqlUNDQ0kJ2d3a7H1GQykZeXZy5Y0BWIltbv0Gq1LF26lEGDBuHr68vy5cuZPn06ISEhv3ivwWBg+/btODs74+npyalTp1i+fDn29vYMHjyYkSNHXjKk+PekpaVRUVFBUFBQu3UXNDU1sXXrViZNmsT58+fZuXMnCQkJTJw4sdO7JHbv3s3WrVtxcHAgOTmZoUOH/mEMsixz5MgR1q5di4ODA8OGDTOPdPsjNTU1bNiwgYULF7Zrd+XBgwfx9/fH09OTffv2sXfvXgYOHEhKSkqnH9Nt27axc+dOnJycGDNmDAMHDvzDlrrJZGL//v1s3LgRR0dHRowYwdChQy+r1VFRUcGmTZtYsGBBu3Vjm0wm9u7dS0REBK6uruzdu5cDBw4wbNgwkpOTO/WYVlRUsHz5cmbMmIHBYGDNmjXccsstl0wGhos3rIcOHcJkMhEUFITJZGLXrl1s27YNJycnRo4cyeDBgy+rdV1SUsKuXbu47bbb2q0CTlt1kz59+uDk5MSuXbs4evQoycnJDBs2zOK9OVdDJK3fodPpyM7OprGxkcGDB7Nz505Gjx5NRUUFjo6O2NnZUV1dja+vLwkJCRw/ftz8u6WlpYSFhZGSksLixYsJDw/Hz8+PoqIiFAoFPj4+eHp6AhcfxFZUVKBUKgkLCzNvo6Wlhbq6OnQ6HSEhISiVSlpbW6mrq8PZ2ZmCggJkWcbb2xtvb28kSaKsrAy9Xk99fT3+/v64urpy/vx5qqqqMBgMNDQ0sGbNGubOncsPP/xAQEAA/fr169TjmpOTw6BBgwgPD+fDDz8kPj4eBwcHSkpKMBgM5meLLS0teHl5IUkStbW1nD59mtjYWAYOHMg333xDREQE7u7u5gEffn5+uLu7I8uy+Zja2dmZbxbaqud7eHhQVVVFUFAQCoWClpYW6uvrcXJyoqCgAEmSzJ+PJEmcP38eo9FIfX09AQEB6HQ6amtrzXOYqqur2bhxIzfccAPff/89/v7+l0wa7QwZGRmMGjUKX19fPvroI+Lj41EqlZSUlGAymfD39zfHr1Ao0Gq1NDY2kpubS//+
/YmJiWH58uXExMSg0WgoKSlBpVLh5+eHm5sbsixTUVFBVVXVJTcKRqORkpISPD09qaysNB/T5uZmGhsbcXBwoLCw0LwIpYeHB3V1dTQ0NKDVanFzc8PX15fy8nLq6urMiyGWlZWxfft2Zs2aZT6mvXr1+tW/vSMGUmi1WnJzc9m+fTve3t7s37/fPHjGxcWF6upq3N3diYqKIjEx0Xzxb5vzN2LECAICAli5ciW9e/c2F2JWqVTm72V9fT0XLlxAluVLaqwaDAbzMa2qqjIvBtvU1ER9fT21tbUYjUbc3d3x9/dHoVBQXV1t/kw9PDzw8fGhrKzMfP0AKCoqYv/+/UybNo1169YREBBAREREux+7jiaS1h8ICAigoqKCzMxM81Iha9eupX///vj6+rJs2TIWLVr0iwfVsixz9OhR6urqUCqVuLi4UFtby4EDB2hubkaj0TB37ly0Wi3ffvstarUab29v88KPBoOB9evXI0kSp0+f5r777sPPz4/c3FwOHjzIxIkTOXjwIC0tLRiNRubMmYO3tzdffvklOp0OLy8vRowYQVFREWvXrjUnrwsXLuDs7ExiYiJFRUXk5uZ2etICcHZ2xtfX1zxS6fDhw6SmpiJJEj169CA2NpbTp08zf/581Go1P/zwA2fPnqW1tZXS0lI0Gg1OTk5UVlZy4MABmpqacHR0ZN68edTU1PDtt9/i6OiIt7c3SUlJyLLMwYMHKSkpMc+/WrRoET4+PmRkZHDq1ClSUlLMx1SSJObMmYOHhweffPIJGo0GNzc3+vfvz/r16/Hx8aGwsJDw8HBKS0vx9PSkb9++ZGVlUVBQ0OlJCy5eTP38/DAajej1enbs2MGZM2cwmUwEBwdTUlLCPffcg7u7O8eOHTNfMPfu3UtWVhZOTk7Y29tTVlZmPqaurq7MmzePCxcusGzZMlxcXPDx8aFv377IssyePXuoqKhgypQpfPnll9x99914eXlx5swZ88X74MGDNDc3mwv07t69m507dxIdHU18fDySJPHhhx+aj2nv3r0pKSnB19eXPn36cObMGYqLi38zaXXUc6Dw8HAyMzNRKBR4enpy9OhRc0t25cqVTJs2jeDg4F/93Z07d+Lm5oa7uztqtZqCggL2799PU1MTnp6eTJo0iZUrV5pvzNzd3YGL140dO3ZQW1vLhAkT+Oqrr7j77rvx8PAwV4U3mUw0NTWh1WqZMWMGPXv2ZPPmzaSmphIREUFCQgJ6vZ7PPvsMb29vCgsLGTBggHm5o/j4eI4fP8758+dF0uqKFAoF/fr1o6Cg4IorzkdHRzNixAi2bt1KXV0dGo2GqqoqsrKy0Ov1TJo0iezsbDw9Pbnxxhsv6T7csWMH3t7ePProowAcOHCA3NxcEhMT8fLyQqVSUVdXR2ZmJvX19YwcORJvb29kWWbKlCkkJiYiyzKrVq1iwIABJCUlmROaXq9n+fLlKJVKi63d89FHH+Hj40P//v2xt7cnLS2NmTNn4uLiwscff8zQoUPR6XRs376dpqYmsrOz6dWrFzqdjv79+7N161YaGhrMa13l5ORgMpmYOnUqZ86cITg4mDlz5mBnZ0dpaSnp6emcOHGCF154AV9fXyIjIzl48CDZ2dnExsbi5+eHJEnU1NSQlZVFU1MTKSkpeHh4IMsyM2bMICYmhpMnT+Lp6cldd93F559/Dlxskbe2trJ06VIUCoXFjuk777yDl5cXw4cPR6FQcObMGebNm4dSqeT9998nKiqKgwcPkpWVhaenJ7169SItLY0+ffoQHR3Ntm3baGpqQpZlysrKyMnJQaFQMGPGDE6cOEFUVBTTp09HpVKRl5fHqVOnOHHiBM8//zze3t6Ehoaatx8WFkZwcLD5GW1WVhatra3m1sqgQYOYO3cuKpWKQ4cO4e/vz8KFC/n004tL8+n1elpaWvjmm2+QJMkix1ShUBAfH2++CbwSCQkJBAUFsX37dlpaWswtrdzcXDQaDfHx8Wi1Wu68805cXFzM596xY8c4ceIE/+///T88PT0JCgoyH1M/Pz9CQkI4e/Ys6enplJeXk5CQQM+ePZFlmeHDhzN9+nQUCgV79uwhNDSUefPm8cknnwAXj6lWq+Wbb75BoVBcUrbLltheh6YFDBo0iAceeMDcz6xWq6mtraWmpuaSFUfVajVarRZZlpEkCX9/fxISEnB0dKS8vJzDhw8TEhLCk08+SWhoKLIso1KpaG5u/sUJ1L9/f3r16sWePXuIjIxk69at2Nvbk5qaio+PD2lpadjZ2fHMM88QExNjLj+kUqnw8vJCoVAgSRIqlYqWlhZ0Oh0tLS34+PiY5wpVVFRYbHTfXXfdxVtvvcXChQvRaDRIkkRzczMtLS0oFAq8vLzQ6XQcOnSImpoaDAYDLi4uBAYGkpCQgJ2dnbmVFRMTw6OPPkpQUBCyLGNnZ4dWq73kmIaGhjJz5ky+//57mpqaiIyMZMuWLTg4OHDixAm8vb05deoUbm5uPP3000RERJiPqZ2dHZ6enigUCtRqNa2treh0OnNBWV9fX/MFuaKiwmJrIf31r3/lrbfe4uabb8bOzs58TJubm1Gr1QQEBLBlyxY0Gg05OTnm7umgoCD69etn7lbds2cP/fr14+9//zv+/v7mY9rY2HhJmauIiAimTJnC+vXraW1tJSwszHxM0/8/e/cdHlWV/3H8fadlJpn0nkAqoXdClSoIKAI2ENF117JFd91dy1q24Lp2V93V/a29UxSkdxTpvZdQEkIIIb33ZOr9/RFnliotZBLyfT0PD2Qyc+c7h5n7mXPuuecePkxQUBC7d+8mIiKCP//5z8THx+N0OtFqtWecYqDX67FYLNhsNvdEodDQUCwWC2PHjqWoqMhd6/lcy/Osbr75Zu677z40Gg06nY6amhqqqqooLXWvToeXl5f7C5RL27Zt6d27N1arldLSUtavX8/gwYN54oknCAkJQaPRYLPZ3EN3Lu3bt2fUqFGsWLECu91OdHQ03333HUajkWPHjlFTU0NeXh7Tpk1jwIAB7ve4Vqt1f+6hYV9UX19/TptarVZuvvlm97lhLZH0tC7CtfN3TUFXFIUuXbrwzTffYDabz9gxdu3alU8++YTc3FwCAwNZvHixe0eYkJCAXq9nzpw5pKWluWeydenShUOHDvHSSy8RGBjIL3/5S/cxlUGDBjF9+nRCQkIwGo0MHDiQr776isjISCwWC0uXLuWDDz7A4XC4h0hOP1jtqnXmzJns3bvXHQYxMTG8/PLLhIWFeWRoUKPRnNGuBoOBAQMGMH/+fKxWK0OGDMHHx8d9LMVsNrsnpcydO5dt27YRGBhITEwMdXV1LFy4kIMHD7q/jffo0YOjR4/ywgsvEBISwoQJE/Dz82Pw4MEsWrSIbdu2ERcX554kM3PmTMLCwtDr9Xz//fdkZ2djs9nO26axsbE4HA5ee+0193siLCyM0NBQXnnlFaKioujatavH29RoNDJo0CBmzJiB3W5n5MiRBAYGYjKZ6NGjB6dOncLX1xeNRsPMmTNZs2YNQUFBREVFERcXx8qVK/H
z83O/v13HEZ9//nnCw8MZM2YMfn5+DBs2jLlz57J7926Cg4MxGo0kJyczZ84cQkJC3L2l48ePu9v09D8ACQkJLF++nNdff919e3R0NGazmVdffZXY2NgLDg3CtRsePL09tVotbdu2ZcWKFaSnp7u/mELDZ/hf//oXxcXFTJ06FY1Gw+eff05oaCghISGEhYURHx/P8uXL8fPzcx9jbNu2Lf/617/w9vbm3nvvRVEUAgICGD58OLNnz+bgwYPu/7OePXuyaNEiEhMTWbt2Lf/+97/RarXukDq9Vmj4QvHdd9/xz3/+0317TEwMGo2G1157jXbt2pGQkHBN2u1aUzx9NvjFJCcnqzt37mTr1q3Mnz+f119/vVFnEdXW1vLMM89w4403nnOelqqq2Gw2dDodGo0Gi8Xivgig67iHoijunoLT6aSurg6tVuueNAEN3yQNBgNOp9N9m+tbu6Io2Gw29wfaaDTidDrdvTCr1eoextPr9VitVvcwYn19PdCwU3XVaLVa0ev17jez0+nEarW6v+Hq9XocDod75Q1XDeezd+9edu3axZtvvnlZ57Hk5OTwwgsvMGXKlPP25KxWKxqN5ozzUlxto6oqBoPB/ZpdOwfX3xaLxd2DvNQ29fLywm63YzAY3MGm1WrdbWWz2dyTCy7Wpq5jcHa73X0frVbrfr6LtemyZcswGo089thjlzVza+/evbz//vs89NBD5z1P60Jt6no9rveM3W5Hp9Nhs9nw8vJyf9s/X5ue/v4GzmlTh8Phbj9XgNtstjPaVFVV9//P6e8/1/MBF21TvV7v7jmez3//+18mTJjA2LFjLyvAlixZwpo1a7j//vvPeX87nU73Z871eXd9/lRVdb8W13uirq7ujPeaq01dtTscDiwWiztcXO3nahvXCeOnt6nrvqf/n7l6+k6n091Ors/KhdpUp9Od8V52va4LtWltbS2vvfYab731VqMv4Hvo0CFef/11vvrqq4veV1GU3aqqJp99u/S0foKrF+By+jGn860QoNFoztihnP1B0Gq1532cwWA443lO35m5ntP1ZjSZTD9Zw9nT6jUazTkrKGg0Go+eTHm+aeoajeaM1wbntt/5brvUNj19KMrF1S6nfwm6WJu6djhnt/PZz9fULtSmZ78e12t1/X2+ui+3Tc9320+16dlh3Rzb1PUFyMX1Xjn7PQoN9Z/+Gs9Xtys4LnbbpbTp+Wo4+3NxoTY9320tjRzTaiGae4+4OfipNpL2u7iW2kbNYRUJ0XQktFoI+WBe3E+1kbTfxbXUNmqpYSuujAwP/sjpdLbYKaDXytVcrde1hqK06ZlcxyuvhKqqOBwOadOzXE1oyfv0XA6Ho1l/EZDQomE8eOXKlT85rba5c50L4uXl1WhLs7hORL5crgO+ixYtahargzscDvdBbU87duwYw4YNu+zHabVaSktLmTt3rscXd4Xm1aZpaWlXtOyRXq8nIyODb775ptE+M1arFavVire3d4tcIgkaJuvk5+c32/pbfWgZDAamTp3K8ePHPV3KVampqWHp0qWkpKQwZswYbrjhhkbZobiWj7ocAQEBPPDAAxQUFFz18zeGxYsXExsbS48ePTxdCr1796Zbt26X/X/Ttm1bHn74YSorK69RZZdOVVU+//xzxowZ414lxpN69+59RacZdO/enQceeOCcc6WuRHFxMXPnzqW4uJg77riDjh07Ntud/qUYOHCgx843vJhWH1o6nY7k5GSSk8+ZWdmiqKrKHXfcwb59+5g+fTo7duzgrrvuomvXru4z7puKyWRi+PDhTfZ8P0VVVVJTU+nVqxfjx4/3dDlXLDAwkJtvvtnTZQANQ5zfffcdo0eP9sg5aY0lKiqK22+//Yofb7fbyc3NZd26daxdu5abbrqJe+65h+jo6J+coi+uTqsPrevF6Scm9u/fn1WrVjFjxgyMRiOjR49mxIgRLX6qqxDNgWvh5SVLlrB3717atm3Lyy+/TPv27SWomoCE1nXIZDIxceJEhg4dyvbt21mwYAGLFi3igQcecK+oLh8uIS6daxJMRUUFc+bMYd26dfTr14+nn36amJiYnzyhXDQuCa3rlKIoBAYGMnbsWG666SbWr1/Phx9+iLe3N7fddht9+vTB399fPmhCXITD4SAzM5OVK1eyefNmevTowZtvvkl0dPQFL5Iprh0JreuY68Ok0+m48cYb6dOnj3s5rG+++YZ7772XwYMHuydayIdPiAauKd+FhYXMmDGDAwcO0KtXL5577jk6d+7c5Bf5FP8jodVKuI55jR07liFDhnDo0CE+/fRTvv32W+666y569uxJQECABJdo9ex2OydOnGDNmjV8//33DB48mJdeeomIiIhmcbpBayeh1cooioLZbKZfv350796dDRs2sHDhQubPn8+oUaMYO3asTNgQrZLT6eTkyZMsXLiQI0eO0K5dO/eK6KL5kNBqpRRFwWQyMXr0aAYMGMDevXuZP38+ixcv5v7776dXr15NPlVeiKamqip2u53CwkLmzZvH+vXrGTlyJH/5y1+IioqSnlUzJKHVyimK4r4u0tChQ9m0aROzZ89m9uzZjB07lsGDBxMYGCjhJa47drud48ePs3TpUvbv30+PHj344IMP3CfVynu+eZLQEu4Pp6IoDB06lF69erFjxw6WLl3KvHnz+NnPfsbgwYPd1w0ToqVSVRVVVcnPz+ezzz4jNTWVwYMHM23aNOLj42WCRQsgoSXO4evry4033sjgwYM5evQoH330EXPmzOH2228nOTmZkJAQCS/R4thsNtLS0li1ahVbtmxh9OjR/PKXvyQkJETCqgWR0BLn5bqInOuclB07drB48WIWLlzIqFGjmDBhgkzYEC2Cqqqkp6fz7bffcvLkSbp168Y///lP4uLi5MtXCyShJS7KZDIxdOhQ+vTpw+HDh5k3bx5Lly5l6tSp9O3bl4CAgBa9OKi4PlmtVrKysli0aBFbtmzh1ltv5ec//znh4eFXtCq8aB7kf05cEtdU+b59+9KnTx+2b9/Ot99+y8KFCxk6dCijR48mKChIvrkKj7Pb7Rw9epTFixeTnp5Oz549+fjjjwkMDARkgkVLJ6ElLouiKGi1WgYNGkT37t3Zu3cvy5Ytc/e8hg4ditlslh2DaFKutQFPn2Bx0003ce+999KmTRs5ZnUdkdASV8xsNjN48GAGDBhAWloan3zyCQsWLOCWW25h4MCBhIeHy7ChuOYsFgtHjhxh6dKlHDhwgHHjxvG73/1Ohq2vUxJa4qooioJer6dLly68+uqr7Nu3j0WLFrF48WLGjBnDxIkTMZlM7vsK0RhcawOmpqYyc+ZMcnJyGDRoEPfffz9t27aV99p1TEJLNBqj0Uj//v3p0aMHGRkZfP311yxZsoTJkyczaNAggoODZZhGXLX6+nqOHTvG8uXL2b17N3feeSe/+93vZOp6KyGhJRqVa3moLl268Pe//519+/Yxd+5cVqxYQb9+/Zg4cWKTXMa7vLyc8vJy978LCwvJzMxEq9USGhqK0Wi85jVcbwoLC6mtrcXpdF
JdXU1ubi5msxkvLy9CQkKu+ZJHdrudAwcOsGjRInJycujbty8ffvihe4KFaB0ktMQ1o9PpSE5OpkuXLhw8eJAVK1bw29/+lkmTJjFy5Ej8/f3Pe8zBNfRzNUM8O3fu5M9//jMajYaCggJWrFjBhx9+SEJCAq+++irx8fFXvO3W6ttvv+Xzzz9Ho9GQnZ1NamoqBoOBgQMH8uKLL+Lv739F2/2p/29VVbFarWRnZ/Pll1+SkZHBrbfeyq9//WvCw8OlZ9UKSWiJa85kMrmnyh8/fpzPP/+clStXMmLECIYOHUqbNm3O2GEVFxdTU1NDXFzcFT9nz549OXXqFAUFBe7bFEWhc+fOREVFXc3LabX69+/P008/TW1tLQB5eXl4eXlx++234+fnd8XbtVgsnDhxgqSkpDPOn6qrqyMlJYVFixZx4sQJxo0bx9NPP42Pjw8gx0hbK5laI5qEa6p8+/bteeGFF/jNb35DWloazz77LDNmzKCystK9Ltynn37Kn/70J9LT093fwi9XQEAAo0ePPuObuI+PD2PGjJGVPK5Qhw4d6Nix4xlh4efnx7hx464oQFzT1D/++GOeeuopMjMzUVUVp9PJwYMHmTZtGu+99x6dOnXijTfe4J577nGfTiGB1XpJT0s0OYPBQJ8+fejWrRvZ2dlMnz6dX/3qV9x2221ERkYyd+5c9u/fT3V1Ne+++y5JSUmX/Rw6nY477riDb7/9FofDAUBgYCBjx45t7JfTaphMJiZNmsT+/fvdbdqlSxe6dOly2dtSVZX6+no+/fRTXn75ZUpKSliwYAFDhw5l5cqVHD16lHvuuYfBgwfL1HVxBnknCI9wrW2YmJjIX//6V5599llSUlJ49tlnOXjwIHa7nVWrVvHMM8+QlZV1Rdvv1KkT3bp1A0Cj0TBu3LirGsZq7bRaLQMHDiQiIgJoaNN77rnnio4rqarKl19+ycsvv0xBQQEOh4P//Oc/fPDBB7Rr144PPviA8ePHExQUJIElziDvBuFxOp2Onj178uCDD5KdnY3VagUadmxLly7l6aefdg8dXY64uDj69u2LRqNBr9czYcIE2QFeBdcxwZ49ewIQHh7OoEGDLmsbrokVM2fO5KWXXiI/P999e0FBAQkJCUyZMgV/f38ZAhTnJZ9g0WzMnj2b3NzcM26z2WzMmTOHxx9//JzfXYzBYGDs2LEEBgbSr18/Onfu3JjltkohISEMGTIEo9HI6NGjL/tEXrvdzueff85zzz1HTk7OGb+zWq0sXLjwinrWovWQY1qi0bkuYX45PSObzUZNTQ39+/cnKyuLgoICnE4nBoMBu93O0qVLCQwM5B//+AdhYWGXvN3+/fsTHBxMz549CQ4OxmazXclLalRarRaNRnNZO3un04nD4bjiiSmNacyYMbz77rv06tULo9Ho7hlfigULFvDCCy+Qn5+PwWBwvy6z2UxUVBRJSUnk5+cTHR19WTXpdDqZoNFKSGiJRpefn88f//hHsrKyLusSEDabDbvdTlhY2DknIKuqyr59+5g8efJlH0NRFIWNGzdy8803X9bjroXy8nIeeOAB/vjHP17WDjYlJYXHH3+c+vp6jw9xqqqK0Whk1qxZzJ0797IeZ7PZCA0NJTQ01P36VVVFo9Gg0WjIyMjgqaeeuqzXWFBQwH//+19GjRp12a9FtDwSWqLROZ1OfH19+ctf/tLoq184HI7L7qWUlpbi5eXlPr/Hk1atWnVFoeNwOIiLi2Pq1Kkefx2qqpKXl0dERMRlvRan03lNekOffPIJdru9Ubcpmi8JLXFNaDQavL29MZvNTfacqqqes0NUVbVZnYxqNBqvuA6tVovZbL5moXW+9rvQ/ZKSkppFe0LDsUvReshEDNGsVFRUnHEeEDQMqR04cACn0/mTjz3fTlRRFGpra9m/fz8Wi6XR6lRVlWPHjp0zmaA5Orv9LBYLBw8epKam5oz7XWoIKYpCVVUVBw4caNRjhKqqkpqa6p5ReDmPE62H9LSER6xevZqMjAzuv//+MxavLS4uZv369XTq1Ml97KqwsJD169dTWlrKvHnz3NfxGjVq1CWtblFRUcG6deuIj49v1NUw9u7dS0REBOHh4axevZoNGzbQv39/xo0b55HLuS9fvpyCggKmTp16xut0tZ/rJODa2lo2bdpEUFAQW7duZfHixZjNZoYOHcqNN954ST2XkpISNmzYQEJCQqMtlOt0Otm1axeJiYkEBQXx3XffsWXLFgYPHszo0aM90qai+ZF3gWhydrudtWvXEhgYyKlTp0hKSqK6upq0tDQKCwvdvayqqipSU1MpKirCbrdTUFDA0KFD6d+/P5999hldu3bFx8eH1NRUtFotcXFx7pmFOTk5nDp1CoPBQGhoKNBwXCgtLY2IiAhycnJo3749Wq2W6upqSkpKCAgI4NChQwC0bdvWPYPt+PHjOBwOioqKaNeuHXV1dRQWFlJVVUVERATFxcVs2LCBX/ziF3z77bfExMTQq1cvj7RpREQEeXl5xMXFndF+TqfTvcZfUVERVqsVVVXJzc1lzJgxdOzYkS+//JKePXuiKArHjh1Dr9e721RVVbKyssjJycFoNLpP0rbb7aSlpREZGUlOTg5JSUlotVqqqqooKyvD19eXw4cPo9FoiImJITIykqKiIkpKSigvLyc0NJS4uDhOnjxJUVHRGesa7tixg1/84hfMnj2bmJgYunbtet7X3lyGKUXTkOFB0eRycnIwm80kJCSQlZWFw+Fg5cqVbNy4kUOHDlFWVobD4WDJkiVs27aNlJQUKisrAVixYgUfffQR3t7e+Pn5UV9fT35+PkePHmXx4sVUV1dz8uRJZs6cSUZGBsXFxTidTpxOJ+vWrWPr1q1YrVYWLFhAcXExqqqSnp7Oxo0bqa+vp6CggIyMDBYvXuze2c+aNYslS5aQk5NDbm4un332GUePHuXw4cNAw2zJ4OBgEhMTad++PdnZ2U3epllZWQQHBxMdHU1ubi52u/2M9quoqGDv3r0sWrSI1NTUM2pcvHgxn376Kf7+/vj4+FBXV0dBQQGHDh1iyZIl1NXVcfz4cb7++msyMzMpKSlxrxO5evVqduzYgcViYcGCBZSWlqKqKkeOHGHLli3uNk1PT2fx4sWUlpaydetWPvzwQzIzM6msrKSwsJAvv/yS1NRUjh49CjSEVnh4OAkJCcTHx5OXl9fkbSqaJwkt0aRUVeXAgQMEBATgdDpJTU2lvLycrKwsJkyYwJgxY/Dz86O6uprs7GzuuOMObrrpJveEjmHDhnHfffdhtVopKSmhsrKSgwcPsnHjRrZt20ZtbS2HDx8mMTGRKVOmMHbsWLy8vDh06BCLFy9mxIgRBAcH0759ezZs2MArr7xCdnY2ERER1NfXc/jwYTZu3Mj27dspKytz133HHXdw9913Aw0rQdx777107NgRaOht1NXV8c0332C32y967O1atOmePXsICQlBVVUOHz5MRUUFp06dcrefyWQiIyOD/v37c9ttt7mXYgIYOXIk9957L9XV1ZSXl7uPK27atImtW7dSX
1/PgQMH6Nq1K1OmTGH06NHo9Xr27dvHihUrGDFiBEFBQcTHx7NhwwZeffVVTp06Rdu2bamtreXQoUPu/x/Xl48hQ4Zw991306dPH7Kysmjbti1Tp051rzPpcDioqalh1qxZ7kV0hQAJLdHELBYLGRkZ7h7NwYMH3cOBlZWVVFRUUF9fj06nQ1VVKioqKC8vd0+i8Pf3JyYmBo1GQ0VFBbt376ZHjx786U9/cu+ITSYTxcXFZ0wSSExMZPLkySxcuJDq6moSEhJYt24d4eHh7N+/n+DgYA4fPkx4eDh/+ctfiIuLcx/g1+l07tXFvb29qayspLa2lpKSEqAhxCwWC6NGjSI/P/+yTn5uDHV1dWRkZJCXl0dWVhZ79uxx94QqKyspLy/HarWi1+upqKigpqbGHR7QsCJ+bGwsTqeTyspKdu7cycCBA3niiSfcr8VkMlFYWHjG1PIOHTpw++23s3jxYmpra4mJiXG36eHDhwkMDOTAgQPEx8fz3HPP0bZtW1RVdc+CdB2zNJlMVFZWUlNT427TsLAw6uvrGTNmDAUFBU1y4VDRMsgxLdGkXDvLRx55hJCQEN555x1KSkoYNGgQ33zzDVqtFn9/f7y9vbnhhhv44osv0Ov1BAYGotfrmT17NuvXryc2Npa4uDjsdjuzZ89m3759eHl5oSgKvXv35vjx4+7zxCZNmkRgYCD9+/entLSUbdu20a5dO0JCQujfvz9ff/014eHhmEwmfvjhBzIzMzEaje6d6unT1GNjYzGZTLz00kv4+fmh0+kIDQ0lJiaGl156ifbt27sX6W0qZWVlGI1GHn74Yfz8/HjzzTcpLy9n8ODBfP755+j1eoKCgujVqxfffPMNO3fuxNfXF41Gg8Fg4KuvvmLFihXEx8cTHR1Nhw4dWLBgAX5+fu42HTBgALNnz+a5554jLCyM2267jcDAQAYNGsSiRYvYvXs34eHhhISE0LdvX+bOnUtISAh2u9099Ofl5eVeB/L0SRUJCQk4nU5eeeUV/P390el0REVFERoayksvvUSXLl3cvVohlOY+XTQ5OVnduXMnW7duZf78+bz++utytdJmLicnhxdeeIEpU6ac0+tw9QBcJ5m6TjgF3ENAp5+Aevptp/98+gnGpw8duU52dQ0pKYqCRqNxP+fpV8l1Op1oNBr33xfaluv3rsefvm3XH9dtrvtdaHLAsmXLMBqNPPbYY5d1Yu7evXt5//33eeihh845T+vsNnWdgH2+Nj379Z0+9NZUbeq6/+nbPXvbpz/fxdr0vffeY/z48YwdO1YmZTRzhw4d4vXXX+err7666H0VRdmtqmry2bdLT0s0qbN3PqfvuM/3ZeTs2863oz/f407f+bl+Pv3v0x93+uMvVoOr/rPrON9tTeXsNv2p13P2zxeq+1q26dnB0hzbVDRf8o4QQrRozX20SDQuCS0hRIsmQ4KtiwwPimvC6XRSVVXVqCtQXA9qa2uvuE3sdjuVlZWyOOxZ6uvrPV2CaEISWqLReXl5ERwczLx585rFpJmCggJMJpN7FQdPqqqqYuLEiZfdOzCbzeh0OmbMmNEsjvNkZmYSERFxxhJcnlJaWkpAQICnyxBNREJLNLqAgAAef/xx6urqPF0KAO+//z6dO3dm2LBhni4FgMDAwMt+TExMDNOmTWsWF7FUVZW//vWv/OIXv3CfDOxp4eHhni5BNBEJLdHodDrdGSsueJKqqpjNZoKDg4mPj/d0OVfMy8uLNm3aeLoMoGHo12g0EhUV1aLbVLRMnh9nEEIIIS6RhJYQQogWQ0JLCCFEiyGhJYQQosWQ0BJCCNFiSGgJIYRoMSS0hBBCtBgSWkIIIVoMCS0hhBAthoSWEEKIFkNCSwghRIshoSWEEKLFkNASQgjRYkhoCSGEaDEktIQQQrQYElpCCCFaDAktIYQQLYZcuVgIcUnWrVvHoUOHcDqdHDt2jG+++YaIiAjatGnDyJEj8fX19XSJohWQ0BJCXJKMjAyee+457HY7NpuNnTt3otVqmTp1KmPGjPF0eaKVkOFBIcQluemmm9BqtdTV1WG327FYLGi1WoYNG4bRaPR0eaKVkNASQlySkJAQbrzxRjSa/+02/Pz8uOmmm1AUxYOVidZEQksIcUm8vLyYOHEiOl3DUQWNRsPYsWMJDQ31cGWiNZHQEkJcEo1GQ/fu3enYsSMAWq2WO+64Q3pZoklJaAkhLlnHjh3p1asXGo2GHj160KVLF0+XJFoZCS0hxCUzGo2MHj0aHx8fhg8fTnh4uPS0RJOSKe9CtAClpaVs3ryZmpoaT5dCRUUFvr6+WCwWFi5c6OlycDqdDBs2jKioKAnQVkBCS4gWICsriw8//JDExEQMBoOny2HQoEFoNBp2797t6VLYunUrgYGBREVFeboU0QQktIRoAVRVJSoqiqlTp+Lj4+PxWmpra/H29m4WPZvm0PsUTUdCSwhxWRRF8Xhwnk5VVU+XIJqQTMQQ4jqlqiqFhYXs2LGj0bddVlbG9u3bsVqtjbZNp9PJgQMHOHXq1GU9rjn09kTTkZ6WEC1cTU0Nn376KWlpaXTq1ImJEycSHR0NgK+vL/Hx8ed9nNPpZPny5SxYsAB/f38GDx7MuHHj8PLyuuhzlpeXs3PnTrp06dJox9hUVeXgwYMkJiYSFhbGwoUL2bZtG4MGDWLixInN4lie8DwJLSFaOIfDQW1tLX/4wx9ITU3l66+/5o9//CN5eXns37+fdu3aERwcTEZGBr6+voSGhpKamkp4eDilpaVMnjyZTp068cknnzBw4EBsNhv79u3DYDDQuXNn2rRpg6qqpKWlkZ6ejo+PD23atAHAarWyd+9e4uLiOH78OD179kSn01FWVkZhYSGBgYFs374djUZDx44diYuLIycnh7y8PIqKimjTpg2dOnXi6NGj5OTkUFVVBUBOTg6HDx/m8ccfZ8aMGRw5coQePXqc9/XL8GDrIsODQlwnvLy8SE5OpqCgAIfDgV6vp7y8nLVr1wJw7Ngxtm/fTkVFBWvWrHEHxIwZM3jrrbcICAjA19cXnU6H0WikrKyMZcuWUVdXR2pqKgsWLECj0bgnYDgcDpYtW8axY8dQFIUffviB4uJi7HY7qampHDhwAEVRMJlMWCwWVqxYQUlJCXv27OGbb75BURS8vLwoLCzk22+/pb6+nrS0NAAKCgoICwsjIiKC2NhYCgoKLvi6ZXiwdZGelhDXEUVR3D2PiIgIevfuzfr161EUhaSkJL777jsyMzPR6/WEhYUBMGnSJHr27Mnnn39OcXExhYWFbNiwgaKiIpxOJ3fffTeHDh2iZ8+ejB07Fo1Gw4kTJzhw4AAajYZp06bh5+dHQkICa9asITU1lS5dutC2bVuKi4vZuHEjxcXF1NbWcssttwAwbNgwbr75ZjQaDZs3byYuLo7x48dTUlICNPSeqqur+fbbb3E4HD/5mqWn1bpIT0uI64TNZmP//v2EhoaesRI7NIRZ27Ztqa2tZevWrcTHx7uPERmNRgIDA7Hb7dTU1HDgwAFuuOEGHnnkEQIDA4GG1dyz
srKwWCzubXbs2JHJkyezcOFCqquradOmDZs3byYhIYFDhw4REBDA0aNHSUpK4vHHHyciIgJVVdFqtRiNRneNvr6+FBcXU1lZSU5ODgDh4eHU1dUxfPhw8vLyZFFe4SY9LSFaOI1Gg0aj4Z///CdJSUlMmjQJrVbL119/zerVq6mtraWgoIBp06bRu3dvVqxYwZQpU1AUBbPZzIwZM1i2bBldu3YlJiaGmpoa5syZg7+/PyEhIWg0GgYOHEhubi7PPvssYWFhTJo0ifDwcPr27UtJSQn79u0jMjKSuLg4+vXrR25uLqGhoSiKwueff05aWhohISHo9XpMJhN6vd5df2JiIj4+Prz66qtERkbi5eVFVFQU8fHxvPbaayQnJ9OpU6cLvn4ZHmxdlObetU5OTlZ37tzJ1q1bmT9/Pq+//jpardbTZYkWQlVVXnzxRXr16sX48eM9Xc4V27t3L++//z4PPfTQOedIXepn+PShw+bAFTaXU//5/Pe//2XChAmMHTtWAqyZO3ToEK+//jpfffXVRe+rKMpuVVWTz75delpCtHCXs6Nujjv15liTaL7kmJYQokWT0GtdpKclRAuiqmqzGuJrDqQ9WhcJLSFaAEVRKC8vZ8eOHRiNRk+X06y4ZhyK1kFCS4gWIDw8nBtuuOEnT7JtSsuXL6dfv36EhIR4uhS6d+9OYmKip8sQTURCS4gWIDIykj/84Q+eLgNoWLMwLy+Pxx57jK5du3q6HNHKyEQMIYQQLYaElhBCiBZDQksIIUSLIaElhBCixZDQEkII0WJIaAkhhGgxJLSEEEK0GBJaQgghWgwJLSGEEC2GhJYQQogWQ0JLCCFEiyGhJYQQosWQ0BJCCNFiSGgJIYRoMSS0hBBCtBgSWkIIIVoMCS0hhBAthly5WFyX6urqqKurc/+7urqa0tJSNBoNPj4+6PV6D1fY8tTX12O323E6ndhsNmpra6murkaj0WA0GtFo5DuwuPYktMR1adu2bfznP/9BURTS0tLYuHEj3377LdHR0Tz11FPExsZ6usQW5+uvv2bp0qWoqsqePXvIy8vDz8+PXr168dhjjxEQEODpEkUrIKElrkuJiYmsW7eOsrIy922KonDnnXcSEhLiwcparqioKJYvX059fT0AJ0+eRKfTkZiYiK+vr4erE62F9OfFdSkiIoKhQ4eeMWRlMpm46aab8PHx8WBlLVe/fv2Iioo64zY/Pz9uvfVWtFqth6oSrY2Elrgu6XQ67rjjjjNCy8fHh1tvvdWDVbVsPj4+3HHHHWcEVHR0NP379/dgVaK1kdAS1yVFUejduzedOnUCQKPRMHr0aMLDwz1cWcul1+u58cYb8ff3Bxra9L777sPLy8vDlYnWREJLXJcURSE+Pp4+ffqgKAoajYaJEyfKDLeroCgKnTt3pnv37gCYzWaGDx/u2aJEqyOfYHHd8vb2ZtSoUfj5+dGtWzd69uzp6ZJavLZt2zJgwAAMBgM33ngjCQkJKIri6bJEKyKhJa5biqIwatQoQkJCGDBgANHR0bKDvUoajYZbb70Vf39/BgwYQGBgoKdLEq3MVU15VxQlAPgE6AqowINAKjAbiAMygcmqqpb9eP/ngIcAB/B7VVVXXc3zi+apuLiYt956i2PHjnl8OE5VVRwOB7t27eLnP/+5x0PLYrFw9913c88991xWLenp6bzxxhuUlZV5/DVYrVa0Wi0rVqxg9+7dHq0FoLa2lmnTptG3b1+Pt4249q72PK13gJWqqt6lKIoB8Ab+DPygqupriqI8CzwLPKMoSmdgCtAFiAJWK4rSXlVVx1XWIJoZi8VCfn4+48ePJygoyNPlcPPNN2M0GpvFuUTr16+noKAAVVUvawdbVVVFXV0dkyZNwmQyXcMKL05VVUaMGEFMTAw6nedP9Zw1axYlJSWeLkM0kSt+xymK4gcMBX4BoKqqFbAqijIRGP7j3b4E1gHPABOBb1RVtQAnFEVJB/oBW6+0BtF86fV62rZtS1hYmEfrUFWVuLg4gGbxLfzw4cNX3Ps0mUzExsZ6/DwzV5t6uhftIitxtC5X865LAIqAzxVF2asoyieKovgA4aqq5gH8+LdrrxUNnDrt8dk/3nYORVF+pSjKLkVRdhUVFV1FiaIlqampISMjo1G3qSgKFouFzMxMbDZbo21XVVVyc3NbxDf86upqMjMzUVUVAJvNxsmTJ90rW1wuRVGor68nMzMTu93eaHWqqkpOTs4Zq5hcaj2i9bia0NIBvYH3VVXtBdTQMBR4Ied7Z6nnu6Oqqh+pqpqsqmpyaGjoVZQomhuLxcLixYt57bXXmDdvHmVlZe6dqcPhoKqq6ryPc6139+KLL/Lmm2+ycePGSw6h0tJSFi1aRG1tbaO9DoBNmzZx6NAhHA4HW7Zs4a233mLt2rU4nc5GfZ7LqWfhwoXntEtubi5Llixx11VdXc3y5cspKSlh27ZtvPjii7z11lts2bLlkkOooKCApUuXXnHwnY/T6WTdunWkpqZit9vZuHEjb731Fps2bfJYm4rm52oGpLOBbFVVt//481waQqtAUZRIVVXzFEWJBApPu3/b0x7fBsi9iucXLZDdbictLY2BAweSk5PDzJkz+dWvfoXVaqW4uJjo6IbOt8PhoKysjICAADQaDQUFBRw/fpyYmBj69OnD/PnziYuLIzAwkIKCAjQaDcHBwe7jVuXl5ZSVlaHT6dzfxJ1OJ4WFhfj5+VFeXk5YWBgajQaLxUJNTQ0mk4m8vDwAAgMD8ff3R1EUiouLcTgcVFdXExoais1mo6qqCovFAkBZWRnLli1jwoQJrFq1ivDwcDp37tyk7epwOFi2bBn+/v4kJyfTpk0b6uvrKSwspKioCFVVsdvtFBYWUlJSgsPhQFVV0tLS6NChA+3bt2fRokUkJibi7e1NYWEhWq2W4OBgzGaz+3WWl5ej0+ncIeJwONxtWlFRQWhoKBqNhvr6emprazEajeTn56MoCoGBgfj5+VFdXU1NTQ11dXWYzWaCg4MpLS2lqqoKq9UKNITi999/z7hx41i1ahVhYWG0b9++SdtUNE9XHFqqquYrinJKUZQOqqqmAiOBwz/++Tnw2o9/L/rxIYuBWYqivE3DRIwkYMfVFC9aJo1GQ2RkJB06dOCdd97BarWSm5vL/PnzMRgMPPHEE1gsFr766ivuuusutFotX3zxBYmJiezfv5+KigoMBgMmk4n8/HxWrVpFfX09gYGBTJ48mfLycmbPno2iKISEhDBo0CBUVWXnzp1kZmYyfvx4PvnkEx555BFCQkJITU1l3759jBgxwr0to9HIpEmTCAgI4P3330ev12M0Gunfvz+rVq3Cz8+PgoICYmNjyc3NJSAggOTkZDIzM8nIyGjy0MrNzcXb25uYmBiys7OJjIxkxYoVHDlyBLvdjkaj4eDBgyxbtgyTyURpaan7sXv27CEnJweTyYTRaCQ7O5sffviB+vp6QkNDmTx5MoWFhcyePRu9Xk9YWBh9+vRBVVW2bt1KTk4O48aN45NPPuHRRx8lKCiIlJQU0tLSGDRoECt
XrqS+vh6z2cykSZPYsGEDa9euJS4ujm7dutG5c2c++OADd5t26tSJnJwcQkND6dOnD2lpaWRlZUloCeDqZw8+Bsz8ceZgBvAADUOOcxRFeQjIAiYBqKp6SFGUOTSEmh34rcwcbN30ej12ux1VVWnfvj2TJk1i8eLFQMOkg6SkJA4ePIhOp6Nbt25YLBZiY2Pp1q0bmzdvpra2Fp1OR11dHWlpaTgcDsaNG0dKSgrh4eFMnjwZg8FAbm4uR44cYf/+/fzjH/8gIiKCxMREdu7cyfHjx2nfvj3h4eEYDAZsNhvHjh2jurqaG2+8kYCAAJxOJ7feeitdunThwIED+Pv788gjj/DZZ58BDUOeNpuNOXPmoNFoGvU4z6VQVZX9+/fTrl07fH19ycjIoEOHDqSlpXHvvfdSUFDA2rVrSU1NpX///nTr1o3p06e7Hx8XF0e7du3YunUrdXV1GAwGampqOHbsGEePHmXChAns3buX+Ph4brvtNvR6PSdOnODgwYPs27ePF198kdDQUGJjY9m5cyfp6em0bduW6Oho9Ho9VquVY8eOUVdXx0033YSqqvTp04cpU6ag1+vZsWMHYWFhPPDAA3z88cfuNq2vr3d/+WjqNhXN11VN/1FVdd+Px566q6p6m6qqZaqqlqiqOlJV1aQf/y497f4vq6qaqKpqB1VVV1x9+aKlcjqdnDhxArPZfN4VwhVFYdCgQWzcuNEdLIqiEBsby8CBAzEYDBQWFrJp0ybi4+N56qmniI6ORlVVNBoNVqvVfawMGlZyGDduHCtWrKCuro6kpCRWrVqFoijs37+fkJAQ9u7di8lk4rnnniMpKck9BObqXWg0GnQ6HXa7HYfD4R7KCg0NxWKxcNNNN1FcXExwcHDTNOKPLBYLR44cYevWraxdu5ZNmza5d/J2ux2bzeaeYu+q/fQQSEhI4IYbbkBVVYqLi1m3bh1du3bliSeeIDIyElVV0Wq157RpXFwco0ePZuXKlVitVuLi4txtevjwYYKDg9m5cydBQUE888wzxMfH43Q60Wq1hIaGuqfLa7Xa87ap1WplzJgxlJSUyEnMws3zJ1mIVqe2tpYPPviAkJAQRowYgcFgYMWKFaxatYpTp07hdDr5/e9/T1BQEGazmdLSUiIjI0lJSWHBggXs3r0bk8lEmzZtqK6uZtmyZRw5csQ9KaBHjx4cOXKEV155hZCQEMaOHYufnx/Dhw9n/vz5bNu2jZiYGLRaLQMGDGDGjBmEhYWh0+lYvXo1+fn51NTUuOs9fXZabGwsFouFf/7zn1itVhRFISwsjMDAQN544w2Cg4Pp0qVLk7anawbj3/72NwICAnjxxReprKyke/fufP755yiKgslkokuXLsybN4+dO3ficDjcr2327Nls3LgRb29vIiIiaNu2LatXr2bv3r3u43bJycnMmjWLl19+mbCwMPfCuTfeeCNz5sxh9+7dBAUFodVq6du3L3PmzCE4OJja2lo2bdpEVlaW+0rSLq52jY+PZ+nSpbz55pvu0IqOjsZoNPLGG28QERFBhw4dmqo5RTOnnP7NqTlKTk5Wd+7cydatW5k/fz6vv/66XLunmcvJyeGFF15gypQp55ynpaoqNTU1WK1W93EpjUZDXV2dO3Q0Go17JfG6ujpUVcXb2xuLxUJtbS2KouDl5YXJZMLhcFBTU4OiKGi1Wkwmk3tKdn19PRqNBh8fH6xWK0aj0b1T1Ov11NfX4+XlRX19Pd7e3qiqSnV1NdBwaRMvLy+0Wi21tbXuOlVVpa6uDqvVil6vd/9xPZ+rrgtNw162bBlGo5HHHnvsss5z2rt3L++//z4PPfTQOedpORwOLBaL+5L3NTU17pXXXW3jej21tbU4nU50Oh1GoxGbzXZOm9rtdmpqatBoNJfcpq72t1gsZ7Sp0+l0fwFwPaerl+eqUVVVamtrsdls6PV6DAYDOp3O/XxGoxGj0XjBNv3vf//LhAkTGDt2rEx/b+YOHTrE66+/zldffXXR+yqKsltV1eSzb5eelmhSiqK4Z6OdztvbG29v7/Pe7uLaeZ1Op9O5A+50JpPpjJUjXENRpz/etfM/vZ7zbev0kFAU5by1nv18TUmr1Z5Rz+n1nv16zl4VRKvVntOmer3+vCfsXkqbum5ztenpX0BOf87TKYpy3hOmL7VNJahal+ZxSrsQQghxCSS0hBAtWnM/xCEalwwPimtCVVWcTqf7gL9o4HQ6r3gn61qxXtr0TBJarYuElmh0Wq0WRVGYMWPGOcdLPEFVVaqqqjAajRgMBo/WkpOTwy233HLZx2EMBgOVlZV89tlnHl1Z3TXxxWw2N5sFc48dO9Ys3meiaUhoiUYXGBjI7373u8te+PRasdvtzJgxg+7du9OzZ0+PH7h3rTp/OWJjY3nqqacaff3Ey5Wbm8uCBQu47bbbPL7a/Om6du3q6RJEE5HQEo3Oy8uL7t27e7oMN1VVSUlJISQkhGHDhnk8tK6E2Wymb9++ni6DXbt20aZNG4YOHXreWaBCXGvNo38vxDWkKAqRkZHk5ubK8Y+rVFJSgp+fH3q93tOliFZKQku0CuHh4eTn50toXQXXMk+BgYHN4orFonWS0BKtQnR0tPS0rpLD4SA3N5fo6OhmMwlDtD7yzhOtQnh4OJWVlR6fyNCS2e12ysvLCQ4ObpHHBcX1QUJLtApms5nQ0FBOnDjh6VJaLNeFOeVq4sKTJLREq6AoCu3atSM9Pd3TpbRYrp5WSEiIp0sRrZiElmg1kpKSOHbsmKfLaLFKS0tRVZWgoCBPlyJaMQkt0Wp06tTJfYVjcflcVySW6e7CkyS0RKsRHR1NdXU1FRUVni6lRcrMzCQuLk4mYQiPktASrYaXlxcBAQEUFBR4upQWKT09nXbt2nm6DNHKSWiJVkOn0xEcHExhYaGnS2lxnE4nmZmZJCQkeLoU0cpJaIlWwxVaBQUFcpLxZSorK6OmpoaoqChPlyJaOQkt0WpotVratGlDdna2TMa4TLm5uYSFhXn80i5CSGiJVkNRFMLCwigtLcVms3m6nBaloKCA8PBwmYQhPE5CS7Qq4eHhlJWVYbVaPV1Ki5KTk0NUVJSElvA4CS3RqoSFhVFeXo7FYvF0KS2G0+kkLy9PQks0CxJaolUJCgrC19eXrKwsT5fSrKmqitPpRFVV6uvrKSsrIzIy0tNlCSFXLhati0ajoWPHjqSkpJCcnOzpcpotp9PJqlWryM3NxcfHh9zcXKxWKzabTSZjCI+S0BKtiqIodOjQgQ0bNni6lGZNVVU2bdrEv/71L3x8fNDpdKSmphIcHMytt97KL3/5Swkv4REyPChana5du3L06FGZQfgTtFotXbt2xWq1UlJSQkFBATt37mTbtm1y5WLhURJaotWJiIgAID8/38OVNG9RUVH4+vqecduAAQMYOXKkXLlYeIy880Sro9VqiY+P5/jx454updlyndN2+rJN/v7+PProo4SFhXmwMtHaSWiJVkdRFOLj4+UqxhcRGhpKXFwc0NBmN9xwA6NHj5Zp78KjJLREq9SuXTsyMzNlOa
efEBgYSHh4OAB+fn489thjMvlCeJyElmiV2rZtS1lZGVVVVZ4updnSarUkJibi7e3N5MmTGThwoKdLEkKmvIvWR1EU/P39UVWVyspKAgICPF0Sx48fp6amxtNlnMPLy4ugoCB69uzJyZMnPV0O0PD/16ZNGwIDAz1divAACS3RKgUEBKDRaCgtLaVt27YeP07zxBNP4HA4MBqNHq3jbPX19YSFhbF69WrWrFnj6XKAhlmfv/vd75gyZYqnSxEeIKElWiUfHx8iIiI4fvw4PXr08HQ5ADz22GNER0d7uowz2O12KisrCQoK8nQpbjNnzpRz7FoxOaYlWiWNRkNSUhJHjx71dCnNmk6na1aBJYSElmi1OnXqxOHDh+UqxpcoJSWFsrKyRtuexWLhyJEj1NXVNdo2xfVPQku0WnFxcVRUVFBaWurpUn6Sw+Fg1apV/OMf/2Dx4sWXdS2w8vJyli9fTk1NDVarlS+//JKnn36aV155hZSUlMsK7O+//568vLwreQnnVVNTw9q1a6msrKSqqorp06fz8ssvc+DAAfkiIS5IjmmJVsvb25ukpCQOHDjAjTfe6OlyLig/P58ffviBxx57jLy8PAoLCzGbzWRmZuLj44PJZKJNmzbU1dWRmZmJ3W5Hq9XSpk0b9uzZw8aNG/H29qZdu3YcPnyY3/72t2RlZTF//ny6dOnCyZMnKSwsxGQyER8fj7e3Nw6HgxMnTlBRUUFQUNAZK2MUFhZSXV2N0WgkMjISRVEoKyvDbrdjt9vJycnBYDAQGxuLn58fTqeT1NRUtFotNTU1JCYmUlBQQHFxMXa7HVVV2bx5M9XV1YwcOZLly5cTGxuLv7+/B1tdNFcSWqLVUhSFbt26kZKS0qxDS6fTodFoyMjIoGfPnpjNZj788EMcDgdlZWUEBga6A+3FF19kxIgRREVFERgYSEZGBsXFxaSlpRESEoLT6eSrr76ipqaGbt26AQ2hePToUcrKymjXrh3jxo1j165drF69mujoaJKSktyhlZ2dzfr164mIiKCoqIif/exnmEwm1qxZQ2hoKIGBgRw+fJiKigpCQ0OZNGkSVquVf/zjHwwfPhyz2UxxcTE7d+7Ez8+PnJwcd9B17dqVjh07smbNGmprayW0xHlJaIlWrUePHnz00UfYbDb0er2nyzmv0NBQfv3rX5OWlsYHH3zADTfcQFZWFs8//zzbt2/n8OHD7vu2a9eO+++/H4PBgKIojB8/nrq6OiZPnozRaERRFG655RYqKipYv349DoeD2tpaDh48SG5uLqWlpYwbN461a9dy++2306lTJ/fiuFarlRkzZnDzzTczaNAgli5dytq1aykqKiI/P5/Bgwdz8uRJjh49yqlTpwgKCuLOO+8EGtYt/NnPfobBYGD27NkMGjSIDh06MHPmTKDhUiiFhYUsWLAAVVVleFBckBzTEq1aTEwMFouFoqIiT5dyQbW1tTgcDoYNG0ZiYiJHjx51h01FRQVOpxNo6DmazWZ3YLlus9vt7iniiqIQGxtL9+7dycvLo6ysjNWrV/PII4/w4IMP4uXlBYDJZKKoqMi9bQC9Xs+dd97JqVOnKCsrQ6PRsGXLFux2O9XV1ej1etauXcudd97Jb3/7W/z8/NyPNZlMeHt7o9Fo0Ol0VFdXU1dXR21tLVqtloCAAOx2O4MHD0ZRFHcdQpxNelqiVfPy8sLf35/8/HyioqI8Xc4FrV69mhMnTmA2m7nnnnsIDAzktddew2QyuVddVxQFvV5/xonSPj4+6PV6Xn31VW677TZ0Oh1///vfMZlMDB48GH9/f9q3b897771HQECAu7c5fvx4pk+fzpIlS+jTpw9Tp07FYDDQvn174uPjWbJkCW3atCEkJISIiAhqamrw9vamffv2zJo1C39/f3cP7fS6NBoNPXr04Ouvv2br1q0YjUa0Wi19+/blyy+/ZOvWrQwfPvyMwBPidEpz74YnJyerO3fuZOvWrcyfP5/XX38drVbr6bLEdcJqtfLWW2/RoUMHbr/9do+tjDFx4kQeffTR855crKoqdrsdp9Pp7qk4nU7sdjspKSns2LGDRx55BKfTidPpPOMCjaqq4nA4cDgc7sc5HA4URXEfK3NtyxUyrsef/px6vd59H1fv7fTenKqqaLVa97YURXE/h2tbrkBUVfWMnt/Zz+eq60L/FzNnzqRz58787Gc/a4ymF03o0KFDvP7663z11VcXva+iKLtVVU0++3bpaYlWTa/Xk5CQwPHjx8/YsTYnrp7K6bRaLVqtltjYWPfSTxqN5pyLM7pCwRUMWq32gts629n3Oz0ML9ROl7ItRVHOu1p8c2x70fzIMS3RqimKQmJiIidPnmyRSwOFhITQpUsXT5chRJOR0BKtXnx8PEVFRVRXV3u6FCHERcjwoGj1AgMDiYyM5NChQx67lLyqqlRXV1NZWemR529JLBaLp0sQHiShJVo9jUbDwIED2bx5MyNGjPBIDZ07d2b69Okev0SKqqqkp6fj4+PjXu2iudHpdNx6662eLkN4iISWEECfPn2YN28eVqvVI5eUf/7553E4HE3+vGerra3ltdde46abbmLIkCGeLue85Dyu1k1CSwgaTjLW6XScOHGCDh06NPnzm0ymJn/O8ykrK6OyspJu3bphNps9XY4Q55CJGELQMFU7KSmJtLQ0T5fiUQcPHiQ0NJSIiAhPlyLEeUloCUHDca1OnTq5l0hqjVRV5YcffmDYsGFnnJMlRHMioSXEj5KSksjLy2u1U9/z8/NJT0/nhhtu8HQpQlyQhJYQNBzcDwkJob6+vlWGlqqqbNu2je7du8uxLNGsSWgJ8aPw8HB0Oh3Z2dmt7tIYVquVLVu2MHLkSE+XIsRPktAS4kdGo5GuXbuya9cuT5fS5PLy8qiqqqJdu3bN8twsIVwktIQ4Td++fdm5c2er6mmpqsqJEycICAggKCjI0+UI8ZMktIQ4TZcuXaioqCA7O9vTpTQZp9PJli1b6NGjR7M5X0yIC5HQEuI0BoOB7t27t6ohQpvNxsGDB+ndu7cMDYpmT0JLiLMMHjyYbdu2YbfbPV1Kk9i1axcBAQEkJSV5uhQhLkpCS4izdOjQgdLSUoqLiz1dyjXnOqF45MiR51xAUojmSN6lQpwlICAAPz8/Tp48ed1PyCgoKODo0aMMHTrU06UIcUkktIQ4i4+PD7GxsRw5cuS6D63Dhw/Tpk0b/P39PV2KEJdEQkuIs2i1Wrp27cqhQ4eu63UI7XY7+/fvp0uXLh65HIsQV0JCS4jz6NatGydOnLiuryRcW1vLsWPH6NGjh8waFC2GhJYQ5xESEkJMTAy7d+/2dCnXTEFBATU1NSQkJEhoiRZDQkuI89BoNIwePZrvv/8ep9Pp6XIaTX5+PsXFxTgcDr7//nv69euHn5+fp8sS4pJJaAlxAX379uXkyZMUFBR4upRG8+677/Kb3/yGN954g2XLlnHDDTdIL0u0KHKlNyEuwNfXl/j4eA4ePEhERMR1sXMvKSlh/vz5LF68GKPRyG9+8xtGjx7Nz3/+cxITEz1dn
hAXJT0tIS5Ap9PRu3dv9u7de92sjuG6IrHNZqOqqort27fz7rvvkpKS4uHKhLg0ElpCXIBGo6FDhw7k5ORQU1Pj6XIahSu0Tv/54YcfZsyYMR6qSIjLI6ElxE+Ij4+ntraW/Px8T5fSKPR6vXuYU6fTMWnSJJ588km8vLw8XJkQl0ZCS4if4OfnR9euXdm8ebOnS2kUrtBSFIX4+Hj+/Oc/ExkZeV0crxOtg4SWED9BURRGjhzJDz/8gNVq9XQ5V81gMKAoCn5+frz88st06dLF0yUJcVkktIS4CNcyR/v37/d0KVfNYDDg4+PDk08+ybhx46SHJVocmfIuxEUoisKQIUPYuHEjffr0uaRLeNTU1DBr1iyys7MbLRhUVb3qbW3ZsgV/f3/y8/N54403GqWuy+V0Ohk1ahSDBw+Wy6GIyyahJcQlGDJkCG+//TYlJSWEhoZe9P61tbWsW7eOxMREQkJCmqDCS9OtWzdGjBiB2Wz2WA379+9nx44dDBo0SEJLXDYJLSEuQlEUQkND0ev15OTkEBISckk9HrPZTP/+/YmNjW2CKi+N3W5Hq9V6dFjQbrdf16vni2tLvuYIcQn8/f3p1KkT27Ztu+bX2LLb7ZSUlLh37KqqUlZWRl1d3RVv0+l0UlRUBNCogVVVVXVdr4Qvmh8JLSEugUajYdCgQWzatAmbzdYo2zxx4gRLly49Z1ZiSUkJX3/9NRUVFUBDiC1evJgjR46Qk5PDp59+ypdffsm2bduwWCyX9Fz19fV8/PHHlJSUNErtLps3b2bdunWoqkp6ejqzZs1iw4YN19Uiw6J5kdAS4hJ17twZg8HAvn37rnpbqqry/fffs2HDBjIzM4GG3lBNTQ21tbWoqoqqqtTV1VFdXe0OgaysLE6cOEH79u1Zu3Ytx44dw+FwUFVVRVVVFRaLxf1Yq9VKVVXVGY9XVZX6+nrsdjt1dXXuXqPT6cRisbi3VVlZSX19Paqq4nA4qKmpobq6mpqaGvd9q6qq3Mtb2e12Zs6cidFoZM2aNRw+fPiq20iI85FjWkJcIoPBwO233878+fPp06fPOUsiXY7y8nJKSkoYMmQIKSkptGvXjr1797Jw4UL3fYqLi5k9ezY2mw2Hw0GPHj2AhpmJBQUF2O12vLy8yM/P55NPPsFmsxETE8OkSZPQarXMnTuX48eP4+/vz6RJkwA4efIk8+bN46abbmLu3Lk89NBDhIeHk5uby8qVKxkzZgxffvkldXV1hIWFcffdd1NaWsqbb75JTEwMbdq04a677uKLL76gqKgIp9PJwIEDKSwspLa2lokTJ2I2m9m9ezddu3a9qvYW4nykpyXEZejfvz85OTlkZ2df8TZUVSUrK4ugoCBiY2PJz8+nurqaAwcOMGLECO677z4MBgPp6emEhoby5JNP4uvr6368w+HAZrOh0Wiw2+2YzWYSExNxOBxs27aN/Px8MjMzqaio4Omnn+bpp592B9MHH3zAwIEDad++Pe3bt2ffvn188803FBYWYjAYCAgIoH379mi1Wvbu3cuJEydwOp1ERUXxxBNP8NBDD1FZWUlxcTHPPvssCQkJQMNsSY1Gw+rVq929OCGuBQktIS5DQEAASUlJ7Nq164onZKiqyrFjx1i7di3/93//x+7duyktLcXpdOLl5YXBYECv17t7UjqdDqPRCDRMoujQoQO33347sbGxpKens3v3bioqKnjggQdITEzE6XTicDjQ6/Vn9Ab9/PwYPHgw27dvp76+nq5du/Ldd9+RlZXFnj17CA4OJiMjg/T0dB544AG6du2K3W5Ho9EQERGBr68viqJgt9sxGAwYDAZMJhMAgYGBWCwWevXqRXV1dbOa5i+uLxJaQlwGvV5P//792bVr1yVPgjib1Wrl1KlT/PnPf+a9996jd+/e5OXluY9TzZ07l4qKChITEzl58iSffvopubm5QEPg7dy5kw8++IATJ04QHx+Pv7+/e3ivtLQUVVWJiYkB4JNPPmH69OmUlpZiNpu5+eab8fLy4ocffiA0NJTa2loGDx7M/v37iY2NxdfXl4qKCpYtW0ZhYaE7mF3rFQJER0ej1Wp5//333Zc0CQwMJD4+no8//pgDBw7Qv3//q21qIc5LjmkJcRkURaF79+5Mnz6dwsJCdzhcDr1ezz333ENgYCA6nY4pU6ag1+vx8vIiNjYWu92OyWQiODiY3/zmN9TU1ODj44O/vz+qqvLMM8+gKAr+/v6EhobidDoJCgpCVVW8vb3x9/fHaDRy3333UVRUhKIoBAcH8+CDDxIcHMzkyZOpra3F39+fv/3tbwQFBdG2bVvCwsLQarU8+uij2Gw2zGYzZrMZrVZ7xgnVBoOB3/zmN5SWluLt7Y3ZbEaj0XD//fdTUFCA2WwmMjKyMZtdCDcJLSEuU2RkJP3792fhwoX8/ve/v+zHa7VaIiIi3D8HBwe7/312CJ5v59+pU6dzbouPjz/ntoCAAAICAtw/u4by9Hq9e0WMqKgoANq2beu+3/lOhnY9FnCH4Ol1Q8O5bP7+/uc8VojGJMODQlwmRVGYOHEiGzZsoLS01NPlCNGqSGgJcQXatm1Lhw4dWLNmzTVfIUMI8T8SWkJcAdd1trZu3UpNTY2nyxGi1ZBjWkJcAUVR6Ny5M3PmzCErK4tOnTqdsaafoijU1NTw/fffExQU1OT1uVay8PTiuOeTmppKcnKyp8sQLZSElhBXKCwsjJ49e7JixYpzJkf4+PgwadIk8vPzPVJbeno6R48eZfTo0RgMBo/UcCHh4eH0799fLksiroiElhBXSKPRcMstt/D444/z85///IwTak0mExMnTvRIXXa7nbfffpukpCQeeughtFqtR+oQ4lqQrzpCXIW2bdsyaNAgZs+e3WyuEZWXl8fu3bu55ZZbpDcjrjvyjhbiKiiKwoQJE9ixYwe5ubken0moqiqzZ89m8ODBREdHN7vjWUJcLQktIa5STEwMSUlJrF271uOhdezYMXbt2sWUKVMksMR1SUJLiKvk5eXFzTffzA8//ODRq/ja7Xa+//57brjhBo/MWBSiKUhoCdEIunXrRmxsLIsWLfLYVXtLS0vZu3cvw4YNk2NZ4rol72whGoHBYOCRRx5h2bJlZGRkeKSG1atXExUVdc45Y0JcTyS0hGgk4eHhDB06lEWLFmGz2Zr0uSsrK1m4cCGTJ09Gr9c36XML0ZQktIRoJIqiMG7cOFJSUpq0t+V0Olm8eDEdO3akc+fOTfa8QniChJYQjURRFGJiYtznbTXVTMKSkhI2btzIxIkTZVhQXPcktIRoRFqtlkmTJpGWlsb27duveXCpqsq+ffvw8fGhffv2ElriuiehJUQj8/f351e/+hVffPEFJSUl1/S5bDYb8+bNY9y4ce4LOwpxPZPQEqKRKYpCnz59CAgIYM2aNddsCryqquzatQur1cqAAQOklyVaBQktIa4Bb29vpk6dysKFCykoKLgmz1FbW8v06dP5+c9/jo+PzzV5DiGaGwktIa4BRVHo2rUrw4YN46OPPmr0KfCqqrJnzx6cTqdcm0q0
KhJaQlwjGo2Gu+++m1OnTrFp06ZGnZRhsVhYuXIlo0aNwmQyNdp2hWjuJLSEuIb8/f2ZOnUqs2bNorS0tNG2m5mZyalTpxgyZIgcyxKtioSWENeQoigMGDCANm3aMHfu3DMmZaiqesm9r9Pv63Q6mTNnDsOHDycsLExCS7QqElpCXGMmk4nHHnuM9evXu8/dKi8v59ChQ5d04UhVVcnOzmbt2rVUVFRw+PBhUlJSmDRpkiyMK1odnacLEOJ6pygKgYGB/PznP2fmzJmYzWb+7//+j5SUFJYvX46/v/9Ft7Fp0yaeeOIJRo0ahd1uZ+zYsZjNZlRVlZ6WaFUktIRoIn379mXRokWMGzeO/Px8NBoN6enp9OnT5ycf53Q6yc/Pp7CwkJkzZ6LX6zl58iShoaEMHDiQ4OBgCS7RasjYghBNoKioiH//+9/Mnz+fU6dOYbPZcDgcrFix4qKPdTqdFBQUoNFoUFUVq9XK1q1beeCBB1iyZInHrt8lhCdIaAlxjdntdubPn8/7779PYWHhGbevW7cOq9X6k493Op3k5uaeEU4mk4mJEydy8803y3Et0arIu12Ia0yr1fLQQw/x+uuvk5SUhFardf8uJyeHI0eO/OTjHQ4HJ06ccIeWyWTinnvu4Z133iE8PFyGBkWrIqElxDWmKAp6vZ6f/exnfPLJJ9x4443odA2Hk3Nycti3b99PTn139bQAvLy8uP3223nxxRfx8fGRwBKtjoSWEE1Er9czePBgvvnmG5577jkiIiKora1l79691NfXX/BxDoeDoqIiDAYDkyZN4p133iEyMlICS7RKMntQCBrOhaqtrb3o8aXGoCgKjz76KAkJCfz73/9m69atZGZmEhERcd775+bm4nA4GDNmDNOmTUOr1VJeXn7N6zydl5cXJpNJglJ4nISWEDQMwb300kts2LABvV6Poijuc6Au9LfL+X4PnPe2029XVZWamhoKCwt58MEHMRqN7u2drq6uDh8fH06ePMkvf/nLc7ZzqTVe7HGn/871b2iYMDJ8+HCee+45vL29G6W9hbhSElpC0LBTt1gsPPTQQ02+arrNZsNut19w4dvi4mK8vb09FhiZmZksX75cptaLZkFCS4jTKIrS5FPIvby88PLyuuDvw8LCmrCac8mQoGhOZCKGEFfB4XCQnp7eqJcdgYae38mTJ6mrq2vU7VZWVpKXl9eo2xSiKUloCXERqqpy+PBh3n77bb744gtOnjx5xu+ys7Mv+NjKykr+9a9/8fbbb7NgwQKqq6sv+XlnzZrV6AGTlpbmXoUjNzeXDz/8kNmzZ//k7EUhmhMJLSEuwalTp1AUhfj4eL788ktOnTqF3W6npKSETp06AWC1WqmoqHD/u7Kykurqao4dO8bEiRPJzs5m06ZNOJ1O8vLyyM3Npby8HKfTiaqq1NfXU1BQQF5e3hkhUlVVRU1NDRUVFe4rIDudTqqqqrBarRQUFJCTk0NJSQlOpxOn00lhYSFFRUXk5+djt9upq6sjPz/fHZpOp5OvvvqKqKgo8vLyWL9+faP3FoW4FuSYlhCXQFEUQkJCGDRoEEePHuX48eOYzWYWLlzIli1b+OKLLygoKGD69Ok8/fTTbNu2jZycHIYNG0ZpaSlbt26lqKiIgQMHYrVamTt3LrW1tWi1WiZNmkRERATz5s0jIyMDX19fbrnlFgCys7NZtWoVAwcOZOfOnQwcOJCuXbtSX1/P9OnTGTduHN999507sO68807Cw8P53e9+R79+/fD29uaee+5h/vz55ObmUl9fT0xMDGVlZeTn5/OnP/2JI0eOsGTJEsaMGePhVhbi4iS0hLgMrtUtbDYbgYGB3H///Rw7dgyAiIgIvLy8yMrKYvv27dx5551Aw7JLbdu2pbi4mNLSUhRFISAggKysLHJycujduzdms5n09HR+/etfEx4eDjT0sD7//HMmTZpE9+7dqaysJCUlhcOHD9O5c2eqq6sJCQkhODiY9PR0srKy6NixI+Hh4QQFBfHwww/j7+9PUVERJ06c4Omnn2bTpk3k5ORgtVrRarVs3LiRuro6GR4ULUaLGh6U4QvhSaqqUllZSXFxMUFBQef8XqfT0b17d5YtW4bdbic+Ph6A4OBghgwZQr9+/Th8+DDHjx/n+PHjPPnkkwwYMAC73e7e/unvcZPJxOjRozl48CDl5eV06dKFHTt2kJWVxY4dOwgODqa8vJxt27bxhz/8gdGjR7uHD/39/fH39z/nXC3X9v39/XE6nXTo0AGtVktkZOQ1azchGlOLCS2dTuce+xeiqdntdlavXs37779PZGQk7du3JzMzk9dff53Dhw/zz3/+kxMnThAXF8euXbvo27ev+7Gpqam8+uqrrFq1iuTkZIKCgqiuruazzz4jPz8fALPZTPv27fn44495++23SUtLQ6fT0a9fP3r16sXcuXPdaw1269aN1NRUOnbsiK+vL1qtls8//5yMjAx3SCmK4v53UFCQe/WNLVu24HQ68fb2ZsiQIfz73//mu+++Y8SIEU3fqEJcAeVqQkBRlMeBhwEVOAg8AHgDs4E4IBOYrKpq2Y/3fw54CHAAv1dVddXFniM5OVnduXMn+/bt4/PPP+ett95Cr9dfcc1CnI/dbufpp5+mW7duZwQO4J4kUVFRgVarxc/PD4PBgNVqpbS0FACNRkNAQABarZaKigp8fX0xGAzudQMBDAYDfn5+aDQaysvLsdvtGI1GDAYDXl5e7skbTqcTf39/rFar+4Ti6upq/Pz8qKqqwmg0UldXh7e3N3q9noqKCqxWK15eXuj1eoxGI5WVlQQEBLhfQ319PZWVlej1evR6PWazGavVSnl5OVqtlsDAwAuen3bixAmWLVvGm2++idlsvgatL1qLQ4cO8frrr/PVV19d9L6KouxWVfWcM/2v+JiWoijRwO+Bzqqq1imKMgeYAnQGflBV9TVFUZ4FngWeURSl84+/7wJEAasVRWmvqqrjUp5Pr9fjcDjkrHzR5BRFwWQynbNihZeX13mH1YKDg93/1mq1511T8HzDi15eXoSGhrp/di3rBLgDyN/f333fs393urNvMxqNZ2wPGkLU0ycuC3G5rnZ4UAeYFEXR0dDDygUmAl/++Psvgdt+/PdE4BtVVS2qqp4A0oF+l/IkiqKg0+lQVdU9Zi+EEKL1ueLQUlU1B3gTyALygApVVb8DwlVVzfvxPnmA66tcNHDqtE1k/3jbJdHpdGg0GgktIYRoxa5meDCQht5TPFAOfKsoyn0/9ZDz3HbeA2qKovwK+BVATExMQ6E6HYqiNMmlI0TrpaqqDEGfRSY/iebkas7TGgWcUFW1CEBRlPnAIKBAUZRIVVXzFEWJBAp/vH820Pa0x7ehYTjxHKqqfgR8BA0TMaBh/F2j0cj5JOKaUBQFLy8vvvnmGxYvXuzpcigrK3NP+vA0q9VKr169mnwhYSHO52pCKwsYoCiKN1AHjAR2ATXAz4HXfvx70Y/3XwzMUhTlbRomYiQBOy71yQwGA1qtttEXEBUCGmb/Pf7445e1NuC19NVXX2E2m7n
jjjs8XQoAvr6+50zkEMITrji0VFXdrijKXGAPYAf20tA7MgNzFEV5iIZgm/Tj/Q/9OMPw8I/3/+2lzhyEhtlSGo2G2tracy5wJ8TVUhSFsLCwZjObLjg4GH9/fxISEjxdihDNylUt46Sq6vPA82fdbKGh13W++78MvHwlz2UymfDx8XGf8yKEEKL1aTGD1K6lZrKzs+XAsBBCtFItJrQURSEmJoaTJ09KaAkhRCvVYkILIDY2lszMTAktIYRopVpUaMXHx1NeXu5e700IIUTr0qJCy2Qy0b59e/bu3evpUoQQQnhAiwotRVHo168fO3fulCFCIYRohVpUaAEMGDCAlJQUKisrPV2KEEKIJtbiQisoKIiAgAAyMjKktyWEEK1Miwsto9FI+/btOXTokISWEEK0Mi0utLRaLd27d+fgwYNYLBZPlyOEEKIJtbjQUhSFjh07kp2dTXl5uafLEUII0YRaXGgBREdH06lTJ3744QdPlyKEEKIJtcjQUhSFW2+9lZUrV8r1tYQQohVpkaEF0LVrV/z8/NiyZYtMyBBCiFaixYaWVqtl9OjRrFmzRiZkCCFEK9FiQwsgOTmZ/Px8WfldCCFaiau6CKQnKYpCVFQUvXr1Yvny5bRv397TJQlxVXJzc9m2bRsWi4U9e/bg7e2NXq9Hp9MxdOhQwsPDPV2iEB7XYkMLQKPRMH78eJ544gnuvvtuoqKiPF2SEFcsLy+PF198kaNHj+JwOFAUhc8++ww/Pz927Njh6fKEaBZa9PAgQNu2bRk7diyfffYZVqvV0+UIccU6d+5MYmIiFosFm82G1WrFYrHQv39/+UImxI9afGi5pr+npaVx9OhRObYlWiyTycSYMWMwGo3u2zQaDXfeeSdardaDlQnRfLT40AIIDQ1l1KhRzJs3T3pbokUbP348Pj4+7p/btWtH7969URTFg1UJ0XxcF6Gl0Wi45ZZbyMrKYs+ePZ4uR4grFhISwtixY1EUBUVRGDx4MPHx8RJaQvzouggtRVEIDg7mwQcf5IMPPqCkpESGCUWLpNVqGT9+PDqdDrPZzMCBA8/oeQnR2l0XoQUNwTVw4EA6dOjAzJkzsdlsni5JiCvSs2dPunfvTmRkJDfffLP0soQ4TYue8n42rVbL3XffzT/+8Q8OHz5Mjx495AMvLonT6WTXrl1kZWV5uhTq6+vx9fVFURS2bNni6XIAiIuLo2fPnuh019UuQ7RA19U7UFEU4uPjufvuu3n33Xd5++23CQgI8HRZogVwOp3MnDmTwsLCZjG9PDg4mDZt2rB161ZPl0J5eTne3t506tRJQkt43HX3DtRoNIwZM4aDBw/y3nvv8Yc//EGOCYhL4lrPsm/fvp4uhfr6egwGAxqN50fwT5w4wbJly+Q4sWgWPP+JuAa0Wi2/+MUvyMrKYtWqVTidTk+XJFoBp9NJZWUltbW1V72DNxqNWK1WHA5HI1XXwG63ywLTokW7LkMLICwsjN/97nfMmTOHAwcOeLoc0YKpqsrRo0c5cuQINpuNffv2kZGRcU4wVVRU8MILL/Daa69RVVUFQF1dHatWrWLVqlXs3r37kq//pqoq77zzTqMfY9u/fz9ff/01AGVlZaxbt47t27fLxCXRYly3oaUoCl26dOHBBx/knXfeIScnR4Y3xBVbuXIls2fPpqioiP/85z9s2bIFp9OJqqruPwEBATz22GNERka6H1dWVsb06dOpr69n9erV7N69G1VVcTqd7j+nb8N1G3DO71x/gPM+5vT7X8pt8+bN48CBAyxbtox9+/Z5olmFuGzX3TGt0ymKwogRI0hPT+fdd9/lmWeeISgoyNNliRbIbDZTWVnJ1q1bCQsLQ6/Xc/ToUXbv3s2kSZNYsGABvXv3PmMJJhdvb2/i4uI4duwYVquVuro6XnrpJWpqakhMTGTq1KkEBASwZs0a1qxZg6qq3HvvvQBUVlYyffp02rVrx9GjRxk2bBiJiYnU19czZ84chg0bxowZMygsLCQyMpJf/OIXeHt786c//Yk2bdpgs9n44x//yNatW1m7di0mk4mYmBgqKys5ePAgr732GikpKWzYsKFZHMsT4mKu256Wi16v57777sPb25v333+furo6T5ckWiCNRkObNm3YsmWLe+dut9vdw3319fUXPP5UUlLCpk2bqKqqwsfHB4PBwE033UTHjh05cuQIKSkpVFVVsXnzZn71q1/x2muv0a1bN2pra/niiy/w8fFhwIABtG3blkOHDrFx40by8vIoKioiODiYkSNH0qVLF7Kzs9mxYweqqmKz2XjooYf4xz/+gaIobNq0iWeeeYbu3bsDUFtbi16vJzU1ldraWiorK5umIYW4Std9aAH4+fnxxz/+kbKyMj777DPq6+tlqFBcto4dO/L444+7e+uKouB0Os8IL0VRzhjGA4iPj+eRRx5h7NixbNu2jczMTNatW0ffvn1p166de8KFVqs9YyTAFZS5ublYrVY6derEzp072bRpE3v27MFkMlFbW8vixYvp3bs3Xbt2dU+yCA0NJSoqCkVRqK+vx2g04u/vT1hYGFqtFm9vb6xWK6Ghoaiqir+/fxO2pBBXrlWEFjQE1zPPPMPBgwf5+uuvZUahuGx6vZ42bdq4T1j38/OjsLCQhQsXkpmZ6b7NbrezcuVKcnNzgYbrZC1cuJDNmzeTmJiIXq/HarVy7Ngx8vLyAPDx8SE0NJQ5c+awbNky8vPzMRqN3HbbbYSGhrJkyRLCwsIoLy+nS5cu7Nq1i86dO6PValFVlWPHjnHq1Cl3WGo0GnedoaGh+Pn5MWvWLDZs2IDNZsPPz48ePXrw7bffsmbNGoYNG9bErSlaI1VVr3rBh+v6mNbpFEUhJCSEJ554gpdeegl/f38mTpx4xodbiAu58cYb3b2Rzp07k5CQQHh4OHfccQeVlZX07duXqKgozGYzEyZMICsrC51Oh6+vLw8++KB7Gx07dsRgMDB58mSqq6vp1asXgYGBGI1GJk2a5D7updPpuP3224mMjGT8+PFkZGSg0Wh45JFHiIiIID4+npiYGPz8/LjvvvsoLS2lV69e+Pr64u3t7T4mBg1h+7Of/YyjR4/Sq1cv/P39URSFO++8k4MHD2IymdzDhkJcSzabDb1ef1XbaDWhBQ3BlZSUxCuvvMLf/vY3KisrmTp1KgaDwdOliWZMURTi4uLcP4eGhrr/3bVr13PuHxcXd8b9R44cec59evXqdc5tISEhhISEnPd5unXrBjQEJnDGMOL5anDd//Rtnb49gICAAIYMGXLOY4W4ViwWC15eXle1jVYzPOiiKArR0dE899xzbNmyhdmzZ2O32z1dlhBCXPdsNttVdxJaXWjB/3pcf//731m/fj1ffvkldXV1MjlDtGry/hfXWlVVFWaz+aq20SpDCxqCKzIykueff549e/bwf//3f+5VDIRojeTYrrjWysrKrvpc2VZ1TOtsiqLQpk0b/vrXv/Lhhx/y2muv8cc//pHQ0FD5ALdCdrud48ePN4tjnK5eT3N4H7pmOApxtcrKyq76yhutOrTgfz
2up59+mo8++ohp06bx5JNP0q5du2axwxBNQ6PR0KNHD3bt2sXmzZs9XQ5paWno9Xri4+M9XQoAAwYMuOpZX0IUFxcTGxt7VdtQmvs4dnJysrpr164mea7a2loWLlzIihUreOyxx0hOTm4Wl4YQrc9//vMf/P39uf/++z1dihCNwul08uijj/Lwww+TnJx80fsrirJbVdVz7tjqe1qn8/b2ZvLkyURGRvKvf/2L22+/nVtuuQUfHx/pdQkhxFWorq6msrLyqi+yKqF1Fp1Ox/Dhw4mNjeWdd97h4MGD/OY3v3EviSOEEOLylZeXYzKZ5Dyta0FRFBISEpg2bRpBQUH85S9/IS0tDYfDIdOChRDiCpSWlmI2m6/62KiE1k8ICgri97//Pffeey8vvPACX3zxBRUVFZ4uSwghWhRVVSksLCQgIEBOLr6WFEVBq9UycuRI/va3v3Hy5En++te/kpaWds5K3kIIIc5PVVVyc3MJDQ2VnlZT0Gg0dOzYkT//+c+MGDGCv/3tb8yePZuqqioJLiGEuAi73U52djYxMTFXPSNbQusSKYqC0Wjk9ttvZ9q0aezYsYMXX3yR/fv3y2VOhBDiJ9jtdvLy8mjbtu1VT2iT0LpMGo2GLl268PzzzzN48GDeeOMNPvjgA0pKSiS8hBDiPGpqaiguLiYmJuaqtyWhdYX8/PyYMGEC77zzDoWFhTz55JOsWLHCfQVbIYQQDQ4cOEBUVNRVrzsIElpXTFEU94Uln332WX7xi1+wbNky90QNmR4vhBANkzB27txJcnJyo5zrKicXXyXXsa5hw4aRnJzM8uXLmTZtGn369GHixIkkJCSg00kzCyFap7q6Onbt2sXUqVMbZXvS02okiqJgNpuZPHky//73v/Hy8uLll1/mo48+orS01NPlCSGER6SlpeHr63vVyze5SGhdA+Hh4TzyyCO88MILVFdX8+tf/5rZs2dTXFyMw+HwdHlCCNEkVFVl//79dO/evdEWH5dxq2tAURT0ej2xsbE89dRTHDlyhJkzZ7J69WoGDhzIuHHjCA8P93SZQghxTdXW1pKSksKECRMabe1W6WldQ4qinDFF/pe//CXZ2dn84Q9/YN68eVRWVso0eSHEdUlVVUpLS6mqqiI2NrbRQkt6Wk3Ey8uLvn370rt3b9LT0/nss89YsmQJw4cPZ+DAgbRr1w6tVuvpMoUQotEcOnSIwMBAIiMjG22bElpNSFEUdDodHTt25OWXX+bQoUP88MMPvPHGG8TFxXHXXXfRoUMH93R6IYRoyVavXs3w4cMb9arXEloeotfr6dGjB126dKG0tJTFixfzt7/9jS5dujB+/HgSEhIIDAyU8GpF7HY7NpsNAJvNhtVqpa6uDmh4v8ipE6Ilyc7OJj09nWnTpjXqdpXmfgJscnKyumvXLk+X0SRKS0v57rvv2Lx5M4qi0K9fP4YNG0Z0dHSjzbwRzdeRI0eYMWMGNTU1HDhwAIPBQMeOHdFqtTz66KMkJiZ6ukQhLomqqnz88cdUVVXx5JNPXtE2FEXZrapq8jm3S2g1L6qqUllZSXp6OsuXL2ffvn0MGDCA22+/nejoaLy8vCTArlOpqancf//97Ny584zbw8PDSUlJITg42EOVCXF5ysrKePrpp/n9739Pt27drmgbFwotGW9oZhRFwd/fn969e9OrVy9KSkpYuHAh//jHPwgODqZfv3706NGD9u3by3DRdaZdu3b06tWLXbt2uWeVajQa7rrrLvz9/T1cnRCXRlVVDhw4gNlsJi4urtG3L3u9Zso1GSM0NJSHH36Yu+66iwMHDrBhwwaWL19OZGQkEyZMIDk5GS8vL/djRMul1WqZMGGCe4gQGo5ljRkzRmaWihbD4XCwdetWevbsiY+PT6NvX0KrBVAUhcDAQIYOHcrAgQOprq5m8+bNfPnll7z//vsMHjyY3r17Ex8fT3BwsOzgWrBhw4YRGRlJeno6AP369aNLly4erkqIS1dSUsLevXu55557rsmhDAmtFkRRFAwGA0FBQYwfP55bbrmF9PR0fvjhB7744gucTicdOnRg2LBhdO/evVGnmYqmYTQaueuuu3jjjTcA6Nu3L9HR0dKLFi3GggUL6N27d6NcO+t8JLRaMK1WS4cOHUhKSqKyspJTp06xe/du/vOf/2CxWBg6dCgjR44kLCwMHx8fCbEWQKPRMGrUKN577z2MRiMjR47EYDB4uiwhLkleXh5r167ln//85zX7oiWhdR3QaDQEBATg7+9P165due+++zh27Bjr16/nxRdfRK/X06lTJ/r06UPHjh2JjIyUb+7NlKIodOrUiYEDB1JYWMjQoUM9XZIQl8TpdLJ69Wq6devWaCu6n49Meb+OuabPZ2RkcPDgQQ4ePMipU6eIiYlhyJAhDB48GF9fXzQaTatfhcPhcPDee++xectWFI1n28Fht7N7924sFgtDmkNoqSpDhwzhgQcewGg0eroa0UyVl5fzl7/8hXvuuYcbbrjhqvcnMuW9FXJNn+/Zsyc9evTAarVSW1vLtm3b2LBhA5999hmJiYn06NGDjh07EhYWRmRkZKscjlJVlaPHMwjvPZCkbr08XQ6Jw9PxDQjCL/DqL09+tQqys9i3awN2u93TpYhmbNu2bej1enr37n1NvwBLaLUCrl6U0WjEaDRyyy23cPPNN1NcXExKSgr79u1j+vTp1NbW4u/vT69evejbt2+rW8RXq9UREhFFVLznV56IiI1vNr1fFZWiXZ6vQzRftbW1zJo1i0cffRRvb+9r+lwSWq2U6xywESNGMHToUKqrq6msrOTYsWPs2LGDRYsWodPp6NWrFwMGDCA+Ph5fX198fHzQ6XTNYmfaXKhOJ7XV1RiMRvQ/9lLramoavihc4QdYo9FQVV6GyceMrhEn0Fgt9disVnx8/Rptm6J1czqdrFixgpCQEJKTzxnNa3QSWgKtVou/vz/+/v60adOGESNG4HA4SE1NZe/evSxcuJDKykp8fX0JCwujXbt2tG/fnri4OPz8/FpNgJUW5nNox1b6jRyDl+l/YWS11LPqmy/pNWQE8Z26ArBj9Qr0BgO9htzI+sVzUVWV0Kg2dO1/wwWCTAX+146qqjLnv28x5p6f0yYhqdFew7ED+zi2fw+3PfwoZUWF7N24Fm+zL32Gj3IHrhCXo6ioiJUrV/Kb3/ymSUZmJLTEGVwBpNPp6NKlC507d+buu++mpKSE3NxcMjMzSU1NZdWqVZSWlpKQkECfPn3o06cPbdu2RavVotVq3ZM7rheqqnJ45zbWLfyW4IgoOif3R1VV7DYbVosF14Qmu82Gw2F3L8NUXVHOlpVLue3hR0jZvgVVVUkecRNWSz2ooNFq3T0p1enAbrOhoqLT/y9A7DYbiqKgqiqaH9tWVVUcdjsarRa71frj7zTux9mslh/rBoOXAadTxW6z4rDb3K9n5awvMPn4kH5wH0aTNz2HDG+q5hTXCVVV2bx5M8HBwXTs2LFJPvMSWuInKYqCXq8nIiKC8PBwevXqhcPhwOFwUFlZyb59+9i9ezdLlixBU
RSio6OJi4sjPj6eyMhIwsLCCAkJQa/Xt+gQs1ksFJzKYvC4iezfsp6OvftSlHOKhZ/8F0t9HRqNDlt/C1+/8wYlBbloNVr6DB8F4A4Ya309Or0eS10dX7z2d2qrqwiJjOKW+x4kICSUjUsXcmDbRnRaPWPuuR9VhfKiInasXkm7br04dmA3fYaNIq5jFyx1dSyb/gkDRo/ju9nTKSvMx+wfyPhf/AqzfwCvPvJzYjt0RlEU7v7dk2z7fjmHd25Dq9MRFZdIVXkZWceO8sTbH3D80H62rlxKz8HDT+/sCXFRpaWlzJ8/nyeeeOKaH8tykdASl+z0XphOpyM0NJSbbrqJm266CavVSnZ2NllZWWRkZLBjxw7Ky8uprKxEr9eTlJREUlISHTt2JDY29pwZis090KoryrHU1ZLUvTfpB/ZRVVZKxpEUImLjGTzuNua+928Kc7KpKi/ll397hcWffeB+rLW+juLcXHdvTKfX0zm5P2kH9pJ+cD8njhyifc8+HN2zg8mPPkFETBwAK2d9wcJP/ssNt0yka/9B1NVUcfzQAXIzM4hOaEdVWRmBoWF0SR5Ayo4tnEw9zJHdO0gecRMGLyN3/eYPBIVHUFFSxNE9O3ngzy+we91qyosKsdTVodPrObJ7O3XV1dRVV0tgicvicDiYMWMGXbt2pXv37k32GZbQEo3CYDCQkJBAQkICw4YNw2q1Ul9fT11dHfn5+aSkpLB7925mzpxJdXU1MTExdOjQgYSEBPckD7PZjI+PD0ajsVmFmKqqFOVms331Co4d3Iu1vp7s48dw2GyYfMwYvIyYfMzYrBaM3j4YjCZ8/P63KntYmxjGTv05B7dv5sDWTQSGhpOyYwvjfvYwm5YvxG6zojqdKIoGk4/Z/TidXk+Hnslkp6dRX1tDYpcezHz7FXwDgygvLsLHz4/aqkq2rFzCbQ8/yt4Na7FZLSgoBISEEhQegaIo2G129AYDRpM3Pr5+VJQU4+3ri91mIyougYzDKQSEhHqiaUULpaoqe/fuZdeuXbz99ttNesUJCS3R6BRFwcvLCy8vL/z9/YmIiKBnz55Aw0yj0tJSMjMzOX78OAcPHmT58uWoqoqXlxdGo5Ho6GhiYmKIjIwkOjqa8PDwRjl3rKKiAofDQVDQ5Z/7dHTvTu5+7E8MHH0LG5YuIPt4Gu179mHV119SWphPcV4OfUeO4eienSz46D/knDhOSGTDqgB5mRks/PQ9Sgvy6dx3ICYfHyx1dexcs4rC7FMkdO6GyWwmvG0MS7/8iMDQcHoPG4ney4sBY8ZxbP8e1sz7htFTfoalro5eQ3uyZ/0PDLn1dvReDQG/c+335J88QfuefVz/Ce7gDwgJJSgsgvkf/R/lxYWERrXBx9ePrv0GseCT96irrmLs1F9cdfuK1sNisTBv3jzGjh17RZ+nqyErYgiPUlXVPd2+vLycoqIisrKyOHXqFPn5+eTl5WG1WomOjqZDhw7ExcWRlJTkPgnaNfHDNQ3/p3poy5Yt41//+hdTp07l9ttvx8/Pzz1hxG6388QzzxHacwBd+g06p8bCnFOY/QPw8fWjqrwMS10d/sEhFOacoq66Ch+/AAJCQqmpLKesqAAfX3/M/gGYfMwcP7QfVQUfXz8iYuLQ6fVkZ6RjqavFNyAQb18/fAMCqamsoDA7C7vdTmRsPDWVFQSGhoMCxbk5RMTGUZh9Cr8fe1pBYREYfXzIzcygprIC34BAjN7e+AYGU3DqJNGnnW9WVV5GflYmRm9vTD6+hERGUVtdRU5GOgYvL9oktkd7gW/LOSfSObRwFm+++U/MZvN57yNaD1VV2bp1K59++imvv/46ISEh1+R5ZEUM0SwpioKvry++vr5ER0e7j/uoqoqqqu6e2YkTJ0hLS+PAgQMsXLiQuro6/P39CQwMxN/fn+DgYEJDQwkMDHSvwxgYGIivr6976CItLY3169ezYcMG3n33XX71q18xatQo2rVrd9Eaw9v8b8Vq34BAfAMCAc4IBgCTjw8hkdFn3Naxd79zthmT1OGc28z+AZj9A854HpfohIYaI2PjAc4Yfjy7hvPddnrNLt5mX5K6e371D9GylJaW8t577/H73//eI1fTltASzYqrp+T6W6vVEh4eTnh4OAMGDHAHWVlZGcXFxRQVFVFaWkpxcTHHjh2jqqqK6upqampqqKmpwWAwuIcbN2zYgMPhQFVV9u/fzxNPPEGfPn0YP348U6ZMoSlHHU5/rks9fqeqKqhqwxldZ7WTEE3Bbrczb9484uPj6dOnj0fefxJaokVRFAWtVktISAghISF07Njxf+dI2e3uPw6HA6vVSklJCSdPnuTUqVOcOHHijLCwWCxs2bKFPXv28N///pfINjGMSuqOqqrX/MOYtm8XWcdSGXH73Ze14sX8j//L9u+Xc9vDjzJwzK3XsEIhzqSqKocOHWLr1q389a9/vSYXeLwUElqixXMFjF6vP+eaYWFhYXTq1Am73c7MmTPPeIxWq0VRFGJjY+nUqRM5+QXU1lShqk4UpXHO7Hc6naQf2EvKji0YjEZG3H43FSXFfD9nJmVFhVRXVDBo7K2ERLUhbd9uUvfuIiI2jl6DR7BhyXy8TCYsdXUMHncbJh8f7vz1Y5h8fFBV5wWfc+eaVZw4cgi/oGAGjh6Hf/C1OeYgWher1conn3zC2LFjiYuL81gv3zNRKUQTq66uxul0EhYWRlxcHJMnT+Y///kPGzZsYNmyZXz44Yf0GzCQ4PAINJrGWoqmYcWMdYu+JSy6LV36DsTLaCQgJJSOvfsR26EjPW4Yhl9QMMV5Oaz/8X4Zhw5wcPsm1sz/pqH2inI2L194yc8aHd+OpO69qK2qYtOyS3+cEBficDiYM2cOer2eCRMmeHQhbelpiVZBo9Hw9NNPExMTQ1JS0jnXhXI4HNdguKOhN9cmoT0njqRQWVZGaFQbzP4BRMbGY6mrJSapPTq9gSO7tnPi6GGcP56v5WX0xtvXl/6jxlJaWMDizz9k1KSLP6PNamHnmu/IzcygoqSYoPDwRn5NorVRVZWUlBRWrVrFK6+8gslk8mg90tMSrYKfnx+33nor3bt3x2QyuafHX97lPy5/ooZGq2XEHZMZOuFOMg4fIO/kCQC0Oh3VlRVYLRacTidB4ZHEdejMlMf+xC+nvUKXfgOx1NWRmXqYjMMHCQoLb1gOyuHA6WxYRsvpcABQU1XJvA/fpTgvl7qaGjJTD3Przx+m15DhjdhrFK2VxWJhxowZjBo16ppekfhSSU9LiEt2+WP4dpuVz1/9OzUV5ST17E1M+44AtElM4tDObXz+6vOM+9nDRCe0o2v/QUx/62VUVeXmex9AVVX2rF9DbXUVd/76MQB+mDuLfRvXodFqsdVbGHnXPahOJ/W1NTidDnwDAuh742jmf/gfohPauafKC3ElHA4HCxYswGazceeddzaL6+vJycVCwE+eXOwJqqry8q9/xp8/+NLjvSU5ubh1ci3V9MYbb/DKK6+QkJDQpM9/oZOLZXhQiGZIURR+8483UBT5iIqmp6oqhYWFfPjh
h9x7773Ex8d7uiQ3+UQI0WR+alTjzN+VFRVSU1nhvo5WcV4uuSeON+kJ0KL1cjqdfPXVV8TGxjJq1KhmdRK7hJYQ18zZAfNTH/zTrlrsdLLtu2XUVlW5b6utrmLn2u+x1Nc1bolCnEVVVb7//nsOHTrEL3/5S4/PFjybhJYQ18yVfTstKy6irLjQvc6goihExsZTkp9LZWlJYxZ4JunEtXqqqnL48GFmzpzJU089dc0Ww70aElpCNDPVFWVoNVoMXv87l0xvMOAbEEhRTva1e+LmMwIkPKSuro4PP/yQG2+8kQ4dOjSrYUEXCS0hmhnV2dDlOXt/odHpsNttF3rUtS1KXPfq6ur4z3/+Q1BQEFOmTDlnSbTmQkJLiEvSdKHg7euH0+nEZrW6b3PY7VSWlpxz2ZP/aX7fiEXLYbfbWbBgAcePH+ePf/xjszuOdToJLSEuSdOFQmBoGAYvLyp+PH6lqiqlhfl4+5gJCAltsjpE6+A6jrVs2TIeffRR/P39L/4gD5LQEqKZ0ep0DBw7nrLCfPdtddXVdB84BG8fOblXNK6srCxee+01HnzwQXr06NEsj2OdTpZxEqKZURSFqLgEouIS3D+7ln8SorGoqkpFRQX/93//x7Bhwxg+fHizDyyQnpYQQrRKVquVjz/+GG9vb6ZOneqxizperpZRpRBCiEZjt9uZO3cumZmZ/Pa3v8XX17dF9LJAhgeFcHM47BTn55F74rinS2lWCnOysTvsqKqKqqotZucmzs/pdPL999+zatUqXnrpJcLCwjxd0mWR0BKChuNG3Tp2YOvWrew8JFcVOF1VZSU1NTV8++23jBkzhqioKAmuFmz37t3MmDGDp556irZt23q6nMsmlyYRgoaD0k6nUxakPY+amhoOHDjApk2bSElJITk5mXvvvZfg4GA0Go0EWAuhqioZGRlMmzaNBx98kOHDhzeL62NdyIUuTSKhJYS4KFVVsdvt5Ofns2TJEn744QeSk5MZOXIknTp1wtfX19Mlip+gqipZWVm8/PLLjB49mttvv71ZBxZIaAkhGomqqpw6dYrVq1eza9cu9Ho9t9xyCzfeeGOzXfqntauoqOD5558nKSmJBx98sFmveOEioSWEaFROp5PS0lL27t3LwoULqa6u5q677qJ///4EBwc3+2/yrYGqqpSXl/PWW28RFBTEI4880iICCyS0hBDXgGv/oaoq27ZtY+HChZSXl9OzZ09GjRpFUlKSHPPyoPr6et544w1sNhvPPfcc3t7eni7pkl0otGT2oBDiirkCSVEUBg0aRO/evTl69Chr1qxh2rRpdOvWjcmTJxMXF4dOp5MAayKqqmKz2fj0008pKyvjueeeazE9rIuRnpYQolG5ZmJWVlby7bffsn79euLi4rjpppvo2bMn/v7+El7XWF1dHZ988glHjx7llVdewc/Pr8W1uQwPCiGanKqqlJSUsHbtWjZs2IDFYuHWW29lxIgRmM0Ni/+2tJ1pc+ban3/++eds376dP//5z8TExLTINpbQEkJ4jNPppKamhrS0NGbMmEFeXh633norw4YNIyIiQoYOG4GqqlgsFmbPns3GjRt58cUXiYiIaLHtKse0hBAeo9Fo8PX1pU+fPvTs2ZPU1FS+/fZbfvjhBzp16sQtt9xCx44d0elkl3SlVFVlzpw5bNy4kWnTprXowPop0tMSQjQ510SBkydPsmXLFlauXEmbNm144IEHiI+Px2g0Xpc73GvBdeL3okWLWLZsGc8++yxJSUktZtX2C5HhQSFEs+R0Oqmurmb9+vXMnj2bgIAAbr75Zvr160dISIiE10VYrVZmzJjBxo0beemll66btSEltIQQzZqqqpSVlbFjxw7Wr19Pbm4uI0aM4M4775RJG+fhWnV/9uzZfP/99/zpT3+iY8eO100bSWgJIVoEVVWpr68nMzOTBQsWsGvXLm688UZGjx5NTEwMRqPR0yV6nKuNZs2axdatW1v8pIvzkdASQrQ4TqeT1NRUVqxYQVpaGmFhYe6hw9a8TJTdbueTTz5h3759PPvss8TGxl5XgQUSWkKIFszhcJCXl8f27dtZunQpJpOJe+65hx49emA2m1v8pINLpaoqtbW1fPHFF6SkpPDcc8/Rtm3b6y6wQEJLCNHCufZVNpuN1atXs2TJEgwGA3379mXYsGG0adPmutx5n66qqop33nmHoqIi/v73vxMQEHDdvmYJLSHEdaW6upo9e/awbt06jhw5wg033MAdd9xBeHj4dXdxSlVVqa6u5l//+hd1dXX84Q9/IDw8/Lp6jWeT0BJCXHdc5ygVFBQwa9Ysdu3axYABAxg2bBgdO3bEx8fH0yVeNVVVycnJ4Z133iEgIIAnn3wSLy+v6zqwQEJLCHGdc12ccsWKFezduxdvb2/Gjx/PwIEDW9SMw+zsbKKjo92hlJOTw8svv0y7du345S9/2WquEi2hJYRoFRwOB2VlZezdu5fZs2djs9mYMmUKycnJBAUFnXfWocPhaBZDivn5+Tz44IM89NBDTJgwgdTUVN566y1GjRrFHXfccd1cXuRSyNqDQohWQavVEhISwqhRoxg2bBi7du3i22+/Zc6cOQwcOJCRI0cSFxd3Rnht27YNm83GsGHDPBZcFouFjz76iHXr1pGZmUlpaSmbN29mwoQJTJgwQdZl/JH0tIQQ1zVVVamrq+PYsWOsWbOGbdu20b17d+6//34iIiKor6/nqaeeYsuWLbz++uuMGTOmyXtdqqqyd+9epkyZWfgQeAAAHaZJREFUwrFjx1AUhTZt2vD2228zceJE9Hp9k9XSXMjwoBCiVXNdnLK4uJilS5eyfPlyEhMTSUpKYtq0aRQUFBAbG8vzzz/Pvffe26RBUV9fzwMPPMC3336Lw+EAGnqMY8eO5f33328V0/nPdqHQah1n5AkhWj1FUdBqtYSHh/Pggw/y7rvv0rt3b9577z0KCwtRVZWTJ0/yl7/8hVmzZmG322mKL/WqqrJ69WqWLVvmDixoOM62ceNGpk+fjs1mu+Z1tBQySCqEaHUURSE6OpobbriB3NxcnE4n0BAgubm5PPvss5SWlvLrX/8ab2/va1aHqqpkZ2fz73//m+rqagD0ej0REREkJSXx0EMPMXLkSDmedRppCSFEq+R0OpkxYwY1NTVotdozejkFBQU8//zz2Gw2fv/731+zKfN2u51Zs2axbds2TCYT/fr1Y9iwYYwdO5ZevXrh5eV1TZ63JZNjWkKI68aOHTv46quvLun4j6qqHDp0iOzsbOx2OxaLBavVis1mcw8Nms1mbrjhBqKioq7J+oZ1dXWsXbsWi8VCeHg4bdu2JSoq6posBuzn58cf/vAHwsLCGn3b14JMeRdCXPdOnDhBWVkZo0ePvqT79+3b97y3u1baqKmpwWq1EhgYeE0mZlRUVNC7d+8mWblj1qxZVFRUtJjQuhAJLSHEdSUyMpI+ffp4uoxmRVVVFi5c6OkyGoXMHhRCiB85HA5OnDhBWloadXV1V7294uJiqqqqGqGy/7FYLBQUFJxxDK41uWhoKYrymaI
ohYqipJx2W5CiKN8rinLsx78DT/vdc4qipCuKkqooypjTbu+jKMrBH3/3rtLaTjoQQnhUeno68+bNw2q1cuDAAZYvX47Vaj3jPna7nf379/PBBx9w8uRJAKxWK3PnzuW9995j5syZFBQUXPJzrly5kr179zbq6ygsLGT27NlYLBaqq6tZuHAhM2fOJC8vr0mm6HvapfS0vgDGnnXbs8APqqomAT/8+DOKonQGpgBdfnzMe4qiuI4ovg/8Ckj68c/Z2xRCiGsmIyODNWvWkJqayvLly9m8eTNWq5XKykpUVcVisaCqKuPGjSMxMdH9OJvNxnfffceQIUPQarXMnz8fgMrKSoqLiykvL3f3ehwOB+Xl5RQXF1NTU+PehsVioaamBovFQn19vTtc6uvr3eFTXFxMWVmZexJIZWWl+zmsVit2u53y8nKqqqpQVRVVVfnhhx/Izc0FYMmSJdjt9qZqTo+56DEtVVU3KIoSd9bNE4HhP/77S2Ad8MyPt3+jqqoFOKEoSjrQT1GUTMBPVdWtAIqifAXcBqy46lcghBCXQFEU2rVrx/fff09ISAgWiwWHw8ELL7zAG2+8wcaNG6murmbcuHHnPNbhcHDkyBFOnjzJ/7d378FR1/f+x5+fvSfZzY1LIBfuFxEUIRgUpWLAKwq23g/WUtuezun5HdvRjq116vn1N3XmnLa/Xxl1Tmfa6oxTraCABazXqhXUCqKABJVLIBhIIAmYkGSzye5+P78/drMNchE1ZrPJ6zGTye4nu9/vZ9/M7IvP9/v5fr5jx44F4K9//Su7du3CcRwWLVpEeXk5Gzdu5Pnnn8fj8TB37lwAwuEwzzzzDIFAgOHDh3Pw4EG+/vWv4/F4WLVqFRMnTqSxsZHNmzcTj8eprKxkzpw53HfffZSVlREOh/nmN7/JgQMHeOWVVzDGpEJ2165dLF68mGAwyO9+9ztisdiAX/Lpi57TKrLW1gMkf3dPRykBanu87kCyrST5+NPtIiJ9Ji8vj/b2diZOnJiaFt896jndoTVjDKFQiIKCAlpaWrDWUlZWhuM47Nu3j40bNwLw0ksvcfPNN/Pzn/+cyspKANasWUN9fT2XXXYZRUVFtLa28tZbb7F+/Xqqq6uZOHEiJSUluFwuamtr2bBhQ6o/N954I/fffz/FxcVUVVWxePFirr/+evx+f2p0V1tby5tvvjkoRlnQ+xMxTnaeyp6m/eQbMeZfjTGbjTGbGxsbe61zIjK45eXlcc8993DOOeek2lwuF+FwmCNHjgCJgHK5XESj0VSQeb3e1O1B3nvvPRoaGlizZg1Lly7l+uuvT62oEY/HTxjpzJw5E2MM1dXVFBUV0dLSwhtvvMGePXtSt0RZtWoV119/PUuWLEmtfhEIBE64PsztduN2u3G5XPh8Pvx+P9nZ2UyePJns7Oyv5Fqy/uaLTnk/bIwZaa2tN8aMBBqS7QeAsh6vKwXqku2lJ2k/KWvt74HfQ+Li4i/YRxGR4xhj8Pv9RCKR1PPi4mIefvhhOjs7mTFjBi6Xi9LSUlasWEFzczMzZ84kEomwbNkyOjs7qaioSI26nn76aTo6OigoSMxFW7BgAU8++SS5ublUVFQAMHnyZPLz83nuuef4wQ9+QCAQIBAIYK2ltLQUv99PUVERa9asIR6Pn3TE5/f7Oeuss1i7di0ul4tIJILX6+XCCy9k7dq1WGupqKgYFMs9ndGKGMlzWs9aa6cln/8aOGKt/S9jzE+BQmvtPcaYqcCfgQqgmMQkjYnW2rgx5h3gP4CNwHPAQ9ba5z5r31oRQ0TO1IoVK3jnnXdYunTpCX9rb2/HcRxCoVDqRpGFhYW0trbS0tJCdnY2fr+fYDBIJBKhsbGRYDBIfn4+hw4dorOzE4/Hw9ChQ8nKyuKTTz7h2LFjZGdn4/F4KCgoIBaL0djYSGdnJ3l5ebhcLrxeL36/n6amJoYMGUJ7e3vqvFT3YcfW1laam5sJBAK43W4KCwtpbGxk2LBhqcOYXV1dNDU1AeDz+SgsLMRxHBobG4nH4wwdOhS/33/S1UCstdx///386le/YuLEiV/pv0Fv+cIrYhhjniQx6WKoMeYA8J/AfwFPGWO+A3wM3Ahgrd1hjHkK+ACIAf9ure2+mODfSMxEzCIxAUOTMESkz/RcdaL7RpGQOGSYl5d33GuzsrIYNWpU6nlxcfEJ2ysoKEiNsLp5PB5Gjhx50v0XFRWl9vdpubm55ObmHtf26ZUrfD7fCf1wuVyn3N9AdSazB289xZ/mn+L1DwAPnKR9MzDtc/VORESkh4F/1k5ERAaMgX/WTkQGncGwMsRgpdASkQHD4/HwwQcf8Nhjj6W7KziOQywWw+fzpbsrQGIiykCYEq/QEpEBo/tWI93XTaVTbW0tmzZtYuHChV/ZTSQ/j8rKytRkkEym0BKRAWPUqFHHzfpLpy1btlBfX8/ixYtPmBkoX1zmjxVFRGTQUGiJiEjGUGiJiEjGUGiJiEjGUGiJiEjGUGiJiEjGUGiJiEjGUGiJiEjGUGiJiEjGUGiJiEjGUGiJiEjGUGiJiEjGUGiJiEjGUGiJiEjGUGiJiEjGUGiJiEjGUGiJiEjG0J2LRUR6SSwW4+OPP6azs5N9+/Zx9OhRdu7cSTAYJD8/nxEjRmCMSXc3M5pCS0Skl4TDYZYtW8bbb79NJBLh6NGjbNu2DWMMd955J0uWLFFofUkKLRGRXpKdnU1JSQlbtmwhFosBcPDgQQoKChg/frwCqxfonJaISC/xeDxUVFRQWlp6XPucOXMYN26cQqsXKLRERHpReXk5kyZNSj33er3Mnj2bYcOGpbFXA4dCS0SkF4VCIa688kp8Ph8AI0eO5KqrrsLl0tdtb1AVRUR6kTGGRYsW4ff7ARg9ejTTpk3TocFeotASEellpaWlXHrppXi9Xm677TYCgUC6uzRgaPagiPR71loaGhqIRqPp7soZsdZy8cUXs2HDBqZMmcKBAwfS3aUzFgqFyMvLS3c3TkmhJSIZ4Y477iASieD1etPdlTNy7Ngx8vPz+cUvfoHHkxlfteFwmIULF/KTn/wk3V05pcyopIgMeo7j8LOf/Yzhw4enuytnJBKJ0NHRQX5+fsacz9qwYQN1dXXp7sZpKbREJKNkSgBkZWWRlZWV7m4MOJqIISKSZvv376elpaVXt9ne3k5NTQ2O4/TqdtNNoSUiGW/9+vWsXbuWrq4u1q1bx2uvvUY8Hv/M9zmOw+rVq3slMCKRCA8//DAPPPAAf/jDH2hqajrj9z7//PNUV1d/6T70VFdXx7p16wiHwzz33HMsW7aM7du3Y63t1f30NYWWiGS83bt3849//IOamhqeffZZqquraW1tZd++fVRXV9PZ2Ym1lra2Nmpqajhw4ADRaBRrLW+//TbhcDi1rUgkwv79+6mtraWrqwuAjo4Oampq2LNnDx0dHUBi0sK+ffvYs2cPkUiEaDTKli1b+Pa3v00oFGLNmjUcPXqUPXv2sH//fjo6OrDW4jgOhw8fprq6mrq6uuNCpLW1lYaGBpqamlLt7e3ttLS00NLSQn
V1NTU1NbS1taW2dfDgQerr61OfMxKJ8PHHH9PY2IjjOGzfvp2qqirKy8t59tlnP1eY9kc6pyUiGc/n85GXl8fLL7/MpEmTcLlcrFy5ksOHD5OXl8ett95KXl4ey5cvp729nWg0SkVFBRdddNFx23Ech7/85S/s3buXzs5OLrnkEiorK3nhhReoqqoiFApxww03UFpayrp169i9ezehUIibb76ZnJyc1KimpqaGefPmsWvXLjZt2kQ4HGbKlClce+21fPTRR6xcuZJQKMS4ceNYtGgRAEeOHGHr1q2EQiHq6ur47ne/S05ODhs2bMBxHEpLS3n99deJRCIUFxdz4403Yq3lvvvuY8aMGRhjuOmmm9i4cSM7duwgHo/jOA779+9nypQpzJgxg3feeYfm5uaMXlJKoSUiGc8Yw+jRo6mqqmL+/PnU1NTQ3NxMdnY2s2fPJjc3l/r6etatW0dRURGRSARjzAmh1drayvbt27nrrrtoaGjg6aefprKykpaWFrxeL+effz5Dhw4FoKWlBb/fT0VFBYWFhXR2duLz+Zg+fTo+n4+amhrKy8tpamqipqaGpqYmrrnmGl577TXmz5/PBRdcgNvtBhJhuXz5ci644AIuvfRS1qxZw8aNG6mtraW9vZ3LL7+cWCzGsWPHqK6uprq6muuuuw6Px4PP5+O2226jsLCQ1tZWdu7cya233kpTUxOvvvoqsViMlpYWVq5ciTEm489xKbREZEAoKyvjpptuYuvWrQB8//vfZ8eOHSxfvhzHcSgrK2PatGncfffd5ObmnnQWojEGay2xWIxoNJoKlSVLlvD++++zatUq2trauOKKK1i6dCnbtm1j5cqVdHZ2Ul5ejt/vZ9asWRQVFfHAAw9w8OBBvvGNbxAOh3nppZcAcLvdRCKR4w4LGmNYsGABdXV1HD58mGAwyMsvv8zYsWNTz1etWsWFF17IwoULWblyZer9WVlZFBYWYoxJfaZYLEYsFsNaS0FBAY2NjcyfP58nnngi42c0KrREJON1f2G73e7U4z/96U/U19fj8/kYPnw4w4cPZ8KECSxbtgyXy0VlZSUXXXQRDQ0N/OY3vyErK4vp06czY8YMHnzwQWKxGNdeey0Aq1evpqqqilgsRnFxMQArVqxg165dAIwYMQKA5uZm7r//fhzH4YorriAcDvPUU0+RlZWVur/WZZddxhNPPMHrr7/OpEmTWLJkCS6Xi8mTJ3P22Wezbt06Jk+eTCAQYNy4cTQ0NJCTk8OoUaN48cUXCYVCqXNt3Z+9W05ODueeey6PPfYYxhgCgQBTp07l8ccf56GHHmLq1KkZfWgQwPT3mSSzZs2ymzdvTnc3RCSNrLVcffXV/PjHP6aoqOiEv0ciEdxuN16vl2g0mhopxWIxPB4PwWAQYwzRaJRwOIzjOGRlZREIBDh27FhqpqHP58Pv99Pe3o4xhmAwiNvtpq2tja6uLtxuN6FQCJfLRWtrK9FoNLV9SKyC4TgObrebnJwc4vE4HR0duN1uXC4X2dnZQGJyRVdXF16vl2AwSCQSwePx4Ha7aW9vx+fzEY/H8Xg8xGKxVOi1t7fjcrlwu92pbbW1taU+H5B6nTEGj8dDIBCgo6ODrq4usrOz8fl8p7zWbf369dTV1fHLX/6y1/8NPy9jzLvW2lmfbtdIS0QyXs8Fab1eL16v96SHwXw+X+qWId1Ots5efn7+cc+7Q6mnUCj0me/zeDyp1d5Pt72eff30drv76/V6T9j+yV7v8XhO+Ew5OTnk5OSc8N5MpCnvIiKSMRRaIiKSMXR4UEQyQs9ZffLVOJNVRNJNoSUiGaGgoCA18y8ThMNhjhw5QnFxcWrqfH8Xi8W45ppr0t2N01JoiUhGePjhh1PTxjNBVVUVq1ev5q677jrpRI7+yBiTmpXYXym0RKTfM8YwZMiQdHfjc6mrqyM7O5thw4aRm5ub7u4MGJkxzhYREUGhJSIiGUShJSIiGUOhJSIiGUOhJSIiGUOhJSIiGUOhJSIiGUOhJSIiGUOhJSIiGUOhJSIiGUOhJSIiGUOhJSIiGUOhJSIiGUOhJSIiGUOhJSIiGUOhJSIiGUOhJSIiGUN3LhYR6SUdHR288MILHDp0iAMHDvD+++/zyCOPkJWVxXnnncfs2bMxxqS7mxlNoSUi0kvi8Thr167l8ccfx3EcAP72t7/hcrl49NFHmT17dpp7mPl0eFBEpJfk5OQwb948/H4/juOkfsaPH095eXm6uzcgKLRERHqJMYZZs2YxYcKE49rmzJlDWVmZDg32AoWWiEgvmjx5MtOnT8flSny9hkIh5s6dSzAYTHPPBgaFlohIL/J4PNxwww34/X4ARo4cyYIFCzTK6iUKLRGRXnbxxRczdOhQjDFUVFRQUlKS7i4NGAotEZFeFgqFuOmmm/D7/dx+++2pQ4Xy5WnKu4gMaG++9SYffLADsH22T2stbW3HCAZzeG/Lu+yrqe6zfQP4vD4uueRSxowZ06f77QsKLREZ0Fb/5SkanY2UTsjr0/36xnZy+e1jOJL9Cs2xvj2f9f6rRykoGKLQEhHJNI6NceHVZcyqLO3T/cbjDtaxuD2uPp2EYa2l9UgV8Xi8z/bZlxRaIjLwpWHintvtAvfxbdaxtLZ0Eos6hPL9eH3uk79ZTklnB0VEPkMs6rBj0yE62qJfajvRrjgv/nkn/3nbC+x8r6GXeje4KLREZNCy1uLELdGuOPGYg7U29ds6NnGIz1oi7VFeenInRxvCxOOJNQUdxxKLxolF41hrU9uLRZ1ku5Nq7+YLeLjh385l+sXHT4G3n9qWtZZYzDmuX93bjnbFU/0ajHR4UEQGvlN8vztxy5O/3cKOdw6RWxjg2/eezyO/3MQPfzOXg3tbeOv5GhbdMZU//p+NbHxpP3s/OMLi70yj8oaJ/P2ZPaxfuxcsLPzWFM6fP4qajz7h//7w7wwvCVJYlM3Se88nmOc/fdccyxt/3ceLT+7EOpYb/n06+UMCrHhwK0cOtTPv6xO44l/OoubDo6x4cAufNHVw+c2TmX/jRNyewXfBskJLRAatTxrCfPjuYX7yP5WsX7uX9ev20hmOgU2MpLo64wwZkcOdv57L77O93PLD8ygqC3G0IcxTD22ldHw+kY4Ya/5YxfnzR+HEHXJCPv7jvy8mf1jWGU3ACLdHeW31Hr73vy/gk8NhXn5qF3OuHEM8bvmXu8uZdN4wPF4XtXuacXtc3H7PLCaeOwy3Z3AeKFNoicig1RmJE8jxkpPrY8iIbKq3N+H2GDojMTraojjxnkO0xKFEay2dHTGGlQS58X9Nx5/lISvHCyQWxy0em/uZgeVyG2LJw36xLgcM5A8JgIXOjhgz55Xi9bvZsHYvW14/yJK7ZzL78lEEcrz87endbHujjlt+NGNQTuRQaInIwHeK/BhWkoPP7+bxX7/L4QOtLLz9bHwBD0/+dgvxmINxJd7o9hgC2V7WPFrF164dz6TzhjJqUgEb1u0ltyDAhHOHUlQWSuzKmNOPsAyUjsvjlad3E49ZzptbzLizh/Dkb7cQbosyY24Jh
z5upXZ3M/5sD/7sxNd03b5jHKxuJivHiy9r8H51D95PLiKDxynOaXl9bpbeez61u5uZUzCGCecMZdSkfPZsbyKY5yeQ7U29btEdU6nd/Qm5hX48Xjc333ke1duPEOmIUjAsC4ARo0Ms/s7Uz+zOnKvGMGRkDgXDs3C5DNd9bxq732/C5TJMnD6UtpYuSsfnMWZKAWPOKsQXcBPM81E6IZ9xU4cwdkohHq8OD4qIDEynGPgYYygqC6VGSQB5Q7Ion1d2wutGjsll5JjcVFtuQYAZXzt+FmB20MfoswoBONoQ5oNNh/65DZdh7NlDKB6TSzDfT/m8f17sHMzzM2NuyXHbGV5y/K1MRozKZcSoXAY7hZaIyFegsyPG4dpWuhPT5UoEpLVWtyn5EhRaIjLw9fElTd3XVbm9bq64ZTJZQS/trV2s+p9tjBydS06ur287NIAMzoOiIjK49PHAxlr46N0GsoNeAsmJFP6Am+yQjw/eOcwgvS64Vyi0RER6WTzmsO+Do5SMzesxA9HFsJIgtXs+wXGcNPcwcym0RGTgS8PhwZYjEYL5/1wNwxhDTshH+7EunJiGWl+UQktEpJcZDP4sN12dsVSbtYk1Dr0+d2r0JZ+fQktEBr4+zgiXxzBqUgGH9remFrZ1HEtTfTsjRucOyjUDe4tCS0QGvj4+GudyGc6aOZz6/cfoiiRuxtjZEePQ/mOcPWu4prx/CZryLiIDXx9nhDGGMVMKGTIiB48vMTbwBzws/t45DBuZ07edGWAUWiIiXwGvz01hUXbqudvjOmGVC/n8dHhQREQyhkZaIjLgWcfixAfPtVED+a7GCi0RGdDiUcuq373PhnXV6e5Kn6nZHmb+Zy82n5EUWiIyoN31o3s4fPhb6e5Gn3Lf5mb8+PHp7sZXQqElIgPa6NGjGT16dLq7Ib1EoSUiA5quiRpYTH8/YWeMaQV2prsf/dRQoCndneinVJuTU11OTbU5tXTUZrS1dtinGzNhpLXTWjsr3Z3oj4wxm1Wbk1NtTk51OTXV5tT6U210nZaIiGQMhZaIiGSMTAit36e7A/2YanNqqs3JqS6nptqcWr+pTb+fiCEiItItE0ZaIiIiQD8OLWPMlcaYncaYPcaYn6a7P33NGPOoMabBGFPVo63QGPOyMWZ38ndBj7/dm6zVTmPMFenpdd8wxpQZY14zxnxojNlhjPlhsn3Q18cYEzDGbDLGbEvW5hfJ9kFfGwBjjNsYs8UY82zyueoCGGNqjDHbjTFbjTGbk239szbW2n73A7iBamAc4AO2AWenu199XIOvATOBqh5tvwJ+mnz8U+C/k4/PTtbID4xN1s6d7s/wFdZmJDAz+TgE7ErWYNDXh8Sdo4LJx15gI3CBapOqz13An4Fnk89Vl8TnrQGGfqqtX9amv460KoA91tq91touYDmwOM196lPW2vXA0U81LwYeSz5+DLiuR/tya22ntXYfsIdEDQcka229tfa95ONW4EOgBNUHm9CWfOpN/lhUG4wxpcBC4I89mgd9XU6jX9amv4ZWCVDb4/mBZNtgV2StrYfEFzcwPNk+aOtljBkDzCAxolB9SB0C2wo0AC9ba1WbhGXAPUDPe5SoLgkWeMkY864x5l+Tbf2yNv11RYyTLRamaY6nNijrZYwJAquAH1lrj51mjblBVR9rbRw4zxiTDzxjjJl2mpcPitoYY64BGqy17xpj5p3JW07SNuDq0sNF1to6Y8xw4GVjzEeneW1aa9NfR1oHgLIez0uBujT1pT85bIwZCZD83ZBsH3T1MsZ4SQTWE9ba1clm1acHa20z8HfgSlSbi4BFxpgaEqcbKo0xj6O6AGCtrUv+bgCeIXG4r1/Wpr+G1jvARGPMWGOMD7gFWJvmPvUHa4HuGwN9C1jTo/0WY4zfGDMWmAhsSkP/+oRJDKkeAT601v6/Hn8a9PUxxgxLjrAwxmQBC4CPGOS1sdbea60ttdaOIfF98qq19jYGeV0AjDE5xphQ92PgcqCK/lqbdM9aOc1slqtJzAqrBu5Ld3/S8PmfBOqBKIn/2XwHGAK8AuxO/i7s8fr7krXaCVyV7v5/xbW5mMThiPeBrcmfq1UfC3AusCVZmyrg/mT7oK9Nj887j3/OHhz0dSExS3tb8mdH9/dtf62NVsQQEZGM0V8PD4qIiJxAoSUiIhlDoSUiIhlDoSUiIhlDoSUiIhlDoSUiIhlDoSUiIhlDoSUiIhnj/wN8bP8+gceLBAAAAABJRU5ErkJggg==", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" + "image/svg+xml": "\n\n\n\n\n\n%3\n\n\n\n140393102737552\n\nouter_loss\n ()\n\n\n\n140393111544400\n\nMseLossBackward0\n\n\n\n140393111544400->140393102737552\n\n\n\n\n\n140393111544304\n\nMulBackward0\n\n\n\n140393111544304->140393111544400\n\n\n\n\n\n140396584753232\n\nAddBackward0\n step1.a\n ()\n\n\n\n140396584753232->140393111544304\n\n\n\n\n\n140393111544016\n\nAccumulateGrad\n\n\n\n140393111544016->140396584753232\n\n\n\n\n\n140393111547280\n\nMulBackward0\n\n\n\n140393111544016->140393111547280\n\n\n\n\n\n140393111570848\n\nstep0.a\n ()\n\n\n\n140393111570848->140393111544016\n\n\n\n\n\n140393111544256\n\nMulBackward0\n\n\n\n140393111544256->140396584753232\n\n\n\n\n\n140393111544160\n\nDivBackward0\n\n\n\n140393111544160->140393111544256\n\n\n\n\n\n140393111546512\n\nDivBackward0\n\n\n\n140393111546512->140393111544160\n\n\n\n\n\n140393111544112\n\nAddBackward0\n\n\n\n140393111544112->140393111546512\n\n\n\n\n\n140393111546368\n\nMulBackward0\n\n\n\n140393111546368->140393111544112\n\n\n\n\n\n140393111547040\n\nAccumulateGrad\n\n\n\n140393111547040->140393111546368\n\n\n\n\n\n140393111569408\n\n ()\n\n\n\n140393111569408->140393111547040\n\n\n\n\n\n140393111546272\n\nPowBackward0\n\n\n\n140393111546272->140393111544112\n\n\n\n\n\n140393111547088\n\nMulBackward0\n\n\n\n140393111547088->140393111546272\n\n\n\n\n\n140393111547328\n\nPowBackward0\n\n\n\n140393111547088->140393111547328\n\n\n\n\n\n140393111547184\n\nMseLossBackwardBackward0\n\n\n\n140393111547184->140393111547088\n\n\n\n\n\n140393111547280->140393111547184\n\n\n\n\n\n140393111546944\n\nPowBackward0\n\n\n\n140393111546944->140393111547088\n\n\n\n\n\n140393111546944->140393111547280\n\n\n\n\n\n140393111546320\n\nAccumulateGrad\n\n\n\n140393111546320->140393111546944\n\n\n\n\n\n140393111544208\n\nPowBackward0\n\n\n\n140393111546320->140393111544208\n\n\n\n\n\n140393111571168\n\nx\n ()\n\n\n\n140393111571168->140393111546320\n\n\n\n\n\n140393111546848\n\nAddBackward0\n\n\n\n140393111546848->140393111544160\n\n\n\n\n\n140393111547136\n\nSqrtBackward0\n\n\n\n140393111547136->140393111546848\n\n\n\n\n\n140393111547232\n\nAddBackward0\n\n\n\n140393111547232->140393111547136\n\n\n\n\n\n140393111545360\n\nDivBackward0\n\n\n\n140393111545360->140393111547232\n\n\n\n\n\n140393111547424\n\nAddBackward0\n\n\n\n140393111547424->140393111545360\n\n\n\n\n\n140393111547520\n\nMulBackward0\n\n\n\n140393111547520->140393111547424\n\n\n\n\n\n140393111547616\n\nAccumulateGrad\n\n\n\n140393111547616->140393111547520\n\n\n\n\n\n140393111570288\n\n ()\n\n\n\n140393111570288->140393111547616\n\n\n\n\n\n140393111547328->140393111547424\n\n\n\n\n\n140393111544208->140393111544304\n\n\n\n\n\n" }, + "metadata": {}, "output_type": "display_data" } ], "source": [ "net = Net()\n", - "x = torch.tensor(2., requires_grad=True)\n", + "x = nn.Parameter(torch.tensor(2.), requires_grad=True)\n", "y = torch.tensor(1.)\n", "\n", - "optim = torchopt.MetaAdam(net, lr=1.)\n", + "optim = torchopt.MetaAdam(net, lr=1., moment_requires_grad=True)\n", + "\n", + "net_state_0 = torchopt.extract_state_dict(net, enable_visual=True, visual_prefix='step0.')\n", "inner_loss = F.mse_loss(net(x), y)\n", - "net_state_0 = torchopt.extract_state_dict(\n", - " net, enable_visual=True, visual_prefix='step0.')\n", "optim.step(inner_loss)\n", - "net_state_1 = torchopt.extract_state_dict(\n", - " net, enable_visual=True, visual_prefix='step1.')\n", + "net_state_1 = torchopt.extract_state_dict(net, enable_visual=True, 
visual_prefix='step1.')\n", "\n", "outer_loss = F.mse_loss(net(x), y)\n", - "torchopt.visual.make_dot(outer_loss, params=[net_state_0, net_state_1, {'x': x, 'outer_loss': outer_loss}]).render(\"graph\", format=\"png\")\n", - "plt.figure(figsize=(15,15))\n", - "plt.imshow(imgplt.imread('graph.png'))" + "display(torchopt.visual.make_dot(outer_loss, params=[net_state_0, net_state_1, {'x': x, 'outer_loss': outer_loss}]))" ] }, { @@ -251,11 +237,11 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### 2.1 Baisc API\n", + "### 2.1 Basic API\n", "\n", - "We observe that how to reinitialize the inner-loop parameter in a new bi-level process vary in different Meta-Learning algorithms. For instance, in algorithm like MAML, every time a new task comes, we need to reset the parameters to the initial ones. In other cases such as Meta-gradient reinforcement learning, the inner-loop network parameter just inherit previous updated parameter to continue the new bi-level process.\n", + "We observe that how to reinitialize the inner-loop parameter in a new bi-level process vary in different meta-learning algorithms. For instance, in algorithm like Model-Agnostic Meta-Learning (MAML) ([arXiv:1703.03400](https://arxiv.org/abs/1703.03400)), every time a new task comes, we need to reset the parameters to the initial ones. In other cases such as Meta-Gradient Reinforcement Learning (MGRL) ([arXiv:1805.09801](https://arxiv.org/abs/1805.09801)), the inner-loop network parameter just inherit previous updated parameter to continue the new bi-level process.\n", "\n", - "We provide the `torchopt.extract_state_dict` and `torchopt.recover_state_dict` function to extract and restore the state of network and optimizer. By default, the extracted state dictionary is a reference (this design is for accumulating gradient of multi-task batch training, MAML for example). You can also set `copy=True` to extract the copy of state dictionary." + "We provide the `torchopt.extract_state_dict` and `torchopt.recover_state_dict` functions to extract and restore the state of network and optimizer. By default, the extracted state dictionary is a reference (this design is for accumulating gradient of multi-task batch training, MAML for example). You can also set `copy=True` to extract the copy of state dictionary." 
] }, { @@ -267,69 +253,70 @@ "name": "stdout", "output_type": "stream", "text": [ - "tensor(-1., grad_fn=)\n", - "tensor(-1., grad_fn=)\n" + "a = tensor(-1., grad_fn=)\n", + "a = tensor(-1., grad_fn=)\n" ] } ], "source": [ "net = Net()\n", - "x = torch.tensor(2., requires_grad=True)\n", + "x = nn.Parameter(torch.tensor(2.), requires_grad=True)\n", + "\n", "optim = torchopt.MetaAdam(net, lr=1.)\n", + "\n", + "# Get the reference of state dictionary\n", "init_net_state = torchopt.extract_state_dict(net)\n", "init_optim_state = torchopt.extract_state_dict(optim)\n", "\n", - "# get the copy of state dictionary\n", + "# Set `copy=True` to get the copy of state dictionary\n", "init_net_state_copy = torchopt.extract_state_dict(net, copy=True)\n", "init_optim_state_copy = torchopt.extract_state_dict(optim, copy=True)\n", "\n", - "# Conduct 2 inner-loop optimization \n", - "inner_loss = net(x)\n", - "optim.step(inner_loss)\n", - "inner_loss = net(x)\n", - "optim.step(inner_loss)\n", - "print(net.a)\n", + "# Conduct 2 inner-loop optimization\n", + "for i in range(2):\n", + " inner_loss = net(x)\n", + " optim.step(inner_loss)\n", + "\n", + "print(f'a = {net.a!r}')\n", "\n", - "# Recover and reconduct 2 inner-loop optimization \n", + "# Recover and reconduct 2 inner-loop optimization\n", "torchopt.recover_state_dict(net, init_net_state)\n", "torchopt.recover_state_dict(optim, init_optim_state)\n", - "inner_loss = net(x)\n", - "optim.step(inner_loss)\n", - "inner_loss = net(x)\n", - "optim.step(inner_loss)\n", - "outer_loss = net(x)\n", - "outer_loss.backward()\n", - "print(net.a)\n", "\n", - "# same result" + "for i in range(2):\n", + " inner_loss = net(x)\n", + " optim.step(inner_loss)\n", + "\n", + "print(f'a = {net.a!r}') # the same result" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "### 2.2 Multi-task example with extract_state_dict, recover_state_dict\n", + "### 2.2 Multi-task Example with `extract_state_dict` and `recover_state_dict`\n", "\n", "Let's move to another more complex setting. Meta Learning algorithms always fix network on several different tasks and accumulate outer loss of each task to the meta gradient.\n", "\n", - "Assume `x` is a meta parameter and `a` is a normal parameter. We firstly update `a` use inner loss li1 = `a0` * x^2 to `a1`. Then we use a1 to compute the outer loss lo1 = a1 * x^2 and backpropagate it. Then we use `a0` to compute the inner loss li2 = `a0` * x and update `a0` to `a2` (`a2` = `a0` - dli2/d`a0` = `a0` - x). Then we compute outer loss lo2 = `a2` * x and backpropagate it. So the accumulated meta gradient would be:\n", - "\n", - "dlo1 / dx + dlo2 / dx\n", - "\n", - "= (-4 * x^3 + 2 * a0 * x) + d(a2 * x)/dx\n", - "\n", - "= (-4 * x^3 + 2 * a0 * x) + da2/dx * x + a2\n", + "Assume $x$ is a meta parameter and $a$ is a normal parameter. We firstly update $a$ use inner loss $\\mathcal{L}_1^{\\textrm{in}} = a_0 \\cdot x^2$ to $a_1$. Then we use $a_1$ to compute the outer loss $\\mathcal{L}_1^{\\textrm{out}} = a_1 \\cdot x^2$ and back-propagate it. Then we use $a_0$ to compute the inner loss $\\mathcal{L}_2^{\\textrm{in}} = a_0 \\cdot x$ and update $a_0$ to $a_2 = a_0 - \\eta \\, \\frac{\\partial \\mathcal{L}_2^{\\textrm{in}}}{\\partial a_0} = a_0 - \\eta \\, x$. Then we compute outer loss $\\mathcal{L}_2^{\\textrm{out}} = a_2 \\cdot x$ and back-propagate it. 
So the accumulated meta gradient would be:\n", "\n", - "= (-4 * x^3 + 2 * a0 * x) + d(a0 - x)/dx * x + a0 - x\n", - "\n", - "= (-4 * x^3 + 2 * a0 * x) - 2 * x + a0" + "$$\n", + "\\begin{split}\n", + " \\frac{\\partial \\mathcal{L}_1^{\\textrm{out}}}{\\partial x} + \\frac{\\partial \\mathcal{L}_2^{\\textrm{out}}}{\\partial x}\n", + " & = (- 4 \\, \\eta \\, x^3 + 2 \\, a_0 \\, x) + \\frac{\\partial (a_2 \\cdot x)}{\\partial x} \\\\\n", + " & = (- 4 \\, \\eta \\, x^3 + 2 \\, a_0 \\, x) + (\\frac{\\partial a_2}{\\partial x} \\cdot x + a_2) \\\\\n", + " & = (- 4 \\, \\eta \\, x^3 + 2 \\, a_0 \\, x) + [\\frac{\\partial (a_0 - \\eta \\, x)}{\\partial x} \\cdot x + (a_0 - \\eta \\, x)] \\\\\n", + " & = (- 4 \\, \\eta \\, x^3 + 2 \\, a_0 \\, x) + [(- \\eta) \\cdot x + (a_0 - \\eta \\, x)] \\\\\n", + " & = (- 4 \\, \\eta \\, x^3 + 2 \\, a_0 \\, x) + (- 2 \\, \\eta \\, x + a_0)\n", + "\\end{split}\n", + "$$" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Let's define network and variables first." + "Let's define the network and variables first." ] }, { @@ -341,7 +328,7 @@ "class Net2Tasks(nn.Module):\n", " def __init__(self):\n", " super().__init__()\n", - " self.a = nn.Parameter(torch.tensor(1., requires_grad=True))\n", + " self.a = nn.Parameter(torch.tensor(1.), requires_grad=True)\n", " \n", " def task1(self, x):\n", " return self.a * x ** 2\n", @@ -351,7 +338,8 @@ "\n", "\n", "net = Net2Tasks()\n", - "x = torch.tensor(2., requires_grad=True)\n", + "x = nn.Parameter(torch.tensor(2.), requires_grad=True)\n", + "\n", "optim = torchopt.MetaSGD(net, lr=1.)" ] }, @@ -359,7 +347,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Once we call `step` method of `MetaOptimizer`, the parameters of the network would be changed. We should use `torchopt.extract_state_dict` to extract state and use `torchopt.recover_state_dict` to recover the state. Note that if we use optimizers that have momentum buffers, we should also extract and recover them, vanilla SGD does not have momentum buffers so codes `init_optim_state = torchopt.extract_state_dict(optim)` and `torchopt.recover_state_dict(optim, init_optim_state)` have no effect." + "Once we call `step` method of `MetaOptimizer`, the parameters of the network would be changed. We should use `torchopt.extract_state_dict` to extract state and use `torchopt.recover_state_dict` to recover the state. Note that if we use optimizers that have momentum buffers, we should also extract and recover them, vanilla SGD does not have momentum buffers so code `init_optim_state = torchopt.extract_state_dict(optim)` and `torchopt.recover_state_dict(optim, init_optim_state)` have no effect." 
] }, { @@ -371,67 +359,66 @@ "name": "stdout", "output_type": "stream", "text": [ - "((EmptyState(), EmptyState()),)\n", - "tensor(-28.)\n", - "tensor(-31.)\n" + "init_optim_state = ((EmptyState(), EmptyState()),)\n", + "Task 1: x.grad = tensor(-28.)\n", + "Accumulated: x.grad = tensor(-31.)\n" ] } ], "source": [ + "# Get the reference of state dictionary\n", "init_net_state = torchopt.extract_state_dict(net)\n", "init_optim_state = torchopt.extract_state_dict(optim)\n", - "# it's SGD so state_dict is empty\n", - "print(init_optim_state)\n", + "# The `state_dict` is empty for vanilla SGD optimizer\n", + "print(f'init_optim_state = {init_optim_state!r}')\n", "\n", - "li1 = net.task1(x)\n", - "optim.step(li1)\n", - "lo1 = net.task1(x)\n", - "lo1.backward()\n", - "print(x.grad)\n", + "inner_loss_1 = net.task1(x)\n", + "optim.step(inner_loss_1)\n", + "outer_loss_1 = net.task1(x)\n", + "outer_loss_1.backward()\n", + "print(f'Task 1: x.grad = {x.grad!r}')\n", "\n", "torchopt.recover_state_dict(net, init_net_state)\n", "torchopt.recover_state_dict(optim, init_optim_state)\n", - "li2 = net.task2(x)\n", - "optim.step(li2)\n", - "lo2 = net.task2(x)\n", - "lo2.backward()\n", + "inner_loss_2 = net.task2(x)\n", + "optim.step(inner_loss_2)\n", + "outer_loss_2 = net.task2(x)\n", + "outer_loss_2.backward()\n", "\n", - "# extract_state_dict extract the reference so gradient accumulate\n", - "# x.grad should be (-4 * x^3 + 2 * a0 * x) - 2 * x + a0 = -28 - 2 * 2 + 1 = -31\n", - "print(x.grad)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## 3. Gradient transformation in MetaOptimizer" + "# `extract_state_dict`` extracts the reference so gradient accumulates\n", + "# x.grad = (- 4 * lr * x^3 + 2 * a_0 * x) + (- 2 * lr * x + a_0)\n", + "# = (- 4 * 1 * 2^3 + 2 * 1 * 2) + (- 2 * 1 * 2 + 1)\n", + "# = -28 - 3\n", + "# = -31\n", + "print(f'Accumulated: x.grad = {x.grad!r}')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "We can also use some gradient normalization tricks in our `MetaOptimizer`. In fact `MetaOptimizer` decendents like `MetaSGD` are specializations of `MetaOptimizer`. Specifically, `MetaSGD(net, lr=1.)` is `MetaOptimizer(net, alias.sgd(lr=1., moment_requires_grad=True))`, where flag `moment_requires_grad=True` means the momentums are created with flag `requires_grad=True` so the momentums will also be the part of the computation graph.\n", - "\n", - "In the desiging of TorchOpt, we treat these functions as derivations of `combine.chain`. So we can build our own chain like `combine.chain(clip.clip_grad_norm(max_norm=1.), sgd(lr=1., requires_grad=True))` to clip the gradient and update parameters using sgd." + "## 3. Gradient Transformation in `MetaOptimizer`" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "dlo/dx\n", - "\n", - "= da1/dx * x^2 + a1 * d(x^2)/dx\n", - "\n", - "= d(a0 - x^2 / scale)/dx * x^2 + 2 * a1 * x\n", + "We can also use some gradient normalization tricks in our `MetaOptimizer`. In fact `MetaOptimizer` decedents like `MetaSGD` are specializations of `MetaOptimizer`. Specifically, `MetaSGD(net, lr=1.)` is `MetaOptimizer(net, alias.sgd(lr=1., moment_requires_grad=True))`, where flag `moment_requires_grad=True` means the momentums are created with flag `requires_grad=True` so the momentums will also be the part of the computation graph.\n", "\n", - "= -2 * x / scale * x^2 + 2 * (a0 - x^2 / scale) * x\n", + "In the designing of TorchOpt, we treat these functions as derivations of `combine.chain`. 
So we can build our own chain like `combine.chain(clip.clip_grad_norm(max_norm=1.), sgd(lr=1., requires_grad=True))` to clip the gradient and update parameters using `sgd`.\n", "\n", - "= -2 * x^3 / scale + 2 * a0 * x - 2 * x^3 / scale\n", - "\n", - "= -4 * x^3 / scale + 2 * a0 * x" + "$$\n", + "\\begin{aligned}\n", + " \\frac{\\partial \\mathcal{L}^{\\textrm{out}}}{\\partial x}\n", + " & = \\frac{\\partial (a_1 \\cdot x^2)}{\\partial x} \\\\\n", + " & = \\frac{\\partial a_1}{\\partial x} \\cdot x^2 + a_1 \\cdot \\frac{\\partial (x^2)}{\\partial x} \\\\\n", + " & = \\frac{\\partial (a_0 - \\eta \\, g)}{\\partial x} \\cdot x^2 + (a_0 - \\eta \\, g) \\cdot 2 x & \\qquad (g \\propto \\frac{\\partial \\mathcal{L}^{\\textrm{in}}}{\\partial a_0} = x^2, \\ {\\lVert g \\rVert}_2 \\le G_{\\max}) \\\\\n", + " & = \\frac{\\partial (a_0 - \\eta \\, \\beta^{-1} \\, x^2)}{\\partial x} \\cdot x^2 + (a_0 - \\eta \\, \\beta^{-1} \\, x^2) \\cdot 2 x & \\qquad (g = \\beta^{-1} \\, x^2, \\ \\beta > 0, \\ {\\lVert g \\rVert}_2 \\le G_{\\max}) \\\\\n", + " & = (- \\beta^{-1} \\, \\eta \\cdot 2 x) \\cdot x^2 + (a_0 - \\beta^{-1} \\, \\eta \\, x^2) \\cdot 2 x \\\\\n", + " & = - 4 \\, \\beta^{-1} \\, \\eta \\, x^3 + 2 \\, a_0 \\, x\n", + "\\end{aligned}\n", + "$$" ] }, { @@ -443,24 +430,28 @@ "name": "stdout", "output_type": "stream", "text": [ - "tensor(-12.0000)\n" + "x.grad = tensor(-12.0000)\n" ] } ], "source": [ "net = Net()\n", - "x = torch.tensor(2., requires_grad=True)\n", + "x = nn.Parameter(torch.tensor(2.), requires_grad=True)\n", + "\n", + "optim_impl = torchopt.combine.chain(torchopt.clip.clip_grad_norm(max_norm=2.), torchopt.sgd(lr=1., moment_requires_grad=True))\n", + "optim = torchopt.MetaOptimizer(net, optim_impl)\n", + "\n", + "inner_loss = net(x)\n", + "optim.step(inner_loss)\n", "\n", - "impl = torchopt.combine.chain(torchopt.clip.clip_grad_norm(max_norm=2.), torchopt.sgd(lr=1., moment_requires_grad=True))\n", - "optim = torchopt.MetaOptimizer(net, impl)\n", - "li = net(x)\n", - "optim.step(li)\n", - "lo = net(x)\n", - "lo.backward()\n", - "# p.grad is -4 * x^3 / scale + 2 * a0 * x = -4 * 2^3 / scale + 2 * 1 * 2 = 4 - 32 / scale\n", - "# since max_norm is 2 and the gradient is x^2, so the scale should be x^2 / 2 = 2^2 / 2 = 2\n", - "# finally p.grad is 4 - 32 / 2 = -12\n", - "print(x.grad)" + "outer_loss = net(x)\n", + "outer_loss.backward()\n", + "# Since `max_norm` is 2 and the gradient is x^2, so the scale = x^2 / 2 = 2^2 / 2 = 2\n", + "# x.grad = - 4 * lr * x^3 / scale + 2 * a_0 * x\n", + "# = - 4 * 1 * 2^3 / 2 + 2 * 1 * 2\n", + "# = -16 + 4\n", + "# = -12\n", + "print(f'x.grad = {x.grad!r}')" ] }, { @@ -469,112 +460,92 @@ "source": [ "## 4. Accelerated Optimizer\n", "\n", - "Users can use acclerated optimizer by seeting the `use_accelerated_op` as True. Currently we only support the Adam optimizer." + "Users can use accelerated optimizer by setting the `use_accelerated_op` as `True`. Currently we only support the Adam optimizer." 
] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Check whether the accelerated_op is avariable:" + "Check whether the `accelerated_op` is available:" ] }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 10, "metadata": {}, "outputs": [ { - "data": { - "text/plain": [ - "True" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" + "name": "stdout", + "output_type": "stream", + "text": [ + "True\n" + ] } ], "source": [ - "torchopt.accelerated_op_available(torch.device(\"cpu\"))" + "torchopt.accelerated_op_available(torch.device('cpu'))" ] }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 11, "metadata": {}, "outputs": [ { - "data": { - "text/plain": [ - "True" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" + "name": "stdout", + "output_type": "stream", + "text": [ + "True\n" + ] } ], "source": [ - "torchopt.accelerated_op_available(torch.device(\"cuda\"))" + "torchopt.accelerated_op_available(torch.device('cuda'))" ] }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 12, "metadata": {}, "outputs": [ { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] }, { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAmcAAANSCAYAAAAgcmm7AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOzdd3hTZfvA8e/J6t67paXsvQrIFlDmC8iQJSiIglt5BVRAcaKgKCqKskQQXwFBRUVkb2QPEWjZlC4obelukzY5vz8w+YEMFdomae/PdXEBGee50+Y85z7PVFRVRQghhBBCOAaNvQMQQgghhBD/T5IzIYQQQggHIsmZEEIIIYQDkeRMCCGEEMKBSHImhBBCCOFAJDkTQgghhHAgZZ6cKYrSTVGU44qinFIUZXxZly+EEEII4ciUslznTFEULXAC6AwkAnuBB1RVPVZmQQghhBBCOLCybjm7CzilquoZVVVNwBKgdxnHIIQQQgjhsHRlXF4EkHDV/xOBFrd6Q2BgoBodHV2aMQkhhBBClKmTJ08WZWVlGW70XFknZ8oNHruuX1VRlMeAxwCioqLYt29facclhBBCCFFmateuXXCz58q6WzMRiLzq/5WA5L++SFXVOaqqNlNVtVlQUFCZBSeEEEIIYW9l3XK2F6ihKEoVIAkYDAwp4xiEEEKUotTUVDZu3GjvMEpd06ZNqV69Oopyo04hIW5fmSZnqqoWK4ryDLAG0ALzVVU9WpYxCCGEKF2xsbE88fQzRFStjs5wwyE1Ti/x1Elee3kCzzzzjCRnosSVdcsZqqquAlaVdblCCCHKTu2Yuxj16jv4BYfYO5RSMfPlMfYOQZRjskOAEEIIIYQDkeRMCCGEEMKBSHImhBBCCOFAJDkTQgghhHAgZT4hQAghhLhTqqqCqmJRVTSaK+0MFosFRVFsf4RwVtJyJoQQwimdPHyQLya/TEr8WVITE5j92kvEHdhzi3dctyGNEA5JWs6EEEI4HUVRqFK3PueOH2PzimUU5OVSrV4DajVudotWM2lNE85BWs6EEEI4Jb3BhZZdenD++DGSzpzi7vvuR6uTNgfh/CQ5E0II4ZRUi4VThw/i4uEJqsqJ3w+iWiz2DkuIOya3GEIIIZyOqqoknDrBrrWr6PHgo2g0Gn5eMBu/oCAiq9eSCQHCqUlyJoQQwumoqkpRkYm7OnWjSp16oCjc3bs/xvx8VFWV5Ew4NUnOhBBCOB2NRkO1eg2h3v8/1rT9vfYLSIgSJGPOhBBCCCEciCRnQgghnJqqqpw5doTsjHTbY6bCQs6fjCM/N8eOkQlxeyQ5E0II4dSyM9L5btbHGI2FtseKi0zs2bCG4wf2yQxO4XQkORNCCOG0VFVlz4bVhFepSmBouO1xN08vIqpU58Tv+zEWFtgxQiH+PUnOhBBCOC2L2cy+Teto0q7jNTM0FUUhomp1ks6eJj8398penEI4CUnOhBBCOK2sjHSy0tOIqFL9uud8AgLIz82hQMadCScjyZkQQginVWQsBFT0BsN1z+l0BjSKBlNh4fVvFMKBSXImhBDCabl6eKBotBTk5133nMlYiEW14ObpaYfIhLh9kpwJIYRwWt5+AYRGVubM0cPXPZd+IQWfgEDcPCQ5E85FkjMhhBBOS1EUWnTpwYGtG1HV/18yw2Ixk3j6BJWq1cDNw1O2cxJORZIzIYQQTq1R67sxFhRw/sRx22O5WZlcOH+O2k2ao3dxsWN0Qvx7sremEEIIp+bq7s7ISW9juCoJ8/T2pfejT2FwcZFWM+F0JDkTQgjh1BRFwd3T65rHNFotbh4edopIiDsj3ZpCCCGEEA5EkjMhhBBCCAciyZkQQgghhAORMWdCCCFKnLEwn7QLyRSZjPYOpVQU5l2/6K0QJUWSMyGEECXuzNEjLJj6BnqD3t6hlIrE06egeyd7hyHKKUnOhBBClKhmzZqxc/vWMitPVVX+85//MHXqVBo1alRm5YaEhMgyHaJUSHImhBCiRHl4eFC3bt0yK89isWAwGIiOji7TcoUoLTIhQAghhBDCgUhyJoQQQgjhQCQ5E0IIIYRwIJKcCSGEEEI4EEnOhBBCCCEciCRnQgghhBAORJIzIYQQQggHIsmZEEIIIYQDkeRMCCGEEMKBSHImhBBCCOFAJDkTQgghhHAgkpwJIYQQ
QjgQSc6EEEIIIRyIJGdCCCGEEA5EkjMhhBBCCAciyZkQQgghhAOR5EwIIYQQwoFIciaEEEII4UAkORNCCCGEcCCSnAkhhBBCOBBJzoQQQgghHIgkZ0IIIYQQDkSSMyGEEEIIByLJmRBCCCGEA5HkTAghhBDCgUhyJoQQQgjhQCQ5E0IIIYRwIJKcCSGEEEI4EEnOhBBCCCEciM7eAQghhBD/1qFDh0hLSwNAVVWMRiP79u0jLy8PAE9PT2JiYjAYDPYMU4jbIsmZEEIIp7Ns2TJmz55t+39OTg6vvvoqWq0WRVFo164d33zzjR0jFOL2SbemEEIIp9O1a1cyMjJIT08nPT0dk8lEZmYm6enpZGVl0aZNG1xdXe0dphC3RZIzIYQQTqdRo0Y0bNgQRVGue87V1ZUBAwbYISohSoYkZ0IIIZyOm5sb/fr1u+5xRVHo2LEj4eHhdohKiJIhY84qgMLCQg4fPmzvMOzCzc2NBg0a2DsMIUQJ0+v1tG3bloCAANvEAKtBgwah0Ujbg3BekpxVABcuXKBfv37oDAYUpeJUWBazmWpVq7Bx40Z7hyKEKGGKolC9enWaN2/Or7/+anu8evXqNG3a9IbdnUI4C0nOKoDi4mK8QiJ45p3phERWtnc4ZebQ9s38Mmu6vcMQQpSS8PBwWrVqxYYNGzCZTCiKQqdOnQgNDZXkTDg1Sc4qCEUBRaNBo9Ve+4QKlNM6TCpnIco3nU5HixYtCA8P59y5c3h7e9OyZUu8vLzsHZoQd6Ti9HGJG5P8RQjhxJo3b05UVBQANWrUoHHjxjLeTDg9+QYLIYRwWr6+vnTt2hUXFxfq1atH7dq17R2SEHdMujXF9W7Q1Zmfm0P88VhqxzTnSm/h7TW5WSwWUuLPcjEhHjcPT6Jq1MLD2+efByKEuCO5ubkcOHCAzMxMe4dSYvR6Pa6urri7u7N69Wp7h1Ni3N3dadKkCQEBAfYORZQxSc4quhvlPzfIh9KSk/h6+jtM/vp77qTBNeHUcX6Y8ylBEZFkpaVStV5D7uk3GMMNV/KWxEyIkpaUlMS4cePw9vYuVyvoR0ZGEh8fz5w5c+wdSonIz88nMzOTjz/+mHbt2tk7HFHGJDmr6K7KfwoL8vl+9ieciztKtXoN6T50BG6eXmz5cRlrliwiJf4skx66n8hqNen3+LMEhkWQcOo465Z+TWpSAo3bdqBT/yHoXVxY/NG76A0GzsYdwz8klP5PjMbbP4CDWzfh5etHv1HPcPz3/WxY/g3N7ulCYOiNF4xUVZUL58/yy6IvSE1MoE7Tu+j18OPo9Poy+gEJUT49/fTTBAYG2juMEpOVlYWPz81a4Z3PhQsXmD9/vr3DEHYiyZmw2bXmF5LOnGLUq+8QfzyW9IsXiPb1o0OfAVSuVY8vp7zKy7MXodXq0BkM5GReZumnH9C8Yxfue+QJvp/9CVtXfs+99z9AwqkTaLRaho4Zz6bvl/LDnE8ZMmY8l5ITiK5dn/y8HLz9/CnIzcVYkH/LuGL37aF9r/vxDQzmq/cns2PVj7Tv3b+MfipClE8+Pj74+fnZO4wSoaoqvr6+5WqGdkFBAdq/zq4XFYZMCBA2QeGV8PLx5dSR33Hz9CQ0KhoUBZ3egIurK4pGg6u7OwZXVxRFIf1CMomnTpKfl8OR3TvQGwyc+uN3LBYLWp2W5vd2JTy6Gq269uLY/t2oFgtmsxmdXs++jWtJiT+LqqqoqnrLuOq1aE1G6kWO7tuFweDC2dgjZfMDEUI4BUVRylViJoS0nAmb2k2b4+ruTmpSArvX/UrGxYu0+c99Vyo9Ba4MUPt/qgoarQYFheKiYiJr1CY8uqrteVtlqVx5sU5vwMPLm8y0S9z3yOMknDyO3mBAp7t5F6W5uIhvPpxKpWrV8Q0Mpri4iKIiU8l/eCGEU9i1axerV6/G1dWVMWPGYDAY/tX7VVW1WyKXlZXFjh07aNSoEREREXaJQTgHaTkTNvs3byD78mVqNW6GVqslNen8lQwM8PDywZhXQEr8OYpMRlBVAkPDqFS9Jj4BgTS/pzN+wSEU5Oei0WgwF5vZt3EtF86fY9e6VdRs0gyNVkv1+o05suc3ks+e5sju3/ANCr5mtuaSGdP4csprtta0IpOJc3FHqd+yLXWbtyI/J/uvOaIQopSoqkpRUREmk+lvW7jLSu3atenTpw/bt2+nuLj4H71HVVWKi4spLi7+R4mZqqpYLBZMJhNGo5GioqIS+fwFBQUcOnSIy5cv3/GxRPkmLWfCpnZMc375ah4rv5pLpSrV6dBnAMqfizn6h4TSefCDzH1zIsERkfR/YjSBYeH0f2I065Z+zYbvllAnpjmdBg4FQKPVEhAWzoKpbxAQGsaAp/4LQPN7u5Kbk8WXU14nKLwSPYePwtPH1xaD3sUF81UVrqu7B/2eeI7vZs3Ay9ePene1tsUkhChd+fn5vPHGG5w+fZpvvvkGFxcXe4eEr68v0dHR/2o8ltls5ttvv6WoqIjhw4f/7etNJhObN2/m119/JTc3lyZNmtC/f39CQkLuJHQh/jFJziq6q5bS8Pbz54HRL97wZYqi0HXwMLoOHnbN45Vr1mHkpLdv+Pr6LVoz/MVXr3lcp9fTecBQOg8YesNy7n/8ueuO077X/bTvdf8//EBCiJJy5swZ9Ho9Wq2Wo0ePEhMTA1zZr/fEiRPExsai1+tp3bq1beZnSkoKe/fupaioiAYNGlC9enUuXbpEfHw8DRo0QFEUDh8+TFRUFGazmX379gEQGhpKcnIytWrVom7duuzevZvAwECqVatGQkIC58+fp1WrVjdd/d9isZCYmMihQ4coLCykbt261KtXj/z8fFauXMnu3bsxm81X6qb69YmJicFsNnPs2DGOHz+Oi4sLzZo1IyQkhFOnTrFy5UoGDhxITEwMSUlJtla6vLw89u7di9lsJjU1FVdXV9q3b4+7uzv79u0jMTGRgIAAWrRogZeXF8XFxbYyNBoNRUVFZfCbE85OmiAqOhlDK4S4AVVVOXXqFCEhIbRr145t27bZHo+Li2PevHm2hOXAgQMApKWlMX36dC5duoSLiwuHDx+mqKiIhIQENmzYQF5eHvn5+WzcuJHz589z5swZVqxYwZkzZ1i6dCkFBQUsXrwYk8nE+vXrOXr0KACnT5/m119/vWXXYkFBAbt27QKu7Lm5YMECTp8+jVarJTQ0FF9fX3x8fIiIiLAtubFv3z6++eYb9Ho9Fy5cYNmyZWRnZxMbG0tAQAAxMTF4eHhQs2ZN2xix3Nxc5s+fz9atW/H29iYkJAS9Xs+hQ4dISEjAx8eHPXv2sHr1asxmM8ePH+fbb78FriSuZ8+eLYXflihvpOVMlIrH33gXg6sbRSYTFosZg4urbayHqbAQ4CYLzwohHEFubi6xsbE0adKEyMhIPvjgAzIzM/Hx8WH16tU0aNCAfv36oSiKrTXot99+w2AwMHToUPR6PSaT6W8H7FeuXJmmTZtiNpu5++672bt3L7m5uf86XldXVzp16gRcadmLi4vjyJEjVK9enTZt2pCYmEhRURH
33nsvAEVFRWzcuJF69erRunVrMjMzmTFjBpmZmWRkZODt7Y2bmxtz5szhxx9/5MEHH2Tw4MEABAQE0L17d+666y7bTFFrK6G1ZW/Hjh307NmTo0ePEh4eTq9evUhOTubcuXP/+rOJikeSM1EqPH18MZuL2fbzDxQW5HPv/Q+g/7OS3r9lPWePHWHQc+PQauUrKISjUVWVjIwMTp48SXh4ODqdjuzsbI4ePUrr1q3JyMigdu3a6P9cDFqnu3IeZ2ZmEhAQgIuLC4qi4ObmBnDdIHxri5uiKBgMBnQ6Ha6urmg0GjQaDRaLBUVRbC1lZrP5mlYz6/Gsj6mqSk5ODvPmzSMpKQmTyURKSgpVqlS57j1WJpOJ3NxcVq5cyfbt223laTQagoODuXDhAoWFhYwYMQKdTkd+/v+vx+jp6Ymfn58tEbNYLGzbto3169dTWFhIWloa7u7umM1m8vLy8PT0xGAw4Obmhqen5x3+dkRFIN2aotRkZ6Rz7vgxqtZtcM2K/k3uvodzx4+RfPa0HaMTQtzK6dOnMZlMFBQUcPr0aSIiIjh27Bhms5n69euzb98+Ll26xOXLl4mNjQWgVq1axMXFER8fT25uLseOHaOoqAgXFxdyc3PJysoiJSWFhISEW5atqip+fn6cPXuWnJwcTpw4QeGfLe4Abm5uuLq6kpiYaEvQLl26xLFjxxg9ejSTJk2iUqVKttdbk8CsrCxMpitL8bi6uhIdHc0999zDu+++yzvvvMOIESMIDAykZs2aXL58mcOHD2M0Gq9rybtRsrlq1SruvvtupkyZQs+ePXFxcUGn0xEWFsb58+fJyMjg3LlzJCUl3f4vRVQY0mwhSoWqqqRfvEBBbg5B4RHXVGYurm5UrduAwzu3U6laTVk8UggHY7FY2LNnD/feey8PP/wwiqKwdetWVq9eTWZmJp07d+bixYu8/vrreHh42Lr7GjVqxL333sv06dNRFIWOHTtSo0YNKlWqRFBQEO+88w6RkZF4eXldV+ZfF5K95557eO+995g0aRIeHh621jkAg8FA3759mTJlCjVq1GDkyJEEBAQQGRnJlClTCAkJwcPDw/Z6jUZDnTp12LRpE08++SQ9evSgX79+9OzZk+XLlzNp0iT0ej2tWrWiYcOGVKtWjU6dOrFkyRIKCgrw8fFhwIAB18VrpdPp6NixI6tWrWL9+vX4+vqi0+lQFIWmTZsSFxfHK6+8go+PT7nbyUCUDsVR1q65mWbNmqnW2Tzi9pw6dYr7Bg/lmSkfXVn1/479dbf063dPt1gs7PjlR079cYhhL76C9qqFZlVVZefqlRzavokn3nr/prOvbrwr+z93cOtGVn7+AQf/HKwshIDjx4/z0EMPMXXqVIKDg2/4GlVVMRqNaLVa9Hq9bd2voqIiDAaDbZxZUVERiqLg4uJiW9qiuLjYti6atcsSsL1eo9GgKIrtuGazGa1Wi8ViQafTXTNOrbCwEFVVbce2dpdayzEajSiKguufu5aYTCaKiorQarW2LlJr16vFYsFoNGI2m9Hr9bi4uNjWcbN+Dr1eb4vX+nqLxYJWq8VgMNjiNJlMtlmsVtZ4gGvivdln/7ulQJKTk5kxYwYvvfSSbHxeTtWuXTs7Li7uhhvCSsuZuMo/TYb++pobvEdVycm6jMHN9ZrEzMrDx4fC/HyKTaZbTAz4p4nZnSVxQohrWROeq/+v1WqvSSgMBsMNB/vrdLprWrn+yeuvdnW57u7uN43xRuW4uLjcdC02jUZjGwNnZe3uvFFcWq32huVrNJprYrxVPFY3K0OIm5ExZ+IqJZjgKAqubu4Um4qwmM3XPW0sKEBnMKC9SWX2LwsrgWMIIYQQjkGSM/Gnku3eVhSF0Kho8nOzyc/Nue75iwnxhFSKQvMvVvm+NcfunhdCCCH+KUnOxJ9KvvUpMCwcg6sbaSlJ10yDNxcXcfzgPhq2aleCA2Ol9UwIIUT5IMmZKBWKouAXHEJoVDTxJ+Ku6do89cchXN3dqd6wsf0CFEIIIRyUTAgQpUZvcKFllx4U5OaAotjG7fv4B3L/E8+hN9h/E2UhKqrly5c7zYKoFovlFrO6/551VujNBuw7ouzsbDIyMuwdhrAT5/mmCqcUEBIKIaHXPBYWXdVO0QghvLy86NSpEwkJCbe1TVJZy83NtW2UfvXCsv/G2bNnSUhIoFmzZrecAepIVFWlefPmtg3lRcUiyZlwLrJqhhB3JCwsjLffftveYfwjGRkZvPvuuwDMmDHDtvn47RxnxIgR1KlThxdeeOGGi+AK4UhkzJlwLv8oMZOZm0LcjHUlfkf/U1hYyMcff0xcXByvvfYaERERt30sPz8/Jk6cyL59+/jhhx8wm812/3z/5o+oeCQ5E87lH+VdUpkJ4axUVcVkMvHhhx+yadMm3njjDRo0aHBHSYpGoyEmJoaHHnqIOXPmcODAARx9dxxRsUm3ZgVhsVgwGQsxFuTbO5QyU2QySSOaEE7Eum3UvHnzWLFiBXPmzKFhw4Z3NBnAymAwMGDAAJKSkhgzZgxff/01lStXlpYp4ZAkOasgLiUl8tP8WXh4eds7lDKTmpyIxXL97gRCCMdUVFTE4sWLWbFiBVOmTCmxxMxKr9czevRozp49y8SJE5k2bdptj2MTojRJclYBBAUF8forE+1S9h9//MHevXsZMmTIDfejK10NCQgIKOMyhRC368cff2ThwoU8++yztG3btkQTMyu9Xs+ECRN46aWXmDNnDuPGjZMJAsLhSHJWAfj4+PDss8/apewff/yRzMxMRo0aha+vr11iEEI4NlVVWbduHW+//Tbjxo2jZ8+eN93A/E4pikJERATPPPMMkyZNok6dOgwYMOCaTd2FsDeZECCEEMJuiouL2bJlC2+99RbPPPMMQ4YMKbXEzEqj0dCyZUueeuoppk2bxt69e7FYLKVaphD/hiRnQggh7MJisbB//34+/PBDevfuzUMPPVQqXZk3otFobGVOmDCBY8eOlUm5QvwTkpwJIYSwi7Nnz/LGG2/QsGFDHn300VJvMfsrnU7HyJEjiYmJYdq0aSQnJ5dp+ULcjCRnQgghypSqqiQlJfHcc89RpUoVxo0bZ7cxqR4eHjz33HPk5+fzxRdfkJeXJ2ugCbuT5EwIIUSZUVWV+Ph4XnzxRUJCQnj//ffx9va223pjiqIQFRXFiy++yPr161m+fLmMPxN2J8mZEEKIMqGqKqmpqbz77ru4uLgwefJkXF1d7b4QrKIoxMTEMHr0aD777DO2bNli13iEkKU0hBBClDpVVSkqKmLy5MmkpqbyzjvvEBoaavfEzEqr1dKzZ09SUlJ48803CQkJoW7dug4Tn6hYpOVMCCFEqVJVlfz8fF5//XWOHj3K1KlTqVGjRpnNzPynDAYDDz/8MI0aNeLdd98lOTlZxp8Ju3CsM0MIIUS5k52dzUcffcSePXv47LPPHDIxs3J3d2fcuHEYjUZmzZpFfn7F2Y9YOA7HPDuEEEKUC0ajkQULFrBjxw5effVVatasae+QbklRFCpVqsTYsWPZsWMHS5YssX
dIogKS5EwIIUSpUFWVH3/8kaVLl/L000/TqlUrh20xu5p1gsDYsWP55JNP2Lx5s3RvijLl+GeJEEIIp2Mymfj555+ZMmUKEyZMoHv37uj1enuH9Y/pdDq6d+/OqFGjeOONN/j9999liQ1RZiQ5E0IIUaJMJhNr167lgw8+YMyYMfTq1cspWsz+SqPRMGLECBo3bszHH39MSkqKtKCJMuF8Z4sQQgiHZbFY2Lt3LzNmzGDgwIEMGjTI3iHdETc3N0aPHo3RaGTGjBkUFRXZOyRRAUhyJoQQokSoqsqZM2cYP34899xzD8OHD3eqrswbse4gMHbsWLZs2cLixYule1OUOknOhBBC3DGLxUJcXByjRo2iQ4cO/Pe//8XDw6NcLOKq0WiIiYnh9ddf5+OPP2bt2rUUFxfbOyxRjskOAUIIIe6IqqqcOHGCV155hZiYGMaPH4+rq6u9wypRiqLQtWtXTp8+zYwZMwgNDaVRo0blIvkUjkdazoQQQtw2VVW5ePEikydPJjw8nBdeeAF3d3d7h1VqHnzwQerXr8/7779PWlqaTBAQpUKSMyGEELctJyeH8ePHo9FoePHFFwkJCSm3rUmKouDt7c3TTz9NYWEh06dPx2Qy2TssUQ5JciaEEOK2pKamMnHiRDIzM3nrrbeIjIwst4mZlXWCwMsvv8zWrVv56quvMBqN9g5LlDOSnAkhhPjXMjIymDFjBvHx8UyZMoXKlSvbO6QyoygKTZo04dVXX+XLL79k3bp1mM1me4clyhGZECCEEOJfKSwsZMGCBRw8eJA33niD2rVr2zsku7jnnns4e/YsH3/8MZUrV6Z+/frlvuVQlA1pORNCCPGPmUwmFi1axPfff8+ECROIiYmpsAmJTqdjyJAhNG/enHfffZfU1FSZICBKhCRnQggh/hGj0ci3337L3LlzmTRpEm3atHHKbZlKinWCwGOPPYbRaOTjjz+moKDA3mGJckC6NUWJSk5OZsGCBZw9exaA+Ph4Tp06xejRozEYDAB07tyZ+++/H61Wa89QhRA3YbFY2LJlC7Vr1yYsLAyAoqIifv75Z2bNmsXEiRPp3LlzhW0x+6vo6GgmTZrEU089xcKFCxk5cqTT74wg7EuSM1GiPD09OXjwIMuXL7/mcWuyptFopFIXwsHl5eXx9NNP07x5c9v6Zdu2beOjjz7iqaeeokePHhW6xexGGjRowFtvvcWLL75IVFQU3bt3l5+RuG2SnIkS5eXlRceOHVm3bh1ZWVnXPd+gQQMaNmwoyZkQDmzp0qWcPHmSuLg4srOzGTVqFG+++SaPPPII999/PzqdXDr+SlEU2rZty5NPPsnMmTMJCwujSZMmUteJ2yJpvShRiqLQuXNn/Pz8bvhcmzZtCA8PlwpLCAeVmZnJokWLMJvNqKrKihUrePzxx2nYsCEPPvggLi4ucv7ehF6vp0+fPtSrV4/p06eTmppq75CEk5LkTJS4qlWr0qxZs+ua9P39/WnRogWenp52ikwI8Xc2bdrEiRMnrpl1mJyczLFjxzh48KDMRvwb/v7+PPfcc+Tl5TF16lRZoFbcFknORInTarUMGTLkurvrqlWrctddd8k4DCEcVE5ODj///PN1LT4Wi4Vdu3bxzDPPsH37diwWi50idA6VKlVi+vTp7Nq1i9mzZ1NUVGTvkISTkaukKBX33HMP4eHhtv/rdDoaNmxI9erV7RiVEOJmVFXl999/57fffrth8mXd4PzHH3+U/ST/hqIoREdHM3XqVL7//nt++eUXSdDEvyLJmSgV7u7u9O3b19Z65u7uTq9evWQgsRAOymg0snnzZk6dOnXN44qi4O7uzqBBg/jkk0+YNGkSLi4udorSeSiKQosWLRg4cCBz587l6NGj0iUs/jFJzkSp0Ol0dO/e3ba2mZ+fHx06dLBvUEKIG1JVlcuXL/Ptt99es0ekXq+nXbt2fP3113z44Yf07dsXHx8fmRDwD7m6ujJkyBDq1KnDG2+8QW5uriRo4h+R5EyUmlq1atGqVSt0Oh2DBg3C29vb3iEJIW7i119/5ciRI8CVJXEaNWrE3LlzWbFiBb179yY4OFjGi94GHx8fJkyYgEaj4eWXXyY/P9/eIQknIH1MDsRkMpGSkmLvMEqM0WikVq1a/Pbbb7Rs2ZLz58/bO6QSo9PpiIiIsHcYogzl5+dz6dIle4dRKoxGI5999hkGg4HIyEg6d+7Mo48+SmBgINnZ2WRnZ9stNp1Oh7+/P25ubnaL4U4oikJAQADvvfcejz32GPPmzWPUqFG4u7vbOzThwCQ5cyDJyck89thj9g6jRCUkJKAoCh9//LGti7M8CA8PZ8GCBfYOQ5ShQ4cO8cYbb5TLbimj0cihQ4fQarXodDpOnDjBhAkT7B0WACEhITz//PPExMTYO5Q7UqVKFcaNG8cHH3xA1apV+c9//iNb2ImbkuTMgeTm5nLhwgWeeuopgoKC7B1OicjJySErK4vw8PBy0yVy5MgRli5dau8wRBlLTU3l/PnzTJ482d6hlLj4+HieeeYZhztHMzMzWbx4Menp6fYO5Y5pNBruuece4uPj+fDDD6lTpw7VqlWT8XvihiQ5czDu7u5UrVr1mmUonNnVrQzlpRLKzMwsN59F/Duurq7UqlXL3mGUuJo1awKOd46mpaXh6upq7zBKjMFgYNiwYZw9e5Znn32WhQsXEhwcbO+whANyrNskUe4oimL7I4RwTHKOlg3rsiRjx47Fz8+Pd955p1y0CoqSJ8mZKBFms5mJEycybty4my5i6ci2b9/O6tWr7R2GEKVm+/bt/Pzzz2VW3smTJ/nuu+9k+6IbCA4O5tVXX+XIkSN88803FBYW2jsk4WAkOXNCqqpiNpvJyclxmFWnNRoNTz/9NJGRkSQkJPzj95nNZvLy8v7xIGtVVSksLCQnJ4fc3FyKi4tLZIB2fHw8J0+evOPjCGE0Gm0zHHNzcykqKrrt72h+fr7tWHl5ebbNyG9HfHw8cXFxt/Xe25Gens7Ro0dt56jJZLKdt45Sb9lTzZo1mTBhAsuXL2fz5s1Od0MrSpeMOXNSBw4cYNy4cYwdO5aePXvafSCvoiiEhITg5eX1r94XHx/PG2+8wfz58//RzKWEhATmz59PamoqXl5e3HfffbRu3fp2wxaixC1ZsoTly5cTFhaGTqcjJiaGIUOG4Obm9q+7DidMmEBCQgLBwcG4uLjQs2dP7r33XqfrgszNzWXZsmXs2bMHjUZD27Zt6du3r9Muj1ESFEXh7rvv5qGHHmLy5MlUqlSJ+vXr2zss4SAkOXNCqqqya9cuqlevzpkzZygoKMDDwwO4MoB2x44dXL58mXr16tGoUSMMBgN5eXkcOHCAU6dOERoaSps2bfD29mbdunXUqFGD6Ohojh8/zsWLF4mJiWHPnj2kpqbi4uKCp6cnhYWFdOrUidzcXI4dO0ZMTAwuLi7s3buXiIgIoqOjbxpvfn4+O3fuJD4+nuDgYNq1a4enpyeHDh1i9erVpKSkMGvWLEJCQujYsSP+/v6kpaWxc+dO0tPTqV69Oq1at
UJVVWbPnk1wcDDPPfccFouF5ORkWzm7d+8mJyeHtLQ0MjMzadGiBQ0aNODcuXPs3bsXk8lETEyMrQLMyMhgw4YNFBcXk5ycXK4GHgv7UVWVdu3aMWbMGGJjY5k2bRrt2rWjevXqnDp1iv379wMQExNDzZo1Wb16NU2aNCEsLAy40pq8f/9+fHx8AHj00Ufp2rUra9as4YcffqB9+/YAHD16lMOHD+Pi4kLLli2pXLkyiqJQUFDAwYMHOXHiBIGBgbRt2xZfX19bfBaLhf3795OXl0f16tWJj4+nWbNmti2ZEhMTiYuLo3379pw9e5b9+/djMplo2rQp9erVs5WxZcsWLBYLFy5cQK/X061bNzw9Pdm6dSspKSkYjUbbbgMnTpzgjz/+4Pnnn6eoqIh58+ZRv359GjZsWFa/FoejKAp6vZ4BAwZw5swZXn31VT7++GMiIyPtHZpwANKt6YSKiorYt28f3bt3JyUlhezsbFRVJSsri88//5ykpCSioqJITEwkMzOToqIi1q5dy4YNG4iIiMBsNtv2z1u7di3x8fEAxMXFsW3bNvLy8lizZg0ajYaff/6ZU6dOsWHDBpKTk0lLS2PLli22rokdO3bY3n8z+/fvJyMjg6pVq3L48GF+/fVXLBYLISEh1KlTBw8PDxo3bkzNmjVxc3OjsLCQhQsXcv78eaKioli7di3bt2/n4sWLHD16lL59++Ln50dAQAANGjSwlbNnzx5mzZqFi4sLDRo0ICQkhNzcXPbv309gYCD+/v589dVXnDlzBoB58+Zx/vx5fHx8+P3336/ZtkaIO6XRaKhUqRKurq5kZWWRlJTEN998g6IoGAwGvvrqK+Lj4zl8+DD79+9HVVVUVbWdr9aB4qqqYrFYyMnJwdXVFUVRSExM5MSJE1SuXBmj0ci3335Leno6xcXFbNq0iVWrVhEaGoqiKBw/ftwWk8ViYefOnaxcuZKAgAAyMzPZtm0bmZmZtvIPHTrEkSNHyMrK4uDBgwQFBeHr68uiRYs4e/YsAAUFBXzzzTf89ttvVK5cmXr16uHm5savv/7Kli1bCA8PJz4+nsuXL6OqKsnJyQQEBBAeHk5kZCQ+Pj5cuHDBLr8XR+Pj48PYsWPRarW8//77tvpcVGzScuaEDh48CMA999zD7t27ba1hx44dIz8/n6eeego/Pz/MZjNarZbMzEwOHjxI165dadWq1T8a2+Dt7U1MTAwnTpygbt26XL58mbS0tNvagikmJoacnBxUVSU7O5tdu3bRt29fKlWqhMlk4scff6Rly5a2bs3ExEQOHDjA+PHjbReQzZs3ExISQnFxMUFBQRw9epRPPvmE8+fPs3LlStt7W7ZsSa9evdBqtSiKgtlsplOnThQVFVFYWGhrUQgICGDnzp3MmjWLoKAgjh8/bveuYVF+5Ofnk5iYaGuxjYqKst0Qde/eHY1Gw+nTp4mLiyMmJoZ9+/bh7e3N5s2beeSRR0hJSaFq1aoAzJgxw9btP3LkSLRaLZUqVcLLy4vi4mJ8fHz44YcfSE1NxdXVlV27dtGxY0c6duyIxWK55kL/xx9/cO7cOUaMGEG9evVITU0F4PLly3z00Ue0bduWM2fOUKdOHXx9fW3nTkFBAYcOHeLUqVO2uAICAvjPf/5Dy5YtURSFwsJCduzYwYABA7jrrruwWCxs3boVi8VCfn4+bm5uLFmyBE9PTwwGg0wUuIq/vz/vvfceo0aNYv78+TzxxBO4uLg4Xfe1KDmSnDkZVVXZuHEjLi4u7Nmzh8LCQjZs2ECbNm0oKCjAzc0NFxcXNBqNLdkwm80UFRXh6el5zeNw5e7emqxdPbheq9Wi0WgwGAzo9Xq0Wi3FxcXXxKGq6jWPXX0863EsFgvr169n586dWCwWUlNT0elu/bXLyckhKSmJL7/80pZ0NWzYEF9fX1xcXLh48SL169dn4sSJPPvss9e8NzQ01HZ8VVVJS0tj3rx5ZGdnU1xczPnz52nYsCGFhYVYLBa8vLzQarV4eXnJjClRYg4fPswnn3xCYGAgzz77LIGBgRw5cgQXFxcMBgOKouDi4oLRaKRVq1Z8/fXX1KpVi4SEBOLi4nB1dbWtfzV69Gi6du3KwYMH+eijj2jWrBlJSUl89913FBYWkpubS2ZmJsXFxVgsFoxGI15eXted69ZWueDgYA4dOkTDhg3x8/PDxcWFEydOUFhYyJEjR8jLy6N79+5cunSJ+fPnk52dTVFREYmJiTRt2tR2PHd3d3x8fGxlFBUVYTKZ8Pb2RlEU3Nzc0Ol0aDQaPDw8yM/P57HHHsNsNvPhhx/KMIKrKIpC5cqVmTBhAm+//TZVq1alZ8+ekpxVYNJU4GTS09M5cuQITZs2JSMjgxo1anD48GHy8vKIjIzk0qVLHDt2jLy8POLi4sjOzsbNzY3AwED2799PdnY2iYmJtu4JPz8/zpw5Q2ZmJkePHv3brj2DwUBhYSEZGRmkpKTYugjhSmLm5eXFpUuXMJlMtuRtzZo1tG3blldeeYX27dtfk5y5urpiNBq5fPmy7bGQkBAaNGjAkCFDePPNNxk5ciQNGzYkKCiIJk2a8N1335GRkUF2dvZ18f61Mrtw4QIJCQk8/fTTPPPMM/j5+QFX7lQDAgLYt28fly5d4o8//pDZUqLEtGzZknfffZeXXnqJpk2botVqCQwM5PLlyyQlJXHx4kWSk5MJCQkhICAARVE4c+YMrVu3ZtWqVdSvX/+axMqa7OTl5dmSKFdXV1566SWGDBliG3NqMBiIiIhg3759ZGZmkpKSYmuxUxSFpk2bMmrUKM6cOcPWrVsxGAyEhYWxcuVK2rVrR1FRkS25S05OJjk5mWeffZann34aX1/fW3a3ubu7ExERwa5du8jJybHVP4qiEBERQXp6OgkJCcTHx5OVlWUbYyeu0Gg0tGnThiFDhjB9+nSOHj1q75CEHd12y5miKJHAV0AoYAHmqKr6saIo/sBSIBo4BwxUVfXyn++ZADwKmIHnVFVdc0fRV0CxsbH4+vry0EMP4e7uTmFhIYcOHeLQoUO0atWK/v37s2TJEnJzc2nTpg39+vXD3d2dXr16sWzZMp5//nmqVKnCQw89BFzpYpk+fTqHDx/Gy8vLNqPMemHQaDQoimLrJgwJCaFKlSq89957VK5cmcDAQFtCpNFoaNKkCfv27ePxxx/ngQceoGvXrnTv3p0VK1awcuVKQkJCrpmhFRwczF133cWYMWOIjIzk8ccfJyIigkGDBrF8+XJmzZpFjRo1GDBgADqdjscee4zZs2czduxYdDodvXr1uibWv3ZNhoWFERwczKRJk4iIiLDFq9frGTVqFAsXLkSv12MwGGSfO1EibvQ9VBSF6tWr07hxY6ZNm4ZGo6F169Y0bNgQRVGoW7cuiqLQtWtXfv75Z4YOHQpcacH+/PPPWbJkCe7u7vTp04fg4GAaNGjA/v37GTNmDJUrV8bX19fWGterVy++/fZbxo4dS2RkJMOGDbPFpdPpiIiIYMiQIXz44YfUqlWLKlWqkJ2dTevWrcnLy+P8+fN4eHgQ
ERGBv78/L7/8MpUqVSIwMND2ua6uE6x0Oh0DBw5kzpw5jBkzBl9fXzw9PQGoVasWzZs3Z9q0aWi1Wrp3727blUD8P1dXVx544AHb3qbz5s0jJCREWtAqIOV2Bx4qihIGhKmqekBRFC9gP9AHeBjIUFV1qqIo4wE/VVVfUhSlLrAYuAsIB9YDNVVVvWVTTbNmzdR9+/bdVozO5siRI4wcOZI333zzpts3WSwWzGYzOp0ORVFsrVPWbkjr86qq2h6zvs5sNmOxWNBoNLaK9equSWsFoNVqbePVzGaz7bjWY1nLsK4qfvXF6OpytFotWq32mpisr7u6YjebzbbjXR3X1e/56+MWi+Wasq3jy6zHtvrr661/ro7L+tmtj/+d3bt3y51tBbRixQreeOMNFi1adMvXXX3+XX1RtQ7st7bQ/nXoAWAbPmA9d61dlcB155p1GMJfb07++p231hXWuHQ63TX1hrV8nU5nG5Jw9eM3OnesZdyo6/Tqesb6mYBr6g3r5/s7aWlpvP/++4wePZrOnTv/7evLi/z8fEaMGEFAQACTJ0/Gz89PErRyqHbt2tlxcXE+N3rutlvOVFVNAVL+/HeOoiixQATQG+jw58sWApuBl/58fImqqkbgrKIop7iSqO283Rgqor9WhtZWoJs9f/XrbjTW66/vt7K+1vr31ce0Jl03cqNybhbTrY53devdP/kc1uP8m9f/XVxC3I5bnRs3O3eu/o5efT7eanzmjc5bazk3et/V5d6o3rhR7Dcr/1b1ya3OT2md/mfc3Nx48803efHFF1m4cCFPPvmkjNGrYErkyqQoSjTQBNgNhPyZuFkTOOuurhHA1UvHJ/75mBBCCCH+pCgK1apV46mnnmLlypVs3LhRlteoYO54tqaiKJ7Ad8B/VVXNvkXT642euOG3TVGUx4DHAKKiou40RCGEEMKp6HQ6OnbsyLlz53j11VepUqUKtWvXlu7NCuKOWs4URdFzJTH7n6qq3//58MU/x6NZx6Wl/vl4InD10seVgGRuQFXVOaqqNlNVtVlQUNCdhCiEEEI4JYPBwMiRI+natStjxozh3Llz0oJWQdx2cqZcSd+/AGJVVZ1+1VM/AcP//Pdw4MerHh+sKIqLoihVgBrAntstXwghxM05y0XcWeK0F61WywsvvEBYWBgfffSRbecIUb7dSbdmG+Ah4A9FUQ79+dhEYCrwraIojwLngQEAqqoeVRTlW+AYUAw8/XczNSsyqbCEcEzOdG46S6w3i9NZ4i9tPj4+jBkzhpdffpmvv/6ap556CoPBYO+wRCm6k9ma27nxODKAe2/ynreBt2+3zPJOq9Vy5swZHnvsMbvNIlRVlYKCAkwmEx4eHjedEVbWro7LYDDY1mOzh+LiYtvq7aLiMBgMJCcn07t371Itx2w2k5+fj9lstu34URLy8/PRaDRONetPVVX8/Pwcph6yF+taeE8//TQvv/wytWvXpkuXLjLbvByT7ZscSFRUFD/88IPdys/MzGTXrl1s2bIFvV7P0KFDqVWrlt3iuVpBQQG//fYbmzdvxmw207RpU5o3b06lSpXsUkE50wVOlIyWLVvyww8/lFprjtFo5OTJk+zcuZOjR48SGhpK9+7dadCgQYl8xz/44AP8/f0ZMWJECURbdlxcXKhevbq9w7A7jUZD586dSUlJ4bXXXiMwMJCmTZvKBIFy6rYXoS0rFWkRWnsxmUzs3buXRYsWcerUKXr06MF9991HlSpVHOrOTFVVEhISWLVqFevXr8disdC1a1f69euHTBwRzkpVVY4cOcJ3333Hnj17CAsLo2fPnrRu3Zrg4OASu/hOmDABo9HI9OnT//7FwmGZTCamTp3KwYMHmTZtmiSuTuxWi9BKclZBWTcuv3TpEu+99x47duygS5cuPPDAA1StWrXEulJKmnUF8pSUFLZt28b8+fMxm8088sgj9O7dGy8vL7mTFA7Pev4lJCQwb948fvnlFxo1asTQoUNp0qQJ/v7+Jf49fu+994iLi2P+/PklelxRtlRVJS0tjfHjx+Ph4cHrr78uOwg4qVLZIUA4L4vFQkZGBqtWreLTTz+lRo0afP7559SvX9+21Yujsq5AXqlSJQYPHsx9993HsmXLmDt3LgsXLuSpp56iTZs2BAQE3HJ1dSHswTp2Mj4+niVLlrBy5UqqVKnC559/TpMmTdDr9aV2/vn7+5ORkVEqxxZlR1EUAgMDGTt2LGPHjmXRokU8+eSTMkGgnJGrVwVjNBrZvHkzS5cuJTU1lWeffZaBAwc6bEvZzVj3+fP09OThhx+me/fu/PDDD8ybN4+lS5fSvXt37r33XipVquTQyaaoGKxJ2YEDB1i7di07duygevXqTJs2jTZt2pTJ+efr60tWVhaqqso54eQURaFOnTqMGzeOV155hcqVK3Pfffc51DAUcWckOasgVFXl9OnTzJkzh/3799OlSxcmTpxI1apVnf6EVhSF0NBQnnjiCbp168aGDRv4+eefWb58OX369KFv374EBATYO0xRQZnNZnbt2sWiRYs4efIkTZs25ZVXXqFZs2Z4eXmVWRz+/v7k5+djNBplQks5oCgK7du355lnnmHq1KmEhobSsmVLe4clSogkZ+Wcqqrk5uaybNky5syZQ926dfn000+Jjo7G1dW1XN1BK4pCdHQ0Dz/8ML169WLjxo18/vnn/O9//+Pxxx/nP//5D56enk6fjArHp6oqJpOJ48ePM3PmTHbu3Em3bt2YPn061apVw8PDo8zPPT8/P4qLi8nNzZXkrJzQ6XT079+f8+fPM2XKFKZPn07VqlXLVb1eUcmEgHIsPz+fo0eP8v7773Px4kX++9//0q1bN1xcXMr9yWv9Xufm5rJkyRK+/vpr3N3dGTlyJC1btiQkJETGpIkSZ7FYyMrKIjY2loULF3LgwAFat27N6NGjiY6OtnXH28P58+fp2bMnP/74I1WqVLFLDKLkqapKRkYGL774Il5eXrz66qsyQcBJyGzNCsY6C2zx4sWsWbOGtm3bMmzYMKpVq1YhT1hVVUlOTuann35i9erVuLq60qlTJ3r06EF4eLi9wxPlgLWFetOmTaxevZpTp07RsmVLevfuTcOGDR1iEdXs7Gxat27N4sWLadCggb3DESVIVVViY2N58cUXad++Pc8//7zcfDoBma1ZQVgT7Q0bNjB9+nQCAgKYMGEC7dq1q9DdGIqiEBERweOPP063bt3YunUr3333Hd9++y1Dhgyhb9+++Pj4VMjEVdwZVVVtk2zmzZtHeno63bt3Z+TIkdSrV8+hJtro9XpcXFzIzc21dyiihCmKQu3atRk3bhxjx46levXq9OnTR+o0JybJWTmgqioWi4XExEQ+++wz1qxZw8iRIxkyZAi+vr4yxupPGo2G6OhoIiMj6d27N7/88guff/45ixYt4oknnqBz5854e3uj1WrtHapwcBaLhZycHPbv38/nn3/O6dOnGTx4MEOHDiUoKKhUl8S4XYqi4OHhQV5enr1DEaVAo9HQrl07Jk6cyDvvvENISAgtW7aU+t9JSXJ
WDmRnZ7Nq1SoWLlxIcHAwixcvpnbt2gAOd4GwN+s6aT4+PgwZMoTu3bvbJkt88803DBw4kLZt2xIRESFJmriO2Wzm4sWL7Nmzh2XLlpGQkEC3bt34+OOPCQsLAxz7nHNxcaGwsNDeYYhSotVque+++zh27BjTp0/nnXfeoUaNGg79nRQ3JmPOnJiqqsTFxTFnzhyOHDnCkCFD6N27twwG/ZdUVSUxMZFffvmFdevWoSgKnTt3pm/fvrLBubBJT0/np59+Yu3ateTl5dGpUye6detG9erVnaJ1wmg00r9/f4YPH07//v3tHY4oJaqqkpqayqRJk9DpdEydOlV2TnFQMuasnLFO01+1ahUfffQRNWvWZMaMGVSvXt0hBh47G0VRqFSpEiNHjuQ///kPW7du5auvvmLp0qWMGDFCtoWqoKw3rtnZ2axcuZL58+djMBgYNGgQ7dq1o3Llymi1Wqf6Xri4uGA0Gu0dhihFiqIQHBzM888/z+jRo5kzZw5jx461d1jiX5LkzMmYzWZSU1OZOXMma9eu5dlnn6Vfv364u7s71UXC0Vi7OyMjI3nggQfo06cPy5cvZ86cOSxYsICnnnqKdu3a4e/vL7OgKoCioiLS09PZtGkTX3zxBSaTieHDhzN48GBcXV3RaDROd74pioKrq6t0a1YA1gkCkydP5rHHHqNy5cr07dtX6i4nIr8pJ2Gdqr9hwwa++OIL/Pz8+OSTT2jevLlTdKk4C0VR0Gq1eHp6Mnz4cLp3787333/PvHnzWLJkCT179qRjx45ERkY63cVZ/D2z2cyZM2fYuHEja9asoaioiEGDBtGnTx8CAwOd/ncuY84qDkVRaN68OS+++CIzZ84kNDSU1q1by1haJyHJmRNQVZVLly7xwQcfcODAAfr27cvgwYPx9/e3d2jlmqIohISE8MQTT9C9e3fWrVvHTz/9xPLly7nvvvvo16+fbAtVjiQnJ7N48WI2bNiAj48Pffv2pWPHjlSqVMneoZUIRVGkW7MC6tevH2fOnOHDDz8kIiKCKlWqOP1NRkUgEwIcmHWJjKNHj/LCCy/g6enJ+PHjadSokUNO1S/PVFXFbDZz6dIltmzZwueff45Go+Gxxx6jR48esi2UE7KeX5cuXWLRokUsW7aMiIgInnzySRo3bkxAQEC5amUoKipi3LhxhIeH89JLL9k7HFFGrBMEXnnlFbRaLR9++CFubm72Dktw6wkBcjVxUKqqkpmZyVdffcVjjz3GXXfdxcKFC2nWrBkGg0ESszJmHZMWGhrKoEGD+OWXX3jwwQeZPXs2AwYM4PvvvycpKYni4mJ7hyr+hqqqFBYWcvLkSWbOnEnfvn3ZsWMHb7zxBkuXLqVz584EBweXq8TMSqfTyXe0grFOEBg/fjynTp3i448/pqCgwN5hib8h3ZoOKi4ujlmzZnHmzBlGjx7N/fffj8FgsHdYFZ41Kfb09OSRRx6he/fu/PTTT3z11VcsW7aMzp070717d8LDwyWBdkBms5lDhw6xevVqdu7cSWBgIOPGjaNTp074+NzwBrbcsN5gSHJW8SiKQrVq1XjjjTcYO3YskZGRDBw4UGb3OzBJzhxMcXExa9as4YMPPqBWrVq8++671KxZU2bZOCBFUQgPD+exxx6ja9eubNmyhRUrVvDtt98yePBg+vbti5+fn73DFFxZ0T8uLo758+eze/duGjRowLPPPkvz5s0r1LqAkpxVbK1atWLs2LHMnDmTypUr06ZNmwrz3Xc2csV3EKqqcvnyZebNm8fSpUt5+umnGThwIB4eHnLyOLirt4W67777WLNmDTNnzrRtC9W1a1e8vLzKZTeZI7OuB5iQkMCsWbNYt24djRs35tNPP6VatWq4u7tXuHGCOp0Ok8lk7zCEnSiKQs+ePTl37hzvv/8+kZGRREVFyTXGAUly5gAsFgunTp1i6tSppKSkMGPGDFq2bCkXcydi7TLy8/Nj8ODBdO/enaVLlzJnzhz+97//MWTIEFq3bi3bQpUBVVXJycnh6NGjrF69mjVr1lC/fn1mz55Ns2bNnG7h2JKk1+vJz89HVdUK+zOoyKxr3T388MOcOHGCKVOmMHXqVHx9fe0dmvgLma1pZyaTiS1btvDxxx9TrVo1nnzySWrVqiUVZzmgqipJSUmsXLnSti1Up06d6Nu3LyEhIfYOr1wqLCxk165dfPfdd5w6dYq6devyn//8hzZt2uDq6mrv8OzKbDYzbdo0Ll68yPTp06WOqeCOHz/O6NGjufvuuxkzZkyFPz/sQbZvckDWLpevv/6a+fPnM3jwYB588EEZo1SOKIpCREQEo0aNonv37mzdupVvvvmGZcuWMWzYMPr27SvbQpUAVVUpLi7m4MGDfP755xw7dox7772Xt956i9q1a8vQgKsoioKqqtJyJqhZsybvvPMOTzzxBNHR0QwePLjCdfM7MknO7MA6vuz9999n48aNTJo0ic6dO8vMmXLIuuNAVFQUQ4YMoU+fPrYdB7766iueeuop2rdvj6+vr0z6+JcsFgt5eXnExcXx2WefceDAATp16sTSpUsJDw+XtQCFuAVFUWjUqBETJ07kww8/pFKlSrRt29aWoBmNRkwmE15eXnaOtGKSq0EZKyoq4vfff+ejjz7CaDTy6aef0qxZM3uHJUqZNUnz8vJi2LBh/Oc//+G7775j7ty5LF68mPvuu4/27dsTGRkpd69/Q1VV0tPT2blzJ2vXruXIkSO0bt2asWPHUrduXfn5CfEPabVaOnfuTGxsLB9//DERERFUrVqV9PR0li5dSkJCAlOmTJGbHDuQ5KwMFRcX8/PPPzNnzhxiYmJ4+umniYiIsHdYoowpikJQUBCPP/443bt3Z82aNfz0009899139OjRg379+hEYGGjvMB1SdnY2mzZtYunSpWRnZ9OmTRsefvhhGjduLBMthLgNHh4ejBgxgtOnTzNlyhTGjBnDrFmzWLRoEYGBgYwePZqwsDB7h1nhSHJWBqxb/yxYsIBZs2bx5JNPMmDAAGkuruAURSEqKopHHnmEnj17snXrVmbPns0333zDqFGj6NWr1w23hTKZTKxdu5YOHTrg6elpp+hLjnXv2ISEBGJiYq67S7dYLJhMJjZv3sznn39OamoqAwYMoG/fvoSFheHi4iJ39kLcgZCQEF599VUGDhzIsGHDOHbsGAUFBWg0GjZs2MDQoUPlHCtjkpyVMlVVSUtL49NPP+XXX3/l3XffpWPHjiiKIl92YVuCIywsjIEDB9KzZ0++/fZb5syZw4IFC3jiiSdo1aoVwcHB6HQ6VFVl69at/Pe//6Vbt2689dZbTj2JxLqMzFNPPUVRURG//PKLLeG07mW6e/duFi5cSFJSEv3792f48OEEBgbKOSRECSkoKODo0aOkpKSQlJSE2WwG4PLly/z666/cf//9sh9nGZPkrBSpqkp8fDzTp08nPj6eGTNm0KJFC7mgiOtYEw1PT09GjBhB9+7d+fHHH1mwYAHffvstXbp0sS1m+9NPP3Hu3DnmzZuHm5sbY8eOJTQ01N4f4V9TVZ
UTJ04wfvx4tm/fjoeHBytXrmTQoEEkJyezdu1a1q9fT1ZWFvfccw/9+vUjOjra3mELUW5Y93BeuHAh06dPJzExkauX11JVldjYWOLi4mjSpIkdI614JDkrRWfPnuWFF17Aw8ODyZMnU7duXUnMxN9SFIWwsDAef/xxunbtysaNG1mxYgXLly+nUaNGrFixArPZjNlsZs6cOej1el588UWnW0gyJSWFF154gfXr12M0GikqKmLBggUkJiayYcMGDAYDvXv3pl27dlStWlXGlJUAi8UiLY7iGklJSSxevJikpCRutO7pyZMnOXToEI0aNZLJNmVIftIlzDq+bPfu3YwYMYJKlSrxwQcfUK9ePbm4iH9FURSio6MZNmyYbS28r776iqSkJNtrsrOz+eijj5g+fTr5+fl2jPafU1WVixcvMnz4cNauXUthYSFwJXHYsmUL3333HcOHD+eLL75g2LBh1KhRQ86dEmBdD06WbBFXq1mzJnPnzmXQoEF4e3tf93xubi7bt28nKyvLDtFVXJKclSDrwrIrVqxg4sSJdOrUiQ8++IDAwEC54xC3RVEU9Ho9gYGBxMTEkJ2djcViueY1BQUFvP3223z44YcOX4FaLBZOnjzJyJEj2bJly3X7PBYWFlKtWjW6dOlCQECAJBIlrKioCJ1OJy1nArhSvxgMBho0aMCiRYv48ssvadasGe7u7te8bv369SQnJ9+wZU2UDskYSlBxcTFffvkln3/+OQ8++CBjx46VilCUCLPZfMvWMYvFwrvvvstnn33msAmaqqqcPXuWl19+mfXr11NUVHTD1/3yyy/88ccfZRxdxVBcXCyLXYvrWNdh7NOnD19//TWjR4+mUqVKtkaFhIQEdu3aJclZGZLkrASoqmpbUHbevHk8//zzPPDAA9fdfQhxu/bs2cO6detuuWl3Tk4O06dP58svv6SgoMChKlJVVcnIyGD06NGsXLnS1pV5I5mZmcyePfu6FkJx56RbU9yKRqOhVq1aTJw4kUWLFnHvvffaxiguXrxYzskyJGfpHbLOdpk5cya//PILs2fPlgUxhY11Nfs7HQ9msVh47rnnOHHiBGfOnOH06dO2Ke/Wrs/i4mLS0tJ4+eWX0el09OrVy2FabfPz8xkzZgxr1qyxDUrXaDRYLBZbEunn50d0dDRVqlShcuXKnD17FoPBcNtluru74+fnJ+fin2TMmWMrLi4mOTnZ3mHYVKlShWnTprFw4ULmzp3Ljh072Lt3b7lcOD0oKMjhlgpRHOnu+kaaNWum7tu3z95h3FRaWhoffPABhw8f5o033iAmJkbGlwkbVVUZNGgQO3bsICgoqMzKLCoqwmw2O0SFYzabbWOdyioxyMnJoUmTJnz66adOucxIaSgqKmLs2LFERkbywgsv2Dsc8Rfx8fHUrVuXqKgoXFxc7B3OdUwmE1lZWWVWj5WF4uJi22zVbt26lXn5tWvXzo6Li/O50XNyC3UH0tPTmTx5MmfPnuWtt96SqcbihgwGAz169KB3795lUp61m91isThE17rZbMZoNKLX68tsvNPvv//OgQMHyqQsZyItZ44tKiqKsWPHOuR2SUajkfT0dMLDw+0dSonJyspi1qxZ9g7jhuQsvQ3WVf/feOMNTpw4wWeffUbVqlUlMRM3pCgKfn5+VK5c2d6hVBgpKSn2DsHhWGeTS3LmuFxcXAgLC3OaukJVVYcZOnE70tPTHeIG9kbkLP2XVFUlOTmZd999l6SkJL788kvCw8Od+gsqhKgYCgoKHPZiJJyPXPdKjzT1/Eupqam8/fbbXLp0iffee08SM2E3hYWFLF26lKVLl3LhwoVSmZ2pqipHjx4lNja2xI99MwkJCezfv9+hZpuWB6qqUlBQ4BDjEEX5IvVEyZPk7F+wdmWmpKTwxhtvUL16dUnMhN0oioKLiwvr1q3jzJkzpVbOtm3b+O2330rt+H917NgxVq9eXW4rXXsqLCyU5EyUim3btrFjx44yKy82NpbVq1eX2+U9pFvzH7COMXvzzTc5d+4cc+bMISIiQhIzUWJiY2P58MMP6datG3379mXTpk3MnTuXiRMn0qBBgxu+x8XFhW7dut1wwdZnn32WxMRE3NzcqFKlCo8//jiRkZFO9501mUz89NNPrFy5Ej8/Px588EGaNGki4ztvg6qq5OfnS3Lm5L744gt++eUX9Ho9oaGhDBo0iJYtW97WOfHXeuKxxx4jKipK6gkHIMnZ37AmZtYxZrNmzZLETJQ46zZGGzdupEuXLhw/fpyLFy9iMpm4fPkyubm5hIWFYTabuXDhAoGBgXh4eNz0eBkZGYwdO5ZmzZqxaNEiPvvsM6ZMmUJxcTEXLlzg0qVLuLi4EBkZiaenJ4qiYDQaSU5OJjMzE29vbypVqnTNOmMWi4WEhAQ0Gg1ubm6YTCZCQ0NtFeDly5fJzMwkKiqKtLQ0Lly4gKIoRERE4O/vj6IoFBQUkJSUhMViIScnB1dXV9um5mfOnLHNCLO2mu3atYtNmzbxyiuvcOLECZYuXUpYWFi5mjFWlmTMmfPLysqiU6dOjBw5kg0bNjB9+nTmzp2Lj48PqampXLx4Ea1WS0REBL6+vhw/fpyoqCjb7724uJiUlBS8vb3JyMhgzJgxNG/e3FZPTJ069Y7qCVVVOX/+/D+qJ9LT00lJSbnjemL37t1s2rSJl19+mZMnT7JkyRKnryckOfsbhYWFzJgxg1OnTvHWW285ZeuDcA7e3t74+PiwZcsWXF1d8ff3B+DQoUMcPHiQUaNGkZOTw9dff02fPn2oV6/eLY9n3ZLFw8PDlvzFx8ezbds2cnNzSU9Pp06dOtx3331otVpb96W3tzfe3t60bduWGjVqAFcSs0OHDvHzzz9zzz33YDQaiY2N5ZFHHrEliRs2bCAxMZEhQ4awadMm0tPTyc7OxtvbmyFDhuDn50dycjLvvPMOERERBAYGEhISQnh4OLt372bDhg1ERERw+vRpfHx8MJvNnDlzhlq1ahEZGYmLiwsbN24sd9P5y4p1zJmrq6u9QxF3yLonZrNmzfjiiy9IS0sjJyeHb775Bq1WS3FxMX5+fgwePJi5c+cycOBAWrRoAVxZA/B///sfXbp0sR3LWk9Yt1Q7f/48W7dutdUTtWvX5r777kOn07F9+3Z27Njxt/VEhw4dKCoq4tixY9fUExs3biQhIYGhQ4fazufs7Gy8vLwYOnQofn5+pKSkMGXKFMLCwggKCiI4OJiwsDD27t3L+vXrbfWEt7c3FouFM2fOULNmTSIjI3Fzc2Pjxo2kpaU5dT0hydlNWBfy/Oijj9i8eTOffPIJ9erVk8RMlIibTUG/++67+fLLL+nRowe+vr63ffzi4mI++eQTvL29yc3N5cknnwTA39+f7t27ExAQwO+//87KlStp3749Go2GHTt20KZNG9q1a0dBQYEtPovFwpEjRzh9+jS9e/emRYsWxMXFsWvXLrKzs1m0aBHt2rUjLi6ONm3a4OnpSfv27fH39+fSpUt88cUXxMfH4+fnB4CbmxvdunWjRYsW6HQ6CgsL+
f777xkxYgSNGzdmwYIFpKSkYDabycnJwdfXl8WLF9uSVaPR+Lc/R3E9k8nkMAsTiztjvT4dPHgQVVUJCAhg586dqKrKyJEjyczMZN68ecTHx9OgQQNiY2MpKioiISGBtm3bkpOTY2uJ//TTT/nqq6/Izc3liSeeAG5eT2i1WrZv307r1q25++67r6knVFXl8OHDnDlzhvvuu4+WLVty/Phxdu7caasn2rZtS1xcHK1bt8bDw+O6euLcuXO2esLV1ZVu3brRsmVLtFotRqOR77//nuHDh9OkSRO++uorEhMTr6knli5dip+fn62F72Y/O2eoMyQ5uwHrIp6zZ89mxYoVzJs3j3r16jl1/7VwLDerHKpWrUp4eDjVqlW7ZtuhqwfHX/3vmx1Hq9XaKrHNmzezePFiWrRoQWpqKosXL+bEiRNkZ2fj6elJUVERFosFk8lkW53cukK59SKQkJCAr6+vrQUuIiKC3NxcLl68yK5du3B1dSUlJYV69eqRlZXF4sWLOXToENnZ2QC2u3S4slVKRESEbb2t3NxcsrOzqVGjBm5ublSqVIn09HTbtlRGo5EBAwaQnZ3N1q1br/m5OEMl6yhyc3PRaDTSrVkOfPfdd2zevJnAwECeffZZvLy8yMjIIDg42Nb96OLiQl5eHk2aNOHrr79Go9GwadMmoqOj0ev1BAUFodVqeeihh4iJibHVEy1btiQ1NZVvvvnmmnqiuLiYoqIijEbjTeuJ8+fP4+/vb0uMwsPDycvL48KFC+zatQsXFxdSUlKoW7cu2dnZLFmyhIMHD5KdnY2qqnTu3Nn2GYOCgqhUqdI19URWVhY1a9bEzc2NiIgIUlNTba2IRqORgQMH3rCeuJqz1BmSbdxAcXExS5Ys4aeffuLdd9+VxEyUGT8/P9555x1iYmJsj1kr2cLCQtLT00lLS7M9p9VqcXNzu2b8BVypgDw9PQkMDKRJkyYkJCSQkZHBL7/8QnBwMLNnz2bChAmEhIQAV1qzDAYD586dw2g02saFwJUdDnr27MkzzzzDt99+y7lz5/D29sbf35/vv/+eu+++m4SEBDw9PXFzc+PAgQOkp6czbdo03n//fapWrXpNbBqN5poK0svLCz8/P+Li4igoKCAhIQGj0YhOpyMiIoL4+HjbvqGKouDt7V1aP/5yLS0tDb1ej5eXl71DEXeof//+fPvtt3z22Wd06NABnU5HQEAAFy9eJCcnh/T0dIxGI56enkRFRZGSkkJeXh5169Zl3bp11KhRw5b0XF1PJCYm2uqJoKAgZs+ezcSJEwkJCUFVVdzc3HB1db1lPfH000+zfPlyWz0REBDADz/8wN13301iYiIeHh64u7tz8OBBLl26xLRp0/jggw+oVq3aNZ/xr0mUl5cX/v7+xMbG2uoJ66LK4eHhnD9/nqKiIls94eNzw12RnIa0nN3Ahg0bWLBgAU8//TStW7eWxEzYVeXKlbFYLMyaNQt3d3eKi4ttz+l0Opo1a8bq1as5c+YMnTt3pm7dupjNZpYtW8Zvv/1GdnY2HTt2xN/fn+rVq7Nr1y5mz56NyWSytYT5+vrSrl07tm/fzqFDhwgICKBjx474+PjYNimPiYkhPj6e2bNn89JLL1G7dm0WLFjAyJEjWbRoEcHBwRgMBlt3yZdffonBYKCwsPCWn8/FxYX+/fvz448/snPnTpKSkvDy8kJRFJo2bcoff/zBzJkzMZlMNG7c2CG3tnEGaWlpGAwGSc7KIUVRaNCgAX/88QezZs3CbDZTuXJloqKi0Ov1BAcHo9Pp6NatG6+88go9evQArgxZWL58ua3r8ep6YufOnTetJ7Zt28bvv/9OQEAAHTp0sCVC1nri/PnzzJ49mxdffJFatWrx5ZdfMnLkSL7++muCgoJsM00tFsu/rid++ukndu3aZasnAGJiYjh8+DAzZ86kqKioXNQTsvH5VVRV5ffff+fxxx9nxIgRDB8+XMZniDuiqirDhg0jPDychx566Kavy8/PJy0tjYiICFtz/NmzZwkODsbV1ZXU1FQuX76Mh4cHWq2WgIAA23ezsLCQ5ORkCgsLCQ8Px9fXl5MnT1JYWGjr3ggJCcHLy4uCggJSUlJsd9UAoaGhGAwGTCYTqamptm6MsLAwdDodqampaDQagoKCKCwsJCEhgaioKIxGIxcvXqRatWq2GWJBQUEUFRWRkpJCbm4uHh4e6HQ6fHx88PLysrX+BQYGXrO5s7Xr1GQy4ebmZrsbhitbrKSmpqLX6wkLC7PFfSu7du1i9erVsvH5VZYuXcrcuXNZt26d03TtVCTx8fH07t2bt99++5bbN6WkpKDRaGyt3lYWi8XWsq7VagkJCcHb2xtVVUlJScHFxQUfHx/OnDlD5cqVcXV15dSpU7ZxYwaDgdDQ0DuqJxRFITg4+Kb1xNV1iSPUE+np6UyZMoX//ve/svG5o7JYLJw4cYLx48fTtWtXRo0addM+ayFKmru7O1FRUdc8VqVKFdu/w8LCbnonaJ1mfjXr7KkblfPX7gMrg8FApUqVrnv86ouAq6ur7dguLi62LsarYzMYDDe9uLi6uhIREXHd43q9/rrPYBUYGEhgYOANnxP/XGpqKoGBgZKYObmb1QPWpCcoKOiax63LVFjVqlXL9u/q1avf8FilVU9cfaMk9cStSX8dV1o3Lly4wLRp06hevTovvPCCdGUKIcqV9PR0AgIC7B2GEOIfkAyEKxMAPv/8c/Ly8hgzZoxttosQQpQXaWlp5aplQYjyrMInZxaLhcWLF7Nu3TrGjBlDdHS0JGZCiHInLS1NWs6EcBIVesyZ2Wxm48aNfPTRR7z99ts0b95cEjMhboOzLOxYkaWlpV03Hkk4FznPKo4Km5ypqkpcXBwfffQRw4YNo2vXrvKlF6XCbDZz6tQpNm7caO9QKowTJ07YOwSHUlRURGZmprScObisrCx27drF6dOn7R1KqbBYLCiK4jDX2tzcXFJTU+0dxg1V2OQsKyuLmTNnUqVKFYYNG+YwXxZR/lSrVo09e/Y4XXJmNBpJTEwkOjra6WYuFxUV0aRJk2s2ZK7ILl++bFtqRTgmFxcXKleuzKFDh5zufPsnTCYTZ86cwdvbm7CwMIe45losFnx9fW1bRjmSCpmcmc1mli5dypkzZ/jwww9te3EJURrGjx9v21DYmcTFxTFq1Cg++eSTO9rn0170er2sU/inCxcuUFxcfMMlEIRjCA4OZsWKFfYOo9RYLBZ++eUXpk6dytChQ7n33nsdJgm1bsruSCpccqaqKtu3b2f27NlMnjyZOnXq2DskUY4piuKQJ/4/YZ217OPj45TJmfh/Fy9exMvL6x8tzCnsQ6PRlPvzbOjQoQBMmzYNT09POnXqJK3bN1HhkrPTp08zefJkhg8ffs1mzEIIUV6lpqYSEhIiPQTCrjQaDQ888ABGo5H33nsPgM6dO6PX6+0cmeOpMMmZqqrk5+fzxRdfEBoaysMPP2zb+FUIIcoza3ImhL1ptVqGDh2KyWTivffew9/fnxYtWsiNw19UmHXOLBYL69atY+/evbz4
4ou27SSEEKI8s+6A4iiDsEXFpigKrq6ujBgxgvvuu4/nn3+eo0ePYrFY7B2aQ6kQyZmqqsTHxzNnzhwGDx5MrVq1pJISQlQIZrOZtLS0m+7JKERZUxQFNzc3nnjiCXr06MHo0aP5/fffUVXV3qE5jAqTnH366acEBwfTp08fGYAohKgwcnNzycrKolKlSnJTKhyKu7s7o0ePpnXr1rz++uscO3ZMErQ/lfvkTFVV1qxZw4YNGxg3bpwswiiEqFCys7PJycmRZTSEQ/L09OS5556jSpUqvPHGG1y4cEESNCpAcpaQkMD06dN56qmnqFevntw5CiEqlPT0dLRaLT4+PvYORYjrKIpCYGAgL730El5eXkyYMIH09HR7h2V35To5Kygo4H//+x8BAQEMGjRIEjMhRIVz+fJlPDw8cHV1lTpQOCRFUQgLC2PSpElkZ2fzzjvvkJaWZu+w7KrcJmfWvTM3b97MI488IrMzhRAVUkZGhi05E8KRRUdH88477xAbG8usWbPIycmxd0h2U26TM6PRyPz582nYsCFt2rRBoym3H1UIIW6ouLiYCxcuEBAQIMmZcAq1atXirbfeYuPGjSxevBij0Vghx6CV24xl79697NmzhyFDhuDu7m7vcIQQoswZjUbOnz9PVFSUJGfCacTExPDSSy+xYMECVq5cWSHXQCuXS+RnZ2fz/vvv07t3bxo2bCjjLIQQFZLRaCQtLY2YmBiH2WRaiFtRFAVFUbjnnnu4fPmybR/OLl26VKhreblLzlRVZcWKFaSlpTFq1CipkIQQFZbRaCQnJ4eAgIAKdWETzk+v19O3b19ycnJ45513CAwMJCYmpsJ8j8tdt2ZSUhJLlizhmWeeITAw0N7hCCGEXaiqSmFhIbm5uVIXCqfk4uLC0KFDueeee5gyZQqnT5+uMOPPylVyVlxczPr163Fzc6NTp04VJsMWQogbSU9Px2w2y+Lbwmm5u7vz2GOP4evrywcffEBGRoa9QyoT5So5S0tLY82aNfTo0QM/Pz97hyOEEHb1xx9/EBAQIC1nwqmFhobywgsvcP78eaZPn05BQYG9Qyp15SY5s1gs7Nixg5ycHDp27IhOV+6G0wkhxD+mqipHjhyhatWquLm52TscIW6boijUrFmT999/n/Xr1zNv3jyKi4vtHVapKjfJWWFhIV988QU9evQgOjra3uEIIYRdWZOzunXryhAP4fQURaF27dq8//77LF26lB9++KFcJ2jlJjn77bffSEtLo0ePHlIRCSEqvJycHJKSkqhVq5a9QxGiRCiKQosWLRgxYgRffvklhw4dKrcTBMpFclZUVMS8efPo0aMHkZGR9g5HCCHsLjY2FkVRqF69ur1DEaLEGAwG7r//fho1asSbb75ZbvfgdPrkTFVVdu7cSWxsLI888oi0mgkhBHDo0CGqVq2Kj4+PvUMRokT5+PjwwgsvYDAYmDhxInl5efYOqcQ5fXJmMpn4+eef6dSpE2FhYfYORwghHMLBgwcr1KKdouJQFAU/Pz8++eQTzp07xyeffFLuEjSnT87OnTtHXFwcvXv3lt0AhBACKCgo4MSJEzRq1MjeoQhRKhRFISQkhIkTJ7Jp0yZWrVpFUVGRvcMqMU6dnKmqyrFjx3B1daV69epyhyiEEEBiYiKXL1+mcePG9g5FiFKj0Who3bo1/fv3Z+bMmZw8ebLcTBBw6uQsLy+PDRs2cNdddxEUFGTvcIQQwiHExcUREBBAcHCwvUMRolQZDAYefPBBWrVqxZgxY8jMzLR3SCXCaZMzVVW5dOkScXFxtGzZEr1eb++QhBDC7lRVJTY2ltq1a8ti3KLcUxQFNzc3XnrpJby9vXnrrbfKRYLmtMkZXFnbzNPTkwYNGtg7FCGEcAh5eXmcOnWKxo0byzhcUWH4+vry6quvcujQIZYvX47JZLJ3SHfEqZOz1atX07JlS5kqLoQQf0pNTeXixYvUq1cPjcapq3gh/pU6derw9NNPs2jRIqdfoNZpz9y0tDT279/PvffeKxMBhBCCK12aFy5cQKvVEhwcLHWjqFA0Gg3du3enS5cuTJ48mYsXL9o7pNvmtMnZhg0bCAgIkC5NIUpIcXExycnJnD59mtOnT5OYmIjFYuHs2bO2x86dO+fUd6PlncVi4fTp0/j6+uLv72/vcIQoU4qi4O7uzqOPPoqHhwfTpk0jNzfX3mHdFqccLaqqKuvXr6djx464urraOxwhygWLxcLs2bPZtGkTqqqSn59PQkICTz/9tG1gebNmzZg2bZoMNHdQRqORgwcPUqtWLRnuISqs0NBQJk2axKOPPsrixYt55JFHnG78pVPWsKmpqcTGxvLwww/bOxQhyg29Xo+vry+HDh0iJyfH9viuXbts/+7du7d0lTmwwsJCTp06Rfv27Z3uYiRESapTpw6vvfYakydPpk6dOrRp08ap6i6n7NaMi4sDoF69enaORIjyQ1EUevbsiZeX1w2fDwoKonPnzjLI3EGpqsrFixdJS0ujdu3aTnUhEqKkKYpChw4d6NGjB7NmzSI5OdmphmQ4ZS178uRJqlatiouLi71DEaJcqVatGs2bN7/hhb19+/aEhYXJRd+Bbd68mUqVKlG5cmV7hyKE3bm6uvLAAw9QWFjIggULsFgs9g7pH3O6bs3CwkJOnDhBvXr1yt3Cs4WFhWzcuJGDBw/aO5RSpSgK48aNw2Aw2DsU8RcajYYhQ4bw008/XfO4Xq+ndevWMsjcwW3YsIHWrVvLWNx/KTU1lblz59o7jFLXvn17WrduXaFav6OionjmmWcYO3YsrVu3pkOHDk5xg+l0yVl6ejrx8fHce++95W5QstFoZOXKlRzf/gXVyumuK5fzYNsJDaNHj5bkzEHdc889hIaGkpKSYnusZs2a3HXXXTKOyYFdvHiRo0eP8sorr9g7FKdz6dIl5s54kzY1waOcdsjsPwuK8hotW7asUMmZRqOhXbt2jBgxgldffZVFixZRuXJlh0/QnC67yczMxGw2ExQU5PA/3NtRXFzMw21MPNTO3pGUjt/j4YGZGqfq+69oPD096d+/P59++imqqqIoCjVr1qR+/frl8pwrLzZt2kRQUBD169e3dyhOR1VVqgeZeLs/RJfTbZpfWgxms9neYdiFVqvlscce4+DBg0yfPp0333wTX19fe4d1S06XPmdnZ6OqKt7e3vYORYhyyWAw0KlTJ9zc3ADw8PCgXbt2sjSDAzObzWzdulVmaQpxE3q9nueff57Tp0+zcuVKioqK7B3SLTlVcmaxWEhNTcXNzU0uFEKUEo1GQ61atWjUqBGKouDp6UmPHj3sHZa4hcTERI4cOSK/JyFuQlEUatWqxaBBg/jiiy9ISEhw6B4cp0rOzGYzsbGxhIeHS3ImRCmKioqidevWKIrCXXfdRY0aNewdkrgJVVU5dOgQ3t7e1KpVS7qehbgJnU7H/fffT/369Zk8eTKFhYX2DummnGrMmdlsJjExkerVq8tgcnGds2fPcvbsWYe+G3ImWq0WDw8PatasycaNG+0dTrlRpUoVoqOjS2xQtsl
k4uDBgzRp0sTWFS2EuJ6iKHh4ePDcc8/x8MMPs3z5coYOHeqQEyScLjnLzMwkICDA3qEIB/Tzzz/z9ddfExxcTqe6lrGCggI8PDw4dOgQsbGx9g6nXLh48SKDBw/mueeeK7ELwqVLl4iNjWXo0KFy0yrEP1CjRg3GjRvHhx9+SOPGjR1yj26nSs4sFguZmZn4+fnZOxThgLKysggPD+e5556zdyjlRlJSEmFhYQ55Z+mMFixYQGZmZom17qqqSmJiIrm5udSoUUN+T0L8Q126dGHDhg3Mnz+f119/HW9vb4caEiDJmShXXF1dpeWsBAUFXVlX4O8qLeuSG+LWPDw8SvR4xcXFrF27lho1ahAdHV2ixxaiPHN3d+fpp5/m+eefZ926dfTt29ehZjo71W1WQUEBBQUFkpwJUUYURflHSVdZJ2YyrvCKwsJCtm3bRuvWrWW8mRD/gqIoVK9enf79+/PFF1+QlJRk75Cu4VTJ2eXLl9FqtTfdmFkIcWvHjx/nyy+/xGQylUl52dnZLFu2jDNnzpTocQsKCli2bBlHjhwp0ePeyrp161izZk2ZlfdPHDx4kKysLFq3bm3vUIRwOnq9nn79+hEQEMBHH33kUHtvOlVylpubi06nkw3PxW0zm81kZ2eTm5uLqqoUFxeTmZlJQUHBbbXGFBYWkpOTU6otOYWFhaSnp5Oens7ly5cpLCy87fIuX75MbGxsmVVCJpOJkydPkpWVhaqqts+RkZFBbm7ubcdRXFzMyZMnycjIKOGIb+78+fOcP38euNJyl5ubS0ZGBtnZ2XZZed1isbBkyRLatm1LeHh4mZcvRHng5+fHK6+8wsaNG1m5cqXDtMo71ZizgoICtFqtzEgSt+3SpUu8/fbbuLi48Oabb3LixAlefvllHnzwQQYOHPivxxysWrWKvXv3Mnny5FIbr/Drr78yd+5coqOjcXV1pXbt2gwYMMDpuvcLCwvp168fVatWxdfXFxcXFx555BFq1qxp79D+tYSEBGbOnEleXh4Gg4EhQ4YQExNTpgPy4+Pj2bZtG19++aVMBBDiNlkXp33qqaeYNWsWjRs3Jioqyt5hSXImKh5fX19Onz5Neno6iYmJ6HQ6NJor+32ePn2abdu2UVRURKtWrahXrx6KopCfn8/GjRtJTEwkNDSUrl273nKMj9lsJi4ujt27d9sWcq1Tpw6KopCWlsa6devIzs6mYcOGtGrVCkVRMJlMrF27loSEBCIjI2nfvj2enp4AtGvXjueee47k5GRmzZpFfHw8Pj4+xMfHs2PHDnJycmjSpAktWrRAURQsFgvHjh1j7969wJXNzK+ucFRV5dy5c/zxxx+0adOGDRs20KlTJ1vCl52dzb59+2z7aW7atIm0tDSqV6/Ovffea0tE161bh8lk4uLFi2RlZdG9e3eqVavGwYMHOXjwIK6urtd0oQYHB/PSSy9RpUoVPvnkE3755Rdq1qxJbm4umzZtIiEhgfDwcDp16oSHhweKonDp0iW2bNlCeno6DRo0oGnTptf8rE0mE7/++ivVqlXj4sWLREVFUb16dRRFQVVVDhw4gNlspmHDhuzevZtjx47h5+dHhw4dCAkJQVEUjh8/zunTp8nJySElJYXatWvTvn17MjMzWbduHRaLhYsXLxIUFISqqnz//ff4+voyceJEfv75Z3788UcaNGhQpq36q1atIjo6moYNG5ZZmUKUV/369WPjxo189913PPnkk7i6uto1Hqe63bImZ3q93t6hCCdmMBioW7cu27dv5+zZs7aWm9zcXFasWIHFYqFWrVocOXLEtpfr//73Pw4dOkS9evWIjY3lhx9+uOnxrcsbLF68GG9vb/z8/Fi0aBHnzp1DVVV++uknEhMTqV+/PidOnCA9PR2A3377jQ0bNtCgQQPy8/NtXWjWY1osFnJzczGbzej1eoxGI3v27CEsLIxatWrxzTffcPToUQDOnTvHjBkz8PPzo0qVKhw8ePCaGFNSUli4cKFtmMCBAwc4deoUqqqiqiqXLl1iz549tgHnnp6e1K9fn1WrVrFz507bcbZu3crSpUsJDw/n7rvvJjg4mBMnTvD9999TqVIlzGYzJ06cuO7nYzQaycjIsI0f3b17N8XFxTRu3Jjff/+dNWvWoKoqmZmZfPbZZ6Snp1OvXj0uXbpEZmam7VhGo5H58+dz9uxZQkNDiY+Pt8Vn7TJdv349aWlpHDt2jNTUVJo0aUJ6ejorVqygoKDA9vOaM2cOJpOJNm3aULt2bbRaLbNmzSInJ4fQ0FAOHTpki/3UqVO0bNkSLy8v6tWrx7lz58q0azM9PZ3NmzczYMAAqQ+FuEOKouDn58fQoUP59ddfbXW1PTlVy5nRaESj0aDTOVXYwgG1b9+eL7/8kpo1axIZGQlgS0z8/f2pWrUq9evXx8vLi/z8fH7++WfGjx9PWFgYrVu3Zu7cuQwePPimx09ISECv19OlSxdby8zx48epXLkyZrMZb29vIiMjqVmzJv7+/sCV1jZXV1fCw8OpXr36Nd2W69atY//+/SiKQrt27ahUqRIuLi506tSJvLw8iouLCQkJ4ciRI7YkqlGjRvTq1cvWkmaVmZnJ22+/TevWrencuTOKolC7dm1Onz7NoUOHyMnJoV27duj1evz9/bn33ntt46pq1KjB7t27adu2LXBlF4F77rnH9jktFgubNm0iPDyce++9l4yMDOLi4mxlp6Sk8NJLL9mO/eijj6KqKi1btuTy5csUFxdTs2ZNDhw4QJ8+fYiNjcVoNNq6cc1mMxqNhtzcXIxGI1999RV+fn5MmTIFd3d36tevz/fff09sbCyLFi1i5MiRHD9+nAceeICgoCBCQ0MpKiqiRo0abNq0ifz8fNzd3QGoV68effr0wdPT09bC+ccffzB37lz8/Pw4ePCgrYXTaDSi1+v59NNPiYyMvKNxgP+WdbumnJwcOnToUCZlClHeabVaOnTowLp165g9ezZTp0616/h2p8pyrJWfrKdUcorMsP8M1AwD/ys9aGTkwrk0qB0O7uW0B9nX15dOnTpRs2ZNdu3aBYCXlxcDBw5k27ZtfPvttwAMGzYMgPz8fFasWGFrpWjSpMktW0oKCwvR6/W2GwmDwYDJZEKj0dC/f382bdrEihUruHz5MiNHjqRSpUrcfffdqKrKr7/+SlpaGl26dKFFixbAlQUTn3vuOVtL0t69e2nevDmzZ88mPz8fVVU5deoUlSpVAiAnJ4dKlSrZuh+vHpOUlZVFhw4dOHHiBElJSURFRVGjRg3Wrl2Lv78/p06dIjIyksDAQFxdXZk5cyYXL15Eo9Fw9uxZateubTuWoigEBQXZjm9tWXJxcUGn06HT6a7pHggLC+Ott96iSpUqrFixgtmzZ/P666/z448/cuTIEbRaLUlJSbb3WI+l1+tRFOWaGzOj0Uh0dDSpqans37+fdu3aUbNmTRISEkhOTuby5cscPXoURVEIDw9n8+bNbNmyBYCMjAzy8vKuSaj8/f3R6XS2+sVoNALg5uaGRqOxrVHm4uKCm5sbBQUFPPvssx
[GIT binary patch payloads for docs/_static/images/logo-torchopt.pdf, docs/_static/images/logod-05.png, and docs/_static/images/logod-07.png omitted]
evXq0alTJ7p27UrLli0xGAzodDrHMJYMg9ZtR48eJTk52TGMeT43NzfmzZtHnz59nFCZEOJ6aJrGyZMn+f3vf88DDzzA4MGDr3h6gvScCXGTKaVwc3MjICAAf39/4uLi6NGjB5qmcerUKVavXs3y5cuZO3cuNpuNRo0a0bZtW8dm7X5+fnh5eUnvWh0VFBREv379+Prrr7Hb7RfcVq9ePdLS0pxUmRDiev3www8YDAY6dep0w+aNSjgT4gar7gWrDlr16tXjnnvu4Z577uHcuXNs27aNrVu3sm3bNpYsWYKnpydBQUEkJibSsGFDEhISiIqKciyEK2o/k8lEnz59mD179gXhTCnF8OHDZUhTiFoqLy+PadOmcd999znmkN4IEs6EqEH+/v5069aNrl27UlxczKlTpzh+/DgZGRns2LGDRYsWUVxcTP369UlJSaFTp040btz4ulaaFs6n0+lo3rw5iYmJ7Nq1i+rpJGazmd69e8tVmkLUUl9//TUAAwYMuKHTUyScCeEESim8vLxo2LAh8fHxdO7cmbKyMoqLizl69CirVq1i9erVTJ48GV9fX1JSUujQoQPdu3fHx8cHg8GAwWCQuWq1hFKKmJgY2rZty86dOx3HOnXqRIMGDZxcnRDiWhw5coRPP/2Up59+moCAgBt6bglnQjhZ9Xw1Ly8vPD09CQkJITU1FYDTp0+zbt061q5dy8cff8ykSZOoX78+jRo1IjU1laioKIKDgwkKCsJoNEpYc2G+vr60b9+e2bNnk5eX59jiJTg4WH5uQtQyFRUVTJ8+nXr16tG7d+8b/hqWcCaEC/nlCzwkJIRBgwYxaNAgCgsL2bNnD3v27CEjI4Pp06djsVgwGAxERkYSHx9PYmIijRo1Ijg4WIbKXIxSig4dOlC/fn22bdtGVFQUKSkpMt9MiFpG0zQOHDjAmjVreOihh27K/GAJZ0LUEt7e3rRp04Y2bdpgsVg4e/Ysp0+f5sSJE+zcuZN169bx2WefoWkasbGxdO7cmXbt2pGUlHRBAJBeGueJj4+ncePG/PzzzyQkJNCiRQtnlySEuEoVFRXMnTuXiIgI2rZte1P+EJZwJkQtZDAYCA0NJTQ0lOTkZLp3747FYqG0tJSMjAx+/PFHFixYwFtvvYWXlxctWrSgY8eOtG3blsDAQDw8PDCbzbi51c63ALvdTn5+PoWFhc4u5aq1a9eOefPm0aBBA8rLyzl69KizS7oqJpNJembFLau612z+/Pm8/PLLN3yuWTVZhFaIOsput5Odnc22bdvYvHkzmzdvJjc3Fy8vLxo0aECzZs1ISEggIiKC8PBwvLy8ak2vWlFREc8++ywzv52LX1Cws8u5atnHjhAcEYWulq1rV5h3jqTY+sybNxcvLy9nlyNEjSstLWXcuHGEhYXxyiuvXNe0BFmEVohbkE6nIzw8nPDwcG6//XaKi4s5cuQImZmZZGRksHnzZhYvXkxpaSkhISE0bNiQRo0a0aRJE+Li4mpFr1rXQcPo0n+ws8u4OhrsTl9PYsvW6N1qVzjbsmoFWTvkj2Vx61q4cCH79u3jpZdeuqnzRV3/3VcIcd2ql+5ITk6mcePG2O12CgoKyM/P59y5c+zatYvNmzfz/vvvk5WVhb+/P82aNaNdu3Z07tyZ8PBwdDodSinHhyvwDQgkPCbO2WVcFU3TCIqIwlALFxn2D9pFjq52BUohbgRN0zh27BhTpkxhwoQJxMbG3tTnk3AmxC1GKYVer8ff3x9/f39iYmJo3rw5w4cPdwyFrlu3jo0bN/LBBx8wceJEgoKCaNq0KampqbRq1QpfX1+8vb3x9vZGr9e7TFi7kAa4Wl0aSqlaGcyEuJWVlZXx+eefExAQwNChQ2/6e56EMyFucdU9YdUTvOvVq8fdd9/N3XffTUVFBYcPH2bHjh3s3LmTpUuXMmPGDDw8PAgKCiIpKYmYmBiio6OJiYkhKCjout+0qufBXv+b35U//kzWKbKOHSaxRSpuBoPjeHlZKYd2/UxYdH38g0PPr/Kqzv/LmiwVFRzYsY3AsHBCIutdw3muXubundg1Ow2Sm9XI8wlRV9jtdjZt2sTq1at55plnamS+pYQzIcRlGY1GEhISSEhIYMiQIRQVFXHixAlOnDjBkSNH2LVrF2vXriUvLw+TyURsbCxNmzalefPmJCcnX9NFBhs3buTgwYMMGTLkOraturrwdCLzID99/x1xjZpeEM7KiotZu/A7Ot4x4Bfh7HqCo0ZFWSnrFs2jece0Ggtn29etwma1XEc4c+2Lx4S4WUpLS3nvvfdo27YtrVu3rpErlSWcCSGuiFIKb29vkpKSSEpKwmazObacKigoYNeuXaxfv55vv/2Wt956C4PBQLNmzWjdujVt27YlMTERk8mEXq/Hzc3tknPXNE1j5cqV/Pvf/+brr7/m//7v/2jSpInj/ldRreMzu93OpmWL+fbDd3n41X8TGRd/Tb1yFWVlPHNXXzQ03D29SGjRiqHjH8Xbz/8qz+T8oday0hJmTf4Puzb+RL34RIaO/yMhUdG/0S7Or1uImma325k2bRrZ2dn885//xMPDo0aeV8KZEOKa6PV6PD098fT0JDg4mAYNGjBgwAA0TaOkpMSxMO7GjRv56quvAIiOjiYuLo6UlBSio6Px9/cnMDAQX19fdDodFouFffv2kZ2dzbx58/jpp58YP348I0eOJD4+/pr+Yi0vLWH/9i2UFBawf/tmImIboJTCarFw8vAhSgoLOHPqBHbNDoDdZuNMdhZnsk5itVRgs1mByuCodDoefvVNwqLrM+21F1n4xcfc/fATWCoqOHUkk+KCfDy8vYmoH4eboXI7rbKSYk4dPUx5SQn+IaEEhUVcUJ/dZuP4wQy8/f0pLS7G08cXH/8AlFJomsaZ7FNodjsBIWGcPnmcc7k5mMzuhEXXx92zsmcy/+wZCs6ewVJRTmlREb6BQYRF16eivIyTmQfRNI3SoiKMZhOaprHkq8/IOnKYJ/79Pku//oIFn3/EqCefw2CU3QqEqKZpGhs3buTDDz/kn//8J1FRUTU2v1bCmRDiup3/hlV9ZWi7du1o164dNpuNM2fOOJbw2LdvH//9738pKChAp9MRFBREfHw8CQkJeHp6kpmZCVS+MZ4+fZq///3vrFq1irFjx3LHHXdc1aKPmqZRUlRI1tHDdOwzgN2b1tOl/1B0Oh071q/hx7mzCAqPJPv4UTR7ZTjLOXGM+Z9+gE6nR9PsZB3NvOh7dXMz4O7phaWiHIADO7ayc8M6LOXlnD51gs59B9GyS3fKSkpY+vWXnDqaibePH75BwXS4vZ8jBNlsVjYuW8z2tT/Sc9g9bFqxlMCQMHreda/j+ZbO/ILAsAiSWqayZfVySouKyMvNoWGzVnQZMASjyczuTT8x/7MPaZDcDJO7O7GNmhAUHsGSGZ9zNGMfASGh7NuWTvOOXbBZLezblk672/oSGBZB0/ad+ebdN7FarBLOhDjP8ePH+fe//03//v3p1q1bjV74JOFMCHFT6fV6QkJCCAkJoW3btlgsFvLy8sj
NzeX06dMcP36c3bt3880333D8+HH279/veKymaWiaxqpVq9i9ezcLFizgySefJCEh4YqfP3PXTpRStO/dj3efe5z8M6fxCQhk7aJ5tErrQbted7Byzjfs3LgWTbNzaNfPANz5h8c4m53Fl/+e6DjX2ZwsPnj5OYxmEzarlbHP/x1N0wiOrMdtd4/C08eH1fO/ZfOPy2jRqSvHMvayf/sWhv3hMcJj4ijKz8PdyxtrRWWP3NqFcwHF8D8+SXj9OM5knWLb2h/JPnaE9UsW0KnvYDJ376RNj9vxDQqmS78h+AQEsjt9PesWfUdq914YTZXz8jy9fLhj5GhCoqLR6XQUF+azdtFcHvr7vwmJqsfUF58BFOVlZZQWF+Pl48vcj97DPziUksJCtKqeQyFE5dWZX3/9NQC/+93vanxHDAlnQogaZTAYCA4OJjg4mKSkJDRNw2KxUF5ezpw5c3jooYcuekx1L9qsWbNYunQp48aNo6ioCLcrmO61Ydn3hEZF4+Hjg4e3D5t/XEaH2/tTeO4s9RomYjCZCIqIxOzhhd1u51zuaXyDgvH08cVmtRIQEuY4l19QCPc89jSh9WL44ZsvWfrNFzz4zIucyDzA919MIzfrJIV5Z4mIiUMD8nJP4+3nT2BYBAajEf/gEACsFRUU5edzLieHspIix1T7uOSmzPt4MqdPniB95Q9ExjWkIO8M0QlJHDuwj4VffMyxjH0UFeTj5eOL3Wpz1BYR1wC/oBDHL5GCs+ewWiyE14/FZHYnvH4cCtC7uaF3c8Nut9N7xP0c2bcHN4PBRZdDEaLmaZpGeno6c+fO5cUXXyQyMrLGXx+yOZoQwmmql/AwmUx4eHhw7tw5ioqKLnv/6iHS119/nRkzZlTNs6q47P0L886ya+Najh/K4OO//xWb1cqm5UtwMxjwDQziyN7dVJSXkXPiGKXFReh0OgJCQsk7nUNRfh55uTmcyT7lOJ9Op8PTx4eA0FCSWrXm+IEMCvPzmPXef+jcbxAvfzqLQWP+gMm9ctKwf0gohefOcvrEcSwV5ZzNzqK8tAQAH/8ABoweT/8Hx/PVW//gXE52ZYgzmdm6ajlN23Zk6+oV1E9MRqfTseK/M4moH8v/fTKTB595Ef/gELTzrqDUKd0Fc/Z9AwMxGE2cOHSA4sICThw6gAaYzO5E1I9j39Z0UIoTmQcIjY6pdVtJCXGznDhxgpdffplBgwbRuXNnp+wjKz1nQgiXUFJSwp49exyBzWar7BXS6/UEBARQr149wsPDCQ4Oxt/fnw0bNlBRUY7dbrvsOXesX0tovfo8+dZUDEYTuadO8tofHiT7+DE69xvMslkzOLxnF+dyswFQSkeD5Gbs3byJmZP+WRVY/heACs6dZd4nU/ANDKLg7Bna3dYXT28fGqW0ZeualRzctYP83NOOmqLiE0hKacPirz7F3dOL4MgoOtze39FTpZSO9r37c+zAfpbM/JyBoyfQsHkrDu/Zxagnn+ODl58lbdCdKKWISWrM7k3r+ea9f1NeWorVYvnV9nT39CJtwFC++2QKfkHBFOadrfoeFV0HDWP2lEl89o+/UVyQT+d+gzEYZGFcIQoKCnjttdcIDw/nvvvuw3De0jo1STY+F0K4hDNnzvD0009z8uRJGjRoQGxsLDExMY5N2T08PHB3d8fd3R273c5LL71EeUAEt9/zwGXPee50DuWlJYRF1wdAs9s5djCDoPAITGZ3R4+Z2cMDvd6N4IgoUIr83NOcy83BaDJjMBrxCQjE5O7Bod070Ox2lNJh9vAgOCISk7sHRfl5nD55HAAPL2/sdjth0fUrr9YsLSH35AkqysvwDQjCLzgE0DiTlYWHtzee3j4U5edTlH+O4IgoivLPUVJURFi9GE4ePoR/cAiePr6UFhdx+uQJLBXleHj5ABrBEZG4GYwU5udRXlJMQGi44698TdMoLy0h+/hR0DRM7h4YTWYCQsOw2+2cPnmcorxzmD29CImMuqKLAdYt+o59Py7mq88/lY3PRZ1jtVr54IMPmD59Op9//jn169e/qc8nG58LIVxeQEAA7733HvC/4c7L7eVZVFR0RXNAqud4VVM6HdENEx1fh8dcen88/5BQ/ENCLzoe36T5Je/v5euHl6/fJW8zu3sQ1aDhRcdDIqMcn3v7+eHtV/l4v6AQ/IIq6z7/ce6eXhfUfj5vXz+8f/H8SinMHp7EJDS66P46nY7QqGhCo6IveT4hbjV2u52VK1fy9ddf8/LLLxMd7dzXhoQzIYRLUEphlD0nr9Evd0RwxX1FhXBNmqaRmZnJO++8Q79+/WjXrp1T5pmdTy4IEEKIWu+XQUyCmRBXqri4mL///e8EBAQwcuRI3N3dnV2ShDMhhLhemqax5cdlzJr8n+s+19pF83hy8G081q8bu9M3XH9xQojLqqio4F//+heHDx/m5ZdfJiQk5LcfVANkWFMIIa6ApmmUlZaQffQwlooK/INDCQwLx2a1cvzgfg7u/Jkj+/ewd8smx/ZJoJF/Jpcz2VkopQgKj8Tbzx+b1cKpI5U7D5SVlOAbEEhQRBQ6nY4Ot/enSZsOTPvHS45lN36N3W6n8NxZzmSdxK5pBIVF4BsYJOuWCfEbysvL+eKLL1i6dCnvv/++U9YzuxwJZ0KIuu03p19d2fwszW5n/eIFbF+3iqCwcDx9/Og9/D6UTrFr008c2Lmdszmn2LR8MfFNWhBWL4asY0dYMuNzQMNqteIXGETPu0Zis1qZ/Nc/E9WgIR7ePhSePcvAMROITkiqvABCp0Nd4dBkcUE+G5d9T+7JE5QUF2EwGOn/4PhfXAwhc9CEOJ/dbmfNmjV88803PPbYYyQlJblMMAMJZ0KIuu4332+v7A3ZbrdzeO8u4ho35bbho8jPPY2bwYDRbOaOkWPwCwxh79aNjHry+ar729i+bhV6NzcGjplAeVkpsye/RdbRwwRHRGE0mWnbqw/JbTowZ+o7pK/8geiEpKv+9oxmM806dMEvMJiSokJmTvonR/bt/kU4c51fOkI4W/UFABMnTqR379706dMHNzfXikOuVY0QQrgovZsbPe68h2WzpvPOXx4jpmES/R/8/WXvb62wcCbrJD/O/YbNP/4AGpSVFNPutr6AhpevH0FhkZhMZsLrx5GxYxuapl3VX++aplFeUlK5N+j6tRTl52GxVNCsY9oN+I6FqHs0TSMnJ4dHHnmEpk2bMmHCBDw8PJxd1kUknAkhxBXQNA03g4Fhf3gc0HjzTxOITW5GateeABiMxqqV+ytwMxhxMxoJDI3g9nsfoPeIBzAYjJw7nY1vYBClxcUU5eeRm3WS0OgYTh3JJCA49DeDmabZ2bXxJ4Ij6hFar3Idpsy9uzi8ZycPT/w3Sqfn8zdegRu5uLiMiIo6QtM0srKyeO655wgICOBvf/sbnp6eLjWcWU3CmRCibrtRc840jYO7fubo/r3YLBa8/f2JjG3guD2ifhz5Z87w+RuvktQylXa33UHzDp1Z9t8ZzH7/LfR6N3wDA+l4xyAALBUVbPzhe3asX0PB2TMMHD0BgBOHDv
D99Gkc2rWDivIyjmbsddxmt9n54G/P0fvuUdwxagwAvgGBGM3uLPziY4xmd0qLCq/5e7wk1/u9JcQ1KSgoYNKkSZSWlvLqq6+6bDADCWdCiLruBs050+l0pKT1oF6DhlSUl+MbEEhI1Qr7SinC6scy8olnKS7IxzcwCJQiLLo+fe8by9msLDTNTkBoGH6BQeSdycU/OIQ2PW/H288f38AggiOi0ADfwCA69BlAhz4DgMo5ZY4a9Hoe/cfbjh0ElFJENWjI3X98gsK8PNw9Pek+5G48fX65I8yVfY/SSSbqKpvNxgcffMDWrVv529/+RkxMjMsGM5BwJoQQV0QphYeXN/WTki95u5ubgXrxCb98EP5BIfgHXbx2kl6vJzQq+qKtnbx8/WiU0uayNcQ1bnrh8xqMRNRvcMn7Xy3X/VUlxLUrLy9n+vTpzJw5kzfffJOUlBSn7wDwWyScCSFEDTOZzTRu3Q4Pb29nlyJEnVZaWsrs2bP58MMPefXVV+ncubOzS7oirh0dhRCijtE0jfSVP9C0XScCQsKAyrXK1i9ZyJnsU06uToi6w2q1snjxYqZNm8YjjzxCjx49nF3SFZNwJoQQNSj72BHWLpyLf0io45jBZCLn+BF2b1qP3WZzYnVC1A12u521a9fy+uuvc8899zBo0CCXnmP2SxLOhBCihmiaxo/zZtG8YxfMHp6O4wajiejExhzYsZ2S4iInVihE7WexWFizZg2PP/44I0eOZNSoURiNRglnQgjhMm7gkl/XrKqGivIy9m3dTOOUthf8olBKERoVzemTxykrLkK7keuUCXELsVqt/Pjjj7zwwgs8+OCDjBs3DoPBUKuCGUg4E0LUda7wnlxVQ+G5s5QWFRIUHnnRXbx8/bBWVFBcWFADBUn4E3WP3W5n3bp1vPbaawwZMoSxY8diMBicXdY1kXAmhBA1xGazoaGh0+svuk3pdKDAbq2JOWeukFiFuHFsNhvp6ek8+eST3HHHHYwZMwaTyeTssq6ZhDMhhACurTfp6h7j7euP0WQm78zpi24rLSxEKYWn7y8XkBVC/BqLxcLKlSt5+OGHGT58OA899BAeHh61bijzfBLOhBACuLbepKt7jLuXF/WTGpOxfctFt50+dZyA0DDMHq67pYwQrsZisbBkyRJeffVVRo4cyUMPPYTJZKr1ryEJZ0IIUUOUUnToM5D0FUux2/83fGmzWTmWsY/ohEZ4eMnCtEJcCbvdzvLly3nttdcYOnQoY8eOxWg0OrusG0LCmRBC1KCEZi0Jr9+AQ7t3Oo4V5eVRVlJM0zYd0LtVb9zyyyFTmcQvRLXy8nIWLFjAM888wwMPPMCYMWNwd3ev9T1m1WT7JiFEHeS6W3jr9Dru/dMzFxzzDQxi4Jg//OKev6zf2d+PhEPhGgoLC5k1axYffvghTz31FHfffbfL75V5tSScCSHqoJsdZK4n/Dk7ZF2r2lq3qEtKSkr46KOPWLx4MU8++SQDBw6sc8EMJJwJIeqsm9l7dq3ndd0ePSFcmaZpWCwWXn/9dX788Ueee+45unbtWieDGUg4E0LUWa4YglyxpqtT+78DUdtomkZWVhYTJ05kx44d/Pvf/6ZFixZ1Zn7ZpUg4E0IIccVk5pmoSZqmceDAAV577TUKCgp47733aNSokbPLuumuK5wppf4EjKXy9boDeBDwAGYC9YHDwF2app2ruv9fgDGADXhE07TF1/P8QohbW/bxo+zbmu7sMuocu82KTn/xr4eThw9hs1qcUJG4Ve3Zs4fnnnuOsLAwJk6cSIMGDZxdUo245nCmlIoEHgEaa5pWqpT6GhgONAaWaZr2mlLqGeAZ4GmlVOOq25OBCOAHpVSCpmk1sVeJEKIOUUrh4eHBxtlfsG3FEmeXU+dYLFZKS0vQAHd398qNowFLRQWtWjSv08NJwvk0TcNms7Fu3Tr+/Oc/k5aWxlNPPUVgYOAt83/veoc13QB3pZSFyh6zk8BfgK5Vt38KrASeBgYCMzRNKwcylVIHgDbAT9dZgxDiFuPh4cHEiROZOHGis0upk3bu3Mkbb7xBRkYGffr0oX///iQlJdXqvQpF7aBpGsXFxcyePZsPPviAu+++m0cffRSdTnfLBDO4jnCmadoJpdQbwFGgFFiiadoSpVSopmmnqu5zSikVUvWQSGD9eac4XnVMCCGuyq30Ju0MTZo0YfLkyaxatYrvvvuOp59+mk6dOjFo0CAaNWqEm5tMVxY3R0FBAZMmTeKnn37i4YcfZsiQIej1emeXVeOuZ1jTn8resFggD/hGKTXy1x5yiWOXnFuqlBoHjAOIjo6+1hKFEEJcg+ph4969e9OuXTu2b9/OtGnT+P3vf0+vXr34wx/+QHBwsOO+QlwvTdM4ffo0f/3rXzl06BAvvvgirVu3rjPbMV2t61kgpCeQqWnaaU3TLMB/gQ5AtlIqHKDq35yq+x8H6p33+Cgqh0EvomnaVE3TUjVNS61+AxBCCFGzlFL4+fnRpUsX3nnnHV5//XV27NhBnz59mDx5MqdOncJisaBpcg2nuDbV88t+/vlnxo8fz7lz5/jss89o3779LRvM4PrC2VGgnVLKQ1X+6dQD2APMA+6vus/9wNyqz+cBw5VSJqVULNAQ2Hgdzy+EEKIGKKXw9PSkU6dOfPXVV/zlL3/hhx9+YNy4cXz00UccPHgQu93u7DJFLWSz2ViyZAnPP/88DRo0YPLkyYSFhdXZxWWv1PXMOduglJoFbAGswFZgKuAFfK2UGkNlgBtWdf9dVVd07q66/0NypaYQQtQuRqORoUOHkpaWxvLly5k7dy7z5s3j9ttvZ8iQIURGRspQp7gihYWFfPHFF3z11VcMHz6cESNG4O/v7+yyXIJy9e7o1NRULT1d1jESQghXY7PZyM3NZf369Xz44Yfk5+czZswYhg8fXrn8hlIS1MRFNE0jJyeHN954g9WrV/PKK6/QqVMnzGazs0urUUlJSQV79+71vdRtEs6EEEJcM03T0DSNwsJC5s2bx/vvv4+XlxdjxowhLS2N4ODgW36ISvyP1Wpl586dvPzyy+j1el599VUaNGhwS/4fkXAmhBCiRuTm5vLVV1+xZMkSfHx8GDBgAGlpaYSFhTm7NOFkhYWFLFq0iHfffZcuXbrw+9//nsjIW3dFLQlnQgghaozVauXw4cOsWLGCuXPnYjKZGDlyJL1798bDw8PZ5YkapmkaZ8+e5b333mPZsmWMGzeOgQMH4unp6ezSnErCmRBCiBpnsVjIzs7m22+/5auvviIgIIBnnnmG1NRUjEbjLTmUdaux2+2cOnWKxx9/nJKSEp577jlSU1NlIWN+PZzJK0MIIcRNYTAYiIyM5OGHH2bWrFm0b9+eP/3pTzz88MOsWLGCc+fOyRppdVT1PMSFCxcyYsQIQkNDmTp1Km3btr0lV/y/WtJzJoQQokZomsaOHTuYPXs2mzZtIiYmhqFDh9KhQwcZ7qxDNE0jKyuLqVOnsmrVKoYMGcKDDz4oP+Nf+LWeM+lXFEIIUSOUUjRr1oyEhAQyMjKYM2cOzz//P
E2aNGH8+PG0atXqltvgui6p7uzZvn07L7zwAp6enrzwwgu0b98eg8Hg5OpqFxnWFEIIUaPMZjNNmjThqaee4ssvvyQwMJAxY8bw2GOPkZGRQWlpqQx31jKaplFUVMTnn3/Ogw8+SLNmzXj33Xfp3LkzRqNRAvdVkmFNIYQQTmW329m+fTvvv/8++/bto3PnzvTt25eWLVtiMpmcXZ74DZqmsWfPHj788EN+/vlnJkyYwODBg+WCj98gV2sKIYRweeXl5axbt465c+eyc+dO2rVrx5133kmTJk3k6j4XVV5ezvfff8+UKVOIjY3ld7/7nfy8rpDMORNCCOHyTCYTXbt2pVWrVuzfv59p06YxduxYbr/9dh566CFCQ0NlSygXoWkaubm5TJo0iUWLFvHHP/6R/v374+/vLz+fG0DCmRBCCJehlMLX15fU1FSaNWtGeno6b731FgMHDmTEiBEMHjyYiIgIx96douaVlJSwdetWXnrpJTw9Pfnggw9o1qyZLJFxA0k4E0II4XKUUphMJjp27EhKSgrLly/n008/5YcffqBnz57cdtttNG7cWAJaDbLb7Zw4cYIvv/yS+fPnM2jQIO6//36Cg4OdXVqdI+FMCCGESzObzfTp04fWrVuzevVq5s+fz3fffceAAQMYNmwYEREREtJuMrvdzsqVK3nvvfcwm828+OKLdO3aVeaW3SRyQYAQQohaw2azUVBQwPr165k8eTK5ubncd9993HvvvXh6esoVgjeY3W6nsLCQDz74gM8//5wRI0Zw//33ExoaKm19neSCACGEEHWCXq/H39+f22+/nc6dO7No0SKmTJnC3LlzeeCBB+jUqRNhYWEy/+k6aZpGaWkpP/30E++++y4lJSV89NFHtGrVSi7KqAHScyaEEKJWy87OZt68eSxcuBCj0Ujv3r3p37+/zIW6RpqmcfjwYT799FPWrVtH165dGT16tONqWXFjyDpnQggh6jSbzcaxY8dYsWIFM2bMwM3Njfvuu4++ffvi6ekpoeIKaJpGeXk5ixYtYvLkyYSFhTFhwgRatGiBu7u7s8urcyScCSGEuCVYrVYKCgqYM2cOU6dOJSgoiCeffJLU1FQ8PDxkntRlWK1WTp06xT/+8Q82bNjAhAkTGDJkCD4+PhJsbxIJZ0IIIW4pmqZx6tQpPvvsMxYtWkRsbCx33XUXbdu2JSAgQAJHFbvdTk5ODt9//z2ffPIJSUlJPProozRq1Eja6CaTcCaEEOKWpGka+/fvZ/bs2axZs4awsDCGDBlCjx49LjtUp2kamqbV+onvmqZRUlKC0WjEYDBcdLvNZuOHH37gyy+/pLCwkLvvvpv+/fvj6enphGpvPRLOhBBC3LKq51JlZmayaNEiZsyYQWJiIn/84x9p1aoVer3eEcKqtyWaO3cuXbt2pUGDBrUyoGmaRkVFBU8//TS9e/fm9ttvRylF9e/8rKws3nrrLZYtW8aQIUMYNmwYsbGxcpVrDZKlNIQQQtyylFKYzWaSkpJo2LAhd911F5MmTeL3v/89rVu35uGHHyY2NhZPT080TWPjxo088cQTdOnShcmTJxMVFeXsb+GqFRcX87e//Y2pU6eyZ88eWrVqRUhICGfPnmXZsmW8++67hISE8Nlnn9GwYcMLAqpwPglnQgghbglKKdzc3IiKimLixImMHDmSadOm8cc//pF27drRr18/kpKSeOeddygoKGD+/Pl4eXnx+uuvEx0d7ezyr1hxcTEfffQRn3zyCaWlpWzevJmZM2fSpEkTpk+fzsmTJ3nwwQcZNmwYHh4eEspckAxrCiGEuGWVl5ezfv165s+fz5YtW6hXrx4zZsygvLwcAIPBwKhRo3jppZdqRQ+azWZj1qxZ/PnPf+bYsWOO4y1btsTf359evXoxePBg4uPjZQjTyWRYUwghhLgEk8lEly5daNWqFdu2beP+++93BDMAi8XC9OnTcXd358UXXyQwMNBle5qqh2Qff/xxTp48ecFtO3bs4NFHH+Xhhx/Gy8vLSRWKKyULvgghhLilKaXw9PRkz549nDt37qLby8rKmDx5Mq+++ir5+flOqPC32Ww2Nm3axH333cepU6cuut1qtfLpp5+SlZXlhOrE1ZJwJoQQ4pZ3/PhxZs2aRV5e3iVvt9vtvP322/zrX/9yuYCmaRq7du3imWee4fDhw1xuutKZM2f4xz/+QUVFRQ1XKK6WhDMhhBC3NLvdzubNm9mxY4fj2KWuXrRarbz99tu8/fbbFBUV1XSZl6RpGtnZ2bz00kusW7cOq9XquO38ddrc3d1JTk7GYDBcNoAK1yEXBAghhKi1tm/fzs6dO6/7PKWlpZw7d44zZ85w8uRJsrKyOHXqFCdOnCAvL8+xKK1Op8PDw4Phw4fTqVMnp0+q1zSNzz//nOXLl2O327Hb7Sil8PX1JTY2ltjYWOrXr09ISAju7u6YTKYbso2V0WjkzjvvdNn5d7WBLEIrhBCiTvrrX//Kp59+Sr169W7YvplX+nvRbrej0+mcFlA0TaO0tBSTyfSr3/uNrq+goICTJ0+Sk5Mj4ew6yNWaQggh6qy+ffsybty4S25RdLNomobVasXNzc1pAcVut1NQUICXlxdubv/7dV7dy3ezbN++neeff/6mnV9IOBNCCCGumlKqRsPgpeh0Ovz8/C46Lr1ZtZ+EMyGEEOIXCgsLyc/PR6fTERYWdsOGTM9ntVopLCzE09MTo9F4Q8/9y96z6qHagoIC3NzcZHNzFydXawohhBC/sHnzZt58882LFqW9kU6dOsVbb73FoUOHbvi5f9l7Vr3p+ccff8zixYtv+POJG0t6zoQQQtRpGzduJD09nQEDBhAZGcl3333HuXPnGDRoEL6+l5yPTdeuXWnevDkPPPDABcfPnDnD1KlTsVqteHh40LJlSzp16nTDe75qQnl5OXPmzOHo0aM0bNiQPn36YDKZnF2WQHrOhBBC1HEHDhzg+++/Z+vWrVgsFr7++mvWrl1LSUkJVqsVu90OVE6wt1qtv3q1ZlFREbt37yYtLY0uXbrw3XffsXbtWqBy6NBisVBRUXHReex2u+M2m8120XNUX2BQfb/qmqpvs9lsjsdZrVYqKiqwWCxomuY4l81mw2q1XlRD9TnPr0nTNP773/+yfv162rRpw8qVK1m6dOkVX6kqbi7pORNCCFGn6XQ6QkNDyc3NZcWKFTRp0oSjR48CMHHiRFJTU+nTpw8rVqxg9erVvPDCC786qd7Dw4PY2FiCg4MJCQlxbDC+c+dOZsyYQVZWFiEhITz22GOEhoZSUVHB0qVL+f7777FarXTr1o2BAwc6zme329m2bRuzZ89m2LBhfPnllwwdOpS2bdsClft7vv/++yQmJpKUlMRXX31FRkYGfn5+jB49msaNGwPw1VdfsWnTJpRS5Obm0q9fPwYMGMCiRYtYtGgRXl5e2Gw26tevj81mY+HChYwdO5YuXbpQUFDA999/T9++fW/Wj0FcBQlnQggh6rzQ0FAAvv32W0aNGsWXX34JVAaj83uTbDbbb54rOzubefPmodfr
OXHiBHfccQdQuZDt+PHjCQgI4P3332fBggWMHj2a3bt38+233/Lkk08SERHB4cOHHeey2WysW7eOjIwMRowYQbNmzdi+fTs7duwgMjKS48ePk5SUxLFjx7jtttsoKChgyJAh1KtXj2+//ZYFCxaQmJiIm5sbdrudc+fO8fLLLzvWfcvOzmbRokU8+uij+Pv78/TTTwOVPYBFRUX4+vqyYcMG3N3dZd9NFyLhTAghRJ3n4+PjWA8sIiLius5lMBjw8/PD3d0dX19fsrOz0TQNDw8PZsyYwZkzZzh69CgJCQkA7Nu3j4YNG9KwYUP0ej3NmjVznCs3N5fDhw9jNptJSEhAKUWLFi34+OOPqVevHrNnz+axxx6joKCAevXqcebMGebNm8fx48fJysrCbDZfMASakpJCdHQ0Op3OsbWTXq8nISEBg8FAo0aNgP9dMKDT6fDy8qK0tFSW4HAhMudMCCFEnafT6ejbty+vvPIKHh4ejuNGo5GysjLHgq7nBx2DwYBSivLy8gvmYgUEBNClSxcGDBhA27ZtWbduHfn5+bz22ms0a9aMhx56iK5duzoe4+XlRUFBgWNOWUVFheO2gIAAHn74YaKjo5k+fTpWq5WEhAROnjzJiRMnCA4OZu3atdSvXx+j0cjbb7+Nv78/EyZMoH///hdN4DebzReELE9PT2w2G6WlpdhsNgoLCwHw9vbGz8+PnJwcGjVqxLlz54iKirrxDS+uiYQzIYQQtwQPDw/8/PwuCC9NmjRh9erVLF++nI0bN16wbIanpycJCQnMmTOHXbt2UVZWBlSuFbZt2zbWrFlDeno6cXFxGAwGPDw8KCwsZN++fezcudMR9Nq2bUtZWRkzZ85k5cqVLFy4kNLSUqAyAPr6+jJu3Dj27dvHqlWr0Ol0REVFcezYMfr378/3339Py5Yt0TQNd3d3ysvLOXToED///LPjPJeilCIsLIzIyEj++9//smDBAg4ePOi4bdCgQcyfP58FCxawatUqmW/mQmRYUwghRJ2WnJxMdHS042tPT0/uuOMOvLy86NatG2VlZZw6dYpevXoBF64R9vvf/56VK1eSkZFBdHQ0vr6+pKWlcezYMQwGA506daJNmzZ4enryyCOPsHHjRqxWK3feeacjnAUFBfH444+zZs0ajh07RkpKCiaTCR8fH9LS0ggKCsLf3597772XoqIiNE1j2LBhaJpGixYtGDp0KC1btsRoNDJmzBhWr17N0aNH6dKlCxaLxbH5evPmzbFYLBd87x4eHjzwwAOsXbuWiooKJkyYQEREBEop+vTpg8Fg4OTJk/Tt25cuXbrI0KaLkI3PhRBC1Fp//etfyc3NrfG9NW9l1XtrHjp0SMLcdfi1jc9lWFMIIYQQwoVIOBNCCCGEcCESzoQQQgghXIiEMyGEEEIIFyLhTAghhBDChchSGkIIIWq1Xbt28c033ziWlBA31/Hjxy9YD07ceBLOhBBC1Frt2rUjLy/PsfJ9bWGxWFi2bBnx8fHEx8c7u5yr4uXlxciRI51dRp0m4UwIIUSt1atXL9LS0pxdxlUrLi4mJyeHwYMHM3jwYGeXc9VkfbObS8KZEEKIWstoNGI0Gp1dxjVxc3PDbDbj5eXl7FKEi5ELAoQQQgghXIiEMyGEEEIIFyLhTAghhBDChUg4E0IIIYRwIRLOhBBCCCFciIQzIYQQQggXIuFMCCGEEMKFSDgTQgghhHAhEs6EEEIIIVyIhDMhhBBCCBci4UwIIYQQwoVIOBNCCCGEcCESzoQQQgghXIiEMyGEEEIIFyLhTAghhBDChUg4E0IIIYRwIRLOhBBCCCFciIQzIYQQQggXIuFMCCGEEMKFSDgTQgghhHAhEs6EEEIIIVyIhDMhhBBCCBci4UwIIYQQwoW4ObsAIYQQoq6z2+1s27aNw4cPA1BWVsbx48fZsGEDer0eAG9vb1q1akVgYKATKxWuQMKZEEIIUQO2bdvGE0884QhjJSUl7Nq1i2nTpmG322nfvj1Tp051cpXCFUg4E0IIIW4ypRRt2rTBw8ODkydPOo6XlpYCYDQaSUpKIjw83FklChcic86EEEKIm0wpRWhoKGlpaZe83dfXl759+6LTya9lIeFMCCGEqBEBAQG0b98ek8l00W3h4eG0a9fOCVUJVyThTAghhKgBer2eNm3akJCQcNHxe+65B3d3dydVJlyNhDMhhBCihiQnJ5OUlIRSynHMZDIxaNCgC46JW5uEMyGEEKKGeHl5kZaWhpeXF1A5F61z585ERUU5uTLhSiScCSGEEDWoX79+eHt7O74eMmQIZrPZiRUJVyPhTAghhKhB0dHRdOrUCZ1OR4MGDUhNTZWrNMUF5H+DEEIIUYOUUowaNQqlFO3btyc6Olrmm4kLyCK0QgghXEJ+fj7jxo1zdhk1oqioCLvdzsaNG5kwYcIt0XM2aNAgRowY4ewyagUJZ0IIIVxCWVkZq1evZujQoQQFBTm7nJvKZrPh6elJcHAwISEhzi7nprLb7axatYodO3ZIOLtCEs6EEEK4DD8/P3r16kVsbKyzS7npSktLMRgMuLnV7V/FNpuN06dPO7uMWqVu/48QQghRK90Kc7A8PDycXYJwURLOhBBCiFpC0zSg5sJr9fPV5HMKuVpTCCGEqDWWLFnCxIkTa+z59u/fz7/+9S9ycnJq7DmF9JwJIYSoRfLz89m5cyfh4eHExsaSk5PD7t27adq06TVdRJCRkYHdbicxMfEmVFtpz549HD16FABvb28SEhIIDAy8pp4oi8VCaWnpjS7xsux2O2VlZWiahqZp5ObmsnfvXqxWKw0bNpSdDW4S6TkTQghRa5w4cYL333+fTz75BE3TWLVqFS+++CL79++/pvMtXryY+fPn3+AqLzR37lwWLVrE6dOnWbNmDTNmzKjRgHWjFBQU8NVXX7FmzRp27NjB1KlTycrKcnZZdZL0nAkhhKhVQkNDycnJ4ciRIxQWFjq2QrJYLCxdupSFCxfi6enJ8OHDadGiBQCZmZl89dVXHD16lNTUVEaOHIm7u/tln6OsrIwFCxawbNky/P39GTZsGM2aNUPTNLZs2cKMGTMoLy9nyJAhdO3aFZ1Ox+nTp3n33XfJysqiTZs2jBw5EqPRCEDLli0ZMWIE27dv58MPP6SkpAS9Xs+aNWv47rvvsNvtDB48mLS0NJRSlJaWsnDhQlauXImfnx+/+93viImJcdRns9lYunQpGRkZDBgwgHnz5jFy5Ej8/f0BOHDgAEuWLGHUqFEcOHCAr7/+mrNnz9KzZ0+GDBmCXq/HYrHw1ltvoWkamZmZlJeX8+c//5nIyEhmzJhBeno6ISEhjnlnOTk57N+/n0ceeYSQkBAmTpzIli1buOOOO27Gj/mWJuFMCCFEraLX6+nUqRMzZswgPDyc0NBQlFJkZmayYsUKxowZg6+vL8eOHaO0tBRN05g0aRJdunRh3LhxfPzxx8yaNYtRo0Zd8vyapvHjjz/y008/8dRTT7Fv3z5mzJhBSEgI7u7uLFiwgG7dupGamsqWLVsoLCzE19eXOXPmYDK
ZePnll9myZQunT58mMjISgNOnT7N3717S09MJCgrCZDJx7tw5KioqeOaZZzh9+jTTpk0jLCyMxMREFi9ezPr163n88cfRNI0jR444wllFRQULFy5k69atjBkzBoPBQGFhIVlZWZSXl2MymcjMzMRut2Oz2Th27Bjjxo3DaDQyceJEwsLC6Ny5M5qmsXfvXiIiInjuuecICgrCzc2Nr7/+muPHj/N///d/LFy4kA0bNqBpGjk5OZjNZgwGA6WlpURERHDy5Mka+7nfSiScCSGEqHVat27N999/T3JyMp6engD4+PgQHR3Nvn37CA4OJiEhAXd3d44cOcLevXtp1qwZy5cvR9M0fv75Z+x2+yXPbbfbyczMpGXLlkRFRWEymVi5ciV5eXn4+voSFxdHVlYW27dvJz4+3tFz16BBA86cOUN6evpFi8tu376d4uJiioqKqF+/PjqdDj8/P8LDw1m/fj0FBQWUl5dz+vRpEhISSE9Pp3v37sTFxTnOXW3//v3k5OQwZMgQwsPDKS0txc/Pj6ysLKZNm0Z0dDTe3t5ERUXh4eFBYmIiO3fupKSkBJ1Ox5EjR+jcuTNQua5ct27dHCGyrKyMQ4cO0alTJ8LCwkhNTeXw4cNAZW+dTqcjIyMDpRQ6ne6ybSiuj8w5E0IIUesEBATw2GOP0b59e8fE+pCQEIYNG0b9+vXJzc3lm2++ITc3F6UUBoMBPz8/vLy8aNq0KXffffdlz119vvOXkag+7uHhQf/+/WnZsiV2u53PP/+cI0eOANCpUyf69u2L2Wxm7ty5bN682fHYnj178vzzz/PII4+wY8cOMjMz2bdvH9988w3l5eWONc9sNpvjuX75/NUCAwPp27cvmzZtIjs7G7PZTHBwMNu2bcPf35+TJ0+SnZ1NREQEhYWFTJkyhby8PDw8PNDr9VitVse59Hr9RcO7l3pupRR+fn6UlZXRrFkzunTpQkFBAT4+Ppf/IYlrJuFMCCFErePm5kbr1q0JDg52HDt69Cjp6elEREQQERFBVlYWZWVlBAUF0ahRI3Q6HSkpKXh7e3PmzBnHfpYWi4WioiKKioooLi5G0zTi4+PZtm0bhw8fZsuWLY5wUlhYyMqVK/H29qZBgwZkZWVRVFQEwMKFCykrKyMhIYGSkhLOnDnjqK2iooKioiJOnjxJUVER7u7unD17FpvNRtu2bQkJCXHcXylFmzZtWL58Ofv372f//v2sWrXKca7w8HD69u1LeHg4M2bMwGazER4ezoYNG2jevDlhYWEcPXqUoKAgiouLOX36NCkpKcTFxZGdnX3J4FXNZDKRkJDAmjVrOHbsGBs3biQ7OxuA4OBgDAYDmzZtcgTM5OTkG/yTFSDDmkIIIWoRDw8P4uLiMBgMjmPx8fH4+PgQHh5ORkYGkyZNwmg0MmTIECIjI1FK8fDDDzNr1iyWL19O48aNufPOOwGIiIhg4cKFPPLIIwAYjUaeffZZOnfuTElJCe+++y7+/v6MGDGC0NBQ7HY79erV47PPPqO4uJghQ4Y4Akrbtm354osvOHbsGI0bN6ZHjx4AREdHs2zZMjZs2EBgYCD3338/0dHR+Pv7s2/fPv75z38SHx/Pbbfdhp+fH0opevXqhaZpTJkyBT8/P8aMGQNU9prFxsZiNpu5++67effdd8nMzCQmJoaWLVvSsmVLQkND0ev1BAUFYTab6du3L5MnTyYsLIw+ffo4lhzR6XTExcU5hmWhMqj16dOH0tJS/vWvfxEbG0v79u0xGo34+flx3333MXPmTEpLS7n//vtp0qTJzf+h34LU5bpNXUVqaqqWnp7u7DKEEELcZNnZ2XTr1o3XXnvNMddKXD1N01xqNX+r1crUqVPx8/Pj1VdfdXY5LiMpKalg7969vpe6TYY1hRBC1Bnndzi4eufDzeJKwUxcGwlnQggh6ozzg4mEFFFbSTgTQgghhHAhEs6EEEIIIVyIXK0phBDCpVSvbC/qBrvdfsvO/7tWEs6EEEK4BJ1OR35+Pm+//bZj1f+6StM0ysvLcXNzw82tbv8qrt5+aujQoc4updao2/8jhBBC1Bqenp48//zzzi6jRlRUVDBz5kyaNWtG8+bNnV1OjWjVqpWzS6g1JJwJIYRwCR4eHkyYMMHZZdSIoqIi1q5dS48ePRg2bJizyxEuRi4IEEIIIYRwIRLOhBBCCCFciIQzIYQQQggXIuFMCCGEEMKFSDgTQgghhHAhEs6EEEIIIVyIhDMhhBBCCBci4UwIIYQQwoVIOBNCCCGEcCESzoQQQgghXIiEMyGEEEIIFyLhTAghhBDChUg4E0IIIYRwIRLOhBBCCCFciIQzIYQQQggXIuFMCCGEEMKFSDgTQgghhHAhEs6EEEIIIVyIhDMhhBBCCBci4UwIIYQQwoVIOBNCCCGEcCESzoQQQgghXIiEMyGEEEIIF+Lm7AKEEEKIuk7TNKxWK3a7HYDy8nLsdjsWi4Xy8nIAlFK4ubmh00m/ya1OwpkQQghxk2maxqpVq1i1ahUAFRUV7Nmzh2+//ZZ9+/YBEBERwaBBgwgNDXVmqcIFSDgTQgghasC5c+f429/+hqZpjmO7d+92fH7HHXcwbNgwZ5QmXIz0nQohhBA3mVKKZs2akZycjFLqots9PT3p0qUL/v7+TqhOuBoJZ0IIIcRNppQiPDycbt26XfJ2Hx8f+vfvf8ngJm49Es6EEEKIGuDl5UW7du0ICAi44LhSiubNm5OYmOikyoSrkXAmhBBC1AClFC1atKBBgwYX3XbPPfeg1+udUJVwRRLOhBBCiBqSkJBAs2bNLghiAQEB9O7d24lVCVcj4UwIIYSoIW5ubvTv3x9fX1+gsjftzjvvxMfHx8mVCVci4UwIIYSoQZ07dyY4OBgAo9FIr169MBqNTq5KuBIJZ0IIIUQN8vPzo2/fvri5udG8eXMaN24sV2mKC8gitEIIIeqciooKTp065ewyLqtLly785z//ISkpCU3TOHr0qLNLuiQPDw9HL5+oORLOhBBC1DmZmZk8MGYU/sHuuLm53iCR1WIjKNSbHXs38sz/PeTsci6iaVBcWEG7lDReffVVZ5dzy5FwJoQQos4pKSmhoOIoA+9NJTDUw9nlXMRu00jNbINvgBmfALOzy7mIpsGqeQc5fPiws0u5JUk4E0IIUSd5+5tIbhNKWLTrXQmpaRrNOkag0yl0etebb6ZpGpm7z3DkpLMruTVJOBNCCCFqmFIKN4PrhTLhGn5zIF4p9bFSKkcptfO8YwFKqaVKqYyqf/3Pu+0vSqkDSql9Sqne5x1PUUrtqLptkpJLU4QQQoiL2G12Pnt9Ex//fSP7tuag2TVnlyRq2JXMkpwG3P6LY88AyzRNawgsq/oapVRjYDiQXPWY95RS1csgTwbGAQ2rPn55TiGEEMLFaWja/z5uCqVo0DSIYxl57E7P5mY9jXBdvxnONE1bBZz9xeGBwKdVn38KDDrv+AxN08o1TcsEDgBtlFLhgI+maT9plf+bPzvvMUIIIUQtoTibXcLE8cs4mVlwU55Bp1O06RlNTILfTTm/cH
3XOucsVNO0UwCapp1SSoVUHY8E1p93v+NVxyxVn//y+CUppcZR2ctGdHT0NZYohBBC/DpLhY29m3PIP1OKb5A7Cc2DMZr1ZB8rpKTQQkySP2UlVo4fyKNevB8lxRbWzs8kc89Z1i06TESsD4ktQwgK96Si3Mb+bafJyy3Fx99Eo5RQDKbKwaP9205jqbBRmFdOaZGFpFYhhMV4X/Xis1aLnf3bTnM2uxgffzPxzYNx93RDs8Pxg3kcO5CH0aynQZMg/IPdAcjLLSNj+2ks5TZiEv2JbOAri966uBt9QcClftrarxy/JE3TpgJTAVJTU6VDVwghxE2x+rtMVszOoEHTIA7vOUun/rH0uLMhW348wdF953jg2TacPlHEt1N2MOJPLXEzVAa30mILOSeK0LvpiEkMAGDpzH1sXXWC6AR/ju4/x+mTxfS4syEAcz7YwaFdZ2jdIxqzhxvR19grtmHpURZ9voeGzSuHPVt0iaTPyCRyjhUxc9I2fAPNmD3cKCmw0P72GAAWfb6HYxnniIzz5WhGHv0fbIyXr+mGtJ+4Oa41nGUrpcKres3CgZyq48eBeufdLwo4WXU86hLHhRBCCKew2zXmfLCDO//QjPa96/Pj3IMsmb6P7kMbXvYxkXG+DB7flCP7z9H/wcZEN6y8Hq681Mo372znT292IbFVCLs3ZTPt1U10Hxpf2UulFEmtQrnn8VaYPdyuqedKs2ss+Gw3XfrH0WNYQzYsOcKcD3Zy2/BE8s+Wce50KYPHNSW8vg+Wcht6g47yUisnDuWT2CqU3iMSKSoox+QuCzW4umv9Cc0D7gdeq/p37nnHpyul3gQiqJz4v1HTNJtSqlAp1Q7YANwHvH1dlQshhBDXobzEQsHZMqIT/DGY9ITX9+FMVgkASoGGRuUFAJVB7ny/jFb5Z8rIPl7EPx5egV6vQ9M0/IM9sFTYMJrc0OkgNjkAd0/DFdWmlKOI/9VbZiU/t7SyXqOekHre5J8pQ7NrNGgSyB33NeLrd7dTVmyhU79Yug5qgLuHgaETmrHg0928MnYp9ZMCGPGnlhiM+ss+t3C+3wxnSqmvgK5AkFLqOPB/VIayr5VSY4CjwDAATdN2KaW+BnYDVuAhTdNsVaeaQOWVn+7AoqoPIYQQwinMHgZC63mzc30WofW8OfBzLhFxlQvWunsayMsto6zESs6xQs5mlzge5+amQ9OgKK8cTdNQSuEX7E5sI3/G/l87GrcOoyivnNyTRRhN//s1q7uK3jKdToeHt5G83FKsFhsGnRsmdzfCor3ZueEUsY0DyNx9luBIL3Q6RXF+BZFxvvzpzS7s+OkU86ftpkm7cEIiPbFZ7dz3dCo2m8bzwxfRvFMEbXrKfG5X9pvhTNO0EZe5qcdl7v934O+XOJ4ONLmq6oQQQoibROkUdz/Sgm/e287WVccpL7UycGwTlIL4ZkGsnHOQfz++Ck8fA+fnKi8/E4ktQ/jijc0EhnnS74HGJLYM4b6nUln0+V4WfbYHs6eBNj2jSUq5ttp0ekVym1Bmvr2Nib9fTp97k2jTK5rB45sy461t7NuSQ0WZjT4jk9AbdFRU2Fj//WEy95ylvMxGUkoIfoFmNDsc3HWGOR/spKzEQmQDX2IbBdyYBhQ3jbpp67TcIKmpqVp6erqzyxBCCFGLbN26lQnPDOK5D3r+6vZNNpuds1kllJdaMbm74R/igd5NodkhL7eU4sIKzB5u6JTCJ9DsGA4sLqggL7cUTdMICPXAw8uIzWrnXE4pZaUWDEY9/iHujp6zszklGE36q5qIb7XYOHe6lLJiC35B7nj7m7HbNM7mlFBWbMFo1uMf4oGb4X89eQXnytHpFX6BZty9KodQy0qs5OWWYrPa8fI14Rto/s05b5qm8e2UHRxZFcr06dOvuGZx5ZKSkgr27t3re6nbZFagEEKIW5ZeryM40uui40oPAaEeBFxm03RPHyOePsYLz+WmIyjC85L3Dwi58DxWi51dG7Muup/BqCMuORCzhwE3g57giAtr0+kVQeEXP4dS4HOZTdTdPQ1XPNdNuAYJZ0IIIUQNs1psrJp38KLjnt5GwqJ9MHs4I0xdbuUrUdMknAkhhBDX4HqijFKK8Bgf2t9en8i4ypGtE4fy2b72JG6GK9lZ8Wao/m5ce7rTrcBZ/wOEEEIIJ7u+EHI9fUx70rPZk55NSNT/hi0DQj3YtyWHjJ9zb96+nVdEes+cTcKZEEKIW5RzQoimwfxpu+lwR+wFvWTungaSUkJYuyATu016r25lEs6EEEKIGlRcUM7eLTk0bRd+0VWTcU2C2L0pC5tTwpkEQlch4UwIIYS4oX495OSeKsZmsRMYfvGVoIGhHhTlVVBSWHGzivsVMpzpKiScCSGEEDfUb6whZtdAgbrE/aq3jXLxJUjFTSbhTAghhKhBgeGeKJ3ibE7JRbedyynF09uIp7esS3Yrk3AmhBBC1CBvPzMJzYMvuQjtwV1nSGwVgt5Nfj3fyuSnL4QQQtQgpaDPqCTWLsjEZrU7jpeXWtmTnk2HO2LR6WX+161MwpkQQghRw5p3jCAq3o8Th/Idx7KPFVIv3o9GKSFOrEy4AtkhQAghhKhhZg8D9z2VesGx6AR/ohP8nVSRcCXScyaEEEII4UIknAkhhBBCuBAJZ0IIIcQVkwXIxM0n4UwIIYS4YnIVpbj55IIAIYQQdY5SiuMH8/nq31vx9DE5u5xaSGPP5hyifUKdXcgtScKZEEKIOicwMJC+Pe5GV65Quc6upnZqUk8jNaW1s8u4JUk4E0IIUefUq1ePKVOmOLsMIa6JzDkTQgghhHAhEs6EEEIIIVyI0jTXvixYKVUI7HN2HbVUECCzLa6etNu1k7a7dtJ210ba7dpJ2127G9F2MZqmBV/qhtow52yfpmmpv3038UtKqXRpu6sn7XbtpO2unbTdtZF2u3bSdtfuZredDGsKIYQQQrgQCWdCCCGEEC6kNoSzqc4uoBaTtrs20m7XTtru2knbXRtpt2snbXftbmrbufwFAUIIIYQQt5La0HMmhBBCCHHLcOlwppS6XSm1Tyl1QCn1jLPrcSVKqY+VUjlKqZ3nHQtQSi1VSmVU/et/3m1/qWrHfUqp3s6p2jUopeoppVYopfYopXYppR6tOi7t9yuUUmal1Eal1Paqdnup6ri02xVQSumVUluVUvOrvpZ2uwJKqcNKqR1KqW1KqfSqY9J2V0Ap5aeUmqWU2lv1ftde2u63KaUSq/6/VX8UKKUeq9G20zTNJT8APXAQiAOMwHagsbPrcpUPoAvQCth53rF/AM9Uff4M8HrV542r2s8ExFa1q97Z34MT2y4caFX1uTewv6qNpP1+vd0U4FX1uQHYALSTdrvi9nscmA7Mr/pa2u3K2u0wEPSLY9J2V9Z2nwJjqz43An7SdlfdhnogC4ipybZz5Z6zNsABTdMOaZpWAcwABjq5Jpehadoq4OwvDg+k8sVI1b+Dzjs+Q9O0ck3TMoEDVLbvLUnTtFOapm2p+rwQ2ANEIu33q7RKRVVfGqo+NKTdfpNSKgroC3x43mFpt
2snbfcblFI+VP4R/xGApmkVmqblIW13tXoABzVNO0INtp0rh7NI4Nh5Xx+vOiYuL1TTtFNQGUCAkKrj0paXoZSqD7SkshdI2u83VA3NbQNygKWapkm7XZn/AE8B9vOOSbtdGQ1YopTarJQaV3VM2u63xQGngU+qhtM/VEp5Im13tYYDX1V9XmNt58rhTF3imFxaem2kLS9BKeUFzAYe0zSt4Nfueoljt2T7aZpm0zStBRAFtFFKNfmVu0u7AUqpfkCOpmmbr/Qhlzh2y7XbeTpqmtYK6AM8pJTq8iv3lbb7Hzcqp75M1jStJVBM5VDc5Ujb/YJSyggMAL75rbte4th1tZ0rh7PjQL3zvo4CTjqpltoiWykVDlD1b07VcWnLX1BKGagMZl9qmvbfqsPSfleoanhkJXA70m6/pSMwQCl1mMrpGd2VUl8g7XZFNE07WfVvDvAtlcNF0na/7ThwvKp3G2AWlWFN2u7K9QG2aJqWXfV1jbWdK4ezTUBDpVRsVXodDsxzck2ubh5wf9Xn9wNzzzs+XCllUkrFAg2BjU6ozyUopRSV8zD2aJr25nk3Sfv9CqVUsFLKr+pzd6AnsBdpt1+ladpfNE2L0jStPpXvY8s1TRuJtNtvUkp5KqW8qz8HbgN2Im33mzRNywKOKaUSqw71AHYjbXc1RvC/IU2oybZz9pUQv3GVxB1UXkl3EHjO2fW40kfVf5hTgIXK1D4GCASWARlV/wacd//nqtpxH9DH2fU7ue06Udnl/DOwrerjDmm/32y3ZsDWqnbbCbxQdVza7crbsCv/u1pT2u232yuOyqvgtgO7qn8PSNtdcfu1ANKrXrNzAH9puytuOw/gDOB73rEaazvZIUAIIYQQwoW48rCmEEIIIcQtR8KZEEIIIYQLkXAmhBBCCOFCJJwJIYQQQrgQCWdCCCGEEC5EwpkQQgghhAuRcCaEEEII4UIknAkhhBBCuJD/BzCU4DZ6PtbKAAAAAElFTkSuQmCC", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" + "image/svg+xml": "\n\n\n\n\n\n%3\n\n\n\n140393102828544\n\nouter_loss\n ()\n\n\n\n140393111546128\n\nMseLossBackward0\n\n\n\n140393111546128->140393102828544\n\n\n\n\n\n140393111546032\n\nMulBackward0\n\n\n\n140393111546032->140393111546128\n\n\n\n\n\n140396237940288\n\nAddBackward0\n step1.a\n ()\n\n\n\n140396237940288->140393111546032\n\n\n\n\n\n140393111546464\n\nAccumulateGrad\n\n\n\n140393111546464->140396237940288\n\n\n\n\n\n140393102725760\n\nMulBackward0\n\n\n\n140393111546464->140393102725760\n\n\n\n\n\n140393102827744\n\nstep0.a\n ()\n\n\n\n140393102827744->140393111546464\n\n\n\n\n\n140393102725232\n\nMulBackward0\n\n\n\n140393102725232->140396237940288\n\n\n\n\n\n140393112318976\n\nUpdatesOpBackward\n\n\n\n140393112318976->140393102725232\n\n\n\n\n\n140396647894368\n\nMuOpBackward\n\n\n\n140396647894368->140393112318976\n\n\n\n\n\n140393102725472\n\nMulBackward0\n\n\n\n140393102725472->140396647894368\n\n\n\n\n\n140393112318736\n\nNuOpBackward\n\n\n\n140393102725472->140393112318736\n\n\n\n\n\n140393102725616\n\nMseLossBackwardBackward0\n\n\n\n140393102725616->140393102725472\n\n\n\n\n\n140393102725760->140393102725616\n\n\n\n\n\n140393102725568\n\nPowBackward0\n\n\n\n140393102725568->140393102725472\n\n\n\n\n\n140393102725568->140393102725760\n\n\n\n\n\n140393102725904\n\nAccumulateGrad\n\n\n\n140393102725904->140393102725568\n\n\n\n\n\n140393111543968\n\nPowBackward0\n\n\n\n140393102725904->140393111543968\n\n\n\n\n\n140393111485872\n\nx\n ()\n\n\n\n140393111485872->140393102725904\n\n\n\n\n\n140393102725328\n\nAccumulateGrad\n\n\n\n140393102725328->140396647894368\n\n\n\n\n\n140393111534224\n\n ()\n\n\n\n140393111534224->140396647894368\n\n\n\n\n\n140393111534224->140393102725328\n\n\n\n\n\n140393111531904\n\n ()\n\n\n\n140393111531904->140396647894368\n\n\n\n\n\n140393111531904->140393112318736\n\n\n\n\n\n140393112318736->140393112318976\n\n\n\n\n\n140393102725712\n\nAccumulateGrad\n\n\n\n140393102725712->140393112318736\n\n\n\n\n\n140393102827824\n\n ()\n\n\n\n140393102827824->140393112318736\n\n\n\n\n\n140393102827824->140393102725712\n\n\n\n\n\n140393102828784\n\n ()\n\n\n\n140393102828784->140393112318976\n\n\n\n\n\n140393102828144\n\n ()\n\n\n\n140393102828144->140393112318976\n\n\n\n\n\n140393102828224\n\n ()\n\n\n\n140393102828224->140393112318976\n\n\n\n\n\n140393111543968->140393111546032\n\n\n\n\n\n" }, + "metadata": {}, "output_type": "display_data" } ], "source": [ - "net = Net().cuda()\n", - "x = torch.tensor(2., requires_grad=True, device=torch.device(\"cuda\"))\n", - "y = torch.tensor(1., device=torch.device(\"cuda\"))\n", + "net = Net().to(device='cuda')\n", + "x = nn.Parameter(torch.tensor(2., device=torch.device('cuda')), requires_grad=True)\n", + "y = torch.tensor(1., device=torch.device('cuda'))\n", "\n", - "optim = torchopt.MetaAdam(net, lr=1., use_accelerated_op=True)\n", + "optim = torchopt.MetaAdam(net, lr=1., moment_requires_grad=True, use_accelerated_op=True)\n", "\n", + "net_state_0 = torchopt.extract_state_dict(net, enable_visual=True, visual_prefix='step0.')\n", "inner_loss = F.mse_loss(net(x), y)\n", - "net_state_0 = torchopt.extract_state_dict(\n", - " net, enable_visual=True, visual_prefix='step0.')\n", "optim.step(inner_loss)\n", - "net_state_1 = torchopt.extract_state_dict(\n", - " net, enable_visual=True, visual_prefix='step1.')\n", + "net_state_1 = torchopt.extract_state_dict(net, enable_visual=True, visual_prefix='step1.')\n", + "\n", "outer_loss = F.mse_loss(net(x), y)\n", - 
"torchopt.visual.make_dot(outer_loss, params=[net_state_0, net_state_1,{'x': x, 'outer_loss': outer_loss}]).render(\"graph\", format=\"png\")\n", - "plt.figure(figsize=(15,15))\n", - "plt.imshow(imgplt.imread('graph.png'))" + "display(torchopt.visual.make_dot(outer_loss, params=[net_state_0, net_state_1, {'x': x, 'outer_loss': outer_loss}]))" ] } ], "metadata": { - "interpreter": { - "hash": "238ad0feaa04228775e5e27229169b0e3e76c0e018d5a6d65c4906ccad5c5a9e" - }, "kernelspec": { - "display_name": "OpTorch", + "display_name": "Python 3.8.13 ('torchopt')", "language": "python", - "name": "optorch" + "name": "python3" }, "language_info": { "codemirror_mode": { @@ -586,7 +557,12 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.7" + "version": "3.8.13" + }, + "vscode": { + "interpreter": { + "hash": "2a8cc1ff2cbc47027bf9993941710d9ab9175f14080903d9c7c432ee63d681da" + } } }, "nbformat": 4, diff --git a/tutorials/4_Stop_Gradient.ipynb b/tutorials/4_Stop_Gradient.ipynb index 21492fc5..4e3d3053 100644 --- a/tutorials/4_Stop_Gradient.ipynb +++ b/tutorials/4_Stop_Gradient.ipynb @@ -11,30 +11,32 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "In this tutoial, we will illustrate the usage of torchopt.stop_gradient with a meta-learning example. We use torchopt.visual to help us visualize what is going on in automatic differentiation. Firstly, we define a simple network and the objective function for inner, outer optimization." + "In this tutorial, we will illustrate the usage of `torchopt.stop_gradient` with a meta-learning example. We use `torchopt.visual` to help us visualize what is going on in automatic differentiation. Firstly, we define a simple network and the objective function for inner- and outer- optimization." ] }, { "cell_type": "code", - "execution_count": 53, + "execution_count": 1, "metadata": {}, "outputs": [], "source": [ + "from IPython.display import display\n", + "\n", "import torch\n", "import torch.nn as nn\n", "import torch.nn.functional as F\n", "\n", + "import torchopt\n", + "\n", + "\n", "class Net(nn.Module):\n", - " def __init__(self):\n", + " def __init__(self, dim):\n", " super().__init__()\n", - " self.fc = nn.Linear(1, 1)\n", + " self.fc = nn.Linear(dim, 1, bias=True)\n", " \n", " def forward(self, x):\n", " return self.fc(x)\n", "\n", - "def fn(x):\n", - " return 2 * x + 1\n", - "\n", "loss_fn = F.mse_loss" ] }, @@ -42,40 +44,39 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "We define the input x and output y. y will be served as the regression target in the following code." + "We define the input `x` and output `y`. `y` will be served as the regression target in the following code." ] }, { "cell_type": "code", - "execution_count": 54, + "execution_count": 2, "metadata": {}, "outputs": [], "source": [ - "x = torch.rand(5, 1)\n", - "y = fn(x)\n", - "net = Net()" + "batch_size = 64\n", + "dim = 16\n", + "\n", + "x = torch.randn((batch_size, dim))\n", + "y = torch.zeros((batch_size, 1))\n", + "net = Net(dim)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Let us define the meta-parameter, MetaSGD as the inner-loop optimizer, Adam as the outer-loop optimizer. " + "Let us define the meta-parameter, we use `MetaSGD` as the inner-loop optimizer and `Adam` as the outer-loop optimizer. 
" ] }, { "cell_type": "code", - "execution_count": 55, + "execution_count": 3, "metadata": {}, "outputs": [], "source": [ - "import torchopt\n", - "from torchopt import MetaSGD\n", - "from matplotlib import image as imgplt\n", - "from matplotlib import pyplot as plt\n", + "meta_parameter = nn.Parameter(torch.tensor(1.), requires_grad=True)\n", "\n", - "meta_parameter = torch.tensor([1.], requires_grad=True)\n", - "optim = MetaSGD(net, lr=1e-1)\n", + "optim = torchopt.MetaSGD(net, lr=1e-1)\n", "meta_optim = torch.optim.Adam([meta_parameter], lr=1e-1)" ] }, @@ -88,63 +89,55 @@ }, { "cell_type": "code", - "execution_count": 56, + "execution_count": 4, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "inner loss: 4.4117\n" + "inner loss: 0.5540\n", + "\n" ] }, { "data": { - "text/plain": [ - "" - ] + "image/svg+xml": "\n\n\n\n\n\n%3\n\n\n\n139978828415600\n\ninner_loss\n ()\n\n\n\n139978603488640\n\nMseLossBackward0\n\n\n\n139978603488640->139978828415600\n\n\n\n\n\n139978603489744\n\nAddmmBackward0\n\n\n\n139978603489744->139978603488640\n\n\n\n\n\n139978603490800\n\nAccumulateGrad\n\n\n\n139978603490800->139978603489744\n\n\n\n\n\n139975938634512\n\nstep0.fc.bias\n (1)\n\n\n\n139975938634512->139978603490800\n\n\n\n\n\n139978603490224\n\nTBackward0\n\n\n\n139978603490224->139978603489744\n\n\n\n\n\n139978603490368\n\nAccumulateGrad\n\n\n\n139978603490368->139978603490224\n\n\n\n\n\n139975938634432\n\nstep0.fc.weight\n (1, 16)\n\n\n\n139975938634432->139978603490368\n\n\n\n\n\n" }, - "execution_count": 56, "metadata": {}, - "output_type": "execute_result" - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYMAAAJCCAYAAAAiOKueAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAACIi0lEQVR4nOzdd3xV9f348de5Ozc3e08ICSskQCBMBVG24sCtVdtvh9pq69dWa+36VW2/rR3aqrXurXXhQCwoooDI3nuEEQhkr5ubm7vP7w/IKSGDBJLcjPfz8eBB7rnn3vfnrs/7nM86iqqqCCGE6N90wS6AEEKI4JNkIIQQQpKBEEIISQZCCCGQZCCEEAJJBkIIIeiiZKAoyhxFUfYpilKgKMovuiKGEEKIzqN09jwDRVH0wH5gJlAEbABuUlV1d6cGEkII0Wm64sxgPFCgquohVVU9wNvAlV0QRwghRCcxdMFzpgDHTrtdBExo6wGxsbHqwIEDu6AoQgghGh05coSKigqlpfu6Ihm0i6IotwO3A6Snp7Nx48ZgFUUIIfqF/Pz8Vu/rimai40DaabdTT21rQlXV51RVzVdVNT8uLq4LiiGEEKK9uiIZbAAGK4qSoSiKCbgRWNgFcYQQQnSSTm8mUlXVpyjK3cBngB54SVXVXZ0dRwjRnNfrxefz4ff7g12UfstsNqPX69Hpetc0ri7pM1BV9T/Af7riuYUQrSsqKuLQoUOUl5cHuyj91vjx40lKSiIkJCTYRemQoHUgCyE638qVK3nppZdYvXp1sIvSbz377LNceumlkgyEEMGjqirW8AjGXjyLG+7+WbCL06801Dv41c1XEggE6I0XDZNkIEQfozcYsFnCGDB0GHD6kHL1jNtt6ci+bT2u/8Sst9vP4Xl6jt7VwyGEOA8dqfTOpYKUmL2ZJAMh+qzzq6Babug4W/OHxOytJBkIIVrUcnXXtZVgf4nZE0kyEEKcIRidn/0lZs8lyUCIfqU9FWBHjorb83z9JWbvJslAiH6lI6NsOuv5+kvM3k2SgRD90VnrwC6o/PpLzF5KkoEQfVZLNeGpbUrLd59/Y0h/jtm7STIQos9q6ahXafPusx4nn9ORdn+J2btJMhCiX1Jb+butbWh14LkdF/eXmL2TJAMh+iWllb9b26a2fG+Hasv+ErN3kmQghGiHxmpRbddmidn7SDIQQrRDYy2otGuzxOx9JBkIIc7Q0uHvqWE5Z97VaZVjf4nZc0kyEEKcobWarytrxP4Ss+eSZCBEn3WW0TPnMv7+nMZk9peYvZskAyH6rDZGz7RyTZfzPybuzzF7N0kGQvRHwTjy7S8xeylJBkKI/9LqxlbWcThzt+Z/SMxeSpKBEOK/FGjPOEql9T8kZi8lyUAIcYZgVHj9JWbPJclAiL6uv1xErL/E7CKGYBdACNHFWlp+pxMPines/YaSo4dJzRzC8LHjuyVmizoYc+e6b6gsLSE8Kpq8KRd3TsxeTM4MhOizzjxsPW2N/w4/tmWO2hoKdmylYOc2jGZTt8Q8++PaF1On01NWdJTNK5bhcbtQ1Y7EbyVmLyZnBkL0WWfWhh05jG3fvscPH6Si+DgGg5GM4blNHuducFJvt1NXU609ZXxKGiGhtvOK+V+Nh/7nFjMjewRFB/ez9euvqCotISFtwDnFPLey9zySDIQQHdZ4FL3l6y/R6XVkjhiJXq9vsk/h/r2s+vQjPvv3a6hqAFVV+dWzrzP6wmnnGZxTdW/zCrgjMUNCw4hLSSM6MZmNX33Opbd+H0VppVJvI2ZfIc1EQvR7597EsXvD2lNnBTnN7hs8Mo/v/OJ3vL5xH/c98fwZR+fn0azSRn3c0ZjRCYmkDx7Khi8/b7uZqO/mAI0kAyH6oVYu4dIhZcePUVddjS0ikr
jk1Gb3K4qCTqdDp9eh0+nOCNM1tWtHY9oiIolNSuHI3t0E/L4O9hv0LZIMhOizWq/YOqMqrq2sIBDwY7KEYLZazxqz65x7TJPZQmh4OK4GJ656J6oa6PKYPZUkAyH6rLaXWjhfHlcDAHq9Hr2+sfuxa2O27Nxj6vR6DEYTqj+Az+uh/ScGwXidXUuSgRB9WsvnAK1WYx2o3/SGkwlAVVUCgdOPqDt43tEpdeq5xVRVlYDfD6ealzr2LH2rI0GSgRB9XQuVbavV2Ol3nKWSjoyJA04O52w8S2jvY881ZpvOIabX7aK+zo7RZMIaFo6i62CV2IdODiQZCNHXnesB7Fkel5A+kPDoGOpqqikrOtotMTv7sXU11ZSfKCIjOwedQX/2B3RCzJ5KkoEQ/d3Zjm5buV9RFAaPzMPv83G0YN95x9y6ajl//vH3ePy+u0423XS2FmJWl5VSdPAAoyZfhKLoWp9n0A/IpDMh+gNt0lQLC/acrf5r6UphpyrN7HGT2PL1lxzZu5vJcy5Hp/vv0XVtZQWHdu9kzWefUFFyArezgUWvPs/6ZZ+RkJbOld/9YZPnrLfbKS48THR84jm3vnQkprvBSWVpMVWlJVx6y3fPPRF0x7pL3UDODIToD867smq5es4YNgJbeAT2qkrKi483uS8QCODzumlwOAi1hTN++mxCQm046+y4nM4m+/p8XtwuJz6vl4HDslGaXG+g/ToSs7ToKLWVFYRHR5OWNaTN19mmPpAIQM4MhOhnGmuujh7OtrxvTGISA4eOABSO7ttLQkq6dl9UXDzjLpnNuEtmnzVmg8NBXfXJ9YQmzLwURdF1sHwdj3n8UAEA4y6e1cZ6Sf2HJAMh+iNVObe80MK+k+bMY9LseWd/jjZilhw9gqvBSWbOKIaMGnPWmO0udxsxJ82e18bjziNmLyXJQIj+SGnl7448rj3b2xkzM2cUg7JzW14OootitvtxHY3ZS0kyEKKPOqeD2A4/qOkDzjWmTqeDdo/x75yY3f46ezjpQBaijzqnyqrDDzr1gI5cw6a/xuzhJBkIIc5ff7mefV/MAqdIMhBCdILetVpp74rZPSQZCCHOQ2Pl2J2rePaXmN1LkoEQ4gznssrc+Xap9peYPZckAyH6u2Z1onKW+0/b3vjvvJet7qMxexEZWipEH+Oqd3Dw2D7efOyPwS5Kv+L1uINdhPMiyUCIPiQyMpLkxERKi4vZv/qrYBenQzweD3a7nZKSEkaMGNErVxDNyckhOjoao9EY7KJ0mCQDIfqQMWPGEBMTQ2VlZbCL0mElJSWsX7+e1157jYceegi9/hyuL9AD5OXlYbP1vrWOJBkI0Yekp6eTnp5+9h17oIMHD+J0OtHpdFxxxRW98ui6N5MOZCGEEJIMhBBCSDIQQgiBJAMhhBBIMhBCCIEkAyGEEEgyEEIIgSQDIYQQSDIQQgiBJAMhhBBIMhBCCIEkAyGEEEgyEEIIgSQDIYQQSDIQQgiBJAMhhBBIMhBCCIEkAyGEEEgyEEIIgSQDIYQQSDIQQgiBJAMhhBBIMhBCCIEkAyGEEEgyEEIIgSQDIYQQSDIQQgiBJAMhhBBIMhBCCIEkAyGEEEgyEEIIgSQDIYQQSDIQQgiBJAMhhBBIMhBCCIEkAyGEEEgyEEIIgSQDIYQQSDIQQgiBJAMhhBBIMhBCCIEkAyGEEEgyEEIIgSQDIYQQSDIQQgiBJAMhhBBIMhBCCAEYgl0AIUT/4nK5qKioYMuWLU22l5SUsHv3bgKBAJ9++il6vV67T1EUQkJCmDZtWpPtovNIMhBCdCu9Xk9FRQV/+ctfaGho0La73W5qamoIBAL8/ve/R1EU7T6LxcKUKVOYOnWqJIMuIslACNGtdDodOp0Ou93Ojh07CAQCzfbZtGlTk9uRkZHMmTOnSYIQnUv6DIQQ3Uqv1xMVFcWUKVPQ6c5eBSmKQmhoKNOmTWvX/uLcyDsrhOh2ERERzJkzB7/ff9Z9jUYjsbGxjB8/Xs4MupAkAyFEtwsLC+Oiiy4iNja2zaN9vV7PoEGDuPzyyzEYpFW7K0kyEEIEhdFo5IorriAsLKzVI36/309iYiIzZ85EURQ5M+hCkmo76L333mPBggXBLkaflZqayg033EB+fr788PswRVHQ6/XMnDmTjz/+GFVVW9zParWSmprKiBEjurmE/Y8kgw7asWMHHy1cSELaAMKjYoJdnD6lrOgoMeE2LrjgAvLz84NdHNHFdDod48aNIyIigurq6mb9B4qikJqaSlZWFtHR0UEqZf8hyeAchEVGM376HIbm5SPHrp1nxcIFVBzcG+xiiG6i0+kYNGgQGRkZVFRUUFtb2+z+3NxcRo0aJWeJ3UCSwTmwWK0Mys4hb8rFHf6SqtDtCaS3xNy3dZMkg35o1qxZlJSUUFdX12TOgd/vJzc3l7y8vCCWrv+QDuTucFpzaIcr5ZabUiWm6DOuvPJKkpOTm00+S0tLY/jw4aSnpwepZP3LWZOBoigvKYpSpijKztO2RSuKslRRlAOn/o86tV1RFOUJRVEKFEXZrijKmK4sfK9xPofl2mM7WFv22piivxk4cCCZmZnExsZq2wwGA7NnzyYjI0OaiLpJe84MXgHmnLHtF8AyVVUHA8tO3QaYCww+9e924F+dU8y+rL0Vbmf+IHpyTNHfmEwmcnNzGTZsmDbnwO/3M2nSJJKTk4Ncuv7jrMlAVdWVQNUZm68EXj3196vAVadtf009aS0QqShKUieVtY86vcJtR4XZKXVqL4gp+pXGZNB4FhAZGcnw4cOJiooKcsn6j3PtM0hQVbX41N8lQMKpv1OAY6ftV3RqWzOKotyuKMpGRVE2lpeXn2Mxgq9zq7V2HIkr/SSm6FeGDx/OkCFD0Ov1KIpCTk4OKSkpWK3WYBet3zjv0USqqqqKonS4rlBV9TngOYD8/Pxee6jYdrWmgqq0vJOqwjm2hfaXmL2JqqraP9FxkZGRDBo0iMGDB7Nnzx6uvPJKrFZru9YuEi1rbHJrb5/LuSaDUkVRklRVLT7VDFR2avtxIO20/VJPbeunWqkg4WQF2SVjPvtLzJ5lzZo1PProo3z22WfBLkqvFQgE8Pl8qKrKL37xC375y19K5/E5MplMvPjii1x22WXtPrs612SwEPg28KdT/3982va7FUV5G5gA1J7WnNS/tKcCbOP+c6o/+0vMHqhxWOSAAQO49NJLZanlc+Dz+aiqqmL16tVceeWVkgjOUWlpKe+9916L14loy1mTgaIo/wamAbGKohQB/4+TSeBdRVG+BxQC15/a/T/ApUAB4AT+p0Ol6UuaDM/s+Jdaafa4djxPn4jZe+l0OuLj47n44otlhc1zEAgEqKurIy0tjQsuuECSwTk6cOAA7733Xocfd9ZvrKqqN7Vy1/QW9lWBuzpcij6ts77QHXme3hyz99LpdISGhpKWlobRaDzv51NVtd0VYkf27ckxfT4fCQkJxMXFtfjcfeV1dmXMurq6c3qcnMt2g5a7FM/W0Xh+X77+ErMv60hl0FlH0cGOaTAYiI+Pb/W5+8rr7K6YH
SHJoDOozf5ootnHqra4VWIKIYJGkkFnUJr90c79JaYQomeQXq4eoz0dqZ3d2dpTY4q+6tFHH2Xbtm2oqorJZOKf//wnVqtVRl+d4nA4+Oijj3A6nVx99dVN1mvqapIMulx7K7/O2qcvxBR9VV5eHpGRkRQVFbF48WKZVHYGv99PcXExDocDr9fbrbElGXSZxsqxOyu//hKzbwoEAjgcDnbt2oXZbCYnJweTyRTsYnWqWbNmUV9fz6ZNm1i8ePF5P18gEMBut3Pw4EGGDBlCWFjYOT1P48zx0tJSKisrcTqdBAIBbDYbAwcOJCQkpM+fvUgy6ExNDo5bqBzPe2mGFo6++2TM/snj8bB3717uueceYmNjeemll0hISJDx9m3w+Xzs2bOHRx55hD//+c/k5OSc83M5nU4+//xz1q5dy9GjR/H5fGRlZXHnnXeSlZWFxWLpxJL3PJIMOtNZZ+KetsM5NZ238IA+GbN/OnHiBHv37iUxMZETJ05w5MgRbDYbNputyX5trX/UmDg6uk97HtfW853tuTp8RcBzKMeZa0N15DUFAgGeeeYZVq5cydy5c/nf//1f4uLiKCgoYPXq1SQmJmrJ4GzPd+b70do+7S1bd5FkcN5Or+1aqvlaqQ2VNu+VmL3c6ZOG2juB6NixY+zbt48LL7yQAwcOsHnzZuLj45slg+3bt/P555+zefNmqqurSU5O5u6772b48OGEhIQAJ4+YN2/ezKuvvsrRo0cxm81kZ2dzww03MGLECBRF4cknn8TpdDJhwgQuvvhiAN5++20OHTrEiBEjmDlzJuvXr+e5557Typ+cnMzgwYP58MMPmTx5MrfffjuJiYns2LGDF154gYsvvph58+ZhNBrZvHkza9asobq6ml//+tcdmnBVWlrKm2++yebNm6moqMBmszFt2jTuuOMOjEYjVVVVLF++nL/+9a/4/X5cLhff//730ev1xMfHM3fuXG6//XYURcHj8bB582btPXO73eTn53PNNdeQnZ2NwWCgtraWBQsW8P3vf5+LL76Y9PR0dDodeXl5jBw5sskkQofDwUMPPURERASVlZUUFRVRVlaGwWDg17/+NRdeeCEOh4N169axdOlSdu7ciV6v5+KLL+aqq64iKytLSxglJSU888wz7NixA7/fT0pKCuHh4c2aBztr4ltbJBmcN6WVv1vb1rRaPLePt7/E7L1O/+G250fs8/k4ceIER48e5dZbbyU0NJSdO3cyfvx4Bg0aBJysEI4cOcJzzz1HfHw88+fPJykpiYaGBvbt26e1bTudTgoKCvjb3/7GxRdfzPz58zEYDDidTvbu3cuIESMAcLvduFwufD6fVg6v14vL5dI6L30+HxUVFdxzzz1s2bKFwsJCysvL+cUvfsGjjz7KlVdeSXR0NH6/H6fT2aTT0+/343a7aWho6NB7p6oqK1asIDU1ldGjR2MymSgtLeXjjz9myJAhTJ48mbCwMKZMmUJycjL79+/n1Vdf5c477yQjIwOTyURMTIz2fEuXLmXz5s14PB5+8pOfYDQa+eyzz/jqq6+or68nPz+fQ4cO4XQ6GTp0KLGxsej1euDkJLgzlxZRVRWn08mOHTuYO3cus2fPJioqCqPRSEZGBgaDge3bt1NbW8vUqVO5/vrrqamp4YMPPmDlypX4/X6GDx9OIBDg2WefpaGhgWuuuYaEhASKiop44403mDRpUpOY3dFUKMmg2wWjWuwvMXuvsrIySktL8fl8DB06FLPZzOeff055eTkNDQ2EhISgqiobN27E6XSSlZXFpEmTiIuLw+FwUFhYqDVj1NTUsHXrVgKBAJMmTWLQoEEoioLdbqesrOwsJWkuNDSU7OxsqqqqsNvthIeHk5+fj8fjoa6uDrfb3dlvBxkZGURGRhIfH4/FYqGkpIRvvvmGHTt2MHr0aMLCwoiPjycyMhIAi8VCdnZ2sz4Dj8fDpk2bqKurY/LkyYwfPx6DwUBZWRlr165l//795OXlUVNTg6qqREdHExISQllZGdu3b+fAgQMAXHPNNc2WyIiJiWHUqFGMHDmSiIiIJkkjMTGR6OhooqKiSExMpL6+njVr1lBSUkJRURFDhw7Vtt1www1MmDCBuLg44uLi+Pjjjzv9/WwPSQbdrpUGky5tR+kvMXuvQ4cOUVlZSWRkJMnJyZhMJlwuFydOnKCyspLU1FRUVWXr1q2kpqYyfPhw0tJOrhZvsViajEe32+3s3buX7Oxshg8fjtlsBiAsLIyUlBavNdUqRVGIiIggJCSEiIgIYmJiiI2NxWq1YrVacbvdnT4EUlEUhgwZQnl5udaR63A4sNlsFBUVdShefX09R48exWQyYbFY2LdvHwBGo5HS0lLtrKax7d5gMKAoCg6HgwMHDrBs2TL279/PlClTiI2N1ZJB4wV4srKympyFNEpNTaW2tpaamhoqKirw+/2YzWaqqqqorq7G7/dTW1tLaWkpubm5JCcnY7VaSUtL05qRupskgy7VUs2n0OLFYDrhs3e7GtApOnQGPXr96R9t+2KqqorX4wYU9Ho9+navvNm9r7OvUVWVvXv3YrfbSUlJweVyYbPZiI6OprCwkMOHD2vJoLq6mszMzDZHtng8Hmpraxk+fHindNw2Xn1MUU59L041oeh0OgKBgPaYMztO27rYT2vlanzMunXrWLhwIYWFhdrZh9frJT8/v0NLM1dXV+PxeNiyZQtbtmxpFnfYsGH4/X6io6O1JOD1ehk0aBDf+973mDRpEt/97ndbfO6YmJgWFyRs/DxXrlzJunXrKCsrw+Vy4fF4yMrKIi8vTxsSq6oqYWFhWh+BTqcjIiKi2+cYgCSDLtbaD1HpcKXYngPqV/74O+KSUxgxfjJD8/LPKeZ7/3wMvcHIyElTGD5uYjuL2Xmvsz/y+/3s3LmTb775BrfbzTvvvKPdZ7fbGTBgAFOmTEFRFBITEykvL6e+vr7V5zObzcTGxnLs2LE2K06j0ah1sDaqra1tcvtsGit7nU6H0Whs8liXy9XiCpp6vV6rRD0eT7MEUl9fz69//WvuvPNO/vd//5cBAwbgcrl45ZVX2LNnT7PnayvhxcfHYzabufrqq7n77ruJiIhotk8gECAjIwOLxcLevXuJj49v1xlUa3FdLhcvvvgier2eO++8k4kTJ2KxWJqUX6/Xa2ca1dXV2gGA3++noqKC8PDws8bvbH17FkWPdQ7D99q4LxAIcGTvbg7t3kHyoCwGjcg955gzb7iFgu1b2LNpPS6Ho8PlPJeY/d22bdsoKyvj0ksv5ZNPPmHFihWsWLGCJ598ktjYWA4fPkxVVRWKonDppZeye/duli1bxpYtW6irq+Pw4cO8/fbbVFVVAZCQkMBFF13E8uXLWbp0KSUlJZSVlbFt2zbee+89rfKNj4/XJoDZ7Xb279/PN998Q0VFRYdfg9VqJSEhgZUrV1JTU8ORI0fYvHkz27dvb7av2WwmMjKSqKgoVq1ahdvt1sqkqioejwefz6c1T9XW1rJlyxYWL17cLLnpdDri4uK0mI3vwenlGjt2LNXV1bz88suU
lJRQX1/Phg0bePnll1m6dCk6nY7w8HCuueYaPvnkE7744gsKCwtxOp0UFhZ2ePin1+vF5/NhMpkIDw9HURR27NjB559/TnHxyWt96fV6oqOjyc3NZfHixezatYvi4mI2b97MunXrOnxhms4gZwbnq7PG7p9HzIDfz7ovFpM+eCgxCUkYjE2HpW37ZiWbVnzB8UMFAOgNBn7+5AvN9gOIikskNjmVuppqDu7aRs6EC1qM2T5yWtAemzdvJiwsjMzMTFJSUrSj5tGjR5OYmIjdbmfXrl1ceOGFDB06lJtvvlkbQRMIBAgPD2fWrFlaU4PNZiM3N5fvfe97rFixgs8//xyz2UxSUhIXXHCBFjc/P5+6ujpWr17NL37xC1JTU0lLSzuniiguLo5p06bx/PPP85vf/IbExER8Ph8ZGRnN9lUUhdjYWG644QYWLlzIkiVLyM3NZeLEiYwdOxabzcaNN97I8uXLWbZsGaGhoYSHhzNy5Ehqa2ubPJderycpKYkpU6bw6aefsmjRIqKiopg4cSJXXHEFOp2O2bNnExcXx/bt2/nLX/6C1+slNjaWnJwcBg4cqDV/XXvttej1erZv38769euBk8nmxhtvbNJfcDZWq5VLLrmEbdu28fzzz2MwGEhMTGTAgAHaGV1jzO9///v85z//4dVXX8VkMmG1Whk1alRQZjtLMjhfZ/1+dEGP6elzulSVQMDPzrXfMH7GbMKioptN/g2NiCAxbSBej4ey40fZump5iz94RVEwmkwkZ2RScvQIh/fs+m8yCMbr7CcGDx5MbGwsmZmZTdqgIyIimD59Og6Hg5iYGBRFISwsjAsvvJCYmBiOHTuG0+kkMjKSzMxMraPYYDAQHR3NzJkzCQ8Pp7q6GoPBQFJSUpPKOTU1lfHjxxMSEkJNTY02xt3r9WpJacCAAcyZMwer1UpmZibh4eFERkaiKArz589n4MCBWCwWTCYT2dnZXHrppVRUVBAbG0tERAQmk6nJ0NVGYWFhTJs2TVuCIzk5mbCwMBRFwWQyMXfuXO3Mp7HZKyoqiqqqqiZNKIqiEBISwmWXXcbBgwex2+2EhIQ06dRNS0tDURRsNhtHjx7F7XaTlJTEiBEjSEpK0vYbMGAA06ZN4+DBg1RUVGhnJ6NHjyYiIkJLBiaTiRkzZpCUlERoaGiz12YwGJgwYQKRkZEcOXJEez+joqKor6/XmqAURSEvLw+Hw8GJEycIBAJERUVpZxMtPXdXkmTQVbS6UaGtilK7p/kf7QujqrgbGjiydxdXff9H2MLCm8XMyhlFVs4oKkuL2bpqOTvWrGotOABJAwdx/HABRQX7m3QOBvN19mXTpk1rcbuiKMyaNavZ9tTUVFJTU9t8Tr1eT0ZGRotH5o2sVit5eXnk5eW1us/gwYMZPHgwgDY/odF3vvOdJrfj4uK4+eab2yxXI7PZTFZWFllZWc3uUxSFkSNHMnLkyHY9l6IoTJ06lalTp7Z4f+P4/7bei8YO8jFjxjBmzJg241ksFubPn9/mPoMGDdLmh7QVMzw8nMsuu6zN/bqL9Bl0FaXlG2e2Pmpzes+xXvR5PVQUH8dZ7yAhLZ0QbbZq6zG17a3EjE9ORQ2olB4/SuBsq0p20+sUQnQtSQbdrI1xN2fZo2U+r5fq8lKMRiOWUBs6ffOTvY7GDI2IwGQ243G5qK+zd6g85xqzvwvGGjUSU2KeTpJBT9fBz7dTJqu0MT68y/TzgUfBmGQkMSXm6SQZ9HQtLflzGr3egC0iEr/Ph6ehAb+/eWddR7mcTvw+H0ajCUuI9byfr13kRKFdeuMRp8TsWTFbI8mgJ2rr+3FGpak3GoiMjUdvNFJbVYn3XNeJOS2mvaoSVQ0QFhWFoSsvrtJzfge9Rm884pSYPStmayQZ9ESnfz/OUmHq9QYiomOJio2nrKgQ57lOFDstZtmJYyg6HQlpA9DpdF33he3A6xQntXYk2ZVHmBKzb8VsjSSDnqK1z/4s9bCiKOgMekZeMJUje3djr65s9kUKBAL4vF58Xi/+U2O+fR4PXo+nyWihxnVhDu/ehcFgJCu39SGH5+wcX6c4qbXE3JVHmBKzb8VsjSSDTnUe2fycP3sVvd7AzOtvYce6byg/XoSqNp1QtnfzBv70o+9w16zJPPe7X+D3+bhtQjb/MzmXpe++2WTf2spyCvftIiwyipGTp7Qa85xJpS9EjySTzjrVedR05zwHS0FRYMCQYWQMG8GJI4eI3pNI5oj/TtjJyh3FXf/3GG6Xq+kjFYWwyKgm27764F0GjRjJ0NFjCQlteoWt02Oesz4818zr9bJ7927+9Kc/9fmLp4ue68z1mdpLkkGX6WCtdx4V5MllJMzMuuE2zNYQImLimtxvMlswxbXvYt6jp0xDURQiY+PbWaF13+vsycLCwsjKyqK2thbXGUlXtI/X68XhcFBeXs6QIUOCXZxeS6/Xk5+fT3R0dIcOSiQZdJnOqPU6VtEOHnV+bfyKopAxfESHYgbjdfZE8fHxXHLJJWRmZga7KL1WVVUVu3fvZseOHfzoRz+Ss6vz0LgcSeO1J9pDksF5arUa65T67Szr/PTxmL1JUlIS8+bNC3YxerWDBw/y8ccfs2DBAu68884WLxwjuo6k3vPUajXWhfVbf4kphOg+kgyEEEJIMuhUZxtx2RXzSPpLTCFEl5Jk0JnO1mRyrk0qarM/+mZMIUTQSDLodudw2HzelWsviSmECBoZTXQOAn4/DQ4HddXnNrlDtMztagh2EYTotyQZdJCiKFRXlPP5O2+wcfkXwS7OWf33spXQ09tvjhXsI9wswwmFCAZJBh00fPhw5sycoV0ztScLqCpffPEFQ4cOJT4ujpCQ9s1CDpakUbmkpKSQnJwc7KII0e9IMuig2bNnM2HChGAXo128Xi85n3zCpXPnMn36dNLS0oJdpLMyGo1ERUWdfUchRKeSZNBBUVFRvaay8ng8qKpKVFQUqampZGRkBLtIQogeSkYTCSGEkGQghBBCkoEQQggkGQghhECSgRBCCCQZCCGEQJKBEEIIJBkIIYRAkoEQQggkGQghhECSgRBCCCQZCCGEQJKBEEIIJBkIIYRAkoEQQggkGQghhECSgRBCCCQZCCGEQJKBEEIIJBkIIYRAkoEQQggkGQghhECSgRBCCCQZCCGEQJKBEEIIJBkIIYRAkoEQQggkGQghhECSgRBCCCQZCCGEQJKBEEIIJBkIIYRAkoEQQggkGQghhECSgRBCCCQZCCGEAAzBLoA4f6qq4nK5UFW1yXav16v973K5cDqdTe43mUzo9XoURem2sgqhqip+vx+Px9Nku8vlwuv1oigKDQ0N2ve3kaIoWCwW+b52EUkGfUAgEGDZsmU0NDQ0SQg+n49AIMCuXbsICQkhPj6+yeMmTJhAQkICFoulu4ss+jGXy0VpaSnr169vsr2srIwDBw5gsVj49NNP0ev12n2KomC1WpkzZ06T7aLzSDLoA3w+H++99x4LFy5sdvQfCAR44403eOutt5ocUdlsNpYsWUJKSkp3F1f0cwaDgZKSEu68807
q6+ub3BcIBPD5fHznO99psj00NJTLL7+cmTNnSjLoItJn0Afo9XqmT5+OTqfD4/E0+Qfg9/vxer3atkAgwKRJk4iKipIfluh2BoOB6OhoJk6cSCAQaPJ99fl8AM2+x3q9nhkzZsj3tQtJMugDdDodkyZNIjw8HJ3u7B+pqqpMmzaN8PBwaX8V3U5RFCIiIpg2bVqzfq6W6HQ6wsPDmThxonxfu5Akgz5AURQyMjIYOHAgNpvtrD8YvV7P5MmTsdls3VRCIZqy2WxMmjSpXUf6NpuNAQMGMHDgQEkGXUiSQR+gKAoGg4GpU6eSnJzc5tmBXq8nJiaGUaNGERIS0o2lFOK/rFYro0ePJjY2ts2EoNPpSElJYerUqRiNRkkGXUiSQR8yd+5cEhIS8Pv9Ld6vKArh4eFce+21mEwm+WGJoFEUBbPZzPz58wkLC2v1uxgIBEhOTmbOnDndXML+R5JBH5Kfn09mZibh4eEt3q+qKmazmWuuuUY64kTQ6fV6rr32WoxGY6t9BzabjYyMDMaOHdvNpet/JBn0IXq9nry8PHJyclqs7E0mE/Hx8YwePVrOCkTQ6XQ68vLySEhIwGQyNbtfr9czatQo8vLyMBhkFHxXk2TQhyiKQm5uLkOHDm12pKUoCnFxcYwZM6bVMwchult4eDh5eXnExMQ0O0BRVZXhw4eTk5MjBy/dQJJBHzN48GCysrIwGo1NtiuKQlJSElOmTEFRFPlxiaBr/B5OmTKFxMTEZt9Jg8FAVlYWgwcPDlIJ+xdJBn1McnIyWVlZpKamNtmuqioJCQlMmTIlSCUTomVTp04lPj6+2dlsSkoKWVlZJCUlBalk/Yskgz5o0KBBzJkzp0k7a1xcnHbWIERPMmTIEIYMGUJcXJy2zWg0Mm/ePAYNGhTEkvUvkgz6oMzMTGbNmqVN7dfr9UyaNInJkydL85DocRRFYfLkyYwbN04b+OD1epk5cyYDBgwIcun6D0kGfVBYWBgZGRlkZGQAJ8dqZ2dnM2rUqCCXTIiWjR49muzsbAKBAAADBw5k0KBBhIWFBblk/Yckgz7o9IXAFEUhNjaWQYMGkZycHOyiCdGilJQUMjIytFFFkyZNIjo6utlACNF1euTg3UAgQH19PSUlJe1ayEo0V1tbS2ZmJoqiEB8fTyAQ4MSJE8EuVq8VGhpKdHR0tyzhoaoqJSUl1NfXa0fK/UVcXBxVVVVkZmZy4sQJ6urqgl2kbhMSEkJ0dDShoaFBid8jk4HH42Hbtm38/e9/R1VVaec+B4FAAJfLhaIoBAIBFixYwOeffx7sYvVKgUCAMWPGcN111zF06NAuj+fz+XjzzTfZuHGjduWv/sBut2u/902bNrF79+5+89oDgQBDhw7lmmuuIT8/Pyhl6JHJwO/3U1RUxMKFCxk7dqxMkjpHqqoSGRlJREQEfr+/Xx1ldZZAIMCOHTvw+XxMnz6922KuX7+edevWkZiYSGRkZLfEDbZAIEBERASRkZH4fD4cDkewi9Rtdu3aRVlZGRdccEHQytAjk8Hp7rjjDoYMGRLsYvRKqqpy6NAh0tLSZMXHc+T1enn44Ye7fTkEVVUZPHgw119/PTk5Od0aO1hUVcXj8XD06FGysrL61ff1//7v/6iurg5qGXp8MggNDe2SM4NgND91d0xVVRkyZAghISHtuuhNZ8btK++tx+PBYDAEpWIyGAzNvv996b1tSSAQwGq1YrVa+/TrPDNmS2szdbcenwy6SjB+3N0dU1GUoHRG9Yf3Nlj6+nur0+mC1oHa19/bs+kXQ0uDMSJJYvatmP1Ff/k8+0vMjugXZwZ9NeP7/X5tljGgjcnuztfbk99bVVW1C/3odLpubSrrrbr68/T7/Vql2NgP01O/Q43fH1VVO6WpsCedBbSkXySDvuqNN97g8ccf18aif/DBBwwYMEAm6pzmqaeewm63M3PmTCZNmhTs4vR7ixcvZsuWLZjNZn7+858HuzhtstvtPP/88zgcDu64444+v2CeHCr1Ytdccw0ff/wxL774Yo8/BQ0WVVXlvelBGj+P3vKZ9Kaynq8+e2YQCARwOp388Y9/JDw8nKuuuqpbJgx1J5vNhtlsBjrnFLRxKOoXX3xBdnb2eS13HQgE2Lt3Lzt37qSgoACn00lqaioXX3wxaWlpWK3W8y6vaG7dunWsW7eOoqKiFu83GAxceOGFzJgxg3//+9/s3bsXv9+PoihYrVYyMjKYOHEiAwYM0L5b4r8aV0f49NNP2bNnD4qikJ6eTm5uLuPGjQt28c5Ln00GHo+HY8eOsWrVKu3yeenp6d2ynEBvVllZydq1awkJCTmvZHDgwAG++eYbDh06hKIoGAwG7HY7hw8fxmazSTLoInq9HpPJhMViAWDt2rWYzWaSk5NJSEhAr9drbfWbNm2ioKCAtLQ0EhISUFWVLVu2oNfrUVW1zx08dQa73c6OHTtYsWIFkZGRqKrK7t27qa6uJiEhgbS0tB7fN9CaPpsMGhoa2L59OykpKRw/fpzi4mIqKyubXfTF4/HQ0NCAw+HA6/ViNBqJiIjAYrFoPxpVVXG5XNTV1eF2u7UOpejoaMxmM4qi4HA4qK+vx2q1YrPZUBSF+vp6XC4XANHR0TidTmpra7U2fp1Oh9VqxW63YzabCQ8PJyQkBL/fT2lpqVZpGgwGvF4vDocDj8dDXFxchzpDfT4f9fX1OJ1OPB4PcPKsIiIiQvvhu91uSkpKKCkpweVyUVVVxeHDh1EUhbCwMKKiotDpdKiqitfrxW6343Q6UVUVs9nc5HUHAgGWLFnCzp07SUtL4/rrryc9PZ3Dhw9TUFDQZGapqqqUl5drf/t8PrxeL6qqEhsbq82R8Hg82O12GhoaALBYLNr70/jj8/l81NXV4XQ68fl8GI1GfD5fvznNB8jPz9eWM1BVlXvvvZeoqChmzJjR4uzWIUOGcN111zF58mRqamr429/+xrp16zAYDNrlU/1+P7W1tTQ0NOD3+7XvbXR0NPDfs1JVVWloaKC2tlZbRqPxe934OzldYxNMRUUFcHJtHrPZTGlpKbGxsZjN5ibfc1VVsdvtuN1uQkNDCQkJwefzUVNTo30vTCYToaGh2mqnjTHr6+txOBzodDp8Pp/2Ow4JCdF+dz6fj+rqau0+t9utDUBojF9SUsKnn36Kw+HgwQcfxOfzsXDhQjZt2kRKSgo33HBDp3yOwdAnk4GqqjgcDtavX8/48eM5dOgQJSUlHDx4sEkyUFWVI0eO8M033/DZZ59x7Ngx0tLS+Pa3v83YsWObXH1py5YtfPDBB+zZs4eGhgbS09P58Y9/zMiRIzEajaxYsYIlS5Ywbdo0rr76agBWrVrF5s2b0el03H///axfv56XXnqJuro6FEXBYrEwY8YM3nzzTXJzc7n++uuZPHkyDoeDX/ziF8yePZvZs2cTGxtLWVkZH3zwAYcPH+bhhx/GZrO1+/2oqKhgyZIlrFy5kgMHDmAwGJg1axbf/va3SUxMpKGhgT179vD9738fVVUJBALs3r2bV155BZPJxI033sidd96J1WrF6/
[base64-encoded PNG data omitted: the notebook's stored matplotlib rendering of the inner-loss computation graph, which this hunk deletes in favor of an inline SVG display]
-      "text/plain": [
-       "<Figure size 720x720 with 1 Axes>
" - ] - }, - "metadata": { - "needs_background": "light" - }, "output_type": "display_data" } ], "source": [ + "init_net_state = torchopt.extract_state_dict(net, enable_visual=True, visual_prefix='step0.')\n", + "\n", "# inner loss\n", - "loss = loss_fn(net(x), y)\n", - "print(f\"inner loss: {loss:.4f}\")\n", - "torchopt.visual.make_dot(loss).render(\"full_graph\", format=\"png\")\n", - "plt.figure(figsize=(10,10))\n", - "plt.imshow(imgplt.imread('full_graph.png'))" + "inner_loss = loss_fn(net(x), y)\n", + "\n", + "print(f'inner loss: {inner_loss:.4f}')\n", + "display(\n", + " torchopt.visual.make_dot(\n", + " inner_loss,\n", + " params=(init_net_state, {'inner_loss': inner_loss})\n", + " )\n", + ")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Conduct inner-loop optimization with MetaSGD, here the meta-parameter is served as a factor controling the scale of inner-loop loss." + "Conduct inner-loop optimization with `MetaSGD`, here the meta-parameter is served as a factor controlling the scale of inner-loop loss." ] }, { "cell_type": "code", - "execution_count": 57, + "execution_count": 5, "metadata": {}, "outputs": [], "source": [ "# inner-step optimization\n", - "loss = loss * meta_parameter\n", + "loss = inner_loss * meta_parameter\n", "optim.step(loss)" ] }, @@ -153,56 +146,47 @@ "metadata": {}, "source": [ "We compute the outer loss and draw the full computation graph of the first bi-level process. In this graph, three main parts are included.\n", + "\n", "- Inner-loop: forward process and inner-loss calculation\n", - "- Inner-loop optimization: MetaSGD optimization step given inner-loss\n", + "- Inner-loop optimization: `MetaSGD` optimization step given inner-loss\n", "- Outer-loop: forward process and outer-loss calculation" ] }, { "cell_type": "code", - "execution_count": 61, + "execution_count": 6, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "outer loss: 1.5181\n" + "outer loss: 0.2297\n", + "\n" ] }, { "data": { - "text/plain": [ - "" - ] + "image/svg+xml": "\n\n\n\n\n\n%3\n\n\n\n139975938634752\n\nouter_loss\n ()\n\n\n\n139975938188288\n\nMseLossBackward0\n\n\n\n139975938188288->139975938634752\n\n\n\n\n\n139975938188336\n\nAddmmBackward0\n\n\n\n139975938188336->139975938188288\n\n\n\n\n\n139975938188096\n\nAddBackward0\n step1.fc.bias\n (1)\n\n\n\n139975938188096->139975938188336\n\n\n\n\n\n139978603490800\n\nAccumulateGrad\n\n\n\n139978603490800->139975938188096\n\n\n\n\n\n139978603489744\n\nAddmmBackward0\n\n\n\n139978603490800->139978603489744\n\n\n\n\n\n139975938634512\n\nstep0.fc.bias\n (1)\n\n\n\n139975938634512->139978603490800\n\n\n\n\n\n139975938188480\n\nMulBackward0\n\n\n\n139975938188480->139975938188096\n\n\n\n\n\n139975938188144\n\nViewBackward0\n\n\n\n139975938188144->139975938188480\n\n\n\n\n\n139975938187664\n\nSumBackward1\n\n\n\n139975938187664->139975938188144\n\n\n\n\n\n139975938188720\n\nMseLossBackwardBackward0\n\n\n\n139975938188720->139975938187664\n\n\n\n\n\n139975938189200\n\nTBackward0\n\n\n\n139975938188720->139975938189200\n\n\n\n\n\n139975938188816\n\nMulBackward0\n\n\n\n139975938188816->139975938188720\n\n\n\n\n\n139975938188912\n\nAccumulateGrad\n\n\n\n139975938188912->139975938188816\n\n\n\n\n\n139975938635072\n\nmeta_parameter\n 
[remainder of the inline SVG markup omitted: a graphviz rendering of the bi-level computation graph, whose recoverable node labels are outer_loss, meta_parameter, the step0. and step1. copies of fc.weight (1, 16) and fc.bias (1), and autograd ops such as MseLossBackward0, AddmmBackward0, MulBackward0, and AccumulateGrad; the deleted base64 PNG output that the SVG replaces is omitted as well]
h4OHhgZeXF+3t7a42RSAYEwjBGSb2BXoCgeD6CMEZJhMnTqSkpMTVZggEYwIhOMNAkiRH6AYxNV8guD5CcIZJbGwsnZ2ddHZ2utoUgcDtEYIzTDw9PQkNDaWiosLVpggEbo8QnBFgzpw5HDlyRHSrBILrIARnmEiSREZGhiO6nUAguDpCcEaAwMBAwsLCOHXqlKtNEQjcGiE4I8SSJUv47LPPnBpyVCAYawjBGQEkSWLKlCl0dXVRXl7uanMEArdFrBYfIdRqNcuXL+ejjz7iu9/97pjKC3327FkKCgqora2lpKSEDz/8EG9vb+bPn39TZhawc+7cOQoKCmhtbaWsrIzNmzfj7e3NwoULRzS98s2EEJwRQpIk5s6dy9atWzl37hzJycmuNumGqamp4cknn8RoNGKz2di8eTPLli3j1ltvdbVpLqW2tpannnqK/v5+bDYb27dvZ8WKFdx2222uNm3MIrpUI4inpyf33HMPH3zwwZgKrp6enk5iYiJGoxGz2YzJZGLVqlU3/VN8ypQpJCQkOOrFYrGwfPlyR24qweARgjOCSJLELbfcQl9fHydOnBgz83ICAwO58847He8jIyO59dZbb/pgUz4+PixdutQR5c+eb0swdITgjDAqlYonn3ySf/3rX/T09LjanBtCkiRWrVqFl5cXALfccguxsbEutsr1KJVKVq1a5UjwN2/ePKKjo296IR4OQnBGGEmSmDBhAunp6WzcuHHMRANMT09nypQpKJVK7rvvPtFtuMCkSZPIzMzEw8OD1atXj6nBAHdECM4oIEkSDzzwALm5uWMmdIVer2flypUkJCRckoHzZsfb25tly5YRFRXFggULROtmmIjH2CggSRJ6vZ4vf/nLvPXWW/zkJz/Bx8dnUMcoKyvjZz/7Gd7e3qNk5RdpbGxEqVTyi1/8wmk3ltVqJTY2lhdeeGFI+x8/fpw333xzVIfvGxoa0Gg0vPjii6OWtcFmsxEaGsqPfvSjce2sF4IzStgnA6anp/PPf/6Tr3zlK4Nqjnd2duLj48P9998/ilZeitlspr+/H19fX6eV2dbWxs6dO4e8f3NzM+Hh4aOaS8tsNtPX1+fIGT4a9PT0sGXLljHTBR8qQnBGEXvX6v/9v//HiRMnmDlz5qBaDl5eXgQGBg5qn4tHxgbbShnOvkM9jizLw/aL6PV6goKChnWMy7GfgyRJl7weznGuhUajuSn8ZsKHM4pIkoROp+M//uM/WLduHY2NjcMeKpdl+Qt/F5Ofn88vfvELR0Cwa217JXbu3El9ff2wbOzr6+Pjjz/GZrMNquyR5Er1NBhbioqKyMnJAc7/jkMVm08++YTOzk6X1YO7Mf4l1cVIkkR8fDxr1qzhD3/4A88///yQ/Q2yLPPhhx/i6elJfHw8O3fu5Bvf+MYlff4pU6Zw8uRJTCYTAJs3b6aiogIfHx9Wr15NcHDwNctobm4mMTHxC62UwdwoFouFhoYGjEYjW7dupaWlhfnz55Oenu4039CRI0c4duwY7e3tpKSkkJiYyM6dOwkODmbu3LmkpaVdc/+enh76+vqGJRCyLNPY2IjJZOLgwYMUFBSQmJjIkiVLbtrRLiE4TkCSJO644w5KS0v58MMPWbt27ZCdjz09PVRWVtLb20tLSwu5ubloNBp8fHxoaGhg7ty5l2zf2dnJ0qVLOX36NKdPn6anp4dDhw4hSRL33nsvXV1dfPLJJ+j1epYuXQpAR0cH69evJzY2FoVCQVZWFhUVFdTW1qJWqykqKiIyMpIlS5Zw7NgxsrOz8fPzY8mSJezfv5/29nZ6e3upr6+npaWFlStXsnHjRtLT04ddlzfKnDlzCAkJYf/+/Tz88MMcO3aMtLQ0EhISOHbsGFFRUezatYu2tjZWrFhBYGAgH330EUajkQULFgBgNBr5+OOPiY2Npbq6muXLl9Pb28u+fftISUnhyJEjqFQq7r33XlpbW3n//ffx8/Nj3rx5NDQ0UFZWRk1NDWazmc8//5wnn3ySd999l5kzZxIYGOi0unAnRJfKSahUKh5//HGys7MpKCgY8pNToVA4UtMEBATQ29tLT08P/f39V4yrbDKZ2LRpE4WFhQQHB6NSqYiOjqa2tpby8nIOHDjAtGnTWL16NVqtFqPRyLp160hMTCQ6OprTp0/z17/+lZycHLy9vdHpdERFRXH06FH6+vro7u5m4sSJfOlLX3I4ne+66y5UKhW9vb2o1WoqKysxGAwu704cPHiQ7du3Ex4ejiRJhISEoFAoyM7Opri4GLVazdq1awkJCcFqtbJ9+3YMBgMTJkygoaGBLVu2cPjwYSwWC2q1mujoaCoqKqiursZoNKLT6XjyySdJSEggJyeHhx56CH9/f4xGI5IkcfbsWWRZHlPLXkYaIThOxNfXl29+85u89dZbtLS0DPkGnDVrFg899BAKhQKNRkN3dzfNzc1YLBYkScLDw8ORglij0XDXXXexaNEiCgoK+PTTT4HzwmU2m9FoNPT19WEwGLBarahUKpYsWeIQmMrKSvr7+zl9+jQBAQGO1pDJZHI4fIOCglAoFCiVSsxmMwMDA1gsFvz8/BgYGCAsLAy9Xu/yOSzz5s3j0Ucfpbi4mOPHj1NfX49Wq2VgYACNRoPBYMBoNGIymVAoFMyaNYuWlhZ6enpQKpXU1dVx8uRJoqKi2LFjByqVCkmSHAISFBSEUql0+HwGBgYwGo14enoiSRJRUVHA+QSKNyuiS+VE7Gll7rzzTv7whz/w7LPPDvriS0hIICgoCH9/fyZNmkRqaiqffvopBoOBhIQEx6r1AwcOOJ7CBw4cwMfHh9tuu42GhgZOnTpFQkICAQEBxMfHs3v3btrb27njjjtISkoiMTGR8vJyGhsbycrKIikpydFCSk9Pp6qqimnTpqFWqwkPD3dM/Q8LCyMxMZGjR4+SlpZGREQEqampHDlyhFWrVo1GlV4Tb29vJkyYAJwXg6NHj9Ld3c2SJUsIDAxkz549aLVaoqOjSUlJoampie3bt7Nw4UJCQkLw8fFBp9NRXl7OtGnTMJvNNDU1OY5ZWlpKcnIyfn5+qNVq4uLigPOhSu644w527txJYmIier2eZcuWsWfPHubPn+/UaQfuhuTqZu61yMrKkk+cOOFqM0Yck8nE7373OyIiIrj//vuv6M/Jzs5mw4YNPPbYYy5vGYwmLS0tbNiwgd/97ndD2n/btm0UFBS4RNBGku7ubv7+97/z2muvOT0GUXd3N88++yyvv/76iA3NS5KUI8ty1uWfiy6VC9BoNDzxxBMcOXJkWP4cgWCsIQTHRfj5+Y2IP0cgGEsIH46LuNif8+abb/Lcc89d4s+RJImKigr279/vVLtkWXZqF667uxuj0Tjk/RUKBWfOnGHfvn0jZ9QVsNlso7aOCmBgYICBgYFRO767IATHhSgUCpYsWcLZs2dZv349Dz30kGNCWFxcHKtWrXLqBLHGxkby8vIc83GcgZ+fHzNnzhzy/mlpabS1tY2qSHZ0dHD48GFWrFgxaqLj5+dHeno6arV6VI7vLgjBcTEajYYnn3
ySn/3sZ8THxzNnzhwkSSI4OJgvfelLTrWluLiYvr4+HnnkEaeWOxyio6N59NFHR7WM6upq6urqePjhh2/aGcIjhfDhuAE+Pj48/fTTvPvuu1RUVAh/jmDcIgTHDZAkidjYWJ544glee+01mpubhegIxiVCcNwESZKYMWMGy5cv59VXX6Wnp0eIjmDcIQTHjVAoFCxevJiMjAxee+01+vv7hegIxhVCcNwMpVLJ/fffT2RkJL/5zW/cYtGjQDBSCMFxQ9RqNY899hgBAQG88cYbw5qnIhC4E0Jw3BT7cLlOp+ONN964KSaFCcY/wxIcSZL8JEnaIEnSaUmSSiRJukWSpABJknZKknTmwn//C9tKkiT9VpKks5IkFUiSNG1kTmH84uHhwVe/+lW8vb359a9/TV9fn6tNEgiGxXBbOL8BdsiynAJkACXAc8BuWZaTgN0X3gMsA5Iu/H0NeHOYZd8UaDQavvzlLxMaGsovf/lLurq6XG2SQDBkhiw4kiT5ArcCbwHIsmySZbkTWAP87cJmfwPuuvB6DfB3+TxHAT9JksKHWv7NhFqt5vHHHyclJYWf//znNDc3u9okgWBIDKeFEw+0AG9LkpQrSdJfJEnyAkJlWW64sE0jEHrhdSRQc9H+tRc+uwRJkr4mSdIJSZJOtLS0DMO88YVKpeKBBx5g4cKFvPTSS1RVVYnRK8GYYziCowKmAW/KspwJ9PHv7hMA8vk7YlB3hSzLf5JlOUuW5azrZRi42VAoFCxbtoy1a9fy8ssvk5eXJ0RHMKYYjuDUArWyLB+78H4D5wWoyd5VuvDf3v6vA6Iv2j/qwmeCQSBJErfccgv/+Z//yZ///Gf27NmD1Wp1tVkCwQ0xZMGRZbkRqJEkaeKFj24HioGPgMcvfPY4sOXC64+AL10YrZoNdF3U9RIMAkmSmDhxIv/1X//FZ599xrvvvovRaBxya+fyJG0iadt5Lq+Tyz8TDJ7hhqd4BviHJEkaoBx4kvMi9oEkSU8BVcADF7bdDiwHzgL9F7YVDBFJkggLC+O//uu/+POf/8z//u//8swzz+Dv7+9IXGcPpnW9WDGnT5/m6NGj1NfXk5uby7p16/D39+fOO+9Eq9U66Yzcj9LSUo4dO0ZrayuFhYWsW7cOPz8/Vq1adUnyQcEguF5KVFf+TZ8+XRZcH5PJJG/cuFF+5pln5NLSUtlms8mnT5+WX3vtNdloNF53/3379slardbub5MB+e67776hfcczBw8elL28vC6pl3vuuUc2mUyuNm1E6erqkr/xjW/IZrN5xI4JnJCvcE+LAFzjALVazV133UVsbCy/+c1vWLx4MX/729/Yt28f8fHxrFq16pqtnClTppCWlsbx48eB887pe++996Z/iqelpZGens7Ro0eBf9fzSGU2uBkRSxvGCQqFgmnTpvH888/zpz/9ia1bt9LZ2cmPf/xjKisrr+l38PPzY9myZY73sbGxX0gZfDPi4+PDsmXLHGFFw8PDWbhw4bhO2zPaCMEZZ5w5c4acnBwsFgsARUVFvPjiixgMhqvuI0kSK1euxMfHB4C5c+cSHR191e1vFhQKBStXrkSv1wNw2223ER4u5qoOByE44wiLxcKRI0fQarWODBA2m43169fzz3/+E5vNdtV9U1NTycjIQKVScd9994nYvRdISkpixowZeHp6Oj2o/XhEdEbdFJvNNqSwFN/61rd44IEHyMnJYefOnXz++eecO3eOl156iczMTFJSUq64n0KhYOnSpTQ3N5Oenu7U1ekKhWLI+batVismk2mELfo3SqWSRYsWUVlZSVZW1jVbisPFnit+PHfZhOC4KaWlpfz3f/83AQEBQz6GJElMmTKFqKgoWlpa+P73v09SUtJVL+jW1la0Wi0vv/yy0y56i8VCREQEL7744pD2z87O5ne/+52j2zMatLW14e3tzS9+8YtRqxer1UpAQAA/+9nPxrWzXgiOm9LT00N4eDhr164dsWNeL5mbxWLBYDCM6s17Oa2trWzfvn3I+7e3t5OYmDiqubScUS89PT1s2LDhmt3e8YAQHDfGw8MDvV5/Q09Vs9mMzWZzdE3MZjOyLA/6aWkwGFCr1cPyVdi7g56ente13WAwDDu5nKenJ97e3sM6xuUMtf4uRpZlDAYDHh4e1z1HWZZHNbOnuzD+z3CcIcsyRqOR3t7eSz4vKSnh0KFDjvc5OTmcOHGCvr4+Wlpa6Orquu7TU5ZlPvroI6qrq4dlY09PD//617+wWq309/fT3t7uGDVzFr29vbS0tNDS0kJ7ezsGg8Hx+kZsyc3N5fDhw8OyQZZlPvzwQ9ra2jCZTA47bmZEC2cMsn37dk6dOsVzzz2HUqmks7OTrq4uzGYzVquVzs5Ouru78fT05IMPPsBoNGKxWFiyZAkJCQl0dnai0Wjw9vZGlmW6u7uRZRlfX19MJhM2m42enh6USiVKpRIPDw8sFgsmkwmFQkFPTw96vR5PT0+MRiMDAwOYzWb8/f0xGo2OG6u3t5d33nkHjUZDbGwsd955p9N8Q+fOnePYsWOcO3eOhQsXotPp2L9/PxEREcTFxbFo0SK6u7ux2Wz4+fkhSRJ9fX0YDAb8/PywWq1YLBb6+/uB8+Kh0+mQZZn+/n48PDzo6upytEKtViu9vb2YTCb0ej1KpZLu7m76+vqwWq1s3LiR3t5elEoljzzyyJCd5GMdIThjDIvFQnV1NTqdzvHEPnDgAD09PUyaNImTJ09y8uRJWlpamDt3LrIsk5qaSkVFBR0dHZw9e5bi4mIaGhp48MEHaWho4PPPPyciIoL58+cD5x3WR44cYcKECfT09DBr1iwqKipoampCq9XS0NDAwMAADz30EPv37ycnJ4f09HQyMzPZvHkzXl5etLa20tDQgIeHB4sXL2b9+vUsWbLEaYKTkZGBl5cXSqWSpUuXcuzYMWJiYoiJiaGuro62tjaOHDlCfX09c+bMISIigg8++ICwsDAyMjKQZZmWlhbWr19PVlYWOTk53HXXXfT19bF7924yMjI4c+YMDQ0NPPzww3R0dPDHP/6RGTNmMH36dD7//HNkWebMmTMYjUaqqqp47LHHeP/99+nu7uZmDb0iBGcMIcsyVVVVDAwMEBERQWFhIR0dHSxatIj29nY6OjooLCxk2bJllJaWolAosFgslJSUYDabkSQJi8VCX18ftbW11NbWUlhYyKJFi5gwYQKSJDEwMMDWrVt54oknmDBhAhs2bODEiROEhIQwZ84cGhoa6O3t5fTp046h81tvvZW5c+dSXl6Or68vy5Yt47333sNsNgPnHcPuEEKjqqoKAJ1O5+judXZ2UlRURG9vL/Hx8dx5550ANDQ0sH//fhYsWEBCQgKHDh3iX//6F1qtFj8/P2w2G729vVRXV9PQ0IBGoyE1NZV7770Xg8FAY2MjzzzzDF1dXY5zt2dUHe+O4WshfDhjjPz8fHx9ffHw8KCwsJDg4GCKi4spLS3FarUSHh7OqVOnOHPmDFarFaVSSXJyMrGxsdTX13Pw4EGio6MJCAjAZrM5hKukpISen
h48PT154oknyM7Oxmw209jYSFRUFFVVVfj4+HD06FEmTZqEWq0G/j2HRpIkfHx86OjooKioiJ6eHoKDg+nu7qa7u5vw8HCXzy+JjIwkLS2NhoYG8vPzgfPLOGw2G2FhYVRVVXH69GkaGhpQKBSsWLECo9FIeXm5Q6TOnTtHTEwMBw4cID4+3iE+cN55DefjUHt4eJCfn09DQwNarRatVktvby8KhQIvLy+X1YGrES2cMUZaWhqRkZHodDqKi4uJj4/n1KlTREZGEhUVRUBAALm5ucyYMYOYmBgiIiKorKwkMDCQjIwMurq6KC0t5Y477iA2Npa0tDQKCwsZGBhAq9Uyb948goKCCAsLQ5ZlHnjgAUJCQqirqyMkJIQ1a9bQ1tbGAw88gFarJS0tzTGSExgY6Jg8ePfddxMaGsqaNWuor6/n7rvvdnpdhYaGsmDBAgASExPp6Oigp6eHtWvXotPpyM3NRafTERQURExMDIsWLaK5uZno6GiSkpKIj49Hq9XS0tLCokWLsNlsdHd3k5CQgL+/P2fPnmXp0qVERUWhVCqZNWsWcF6EH3jgAUpLS7nvvvvw9/fnoYceoqioiPvvv9/tBMdmszltBrUQnDGEPfCWnbS0NADHhW5nzpw5jtehoaGXzC728vIiIiLiku2nT5/ueB0fHw/gGGYOCwsDcExAnDx58iX7Xry2SKFQkJycTHJysuOzpKQkkpKSbvQURxRvb2/HeQQHB1+yQBXOdwUv5mLbLx5m9/Pz+8Kxo6Ojv7De7OJ9QkNDCQ0Ndbz39PQkJCRkaCcyylitVqcNyYsulUBwk2M2m1Gr1U7p8grBEQhucoxGo9OWU4gulRtTUFDAxo0bXW3GqNLb2/uFSYyDJScnZ8znXzcYDC5LctjX14dWq3VKC0cIjpsyceJEfvCDH7ik7L/85S+sWLHCabFfgoKChrzvrFmz8PT0HNXA5i0tLXz00Uc8+eSTo+rr8Pf3d4z+OZOamhqioqKE4NzM+Pr6cscddzi9XFmWKS4uJjY29gtOVXckKCiI22+/fVTLqK6upqCggNtvv31cxsOpq6sjMjJS+HAErmHKlCkUFBSIdCg3ATabjerqaqdFeBSCI7gESZJISEigsrLypp4Re7PQ399Pa2srMTExTilPCI7gC4SHh2MymWhra3O1KYJRpqOjAw8PD8cs6dFGCI7gC6hUKkJCQqivr3e1KYJR5vTp0yQlJTnNNyUER3BF0tLSOHXqlKvNEIwisiyTl5fH1KlTnbbOTQiO4AtIkkRycjJlZWXCcTyO6enpoaqqitTUVKeVKQRHcEXi4uIcix0F45Pq6moCAwOduphUCI7gimi1Wry9vWlqanK1KYJRQJZlDh48yNy5c50aS1kIjuCqJCcnU1pa6mozBKNAT08PxcXFzJgxw6lxioTgCK5KamoqxcXFwo8zDjl16hQRERGO9M7OQgiO4IpIkkRiYiJ1dXU3faaB8YbFYmHnzp3ceeedTk9NIwRHcFX8/f1RKpViAuA4o7a2ls7OTlJTU50e9lUIjuCqSJJETEyMI/i4YOxjs9nYtm0bd9xxh0tS1QjBEVyT9PR0CgoKXG2GYIRoaWmhuLiYW2+91SVB7UV4CsFVkSSJpKQkPv30UywWCyrVzXW5nDhxgm3bttHZ2UlOTg4/+clPCA0N5atf/eqYTGRns9n4+OOPWbBggVPzx1/MzXUFCQZNeHg4BoOBjo6OmzJ5269//WtHZtJDhw7xyCOP8I1vfMPVZg2J5uZmcnNz+fnPf+6ylD2iSyW4JkqlkuDgYBobG11titOZOHEiU6dOdUwL0Gg0rF69ekwG4bKnG77jjjvw9fV1mR1CcATXRJIkR+6qmw29Xs+KFSscAhMVFcX8+fNdntBvKFRVVVFaWsrixYtdar8QHME1sefCKisru+kCckmSxPLlyx2T4+64444x2a00m82888473HfffS5PwicER3Bd4uLiaGtruykXcsbFxXHLLbeg0+lYtWqV0yfKDRdZljl27Bg2m43Zs2e7vHUmnMaC6+Lh4YGXlxdtbW0u7f/D+ad1T0+P05ZbyLLM/PnzOXv2LBMnTqS9vd0p5cJ5n5Ferx+WSHR1dfH+++/z3e9+1yUZIS5HCI7guthT+J4+fZqEhASX2nLq1Cl+9rOfOVX4uru78ff356WXXnJaC8FsNhMVFcXPf/7zITupbTYb77//PrNnzyY+Pt7lrRsQgiO4QVJTUzl06BDLli1z6YXb19dHamoqDz74oNPKtFqtGAwGp/o/Wltb2bRp05BbcrIsU1BQQFlZGS+++KLbdAXdwwqBW2OfAFhbW+s2CzklSRrUX1dXF0ePHqWnp2fQ+6pUKoxGI11dXYPe9+I/OJ8DymKx3NC2Q0WWZbq7u3nrrbf4yle+4rJJfldCCI7ghvD19UWSJDo7O11tyhVpamqivLwcg8FAYWEhZrP5ku+VSiUFBQWcO3cOOD/F/9ChQ+Tm5t6QiObm5lJcXDwsG61WK9u3b6evr4/W1lays7Npb28fcX+U1Wrlr3/9K7fccgspKSkjeuzhIgRHcEMoFApiYmKorKx0tSlX5MSJE/zlL3+hrKyM3/zmNzQ1NVFXV8fAwADV1dXo9fpLci/l5eVx5swZKioq2LlzJyaTiYKCAgoKCrBYLNhsNs6dO0dOTg4DAwOO/aqrq2loaKCurg5ZljEajVRVVdHV1cWxY8eoqKhwtDAKCwv5/PPPaWlpoampiePHjzMwMIDZbObdd9+lvb2d9957D5PJNGL1IMsyu3fvpqOjg7vvvtttulJ23MsagdsiSRKTJ0/m1KlTbhmQS6VSERwczK5du4iPj6ehoYFDhw7R3t7Onj17rrhPV1cXLS0taDQa2traGBgYID8/n/z8fMrKyti9ezdwPneTLMvk5+dz4MABDAYDu3btoqKigrNnz3L06FFqampQKpVs3bqV1tZWqqqq2LBhA97e3igUCt5//31MJhONjY309vY6nMK9vb0YjcYRqQNZlikvL2fLli1885vfRKvVjshxRxIhOIIbQpIkUlJSOHv2LBaLxdXmXJGJEyeSlJTk6P7JsozNZnNMWFQoFNhsNodgBgYGkpSURHNzM83NzeTl5XHu3Dlqa2upqalh0qRJTJs2jfDwcKxWK5WVlSgUCsLDw+np6WHLli0cOHCAqKgoKioqOHnyJOXl5Y75SpmZmUyZMsUxHD1r1izCwsIcrY7hDnlfjCzL9PT08Prrr/PEE08QFhY2IscdaYTgCG6Y0NBQBgYG6O7udrUpX0ChUBASEsKqVatQqVT4+vpSV1fHvn37HD6ayMhIjh8/TkVFBQqFgubmZpqamlCr1TQ0NKBWqwkMDEShUDBx4kQKCgrYvn07dXV1aDQaVq1ahVKppLCwEC8vL8LDw2lqaiI6OpqqqioCAwPx9vYGcDibAXQ6HTqdji1bttDc3IyXlxe+vr4cOHCAkJCQYbdEZFnGYrHwxz/+kaysLKZPn+4WQ+BXQnLH5rGdrKws+cSJE642Q3ABq9XKSy+9xH333cfkyZNd
YsOhQ4fYsWMHa9euveTzvr4+lEolnp6edHZ2otfr6ezsxGKxoNVq8fX1xWq10tzcjE6nQ6VS0drailKpJCgoCEmSaG1tRa1Wo9Pp8PLyoqOjA4PBQEhICCaTyTGCZDQaUSgUKJVKDAYDvr6+9Pf309XVhaenJz4+PsiyjNlsRq/XI8sy/f39dHZ24uHhgb+/v2MFfkBAADqd7gvn2dLSwqZNm3j11VevGxbEZrPxz3/+k7q6Or797W+7RegMSZJyZFnOuvxzMQ9HcMMoFAomT55MYWGhS8JTXouL58j4+fkBEBQUdMk2SqWS8PDwK+4D51tAFxMQEOB4ffFNf3EebrtYeHt7O1o3duw3viRJeHl5XVLe5e+HiizL7N27l4KCAl544QW3EJtrIbpUghvG7scpLS296RZyuiOyLFNUVMSmTZv49re/7fJlJzeCEBzBoIiLi6O1tZW+vj5Xm3JTI8syVVVV/O53v+M//uM/Lmm5uTOiSyUYFJ6enuh0Ojo6Opye0wjOd4uKior45z//6bQybTYbZrPZqd2Vvr4+h9/ocmRZpr6+nldeeYUnn3ySlJQUt+reXgshOIJBoVQqSU5OpqSkhNjYWKeXn5aWxnPPPefULl1TUxMffvghX//61506kS4gIOCK5bW1tfHKK69w3333kZWVNWbEBoTgCAaJJEmkpqZy5MgR7rzzTqdf7N7e3syaNcupZVZXV3Ps2DFmz57t8vCi7e3t/OpXv+L222/n1ltvdbuZxNdjbFkrcAuSk5Opqalxm4WcNwsdHR288sorTJ8+nWXLlo05sQEhOIIhYF99fDNGAHQVdrHJzMzk7rvvdnlLa6gIwREMGrVaTUxMDOXl5a425aagvb3dITb33HPPmGzZ2Bm7lgtchiRJTJo0yW0Xco4nmpubefnll5k2bZpDbMaSk/hyhOAIBo3dcXzmzJkvxJ0RjAyyLFNXV8f//d//ceutt3L33XePSHAuVyNGqQRDIigoiP7+fvr7+9FoNK42Z1whyzIVFRW89tpr3HvvvcyfP39Md6MuZnychcDpeHh4EBQURENDg6tNGVfYlyv88pe/5NFHHx2TQ9/XYvycicCpKBQKhx9HMDLYbDY+//xz3njjDZ5++mlmzJgx5rtQlyMERzAk7H6c06dPY7VaXW3OmMdqtbJt2zY2btzIj370I7dbjT9SCB+OYMjExMTQ3Nzs9BQq442BgQHeeecd6urq+PGPf0xAQMC4FBsQLRzBMPD09ESr1dLR0eFqU8YksizT1tbGL3/5S4xGIz/84Q/HtdiAaOEIhoFarWbChAmUlJQQGRk5rm+UkUaWZSorK/ntb3/L3LlzWbNmjVuk4h1tRAtHMGTsfpzh5mu6mbAHdv/88895+eWXeeihh7jnnntuCrEB0cIRDJPk5GQ2btyI0Wi8JPTmeECW5S/82Wy2IU/Ak2UZk8nE+++/z6lTp3j++eeJiYm5qVqGQnAEw8Lb2xubzUZfX9+4E5xjx46xceNGuru7yc/P54c//CGhoaF85zvfuW4wLlmWsVqtKJVKR8qalpYW3nzzTfz8/Piv//ovlwQwczVCcATDwsPDg+joaM6dO0dgYKCrzRlRPD09+fOf/+xIi5Odnc2Xv/zl62ZRkGWZvr4+fvGLX/DUU08RHx9PXl4eb731FitWrGDx4sU3TRfqcoTgCIaFfQJgUVHRuJuoNmHCBGbNmsVnn30GgFarZeXKlded+SvLMn/4wx949dVXqampYeHChRQVFfHtb3+bpKSkcVVHg0UIjmDYTJ48mUOHDmGxWMbVk9vLy4vly5ezZ88eLBYL0dHRzJ0795qCIcsyBw4c4JVXXsFoNPLBBx9gNpv5/e9/f0namZsVMUolGDaBgYEjmiPbXZAkiaVLlzrSryxduvSaomEPbv7ss8/S1NQEgMlkIjs7m9raWhHKAyE4ghFAq9USEBBAXV2dq00ZcWJiYpg/fz56vZ7ly5dfsztlMBh48cUXsWeLVSqV6HQ69Ho9ubm5QnAQXSrBCKBUKklJSaGoqIjk5ORR9VFYLBZ6e3udevPeeuutnDlzhokTJ9LZ2XnV7davX8+7776LTqdj5syZzJw5k/nz5zNx4kR8fHzo6uoaVLkajQadTjeufD4it7hgRCgsLGTLli08//zzoxpvNzs7m//3//4fYWFho1bG5fT19dHU1ERCQsJVt7FarXR1dWGxWFAoFOj1+mFNEzAajQQEBPDb3/52TMYvFrnFBaNKZGQkzc3NmM3mUb1BTCYTmZmZ3H///aNWxuXYbDYsFss1A43ZH9wj1RppbW3l448/HnfdMCE4ghFBr9fj4eFBW1sbkZGRo1qWUqm84SiDVqsVo9GITqcDznfJzGYzWq12UGXaHeLDiW4oyzIDAwN4enped2hdrVaPq66UHeE0FowI9oWcp0+fdpkNZrOZjo6OS1oFDQ0NfPzxx473VVVVfPrppxgMBhobG2lra7uheD5HjhwhNzd3WPZZrVY++OADenp6MBgMtLS0jLuRveshWjiCEcG+kDMvL4/bbrvNJWExT5w4wXvvvcf//u//otPp6OzspL29HZPJhCzLdHR00NHRgdls5vDhwxw/fpyAgABSUlKYO3euI8yGv78/kiTR3d2NyWTC398fi8WC1Wqlt7fX0fLQ6XTYbDb6+/vx8PCgo6PDMSplNpsdUwX8/PyA87ml+vv7sVgsbNy4EVmW0Wg0rF27dlzNX7oWQnAEI0ZSUhKbNm26rr9jNLDZbJw6dYpJkyZx5swZgoOD2bBhg8OOiooKtm3bhizL+Pr64u/vT2xsLF5eXrS2ttLQ0EBOTg51dXUsWrQILy8vtmzZQlRUFNOmTQOgrq6OsrIysrKyKCwsZPny5XR2dnLkyBEmTpxIVVUV9fX1PProo1RXV/Pee+8xc+ZMpk+fzqeffopWq6Wqqore3l7a2tpYu3Yt7733HgaD4aYRHNGlEowYPj4+WK1W+vr6nF52e3s7NTU1xMfHc+LECSoqKkhNTWXJkiV4eno6hOL2229Ho9EgyzLV1dW0traiVCqxWq309PTQ0tJCWVkZJSUlTJkyhVWrVhEZGYnFYmH37t34+/uTkJBAZ2cn77zzDvv37yc0NBSTyURnZyfnzp2jra0NWZbJzMxkzZo1+Pr60tvbyz333ENERISjC2ffbrw5hq+FEBzBiKHVaomKiuLs2bNOL/vs2bOOrkxNTQ0+Pj5UVlZy6tQpBgYGCA8P58yZM5w6dQqTyQScH1mbMGGCo3Wj0+mIiorCZrMRGRlJaWkpRUVFtLS0oFKpuO+++2hubqaurg5JktDpdJw9e5aoqCgOHjxIUlKSY/U8nF/YKkkSnp6eSJJEXl4ezc3N6PV6lEolnZ2d6HS66648H0+ILpVgxJAkiZSUFEpKSsjKynLqKEtkZOT/396dB0d533kef/9a3a1udat1IXSABLLMJW4iE/CJHcfxySQZ43MysWOPp6Yys9l1arLxXskmszXjytZmZz3xOJcNU+VJYsAXhiQDWOIyh4UAI5AAISGEkFq31FK
3Wn389g+1eiTAHDr66RbfV5VKz/Pr53n6+7Skj57n91y88MILZGZmMm/evOgdCL1eL0uWLKGgoCB6P5rc3FysVivHjx8HYN26dSQlJXH8+HGKioqYPn06OTk5mEwmuru7cblcLF68mKSkJJYvX47H4+GRRx7BYrHQ2dlJYWEh69ato6Ghga9//evk5OQQDAajl0FYLBbWrVtHfX09Tz31FOnp6Tz99NOcOXOGJ5988qZ6rpcEjphQCxcu5Fe/+hWhUOiat3GYSAUFBdHhefPmAbB48eJR0yxbtmzU+KUnD955552jxufPnx8dHnkYfeRtOIaXMWvWLGbNmjVq/uHOYqUU+fn55OfnR1/Ly8sjLy/vqus0FckulZhQWVlZ0aM7QlxKAkdMKKfTSWZmJo2NjUaXIuLQuLZ5lVL/CXgR0MBx4HkgD/gtkAUcBr6htR5USiUD/wJ8AegAntRanxvP+4v4YzKZmDNnDtXV1ZNyIafFYuH48eN4PJ4JXe6VBAKB6HvGms/nw2q1TrmzjcccOEqpGcB/AEq01j6l1DvAU8DDwE+11r9VSr0BvAD8c+R7l9b6VqXUU8CrwJPjXgMRV5RSLFmyhI8++oi1a9dO+B/M4sWL+Yd/+IdJP5QcDodZv349paWlLFmyZFLf6/OkpqZOqeeKw/g7jc2AXSkVAFKAZuA+4JnI6xuAHzIUOH8SGQbYBPyTUkrpm+kkhJtEfn4+brd7Uk4ATElJYcGCBRO6zCvp7u7G5/Px0EMPRW/AJcZvzPGptW4C/jdwnqGg6WFoF6pbax2MTHYBGL6SbwbQGJk3GJn+srtuK6VeUkpVKKUq2traxlqeMFBaWhpWq5X29najSxkTrTWVlZXMnTuX1NRUo8uZUsYcOEqpDIa2WoqAfMABPDjegrTWv9Bal2qtS7Ozs8e7OGEAi8VCUVERp06dSsizaIPBIGVlZdx///1TbpfGaOP5NO8H6rXWbVrrAPAucAeQrpQa3lWbCQzfd7IJKACIvJ7GUOexmGKUUixatIgTJ04YXcqYtLa24vF4KC4uNrqUKWc8gXMeWKWUSlFDPYNfAk4CZcDjkWm+CXwQGf4wMk7k9Y+l/2bqKi4u5ty5c9HT/BOF1pry8nJWrVp1U11yECvj6cM5yFDnbyVDh8RNwC+A/wy8rJSqZaiP5teRWX4NZEXaXwa+P466RZzLyMhgcHCQvr4+o0u5IQMDAxw6dIh77rlnyh2SjgfjOkqltf4B8INLmuuAlVeYdgCI3X0hhaFSUlLIz8+ntraWFStWJMQfr9aa06dPk5GRgfQfTg7pEROTYuSFnIkiHA6zY8cO7rvvvoS8cXkikMARk6akpISampqEOVLl8Xior69n6dKlCbFFlogkcMSkyc7OpqenJyEu5NRac+DAARYsWCDn3kwiCRwxadLS0khPT6ehocHoUq4pGAyya9cuOfdmksknKyaNyWTi1ltv5fTp03G/W3Xx4kUCgcBl97QRE0sCR0yqxYsXU1VVZXQZV6W1pqysjNtvv13OvZlkEjhiUuXn59Pc3Hxdz34yysDAAIcPH+b222+XzuJJJoEjJlVWVhZWqxW32210KaNorbl48SIDAwOcOHGCnJwcOfcmBuSexmJSmc1mZs2aRW1tLfn5+XGzBREIBHjqqafIzc3FZDLxzDPPXHsmMW4SOGJSKaWi/Th333230eVEBYNB3G43e/bswWQyUVlZyd69e/ne977HtGnTjC5vypJdKjHpioqKqK+vj6sjVcFgMHphaTgc5uzZs7jdbmw2m8GVTW0SOGLSZWdnMzg4SG9vr9GlRAUCgVFXsq9Zs4a///u/x+FwGFjV1CeBIyad3W4nNzeXuro6o0uJGrmFU1JSwmuvvUZeXl7c9DFNVRI4YtIppViwYAHV1dVxs1s1HDh5eXm89tprLFiwQMImBqTTWMTEggULePvtt686TW9vL01NTVedZqI0NzejtebFF18kJyeHmpqaCV2+0+kc9TRQMUQCR8REbm4uXV1d+P3+z+2YLSsrY/369TH5Q+3v72f58uV0dnby85//fEKX7fP5SEpK4o033pjQ5U4FEjgiJlwuF2lpaZw/f565c+decZrBwUHuvvtu7r///kmvJxQKoZSalAs129vb2bRp04QvdyqQPhwREyaTieLiYmpra6/Zj6OUmrAvGLow0+fzjWo3m80kJSVd93KCwWD08cXX857iyiRwRMwsWbKE48eP39A8oVCII0eOUF5eTnl5OcePH6e2tpZdu3Zx/PhxgsHgNZdRVlZGc3PzWMsGwOv18vvf/55QKERDQwMHDx6kr68vbjrBE4XsUomYmTFjBhcvXiQcDl/3LTyVUmRmZnL06FGsVivFxcX88Y9/ZMaMGdTX1+P3+5k/fz5VVVWkpaUxb948QqEQ1dXVhMNhFi5cCPz7/YpTUlJITk4mOzubvr4+uru7MZvNnD17lltuuYXc3FzcbjcXL16kv7+fpUuX0tbWRkNDA8FgkI6ODjZu3MiCBQt47733+LM/+7PJ/MimHNnCETGTnZ1NUlLSDV3IaTKZmDVrFjNmzGDmzJnRDuWWlhba29ux2Wy0tLRgsVjYtWsX58+fZ9++fdF7KXu9XrTW7Ny5k5qaGrq6uvjkk0+oqanh6NGjnDlzhqamJqxWK++++y4DAwMcOXKEffv2kZOTg8fj4aOPPkJrTVdXF263G5fLRX5+Pk1NTbKFc4MkcETMmM1mCgoKxn2Zg8lkoqCggOLiYpqammhoaODw4cPU19fT2dlJY2Mjy5cvZ8mSJbhcLgYGBmhsbMRms1FUVMS5c+fYuHEjR44coaCggJMnT1JZWUldXR2Dg4OYzWZWrlzJ3Llz8fv9ZGZmsmzZMtLS0jCbh3YKXC6X3BlwDOQTEzG1ZMkSPvvssxuez2QyjfoDP3fuHO3t7SQnJ9PU1ITT6cTlckUvFi0rK+MPf/gDPT09OJ1Onn32WRoaGmhpaWFgYIAvfOELdHZ24nK5aGlpISsrC7vdHn2v4V2+7Oxsuru72bZtG/39/eTl5eH1eikvL5eTBcdAxfMmYWlpqa6oqDC6DDGB6urqeOONN3j11Vcv+2PduHEjFy5c4Mtf/vJl83m9XpRS2O12enp68Hg8WK1WMjMzCQQCdHd3k5ycjMPhwGKx0NXVhdaazMxM+vv7sdlsDA4OopQiEAhgt9vp7+8nLS2N3t5eBgYGSE5OJi0tDb/fj8lkIjk5Ga01Ho8Hn8+HzWbD5XLh8Xjwer1kZWVhsVguq7WtrY1Nmzbxs5/9bNI+x3inlDqstS69tF06jUVM5eXl4fP56OrqIjMz87rnS0lJiQ6npaWRlpYWHTebzdGtk2FZWVnR4eGnMFwaDlarFYD09PRR7SOXpZTC5XLhcrmibZeOi+snu1Qipmw2G9OnT+f8+fNGlyIMIFs4IqaUUpSUlFBdXc2yZcsuez0YDCbEc6yuJhAIyNGrzyGBI2Ju3rx5bNy4Ea31qH4cq9XKzp07OXny5IS9V39/PxaLJbr7FAt+vx+n0xmz90skEjgi5goKCu
jo6MDn843qm3nooYe46667Jux9+vr6+PGPf8xzzz3HggULJmy51+NKnclCAkcYwOVykZqaSlNTE3PmzIm2Dx91mgjhcJjNmzezcuVKVq9eLefMxAn5KYiYU0px6623UltbOynL11pTWVnJqVOnePrppyVs4oj8JIQhFi1aNGlP5Ozo6GD9+vW89NJLco/iOCOBIwwxe/ZsLly4QCAQmNDlBgIB3nrrLdasWcOcOXPkTOA4I4EjDDFt2jRMJtOEPpFTa82uXbvo7e3lsccem7DliokjgSMMMXwB5kSeAHjhwgU2b97MX/3VX2G1WmXrJg5J4AjDLF68+IZvyPV5BgYG+NWvfsXjjz8uj3uJYxI4whBKKYqLi6mrqxv1QLqxCIfDbNmyBafTyZo1ayRs4pgEjjDMzJkz8Xq9dHV1jWs5Z86coby8nBdffFEOgcc5+ekIw1gsFqZNmzauZ1H19fXxy1/+kueff5709HTZuolzEjjCUCUlJWO+dioUCvHb3/6WuXPnsmLFCgmbBCCBIwyjlGLevHmcOnXqhq+u1lpz7NgxTp48yTPPPCO7UglCfkrCUEVFRbS3t9Pf33/d82it6ezs5M033+Qv//Iv5WziBCKBIwzldDpJSUm5oRMAg8EgGzZs4J577pGziROMBI4wXHFxMadPn+bixYucPHnysofbaa3x+XyEQiG01uzdu5fOzk4ee+wx2ZVKMHJ7CmEIrTUtLS0cOnSIiooKfvrTn+L3+1m1ahXr168fNW0oFOLVV18lPT2dhx9+mI0bN/LKK6+QnJxsTPFizCRwhGHq6ur4i7/4C9ra2qJta9asuezmVf39/WzevJkzZ87w+uuv8/LLL5Ofny+7UglItkeFIZRSrFy5kieffDIaHEopCgoKLguSmpoaGhsb8fv91NbW8pOf/ITf/OY3E36luZh8EjjCMGazme9+97uUlJQAQxd0FhYWjppGa82+ffvweDzR8Z6eHlpbW2Nerxg/CRxhGKUUs2bN4pVXXiElJQWtNTNmzBg1zeDgILt37yYcDmMymVi+fDlvv/02f/M3fyP3DU5AEjjCUEopvv71r7N27VrMZjN5eXmjXm9ra6OiogK73c7zzz/Pe++9xwMPPCBhk6Ck01hEud1uzp07Z8h7P/DAA+zbt4/a2loGBgai7Z988gl+v5/nnnuOdevW0dLSQktLy6TWYjKZKCoqYtq0aZP6Pjcjeba4iPrlL3/J3r17yc/Pj/l7a61pb2/H5XJFD3cPt9ntdhwOR8yOSp07d44//dM/5fHHH4/J+01F8mxxcU2BQIB7772X0tLLfk8mndY6+mC84WDRWkf7bmJ5CHzHjh3jvkePuDLpwxFj1tDQwJkzZ6LjtbW1NDQ0jGlZSqnLgmV4+JNPPhm1mzUWnZ2dHDlyZFzLEOMngSOuafg5T9u3bx91VXdLSwuNjY3R8cbGRpqbm6+5vM2bN/PWW2+xffv2az5HPBQKcfTo0XE/b7ynp4eamhr8fj9bt27lnXfeGfeNv8SNk10qcU3hcJgDBw7Q3t7O6tWrGRwcZMeOHbjdbkpKSnC73Xz88ce0tLRQWlrKRx99RHd3N3a7nYULF1JdXY3X6yU5OZnVq1dTW1vLs88+y/vvv88tt9yC1ppPP/2UgoICVq9eTWtrK3v37iU7O5uVK1cC0NXVRUVFBU6nk1mzZpGTk8OpU6cwmUy0tbVRX1/P6tWrKSoqYs+ePVy8eBGr1cqaNWvYs2cPra2tJCcn89lnn9HV1UVBQQE7duxg3bp1Bn+6NxfZwhHX1NHRQSgUoqCggKamJsrLyyksLCQ3N5dgMMi//du/UVJSQkZGBsFgMPokhqSkJCorK7l48SKDg4OYTCaqq6vp7e1l8+bN0U7iYDBISUkJ+/bto6Ojgy1btrBo0SLmz58PDN3Vb8OGDRQWFhIOhzly5Ahbt27l8OHDKKWw2+0UFBSwbds2tNZUV1czc+ZMHn30Uc6cOUMoFKKkpIRAIEBLSwspKSmEQiE5edAAsoUjrkprzfHjx2loaMDhcJCUlITZbCYtLY3u7m6UUvj9ftLS0khNTQXAZrPhcrlwOp14PJ7oeGpqKsFgkLS0NJ544gk+/vhjzp49S0VFBfn5+bS3tzM4OEggEMDlcuFyuaJXg+fn59PQ0MDChQv59a9/DYDdbsdisfDxxx+Tm5tLZ2cnACkpKRQWFmK1WvH7/TidTlJTU0lKSsLlctHd3U1mZiZOp9OYD/UmJls44qqGD01/5zvf4eWXXyYcDnPbbbdRXl7OqVOnSE1N5a677uL3v/89jY2NOJ1OMjMzo4GTkZFBZmYm6enppKamkpqaitVq5d1336W3t5dZs2aRlZVFV1cX+fn5JCcnc/fdd7N161bKysoIhULMnj2br33tazQ3N0fD6MEHH2TatGk4HA5sNht+vz96OD8jIwOr1QoMPYqmoaGBvXv3kp6ezrJly3C73ezZs4e7777byI/2piTn4Yio119/nZSUlFGHxUcerh4eV0pFO49Htg+Pj5x+5DxXWubIZV1reVf7fun8I+e9Wq1XOty+Y8cO8vPzeeKJJ27sAxRRch6OGJNL/yhHXtl96XRXGh7L+Oct71rfr6f+a00vJpfsUgkhYka2cMRl4nk3WyQ2CRwRlZWVxaZNm9i1a5dhNfT09KCUwuVyGVaDx+PhW9/6lmHvP5VJ4IiotWvX8qUvfcnQGt577z1sNhsPPfSQoXXIIfPJIYEjoux2O3a73dAaUlNTsdlscmuIKUo6jYUQMSOBI4SIGQkcIUTMSOAIIWJGAkcIETMSOEKImJHAEULEjASOECJmJHCEEDEjgSOEiBkJHCFEzEjgCCFiRgJHCBEzcrW4MJzWmr6+PgKBAH19fQSDQTo7O7FarTF9priYfBI4Ii689tprbNmyhc7OTkwmE6+//jpPPfUUf/3Xfy2BM4VI4Ii4MGfOHCorK6OP9E1OTuaHP/yhhM0UI304Ii7ceeedFBYWRsfnz5/PihUrJHCmGAkcYTilFDk5Odx3333R8fvvv5/MzEyDKxMTTQJHxAWlFGvXrsVut2Oz2fjqV78afcyvmDqkD0fEBaUUq1evpqioCJfLxcKFC40uSUwCCZybmNvt5uTJk0aXERUOhykqKsJut3PkyJG46b8xmUwsWrSIrKwso0tJeBI4N7GdO3eyZcsW5syZY3QpUbNnz8Zms1FeXm50KVEnT57khRdeMPzRNVPBNQNHKfUm8CjQqrVeFGnLBH4HzAbOAU9orbvU0L+kfwQeBrzAc1rrysg83wT+W2Sxf6e13jCxqyLG4o477mDNmjVGlxEVDoev+DxwI3344YdxVU8iu55eufXAg5e0fR/YqbWeA+yMjAM8BMyJfL0E/DNEA+oHwBeBlcAPlFIZ4y1eTD0mk+mqf9yhUIi6ujpCodC43sfj8dDc3DyuZYgbd83A0VrvBjovaf4TYHgLZQPw1RHt/6KHHADSlVJ5wFeA7VrrTq11F7Cdy0NMxAGtNWfPnqWhoYGenh6OHj1KMBi86jxut5vW1tYxv9+nn
35KWVkZtbW1hMPhq07v9/vZunUrgUBgTO837MKFCxw8eJBQKMSxY8eorKwc9zLFtY31uGOO1nr430MLkBMZngE0jpjuQqTt89pFHNq9ezcbN26ksrKSDRs24PP5qKyspKKigsHBQQKBAEePHuXQoUP4/X6OHTtGVVVVdH6v18v+/fupqqoiFArh8XjYu3cvZ86cQWtNd3c3e/bsob6+Hq01W7duJTU1lQ8++ID29nZaWlrYvXs3jY2NaK3xeDzs378/Oj/AwMAA1dXVnD17Fp/PB0BzczMdHR3U1NSwd+9ePB4PWmtOnz7N/v37OXDgAD6fj8OHD1NTU0M4HObYsWN8+umnVFVVcfDgQUM+75vJuE900EO/AXoCagFAKfWSUqpCKVXR1tY2UYsVN8BqtZKSkkJVVRXTp0/n/Pnz7N+/n3A4TF9fH/v37+fUqVO43W727Nkzal6tNX/4wx/o7e3l4MGDnD59mr179+J2u+np6SEYDLJz5056enro6uoiFArh9/upq6tjYGCApKQkmpubcTgcvP/++/T29rJ582aCwSA+n49QKITP5+Odd97B7/dTU1NDVVUV1dXV7Nq1i7a2Nnp6evD5fGzbtg0gGmQzZszg2LFj1NTU4PV6GRgYoLa2lsLCQgoLC6mtrTXi476pjDVw3JFdJSLfh7enm4CCEdPNjLR9XvtltNa/0FqXaq1Ls7Ozx1ieGA+lFAsWLGDp0qXYbDZyc3OZO3cuJ06coKuri7q6OhoaGmhquvxHqLXG7XazbNkyZs+eTWtrK8uWLcNsNlNdXc3g4CC33XYbwWCQ6upqAoEANpuN+fPnk5GRQXt7O1VVVRw+fJj6+np8Ph89PT0sW7aMRYsWYTab8Xg8tLW14XA4KC4u5sCBA/zud7+jvb0dp9PJ4cOHOXHiBA0NDQCkp6dz2223MXPmTFpbW5k/fz4LFy7EarVisViwWCzYbDbMZjloO9nGGjgfAt+MDH8T+GBE+5+rIauAnsiu1x+BB5RSGZHO4gcibSIOJSUlMXv2bO68804sFgt9fX14vV76+/vxer2sXLmS7OxscnJyKCwsRClFZWUlH3zwAdXV1ZSUlLBlyxaqq6u59dZb6ejoiN5yIhgM0t7eTigUorOzk1AoRDAY5OTJkwwMDGCxWGhtbSU9PR2Hw4HFYqG4uJj333+f8vJygsEgM2bM4Bvf+Abbt28nJSUFt9tNaWkpZrOZwcFBvF4vmZmZWK1WACwWS/Ss5ZKSEg4cOMDu3bvRWrNkyRKOHTvGoUOHWLFihZEf+01BDe8Tf+4ESv0GWANMA9wMHW16H3gHKAQaGDos3hk5LP5PDHUIe4HntdYVkeV8C/gvkcX+L631W9cqrrS0VFdUVNz4Wonr8q//+q90dnaOOiyutcbr9ZKcnExSUhIejweHw0Fvby+hUIiMjAyUUvT09BAKhUhPTycQCNDd3Q2Aw+EgJSWFrq4uLBYLaWlpBINBurq6sNlspKamRqe32+04nU46OjoIBALY7XZcLhd9fX0MDAxgtVpJTU1Fax2d3+Fw0N/fj8PhoK+vD7vdjtfrxW63Mzg4GH1vGNo1dDqd9Pf3Y7fbSUpKIhwO093djdYau92O3W6nq6sLrTUZGRlXvJziww8/ZMWKFTz4oBznuF5KqcNa69JL26+5Dam1fvpzXvrSFabVwLc/ZzlvAm9e6/2EsZRSOByO6LjL5QIgI2P0WQwjx81mM3a7fdTrI3eHLRYL06dPj45brdZR49OmTRs1r8vlir7vlZaXmpo6qra0tLTocoHLzgh2Op3RYZPJdNlFoXKRaOzI1XFCiJiRXrKb3MDAAH19fUaXEdf8fr/RJUwZEjg3sezsbLZt2xZXh4OHbzGanp5udClR/f39ch3VBJHAuYnde++93HHHHUaXMcqmTZuw2Ww8+uijRpcySnJystElTAkSODcxs9kcd+eeWK3W6ImHYuqRTmMhRMxI4AghYkYCRwgRMxI4QoiYkcARQsSMBI4QImYkcIQQMSOBI4SIGQkcIUTMSOAIIWJGAkcIETMSOEKImJHAEULETHxdKixuSlpr2tvbGRgYoKOjA5vNRmNjIykpKWRmZspjdqcQCRwRF1577TXWr1+P1+tFKcWPfvQjvv3tb/O3f/u3RpcmJpDsUom4sGrVKtrb2+no6KC9vZ22tjZuv/122bqZYiRwhOGUUnzxi1+kqKgo2rZ06VIWLlxoYFViMkjgiLiQkZERvW+wyWTiK1/5SvTxL2LqkMARcUEpxSOPPEJqaioOh4PHHnvsig+lE4lNOo1FXFBKUVpaypw5c8jIyGDu3LlGlyQmgQSOmBA+n48PPnwfrUNjXobWmty86dhsyWzZ+gHj6S622x185YEHL3siqDCWBI6YEL29vWx49yfc+0QBpqQxRoWGFV8LY00e5KJt07jq2fdON6u+uFoCJ85I4IgJobVmeoGdu9bOxmwZe9/L0OPpGffh8KryE9FlifghgSPiipx3M7XJYQAhRMxI4AghYkYCRwgRMxI4QoiYkcARQsSMBI4QImYkcIQQMSOBI+KG1poTB1s4W9UOQFNdDxVljXIC3xQigSPihg5DRVkjmdNTAMjKTeGzfc34+gMGVyYmigSOiBv9nkG8nkFcmTYALMlJKBN4uvwGVyYmigSOiBsBfwhlUtGLP5VSJCUpgoGwwZWJiSKBI+JGakYySqnoLlQ4GCYwGMaZZjW4MjFRJHBE3DBbTMxekMHF+l4A2lu8ZExPwZmWbHBlYqLI1eIibiiluOdPigmHh45KpU+z8eAz80gyy//FqUICR8QVW4rlisNiapB/HUKImJEtHDFhejsHOFvVTlKS8f/HvJ5Bo0sQVyCBIyaEw+FgWdEj1G0LMa67n0+QlSWLcTgcRpchLiGBIyZEamoqP/jvPzK6DBHnjN/2FULcNFQ8XxinlGoD+oF2o2sZh2kkdv0g6xAvEmkdZmmtsy9tjOvAAVBKVWitS42uY6wSvX6QdYgXU2EdZJdKCBEzEjhCiJhJhMD5hdEFjFOi1w+yDvEi4dch7vtwhBBTRyJs4QghpggJHCFEzMRt4CilHlRKnVJK1Sqlvm90PZ9HKfWmUqpVKVU1oi1TKbVdKXUm8j0j0q6UUv8vsk6fKaVWGFf5v1NKFSilypRSJ5VSJ5RS34m0J8x6KKVsSqlDSqljkXX4n5H2IqXUwUitv1NKWSPtyZHx2sjrsw1dgQilVJJS6ohS6qPIeELVfy1xGThKqSTgZ8BDQAnwtFKqxNiqPtd64MFL2r4P7NRazwF2RsZhaH3mRL5eAv45RjVeSxD4rta6BFgFfDvyeSfSeviB+7TWS4FlwINKqVXAq8BPtda3Al3AC5HpXwC6Iu0/jUwXD74DVI8YT7T6r05rHXdfwGrgjyPGXwFeMbquq9Q7G6gaMX4KyIsM5wGnIsM/B56+0nTx9AV8AHw5UdcDSAEqgS8ydGau+dLfK+CPwOrIsDkynTK47pkMBft9wEcMXQabMPVfz1dcbuEAM4DGEeMXIm2JIkdr3RwZbgFyIsNxv16R
TfPlwEESbD0iuyNHgVZgO3AW6NZaByOTjKwzug6R13uArJgWfLn/C3wPGL5rfBaJVf81xWvgTBl66F9QQpx7oJRyApuB/6i17h35WiKsh9Y6pLVextCWwkpgvrEVXT+l1KNAq9b6sNG1TKZ4DZwmoGDE+MxIW6JwK6XyACLfWyPtcbteSikLQ2Hzttb63Uhzwq0HgNa6GyhjaBckXSk1fBuWkXVG1yHyehrQEdtKR7kDWKuUOgf8lqHdqn8kceq/LvEaOJ8CcyI99FbgKeBDg2u6ER8C34wMf5OhPpHh9j+PHOVZBfSM2GUxjFJKAb8GqrXW/2fESwmzHkqpbKVUemTYzlAfVDVDwfN4ZLJL12F43R4HPo5sxRlCa/2K1nqm1no2Q7/vH2utnyVB6r9uRnciXaUD7WHgNEP74f/V6HquUudvgGYgwNA+9gsM7UvvBM4AO4DMyLSKoaNvZ4HjQKnR9UfqupOh3aXPgKORr4cTaT2AJcCRyDpUAf8j0n4LcAioBTYCyZF2W2S8NvL6LUavw4h1WQN8lKj1X+1LLm0QQsRMvO5SCSGmIAkcIUTMSOAIIWJGAkcIETMSOEKImJHAEULEjASOECJm/j+F21SsJOGy3AAAAABJRU5ErkJggg==\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "needs_background": "light" - }, "output_type": "display_data" } ], "source": [ - "# extract state_dict for updated network\n", - "one_step_net_state = torchopt.extract_state_dict(net)\n", + "# Extract `state_dict`` for updated network\n", + "one_step_net_state = torchopt.extract_state_dict(net, enable_visual=True, visual_prefix='step1.')\n", "one_step_optim_state = torchopt.extract_state_dict(optim)\n", - "# calculate outer loss\n", + "\n", + "# Calculate outer loss\n", "outer_loss = loss_fn(net(x), y)\n", - "print(f\"outer loss: {outer_loss:.4f}\")\n", - "torchopt.visual.make_dot(outer_loss).render(\"full_graph\", format=\"png\")\n", - "plt.figure(figsize=(10,10))\n", - "plt.imshow(imgplt.imread('full_graph.png'))" + "print(f'outer loss: {outer_loss:.4f}')\n", + "display(\n", + " torchopt.visual.make_dot(\n", + " outer_loss,\n", + " params=(init_net_state, one_step_net_state, {'meta_parameter': meta_parameter, 'outer_loss': outer_loss})\n", + " )\n", + ")" ] }, { @@ -214,40 +198,42 @@ }, { "cell_type": "code", - "execution_count": 41, + "execution_count": 7, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "tensor([-0.0537])\n", - "tensor([1.1000], requires_grad=True)\n" + "meta_parameter.grad = tensor(-0.2464)\n", + "meta_parameter = Parameter containing: tensor(1.1000, requires_grad=True)\n" ] } ], "source": [ "meta_optim.zero_grad()\n", "outer_loss.backward()\n", - "print(meta_parameter.grad)\n", + "print(f'meta_parameter.grad = {meta_parameter.grad!r}')\n", "meta_optim.step()\n", - "print(meta_parameter)" + "print(f'meta_parameter = {meta_parameter!r}')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "We have already conducted one bi-level optimization and optimize our meta-parameters. When you want to conduct the second bi-level optimization, you need to be careful whether you need to use the `stop_gradient` function. For example, if your new inner-loop parameters directly inherits previous inner-loop parameters (which is a common strategy in many meta-learning algorithms like MGRL), you might need `stop_gradient` function." + "We have already conducted one bi-level optimization and optimize our meta-parameters. When you want to conduct the second bi-level optimization, you need to be careful whether you need to use the `stop_gradient` function. For example, if your new inner-loop parameters directly inherits previous inner-loop parameters (which is a common strategy in many meta-learning algorithms like Meta-Gradient Reinforcement Learning (MGRL) ([arXiv:1805.09801](https://arxiv.org/abs/1805.09801))), you might need `stop_gradient` function." ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "In general, the back-propagation only free saved tensors (often used as auxiliary data for computing the gradient) but the computation graph remains. Once the outer iteration is finished, if you want to use any intermediate network parameters produced by the inner loop for the next bi-level iteration, you should detach them from the computation graph.\n", + "In general, the back-propagation only frees saved tensors (often used as auxiliary data for computing the gradient) but the computation graph remains. 
Once the outer iteration is finished, if you want to use any intermediate network parameters produced by the inner loop for the next bi-level iteration, you should detach them from the computation graph.\n", + "\n", "There are two main reasons:\n", - "- The network parameters are still connected to the previous computation graph (`.grad_fn` is not `None`). If later the gradient back-propagate to these parameters, the PyTorch backward engine will try to back-propagate through the previous computation graph. Which will raise a `RuntimeError`: Trying to backward through the graph a second time...\n", + "\n", + "- The network parameters are still connected to the previous computation graph (`.grad_fn` is not `None`). If later the gradient back-propagate to these parameters, the PyTorch backward engine will try to back-propagate through the previous computation graph. This will raise a `RuntimeError`: Trying to backward through the graph a second time...\n", "- If we do not detach the computation graph, the computation graph connected to these parameters can not be freed by GC (Garbage Collector) until these parameters are collected by GC." ] }, @@ -260,43 +246,109 @@ }, { "cell_type": "code", - "execution_count": 48, + "execution_count": 8, "metadata": {}, "outputs": [ { - "ename": "RuntimeError", - "evalue": "Trying to backward through the graph a second time (or directly access saved tensors after they have already been freed). Saved intermediate values of the graph are freed when you call .backward() or autograd.grad(). Specify retain_graph=True if you need to backward through the graph a second time or if you need to access saved tensors after calling backward.", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mRuntimeError\u001b[0m Traceback (most recent call last)", - "Input \u001b[0;32mIn [48]\u001b[0m, in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 7\u001b[0m plt\u001b[38;5;241m.\u001b[39mimshow(imgplt\u001b[38;5;241m.\u001b[39mimread(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mfull_graph.png\u001b[39m\u001b[38;5;124m'\u001b[39m))\n\u001b[1;32m 8\u001b[0m meta_optim\u001b[38;5;241m.\u001b[39mzero_grad()\n\u001b[0;32m----> 9\u001b[0m \u001b[43mouter_loss\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbackward\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 10\u001b[0m meta_optim\u001b[38;5;241m.\u001b[39mstep()\n", - "File \u001b[0;32m~/miniconda3/envs/OpTorch/lib/python3.9/site-packages/torch/_tensor.py:363\u001b[0m, in \u001b[0;36mTensor.backward\u001b[0;34m(self, gradient, retain_graph, create_graph, inputs)\u001b[0m\n\u001b[1;32m 354\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m has_torch_function_unary(\u001b[38;5;28mself\u001b[39m):\n\u001b[1;32m 355\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m handle_torch_function(\n\u001b[1;32m 356\u001b[0m Tensor\u001b[38;5;241m.\u001b[39mbackward,\n\u001b[1;32m 357\u001b[0m (\u001b[38;5;28mself\u001b[39m,),\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 361\u001b[0m create_graph\u001b[38;5;241m=\u001b[39mcreate_graph,\n\u001b[1;32m 362\u001b[0m inputs\u001b[38;5;241m=\u001b[39minputs)\n\u001b[0;32m--> 363\u001b[0m \u001b[43mtorch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mautograd\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbackward\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[43mgradient\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mretain_graph\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcreate_graph\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43minputs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minputs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/miniconda3/envs/OpTorch/lib/python3.9/site-packages/torch/autograd/__init__.py:173\u001b[0m, in \u001b[0;36mbackward\u001b[0;34m(tensors, grad_tensors, retain_graph, create_graph, grad_variables, inputs)\u001b[0m\n\u001b[1;32m 168\u001b[0m retain_graph \u001b[38;5;241m=\u001b[39m create_graph\n\u001b[1;32m 170\u001b[0m \u001b[38;5;66;03m# The reason we repeat same the comment below is that\u001b[39;00m\n\u001b[1;32m 171\u001b[0m \u001b[38;5;66;03m# some Python versions print out the first line of a multi-line function\u001b[39;00m\n\u001b[1;32m 172\u001b[0m \u001b[38;5;66;03m# calls in the traceback and some print out the last line\u001b[39;00m\n\u001b[0;32m--> 173\u001b[0m \u001b[43mVariable\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_execution_engine\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrun_backward\u001b[49m\u001b[43m(\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# Calls into the C++ engine to run the backward pass\u001b[39;49;00m\n\u001b[1;32m 174\u001b[0m \u001b[43m \u001b[49m\u001b[43mtensors\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mgrad_tensors_\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mretain_graph\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcreate_graph\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43minputs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 175\u001b[0m \u001b[43m \u001b[49m\u001b[43mallow_unreachable\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43maccumulate_grad\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m)\u001b[49m\n", - "\u001b[0;31mRuntimeError\u001b[0m: Trying to backward through the graph a second time (or directly access saved tensors after they have already been freed). Saved intermediate values of the graph are freed when you call .backward() or autograd.grad(). Specify retain_graph=True if you need to backward through the graph a second time or if you need to access saved tensors after calling backward." 
+ "name": "stdout", + "output_type": "stream", + "text": [ + "\n" ] }, { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAO0AAAJCCAYAAAAsmECdAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAACjhklEQVR4nOz9d3xc13nnAX/PNAAzg94b0QgQIMEONrCKFCVSoiSqUJTk2FacrJ3EzpvmfbO7dpLN5t2sdzeb8tnsxmvHcZEtWzJFS7I6xSaKHSABggSI3nsbYAbTZ877BzljkARJADMoA9zv54MPgFvOfe7M/d3TnvM8QkqJgoJC6KCaawMUFBSmhiJaBYUQQxGtgkKIoYhWQSHEUESroBBiKKJVUAgxZl20Qoh9QohaIUSDEOI/zPb1FRRCHTGb87RCCDVQB+wFOoDLwMtSyupZM0JBIcSZ7Zp2I9AgpWySUjqBXwDPzLINCgohjWaWr5cOtI/7vwPYNP4AIcRXga8CGAyG9YWFhbNnnYLCPKG8vHxASpk40b7ZFu1DkVJ+D/geQElJiSwrK5sTO/oHBvjJz98AjS6o5Xo9bjasXMHOHdsRQgS1bIXp43a7+cnrP2d4zD5DV5AYw7T8zpe+iEbzcNkJIVrvt2+2RdsJZI77P+P2tnmHxWLBrItk2aatwS13dITW9ptBLVMhcDweD20Dwyzb/eSMlO/1eqk+8SEej2dSon0Qsy3ay0C+ECKHW2J9CXhllm2YNFqdlrAIfVDLdDkcSKWGnZeo1Wp04REz0gLyejyoAxSrj1kVrZTSLYT4BvAxoAb+TUp5YzZtUFAIdWa9Tyul/AD4YLavq6CwUFA8ohQUQgxFtAEgpcTpsONxu+/Y5rBZkV7vHFqmEEyklNitVibriOT1enHYbDNmjyLaALlx8Rwup4OasotcOv4RAPXXrmIZMc2tYQpBY2x0hPprV/B6PHz6y5/R09Yy4XFN1VV8/v7buF0uqi58jsfjmRF7FNEGgNfrZdQ0TFiEniUFhbhdLgAMUdEM9HbPsXUKwcIyYkKj1aFSq8lathy71Trhcek5S4Fbo9AetxuHbeLjAkURbQCoVCp0YWF43C4cdhtWsxmP243NYiEyOmauzVMIErrwCDzuWy9kq8WMbcw8YZPZ6bBjtZhxOR1IKdHqguuY40MRbYAUrtuIx+NhbHSUcL0eu3WMpIxMYhOT59o0hSARE59AcmYWXq8Xj9uN0+HA43Zx7qN38Y5rAlvNo0QYjIyNjrJ05Ro02pkR7bxzYwwlhBDEJiYBkLm0gMylBQAYlVp2QaFSq0lMywBg3Y7dwC0PqnU79qBSq/3HJWdmkZyZNeP2KKK9H/KW95JtbGxyBwPwcE8au82KTglbOy/xuN3YrWNM5nsECIuImPTx0uvBfbuJHSiKaO9DVFQkKRoPlsqzDz3WarXS1t5G4bKHr0jyer0ULFdWLs031Go1hZmpjFSem9TxUkquX7/O8uXLUY+rbR9EUVrSpI99ELO6CH6qzOUqHynlpOfl+vv7OXPmDM8999ykjvf5tiqrfOYPvu96KnOxP/vZz3jxxRcJCwub1DlT+d6FEOVSypKJ9ik17X0QQkxaVCqVCiEEKpUyrheqTPVFKqX0PyOz/b0rT5mCQoihiFZBIcRQRKugEGIoolVQCDEU0SoohBiKaBUUQgxFtAoKIYYiWgWFEEMRrYJCiKGINkCklHi93jt+Kyxsxn/XU3F3DRaKaANASskHH3zAM888w1/+5V/yjW98A/e4eFEKC5OOjg6eeeYZ/vZv/5YXXniBlpaWWb2+4nscINnZ2bS0tNDX18cXvzi5lA+hiN1up6WlBW+QaxUpISkxgcTECdPWzEsSEhLweDzU1dWRkpJCcvLsBjxYmE/YLCGEID8/n/Xr13Px4kUef/zxuTZpxmjv6OD/HXmX5NtxkIKFw2YjU+vlK1/+YlDLnUnCw8M5ePAgJ0+e5KmnniIiImJWr7/oRBvsfoharebpp5/G4XCwbNkyvEEMnepbPTQfkFKStCSHgpItQS13bHQEZ83ML7/09UODxc6dOykoKOCxxx6b9e980YnW4/Hw05/+lNHR0aAJYnBwkMzMTH7wgx8ErUyz2czv/M7vzHrTa6Hy+eefc/nyZbRabVDK83g8FBUVcezYMU6fPh2UMh0OBxs2bGDnzp0PPG7RiVZKycjICKWlpQGvg7Tb7djtdmJiYvB4PAFFJXA4HGi1Wr9NFy9eZGxSoW4UJkN/fz/Lly8PuO8spcRkMmEwGNiyZUtAYxgulwshhL+M3t5eBgcHH3reohw9VqlU6HQ6wsLCCAsLo6uri4qKCnQ63ZR+HA4Hx48fR6fTERERccd2jUbj/9/tdvPRRx9RU1Nz37Jqa2sZGxvz27RQB7TmEq1W6/98R0dHOXv27JS/c61WS01NDS0tLej1ev92l8vlf650Oh1CCE6ePElZWdl9y2pvb6e/v99vk++8h7HonwwpJbW1tfT19WEwGKiurmb37t2UlZWhVqvJzc3F4/FQVVXF4OAgqampZGdnExFxKyViVFQUAJcuXaK5uZlt27bx2muvsWHDBhwOBw6Hg4yMDBITE9FqtXz88cc4HA5Wr17NqVOn2L59O01NTTQ2NpKdnT23H8YiQUpJXV0ddXV1bNmyhePHj2MwGFCpVIyMjLB27VpGR0cZGxujt7eX7u5usrKyyM3NxeFwkJuby8jICDU1NZSXl7Nr1y5+9atfkZGRwaZNm/j000+JiooiJiaGyMhI+vv7+eSTT8jKyqKpqYl169ZhsVi4du0ajz766JTtX5Q17XgcDgc3btygq6uLGzdusGHDBgwGA2az2S+8lpYWuru7iYqKwuPxMDIyQktLC2azGbfbjdVqpaenh7q6OtxuNxs2bGDp0qVcv36dvr4+hBDY7XauXr2KEILHHnsMk8lEd3c31dXV2Gy2BdN3He7vwzZmwWGzYRuz4HI66O/qmFdOJ76X8PDwMC0tLTgcDtauXUt/fz9bt25FCEFrayvd3d2o1WqioqLo7++npaWFoaEhhBCMjY3R3d1Na2srg4ODbNq0idWrV9PZ2Ul7ezsmkwm73U5lZSUOh4OlS5eSnp5Of38/lZWVDA0NkZ6ePi37lZpWSp588kk0Gg0qlYrm5mZiY2NZvnw5tbW1LFu2jIGBAUpLS4mMjEStVqPX66mvryctLQ2tVsvAwAC5ubkkJCQQFxdHfn4+w8PDHDhwgIGBAYqKiqiqqmLTpk2Eh4ej0+lITk5my5Yt5ObmYrFYMJlMGI3Guf44AkJKyc0rlyl5ZC8djfXUX7vC7udeovHGNWITk2YsePd02LlzJxEREURERJCfn09DQwNr166lurqa1atXExkZSWJiIhERETidTlQqFZ2dneTk5JCQkEBnZycpKSns3LmT9PR0pJS0tLRQVFSE1WolOTkZ
q9VKRkYGsbGx/ubv+vXrycjIQKPR0NHRQUJCwpRtX/SijYiIoLDwNyFNc3NzAYiJifFv27Fjxz3npaSkAJCUlHTPvqioKLKy7gxavWXLnVMlqamppKamTtvu+YiUXpwOOxqtltSsbLqaG2710SS4Xa55I1qNRkNRUZH///GtnLy8PAC2b99+z3m+fXDv9wm/eRZ27dp1zz7fC/mRRx7xb8vJyZmi5bdYlKL1eDzYbLZ5HT3R6XTOtQlTRggVsYlJWC1mhvt6aamtoXD9RrRhYejCw+fUNikldrsd632SZ80HbDbbpLoRi060KpWK3Nxcbty4Me0yXC4XZWVlrFu37p6Yt1JKKioqyMzMnFbTx4dGo7mjtg8FhBAUrd+ESqUiPiWVx1/6IsboGFZu3opKFXiQ7kDIy8vj8uXL9PT0BFSOxWKhsbGRVatWIYTA6/Vy5coV1q5dG3Agco/HQ0nJhKGO72DRiVatVvPUU09N+3yv18sHH3zA8uXL2bZt24RD9J2dnXz44YccOnSIyMjIQMyddzysJtDcdl4I1xsI1xsmfd5Ms3r1alavXh1wOR9//DE7d+68o6yPPvqIzMxMVqxYEXD5k2HRiRamH9nf6/Vy8uRJdDodW7duvW/zOj09nV27dnH06FEOHz48676pM4FGrWGgpRGn/eEZzr0eD06Xi/BJNIldDgeFCTP/YgvUU01KyeDgIL29vezZs+eO8jZv3szbb79NYWHhrMyvL0rRTgev18v58+exWCwcOHDggU0hIQRLly7FarXy9ttv89xzz006dcR8JTMzg29+9dU7Ujvej97ePq5VXWPvo3smUbIgOjoqcANnGCklJ0+eZPv27fe4QsbExBAXF0dTUxMFBQUzbosi2kng66d2dHTw7LPPTuptKoRg1apVjI2N8dFHH/mnlUIVrVZL6u0R84chhKCjI3ba85Dzkba2NqSUEzrACCHYsmULn376KXl5eUFJsvUg5u/w6TxBSsnNmze5fv06Tz/99JQczoUQbNq0Cb1ez8mTJ4O6GkRh9nC73Zw+fZpdu3bdt0uUkJCASqWiv79/xvvvimgfgJSS1tZWzp8/z3PPPUd4ePiU+0ZqtZpHHnkEi8XChQsX5nxARmFqSCmprq4mMTHxgYsNhBBs3LiRCxcuzLhNimjvg5SS7u5ujh07xrPPPovRaJz2YIZGo+HJJ5+ktbWVa9euKcINIex2O2VlZWzfvv2B378QgszMTEwmE2azeUZtUkQ7AVJKhoaG+OCDD3j66aeDMl+q1Wp55plnqKiooLGxURFuCCClpLy8nGXLlk3KxVSj0VBcXMzVq1dn9PtVRDsBFouFo0eP8vjjj5OUlBSUhe1CCCIiIjh48CCnTp2iq6tLEe48RkqJ2WymtraW9evXTzYRNEVFRTQ0NOCZxCj7dFFEexdjY2P88pe/ZNeuXWRkZAQ13ItvKd8zzzzD+++/P6kFzwpzx2effcamTZumNF2n1+tJSkqiqalpxuxSRDsOu93Ou+++619aNxPxmYQQJCYmsn//ft555x1GR0eDfg2FwOnt7WVoaIjCwsIpPQdCCNatW0dFRcWMtaQWvWh9Ab9cLhfvv/8+eXl5FBcXz3hAtYyMDLZv387bb7+NzWbD6/UuiCkhq9XK8PAwZrMZk8kUUl0AXygZq9XKqVOn2Llz57Tm1lNTU7HZbJhMpuAbySJ3rpBScuHCBc6fP09OTg7x8fGUlJTMSgREX/jVsbExjh49ytDQEPn5+Tz++OPzJgLjdDh+/Dj/6T/9J2w2G2fOnOGf//mfQ8qp5Lvf/S5nzpxhx44dPP/889MqQwjB8uXLqa6uprS0NOjf56Kuad1uNz/84Q/51re+xZEjR1i9evWsLtfzuTt+9tln/Pmf/zn/+q//isPhmLXrzwTLli1jcHCQlpYWiouLZ9w7KJh4vV4+//xzPvjgA/73//7f046y6BuQqq+vn5GME4tatM3NzXzwwQc4nU6uX79OQ0PDrNvQ3t7O9evXcblcnDp1ipqamlm3IZjk5uayceNGEhMTefTRR0Oq1WCz2ejq6iIyMpLf/d3fnXCh+2TR6/UYDAYGBgaCaOEtQqLdMhNJjqSUHDlyBLVazbe+9S1+53d+h4yMjEn3K30P490Ppc/Oydq7bNkyjh49ymuvvcZ3v/td3nzzTVauXBn0Gl8IMaGAgv3ZqlQqnn32WVQqFUuWLAlqP32m76GzsxOn08l3vvMdvvKVr6DT6ab0PIy3TaVSsWrVKiorK0lJSQnuLMR8HigoKSmRZWVlnD17ls8//9w/9C6lfOiH8LBj3G43V69eJT4+nuzs7EmLxDdwlZuby8GDBycU7TvvvENDQ8M9fbnxNt1tn5SS9vZ2uru7Wbdund/HeaL7mMz9j8fpdLJ161a2bt16zz6Xy8WPf/xjRkdHJ/UZTObaw8PD9PT0PHDkdar3YLVaefXVV0lLS5vwet///vfR6QILZzMwMEB7ezurVq2aUrPe6XTyu7/7u8TFxd1j8y9+8Qu+9KUvTblfL4Qol1JOuCI+JGra/v5+Vq9efd+IhVJKRkdH/bGHH4TdbveLf/fu3Q98qJxO54SxaG02G9XV1fc9b3h4mM2bN2MwGO57jMlkIjIyckJhA/6oCC6Xa1LzhFJKHA7HhGtYe3p67jsn7PV6sdvtbN++PeABI6fTic1mIyoqCq/XO+3+7ET3cvny5fsGb3c4HOj1+glfSg/C7XYjpfS/IL1e731rc6fTiVqtnvCezp07h91uv2d7REQEcXFxdHV1sWTJkinZ9iBCpk+r1WrRarWo1Wo+++wzqqur/bWZWq2mqqqKpqYmRkdH+fnPf05lZSUajeaen7KyMtxut7+8iY7xlVlWVsbQ0BDvvPMOAwMDd5zzMD9UjUbjfxhOnTpFZWUlnZ2d/qiPly5doru7m/7+fl5//XVu3rzpP8d3DZvNdt/7uPsHbj3Y586d4/z586hUqjvKehDjj9VqtfT19XH58uVJXXf8j91u58MPP/QHBfd9jna7/Y7jPB4Px44du++9SSkpKyvzfx6+ch6EWq322z80NMRrr73GL3/5S958800+/fRTvF7vPdfp7Oykra3N/78vGPlENjU0NNDX18enn35KdXX1Hd/V/WwTQlBYWMjNmzcfaPtUCYmadjy+UJXV1dUUFxczNjaGRqMhIyMDq9WKzWbDbrfT2NhIT08PWq2WjRs3cuzYMYqKivwO4B6Ph5ycHEwmE4ODg4yNjVFQUMDly5fZvHkzlZWVmM1mNBoNer2e69evTyt6osfjoampCYvFwpIlS/B4PP7mtZQSi8WC0+mkvr6ehoYGYmNjWbFiBcePHyc3N5e2tjZiY2OxWCzEx8djtVppa2tDrVaTlpbGtWvXKC0t5dKlS4yMjBAZGUlYWBgWi4Xo6Ohpfb61tbU0NzeTmJjIlStX2LVrFzdu3MBmsxEfH09MTAxWq9XfvVi2bBk6nQ6Px0NsbCwAFRUV1NbWkpqayqlTp/jt3/5t6uvrMZlM5ObmEhkZ6V+yODw8zMaNGzlx4gQ
rV66kpqYmoFo/KSmJgoICUlJSOHXqFL29vQwMDHDq1CmKioro7+9HpVKRkZFBbW0tQggsFgt6vZ62tjYGBgZ47LHHqKuro729nejoaNra2igpKWFsbIy6urpJj4xnZWVx9uzZgNPGjCdkatrxJCYmkpOT448d29raipTSH81Or9djsVjo6+vj6tWr9PT0kJCQQH5+Pjabjba2NqxWK7/+9a8ZGBigr6+Pa9euMTAwQElJCbGxsYSHh2M0GlGr1Xg8noAeouTkZHJzczGbzbS2tmIymRBCYLVakVJiNBoxmUz09fVRXl7uj2i/ZMkSWltbcblc9Pf389577zE6Okp/fz9Xr17FZDKxdetWdDod0dHRd4S1me5Alsvl4vr16/5rrF+/npiYGAYHByktLfVP5/i6I77g7b778nq92Gw2ent7qa+vJzY21l+G77vweDw4HA5/q+exxx7zR/P/7LPPSEhICKh/KoRApVL5WxA6nY7BwUH6+/u5fPkyZrOZzZs3A9DY2IhGo6Guro4PP/zQf3+tra04HA4eeeQR3G43qampqFQqvF7vlD7b8PBwoqKi6Ovrm/b93E3I1bRCCFauXElCQgJms9nf30tJSfHnRYmJiSEtLY2xsTFWrFhBZmYm/f39tLW1sW7dOjQaDUNDQ2RlZZGQkEBraytLliwhKysLg8FAVFQU8fHxaLVa8vPzKS8vZ82aNdOyV6PRsHbtWqKiouju7qa3t5f09HSMRiPXr18nJycHg8HAihUrGBoawmg0kpuby+XLlxkZGeHw4cO43W6WL19OdnY2ycnJeL1eCgsLycjIIDo6mvDwcJqbm0lNTUUIgZTyvv3phyGlZPfu3f6+fEtLC7GxsRQXF3Pjxg2ysrIYHh4mKyuLlJQU1Go10dHR3Lhxw39fXV1dZGZmEhERQVpaGhaLBavVypNPPklXVxfLli2jqqqKkpISjEYj4eHhxMXFsW7dOtLS0ujv7w846oXvu8zMzPS3SoqLi0lJScHr9XL9+nXy8/N58cUXcbvdrFmzhlWrViGlJDU1lYyMDFwuFzExMeTn59PX10dOTg52u52kpKQpCTcnJ4eWlpagxbkOidHjt99+m8jIyHmTOsNqtVJVVcVXvvKVe/q2Xq+Xn/zkJxQWFs6bjAHd3d3YbDaefvrpe/Y5HA5+8IMfUFpaOm89l6SUXLp0iR07dpCfn3/P/p6eHn71q19NGGB8Nvj88895+umnJxzZBhgZGeHdd9/lC1/4wqTFHvKjx3Br1Ndme3gkwNnAbrc/cF7QFxh7Jr2BpjJlMtHI5nh8wdsnI9qpTtUE6/yHBW93uVxz9ny4XK4H7ve9vMfGxoISUjckRLt06VLOnj1Ld3d3QOV4PB5qamoCDnXpa55OhM81saamZsZcIr1eL9euXQNg+fLlD+3/eb1eSktLJ9ynVqvJysri+vXrD5z+slgsVFVVERYWxtq1a6d1b1JKOjo6aGpqYv369RgMhkkLWKPR3DMP6sNoNBIVFUVlZeWUberq6gK4by05GSIjIx8oRpVKRVxcHAMDA0ERbUg0j4OF2+3m5z//Oc8//zx6vT5o5c42UkrGxsY4f/48nZ2dbN++nZycnKC/JHzX8aXxLC4upri4GL1eP63aUkqJy+WiqqqKiooKVq5cydq1a4OWnX069nz44YcsW7bsjjw9M0F9fT3Nzc3s3bt3sgvq79s8DsnR4+miVqtRqVQz4sQ9mwghMBqNPProoxw4cIDy8nLeeOMN+vr6guLO52venzlzhjfeeAOj0cgXv/hFNm7cOKXacSK7dTod69at45VXXsFsNvPaa69RV1eH1+udk2V8/f39EyZRCzYpKSn09fUFxa0zJJrHwSQyMpLR0VF/MuhQRghBQkIChw4doqWlhY8++ojExES2bds2rUB0Ukrcbjc1NTVcuHCBFStW8Fu/9VuTzlA+FbsjIiLYvXs3w8PDnDp1isuXL7N7926Sk5NnbaXV2NiYf8ptpomIiMDtduNyuQIe61hUohVCEBsby/DwMBkZGXNtTtAQQpCTk0NGRgbV1dX88pe/pKioiLVr1xIWFjYpwfmiTx4/fpzExEReeuklIiMjZ3SVjhCCuLg4Dh48SGdnJ8ePHycmJoZt27YRHR094yuEOjo6SE9Pn5WVSGq12j/fHeh01qJqHgPExcUxNDQ012bMCFqtllWrVvHyyy/jcDj46U9/ys2bNx/Y9PT5+Z44cYJjx46xe/duHn/8caKiomZtWZ3PO+mll14iJyeHt956izNnzuBwOGa0ydze3k5mZuaMlT8eIQTZ2dm0tLQEfE+LTrTR0dGMjIyEVBiUqeBreu7cuZNDhw5RV1fH66+/TldX1z3ilVLS09PDz372MwwGA1/4whdIS0ubkzWwPn/t5cuX81u/9VsIIfjRj35EVVWV37E/mHi9Xtrb24PqyP8wEhISgrK+dlE1jwHCwsJCPjrEZBBCEB0dzdNPP01nZyenT5/GYDCwc+dOoqOj8Xq9XL58mdraWvbv3x/0NZ+B2B0WFsa2bdtYtWoVZ8+epbKykp07d5KRkRG0/q7FYkGj0cxqRkNfhREoi1K0TqczYCeBUEEIQUZGBi+++CL19fUcPXqUJUuWYDKZMBgMHD58eFIpKWcb30tn//799Pb2curUKcLCwti+fTvx8fEBf3f9/f0BJf2eDgaDAYfD4V9lNl0WXfN4vGgXExqNhsLCQp588kk+/vhjLl++TEpKin8p3HxFCEFKSgqHDh1i+fLlvP322xw/fty/OGSq+KJcNDc3k5ubO6svbpVKRVRUVMC17aITrW8J2UIIVzoVpJT09fXxzjvv8Ad/8Af8xV/8hb8/29ra6u/vzkRon2CgVqspKCjgy1/+MtHR0fz0pz/l6tWruFyuO+x+mO39/f384Ac/4MSJE+j1+ll/DmJiYhgeHg6ojEUnWiEEBoMBi8Uy16bMGlJKurq6eOedd3j66afJzs7GaDSyb98+9u3bx8WLF3nrrbcYGBhgYGCAY8eOzWhai+kihECr1VJSUsLLL7/M4OAgP/vZz2hoaMDhcPD+++/fN7qFD4fDwX/7b/+Nf/iHf+DVV1+lv79/lqy/RTD6tYtOtHDrg5upQNLzDV8N+8EHH3Dw4EGSk5PvCEqXnJzMCy+8wPr163n33Xf51re+xW//9m/zwQcfzMsaF+71CLt69Sr/83/+T772ta/xv/7X/3rg4oLo6GgMBgNer5dnnnnmgekrZ4KoqKiAK4xFK9pgjOLNd3yxs959912eeuopEhMTJ+zDqdVqcnJy2LZtGx9//DFdXV184xvf4Pz58/NWuPAbj7ADBw5w9uxZurq6+Lu/+zt+9KMf3belEBERQXh4OHv27OHVV1+d9cFIg8Hw0NbAw1h0o8dwy5VxMYjW5XLxzjvvsGvXLpKTk3E4HNTW1t73ga6urqagoIDY2Fi6urr44z/+Y/7mb/5mRmujiIgI8vPzJ1x1NTo6Oqm0oCaTiejoaDZs2EBXVxf/5b/8F9RqNWvXrr3nWF9InGeffZbGxsag3EN8fD
xLliyZ1AtAEe00MRgM/iVZCxWv18vJkyfJy8vzJxPr7e3l17/+NUuXLp3wHI1Gw1e+8hU8Hg9OpxO73U57eztDQ0MzUiNJKWlqauIP//APJ/QFr66u5uzZs5Ny+zt48KA/eqXD4fC/oCYKP/v0009jMBioq6sL+B5sNhsOh4Ovfe1rkzo+IiIi4HW/i1K0kZGRC3ogSkpJQ0MDQ0NDd0T5l1KSnJxMcXGxP6ZWd3c3S5YsuWfe0OVy+QPNje8H+/AFbXvYwgspJTU1NWRlZdHb20tKSop/WaTX62VgYOC+NanX6yUrK8u/dtntdtPf309KSoq/3Pz8fNrb2/0B58ZjsVhoaWkhKyvLv451fIjasrKySeWedblc3Lx5k7y8PHp6eliyZIm/ZWA2m+8bTncitFptwKvMFqVojUYjZrN5rs2YEaSUjIyMcPr0aQ4fPvxAD6IrV65gs9mIjo7G7Xaj0WhwuVz+hNeVlZUYDAb27NlDfX09K1asoKOjA61W63fHs9lshIWFIaWkra3Nn3zKF+gtIiKC6upqPB6PP9r+o48+Oq176+jo4OjRo/zBH/wBTU1N1NTUoFarqa6uZsWKFTQ2NmI0GtHpdAwNDTEyMkJHRwcdHR0UFhYyNDREQUEBN2/eJDMzk87OTn/AttjYWEwmE62trSQnJ5OamkpdXR1qtRqLxUJPTw/Dw8N0dXXhcDgoKiqa1j34AgUGwqIUbUREBA6HY8qR9UIBr9fLJ598ws6dOx+6SicvL4+6ujqGhoZobGxEr9fj8Xioq6vzhxVVq9WYzWYuXLhAV1cXGo2GHTt20N7ezqlTp9i7dy8nTpzAZrMRFxdHTU0NsbGxXL16laKiIm7evInb7cZisRATEzPtFo6Ukrq6OqKjo/3hZq1WKxUVFajVarq7u6mpqSEyMpLY2Fja29v9S+Hi4+Pp6+vj+PHjtLe3k5CQgNFopLu7mytXrpCRkcGpU6dwu92oVCouXrzI4cOHqampYenSpfT09OB0OrFarQEPYvoC7wXCwnpip0B4ePi8iTkVLKSUXL9+Hb1eP6mk2GNjY3R0dOD1enE6nXR0dBAVFUVeXh4pKSkYDAYiIiIYGRkhIiKCqKgoHA4HTU1N/giNLpcLnU6H0WjEZrORkJCAEILMzExycnKQUuLxeEhLS6Ovr4+srKxp35vZbCYiIoLR0VH/utSIiAisVqs/imVqaioJCQnk5eWRkJDgzzoxMDBAfHw8cXFxtLa2MjAwwNKlS4mJiSEqKgq1Wo3BYMBqtRIXF4darWbp0qVkZWVhMpkIDw8nPT2d3t7eWV1kMBGLsqaF34ziTTfU6HzEarVy6dIlXn755Um1IPLy8sjOzkatVvujHI4X+vg0Gb6kYOP7hL5aw3eub59KpfKvFjp48KB/25e+9KVpLwAXQvDss8/6r3l3bfWgxFy+z8LXsvL99i3L861HHn/PvrhOQgheeeUVVCqVP4XlXKfvnLZohRCZwE+AZEAC35NS/pMQIg54A8gGWoAXpZTD4tan+k/AE4AVeFVKeSUw86dPMIbe5xNSSs6fP8+6desm9SLyPeS+AZWJHsTxwr/fS+B+tblv+/hyAwmmNz7HznRHsn33MNG9+Mocv+/uz+juv+eKQCxwA38mpbwihIgEyoUQx4BXgeNSyu8IIf4D8B+APwf2A/m3fzYB/3L795yg1+uxWq1zdfmgMzIyQltbGzt37rzvQ61Wq2lvbw+6i+L4putUVq/4kpXd74Wg0Wi4efNmQGtQbTYbarU64Ix698Nut8/q8j4IQLRSym6g+/bfZiFEDZAOPAPsun3Yj4FT3BLtM8BP5K12zQUhRIwQIvV2ObOKb9nX6OjobF96RpBScuXKFdauXfvAmiA1NZU/+IM/CLqTvJSS6upqqqqqePzxx6cUc0mr1d63ZbB69WoyMzOnPXAzPDzs9wa7X/jVYDDbkT2DUtcLIbKBtcBFIHmcEHu41XyGW4JuH3dax+1td4hWCPFV4KvAjHb4Y2Jigp7NbK5wOp20tLRQWlr6wKajRqMhJSVlRmxIS0sjPT2d06dP8+yzzwZlzWtYWNi0Umn4pr0+/PBDXn75ZbKyshbU2umAR4+FEEbgLeCPpZR3VF23a9UpvSallN+TUpZIKUtm0n0uJiZmQSwakFLS2tpKamrqpPLYzhS+QZp9+/Zx9OhR2tra5sRv2edvffToUXbt2rXgBAsBilYIoeWWYH8mpTx6e3OvECL19v5UwJcurBMYH0Ur4/a2OcE3vL8Q1tXW1NSwfPnyOX84fVEynn/+eY4fP87169dnXbhms5m33nqL7du3z/oi99li2qK9PRr8A6BGSvn343a9C3z59t9fBt4Zt/1L4habgZG56M/6UKlUfg+gUEZKyeDg4KwE3J4MvrCohw8fprKykvPnz8/Ki9E3GHbkyBG2b98+qXnqUCWQmnYr8EVgtxCi4vbPE8B3gL1CiHrg0dv/A3wANAENwPeBPwjg2gHji/73sMRO852RkRHCwsLmVZwnIQR6vZ7Dhw/T39/PJ598MqMvR1/6krfeeoutW7cuaMFCYKPHnwP3+2T2THC8BL4+3esFG1/C4VAXbU9Pz7yJpDgeX5SJJ598krNnz/Luu+/yxBNPEB4eHnRbrVYrb731Fps2baKgoGDefRbBZtG6McKteFGhHk51eHh4RqczAkWj0fj7l0eOHMFsNgetnyul9Au2pKSEwsLCBS9YWMSiHZ8iJJQJhbxEKpWKNWvWUFpayptvvhmURGG+pYVvvfUWa9eunRcDcbPFohStz4ndaDTS1dUV1Lf/bBMq/tNCCHJzczlw4ADvvfceTU1N04r+6Isaabfb+dWvfsWqVasoLi5eNIKFRbxg4NSpU/z93/899fX1bNmyhR/+8Ich+cU7HI5Zd6ObLr5Acs8//zzvvfceZrOZqKgoBgYG2LBhw0M/f4/Hw9GjR1mxYgUVFRWsWLGCVatWheT3FgiLsqaFWy59FRUVNDQ0hPR8nsvlmrOkzNPB50L6/PPPc/nyZV566SW++c1vMjw8/MAaV0pJZ2cn3/72tzl06BB6vZ7Vq1eH7PcWCItStEIIli1bxgsvvIBOp5swAFioEGqihd+s2CkrK6O8vJyLFy/y+uuvP/AcKSVvvvkmjY2N1NXV8fbbby+49dCTJWSbx16vN+BJ+y996Ut89tlnLFu2LCgrX3xrLmfz7e92u4O+vlNKOeOZ2YUQfPGLX0StVvP+++/zL//yLzzxxBP39Tfv7+/nZz/7GUuXLuXVV1/l5ZdfDkq8pYehUqnmXXSTkBXtqVOnuH79ekAPrNvtJi8vjw8//DDg2srlcrFu3Tq2b98eUDnTIdgvCbfbzb/927/NuCAACgsLiYmJ4dKlS/zjP/4jy5Ytm/C4trY24uPj2bJlC0ajkffee2/GbZNSkpaWxnPPPTfj15oKISvanp4eVq1adU8Evqmybds2fw05Ge6Xba+/v5++vr4Jzpg5Zqom9Hq9WCwW9uy5x0dmxnjxxRcfmMmwtLSUl19+edLlTSUr4v2OdbvdX
Lx4cdLXnC3mV70/RXz+w2q1mtbWVmpqavzbxv8MDQ1x8uRJBgcH79kXFhbG2bNnUavV9+y7+8fj8VBeXs7o6Cjl5eV4PB7/PrVavaAGRXxunhqNBrvdTmNjI/X19Xfcs+++h4aGcDgcD/zsfH3YyspKgHv2d3d309raet/ztVqt/+/GxkY6Ojr47LPPaG5unvC7q6ysxGazPfQ7ValUVFRUYLFYKCsrw2w233Fv85GQrWnH43a7uX79Ojk5OYyOjmIymTCZTPT29qLVaomMjKSvr4+rV6+SmZlJd3c3q1at4sqVKxQXFzM0NERNTQ1CCJKSkrBYLNTX15OUlEReXh4XL14kIiICk8mEzWbDYrEwNjZGbW0tq1atmrP7nq0cu0NDQ5w4cYLR0VG2bduGw+Fg8+bNXLp0iYSEBMxmMxkZGXi9Xj7//HOKi4txOBw0Njb6p2fWr1/vD0makpJCRUUFRUVFjIyMYLFYSEpKorOzk6GhIfLz8xkeHsbtdtPY2Mi2bdvo7OykqamJxMREamtrWb58OW1tbfT19dHd3Y1Op2P16tVcvHiR+Ph4zGYzjY2NeL1eli5dSn9/P0NDQ9hsNgoKCqioqKCgoICuri5aWloQQvjn7Pfu3Tvjn2kgLAjRqtVqVq5cSUNDAxEREbS1tdHa2ooQArVaTW5uLjabDZfLxdDQEJcuXWJ4eJjCwkJSUlLo6enxB/w6ceIEGzdupLe3l6qqKpKSkrDZbKSkpGCxWBBC4Ha7F01GebhV6/oiFprNZtxuNx0dHZw+fZrMzEx/MPHW1lYMBgPXrl0jPT0dq9WK2WzGYrHgdDpxOBxotVp/FMiuri5iY2PZv38/nZ2dnD9/ntLSUo4ePYpWq6W4uJhr166RmJjo/75u3LhBbGys3yMKbnVN6urqiIuLw+l0smzZMtra2mhoaCArK4uysjLS09PRaDSUlZWhVquJiooiKiqK6upqwsPDcblc/tzF852Qbh6PZ3BwkNHRUeLi4hgeHiY6Opr8/Hzy8vL8/d6kpCQ8Hg9Lly6loKCAmpoaOjs7Wb9+PUuWLCEqKsovZI1GQ0FBAWFhYeTl5ZGRkcHw8DCxsbEsXbqUkZGR+6bXWGhERESwZcsWSktL0Wq1OBwOEhMTWb16Nbm5uRiNRj7++GOGh4fp6OjwRzb0vdScTqdf+Hq9Hq1Wi9FoJC8vj7i4OM6ePYter+fAgQPodDoyMjJYunQpUkqysrKIjo4mPT2dpKQkEhISGBsbIzo6mrCwMH+EjMLCQuLi4rBarVRVVZGVlcWWLVvQ6XTk5OSQk5OD1+tl2bJlxMXFkZmZ6X8ufMeMjIxMOwj5bCLms/teSUmJLCsrm3Df66+/TkZGxrxxlu/r62N4eJjnnntu1vq2Xq+X73//+3z1q18N6jUdDgf/5//8Hx577LGglRmKuFwuzp07x9e/HrzFab7v7GG5f4QQ5VLKkon2hWzz2DfCOR9CWsKtvDELIQqGD5fLNeuB72arjz5Z3G73vPxO58cTPw3y8vK4ceMGvb29AZVjt9vp6OggLy8voAfG5XKxZs2agGyZL6jVajIzM6mtrZ2V60kp6enpYXBwkBUrVkz4PdTX1xMXF0d8fPys2OSzq6CgYNauN1lCVrSbN29m8+bNAZfji9r30ksvBcGqhYFarZ7SnGigNDc3c+LECb72ta/dt7vT09PD+++/z3PPPTfvlyLONCEr2mA1o8ZHr5hPTbO5ZLY+ByklN2/e5OLFi7zwwgtER0ff99opKSns2LGDd999l0OHDs2r8DqzzYIZPZ4uOp0u5IO7hSJSSioqKrh8+fJDBQu3XiT5+fmsWLGC9957b1ZcLOcri160Pk+n+TyKvtDwer1cuHCB6upqDh06hMFgmFTtLoRgzZo1xMXFcfz48Xk5SDQbLHrRCiHQ6XQhMam+EPB4PHz22Wf09PRw6NAhIiIiptQcV6lU7Ny5E5vNxsWLFxfly3bRixYWZq7a+Yjb7ebTTz/FarX6HSmmg1qtZv/+/TQ3N1NTU7PohKuIlluitdvtc23GgkVKicvl4oMPPkCtVvPYY48FvBRSp9Px9NNPc+HCBTo6OhaVcBXRooh2JpFS4nQ6efvtt4mLi2P37t1BcYgRQmAwGHjmmWf48MMPGRoaWjTCVUQLi8r5f7bxhTnNzs6mtLQ0qFEghBDEx8ezb98+3nnnnQWVJPxBKKLlVk2riDY4+FJ0OJ1OzGYzv/zlLykuLqakpGTGwrZkZmayefNm3n33XWw2G62trUFPnD2fCFnnimCi1WqVudogIaXk7/7u73C5XCxZsoSdO3fOeKoOX5rNoaEhvv3tb/PZZ5/x1ltvzWh+47lEqWlhQeT0mS/U1tby/e9/n//5P/9nUHy6J4vX66W8vJzvf//7VFRU8NFHHy3YPq4iWpTmcbBwu91897vfpbu7m9jYWAYHB7FarbNybSEEGzZsYOfOnWi1Wn75y18u2D6u0jxm8Q1EDQ0N0dXVFfSaqLW1lbfffpunnnqKF198kcLCQlpbWyd1bkpKCgkJCRPWylJKWlpasFgsDywjMjKSb33rWxQXF/OLX/yCDz/80B9VIxBUKhXZ2dnzJv2KIlpuiXYxTfkcP36c3t7eoK+Wsdls/MVf/IU/HvH169cndZ7FYiEqKoovfOELE+73er386Ec/Ii8vb1LlFRUV8a1vfYv+/v6gOM10dXWxa9euoKwqCwaKaFl8zWMhBCtWrCAxMdE/j+p2u9Hr9ffUdG63G4vFgsFgmNAhoqmpiZycnEnl4ens7CQlJQWn03mHv7Gv5n8QkZGRrF27FiEEHo8Hs9kM/CaY+ET+y74AfMnJyQ/9TNxuN11dXSQmJqJSqdDpdP7y9Hr9vOofK6Jl8TWPx+P1ejl27Bjh4eFs2bLFH7jOZrMhhKC/v58zZ85QUFDAunXrsNlsJCQkMDAwQGRkJDdu3PA3a/V6PQ6HA5PJhF6vJzIykqGhIeBWsIHr16+j0Wj49NNPOXz4MGFhYdOy2Zf1XaPRYLPZ0Ov17Nu3D6fTSUxMDOHh4fT19TE2NsbAwAAGgwGVSkVYWJg/omZycjIul4vh4WHi4+P90TotFgtGo5Enn3xy3kRFuZv5adUss5gXDAghSE9PZ2xsjJGRERoaGujq6qK3txe1Ws3atWsZHh7GZrPR1NTEyZMnWbNmDRaLhc2bNzMwMMCpU6eAW7VeYmIibW1tjIyM8Mwzz/Dee++Rm5tLR0cHarWapKQkEhMTA7I5KiqK9evXExYWxunTpzGZTDgcDj799FMiIiIoLi6mpaWFzMxMzp07R2xsLE1NTXg8HrxeL42NjTzyyCP+8K1Lly7lypUrjI6OEhMTg8PhwOVyzVvRKqPH3HJAd7vd86oJNFv48va0tbVhs9lobm5mcHCQ9PR00tLSUKvVpKamYrfbaW1txeVyYbPZ/AKIjo7213hdXV3odDo6OzuxWq3+6IfLli1DSukPbF5XVxdwNobxzeKYmBj6+/sZGBjAbDb7m9C+tB4mkwmXy+WPa+xz/NDr9WzevBmj0YjX
6yUqKgq32+0PVj5fmb+WzTIqlWreBRabDVQqFYWFhWRnZxMTE0NsbCwqlcrff1WpVP4wpl6vl+3bt2M0GjGbzRgMBvbt24dKpcLlcvn7gkuWLEGtVhMZGUl0dDQ6nY79+/ej0+nQarUcOnQo4HQuvlHhxMREPB4PsbGxZGZmEhYWhsFgIDExEb1eT2FhIR6Ph1WrVvlfylarlcjISH8WBSEE+/btIzw8HI/H489YMF+Zv5bNIr6g5m63e9rLxUIR30Os1+vR6/UAxMbG3nNcSkqK/2/ftMfd/dHx/4+P8+TbPn5bamrqHdefjs2+l8p4exMSEvx/P6gJ7rvX8UwUMG6+trwU0d5GrVYvaH/Vu6msrCQyMjKgMnyRIwL1KbZYLHcI7n7HXLp0aVrlSyn9Neh0WlI9PT3zyiVSEe1tfGFnFgN79+6lv78/4Jrk2rVr6HS6oDgwPEi0KpWKf/fv/t20vatcLhcfffQR+/fvn1azV6VSkZaWNq1rzwSKaG+zmGra6OhooqOjAy6nr6+PyMhI8vPzg2DV/fGNcE8Xp9NJUlISubm5055mmk8oo8e38fVpFSaPr8k53/E13xfKS1kR7W0WU00bLEJFtEIIhBALJnqjItrbKKKdOopo5wZFtLdRRDt1vF5vSIl2vk7hTBVFtLdRRDt13G53yIh2IQU6UER7m4X0Jp4tQqWmhVsOFQsltrUiWhZen2e2CJU+LdxafjlbUTRmGkW0t1lMzhWBIqVESonb7fb7bM9XfLZGRERgtVpxuVzz2t7JoDhX3Eaj0SjztJPE7Xbzx3/8x3z++efExcXxl3/5lzzyyCNzbdZ9+clPfsL3v/99nE4nzz33HH/+538+1yYFhFLT3kZxrpg8arWamJgYqqqquHnz5rxy8ZuI5ORkKioqKC8vJzMzc67NCRhFtLdRmseTRwjB3r17iYyMpLS0dNKxm+YCIQSlpaWsXr2apKQk1q9fH/LLL5Xm8W0Ww+ix7/6CcZ9r1qxh2bJlvPDCC6hUqqAM4vkGBO8mULuNRiOHDx/m7bffJjs7OyBbffbNpfAV0d4mWA/efGZwcJA333wzKCO+Xq+XvLw82tra+P73vx9weXa7nb1797J8+fJ79kkp+clPfhJQHK/R0VHS09P50Y9+NG3BSSlJTEzk4MGDczpqroj2NouhprVYLEgpWbNmzT37fOFZprJ0bdWqVWg0Gux2O+Hh4Q893pfycqJAA83NzfT29t5XtD09PQENdtntdh555JFJ3Z/D4ZhwNZDL5aKysnLO56eVPu1tFkNNC7eC2BkMBvR6Pc3NzTgcDtra2vwPoW/f+J/29naOHj3KwMDAHdujoqLQ6/VcvHjxnnMm+hFCcP36dTo7Ozlx4oQ/XaXBYHhoRniNRoNer8dgMOBwOLhy5Yp/Ubxer8fpdPKrX/2KmpqaCa9dVVV1Rxn3szE8PJyqqiosFgsff/wxo6Ojfhv1ev28mJdWatrbLIaa9m7Gxsa4fv06cXFxXLhwgfT0dKSUfPjhh2i1WlauXInNZqOnpwe32011dTVnz56lqKjIf86OHTswmUycPXsWnU7nX6d77do10tPTEULQ1tbGxo0bOXPmDNHR0QwPD6NWq2lpaZmwZp2s3T09PTQ2NqJWq1m3bh1Wq5XGxkbi4uI4d+4cmzZtoqKigoKCAhwOB+Xl5Xg8HvLy8hgaGmJoaAiLxcIjjzzCyZMnUalUOBwORkdH0Wg0fgH7wuPMF5Sa9jaLTbRCCNLS0qipqSEjI4OMjAxGR0fp6OigoaGBwcFBTp06xblz5wgPDycyMpK+vj76+/spLy+nra2NtWvXkpiYyMDAACaTifr6ej744ANsNhv9/f2UlZVhtVp59NFHGRsb80d39LVqAqm1UlNTyczM9AeTs1qtRERE4Ha7GRgYoL29ncuXLxMXF0dhYSE2m42WlhbsdjvvvPMOfX19DAwMUFlZycjICFqtlo0bNxIWFobRaPT7os+HmvVulJp2EZOSksK6deuIiIigt7cXs9nM1q1b2bZtGzExMf6ojOHh4YyMjFBSUkJXVxcpKSmkp6dTXl5ObGws+/btQ61WYzabWb16NWlpaQwODhIbG0tCQgIGg4Hs7Gx6e3vJz8/HYDDQ1NREdnb2tOyOjo5my5YtjI2NMTY2hkajITIykoGBATIzM4mJiWHjxo3k5OTQ0dFBc3Mza9asQaPRMDg4SEZGBgkJCbS1tZGWlkZ0dDTLly8nLi6OuLg4tFotBQUFjI2NTdj/n2sU0S5itFotGzZsAGDHjh3+7RMN+OTm5gKwYsUK/7a9e/cCE0c+3LNnzwO3ZWRkTNPqW8HKJ8pDNL5M3wvBZ/dEx4xv9vqC3JWWlk5o73xCEe0iw+PxzMslag9L6u31euc88bfT6ZwXXShFtIuIyMhIvF7vtEORjkdKSX19PXl5eUHp9zkcjjtq8fEIIUhOTg7Ibo/HQ0NDQ0BZ6aWUJCcnBxwyNlAU0S4i4uLi+L3f+72glOVyuXjzzTd5/vnnJzVHOxnuJyYhBF/84hcDKttms/HWW2/x8ssvB/ySmWs3SEW0i4j7uQkGUpZvNHgmCYbdvjJ8OYBCmdC2XkFhEaKIVmFa+AZk5rqpuBhRRKswLebDKOpiRRGtQkAoNe3so4hWYVooNe3coYhWISCUmnb2UUSrMC2Ugai5QxGtwpTp7+/n008/pbq6mgsXLmC32+fapAcyPDzM6dOnqa6u5uTJk4yNjc21SQGhOFcoTJmenh5+//d/n4GBAc6dO8f7778/1yY9ELPZzB/90R/R2trKJ598wkcffYTBYJhrs6aNUtMqTJn8/Hzy8/NxuVw89thj6PX6uTbpgaSkpFBSUoLL5WLLli3Ex8fPtUkBoYhWYcqEhYXx9NNPk5SUxJNPPjnX5jwUrVbLgQMHiIqK4tlnnw35frjSPF5kWK1Went7A56yyc3NZd26deh0OpqbmwMqy2AwkJSU9EAxSSkZGhpiZGRkWtfIyspi9erVxMXFTdveuLg4YmJipnVuMFFEu8ioqqri+PHjJCcnB1SO2+1mx44dnD17NqCay+12Y7PZ+PrXv45Wq73vcVJK3n33XcbGxoiIiJjydbxeL48//jjl5eXTstcXgvXFF1+c8rnBRhHtIsPr9VJQUEBhYSFSSnp7ewkPD0cIQVRUFEII2tvbqaqqYvfu3TQ1NZGcnExcXNwdD7uUkqqqKgoLCycMiToej8dDTU0NeXl5dHV1kZGR4Q9R6nA4KCsrm5TtvgBuUVFReL1empqaGBsbw2Aw4HQ6ycnJuUfQUkquXLnCunXr/NvuJ1pfHKn09HSGh4f9MagA+vr6GBwcnJSdM40i2kWMx+PhrbfeIjIykuzsbAoKClCpVKSlpVFRUUFfXx8XL14kKiqKrVu30tbWxvLly2loaCAuLo6enh6ioqKIiIjAaDRis9lobW3FYDCQl5dHU1OTfzqooaEBh8NBbW0t/f39bN68OWD7LRaLP3ib3W5ndHSU6OhopJSkp6dTW1tLQUEBbW1tpKamIoQgISGB3t5e2tr
ayM3NJSoqitraWoxGI729vdhsNpqamjCbzWg0GtLT0wO2M9gool3kJCYm0t/fD0BlZSUqlYrt27cDv+lHRkREMDw8zKlTp+jq6iI8PJycnBxGRkY4e/Ys69ev58SJEzgcDiIjI2lsbORLX/oSFRUVrFixgvr6elwuFxaLhdjYWEZHRwO2W6VSkZubS39/PyqVihs3bpCVlUVHRwfl5eUsW7aMvLw8wsPDqa+vJz4+HpVKxalTp4BbYVhra2vZs2cP7e3txMXFYTKZEELgdDoxGo2YzeaA7ZwJAh49FkKohRBXhRDv3f4/RwhxUQjRIIR4Qwihu7097Pb/Dbf3Zwd6bYXAEEKQnp7OU0895Y9HPDg46F8w3tra6g/gPTg4SFRUFNHR0QwNDdHZ2UlqaipZWVnodDrUajV6vR6LxUJ8fDxCCLKzs8nJycFut6NWq0lPT6erq2vaURjvRqVSYTQaiYiIICIiApVKxeDgIAkJCcTHx3Pz5k3MZjMrV65ESklsbCwqlQq9Xo/L5SIyMtIfeTEzM5OBgQEMBgOJiYmYTKZ5F+/YRzBq2j8CagBfeLz/DvyDlPIXQojvAr8D/Mvt38NSyqVCiJduH3c4CNdftAQaq1mlUrFlyxaEEGRkZFBcXOzfvn//fgC2bNmCSqVCCMHmzZv9MYvvjiaRlZXlt0dKiVqtJjExESEEhw4d8keNyM7ODlosYYPBwKZNmwDYsGEDarWaTZs2IaX026lSqXj88cf95xQWFiKE8N+DSqUiOTkZIQS/9Vu/5e/Dbt68eV7GPIYAa1ohRAbwJPCvt/8XwG7gyO1DfgwcvP33M7f/5/b+PSLUJ8xCHN9D6/utVqtRq9X+MDJqtRqtVusPKaPRaPy/fdt8P77/1Wo1Go3mjrJ9x/v+DmbIG9/1tVrtHdcfb+9Edo63abydvuOCaWewCbSm/Ufg/wtE3v4/HjBJKX3ZmTsAX08+HWgHkFK6hRAjt48fGF+gEOKrwFcBlixZEqB5CxtfbTeVaP0qlYqmpiasVuu0run1evF4PA+cnpkKbrd70r7LHo+HqqqqaU35SClxu93TtntkZISUlJRpnRtspi1aIcQBoE9KWS6E2BUsg6SU3wO+B1BSUqIs2nwIWq0Wl8s1adGuWLGCqKioaTera2pqsNlsd0yhBEpUVNRDs9kJIXjiiSemPe3icDg4efIkjz322LQDuyUlJU3rvGATSE27FXhaCPEEEM6tPu0/ATFCCM3t2jYD6Lx9fCeQCXQIITRANDA/Jr5CGJ1Oh9PpnHQYU6PRSFFR0bSu5XA4uHDhAi+88AKRkZGz2nz0xT6erlOI1Wrl+vXrFBUVzdu+6mSZdp9WSvkfpZQZUsps4CXghJTyC8BJ4IXbh30ZeOf23+/e/p/b+09IJfxBwERERGCz2Wb8OlJKqqurWbJkyawLVuFOZmLBwJ8DfyqEaOBWn/UHt7f/AIi/vf1Pgf8wA9dedOj1+mn3T6eC0+nk6tWrbNq0SRHsHBMU5wop5Sng1O2/m4CNExxjBw4F43oKv2E2alopJZWVleTm5mI0Gmf0WgoPR1maF+KEh4fPeOQIh8PBtWvX2LBhg1LLzgMU0YY4M908llJSXl5OYWFhSEd7WEgoog1xoqOjMZlMM1a+1WqlpqaGkpKSGbuGwtRQRBvixMTEYDKZZiQOsZSSy5cvs3LlSv9SOoW5RxFtiGM0GrFarTMiWovFQmNjI2vWrAnpvqwvIbXH48HlcoV8oHVFtCGOEIKwsLCgD0ZJKblw4YI/pEwo09bWxje/+U3+7d/+jT/90z/1L0UMVZT1tCGOEILw8HBsNltQoyKazWba2trYsWNHSNeyALGxsVy5coUrV64QHh5OZGTkw0+axyg17QLAYDBgsViCVp6Uks8//5yNGzeGfC0LEBkZyVNPPYVarebgwYNBy1w/VyiiDXF8IVQGBgYefvAkGR4epre3l6KiopCvZeE364Pz8/PZvXt3yN+T0jxeAKSmpnL9+vWglCWl5OzZs2zevPmhK29mEyklJ06coLq6elqrdGw2G5mZmXzyySccP358yud7PB42bNjAli1bpnxusJk/34rCtImPj2doaAgpZcC1yMDAAMPDwxQUFATJuuAgpaSzs5NVq1YRFRX18BPuOlcIwfbt2x/a3L/fZ9jf309HR8eUrjtTKKJdAGi1WqSUuFyugPqgXq+Xzz//nNLS0mmvOZ1ptFotWq0Wr9dLWVkZg4ODxMTEYLfbWbt27T3BxKWUfPbZZ+zcufOhC+AtFgt1dXWkpqbS3d3NqlWr/K2N+dTqmD+WKEwbX6gVh8MxbdFKKenr68Nms5GTkzPv+32+9bWtra0AmEwmqqur0Wq1/hjI165do6SkhP7+furr64FbeX06OjpoamqioKCAxMRELl++TGxsLF1dXeh0Ojo6OvB4PMTFxQUtCF0wUUS7ABBCEBsby+DgYEDTGZ999hmlpaUhsUjcd88xMTGo1WrsdjsOh4ORkRGuXLlCZ2cnGzduJCYmhsbGRhITE9HpdJw+fRqdTofD4aCjo4P9+/ej0Wjwer0YjUbsdrs/nI7T6Zzr25yQ+dkGUpgSQggyMzNpb2+ftrePr3YJpbhcGo2GxMREYmJikFISHx+Px+Nh2bJl5Ofnc+nSJYaGhtixYwd6vZ7ExEQyMzPJyspCr9eTm5tLREQE2dnZZGVl0dPTQ0JCAjk5OTgcDjIyMub6FidEqWkXCElJSf4m4FTxer2cOXOGHTt2zNu+7ETo9XrWrFkD4I9ZtWrVKv/+tWvXAnfGdlq6dOk95SQmJgLwyiuv+Lf5yg02Ho8n4JaMItoFgtFoZGxsbEqRGX20trbO2xQY45FS4nA45iTzvMPhCIrPstVqnVY0yfEool0g6PV6PB4PDodjSu6Mbrebs2fP8uijj86gdYEjhCAxMZHKyspp1VQej4fu7m7S09OnNcjmcrmCEoFyZGRkylNWd6OIdoGgUqlITU2lq6trwibgREgpaWlpmVR+2PnA/v37/ZkPporNZuOtt97ilVdemdMuwNDQELGxsQGVETodGIWHkp6eTmdn58MPvI3H4+HcuXNs27Zt3vdlfYHZp/sTrLICpb+/39+Hni7z+5tSmBLJycn09vZO6lgpJQ0NDcTExJCQkDDDlinArc+8q6sr4MReimgXEPHx8YyMjOByuR56rMfj4fz582zdunXeN4sXCmNjY0gpA14aqIh2AaFSqYiNjZ3Uip+amhpSUlKIi4ubBcsUgKDUsqCIdsHh85t9EE6nk8uXL1NaWqrUsrOElJIbN26wfPnygD9zRbQLCCEEOTk5tLS03HdO0ffwZGZmBjz1oDB5rFYrJpMpKJn3FNEuMHxZzO/Xr/Wl9/Alk1aYeaSU1NXVkZOTE5TVQopoFxgajQaDwcDIyMg9+6SUVFRUkJeXpwQen0U8Hg+VlZVBi2qpiHYBEhcXx+nTpzl27Ji/mVxRUUF1dTXXrl1j/fr1i6qWtVgs3Lhxg/b2dq5fvz6rbpBSShobG4mPjyc6OjooZSqiXUBIKTl9+j
Tf/va3+fKXv8w//dM/IaVESsmPf/xj9uzZw/nz5xkeHg752L9Tob+/n5dffpm//uu/5ktf+tKErZCZwuv1cunSpaBmG1REu8AoLi725/eJjo5GCIHT6aSzs5Pe3l5OnjxJc3PzXJs5q6Snp7Nu3TpcLhelpaUBeyRNFikltbW1xMTEBPWaimgXEEII4uPj+Zu/+RtSUlL8WdNtNhv19fXk5eXxve99j23bti2q5rFWq+Wpp54iMjKSgwcPztq9OxwOLl68GPTY0cqCgXmIw+Ggu7sbr9c7rfOTk5M5dOgQarWa5uZmBgcHcbvd/NVf/RW5ubm0tLRMuUy9Xk9ycvKsi93j8dDZ2Ynb7Q6onOzsbFasWEFiYmLALQ2dTkdaWtoD/bV92Qbz8vKCPrWmiHYe0tDQwJEjR0hLS5t2GQUFBZjNZo4fP47NZuO3fuu3sNlsnDhxYsplSSkZHBzk3//7fz/rAc6Ghob43ve+R1ZWVkDleL1e9u/fT1lZWcAvntbWVr71rW/dd12slJKBgQHq6up45ZVXgv6iU0Q7D3G5XOTl5fmjJ0gpsdvt/sj4PT09xMbG+jPAq9VqtFot4eHhdzwgvmBtRqPxoVM8Ukq6u7uJi4vD4/EQFhbmF6iUkk8//XROBq+8Xi8pKSls3rzZ3z8XQuB2u/33297ezvDwMCtXrmRsbAwhBHq9/h6xjI6O4nQ6J7VAYmBgAJ1Oh1ar9X++vvJGR0cf+Fl4PB4+/vhjdu/ePSPZBhXRhgAWi4Uf/ehHvPrqq3i9Xq5fv05hYSGdnZ1otVouX75MXFwc27dvR6vVEhcX509/2dLSQnJyMi6Xi+joaGw2G3a7HZfLRWxsLCaTibCwMLxeL1VVVeTm5nLmzBny8/PZvn37XN/6PXz++ec4HA66urp46aWX0Gg0REVFcfXqVQoLCzly5AgajcYfMjUpKQmLxYLNZmNsbIyhoSHUajVRUVE4nU7cbjdms5mEhATMZjNSSsLDw6mpqSExMZGysjJiYmLYv3//pBbf+xKXJScnz1i8LUW0IUBrayt6vZ729nZqamowmUwMDAzgdDpZvnw5IyMjaDQaKisrqa2t5YUXXuDMmTOsWbMGt9vN8ePHSUpKIiIiwi/eCxcusHPnTm7cuMHmzZuprKzEYrEQHR1NQkICfX19c33bE6JWq2lra0OtVlNWVkZ0dDQRERFoNBqklP6cRo2NjVy5coWDBw/e0bf8/PPP6enpQafTYbPZyMvL45NPPqG0tJSamho2bNhAa2sr/f396HQ6dDodVqt1UrGdpJR0dHTQ2NjIyy+/PGP9f2X0OARoaGjAZDJRW1sL3FrN41uUrVKpiIuLQwhBc3OzP5SolBKv14tGoyEuLg6VSkVjYyMej4f29nacTid2u51Vq1b5awSVSkVMTExQoivMFGFhYezZs4eUlBQ8Hg9VVVV+sfb392M0GomMjKS+vh6n0+kXsdfr9cdK1mg0tLS0YDab6e3txeFwYDabyc/P9+cvEkIQFRWFw+EgLCxsUoK1WCx89NFHPPnkkw8NjB4ISk0bAuzdu5cDBw7gcDj8iZF9OWkjIyPJyMjwByz3eDxER0cTFxeH1+slNzeXNWvW+GP4+uL5Op1OjEYjQgh0Oh2PPfYYHo+HqKgoEhMTMRqNc3zXE7NmzRq0Wi1paWkIIRgbGyMqKorU1FS0Wi0HDhzwN3HtdjvR0dHk5uZis9mIjo4mPT0dl8vF1q1bUavVCCFYtWoVBoMBlUqFSqVi586d/uN95T4ssofL5eL9999n27ZtxMfHz+gouyLaEMA3iHT3yK1PWL752PHcnR5j/IDIRKkex7vYxcfHT9vWmcZnuy94nW8E15dZYXxQO99ic51Od8dL6O77v3sU2GAw+D/zuz/HifB6vZw4cYK0tDSWLVs249NiimjnIUIIf1N2uvjcF1Uq1R1/T7es0dHRadsSCEIIOjo6uHLlSsBleb3eoMTCGt/fl1Jy8eJFHA4Hjz766KzE2lJEOw8pKCjg0KFD03auAKirq8NsNrN+/XoGBgYoKytj37590y5v165dc5KEKi4ujldffTVg5wqHw8Gnn37K/v37AxbWli1bCA8PR0pJVVUVLS0tPP/887P2+SiinYdERESwfPnyaZ8vpeTmzZs89thjpKam4na7aWlpISMjY1LNvfmERqOhsLAw4HKsVivV1dWsWLEiKLmKfGtkr169yqFDh2Z04OlulNHjBYjD4cBisfj7phqNhjVr1lBWVraoVvfMFFJKmpqaOHv2LM8++ywRERGz6t6piHYB0tfXR2xs7B1v/6KiIpqbm+dtJrhQwbc+9uTJk7zwwgtERkbOuj+2ItoFxv0CiIWFhZGTk+Of61WYOr4m8ZkzZzh06BBRUVFzslpKEe0Cw+v10tfXN2GoznXr1lFZWRnQqPRixTfodOHCBZ5//vk5DYqniHaBMTw8jE6nm3CBQExMDBqNZt66KM5XfNEnrl+/zqFDh+akSTweRbQLCN+o8f0m+IUQrF+/nqtXryoDUpPE5XJx/PhxOjs7eeGFF2Z90GkiFNEuMFpbW8nOzp5wnxCCJUuW+P1tFe6PlBKbzcavf/1rhBA89dRT6HS6ORcsKKJdUJjNZv+Su/sRFhZGamrqAwOaLzTuvs+H3beUkuHhYX7+85+TnZ3N7t27Z3Ue9mEool1AtLa2kpWV9cDawOcgf+3atVm0bG5paWnhG9/4Bt/97nf5/d///Qn79B6Ph76+PrxeL42NjRw5coQ9e/awdu3aeZcGVPGIWiD4Iv+VlpY+9Njk5GTGxsawWCwBZ3ALBWJjY7l69SoVFRWEh4ffc89SSs6dO8d//s//md/7vd/D4XDw4osv+qNZzjfm1ytEYdrY7XbMZjNJSUkPPVatVlNQUMDNmzdnwbK5JzIykieffBK1Ws0zzzxzz6qe/v5+vv3tb3Py5El++MMf8uSTTxITEzMvBQuKaBcMfX19xMfHT9qvdtmyZdTV1c37fq1vhVIgPyqViv3795OTk8OePXvuKNflcvGP//iPXLhwgaioKFwuFz09PZMqd65QmscLACkl1dXVU1pkEB8fj9vtZmRkZF4vIhgbG+Po0aMB9yutVisFBQVcvnyZq1ev+rd3dXXx4Ycf8vjjj7N8+XLy8vIoLy+nvLz8vmWpVKpZXyQwHkW0CwC3201fXx87d+6cUpMuJyeH5uZm1q5dO4PWBcbY2Bi9vb3+aIx341uyJ4TA4/H4F8PfjZTSn7VuvNgyMzPZvHnzPS8Fl8uFSqWasOXy2Wef4XK5FNEqTJ+RkRHCwsImjEhxP4QQrFixgo8++ojVq1fPuxHS8URERPj7mF6vl1OnTrF+/XqioqKoq6tDq9ViNBppaWmht7cXtVo9YfjS9vZ2BgYGKC4uRgjxwCZudXU1BoOB+vp64uPjWbt2rf+lMRNhUaeCItoQR0pJTU0NhYWFUxZeTEwMHo+HsbGxkBlFHh0dpaamhoSEBJxOJ1euXCErK4vOzk5ycnIYGBhACMGlS5fo6uri8ccf5+rVqzidTpYuX
Up9fT3d3d0sXboUk8lEeHi4P19veXk5KSkpaDQaf3bB4eFhTCYTK1eunDdztfP39aowadra2qYVY1elUpGamkpnZ+cMWDUztLe3YzKZqKqqoq+vj8zMTLq6uvzz02q1Gp1OR2dnJw0NDbS2tjI4OMjWrVsRQtDQ0IBWq+Xjjz/m6tWrjI6O0tbWRl1dHTExMWzevJnBwUEyMjL8oXrm2yiyUtOGOGNjY7hcrmkNJgkhKCoqoqysbFYCkgWDyMhIvvCFLzA6Ooper6etrY3S0lLa29v9qUM8Hg9xcXEkJyeTlpbmjwldVFTE4cOHsVgsxMbGEh4ejk6nY8OGDeTm5qLRaNBoNBQUFNDZ2Ul2djZOp9O/0GK+MH8sUZgWDQ0N5OTkTLtPGh8fz8jICB6PZ149mPfjbr/qpUuXAremsODW4NrdjJ+7nijSZG5u7h3/FxQUUFBQAMDmzZsDsncmmP/fksJ98UVRCOTBCg8PJywsDJPJNKkcN3OBx+MJOLBbMAkk4F4wUEQbwjgcDkwmEykpKdMuQwhBXl6ef5R0vjWRw8PDcbvdnD9/ftLnOJ1O6urqWLp0qX9E3ePx0NHRwZIlSwK+R19SrrlCEW0I09XVRXJycsDTNenp6VMSxWwSFRXFH/7hH076eJPJxLvvvssrr7xCSUmJ/7Ox2WwcPXqUl156KWDBCSHmtCuhiDZE8S0Q8PXlAiEhIYHh4eF5OVLqS1vyMKSUNDc3c/z4cfbs2UNOTs4d9+J2u9FoNOh0ujmtJYOBMuUTorjdbnp7e8nMzAxKc89oNDI4OBgk62YXj8fDxYsXOXPmDC+88MI9gl1oKDVtiGIymdDr9ZOqhR6GEILU1FR6enpITEwMgnWzh81m49ixY6hUKg4fPjwlr7BQRalpQxBfLKj8/Pyg1SgpKSn09PQEpazZQErJ4OAgb7zxBqmpqTzxxBOLQrCgiDYkkVLS1tYW1GZgWloa3d3d836pHty6/4aGBt566y127959x4DTYkBpHocgVqsVt9sdVH9hg8GAy+XC5XIFpck9U/imf9ra2jh8+PAdKToXC4vn9bRAkFJSX19Pbm5uUEdBhRAYDAZ/5vT5hpQSq9XKO++8g9Vq5dChQ4tSsKCINiRpbGwkLy8v6OXOV9FKKenv7+fnP/85S5cuZe/evfO6NTDTKM3jEMPpdGIymSYVC2qqxMbGYjKZprViKNh4vV7/mtebN29y7tw59u/fT1pa2oKezpkMimhDjPb2dv/KlWAihCA+Pn5ejCB7vV5+9rOfkZeXh91up7+/n5deegmj0TjXps0LlOZxiFFfX+9f2RJsEhIS5oWDxY0bN/iLv/gLvvKVrzA0NMRzzz2nCHYcimhDCJfL5V/nORPo9XqsVuucTvuMjY3xt3/7t7S1tdHS0sLFixfnzJb5itI8DiEGBweJjo6esbAnKpXKH4dpMiPTXq+XkZGRoIlcSsm7777r9x9+4okneOSRRxgdHb1vQrHo6OhFNUcLimhDioaGhhlrGgP+h9/j8UxKtIODg/yv//W/JsyFOx18OXR+93d/l9jYWFQqFadPn77v8W1tbXz7299+YO6ihUhAohVCxAD/ChQDEvgKUAu8AWQDLcCLUsphcetV+U/AE4AVeFVKeSWQ6y8mPB4Pzc3NPPPMMzM2eiqEeGiUwvF4vV4yMzPZsWPHA8+72163243Var0nMbPv/PHHOxwOPB7PHVkBfPs//fTTkPDgCjaB1rT/BHwkpXxBCKED9MB/Ao5LKb8jhPgPwH8A/hzYD+Tf/tkE/Mvt3wqTYGxsDGDCZNHBwieG6Qrhxo0bfPjhh6xevRqr1cro6Cj79+/3N+mllLjdbsbGxrh27Rpbt25FrVb7I/b7Qt54PB6klKjVajo6OhgbG6OzsxOv18tjjz02b6IizhXTFq0QIhrYAbwKIKV0Ak4hxDPArtuH/Rg4xS3RPgP8RN56Ii4IIWKEEKlSyu5pW79IkFLS1NREdnb2jPffplLT3k1xcTHNzc1s27aNX/3qV1itVmpqaqipqWHPnj20tbVhs9lYt24d9fX1WCwW4uPjcTqdJCcnc+rUKUpKSmhoaCA+Pp7IyEiuXbtGYWEhJpMJrVbL2NjYvM6IMBsE8gTkAP3AD4UQV4UQ/yqEMADJ44TYAyTf/jsdaB93fsftbfOCQB7W2aC+vj6oq3omIphlh4WFER8fT319Pf39/bS3tzMyMkJRUREajQar1YpWq6WiooKqqipGR0cZGBigqamJuLg4du3ahcViISkpye/9JKUMieBzM00gn4AGWAf8oZTyohDin7jVFPYjpZRCiCkpQQjxVeCrwKx65qjVajwez6xdbyq4XC5GR0cnjCQ439iwYQNhYWGsXr2akZERUlNT6e/vJyMjAykl7e3tJCcn8/LLL+N0Olm2bBkajQaDwYBGoyE5Odkfg2ndunW0tbWRlZVFSkoKXq93RrsHoUIgou0AOqSUvom0I9wSba+v2SuESAV8GXw7gcxx52fc3nYHUsrvAd8DKCkpmbWqbz6LtrW1dUa8oGYCX5C5/Px8/7b09N80qHyL7CcS3905hWJjY/0jw4u9STyeaT8FUsoeIUS7EGKZlLIW2ANU3/75MvCd27/fuX3Ku8A3hBC/4NYA1Mh86s+GhYXhcDjm2owJqa+vp6CgYN753AohGBoaora29o7tTqcTYMad+oeGhma0/PlKoK/uPwR+dnvkuAn4bW71k98UQvwO0Aq8ePvYD7g13dPArSmf3w7w2kHF5w003/B4PHR1dfnzqs4nYmJi2LdvHy6XC7vdTn19PdXV1SQlJVFSUjLj+YEyMzNDJgdRMAlItFLKCqBkgl33PGG3R42/Hsj1ZhK9Xs/Y2Ni8i0jY29tLTEzMvFuK5vuc4uLiuHbtGkNDQ+Tn5/Pss8+SkJDgn/NVCD7zv5M0S+h0Otxu97wT7UytnZ0OvrnUwcFBKioq6O7uJi4ujrVr15KRkRESfe6FgPIp38bnd+t2u+dNrebxeGhqauL555+f0xeJz72wtraW2tpawsPDWb16Ndu3byciImJeveQWA4pob6NWq1GpVPMqRpLZbEaj0aDX62f92lJK7HY7dXV1XL9+Ha/XS0FBAc8//zwGg0Fp/s4himhvo1arUavVOJ3OeTMX2NraGpTcM5PF52bY2NhIXV0dw8PD5ObmsnfvXv9UjSLUuUcR7W2EEMTExDA8PDwvVo14vV5qa2t55JFHZlwobreb/v5+KioqOHfuHJGRkZSUlJCamrro/XznI4pox5GWlkZXV9c9+UrnAqfTidVqnbGIg75+ak1NDXV1dRgMBlasWMHmzZvZt2/fnDTJFSaHItpxJCUl0djYOOcjyFJKOjo6SEpKCmpN5+un3rx5k+rqarxeL8uWLePQoUMYDAY8Hg8VFRVKE3ieo4h2HEajEZvNNunIDTOBx+PBbrdTU1PDihUrAhaQlBKn00l7ezuVlZWYTCby8vKUfmoIo4h2HHq93r/oeq5E29nZyVe+8hUiIyP5/d//fTIyMqY1mu12u+nr6+PatWt0
dXWRlJTEpk2b/A75CqGLItpxqFQqYmNj6e/vv8PJfTbxeDyUlZUxOjqK3W5n48aN9xWtlBKbzUZHR4ffQd9kMnH9+nXq6+uJioqiuLiYXbt2ERYWptSoCwRFtOMQQpCVlUVLS8ucBcVWq9WEhYWRnp7OX/3VX913IMoXdf9b3/qWP1ZSZ2cnDoeD5cuXc/jwYf9gkiLWhYUi2rtITU3l7Nmzc3Z9jUZDdHQ0f/qnf8rGjRsnFJyUkpqaGv7sz/6MTz75hIiICG7evMmzzz5LfHy8ItIFjiLau0hMTGR4eNgfr+h+eL1ejh07Rm9vb1BFYjabWbp0KSqVip/97GcTHmOxWDh69Ci1tbWkpqaiUqkoLy+fMD+r2+3m4MGD82LuWSE4KKK9C41G4+/XPig0qNfrpbq6+r65UcdHFpwoyuCDyl2+fLn/hTHROV6vl+Li4nu2T2RHWVkZQ0NDimgXEIpoJyA9PZ3Ozs6HxvNVq9VERUX5R5qHhoYwm81kZWXR1NSETqcjNTWV8vJy1q5de8eorZSSxsZGuru7Wbdu3T2uk06nk7KyMkpLSx9q7/DwMJ2dnURFRTEyMsKKFSv8AlacJBYeimgnIC8vj5MnT7J+/fopNX0rKytpa2ujtLSUsrIy1q9fz9mzZ+ns7ESv19PR0UF4eDgOh4O4uDj6+vowm814vV7MZjMbN27E6XRy48YNSkpK6OnpoaKigoSEBH84nMrKSlatWsXg4CBDQ0MUFRVx4cIFYmNjuXnzJkIIkpOTZySrnsL8QBHtBMTHx2O1WrHZbJOuqdxuNw0NDTidTsrLy/1+zL6VOn19ffT29jIwMEBcXByjo6OEhYVhs9kwmUw0NzcjpUSlUlFSUoJer6e6utofRtRisfDII4/Q2trK8PAwUVFRlJaWcvPmTaKiovB4PHi9XsLCwnC5XDP8CSnMJYsrCcokUalUJCQk0NfX9/CDb2Oz2UhNTWXJkiVERUVhMplISUnB6XQSERFBUlIS+fn5/p/MzEyio6P9Gdjj4+NJTk4mOzubM2fOYLVa2bNnDwaDwX9ueHg4kZGRZGRkkJWVRXR0NBkZGfT09JCYmOifW05ISJipj0ZhHqDUtBMghKCwsJCbN2+SlZU1qSZyZGQkBw4cuGd7VlbWA8/buXPnPdtWrFgBwJYtW+7Zl5OTc8f/2dnZM5ZFT2F+otS09yEtLY3e3l7cbvdDj/WltZivPwoLC6WmvQ8RERGEh4czPDw84aCOEAKdTsdHH3007Wu4XC56e3tJT0+fMYeI0dHRO5JXKYQ+imjvg0qlYsWKFVRVVbF79+57RKVSqfja17427fKdTifvvPMOe/bsYdOmTYoXk8KkUUT7AJYsWUJ5efmE62sDEZnT6eS9994jIyODTZs2LbqkyAqBoTwtDyAyMhKdTsfAwEDQynS5XLz//vskJyezefNmRbAKU0Z5Yh6AEILly5dz/fr1gMvyBU374IMPiI+Pp7S0VBGswrRQnpqHUFBQQHNz86RGkR+Ex+Phgw8+ICoqim3btimCVZg2ypPzEMLDw4mNjaWz854Ef5PG7Xbz0UcfYTQa2bFjhyJYhYBQnp5JsGrVKqqqqqY85+lLo/HJJ58QHh7Orl275iyMjcLCQRHtQ/BFs+jr68Nms03pXK/XyyeffIJareaRRx5RaliFoKA8RZNAo9GQnZ1NfX39pGtbj8fDp59+ihCCRx99VKlhFYKGItpJIIRg9erVlJeXc/78+fsuJPB4PP5ojidPnsTj8bB3715FsApBRXGumAQej4czZ87wz//8z7S0tPD2229P6NpYXV3N//t//489e/ag1WrZt2+f0iRWCDqKaCdJW1sbdXV1OJ1OTCbTPfs9Hg//9m//xr/8y79w48YNXn/9ddRqteKeqBB0lGpgEqjVar761a/yzW9+E51Ox9DQ0D3HNDY2cuTIEVQqFQMDA9TW1s6BpQqLgZCsaR0OB6Ojo7O+7OxLX/oStbW1tLW13dGv9Xq9/PCHP0Sj0fDnf/7nHDp0iOTkZPr7+2fUHpVKRUxMjJKBfZERkt/2lStXOHnyJDExMbN+7fXr1zM0NMSRI0f829xuN1arla997WtERkbOWtzkgYEBXnnlFZYuXTor11OYH4SkaH1R9JcuXYqUEofDgUqlQgiBRqO5ox/p9XrxeDwPzF/jS1LlSyh99+CRx+Oho6ODrKys+4ZD3b17t78sX2REtVqNXq+/51iLxYLVap1U8LWhoSHUajU6nQ6VSoVOp/OXV15ersSDWoSEpGjH43a7+elPf0p8fDyZmZnk5OQghCAqKoqBgQGklDQ1NbF161bsdjsjIyPExMQwODhIbGwsHo8Hs9lMRUUF3d3dlJaWEhcXh1arJTo6mqGhIaSUXL9+3R+9X6/XMzo6yujoKAkJCahUKoaHhzEYDNjtdsrLy+nt7SUiIoL9+/fjdDqJjo5GSonFYsHpdNLV1YXBYEAIQVhYGCMjI9jtdpKSknA6nYyMjKDX67l58yYxMTFcvXqVyMhInnjiCaU5vMgJ+W9fSklYWBiNjY1kZGTw9ttvU1RURFhYGJ988gm5ubkkJSUhpeTSpUt0dXWRmJjIzZs3OXjwIDdu3PD3j4eHh3E6nVy6dIn29nYOHDjAuXPnWLNmDbW1tSQmJmKxWPB6vVgsFrq6usjKyiI/P5+ysjI2bdrEpUuXsFgsjI2N4XA4sNlsfPrpp+j1etRqNdnZ2SQkJHDu3DliY2NpbGz0twaamprYvXu3P36y2+3GbDazbNkyNBoNdrsdt9utiHaRE/Kjx744vz4nhsTERLq6ugD8tWVdXd0dyZIjIiLQaDT09/eTkJDgr5FTUlIYHh6mra2NsbEx3G63P85SYmKiP3RLY2MjZrOZ0dFRRkZGUKvVrF+/nuTkZODWaLPRaCQmJoa+vj76+/sZGRkBbjXXhRCkpKQwMjKC0+mko6PDnyVvdHQUvV7Pli1b/DVxZGQkLpcLrVarOGoohH5Nq9Fo2LZtG1qtFo/HQ1paGnq9Hr1eT3p6OuHh4axcudIv4JGRESIjI8nKyiIxMRGr1UpmZiY6nY6xsTESEhIoLi5GCEF0dLS/b/rcc88BtwRZXFyMSqXCYrGg1+sJDw9HSolWq+Xxxx8H8MchjouLIzMzk7CwMNRqNVarlZiYGJ577jncbjcrVqzw95OtViuRkZEIIdBqtWzfvp2xsTFiYmJIT09Ho9EotaxC6IvW18eEWzVrSkqKf5/v7/GjzL7BH1+M4PG5X31pJePj4/3bxv/tw5fo6u5UHgBxcXH3bBs/4GQ0Gu97L3fv87187r4HhcVNSIpWSklbWxsej2dWr+nLADARvmbvbHpAdXd3z9q1FOYPISnalStX3jc7+kzR0NBAZ2cnO3bsmFCY169fx2QyzWoYmbS0NDIzM2flWgrzh5AUbUJCwqylvpBS0traSm1tLd/4xjf8fc672bhxIx9++CFOp1NZ7K4wo4T86PFMIqWkp6eHjz/+mIMHD95XsHCrP/3EE09gt9s5ceIEHo9Hie6vMCMoor0PUko
GBwd57733ePbZZ4mNjX1of1WtVrNv3z7cbjeffvopXq9XEa5C0FFEOwFSSkZGRnj77bfZt28fiYmJkxpgEkKgVqvZu3cvUko++eSTWR0sU1gcKKKdAKvVyjvvvMOuXbvIyMiY8oiwRqPxO3t8/PHHAYdfVVAYjyLau3A4HLz11luUlJSQl5c37SkctVrNo48+SlhYGB9++KHfu0pBIVAU0Y7D6XTy9ttvs2LFCpYvXx7wnKsQgt27d2MwGPjggw+UprJCUFBEexuXy8WHH35IRkYG69atC4qThBAClUrFrl27iIqK4r333sPpdAbBWoXFzKIWrc/Lyev1cvz4cYxGI5s3bw66V5NKpWLHjh3Ex8fz61//GqfTqSR9Vpg2i1a0vjWyra2tnD59GrfbPaNOESqViq1bt5KSksKvf/1rbDYbFRUVSpNZYcosWtG6XC7++q//mmeeeYaqqioef/zxGfdiUqlUbNmyhcTERL75zW9y6NAhampqlNpWYUosStFKKamsrOTkyZNUVVVx5MiRCSMszhStra389Kc/pampiR/84AdKbaswJUJGtOP7gMH4+elPf4rD4eCpp57ir//6r4mPj5/UeYHaB7cSen35y18mJSWFI0eOUFtbG/T7U/rMC5eQWTBgMpk4ceJEUFbQjI6OcubMGb7whS+wc+dOTCYTv/71rx94jl6v55FHHrnv6qLa2lpu3Lgxaft27dpFWloav/rVr/iHf/gHnnjiiaAOgHm9XgoKCiguLg5amQrzg5ARbX9/P21tbaxatSrgsgwGA//9v//3SQtMSkl5eTmlpaX3Fe21a9eAqS1W37hxIxs3bpyRuE8mk4nKykpFtAuQkBGtL/yLLw7T8PAwg4ODaLVavF4vsbGxREdH31Nb1dXVkZmZSURExEOvMTQ0hMViQa1Wk5CQQFhYGHCr1nrY+SqViri4OH8QOV/8p4n8lq1WKz09PaSkpPgjU4zn2rVrrFy58qE1r8vlorGxkfT0dCwWCykpKf5zNBqNskh+gRIyor0bq9XK+++/T1FRES0tLcTGxrJ7927a29spLCzEarXS2dlJd3c3YWFhGI1GIiIicLvddHR0oNPpyM7Opquri7GxMVJTU7l27RoDAwNYrVby8vLYunXrtGxzu92cOHHCL0qXy+UP36pWq7FYLFRUVJCRkUFRURFms5ns7GwaGxtJTU2lubmZ5ORkhBDExMQwNjZGZ2cncXFxJCQk0NraikqlYmxsjLa2NmJjYzlx4gSHDh2a9eAACrNPyIo2PT2dvLw8CgoKqKioICIiApPJxKefforNZqO3t5eioiKsVisnT55ky5YtVFZW4nA4MBqN1NXV8aUvfYlz585RUlJCeXk5IyMjmEwm0tLSGB4enrZtarWawsJCent7GRwcpLm5mba2NsxmMxqNhuXLlzMwMEBKSgqdnZ2cPHmS4uJijEYjS5Ysobu7mwsXLiClZGxsjIKCAhobGxkaGuL555+nrKyM3NxcOjs7/cHjJopNpbAwCZnR44nwhUKNjIxEp9PR19fnr2kiIyNpbGz0R15Uq9X+1Ta+iIxSSn+Ac6PRyOjoKBkZGXR3dwcUxkVKyejoKJ2dnahUKlpbW7Hb7SxZsoSMjAzCwsL8QeR6enowGo3o9Xp6enoYHR1lyZIlREZG+u01Go0MDw8TERGBEIL8/HxycnKwWCzodDp6e3uprq5WmsOLhJCtaQF27tyJSqXi1VdfRaPRoFKpWLt2rT8FiMvluiNNSFZWFkII/+J0rVZLcnIyKpWKkpISVq9ejUajeWgakYehUqlYv349a9euRafT8dJLL/n9kH34wrr63Cg1Go3fXl/f1BcsTqVSkZub6097Eh8fj0ql4vDhw6hUKlQqFb//+7+vNI0XCSElWl8kfh9CCKSUdzys48V2t/B8ovF5PvnmML1e7x3HazQapJT+a002AsV4+3xi8gnybnw2jxfz/eyVUvrLGG/z+HJ9g2ZTtVkh9AgZ0YaFhdHd3c2ZM2emdf7AwACxsbHTclX0CfhB50ZFRVFRUeGPiTxdfKlEAo1z7HA4KCgoCKgMhflJyIg2IyODP/uzP5t27fHaa69x+PDhaTchVSrVAwX5yCOPTHu0eTytra3U19fz6KOPBlyW0lxemISMaFUq1YRzmpPBJ/To6OgZi0ms1WoD6gf7iIiIQKfTTZgiU0EBQnz0eLJ4vV5//tr5jkqlUvqiCg9kUYjWNyobCvgG1xQU7seiEW2o9O98I84KCvdj0Yg2GP3N2UCpaRUexqIQrdPpDBnRKjWtwsNYFKJValqFhYQi2nmGUtMqPIwFLVopJd3d3TQ3N2OxWLBarfO6FrNarfT19dHf3097e7uSTkRhQkJjHiQAjhw5wne+8x1UKhVVVVX81V/91bydr7169Spf/epXGRwc5JNPPuHHP/4xkZGRc22WwjxjwYt29erVmM1mPB4Pu3fvnrUs7dNh+fLlhIWF0dfXR2FhIUajca5NUpiHLGjRCiFYsWIFGRkZJCcnU1JSMtcmPZDo6Gieeuop6urqOHDgwLxtESjMLQGJVgjxJ8DvAhKoAn4bSAV+AcQD5cAXpZROIUQY8BNgPTAIHJZStjzsGna7HZfLNW0bfQHC16xZg9frxWw2T6sctVrtX4R+N263G7vdHpT+8qOPPsqJEyfIysqatq3jEUKg1+vndQtDYWpMW7RCiHTg/wMsl1LahBBvAi8BTwD/IKX8hRDiu8DvAP9y+/ewlHKpEOIl4L8Dhx92nR/96Ef+MC3TJSYmhpGREf71X/91WudLKbFarfzJn/wJBoPhnv3Xrl3j3Xff9UejCASXy8WaNWt44403glLT9vT08PWvf50lS5YEXJbC/CDQ5rEGiBBCuAA90A3sBl65vf/HwH/mlmifuf03wBHgn4UQQj6kevJ6vezcudMfDdF3+PgHWkp53wd8dHTUn6PnYSJwOBx4PJ57alSv18vp06fvOxXjcDhYsWIFhYWF9+yz2Wyo1eoJ3SgnuhffdiklZrN5Ui8Cl8uF0+nEYDDc81mcP38+oJaKwvxj2m0mKWUn8HdAG7fEOsKt5rBJSumbq+gA0m//nQ603z7Xffv4+LvLFUJ8VQhRJoQo6+/v921DCMHg4CC//vWveeeddxgeHsbtdjMwMMD3v/99jh8/jtfr9T+gUkrcbjdXrlxhbGxsvN243e47Ijz4Ej53dXVx8eJFXnvtNRoaGvzXnUyN5zvO6/Vy6dIl2traqK+vx+Vy+cXpdrv9wvd6vZw6dYof/ehHdHR0+I/zLbh3uVxcuHDBH4HC9+M7brzdg4OD3Lhxg88++4y3334bh8MxJdsVQotAmsex3Ko9cwAT8EtgX6AGSSm/B3wPoKSk5I5a2Ol00t3dzdDQEEajkZqaGoxGI3a7nb6+Ptra2jh27BiPPPIInZ2dWCwWvF4vZ8+eJSsri7GxMRwOB4ODg1gsFp566ik++eQTf8zkyspKVCoVqamp1NXVkZ+fP2X7VSoVVquV+vp6li9fzvHjx9mwYQNOp5NPPvmEsLAw1q9fj8
lkYmBgAIfDQUNDAx9++CGbN2/GbrfT2dnJ9u3b6ejo4NixYxiNRoQQJCQk8Pnnn1NUVMTAwAAqlYolS5Zw6dIlsrKyGB4eRqfTMTg4SHp6+sONVQhJAhmdeBRollL2SyldwFFgKxAjhPC9DDKAztt/dwKZALf3R3NrQGpKZGVlUVRUBEBqaipCCOLi4vB4PHR3d9PX10drayvDw8OsWLECIQQmkwmDwUBFRQVXrlxhYGCA5uZmzGYzMTEx7Nq1C7PZTHJyMuHh4YyNjU17wb0QgvT0dJqamoiPjycmJoa+vj56enoYHh4mLCyMzz//nObmZgwGA8nJyTQ1NTEwMEBjYyODg4MUFRURHh7udwa5efMmZWVl2Gw2+vv7qaurIyIigj179jA2NkZycrI/sJ3b7Q6ZFU0K0yOQPm0bsFkIoQdswB6gDDgJvMCtEeQvA+/cPv7d2/+fv73/xMP6s3cTGxvL5s2b/QHUXC4XLpeL7u5ukpKSiI6OJjw8nMzMTKSUtLe3s27dOn9/eN++ff6a0OVyER8fz9q1a9FoNKxbt47W1lZSU1Pp7u4mLy9v2h9MVlYWe/fuRaVS+WvJNWvWoNPpSEhIQKVSodPpcDgc9PT0sGnTJnp6ekhLSyM8PJyGhgYAXnzxRbxeL0VFRWg0GqKjo9m7dy+JiYn+CBfFxcU0NDSQkZGBy+VidHSU+Ph7eh0KC4hpi1ZKeVEIcQS4AriBq9xq1r4P/EII8f+7ve0Ht0/5AfCaEKIBGOLWSPOUiIiImDA9R3Z2tv/vhIQE/99JSUl3HDdRQG9fjRobG0tsbCyAP/XIdAkLCyMnJwe4la/Hx4YNG+451mf7+Oas77iJnCvWr19/x/+RkZGsXbvW/39aWtr0DVcICQIaPZZS/hXwV3dtbgI2TnCsHTg01Wt4PB66urr8IUKnw/1GaSeL1+vFarU+8JiBgQE6OjqmVf7dPGg0fKoMDQ0pg1ELjHnvEbVjxw5/+ovpUl9f7282T5etW7feNxrj0qVLMZlMQVudc/r0abZt2xaUzPQrV668p8WhENrMe9GuXr2a1atXB1TGlStXcDqdbN68OUhW3UliYiL79+8PWnm9vb08+uijk8r0p7D4WBS+bXq9ft4vyxuPVqtVluUp3JdFIVqdTofT6ZxrMyaNL6+PgsJEKKKdh2i1WkW0CvdlUYg2LCwMh8Mx12ZMGkW0Cg9CEe08ROnTKjwIRbTzEKVPq/AgFoVodTpdSIlAaR4rPIhFIVohBFqtFofDERLhSX2i9S3HU1AYz7x3rggUu93OL37xCz766CPq6up45ZVXAnbWmCm8Xi8fffQR77xza43FE088wVNPPaW4ISrcwYIXrRCC48eP89ZbbxETE8Orr7461ybdFyEE7e3t/PCHP0RKSUlJiSJYhXtY8M1jnU7HF7/4RSIiIigqKprXq2CEEOzbt4+0tDRiYmLYuXOnIlqFe1gUNe2WLVvYtGkTK1eunPfBvzMyMti/fz+tra0BLXBQWLiEjGg9Hg/nz5+f1lIzKSUFBQVERETw/vvvT2vpm6+5+qCaenBwkM8//zzgcKXJyclYrVaOHTsWUE0rpSQ2NpZt27YpNfYCIqREe+HCBYqLi6f1AD755JP+UeTp0NbWRkNDwwNF29raSltbGwUFBfc9ZjIvjA0bNrBu3bqA1hD7OHv2LKWlpUFZ5qcwPwgZ0cItJ4m0tDRUKhV2u51Lly75Q4cKIVi7du09D7rFYqG2tpZ169YBD14I393dzdjYGEII3G43+fn5/lrTarVO6mURHx/vj0LR2NiI0WjEarX6I1RcunQJKSXr1q3j0qVLJCYm3nEduCXs6upqoqKi7ojEMRFSSiorK8nKyqKpqYmsrCz/Ob5yFBYWISXa8Wi1WsLCwrDb7bS0tCCEID4+nvb2dpKSksjJyeHChQvExcXR1dXlD/6WmJhIS0sL7e3tbNy4EZfLRVVVFSkpKf5YSy0tLWg0GtLS0gLqA587dw4hBGFhYXi9XsLCwsjKyqKurg6bzUZ9fT1tbW0MDg76m99Xr171O4NYrVZ/zd3X14fFYmFoaIji4mIqKyvJyMjAbDZz8+ZNAJqamuju7ubAgQPB+pgV5iEhK1q1Wk18fDwajYahoSFsNht2u52enh4qKyt59NFH0ev1ZGRk8O6775KXl0drayvHjx8nNjaWrq4u4FZEx5SUFHp7e4mOjvb7/Ppq20BISEhgaGiI8PBwamtrMRgMZGRk+B0mfGt8BwYGqKqqIikpif7+fh599FHKysq4evUqy5cv54033iA2Npb4+HjOnj3rz7aQlJREbW0ter0ep9NJWFhYSK1mUpgeISta+E1QtujoaJxOJ3q9HiEEeXl5pKSkcOLECcLCwti3b58/RKparfb7IiclJREXF4fRaESn01FRUcGGDRuw2+243e6AR5pTUlLYunUrjY2NDA8P09fXR1FRESaTCbPZjF6vJyoqCo/HQ35+PlFRUcCtSBtJSUns2rWLoaEh0tPTSUpKYnh4mLy8PGJiYoiIiCAqKsq/7HDJkiU0NjaycuXKgD9XhflNSIvWNyi0dOlS/7bxoU8PH35oqiA/iYmJLF++HGBaQconwhclcXy0RIBnnnkGgN/+7d++55ynnnpqStfYt+838eFffvnlqZqoEIIseOcKBYWFRsjUtEIIPB4Pn3zyScBzji6Xi6GhIZKSkiZdlsVi4YknnnjgMREREdTU1NDd3R2QfRPh9Xppa2sjNTV10lNBUkolxeUCJGREq9Fo+PrXv+5PnBUIbrebN998k02bNk06k8Bk5niXLVvGt7/97RlZmSOlpKqqivLycvbu3UtGRsakzlOr1YpwFxghI1qfaKbrHDEeKSXPPfccR44cISUlhbi4uKB4DKlUqqA4RNyPDRs2kJ2dzQcffEB+fj4bN24MyuehEFosylewEILo6Gj27NnDBx98EDLTJEIIEhMTeeWVV7DZbLz55puYTCZlze0iY1GKFm4JIDs7m2XLlvHJJ5+ExOJ4uGW3Tqdjz549bNmyhTfeeIPq6uqQsV8hcBataOGWANavX4/X66W8vDykaiwhBDk5OXzhC1+gpqaG999/H5vNFlL3oDA9FrVo4dZAzb59+7hx4wYtLS0h9dALITAajRw8eJC0tDRef/11Ojo6QuoeFKbOohct3Foo/9RTT3Hs2DFGR0dD7qH35dc9cOAAJ06c4Ny5c7jd7pC7D4XJoYgW/Nnkd+/ezbvvvhuSkRCFECQlJfHyyy8zNjbGL3/5S0ZGRhThLkAU0d7G57OcnZ3N8ePHQ3JgxzdItXfvXkpKSjhy5Ag3b94MyXtRuD+KaMchhKC0tBSr1UpFRQVutzuksu35EEKwdOlSDh8+TFVVFR999BE2m+2B57hcLjo6OhgcHKStrQ273T5L1ipMFUW0d6FWq3niiScoKyvjb/7mb/jOd74TcqKF3wxSPf/88yQmJvL666/T1dVFe3s7V65cueeeTCYTL7/8Mn/5l3/JM888Q3Nz8xxZrvAwFNFOgM1m49y5c/zt3/4tb731F
oODg3Nt0rQQQqBWqykpKeHJJ5/k17/+Nb/3e7/HV7/61XtGmePj4ykqKmJkZIT09HRyc3Pn0HKFB6GIdgJUKhVJSUlER0fT0NBAWVnZXJsUEEIIkpOTcbvdHDt2jKtXr/K3f/u3d3iCCSE4ePAgBoOBZ599Fp1ON4cWKzyIkPE9DgZSSlwu10PdFjUaDf/xP/5Htm7dyn/9r/+VI0eOsHXr1qA43ut0uqALQkqJzWZ74ICT1+slPT2dV155hbNnz/LLX/6S0tJSDh486Pe7XrVqFatXr2bz5s2MjY0FZJMQgoiICGWxwgywqEQLcPToUTo6OiblaC+l5KmnnqKpqYnvfe97ATvnOxwO8vPzefbZZwMqZ6Jy//7v/x6j0fjQhQ+rV69m6dKltLa2cvLkSfr7+/2RGr1eL6tWreL06dOcPXs2IJsGBwf5oz/6I+Lj4wMqR+FeFp1obTYb27dvx2AwIKVkbGwMr9eLWq3G6/ViMBjuqR08Hg+dnZ0sWbLkoeVLKf2B5ADCw8P9QhoeHqatrS3o9+T1etHr9ezevRuVSoWUEqfTiUajQa1WYzabcTqdREZGMjAwQGxsrP+cu0W+bds2TCYTqampD73u6Ogobrcbg8EA3GpF+Mo7depUUJZRKtzLohPteNxuNydPnqSpqYno6Gg8Hg/btm0jLi4OrVZLVFQUw8PDeL1eqqqq/LWGXq/HYrFgNpuJi4tDpVJhMpkwGAzY7XYqKipwOBzY7XYOHDjgj/00W3i9Xo4ePcqaNWvIzc2lqakJj8dDTEwMzc3NDAwMYLfb2bFjB+Hh4SQmJuJ0OhkdHUWlUlFXV0dERARGoxGPx4Pb7WZkZISEhASsVisulwuj0Uh9fT0ajYba2lo0Gg0HDhxQ+sKzwKIWrVarZdOmTURHRzM8PExDQwMOh4OLFy/S3t7OgQMHOHv2LGvWrKGuro7ExEQsFgtSSsxmM11dXWRnZ7N06VIuX77Mpk2buHz5MqOjo/7AayaTadZFa7VacTqddHR0MDAwQHNzMxqNBq/XS2xsLGNjY5jNZtrb2/2L6puamkhMTCQ7O5uysjLMZjMajQaHw0FhYSEffvgh69ato7W1laKiIkZHR+no6CA3NxePx4MQArvdroh2Flj0owRCCFQqFTqdjtTUVH8Tdmxs7A53xvj4eL8YGxoaGB0dZXR0lOHhYf+0SkpKCnArqHpYWBhWq3VOcgd1dHTQ09PDzZs38Xg8qFQqhBD+e9Xr9cTHx1NXV4fdbr/D3dHnDqnRaGhvb2dgYIDh4WGsVivDw8NkZmaydu1af1m+pjGgCHaWWNQ1LUBsbCzr16/H5XJhNptJSEhgxYoV/oXyUVFRqNVqnn/+eaSUaDQaiouLUalUmM1mDAYD4eHhSCnRarU8/vjjwK0H2OFwEBMTM+v3lJOTw5/8yZ/gcDjQarWMjo5iMBiwWCwYjUYcDgcOh4PIyEjGxsaIiopCSsno6ChxcXG88MIL/gUHKpUKjUZDdnY2RqMRlUqFSqVi8+bNmM1moqOj/XO6Mxm1I1Cmk79pvrLoRavRaNBoNP7mLNz58E2UliM8PBzgjlrGh28ACsBoNAbb3Enhs89X891t73i7x9voiyM9Eb5WhI+IiAgiIiKA+S1WH16v19/aCHUWnWjdbjednZ3+B24myler1RM+HKOjozM2omqxWOjo6JjVh9LXl51oLtZkMs2aHZPBN0OwEFh0ot2yZQutra0z4k/s9Xq5du0a3d3drFixgoyMjDseFKPRyOrVq4N+XZ1Ox9atW3E4HLPqJz0wMMCNGzfweDwUFxeTmJjov9/169fPq1zALpfrvi/TUGNRiVYIQXFxMcXFxTN2jf3799Pd3c2lS5cYHh5m3bp15Ofn3zGHGWw0Gg179uyZkbIfhJQSh8NBa2srFRUVmEwmiouLKSgomHAOeC5xuVz+HEihzsK4i3mEWq0mIyODtLQ0TCYTFy9e5OLFiyxfvpw1a9bc4WwR6gghCA8PZ9myZeTn5zM6OkpFRQVvvPEGaWlprFmzhuTkZP+xc4nZbCYyMnLO7QgGYj4vOyspKZGh7qwvpfSvz62uriYrK4tNmzb5B70WwkM0HiklHo+HxsZGrl69isfjYcOGDeTm5s5p8/TcuXPo9XrWrFkzJ9efKkKIcillyYT7FNHODj7XwtraWsrLy4mJiaG0tJSEhIQFM0ByN16vl76+PsrKyhgcHGTFihUUFxcTFhY2q+KVUvLuu+9SUlLiT/g933mQaJXm8SzhSy69atUqioqKaG9v58SJE0gp2bRpE9nZ2QtOvCqVipSUFJ588klGR0cpLy/nZz/7GcuXL2fVqlWz1u+VUmKxWGbdM22mUEQ7B2i1WnJycsjOzqa3t5eLFy9y+vRp1q9fz/Lly9FoNAuq2exzVHnkkUfYsmULFRUVvP766/7UJhERETN6v263G4/Hs2A8thTRzhG+if6UlBSeeeYZTCYTZWVlXLp0yT9oNd9GYAPFt8Z28+bNrFu3joqKCn7+85+zbNky1q9fP2ODdHa7HbVavWBEq/Rp5xG+pYLXr1/n+vXrLFmyhJKSEmJjYxeUeH1IKbHb7Vy9epUbN26wZs0aVq1aFXQPK1/60d27dwe13JlEGYgKMXyDVvX19ZSXlxMZGcnmzZv9a1wXmoB9I+wXL16kpaWFLVu2sGzZsqC4HUop+fjjj1m6dClLly4NksUzjzIQFWL4Bq1WrFhBUVERra2tfPbZZ3g8HjZt2kROTo5/5c741TmhihACg8HAI488wsjICCdPnqSsrIy9e/eSnJwc8L0NDg6yefPmIFk79yiincf4oinm5uaSk5NDb28vly5d4syZM6xbt45ly5ahVqspKytj/fr1Id9nE0IQExPDwYMH6ejo4NixYyQlJbF9+/ZpD1b5ghEslJFjUNbThgy+QaunnnqKZ599loGBAV577TVee+01Xn75Zf7v//2/CybAuBCCjIwMXnrpJRISEvj5z39OdXX1tBZbtLe3k5aWtqACzC2cO1kk+Gqj3bt38/LLL/PRRx/R1tbGt7/9bf7xH/9xQQlXq9Wybt06Dh06RG1tLW+//faU8xM1NTWRk5Mzg5bOPkrzOASwWCycP38et9t9x/aRkRGqq6uJjY3F5XLxP/7H/8BsNrN169YZ7+NGRUWxcePGgCNUPgwhBFFRUTz77LPU1tby5ptvsnHjRlauXPnQ2tPj8dDR0cGOHTtm1MbZRhFtCNDb20tlZeU9q5NiYmL4u7/7O3/N4/s9001BKSWff/45q1atmnHR+hBCsGzZMjIy/v/tnXtwlMe5p5+eq0ZCGg26gSR0QSBAwmBAYIOxDRgIgZPg6zGOk5Pspip14pyqs+Wt2o1zqs7J7lbW67NVx7tJJangnNjZwpc4ju0QxzbmYoPBBiSbOwJdQBISGkkIaUYaaTS33j80MxYgISTN5fugn6qp+aa/Vs87qvlNd7/d/b6F7NmzhwsXLrBhwwbS0tLG/IHq7u6ORha5nVCi1QkOh4PCwkKklPT29mKxWAgGg9GTK1euXOHo
0aNs2LCB9vZ2pk2bNur6bmNjI3l5eeNG1ZBS0tDQQEFBAS6XC4fDEf3yh0IhGhoa4vZZxyKSn+ib3/wmZ8+e5c0332TdunUUFxePKtyLFy/edkNjUKLVJW+//TaZmZmYzWbuu+8+jEZjNM5Tb28vu3btIiMjgw0bNtDS0sLcuXPp6OjAaDTS3t4OgM/ni3qb29raMBgMzJ49G6fTicvlwmazcebMGXw+HzU1NRQXF7NmzZokfuqvMBgMVFZWMnPmTP76179SWlrKqlWrrtm7HQwGOX/+PFu3btX1cthoKEeUDsnKyoqK8OjRo5w8eRKPxxMdFvf09DA4OIjb7Wbfvn3R/D2RYHP79u2ju7ubDz74gHfeeYfa2lr+9Kc/4XQ6OXjwIIODg9TU1ODxeHC5XGRlZWkufEwkEfi2bdsYHBzk7bffviYtqcvlwmg0aip6RqxQotUhDoeDrVu34nA4GBwcpLm5ORo18fz582RkZGCxWKIpP8xmMz6fj46ODtLT0ykrKyMUCkUjLrrdbqZNm0YoFKKwsJD58+cjpSQUCjFjxgw6OjrIz89P9se+gUgS7fXr11NeXs4f//hHrly5gpSSM2fOMH/+/NuulwU1PNYlkWRgeXl5hEIhpJRYLBbWrl1LMBhk6dKlGI3GaLhXs9kcXeOMhFyRUkZDn0YEGon9bDQaefjhh6NhYZ9++umEOZwmg8FgYNGiRUyfPp133nmHtWvXcuHCBR555BElWkXyiAgLvvIOR2IQR+5HXo8UWGTz/fUe5bG+zJFcQCPjKUXmvpH31+J+9ciGjCeffJIdO3bg8XiSFsI23ijR6gCLxUJraysDAwNxfR8pJVevXsVgMESDtI9Vb3BwUHO7jCJrukVFRZw5c4ajR4+yYsUKzdk5VZRodUB+fj7PPvvsTfPPxoru7m4+/vhjvF4vVVVVlJWVjTo0NplMNw1uniwic/dnnnmGPXv2sH//fh588MHbSrjqaJ7iBkKhEFevXuXIkSM4nU4qKytZvHixLiJJnjp1CqfTyfr16wkEAuzatYvU1FQefPBBXYXzudnRvNvn50cRMwwGA1lZWWzevJknn3wSv9/Pa6+9xp49e+jt7Y06v7RGKBTixIkT0QRhZrOZTZs24fF4+PTTTxMyUkkESrSKUYkcQJ82bRqrV6/m6aefJjc3lz//+c+8++67XL58WXNJozs7OzEYDNfkUzKZTGzatCkag1qLPzYTRc1pFeMSCUq+aNEiKisruXTpEgcPHsTv91NVVcWcOXOSnnJDSklNTQ1VVVU32GE2m/n617/O22+/zbRp01i4cKHmh/k3Q/W0iltGCBFNe/nEE0/wta99jYaGBn7/+99TXV2N1+tNWk/W399PV1cXpaWlowrSarWydetWqquraWlp0XWPq0SrmDCRTHk5OTls2bKFJ554gqGhIXbs2BGd9yZSFFJKjh07xsKFC2+aryc1NZWHH36Yjz76iJ6eHt0KV4lWMSUia6OrV6/m29/+Nnl5eezcuZN3332Xtra2hDh/fD4fdXV1VFRUjDvsdTgcPPTQQ7z//vv4fL642xYP1JxWERMi89677rqLiooKWlpaosHoli9fzpw5c6LB6GKJlJKTJ08yZ86cW1o3FkJQWlpKW1sbn3zyCRs3btTd/Fb1tIqYEpn3lpaWsm3bNjZu3EhDQwOvvPIKNTU1MZv3Sinp6Oigt7eXEydOjOqAupmNK1eu5OrVqzQ2NupumKxEq4gLkSWj3NxcNm/ezOOPP87AwACvvvoq+/btu2ZOGQgEbgilMx5SSn7961+zefNmnE7nhHvLyFLQJ598EvftobFmXNEKIX4nhOgUQpweUTZdCLFbCFEffnaEy4UQ4udCiAYhxEkhxNIRf/PdcP16IcR34/NxFFokksvngQce4OmnnyY7Ozu63tvW1sYHH3zAr371K4aGhm65zWAwyIkTJzh8+DC/+MUv+PzzzydsV2ZmJkuXLuXAgQP62nghpbzpA3gAWAqcHlH2r8CPw9c/Bl4IX28GPgAEcC9wJFw+HbgQfnaErx3jvfeyZcuk4vYjFArJQCAgGxoa5MsvvyyrqqpkWlqa/OlPfyoHBgZkKBQat42BgQG5ePFimZ6eLn/5y19Kn883KVv8fr/csWOHvHTp0i29b6IAauQYuhjXESWlPCCEKLmueCuwJnz9e+AT4L+Gy/9f+E0PCyEyhRAzw3V3SymvAgghdgObgNdv/edFoWeCweAN3tr8/HyKioq4cOECAwMDvPDCC/T19fGTn/wEm8120/Y6OzsBeP755/nOd74zqSG2yWTCZDKxbt069uzZw7e+9S1d7E+erPc4T0rZHr52Annh6wLg0oh6reGyscpvQAjxA+AHAEVFRZM0T6E1Dh8+zIEDB0hLS7um3OVy8dhjj+H1evH5fLS2tvLiiy+SnZ1903mqy+Vi5cqV+Hw+Xn755Qnb4/P5KCgoYNu2beTl5eFwOKirq9NFtIspL/lIKaUQImbuNynldmA7DJ/yiVW7iuTi8XiorKykuLgY+OogvRCCUChEf38/6enp+P1+/H5/dPlmNAH19vZit9vHvB9hZLsRIvXdbjf19fXA8AGJVatWsXPnTsrLyzXf207We9wRHvYSfu4Ml7cBs0bUKwyXjVWuuIMwGo3RR1tbGx999BEGgwG/309NTQ1CCLq6uti1axcvvfQSp06dAr6KyhEKhRBC8Nlnn0V3ZUXWfkdG9YjM/SKRJBsbG3njjTfo6em5xoaRgnc4HNjtdpqbm5Pyv5kIk+1pdwLfBf5X+PnPI8r/QQjxBnAP4JJStgshdgH/M+JlBjYCz03ebIWekeGYyk6nMypSg8HAyZMno+INBAI4nU5CoRDHjx9n2bJl1NbWct9999HX18fBgwdJTU3FarXS3d2N0+kkNTWV+++/n7/85S/MmDEDp9OJ1WolGAySlZXFhQsXyMnJGdUmIQTLli2jurp6zP3LWmFc0QohXmfYkZQthGgF/oVhsb4phPg+0Az8bbj6+wx7kBuAAeA/AEgprwoh/gdQHa733yNOKcWdh8/n49SpU/T399PY2EhGRgZDQ0P09vYyY8YMurq6yMvLw+Px0NraSltbG3a7nVmzZpGVlYXP56O3t5fc3Fz27duHEIJgMIjX6+Wee+6hpKSE4uJivF4vgUAAq9VKT0/PuDumCgoK2Lt3r+bjS92K9/ipMW49NEpdCfxojHZ+B/xuQtYpbksMBgOPP/44VqsVo9FIWloa6enpOBwOLl68SEVFBe3t7ZSVleHz+SgpKaGgoIDm5mY8Hg8bN27EbDZjsVjYsmULZrOZK1euYLPZyMzMZOHChWRkZOByucjMzIy2O2/evHHtmjlzJpcvX6a8vDxB/42Jo/YeKxKO2WymsLAw+nrkofUlS5YAUFZWFi2LOK+ys7NvaCvikCoo+GoxIpK+ZPHixdGykddjEckXdPbsWebOnavZIbISrSIhhEKhWxqiJor+/v5R13Vzc3M5ePAgoVBIs15kJVpFQpg3bx59fX10d3dPqZ2BgQHOnDnD8uXLp9S
OlDLaq48kEifa5/ONu8EjWSjRKhJCSUkJJSUlU26nv78fg8HAY489NnWjuHGdN7IcpESruOOJ1fzQZDJFcxDFI5ZxJLFXd3d3dL6sNdTRPIWuiIg2njgcDs1lCRyJEq1CV5hMpriHbk1NTdX0GVslWoWuEEJgNBrj2tvabDYGBwfj1v5UUaJV6A6r1TqhA/MTJbLPWaso0Sp0R7xFK6XU7MYKUKJV6JCUlBS8Xm/c2g8Gg5rdWAFKtAodEu+ednBwULNrtKBEq9ARwWCQ/fv3s3v3bn7zm9+wd+/euIQ/7e/v1/cpH4VCKwghOHToED//+c8RQrBw4cK4vI/b7SY/Pz8ubccC1dMqdIPBYOCRRx7B4XCQkZHBypUr45KxoLe3F4fDMX7lJKF6WoWuKCsrY+PGjXR2dl5zHC9WhEIhAoEAFosl5m3HCiVaRcKRUlJbWzvpEz8VFRWYTCa++OKLSdswZ84cZs6ceUO53+8fji18k+x7yUa7liluW0KhEB9++CGzZs2a1NLK7NmzKS4upqura1Lv39XVhdvtZsuWLTfc83q9mEwmzGbzpNpOBEq0iqRgtVqZO3cuJpOJoaGhaFLq9PR0QqEQS5YsiUagiDA4OMjp06epqqoCbn5yKBgM8sUXX5CVlYXf76e8vDx6Kshms43pdXY6neTl5Y16Tyso0SqSjslkIiUlBbfbTVNTE1JKHA4HHR0dOBwO5s6dy+HDh3E4HLS3t9PU1EQoFCI3N5empiZaWlqoqqqKpr2srKyksbGRpqYmjEYjVquVGTNmkJmZeVM7pJQ0Nzcze/ZsTe+IUqJVJB2j0UhOTg6hUAiPx0NfXx9erxen00lNTQ0mkwmj0cisWbPYuXMnpaWlXL58mb1792K32+no6MDn8zF79uxovKn+/v5o/GODwXBLBwyklHR3d085Kka8UaJVaIKUlBTsdjt2u52BgQHS0tIIhUKUlZWRk5PD6dOnSUtLY8OGDXg8nugQNrI7Ki8vD7vdjs1mi7aRlZWF1WrF5/Pd0oH2QCDAwMAAGRkZ8f64U0KJVqEJ8vLyyM3NvSZ06Zw5c4DhuesTTzwxofauD0dzK8PdtrY28vLyNO05BiVahYa4mbASMce8ePEipaWlcX+fqaJEq0gKUkoOHDgw5ThPbrebtLS0CS0d9fX1sWLFihvsiTi0tI4SrSLhGAwGvve978XkeN2XX37JwMAAq1evvuW/EULccCCgp6cHs9l8TYY9raJEq0g4EdHE4iTNgw8+yKuvvoqUcszkWrfCxYsXYxLiNRGoAwMKXWO1Wlm3bh27d++edMC3UCjEuXPnqKio0PT6bAQlWoXuKS4uJj09nbNnz07qfK3b7UZKqfmlnghKtIrbgrVr13L06FE8Hs+E/k5Kyblz55gzZ05cgp/HA31YqVDcBCEEaWlpLFu2jAMHDkyotw2FQtTV1bFgwQJdDI1BiVZxmxCJZNHb28ulS5duWbidnZ2kpKTowmscQYlWcdtgMplYv349e/fuxe/3j1tfSkl1dTVLly7VzdAYlGgVtxk5OTmUlpZSXV09bm/r8Xi4cuVKNGm1XlCiVdxWCCFYuXIl58+fp7u7G5fLdcNSkN/v5/Lly5w4cYIFCxZo+sD7aCjRKm47LBYLq1at4mc/+xlPPfUUra2t19y/fPkyjz76KC+88AIWi0XTKUBGQ+2IUtx2+Hw+tm/fzvbt2/H7/TQ1NV0zBO7o6OD8+fNUV1djNpt55ZVXlCNKoUgmZrOZhx9+mIULFxIMBvn888+vud/e3o7X62XFihU8//zzmg5MPhqqp1VokkjUicly3333sWPHDp599llOnjyJz+eLnpPt7u5m8eLF/Pa3v6WsrGzK7yWESOgar4hHWoVYUVVVJWtqapJthiIJOJ1OXnrppRuCu00Ul8vF+fPnWbJkSdThdOHCBYxG45S9xlJKUlJS+OEPfxhzZ5YQ4gsp5ajnBFVPq9AkXq+XGTNmcO+99456PxgMYjAYEEIQCARuGm3i+tSVUkqCweAtR6iIOKquX8sNBAJ8+umnCXdkqTmtQtNEhp6BQICDBw9GN00cO3YMt9vN0NAQhw8fvqauEIKenh7eeustamtrrykXQmAwGDh06BA+n++Ge6M9mpubOX78OB9++CE9PT0JHw5fj+ppFbqgvb2d6upqysvLOXfuHPX19eTm5vLRRx/R29tLW1sbNpuN/Px86urqSElJob+/n9raWiwWC8eOHeOhhx6ioaGBrq4u/H4/J06cID09ndTUVPr7+7l48SIpKSmsXLmS/fv3Y7PZyMrKoqamBoPBQE5ODidPnmTNmjVJ/V+onlaheSIncQKBAKdOncLtdpOdnY3T6YwGIw8EAgwODtLX14fBYODq1asYDAZ8Ph9Xr16lsbGRuro6mpubWbNmDV6vl/r6eqxWKx9++CEff/wxbrebI0eO4HK5AFi9ejVtbW0UFhYCiYlTdSuonlahC3JycvjGN76B3+9nYGAAt9vN7Nmzqa6uZsGCBUyfPj162ic7OxuPx0NzczNlZWVMmzaNu+66i/z8fFJTUzl06BCVlZWkpqaSkpLC4sWLMRqNuFwu8vLySEtLo7y8HJPJRFlZGfX19dx7773U19fHLb3mRFCiVWgeIQRLliwZ9d7mzZvH/LuVK1dGrxcsWABAUVERixYtuqbeaLloI3GSy8vLo2Fd58+fPzHD44QSrUKzeDyeSWfWSwSBQACfz5fw91WiVWgSh8PBjBkzaGtri1mbTU1N2O32mCaMXrBgwaQy/00FJVqFJrHb7Wzbti2mbe7evZvS0tJo5gK9orzHCoXOUKJVKHSGEq1CoTOUaBUKnaFEq1DoDCVahUJnKNEqFDpDiVZxRzAy2IOWAz/cCkq0itueYDDIgQMHOHToELt27eLMmTPJNmlKqB1RijuC9957jxdffBGz2cwbb7yhidM6k0X1tIrbHoPBwKZNm7DZbBQXF7NixYpkmzQlVE+ruO0RQrB06VLmzZvH8uXLycvLS7ZJU0KJVqEZpJRcunSJlpaWuLRdXl5OaWkpn332Wczbj2Tti5zDjSdKtApN8cknnwCQlpYW87Y3b96M0Wiko6Mj5m1H4kstW7Ys5m1fjxKtQlMYjUZKSkqw2+1IKenp6cFisWAwGEhNTb2mbiAQYGho6KYCl1Licrno6+tj9uzZN8QnDgQC1NXVUVFRMa5t/f39XLlyhaysLLxeL9nZ2dG4UYk8DK9Eq9Asfr+f119/HbvdTlFREZmZmZSVldHX14fH4yE9PZ3a2loeeOABmpqasFqtZGVlUVdXR2FhIR0dHWRmZvLll1/S3d3N3XffjdVqxWazkZmZSXNzM0VFRVy4cAG73U5GRgZGo5GhoSFaWlooKSnBZDLR0tJCeno6ra2t+P1+jh8/zuDgIJs2bYrpgfpbRYlWoWmmT5+O0+nEYrHQ0NBAVlYWb731Fl6vl61btyKlpK+vj5qaGoQQLFu2jM8//5y1a9fy6aefsmrVKnw+H93d3QSDQc6dO0dDQwOlpaWUlpYihOD06dMYjUb6+/
vxer0sX76cQ4cO0dDQQGVlJfX19dGQrOnp6fh8Pmw2G/39/UkRrVryUWgWIQT5+fls2bIFu92Oz+djYGCAgoICZs6cic1m4/Tp0xw5ciSaZSAlJSU6JE5JSaGzsxObzYbdbmdoaIjOzk5SU1NJS0ujpaUFv98fDRnj9/sRQhAMBvH7/RgMBqxWK5WVlRQUFOB2u0lLSyM9PZ3BwcGkCBZULh+FhpBS8tprr10zp42k/wgEAgQCAVJSUqKislqteL1eDAZDNEVIZHhrsVjw+XzR/LOBQACr1RoVptFoxOfzYbVaowm4IulDIm2YTKZr4j8NDQ1hNpuRUhIKhbBYLNE57alTpygvL4+ZI0rl8lHojkhnEhGN2WyOOpFMJlM0D8/1zikAm812zbPRaIz+rdVqvaHeaIyW+Gu0smR0ekq0Ck0hpeTQoUNTzpY3Gj6fD6PRGJfoiR0dHdHYyvFGiVahKR599FE8Hk9c2t6/fz9FRUWUlpbGpf3MzMy4tHs9SrQKzSCEIDU1ddQhbyyw2+1Mnz6dnJycuLSfKJT3WKHQGUq0CoXOUKJVKHSGEq1CoTOUaBUKnaFEq1DoDCVahUJnjCtaIcTvhBCdQojTI8r+txDinBDipBDiHSFE5oh7zwkhGoQQ54UQXxtRvilc1iCE+HHMP4lCMQZSSnw+H36/n6GhIQKBQLJNmhK30tO+Amy6rmw3sFBKuQioA54DEEJUANuAyvDf/EoIYRRCGIFfAl8HKoCnwnUVirgTCAR47rnneOaZZ3j00Uf5wx/+kGyTpsS4opVSHgCuXlf2kZQy8nN1GCgMX28F3pBSDkkpLwINwIrwo0FKeUFK6QPeCNdVKOKOyWRi3rx5tLW10dXVxV133ZVsk6ZELOa0/xH4IHxdAFwaca81XDZW+Q0IIX4ghKgRQtR0dXXFwDzFnY4QgvXr15OTk8Py5cspLy9PtklTYkp7j4UQ/wQEgFdjYw5IKbcD22H4PG2s2lXcngSDQYLB4Lj1cnJyuP/++1m9ejUGg+GWYjqZTCYMBu35aictWiHE94C/AR6SXx0qbANmjahWGC7jJuUKxaQ5cGgfx+p2Y0m5ubgk4Cjx0jl0nO1v/vO47Q72SZ7Y/PeUFMfnRNBUmJRohRCbgP8CPCilHBhxayfwmhDi34B8YC5wFBDAXCFEKcNi3QZ8ayqGKxQAXT2tlD3UTXb++CFXlzxWBAKEGBi37ulD3bjcvTGwMPaMK1ohxOvAGiBbCNEK/AvD3mIrsDscbuOwlPLvpZRnhBBvAmcZHjb/SEoZDLfzD8AuwAj8Tkqp7yxICk0gBJjMRizW2B5sN5m1NyyOMK5opZRPjVL87zep/zPgZ6OUvw+8PyHrFArFDWj350ShUIyKEq1CoTNUuBnFHUNf7xAGg8A74Mc+PQWTJfYB3hKB6mkVdwRSSk4cHF5lvHzRTUdrf5ItmjxKtIo7AinB4/aRkmpiWqaF3q7BZJs0aZRoFXcEQoAjN5U+1xCubi85BdOSbdKkUXNaxR2BEIK7V+cjgfLFOaSk6verr1/LFYoJYkkJf91jn7wgoSjRKnRNKCjpbh+IeU6d3iteyIxpkzFDiVaha+6afw9nzpsYcoqYtlssDBTMnDV+xSSgRKvQNRULKqlYUJlsMxKKpvPTCiG6AA9wJdm2TIBs9GOvnmyFO8veYinlqEmHNC1aACFEzVjJdbWInuzVk62g7I2g1mkVCp2hRKtQ6Aw9iHZ7sg2YIHqyV0+2grIX0MGcVqFQXIseelqFQjECJVqFQmdoVrRazP0zRl6j6UKI3UKI+vCzI1wuhBA/D9t/UgixNAn2zhJCfCyEOCuEOCOE+Ect2yyESBFCHBVCnAjb+9/C5aVCiCNhu/4ghLCEy63h1w3h+yWJtDdsg1EIcUwI8V6ibNWkaDWc++cVbsxr9GNgr5RyLrA3/BqGbZ8bfvwA+HWCbBxJAPjPUsoK4F7gR+H/o1ZtHgLWSSkXA3cDm4QQ9wIvAC9KKecAPcD3w/W/D/SEy18M10s0/wjUjngdf1ullJp7ACuBXSNePwc8l2y7wraUAKdHvD4PzAxfzwTOh69/Azw1Wr0k2v5nYIMebAZSgS+BexjeVWS6/rvBcEjeleFrU7ieSKCNhQz/6K0D3mM4vnfcbdVkT8sEcv9ogDwpZXv42gnkha819RnCw7ElwBE0bHN4uHkc6GQ4O2Mj0Cu/Svg20qaoveH7LiArgeb+H4aD9ofCr7NIgK1aFa0ukcM/o5pbQxNCTAP+BPwnKaV75D2t2SylDEop72a4F1sBzE+uRaMjhPgboFNK+UWi31uror1ZTiCt0SGEmAkQfu4Ml2viMwghzAwL9lUp5dvhYk3bDCCl7AU+ZniImSmEiJxIG2lT1N7wfTvQnSAT7wO+KYRoYjh16zrg/ybCVq2Ktppw7p+w920bw3mCtMhO4Lvh6+8yPG+MlP9d2CN7L+AaMSRNCGI4Z8u/A7VSyn8bcUuTNgshcoQQmeFrG8Pz71qGxfv4GPZGPsfjwL7wyCHuSCmfk1IWSilLGP5+7pNSPp0QW5PhZLjFSf5mhrPMNwL/lGx7wja9DrQDfobnK99neF6yF6gH9gDTw3UFwx7wRuAUUJUEe1czPPQ9CRwPPzZr1WZgEXAsbO9p4J/D5bMZTuTWAPwRsIbLU8KvG8L3Zyfpe7EGeC9RtqptjAqFztDq8FihUIyBEq1CoTOUaBUKnaFEq1DoDCVahUJnKNEqFDpDiVah0Bn/H3ClUbLY7j/fAAAAAElFTkSuQmCC\n", + "image/svg+xml": "\n\n\n\n\n\n%3\n\n\n\n139978828415600\n\nouter_loss\n ()\n\n\n\n139975938626944\n\nMseLossBackward0\n\n\n\n139975938626944->139978828415600\n\n\n\n\n\n139975938626656\n\nAddmmBackward0\n\n\n\n139975938626656->139975938626944\n\n\n\n\n\n139975938188624\n\nAddBackward0\n\n\n\n139975938188624->139975938626656\n\n\n\n\n\n139975938188096\n\nAddBackward0\n step1.fc.bias\n (1)\n\n\n\n139975938188096->139975938188624\n\n\n\n\n\n139975938188144\n\nAddmmBackward0\n\n\n\n139975938188096->139975938188144\n\n\n\n\n\n139975938187424\n\nAccumulateGrad\n\n\n\n139975938187424->139975938188096\n\n\n\n\n\n139975938188912\n\nAddmmBackward0\n\n\n\n139975938187424->139975938188912\n\n\n\n\n\n139975938634512\n\nstep0.fc.bias\n 
(1)\n\n\n\n139975938634512->139975938187424\n\n\n\n\n\n139975938187856\n\nMulBackward0\n\n\n\n139975938187856->139975938188096\n\n\n\n\n\n139975938188768\n\nViewBackward0\n\n\n\n139975938188768->139975938187856\n\n\n\n\n\n139975938189200\n\nSumBackward1\n\n\n\n139975938189200->139975938188768\n\n\n\n\n\n139975938189008\n\nMseLossBackwardBackward0\n\n\n\n139975938189008->139975938189200\n\n\n\n\n\n139975938189728\n\nTBackward0\n\n\n\n139975938189008->139975938189728\n\n\n\n\n\n139975938188864\n\nMulBackward0\n\n\n\n139975938188864->139975938189008\n\n\n\n\n\n139975938187952\n\nAccumulateGrad\n\n\n\n139975938187952->139975938188864\n\n\n\n\n\n139975938187712\n\nMulBackward0\n\n\n\n139975938187952->139975938187712\n\n\n\n\n\n139975938635072\n\nmeta_parameter\n ()\n\n\n\n139975938635072->139975938187952\n\n\n\n\n\n139975938188912->139975938189008\n\n\n\n\n\n139975938188480\n\nTBackward0\n\n\n\n139975938188480->139975938188912\n\n\n\n\n\n139975938188384\n\nAccumulateGrad\n\n\n\n139975938188384->139975938188480\n\n\n\n\n\n139975938187808\n\nAddBackward0\n step1.fc.weight\n (1, 16)\n\n\n\n139975938188384->139975938187808\n\n\n\n\n\n139975938634432\n\nstep0.fc.weight\n (1, 16)\n\n\n\n139975938634432->139975938188384\n\n\n\n\n\n139975938187520\n\nMulBackward0\n\n\n\n139975938187520->139975938188624\n\n\n\n\n\n139975938189296\n\nViewBackward0\n\n\n\n139975938189296->139975938187520\n\n\n\n\n\n139975938188576\n\nSumBackward1\n\n\n\n139975938188576->139975938189296\n\n\n\n\n\n139975938188720\n\nMseLossBackwardBackward0\n\n\n\n139975938188720->139975938188576\n\n\n\n\n\n139975938189824\n\nTBackward0\n\n\n\n139975938188720->139975938189824\n\n\n\n\n\n139975938187712->139975938188720\n\n\n\n\n\n139975938188144->139975938188720\n\n\n\n\n\n139975938188816\n\nTBackward0\n\n\n\n139975938188816->139975938188144\n\n\n\n\n\n139975938187808->139975938188816\n\n\n\n\n\n139975938189104\n\nAddBackward0\n\n\n\n139975938187808->139975938189104\n\n\n\n\n\n139975938189248\n\nMulBackward0\n\n\n\n139975938189248->139975938187808\n\n\n\n\n\n139975938189344\n\nTBackward0\n\n\n\n139975938189344->139975938189248\n\n\n\n\n\n139975938189536\n\nTBackward0\n\n\n\n139975938189536->139975938189344\n\n\n\n\n\n139975938189440\n\nMmBackward0\n\n\n\n139975938189440->139975938189536\n\n\n\n\n\n139975938189728->139975938189440\n\n\n\n\n\n139975938187904\n\nTBackward0\n\n\n\n139975938187904->139975938626656\n\n\n\n\n\n139975938189104->139975938187904\n\n\n\n\n\n139975938188240\n\nMulBackward0\n\n\n\n139975938188240->139975938189104\n\n\n\n\n\n139975938188048\n\nTBackward0\n\n\n\n139975938188048->139975938188240\n\n\n\n\n\n139975938188528\n\nTBackward0\n\n\n\n139975938188528->139975938188048\n\n\n\n\n\n139975938189584\n\nMmBackward0\n\n\n\n139975938189584->139975938188528\n\n\n\n\n\n139975938189824->139975938189584\n\n\n\n\n\n" + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
╭──────────────────────────── Traceback (most recent call last) ────────────────────────────╮\n",
+       " <ipython-input-8-5906690e2182>:17 in <cell line: 17>                                      \n",
+       " /home/TorchOpt/Miniconda3/envs/torchopt/lib/python3.8/site-packages/torch/_tensor.py:396  \n",
+       " in backward                                                                               \n",
+       "                                                                                           \n",
+       "    393 │   │   │   │   retain_graph=retain_graph,                                         \n",
+       "    394 │   │   │   │   create_graph=create_graph,                                         \n",
+       "    395 │   │   │   │   inputs=inputs)                                                     \n",
+       "  396 │   │   torch.autograd.backward(self, gradient, retain_graph, create_graph, inputs \n",
+       "    397 │                                                                                  \n",
+       "    398 │   def register_hook(self, hook):                                                 \n",
+       "    399 │   │   r\"\"\"Registers a backward hook.                                             \n",
+       "                                                                                           \n",
+       " /home/TorchOpt/Miniconda3/envs/torchopt/lib/python3.8/site-packages/torch/autograd/__init \n",
+       " __.py:173 in backward                                                                     \n",
+       "                                                                                           \n",
+       "   170 │   # The reason we repeat same the comment below is that                           \n",
+       "   171 │   # some Python versions print out the first line of a multi-line function        \n",
+       "   172 │   # calls in the traceback and some print out the last line                       \n",
+       " 173 Variable._execution_engine.run_backward(  # Calls into the C++ engine to run th \n",
+       "   174 │   │   tensors, grad_tensors_, retain_graph, create_graph, inputs,                 \n",
+       "   175 │   │   allow_unreachable=True, accumulate_grad=True)  # Calls into the C++ engine  \n",
+       "   176                                                                                     \n",
+       "╰───────────────────────────────────────────────────────────────────────────────────────────╯\n",
+       "RuntimeError: Trying to backward through the graph a second time (or directly access saved \n",
+       "tensors after they have already been freed). Saved intermediate values of the graph are freed\n",
+       "when you call .backward() or autograd.grad(). Specify retain_graph=True if you need to \n",
+       "backward through the graph a second time or if you need to access saved tensors after calling\n",
+       "backward.\n",
+       "
\n" + ], "text/plain": [ - "
" + "\u001b[91m╭─\u001b[0m\u001b[91m─────────────────────────── \u001b[0m\u001b[1;31mTraceback \u001b[0m\u001b[1;2;31m(most recent call last)\u001b[0m\u001b[91m ───────────────────────────\u001b[0m\u001b[91m─╮\u001b[0m\n", + "\u001b[91m│\u001b[0m \u001b[33m\u001b[0m:\u001b[94m17\u001b[0m in \u001b[92m\u001b[0m \u001b[91m│\u001b[0m\n", + "\u001b[91m│\u001b[0m \u001b[2;33m/home/PanXuehai/Miniconda3/envs/torchopt/lib/python3.8/site-packages/torch/\u001b[0m\u001b[1;33m_tensor.py\u001b[0m:\u001b[94m396\u001b[0m \u001b[91m│\u001b[0m\n", + "\u001b[91m│\u001b[0m in \u001b[92mbackward\u001b[0m \u001b[91m│\u001b[0m\n", + "\u001b[91m│\u001b[0m \u001b[91m│\u001b[0m\n", + "\u001b[91m│\u001b[0m \u001b[2m 393 \u001b[0m\u001b[2m│ │ │ │ \u001b[0mretain_graph=retain_graph, \u001b[91m│\u001b[0m\n", + "\u001b[91m│\u001b[0m \u001b[2m 394 \u001b[0m\u001b[2m│ │ │ │ \u001b[0mcreate_graph=create_graph, \u001b[91m│\u001b[0m\n", + "\u001b[91m│\u001b[0m \u001b[2m 395 \u001b[0m\u001b[2m│ │ │ │ \u001b[0minputs=inputs) \u001b[91m│\u001b[0m\n", + "\u001b[91m│\u001b[0m \u001b[31m❱ \u001b[0m 396 \u001b[2m│ │ \u001b[0mtorch.autograd.backward(\u001b[96mself\u001b[0m, gradient, retain_graph, create_graph, inputs \u001b[91m│\u001b[0m\n", + "\u001b[91m│\u001b[0m \u001b[2m 397 \u001b[0m\u001b[2m│ \u001b[0m \u001b[91m│\u001b[0m\n", + "\u001b[91m│\u001b[0m \u001b[2m 398 \u001b[0m\u001b[2m│ \u001b[0m\u001b[94mdef\u001b[0m \u001b[92mregister_hook\u001b[0m(\u001b[96mself\u001b[0m, hook): \u001b[91m│\u001b[0m\n", + "\u001b[91m│\u001b[0m \u001b[2m 399 \u001b[0m\u001b[2m│ │ \u001b[0m\u001b[33mr\u001b[0m\u001b[33m\"\"\"Registers a backward hook.\u001b[0m \u001b[91m│\u001b[0m\n", + "\u001b[91m│\u001b[0m \u001b[91m│\u001b[0m\n", + "\u001b[91m│\u001b[0m \u001b[2;33m/home/PanXuehai/Miniconda3/envs/torchopt/lib/python3.8/site-packages/torch/autograd/\u001b[0m\u001b[1;33m__ini\u001b[0m \u001b[91m│\u001b[0m\n", + "\u001b[91m│\u001b[0m \u001b[1;33mt__.py\u001b[0m:\u001b[94m173\u001b[0m in \u001b[92mbackward\u001b[0m \u001b[91m│\u001b[0m\n", + "\u001b[91m│\u001b[0m \u001b[91m│\u001b[0m\n", + "\u001b[91m│\u001b[0m \u001b[2m170 \u001b[0m\u001b[2m│ \u001b[0m\u001b[2m# The reason we repeat same the comment below is that\u001b[0m \u001b[91m│\u001b[0m\n", + "\u001b[91m│\u001b[0m \u001b[2m171 \u001b[0m\u001b[2m│ \u001b[0m\u001b[2m# some Python versions print out the first line of a multi-line function\u001b[0m \u001b[91m│\u001b[0m\n", + "\u001b[91m│\u001b[0m \u001b[2m172 \u001b[0m\u001b[2m│ \u001b[0m\u001b[2m# calls in the traceback and some print out the last line\u001b[0m \u001b[91m│\u001b[0m\n", + "\u001b[91m│\u001b[0m \u001b[31m❱ \u001b[0m173 \u001b[2m│ \u001b[0mVariable._execution_engine.run_backward( \u001b[2m# Calls into the C++ engine to run th\u001b[0m \u001b[91m│\u001b[0m\n", + "\u001b[91m│\u001b[0m \u001b[2m174 \u001b[0m\u001b[2m│ │ \u001b[0mtensors, grad_tensors_, retain_graph, create_graph, inputs, \u001b[91m│\u001b[0m\n", + "\u001b[91m│\u001b[0m \u001b[2m175 \u001b[0m\u001b[2m│ │ \u001b[0mallow_unreachable=\u001b[94mTrue\u001b[0m, accumulate_grad=\u001b[94mTrue\u001b[0m) \u001b[2m# Calls into the C++ engine \u001b[0m \u001b[91m│\u001b[0m\n", + "\u001b[91m│\u001b[0m \u001b[2m176 \u001b[0m \u001b[91m│\u001b[0m\n", + "\u001b[91m╰───────────────────────────────────────────────────────────────────────────────────────────╯\u001b[0m\n", + "\u001b[1;91mRuntimeError: \u001b[0mTrying to backward through the graph a second time \u001b[1m(\u001b[0mor directly access saved \n", + "tensors after they have already been freed\u001b[1m)\u001b[0m. 
Saved intermediate values of the graph are freed\n", + "when you call \u001b[1;35m.backward\u001b[0m\u001b[1m(\u001b[0m\u001b[1m)\u001b[0m or \u001b[1;35mautograd.grad\u001b[0m\u001b[1m(\u001b[0m\u001b[1m)\u001b[0m. Specify \u001b[33mretain_graph\u001b[0m=\u001b[3;92mTrue\u001b[0m if you need to \n", + "backward through the graph a second time or if you need to access saved tensors after calling\n", + "backward.\n" ] }, - "metadata": { - "needs_background": "light" - }, + "metadata": {}, "output_type": "display_data" } ], "source": [ + "# Inner update with attached computation graph\n", "inner_loss = loss_fn(net(x), y)\n", "loss = inner_loss * meta_parameter\n", "optim.step(loss)\n", + "\n", + "# Outer forward process\n", "outer_loss = loss_fn(net(x), y)\n", - "torchopt.visual.make_dot(outer_loss).render(\"full_graph\", format=\"png\")\n", - "plt.figure(figsize=(10,10))\n", - "plt.imshow(imgplt.imread('full_graph.png'))\n", + "display(\n", + " torchopt.visual.make_dot(\n", + " outer_loss,\n", + " params=(init_net_state, one_step_net_state, {'meta_parameter': meta_parameter, 'outer_loss': outer_loss})\n", + " )\n", + ")\n", + "\n", + "# Outer update\n", "meta_optim.zero_grad()\n", "outer_loss.backward()\n", "meta_optim.step()" @@ -306,14 +358,14 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "From the graph we can see, directly conducting the second bi-level process links the graph of first and second bi-level process together. We should manually stop gradient with `torchopt.stop_gradient`. `torchopt.stop_gradient` will detach the node of gradient graph and make it become a leaf node. It allows the input of network, optimizer, or state dictionary and the gradient operation happens in an inplace manner.\n", + "From the graph we can see, directly conducting the second bi-level process links the graph of first and second bi-level process together. We should manually stop gradient with `torchopt.stop_gradient`. `torchopt.stop_gradient` will detach the node of gradient graph and make it become a leaf node. It allows the input of network, optimizer, or state dictionary and the gradient operation happens in an in-place manner.\n", "\n", - "Let's use recover_state_dict to come back to one-step updated states." + "Let's use `recover_state_dict` to come back to one-step updated states." 
] }, { "cell_type": "code", - "execution_count": 59, + "execution_count": 9, "metadata": {}, "outputs": [], "source": [ @@ -331,45 +383,51 @@ }, { "cell_type": "code", - "execution_count": 62, + "execution_count": 10, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "tensor([1.1950], requires_grad=True)\n" + "meta_parameter.grad = tensor(-0.0914)\n", + "meta_parameter = Parameter containing: tensor(1.1887, requires_grad=True)\n", + "\n" ] }, { "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAARwAAAJCCAYAAAASmHj6AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAACkxUlEQVR4nOy9d3wU17n//55t0q5WvfeGhBASQiCKKQZjg+m4G7fYjtOvfVN/cUnu9yaOk3t94zhO7MRpjknsOI4BAzZgbHoxRSDUkIQEqPfetX1+f8BuANPUdlfivF8vvbRlZs4zZ2c+c85zznkeSZZlBAKBwBkoXG2AQCC4eRCCIxAInIYQHIFA4DSE4AgEAqchBEcgEDgNITgCgcBpOF1wJElaKklSqSRJZyVJes7Z5QsEAtchOXMejiRJSqAMWAzUAseBh2RZLnaaEQKBwGU4u4UzEzgry3K5LMsm4H1gjZNtEAgELkLl5PIigZqL3tcCsy7eQJKkrwFfA/Dy8pqekpLiPOsEAsGIkJOT0yrLcvDlnztbcK6LLMt/Av4EkJWVJZ84ccLFFt0cWCwWzGazq81woFQq0Wg0rjZj1DCbzVgsFlebcUUkScLDwwNJkoZzjKorfe5swakDoi96H3XhM4GL2b17N//41weoNR6uNgWrxcLM6Zl865vfdLUpo8a27dv5cMtHblHfFyPbbIQE+vM/v/jFqBzf2YJzHEiSJCme80KzFnjYyTYIrkBVVRXRs29j8sxbXG0KTdWVlB/e5WozRpXqmhri5y1m4rQZrjblEvq7u9n519dH7fhOFRxZli2SJD0NfAoogb/KslzkTBsEV0fj4YGnVudqM9B4eA6rOT82kFC7SX1fjMVkRiGN3liS0304sixvB7Y7u1yBQOB6xExjgUBwEaM7L08IjkAguIjR7coKwREIBBchWjgCgWCcIARHIBBchOhSCQQCpyG6VAI3R5ZlOltbGOjrveL3HS1NGPr7ERlChobJYKCtqWFE6q+7o52+7q5rHEu0cARujtVi5uC2TZhNJqwWC2fyT1J0/Ag2mw2A5rpa8j7f51ojxyiyLFOSc4zas2UANNdWc/LAHno6O66/r81G5eli8g8fwGwyAdDX3cWRT7e5TPyF4AiGTU9nJ2aDES8fHyRJQqXxYM/G97FeWJwYHhtP5eli0cIZArIsU5aXQ9ykyUiShMbTk9Lc41SfOX39nSUJD62Wz7dvob+nG4CAkDBaG+owG41XK3HkjL8CQnAEw6arrQWtXo9CoUShVBIRl4DO28fxvafOC2N/P1aL+6xGHyuYjAb6envw9vUHwC8ohODI6OvsdR5JkgiJjMYnINDxmUqtRpZlTIaBq+01XJOviRAcwbDReukxGQxXbcFYzCaUKiVKpdtFQ3F7VCo1KrUak9Fwxe9lWcZqtd5w69FmO7+tSn210B+ihSNwc3wDg7FYzJiNBmxWK6V5J2iqqSL/8AFsNhvtzY2ERMYgKcTlNliUKhXRE5KpqziHLMs019ZQXlRAUfYRujvaAfjnb16mtaH+C/vKskzF6SLqK8vJPbQPs8lEX083Om9v1J6eVylxdFs44pEjGDYeWi3ps+bS39uLX6AnUYlJPPHcT1BrNEiShGlggMz5t7nazDGJJElMueVWmuvOB8rU+/mx8vGvIUkSnjodhv5+FAol3v7+V9w/OCKKR7/3AkqlEqVSSX93N9MX3I5SqXTmaTgQgiMYNpIkkTx1uuN9QEjYJd8nTJ7ibJPGFf7BIfgHhwCg03uj03s7vrOqrKx56ptXDHMhSRK+AYH4XuTDiYhPHH2Dr4Fo4woEYxilUonXRQ56d0cIjkAguIjRdRqLLpUAALVazcm9n1F3tnToB5HBaDLiofEYlu+xp6ODKK/xfWkqFRI5u3dQfXr4AS9tVhtWqxW1Rj3sY5kNBgx9PcM+ztUY37+q4IZZvnw5w03JYzQaeeONN3jy6afx8BhecPCQkJBh7e/u3H3XXUzLzByRY5WUlJCXl8dDdz00IsfT6/UjcpwrIQRHAEBoaCihoaHDOsbAwADr169n5syZ6HTuFavX3YiIiCAiImJEjqVSqejo6OCWW1wfAP96CB+OQCBwGkJwBAKB0xCCIxAInIYQHIFA4DSE4AgEAqchBEcgEDgNITgCgcBpCMERCAROQwiOQCBwGkJwBAKB0xCCIxAInIYQHIFA4DSE4AgEAqchBEcgEDgNITgCgcBpCMERCAROQwiOQCBwGiLin2DYyLKMzWZzZIC0Wq1YLBYUCgUKkfxuVJBl2VHv9j+LxYIkSSgUCiRpdBPaDRUhOIJh09XVxS9/+Us6Ojo4dOgQP/zhD/Hy8uJrX/saycnJrjZv3LJ//342bdpEU1MT9fX11NTUkJiYyNe//nU8r5pZ07UIwREMG7VaTU5ODp9++ikAhYWFJCQk8P3vf9/Flo1vVCoV69ato6fnfJaFzz//nO9+97toNFfLG+56xr3gmEwm+vr6XG2GA41Gg06nc9sm71DQ6XSsWLGC3bt3Y7FYAFi8ePG4z7zgSiRJYurUqUyaNIns7GwAPDw8uOuuu9y6GzvuBWfnzp383xtvEhQe6WpTsJhNxAf588v/exm1evg5hNwFSZK488478ff3p6WlBZ1Ox8qVK936wh8P6PV6Vq9e7RCczMxM0tLSXGzVtRn3gmM0mchctJRblq5ytSn0dnWQ86+/Isujm93QFURHRzN//nw+/PBDYmNjueWWW8ZVK85dufPOO3n11Vfp6OjgzjvvxM/Pz9UmXZNxLzgASpUKtRv0a1VqDYzTm9DT05MVK1awdetWVqxY4fYX/nhAkiTS0tJIT0/n5MmTrFy50u1F/qYQnJsBWZZpaWnBaDS6zIaJEycSEhJCRkYG9fX1LrNDqVQSGhqKUqkc9bJMJhNNTU2jXs7VkGWZ+fPn09/fj06no6amxmW2eHt7X/dBIwRnnGAymfje976HSqVy2SiFzWZj4sSJ7NmzhwMHDrjEBoDq6mrefvttwsPDR72swsJCvvvd7w47TfJw6Onp
ITg4mF//+tcus6Gzs5NJkybx05/+9JrbCcEZR+j1eh555BF8fHxcZoPRaESj0bi0af/b3/4Wm83mlLJsNhszZ87ksccec0p5V8JqtWKz2Vw6EHHmzBmKi4uvu50QnHGEfZapM7oSdqxWK3V1dY7mtLNyivf19aFQKNBqtV/4ztli54rZvQ0NDQCEhYU57fc2mUwYDIYrPtBudERSjFveBNj9O/YJYiOJ1Wrl+PHjbNmy5arb9PX1XXUulCzLtLW1UVlZicFguOFys7OzOXXq1KDtdRayLNPT00Nzc/OoHLuyspI///nPV23Jmc1mOjs7r7p/b28vlZWVdHd33/CoaW1tLZ999tlQzQaE4NwUGI1G/vCHP/DRRx851t00NDRQU1OD1Wp1tFJqa2uxWq20trZisVhob2+nv7+f6upqamtrqampwWw209ra6thOqVQya9YsxxPOLiDl5eUMDAzQ39/Pxx9/zNatW2lqakKWZVpbW6moqMBoNFJbW8s777xDUVER586dQ5ZlmpqaqKiooLa2FrPZTE1NDbW1tdhsNsxmM5WVlXR0dDit2zRUPvnkE377299iNBqRZZn29nYqKiowGAyOeqisrMRoNNLV1YXBYKCnp4eenh4aGhqoqqqipqaGgYEB2tvbHSJiMpmYNm0a3t7ewPk67+vr49y5c3R0dGCxWDhx4gR//etfqaqqwmq10tPTQ3l5Od3d3fT397Nu3Try8/M5deqUw7bq6mrKy8sdjvCKigpMJhM2m436+noaGhqwWq3DqhPRpboJaG1txcfHh+bmZsxmM6dPn+bo0aMkJiai1Wo5d+4cp06dIjY2Fr1ez8aNG7nvvvvYtm0baWlpfPrppxgMBnx9fVm1ahU7d+7kkUceYePGjTzyyCOXlGU0GsnPz6erq4sTJ05wxx130N7eDkB3dzcWi4XNmzcTHh5OXl4eNpuN6dOnk5CQ4BC/3/3ud0yePJlJkyZhs9k4ffo0lZWVzJ07l4aGBsfN6Ayn8FAxmUzU19cTEhJCU1MTWq2W999/n+TkZMxmM15eXmzatMmx1iw/P58JEyY4WkQnTpzAarXi4+PDpEmTqKqqYsmSJRw+fJiMjAxiYmIuKa+oqIiOjg7Ky8tZu3YtbW1tdHZ20tbWRmBgIP/4xz+IjIykubmZadOmodPpmDdvHm1tbQBs3ryZgYEBpk2bho+PDwUFBbS1teHr68uECRP47LPPRmRAQrRwrkNXeyumKww1y7JMZ2sLZpPJBVbdOLIsc/LkSbRaLd3d3VRWVnL69GnmzZvH7bffTmBgICUlJSxcuJDbb78dX19fx372p1lMTAwJCQnExcXR09PjWKlsXx1+se/CZrPR3NxMTU0NxcXF+Pr6kp6eTkZGBklJSVRUVFBRUUFDQwO1tbXIsoxaraa5uZl//vOfAAQHB3PnnXeSnp6OyWSitraWuro6zp07x5kzZ1i6dCnTpk1z25nMsixTUVFBV1cXWq2WvLw8qqqqiI2NZcmSJSQlJXH27FkmTZrE4sWLiY2Ndexns9mQZZmAgAAmTpxIcnLyNevc/nlnZydVVVWUlZVhMpmYOnUqEyZMYNq0afT19VFYWEhNTQ319fWYzWaUSiV9fX2sW7cOs9mMVqtl8eLFzJ49G6VSSWNjI7W1tZSUlFBeXk5mZiYLFizAw8NjWHXjnr+Ym2A0DLBv83qsFjMWs5ny4kIqS4sdfd6zhXmcyT/pYiuvjb1Fk5CQQFJSEvn5+cTHx5Odnc2xY8fo6OggISGBI0eOcOzYMbq7u9HpdBw7dozy8nLgvFPU/md3jmZnZzvm2nh5edHe3s6ZM2fo6OigqqqK5ORkhyB4e3tTWlpKTU0NUVFRpKSkkJmZyZw5c5g2bRrHjx93tMLg/KJE+75FRUXo9XoiIiKQJInY2FgOHDhAYWGhW3ep8vPzSUpKIi4ujtOnTxMaGkpVVRVHjhyhsrKS2NhYR0uztrYWf39/cnNzKSwsdAiK/U+WZXx9fcnOzqasrAw4X0dqtZrc3FwMBgMnT54kKSnJ4bTX6XS0trZy+vRpvLy8mDJlClOmTGHBggVMmDCBzs5Ozp49i4+Pj6McpVKJJEnU19fT0dHBhAkTgPMPnFOnTpGdnT3seV6iS3UNWhvqUas1eOq8sFrM9HV3sX/LBv7jF79GkiRikifx+fYtTMqa5bYzPBUKBQ888AAxMTGYzWaam5sJDw8nICAAs9mMj48Pc+bMISwsDFmW0ev1rFq1iqqqKp544glCQkKIiIhAlmWUSiU6nY6IiAgaGxt56qmn0Gq1KBQK1qxZg8lkIiQkhLvvvhuTycSkSZNQKBSkpqaiUCiwWq3ExsaydOlSWltbSUhIwNvbm7vuuouWlhamTZuGUqlk1apVjhvn9ttv5+zZs+h0OgICAvD29qasrIy0tDTCwsJcXLtXZ86cOQQHB+Ph4UFSUhJhYWHcd999NDc3ExISgk6nY/Xq1XR0dBAcHExwcDB+fn5otVoCAwOZNGkSarUapVKJ1WrF09OTc+fOkZ6e7hDfBx54gIaGBpRKJY8++iitra18+ctfxt/fH5VKxerVqzEajXh6enL//fdTXV1NWFgY/v7+PProo1RVVfHII4+gVqu57bbbHD4hezdPkiSysrKIiIhApVJhNpsJCAgYVr0IwbkG7U0N+AQEAueXJSSkppO9e4fjex9/f7o72rBZrShV7lmVKpWKuLg44Pxq4ujoaADH08vOxXFr/Pz8Lpkxar8Q7ej1+i+sBI+Pj7/iseD8CvkpU6Y43kdFRREVFeV4Hx0d7bDL/v3FZU2dOvWS46Wnp3/hPN0JSZIuOR97/YeHh1/id7r8vC+uoytxeT2EhIQ4fofL6xQu/Y0DAgIuEYvAwEACAwMd70NDQx2v1Wo1kydPvuRYSUlJ17TtRhFdqmvgqfPCZBi46vdmkwmVWuO2vgSBwN1wz8eymxASGc2pY58j22zYZJkzhXm01NVyJv8kSRnTaKqpIiphApIbCY7d8ShwLnbn7c3KjZ67EJxr4O0fQFzKZPp7e9DpvQkOj+T+//geWi89AIb+PtJnz3Oxlf/GarWybt06l01xt08o0+v1LvVpdXR0OG32rUajoaCggDfffNMp5cH5iZQqleoLI0Y2m42+vj6X1H93d/cNdXWF4FwDhUJB5vzbHO8jEy71e6TNmutsk66KRqPhZz/72aBm6440BoOB//3f/+U73/nOFZccOAuVSkVQUJBTykpJSeFPf/qTU8qC84skX375Zb75zW8SERFxyXcWi4WXXnqJr3zlKwQHBzvNJjs3soZPCM44QZKkSxx/rmBgYACdTkdMTIzT1lS5Gg8PD8c8mtHGYrGwYcMG1qxZw+zZs7/QipFlmaysLDo6Opg+fbpbjpzeFIJjs1qxmM2uNgOrxQI3cT9fMHRkWWbv3r10dHTw9NNPX1VMbr31VjZs2MBtt92Gyg1HTt3PohHGW6+n9PA+ms9cf+n8tWhtbcXPz29YP6LFYmZCeKhbPnkE7k1NTQ0ffvghP/7xj6+6vECSJJKTk2l
        [... roughly fifteen lines of base64-encoded PNG data: the cell's old rendered `full_graph.png` output, removed by this patch, together with the opening of its removed `text/plain` output ...]
" - ] - }, - "metadata": { - "needs_background": "light" + "image/svg+xml": "\n\n\n\n\n\n%3\n\n\n\n139975938621248\n\nouter_loss\n ()\n\n\n\n139975251126352\n\nMseLossBackward0\n\n\n\n139975251126352->139975938621248\n\n\n\n\n\n139975251126592\n\nAddmmBackward0\n\n\n\n139975251126592->139975251126352\n\n\n\n\n\n139975251125920\n\nAddBackward0\n\n\n\n139975251125920->139975251126592\n\n\n\n\n\n139975251126400\n\nAccumulateGrad\n\n\n\n139975251126400->139975251125920\n\n\n\n\n\n139975251127120\n\nAddmmBackward0\n\n\n\n139975251126400->139975251127120\n\n\n\n\n\n139975938636032\n\nstep1.detached.fc.bias\n (1)\n\n\n\n139975938636032->139975251126400\n\n\n\n\n\n139975251126304\n\nMulBackward0\n\n\n\n139975251126304->139975251125920\n\n\n\n\n\n139975251127072\n\nViewBackward0\n\n\n\n139975251127072->139975251126304\n\n\n\n\n\n139975251128080\n\nSumBackward1\n\n\n\n139975251128080->139975251127072\n\n\n\n\n\n139975251126448\n\nMseLossBackwardBackward0\n\n\n\n139975251126448->139975251128080\n\n\n\n\n\n139975251127456\n\nTBackward0\n\n\n\n139975251126448->139975251127456\n\n\n\n\n\n139975251127312\n\nMulBackward0\n\n\n\n139975251127312->139975251126448\n\n\n\n\n\n139975251126016\n\nAccumulateGrad\n\n\n\n139975251126016->139975251127312\n\n\n\n\n\n139975938635072\n\nmeta_parameter\n ()\n\n\n\n139975938635072->139975251126016\n\n\n\n\n\n139975251127120->139975251126448\n\n\n\n\n\n139975251126880\n\nTBackward0\n\n\n\n139975251126880->139975251127120\n\n\n\n\n\n139975251126544\n\nAccumulateGrad\n\n\n\n139975251126544->139975251126880\n\n\n\n\n\n139975251128272\n\nAddBackward0\n\n\n\n139975251126544->139975251128272\n\n\n\n\n\n139975938635552\n\nstep1.detached.fc.weight\n (1, 16)\n\n\n\n139975938635552->139975251126544\n\n\n\n\n\n139975251126256\n\nTBackward0\n\n\n\n139975251126256->139975251126592\n\n\n\n\n\n139975251128272->139975251126256\n\n\n\n\n\n139975251127744\n\nMulBackward0\n\n\n\n139975251127744->139975251128272\n\n\n\n\n\n139975251126112\n\nTBackward0\n\n\n\n139975251126112->139975251127744\n\n\n\n\n\n139975251126640\n\nTBackward0\n\n\n\n139975251126640->139975251126112\n\n\n\n\n\n139975251126976\n\nMmBackward0\n\n\n\n139975251126976->139975251126640\n\n\n\n\n\n139975251127456->139975251126976\n\n\n\n\n\n" }, + "metadata": {}, "output_type": "display_data" } ], "source": [ - "# stop gradient and make them become the leaf node\n", + "# Stop gradient and make them become the leaf node\n", "torchopt.stop_gradient(net)\n", "torchopt.stop_gradient(optim)\n", + "one_step_net_state_detached = torchopt.extract_state_dict(net, enable_visual=True, visual_prefix='step1.detached.')\n", "\n", + "# Inner update\n", "inner_loss = loss_fn(net(x), y)\n", "loss = inner_loss * meta_parameter\n", "optim.step(loss)\n", + "\n", + "# Outer update\n", "outer_loss = loss_fn(net(x), y)\n", - "torchopt.visual.make_dot(outer_loss).render(\"full_graph\", format=\"png\")\n", - "plt.figure(figsize=(10,10))\n", - "plt.imshow(imgplt.imread('full_graph.png'))\n", "meta_optim.zero_grad()\n", "outer_loss.backward()\n", + "print(f'meta_parameter.grad = {meta_parameter.grad!r}')\n", "meta_optim.step()\n", - "print(meta_parameter)" + "print(f'meta_parameter = {meta_parameter!r}')\n", + "\n", + "display(\n", + " torchopt.visual.make_dot(\n", + " outer_loss,\n", + " params=(one_step_net_state_detached, {'meta_parameter': meta_parameter, 'outer_loss': outer_loss})\n", + " )\n", + ")" ] }, { @@ -381,13 +439,10 @@ } ], "metadata": { - "interpreter": { - "hash": "238ad0feaa04228775e5e27229169b0e3e76c0e018d5a6d65c4906ccad5c5a9e" - }, 
"kernelspec": { - "display_name": "OpTorch", + "display_name": "Python 3.8.13 ('torchopt')", "language": "python", - "name": "optorch" + "name": "python3" }, "language_info": { "codemirror_mode": { @@ -399,7 +454,12 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.7" + "version": "3.8.13" + }, + "vscode": { + "interpreter": { + "hash": "2a8cc1ff2cbc47027bf9993941710d9ab9175f14080903d9c7c432ee63d681da" + } } }, "nbformat": 4, From 36374422daae45b09cdde760d425aea72cc3dd1b Mon Sep 17 00:00:00 2001 From: Xuehai Pan Date: Wed, 13 Jul 2022 16:19:35 +0800 Subject: [PATCH 13/19] docs: fix typos --- README.md | 101 +++++++++--------- examples/L2R/README.md | 10 +- examples/L2R/{train_l2r.py => l2r.py} | 0 examples/LOLA/README.md | 4 +- examples/LOLA/{visualise.py => visualize.py} | 0 examples/MAML-RL/README.md | 4 +- examples/MAML-RL/helpers/__init__.py | 2 +- .../{Tabular_mdp.py => tabular_mdp.py} | 0 examples/MAML-RL/{run_MAML.py => maml.py} | 0 examples/MGRL/README.md | 4 +- examples/MGRL/{toy.py => mgrl.py} | 0 examples/few-shot/README.md | 4 +- .../{maml-omniglot.py => maml_omniglot.py} | 0 torchopt/_src/alias.py | 2 +- torchopt/_src/optimizer/adam.py | 4 +- torchopt/_src/optimizer/meta/base.py | 2 +- torchopt/_src/transform.py | 18 ++-- torchopt/_src/visual.py | 4 +- 18 files changed, 81 insertions(+), 78 deletions(-) rename examples/L2R/{train_l2r.py => l2r.py} (100%) rename examples/LOLA/{visualise.py => visualize.py} (100%) mode change 100644 => 100755 rename examples/MAML-RL/helpers/{Tabular_mdp.py => tabular_mdp.py} (100%) rename examples/MAML-RL/{run_MAML.py => maml.py} (100%) rename examples/MGRL/{toy.py => mgrl.py} (100%) rename examples/few-shot/{maml-omniglot.py => maml_omniglot.py} (100%) diff --git a/README.md b/README.md index dab7ff49..b58c5bbb 100644 --- a/README.md +++ b/README.md @@ -32,7 +32,7 @@ The README is organized as follows: ## TorchOpt as Functional Optimizer -The design of TorchOpt follows the philosophy of functional programming. Aligned with [`functorch`](https://github.com/pytorch/functorch), users can conduct functional style programing with models, optimizers and training in PyTorch. We use the Adam optimizer as an example in the following illustration. You can also check out the tutorial notebook [Functional Optimizer](./tutorials/1_Functional_Optimizer.ipynb) for more details. +The design of TorchOpt follows the philosophy of functional programming. Aligned with [`functorch`](https://github.com/pytorch/functorch), users can conduct functional style programing with models, optimizers and training in PyTorch. We use the Adam optimizer as an example in the following illustration. You can also check out the tutorial notebook [Functional Optimizer](tutorials/1_Functional_Optimizer.ipynb) for more details. ### Optax-Like API @@ -43,41 +43,46 @@ import functorch import torch import torch.nn as nn import torch.nn.functional as F -import torchopt from torch.utils.data import DataLoader +import torchopt + class Net(nn.Module): ... class Loader(DataLoader): ... 
-net = Net() # init
+net = Net()  # init
 loader = Loader()
 optimizer = torchopt.adam()
-func, params = functorch.make_functional(net) # use functorch extract network parameters
-opt_state = optimizer.init(params) # init optimizer
-xs, ys = next(loader) # get data
-pred = func(params, xs) # forward
-loss = F.cross_entropy(pred, ys) # compute loss
-grad = torch.autograd.grad(loss, params) # compute gradients
-updates, opt_state = optimizer.update(grad, opt_state) # get updates
-params = torchopt.apply_updates(params, updates) # update network parameters
+
+model, params = functorch.make_functional(net)  # use functorch extract network parameters
+opt_state = optimizer.init(params)  # init optimizer
+
+xs, ys = next(loader)  # get data
+pred = model(params, xs)  # forward
+loss = F.cross_entropy(pred, ys)  # compute loss
+
+grads = torch.autograd.grad(loss, params)  # compute gradients
+updates, opt_state = optimizer.update(grads, opt_state)  # get updates
+params = torchopt.apply_updates(params, updates)  # update network parameters
 ```

 ### PyTorch-Like API

-We also offer origin PyTorch APIs (e.g. `zero_grad()` or `step()`) by warpping our Optax-Like API for traditional PyTorch user:
-
+We also offer origin PyTorch APIs (e.g. `zero_grad()` or `step()`) by wrapping our Optax-Like API for traditional PyTorch user:

 ```python
 net = Net()  # init
 loader = Loader()
 optimizer = torchopt.Adam(net.parameters())
+
-xs, ys = next(loader) # get data
-pred = net(xs) # forward
+xs, ys = next(loader)  # get data
+pred = net(xs)  # forward
 loss = F.cross_entropy(pred, ys)  # compute loss
-optimizer.zero_grad() # zero gradients
-loss.backward() # backward
-optimizer.step() # step updates
+
+optimizer.zero_grad()  # zero gradients
+loss.backward()  # backward
+optimizer.step()  # step updates
 ```

 ### Differentiable

@@ -85,9 +90,9 @@
 On top of the same optimization function as `torch.optim`, an important benefit of functional optimizer is that one can implement differentiable optimization easily. This is particularly helpful when the algorithm requires to differentiate through optimization update (such as meta learning practices). We take as the inputs the gradients and optimizer states, use non-in-place operators to compute and output the updates. The processes can be automatically implemented, with the only need from users being to pass the argument `inplace=False` to the functions:

 ```python
-# get updates
+# Get updates
 updates, opt_state = optimizer.update(grad, opt_state, inplace=False)
-# update network parameters
+# Update network parameters
 params = torchopt.apply_updates(params, updates, inplace=False)
 ```

@@ -95,36 +100,29 @@ params = torchopt.apply_updates(params, updates, inplace=False)

 ## TorchOpt as Differentiable Optimizer for Meta-Learning

-Meta-Learning has gained enormous attention in both Supervised Learning and Reinforcement Learning. Meta-Learning algorithms often contain a bi-level optimisation process with *inner loop* updating the network parameters and *outer loop* updating meta parameters. The figure below illustrates the basic formulation for meta-optimization in Meta-Learning. The main feature is that the gradients of *outer loss* will back-propagate through all `inner.step` operations.
+Meta-Learning has gained enormous attention in both Supervised Learning and Reinforcement Learning. Meta-Learning algorithms often contain a bi-level optimization process with *inner loop* updating the network parameters and *outer loop* updating meta parameters.
+The figure below illustrates the basic formulation for meta-optimization in Meta-Learning. The main feature is that the gradients of *outer loss* will back-propagate through all `inner.step` operations.
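Before the meta-learning material continues, here is a self-contained, runnable version of the functional-API snippet from the previous section. The tiny network and synthetic batch are illustrative assumptions; the `functorch` and `torchopt` calls are the ones shown in the patch.

```python
import functorch
import torch
import torch.nn as nn
import torch.nn.functional as F

import torchopt

net = nn.Sequential(nn.Linear(4, 8), nn.ReLU(), nn.Linear(8, 2))  # assumed tiny model
optimizer = torchopt.adam()  # functional Adam, as in the snippet above

model, params = functorch.make_functional(net)  # extract functional model and parameters
opt_state = optimizer.init(params)              # initialize optimizer state

xs = torch.randn(32, 4)             # assumed synthetic batch
ys = torch.randint(0, 2, (32,))
pred = model(params, xs)            # forward
loss = F.cross_entropy(pred, ys)    # compute loss

grads = torch.autograd.grad(loss, params)                # compute gradients
updates, opt_state = optimizer.update(grads, opt_state)  # turn gradients into updates
params = torchopt.apply_updates(params, updates)         # apply updates to parameters
```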
-Since network parameters become a node of computation graph, a flexible Meta-Learning library should enable users manually control the gradient graph connection which means that users should have access to the network parameters and optimizer states for manually detaching or connecting the computation graph. In PyTorch designing, the network parameters or optimizer states are members of network (a.k.a. `nn.Module`) or optimizer (a.k.a. `optim.Optimizer`), this design significantly introducing difficulty for user control network parameters or optimizer states. Previous differentiable optimizer Repo [`higher`](https://github.com/facebookresearch/higher), [`learn2learn`](https://github.com/learnables/learn2learn) follows the PyTorch designing which leads to inflexible API.
+Since network parameters become a node of computation graph, a flexible Meta-Learning library should enable users manually control the gradient graph connection which means that users should have access to the network parameters and optimizer states for manually detaching or connecting the computation graph. In PyTorch designing, the network parameters or optimizer states are members of network (a.k.a. `torch.nn.Module`) or optimizer (a.k.a. `torch.optim.Optimizer`), this design significantly introducing difficulty for user control network parameters or optimizer states. Previous differentiable optimizer Repo [`higher`](https://github.com/facebookresearch/higher), [`learn2learn`](https://github.com/learnables/learn2learn) follows the PyTorch designing which leads to inflexible API.

 In contrast to them, TorchOpt realizes differentiable optimizer with functional programing, where Meta-Learning researchers could control the network parameters or optimizer states as normal variables (a.k.a. `torch.Tensor`). This functional optimizer design of TorchOpt is beneficial for implementing complex gradient flow Meta-Learning algorithms and allow us to improve computational efficiency by using techniques like operator fusion.
-
-
 ### Meta-Learning API
-
-- We design a base class `torchopt.MetaOptimizer` for managing network updates in Meta-Learning. The constructor of `MetaOptimizer` takes as input the network rather than network parameters. `MetaOptimizer` exposed interface `step(loss)` takes as input the loss for step the network parameter. Refer to the tutorial notebook [Meta Optimizer](./tutorials/2_Meta_Optimizer.ipynb) for more details.
-- We offer `torchopt.chain` which can apply a list of chainable update transformations. Combined with `MetaOptimizer`, it can help you conduct gradient transformation such as gradient clip before the Meta optimizer steps. Refer to the tutorial notebook [Meta Optimizer](./tutorials/2_Meta_Optimizer.ipynb) for more details.
+- We design a base class `torchopt.MetaOptimizer` for managing network updates in Meta-Learning. The constructor of `MetaOptimizer` takes as input the network rather than network parameters. `MetaOptimizer` exposed interface `step(loss)` takes as input the loss for step the network parameter. Refer to the tutorial notebook [Meta Optimizer](tutorials/2_Meta_Optimizer.ipynb) for more details.
+- We offer `torchopt.chain` which can apply a list of chainable update transformations. Combined with `MetaOptimizer`, it can help you conduct gradient transformation such as gradient clip before the Meta optimizer steps (see the sketch right after this list). Refer to the tutorial notebook [Meta Optimizer](tutorials/2_Meta_Optimizer.ipynb) for more details.
- We observe that different Meta-Learning algorithms vary in inner-loop parameter recovery. TorchOpt provides basic functions for users to extract or recover network parameters and optimizer states anytime and anywhere they want.
-- Some algorithms such as [MGRL](https://proceedings.neurips.cc/paper/2018/file/2715518c875999308842e3455eda2fe3-Paper.pdf) initialize the inner-loop parameters inherited from previous inner-loop process when conducting a new bi-level process. TorchOpt also provides a finer function `stop_gradient` for manipulating the gradient graph, which is helpful for this kind of algortihms. Refer to the notebook [Stop Gradient](./tutorials/4_Stop_Gradient.ipynb) for more details.
+- Some algorithms such as MGRL ([arXiv:1805.09801](https://arxiv.org/abs/1805.09801)) initialize the inner-loop parameters from the previous inner-loop process when conducting a new bi-level process. TorchOpt also provides a fine-grained function `stop_gradient` for manipulating the gradient graph, which is helpful for this kind of algorithm. Refer to the notebook [Stop Gradient](tutorials/4_Stop_Gradient.ipynb) for more details.

-We give an example of [MAML](https://arxiv.org/abs/1703.03400) with inner-loop Adam optimizer to illustrate TorchOpt APIs:
+We give an example of MAML ([arXiv:1703.03400](https://arxiv.org/abs/1703.03400)) with an inner-loop Adam optimizer to illustrate the TorchOpt APIs:

```python
net = Net() # init
-# the constructor `MetaOptimizer` takes as input the network
+# The constructor `MetaOptimizer` takes as input the network
inner_optim = torchopt.MetaAdam(net)
outer_optim = torchopt.Adam(net.parameters())

@@ -133,30 +131,31 @@
 for train_iter in range(train_iters):
   outer_loss = 0  # reset the accumulated outer loss each training iteration
   for task in range(tasks):
     loader = Loader(tasks)

-    # store states at the inital points
-    net_state = torchopt.extract_state_dict(net) # extract state
+    # Store states at the initial points
+    net_state = torchopt.extract_state_dict(net)  # extract state
     optim_state = torchopt.extract_state_dict(inner_optim)

     for inner_iter in range(inner_iters):
-      # compute inner loss and perform inner update
+      # Compute inner loss and perform inner update
       xs, ys = next(loader)
       pred = net(xs)
       inner_loss = F.cross_entropy(pred, ys)
       inner_optim.step(inner_loss)
-    # compute outer loss and back-propagate
+
+    # Compute outer loss and back-propagate
     xs, ys = next(loader)
     pred = net(xs)
-    outer_loss += F.cross_entropy(pred, ys)
+    outer_loss = outer_loss + F.cross_entropy(pred, ys)

-    # recover network and optimizer states at the inital point for the next task
+    # Recover network and optimizer states at the initial point for the next task
     torchopt.recover_state_dict(inner_optim, optim_state)
     torchopt.recover_state_dict(net, net_state)

-  outer_loss /= len(tasks) # task average
+  outer_loss = outer_loss / tasks  # task average (`tasks` is the number of tasks)
   outer_optim.zero_grad()
   outer_loss.backward()
   outer_optim.step()

-  # stop gradient if necessary
+  # Stop gradient if necessary
   torchopt.stop_gradient(net)
   torchopt.stop_gradient(inner_optim)
```
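As referenced in the API list above, here is a minimal sketch of chaining a gradient-clipping transformation before the optimizer update; it assumes `torchopt.combine.chain` and `torchopt.clip.clip_grad_norm` expose the chainable `GradientTransformation` interface, and the `max_norm` and `lr` values are arbitrary example values:

```python
import torch
import torchopt

net = torch.nn.Linear(8, 2)  # stand-in for any `torch.nn.Module`
# Clip the gradients first, then apply the SGD update rule:
impl = torchopt.combine.chain(
    torchopt.clip.clip_grad_norm(max_norm=1.0),
    torchopt.sgd(lr=0.1, moment_requires_grad=True),
)
inner_optim = torchopt.MetaOptimizer(net, impl)
```

Here `moment_requires_grad=True` keeps the optimizer states differentiable, as noted in the `MetaOptimizer` docstring further below.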
@@ -167,10 +166,10 @@

In [`examples`](examples), we offer several examples of the functional optimizer and 5 lightweight meta-learning examples with TorchOpt. The meta-learning examples cover 2 Supervised Learning and 3 Reinforcement Learning algorithms.

-- [Model Agnostic Meta Learning (MAML)-Supervised Learning](https://arxiv.org/abs/1703.03400) (ICML2017)
-- [Learning to Reweight Examples for Robust Deep Learning](https://arxiv.org/pdf/1803.09050.pdf) (ICML2018)
-- [Model Agnostic Meta Learning (MAML)-Reinforcement Learning](https://arxiv.org/abs/1703.03400) (ICML2017)
-- [Meta Gradient Reinforcement Learning (MGRL)](https://proceedings.neurips.cc/paper/2018/file/2715518c875999308842e3455eda2fe3-Paper.pdf) (NeurIPS 2018)
+- [Model Agnostic Meta Learning (MAML) - Supervised Learning](https://arxiv.org/abs/1703.03400) (ICML 2017)
+- [Learning to Reweight Examples for Robust Deep Learning](https://arxiv.org/abs/1803.09050) (ICML 2018)
+- [Model Agnostic Meta Learning (MAML) - Reinforcement Learning](https://arxiv.org/abs/1703.03400) (ICML 2017)
+- [Meta Gradient Reinforcement Learning (MGRL)](https://arxiv.org/abs/1805.09801) (NeurIPS 2018)
- [Learning with Opponent-Learning Awareness (LOLA)](https://arxiv.org/abs/1709.04326) (AAMAS 2018)

--------------------------------------------------------------------------------

@@ -179,7 +178,7 @@

One can think of the gradient-scaling procedures of optimizer algorithms as a combination of several elementary operations. For example, the implementation of the Adam algorithm often includes addition, multiplication, power, and square operations; one can fuse these operations into several compound functions. This operator fusion can greatly simplify the computation graph and reduce GPU kernel launch stalls. In addition, one can also implement the optimizer backward function and manually reuse some intermediate tensors to improve the backward performance. Users can pass the argument `use_accelerated_op=True` to `adam`, `Adam`, and `MetaAdam` to enable the fused accelerated operator (see the sketch below); the arguments are the same between the two kinds of implementations.

-Here we evaluate the performance using the maml-omniglot code with the inner-loop Adam optimizer on GPU. We comparable the run time of the overall algorithm and the meta-optimization (outer-loop optimization) under different network architecture/inner-step numbers. We choose [`higher`](https://github.com/facebookresearch/higher) as our baseline. The figure below illustrate that our accelerated Adam can achieve at least 1/3 efficiency improvement over the baseline.
+Here we evaluate the performance using the MAML-Omniglot code with the inner-loop Adam optimizer on GPU. We compare the run time of the overall algorithm and of the meta-optimization (outer-loop optimization) under different network architectures and inner-step numbers. We choose [`higher`](https://github.com/facebookresearch/higher) as our baseline. The figure below illustrates that our accelerated Adam achieves at least a $1/3$ efficiency improvement over the baseline.
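A minimal sketch of switching on the fused operator described above; the network is a stand-in for any `torch.nn.Module`, and the learning-rate keyword is an assumed example value:

```python
import torch
import torchopt

net = torch.nn.Linear(8, 2)  # stand-in for any `torch.nn.Module`
# Same hyper-parameters as the default implementation; only the switch differs:
inner_optim = torchopt.MetaAdam(net, lr=1e-3, use_accelerated_op=True)
```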
@@ -191,9 +190,9 @@ Notably, the operator fusion not only increases performance but also help simpli

## Visualization

-Complex gradient flow in meta-learning brings in a great challenge for managing the gradient flow and verifying the correctness of it. TorchOpt provides a visualization tool that draw variable (e.g. network parameters or meta parameters) names on the gradient graph for better analyzing. The visualization tool is modified from [`torchviz`](https://github.com/szagoruyko/pytorchviz). We provide an example using the [visualization code](./examples/visualize.py). Also refer to the notebook [Visualization](./tutorials/3_Visualization.ipynb) for more details.
+The complex gradient flow in meta-learning poses a great challenge for managing the gradient flow and verifying its correctness. TorchOpt provides a visualization tool that draws variable names (e.g. network parameters or meta-parameters) on the gradient graph for easier analysis. The visualization tool is modified from [`torchviz`](https://github.com/szagoruyko/pytorchviz). We provide an example using the [visualization code](examples/visualize.py). Also refer to the notebook [Visualization](tutorials/3_Visualization.ipynb) for more details.

-The figure below show the visualization result. Compared with [`torchviz`](https://github.com/szagoruyko/pytorchviz), TorchOpt fuses the operations within the Adam together (orange) to reduce the complexity and provide simpler visualization.
+The figure below shows the visualization result. Compared with [`torchviz`](https://github.com/szagoruyko/pytorchviz), TorchOpt fuses the operations within `Adam` together (orange) to reduce the complexity and provide a simpler visualization.
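A minimal sketch of drawing a gradient graph with this tool; it assumes `torchopt.visual.make_dot` keeps the `torchviz`-style signature shown in the `torchopt/_src/visual.py` diff further below, and the tiny network and loss are placeholders:

```python
import torch
import torchopt

net = torch.nn.Linear(4, 1)
loss = net(torch.randn(8, 4)).mean()
# Passing named parameters lets the tool draw variable names on the graph:
dot = torchopt.visual.make_dot(loss, params=dict(net.named_parameters()))
dot.render('graph', format='svg')  # writes graph.svg via Graphviz
```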
@@ -227,8 +226,8 @@ pip3 install .

## Future Plan

- [ ] Support general implicit differentiation with functional programming.
-- [ ] Support more optimizers such as AdamW, RMSPROP
-- [ ] CPU-acclerated optimizer
+- [ ] Support more optimizers such as AdamW, RMSProp
+- [ ] CPU-accelerated optimizer

--------------------------------------------------------------------------------

diff --git a/examples/L2R/README.md b/examples/L2R/README.md
index 8528fe24..e9317235 100644
--- a/examples/L2R/README.md
+++ b/examples/L2R/README.md
@@ -1,6 +1,6 @@
 # Learning-to-reweight-examples

-Code On Mnist reweighting example in paper [Learning to Reweight Examples for Robust Deep Learning](https://arxiv.org/abs/1803.09050)] using TorchOpt. The idea of L2R is to use virtual update of inner-loop neural network optimisation to meta-learn the reweighting parameters for robust deep learning. We use `MetaSGD` as the inner-loop optimiser.
+Code on the MNIST re-weighting example in the paper [Learning to Reweight Examples for Robust Deep Learning](https://arxiv.org/abs/1803.09050) using TorchOpt. The idea of L2R is to use virtual updates of the inner-loop neural network optimization to meta-learn the re-weighting parameters for robust deep learning. We use `MetaSGD` as the inner-loop optimizer.

## Usage

@@ -8,16 +8,16 @@ We use traditional supervised training as the baseline.

```bash
### Run both algorithms and conduct comparison
-python3 train_l2r.py --algo both
+python3 l2r.py --algo both

### For baseline
-python3 train_l2r.py --algo baseline
+python3 l2r.py --algo baseline

### For L2R algorithm
-python3 train_l2r.py --algo l2r
+python3 l2r.py --algo l2r
```

-# Results
+## Results

The test accuracy comparison between the baseline and L2R validates the effectiveness of the algorithms.

diff --git a/examples/L2R/train_l2r.py b/examples/L2R/l2r.py
similarity index 100%
rename from examples/L2R/train_l2r.py
rename to examples/L2R/l2r.py
diff --git a/examples/LOLA/README.md b/examples/LOLA/README.md
index 1decc337..1523851a 100644
--- a/examples/LOLA/README.md
+++ b/examples/LOLA/README.md
@@ -1,6 +1,6 @@
 # LOLA-examples

-Code On LOLA a in paper [Learning with Opponent-Learning Awareness](https://arxiv.org/abs/1709.04326)] using TorchOpt. The LOLA learning rule includes a term that accounts for the impact of one agent's policy on the anticipated parameter update of the other agents. We use `MetaSGD` as the inner-loop optimiser.
+Code on the LOLA example in the paper [Learning with Opponent-Learning Awareness](https://arxiv.org/abs/1709.04326) using TorchOpt. The LOLA learning rule includes a term that accounts for the impact of one agent's policy on the anticipated parameter update of the other agents. We use `MetaSGD` as the inner-loop optimizer.

## Usage

@@ -9,7 +9,7 @@ Code On LOLA a in paper [Learning with Opponent-Learning Awareness](https://arxi

python3 lola_dice.py

### After getting the result.npy, run the visualization code
-python3 visualise.py
+python3 visualize.py
```

## Results

diff --git a/examples/LOLA/visualise.py b/examples/LOLA/visualize.py
old mode 100644
new mode 100755
similarity index 100%
rename from examples/LOLA/visualise.py
rename to examples/LOLA/visualize.py
diff --git a/examples/MAML-RL/README.md b/examples/MAML-RL/README.md
index d99738e3..be2150bd 100644
--- a/examples/MAML-RL/README.md
+++ b/examples/MAML-RL/README.md
@@ -1,6 +1,6 @@
 # Reinforcement learning with Model-Agnostic Meta-Learning (MAML)

-Code on Tabular MDP example in paper *Model-Agnostic Meta-Learning* [[MAML](https://arxiv.org/abs/1703.03400)] using TorchOpt.
The idea of MAML is to learn the initial parameters of an agent's policy so that the agent can rapidly adapt to new environments with a limited number of policy-gradient updates. We use `MetaSGD` as the inner-loop optimiser.
+Code on the Tabular MDP example in the paper *Model-Agnostic Meta-Learning* [[MAML](https://arxiv.org/abs/1703.03400)] using TorchOpt. The idea of MAML is to learn the initial parameters of an agent's policy so that the agent can rapidly adapt to new environments with a limited number of policy-gradient updates. We use `MetaSGD` as the inner-loop optimizer.

## Usage

@@ -8,7 +8,7 @@ Specify the seed to train.

```bash
### Run MAML
-python run_MAML.py --seed 1
+python maml.py --seed 1
```

## Results

diff --git a/examples/MAML-RL/helpers/__init__.py b/examples/MAML-RL/helpers/__init__.py
index d7b9a7f0..213c216b 100644
--- a/examples/MAML-RL/helpers/__init__.py
+++ b/examples/MAML-RL/helpers/__init__.py
@@ -21,7 +21,7 @@

 register(
     'TabularMDP-v0',
-    entry_point='helpers.Tabular_mdp:TabularMDPEnv',
+    entry_point='helpers.tabular_mdp:TabularMDPEnv',
     kwargs={
         'num_states': 10,
         'num_actions': 5,
diff --git a/examples/MAML-RL/helpers/Tabular_mdp.py b/examples/MAML-RL/helpers/tabular_mdp.py
similarity index 100%
rename from examples/MAML-RL/helpers/Tabular_mdp.py
rename to examples/MAML-RL/helpers/tabular_mdp.py
diff --git a/examples/MAML-RL/run_MAML.py b/examples/MAML-RL/maml.py
similarity index 100%
rename from examples/MAML-RL/run_MAML.py
rename to examples/MAML-RL/maml.py
diff --git a/examples/MGRL/README.md b/examples/MGRL/README.md
index e2952d12..2ad228ac 100644
--- a/examples/MGRL/README.md
+++ b/examples/MGRL/README.md
@@ -1,10 +1,10 @@
 # MGRL-examples

-Code on toy example of meta-learning the discount factor in paper [Meta-Gradient Reinforcement Learning](https://arxiv.org/abs/1805.09801) using TorchOpt. We use `MetaSGD` as the inner-loop optimiser.
+Code on a toy example of meta-learning the discount factor in the paper [Meta-Gradient Reinforcement Learning](https://arxiv.org/abs/1805.09801) using TorchOpt. We use `MetaSGD` as the inner-loop optimizer.

## Usage

```bash
### Run
-python3 toy.py
+python3 mgrl.py
```
diff --git a/examples/MGRL/toy.py b/examples/MGRL/mgrl.py
similarity index 100%
rename from examples/MGRL/toy.py
rename to examples/MGRL/mgrl.py
diff --git a/examples/few-shot/README.md b/examples/few-shot/README.md
index 0437541a..d25eafc4 100644
--- a/examples/few-shot/README.md
+++ b/examples/few-shot/README.md
@@ -1,12 +1,12 @@
 # MAML few-shot Omniglot classification-examples

-Code On MAML few-shot Omniglot classification in paper [Model-Agnostic Meta-Learning for Fast Adaptation of Deep Networks](https://arxiv.org/abs/1703.03400) using TorchOpt. We use `MetaSGD` as the inner-loop optimiser.
+Code on MAML few-shot Omniglot classification in the paper [Model-Agnostic Meta-Learning for Fast Adaptation of Deep Networks](https://arxiv.org/abs/1703.03400) using TorchOpt. We use `MetaSGD` as the inner-loop optimizer.
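A minimal sketch of the differentiable inner-loop step this example builds on, assuming the `MetaSGD` interface described in the main README; the small linear network and the fake support batch are placeholders for the real Omniglot CNN and data:

```python
import torch
import torch.nn.functional as F
import torchopt

net = torch.nn.Linear(28 * 28, 5)  # stand-in for the Omniglot CNN
inner_optim = torchopt.MetaSGD(net, lr=0.1)  # differentiable inner-loop optimizer

xs, ys = torch.randn(5, 28 * 28), torch.randint(5, (5,))  # fake support batch
# One adaptation step on the support set; the outer (query) loss can later
# back-propagate through this update:
support_loss = F.cross_entropy(net(xs), ys)
inner_optim.step(support_loss)
```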
## Usage

```bash
### Run
-python3 maml-omniglot.py
+python3 maml_omniglot.py
```

## Results

diff --git a/examples/few-shot/maml-omniglot.py b/examples/few-shot/maml_omniglot.py
similarity index 100%
rename from examples/few-shot/maml-omniglot.py
rename to examples/few-shot/maml_omniglot.py
diff --git a/torchopt/_src/alias.py b/torchopt/_src/alias.py
index a29adca1..8c3fb571 100644
--- a/torchopt/_src/alias.py
+++ b/torchopt/_src/alias.py
@@ -110,7 +110,7 @@ def sgd(
     nesterov: bool = False,
     moment_requires_grad: bool = False,
 ) -> base.GradientTransformation:
-    """A canonical Stochastic Gradient Descent optimiser.
+    """A canonical Stochastic Gradient Descent optimizer.

     This implements stochastic gradient descent. It also includes support for
     momentum and Nesterov acceleration, as these are standard practice when
diff --git a/torchopt/_src/optimizer/adam.py b/torchopt/_src/optimizer/adam.py
index 1b0ce395..9e1552fe 100644
--- a/torchopt/_src/optimizer/adam.py
+++ b/torchopt/_src/optimizer/adam.py
@@ -19,7 +19,7 @@

 class Adam(Optimizer):
-    """A canonical Stochastic Gradient Descent optimizer."""
+    """The classic Adam optimizer."""

     def __init__(
         self,
@@ -38,7 +38,7 @@ def __init__(
             An iterable of `torch.Tensor`s. Specifies what Tensors should be
             optimized.
         args:
-            Other arguments see `alias.sgd`.
+            For other arguments, see `alias.adam`.
         """
         super().__init__(
diff --git a/torchopt/_src/optimizer/meta/base.py b/torchopt/_src/optimizer/meta/base.py
index 486ff15d..2d6bbd4b 100644
--- a/torchopt/_src/optimizer/meta/base.py
+++ b/torchopt/_src/optimizer/meta/base.py
@@ -31,7 +31,7 @@ def __init__(self, net: nn.Module, impl: GradientTransformation):
             A network whose parameters should be optimized.
         impl (GradientTransformation):
             A low-level optimizer function; it could be an optimizer function
-            provided by `alias.py` or a customerized `chain` provided by
+            provided by `alias.py` or a customized `chain` provided by
             `combine.py`. Note that using `MetaOptimizer(sgd(moment_requires_grad=True))`
             or `MetaOptimizer(chain(sgd(moment_requires_grad=True)))` is
diff --git a/torchopt/_src/transform.py b/torchopt/_src/transform.py
index 290c8000..af123a98 100644
--- a/torchopt/_src/transform.py
+++ b/torchopt/_src/transform.py
@@ -187,7 +187,7 @@ def scale_by_adam(
         Term added to the denominator to improve numerical stability.
       eps_root:
         Term added to the denominator inside the square-root to improve
-        numerical stability when backpropagating gradients through the rescaling.
+        numerical stability when back-propagating gradients through the rescaling.
       moment_requires_grad:
         If true, states will be created with flag `requires_grad = True`.

@@ -248,7 +248,7 @@ def scale_by_accelerated_adam(
         Term added to the denominator to improve numerical stability.
       eps_root:
         Term added to the denominator inside the square-root to improve
-        numerical stability when backpropagating gradients through the rescaling.
+        numerical stability when back-propagating gradients through the rescaling.
       moment_requires_grad:
         If true, states will be created with flag `requires_grad = True`.

@@ -298,7 +298,7 @@ def trace(
     Note: `trace` and `ema` have very similar but distinct updates;
     `trace = decay * trace + t`, while `ema = decay * ema + (1-decay) * t`.
-    Both are frequently found in the optimisation literature.
+    Both are frequently found in the optimization literature.
Args:
       decay:
@@ -406,10 +406,12 @@ def f(g, n): return g.mul(torch.rsqrt(n.add(eps)))

     # """The following is the PyTorch style."""
+    #
     # if inplace:
-    #   def f(g, n): return g.div_(torch.sqrt_(n).add_(eps))
+    #     def f(g, n): return g.div_(torch.sqrt_(n).add_(eps))
     # else:
-    #   def f(g, n): return g.div(torch.sqrt(n).add(eps))
+    #     def f(g, n): return g.div(torch.sqrt(n).add(eps))
+    #
     updates = jax.tree_map(f, updates, nu)
     return updates, ScaleByRmsState(nu=nu)
@@ -463,10 +465,12 @@ def f(g, m, n): return g.mul(torch.rsqrt(n.sub(m**2).add(eps)))

     # """The following is the PyTorch style."""
+    #
     # if inplace:
-    #   def f(g, m, n): return g.div_(torch.sqrt_(n.sub_(m ** 2)).add(eps))
+    #     def f(g, m, n): return g.div_(torch.sqrt_(n.sub_(m ** 2)).add(eps))
     # else:
-    #   def f(g, m, n): return g.div(torch.sqrt(n.sub(m ** 2)).add(eps))
+    #     def f(g, m, n): return g.div(torch.sqrt(n.sub(m ** 2)).add(eps))
+    #
     updates = jax.tree_map(f, updates, mu, nu)
     return updates, ScaleByRStdDevState(mu=mu, nu=nu)
diff --git a/torchopt/_src/visual.py b/torchopt/_src/visual.py
index 898cab9c..1f508f37 100644
--- a/torchopt/_src/visual.py
+++ b/torchopt/_src/visual.py
@@ -63,7 +63,7 @@ def truncate(s):
 # mypy: ignore-errors

-def make_dot(var, params=None, show_attrs=False, show_saved=False, max_attr_chars=50):
+def make_dot(var, params=None, show_attrs=False, show_saved=False, max_attr_chars=50) -> Digraph:
     """Produces Graphviz representation of PyTorch autograd graph.

     If a node represents a backward function, it is gray. Otherwise, the node
@@ -225,7 +225,7 @@ def add_base_tensor(var, color='darkolivegreen1'):
     return dot

-def resize_graph(dot, size_per_element=0.15, min_size=12):
+def resize_graph(dot, size_per_element=0.5, min_size=12):
     """Resize the graph according to how much content it contains.

     Modify the graph in place.
     """

From 509eb39db5770bcc9b61031a63ae34e8f782af82 Mon Sep 17 00:00:00 2001
From: Xuehai Pan
Date: Wed, 13 Jul 2022 16:22:01 +0800
Subject: [PATCH 14/19] docs(image): rename image files

---
 README.md                              |   2 +-
 docs/conf.py                           |   2 +-
 image/TorchOpt.png                     | Bin 79492 -> 98049 bytes
 image/{logod-07.png => logo-large.png} | Bin
 image/{logod-05.png => logo.png}       | Bin
 5 files changed, 2 insertions(+), 2 deletions(-)
 rename image/{logod-07.png => logo-large.png} (100%)
 rename image/{logod-05.png => logo.png} (100%)

diff --git a/README.md b/README.md
index b58c5bbb..a81c66a6 100644
--- a/README.md
+++ b/README.md
@@ -2,7 +2,7 @@

 <div align="center">
- +
**TorchOpt** is a high-performance optimizer library built upon [PyTorch](https://pytorch.org/) for easy implementation of functional optimization and gradient-based meta-learning. It consists of two main features:

diff --git a/docs/conf.py b/docs/conf.py
index 5b69ee0e..101ac54d 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -74,7 +74,7 @@ def get_version() -> str:
 # so a file named "default.css" will overwrite the builtin "default.css".
 html_static_path = ["_static"]

-html_logo = "_static/images/logod-05.png"
+html_logo = "_static/images/logo.png"


 def setup(app):
diff --git a/image/TorchOpt.png b/image/TorchOpt.png
index 76327240488885f616344e58557b7ae5ccdf4bda..04a90032d399af99ebffb922b966790e524619ba 100644
GIT binary patch
literal 98049
(base85-encoded binary image data omitted)
z^9?tHS-r*DncYe9PXj5Tj0w@Co54DjtUisi8$hFoQ|(U;I5|QR$ipcyN%Fr3947=V zeXNHH%wd=s$P_*ys})JBG1#vnJhID=j!fhX$BMSHR2g$``ar&;JSj z-a^C;SultMxEX8zDh6{R=0Cz66T}r`Rk*Tq&{dk=BqZtrFd)VK>eLbf38d`o*1^!sd1XkJ*uT_mrcpr+7W5q$@fIp~?l0QH zgnQ(}s51iH2{Y$pboU>^%=s?!1?-f}M1~1&wi4uV8R+R{WMr6-`>8y^;_OZVIz@Xm zC+-{VQ)y|S{3Ob$Te1dootGh(^CZw?x;Bv5|K9I#eQ;nLj%1tFknqc%sj_B93bI3zcuc46!m{5-e0VA%oaOqUO zoa7W%olDZ^578hJQNCW!0N#^qJ|nb(h^d0oPo6xW8_@exu36xxF1BV&&nJXcDZCu~ zzo4L#LcyDUl;SwIA1CZEcaxPBhI?UQm*-goBpe4($sF@owN`0c`!gTEZ8KF;RE*|* z9Ry%!NRrgjh6TdaACxZiOYKeE-QA_7y+JYV3HX(P`~t}3z#Jxgm7Bx2++m`^&hEGE ziva=j7M^r!QIQkO+43#&?N9!@oY%i^^bb0a=*ycj+e2OC=~ObH?;s9C&qG5xzZuS5 zlf8mwmF@Os-|C+iGB~4K0N!i0ew3$z7Ru2)UdX=YUvEzI%lPh=w_tyM3o~s6NvP*} z#_%&R(~o;Lch2OuoQya7ze4PRx=o0dqKe9Ft~k7aUHNsd2upR4flrX|NK;E-VrKRN z9U1=tst)V1@=~<~iCh@@{rNl}+UVd{cB%pvCupnB(7ydoQ|$=jb;2NmKtmisi(OVv z>?b%I1Nj078(gBH(e1hIt}|!9|75wJtu0dWp2Hr0Ts7(W^Y8&+ZNN&#yLpzN;;aHP zQ}4e#(Z$otsJW#DEV-U(Y4shYH8$w$x(_qr7n;_RAAB13GHPEi$OK{s2;+2?IopD^ z#kIUD$Gyw6UqNcg+OiHs{|T0bxVt4@M#`=6v)+}0WTe=6`kL6 z_33G8oCcL*A|fKt8aAN4dFR%xND;?OT~5I$Wgeck417496jMVfG}P6*KEyoN(}Rt^ z2y5ewD#sP9T z2v}W_a4&)NLcvUkG!I48hy8&p@dG)tI$WdM%fsapPpX&dAedzBd?9LcTn$P78Spm- zo>vbQ6#Q|&MqS)O)c5Nlap?-PjPsslZbi7wXL|n3ra};6Xq>f|Y5YvbyL<8PuZ}8< z-aB^5s&;mEVA>Z~^)RlvxfyiX6p39|3i}5pSxgUFxT>sb%x_%34y&uew2eNdEln#6 z6wTbbb;!Tto*lU6Qr>tpU{&PvC3>($;H(C$%I>4IAMp2^pF83I4_3=^fBkw4S-;-} zf(m1t>djJLHmHP%c^(cVmdRBn8h`{Yr&E9nO91-#ou^*w=#5}TQ33mzM%dDy02s4N zYzZ*iTI>~wQQ?fJ0^6~7dVC@xI=eIU7EPh7Ae#*k%!?u`e?LEpR5ELB!|Jm#@(2XB zz`Q4xX+ovd>^~3AwBPK1?d!Uh@ZSaged&V!{t=evljw$YlNnHEs#I=ieH!Rh{`>d( z_m50K6@4X0B*k|HoeInYP`E}pF2T^brKKf?o}b5h{a__`^%{KOf5(d#c`sLa_5biM zEG&d^`R$1DCwR;<5%d1+rDET{eJfy_g1gTqMe`xU=m=7WFpQ<}Q!TAWUSs5vhn>Mm zs$heWG8+q`8xxCHxB~dv;>ATdIonP1#SfPA$6^~Ez{dYOSFtnZ2C72DEJjR6{YLrv zm&=3?k;}gQ4PqNyGL6(~V(XwQ1j0>tlrIw_gwDpxgN8|pLKs9NKSY)WM=I5VAJkGC8R~mV!Sd`=$`BFQ9sz5>75EJ`Y<&K1+X==UiC3|sIv*8 zU{kg|R=~M)f`KAPEnT~H0o%=telNcEL|NhHvi#W1W->9GON3-mj+>-qF+DGH*MQkV}*MVWYg?x`X5Ar~)TJ$X8b949j19G$B5 zhkPQS)XjM9;iJL$=qjtu|CX%X)WfP5UwE!O(bB?+(129d^&HVL9XL?o*z^w$PRE2y zPfrgYvXw%A7`_X~&Sr0LFejH(gqpnko6S+c1!Wo``k!l&94d3>!YP*ogsvBWDi{j4 zLknHrz%k?QehA7IAb9|Kg|LP}v!@QMT6uq`pBhtAF*2?+4VD}7(J$14tl!nD1CZhC z3=~~0sX4G@Wo4z50lAKr97meRD~+haFQ;T zyIQ+$;M}$~G^q5vJz-;yov_MAu%B^qZScwkgCr1Y(dLYhJUQGf*N;|z{#?O}Wb|Xr z?t`8(hz>eGSD!q8P6x*|+*Z&-x~p3fl2a)NnIpV!gn?%ig~sE@FY^b9Sr`OuX3Df^ ztOZzV7qEXon8Vj*cLvifzBBSbaH!~!en97v{|^Fjm!uYzJGc(zh7k-fLdqcFx-rAJ z17=vSjQJ2zRt06-UAE%> zk?(D2Obo{WX7kGpIdnZJI{B7fXQ1BR zNXc>4&p&6r}d|PqFnw zfR-hvo;X?@bskp{oUtzru?-$H`)3ZSbKo#JkY_@=DWE^~|AaBy&rvG^>AVkRWg)|8 zg(Cod7FwO(Tk_Y3W`VlZa|E`Bsot0L1D-m#A8qv`-Y*Qv?8Ew8coYyN4LkR{<{1Zv zxZLd{fMCErQcy%By03Y*7yPdu2o~9lF7j>Sn=BYn*Ee+Pvqw#&d z#>B)#MD*u)95fg_yGl==g0cJ2+1UyI7|A*61w=gO5iOqANAGx*eF6{eWAz-!p(Q-P zH@f@tUh(MU`}sM%H^Yn%AQ@64j*{>|{c9%asw+R|S#@WoP>9KO<_p-31kv+1S25AB zDMy0qY*3I>A?$?bQG)XwZEdN02H+tgZ)p&~iIJb+G=N=`d}-D4Qx9Dl{349cm9C`h~AO~!f+Gt2~5|FlyT zYmvTn>sBJ>ki38wZsWMaz{Vy= zmvbDx=@!SOme?X%ry243-CbCTubk4-(n7=2{2?}PE%dyGp<}ol*oVxXyOVHr!xXQL zlfA^nZvU?Rk&}~F)heMJirOBs9Cdewv%0`VQ&I6#OwC+-+)~(uWLh2$s_rxMS3&&G zvp;O_J=4^U=nwjsuWJP2Z(pKcJT7E&ZH&ALLZ_D=)GMo7hBkeRA)t`w?i6L6_0~uK z3A>Z+zsdTBARJVwtcQzi1jjaZNyj{+8C{sX44bwD$MUR)1$2}jas$-0a16%*h)PjQ zH%C8il{?CA4qm&IM%)g0+@Z+_#;yN}&%(J;ui!{1c=8w0wq@7r7f|I4?g&@5#%dZn z4o=Q7n9M#fe+3}~P)LeAICALq;lxqa)g6KTH+(hE*y2**orl$5)2Nn%|A2Ee8iZcZ z*kBC=q9EqeGW;Gc&TdX1!Sl$3)zg$wD&5N$H}fs1`!x41Cnu9QvS4TL)tFSS^Mjw> zOmDS8a%{ZVHX+HNH923Oy6j%}2Kr||&Cx5!<>eB7;GZENL<-&%?IhB!?*T!gg+(1| zO#>3ronVt{9~#;*6OBa+9>SqSnWe_{8`%N@*2rphm?`45&w~VIS 
zS&sGk*0*miU;(x4IX4&YI!C9yGFa}KuaQwOyuv5FPE-$0!2>A6Pp-@v`%j}iG8pJ*cq^P0xLFo9 z+|dNOs_>0tUHA`?*wZ1FCxkL46ckwl)q}<$YG5MHfbyaid_8%xJeZHm9I`9S;Dlp{UZg=*eDq;PPws7YOsd+uXdQJP>fCI*SPn%sKe^3+rC$!qlOi-?wo@kBSdPOj~`t+Y*bQ z{|~t5Sn?Jq-Rk#hHo!Z}b}%ost?3)qAd`%eLk_*gL}i~yG;!&OQPmykG|veV9oO~Z z${MK?;#E3l|DG5o?u48r+J8wR26{=5>g#tMHHhuU0?cpbXg3!ZB<6O-pUdysHhYUTEV>rWXcKohagGDIb>1oPU4r}aI7f@AW6 zy4eX*T|$iMoEm^iDJYPz8-1xSM3zx>j)-|5Q|~WU^VG$h9@GtY4JtuAo|L;O=vN;;^d+2o#pjN> zom+1s_WIy>OE`DD@K4Z74RyQbSxI2r(QwWhjp7>9mTo(cT z7C4KcgGELs`t)tfxeqKx=?Kgvm@I+%c(<%s@_3yb_1FD_TEM^1@UdK48k&c4atY#Y z5Y#ImD+0Mj&^BFX+x}Pdtv{+*S)%ox{RyYXD9RD>Uem~T8Y)YgwCkdpN|8A~>I{EB zLpyss%N-3%Al0iRb03(ED*4QA?#wrF0ARC&r>og+vbAplq~?+5<)9%qTbmzgEBr{r zjL%_rZBn&Efq3#s+yO|zk}P2&Zwz_vur`^|-IZyCw9K6JWT)x`ge&OnlD}s`u?y&U zVPfJ-uwV+(d&~G~V+?jZpg_?2Q&OsXs;%7-BZyP{g=%Fe&m^GZP9mu57=ROBNLR^F&rV|U9|D?K-H ziliTm_L%4mQ%EfP^_=x!#|2!97XQdOE>O9+QNUTK%lVp32UUHTyaU4Mz+xboMI`A0 zk{olh0U|O887PP{=98L}+v`02RP?cL#kBL^cc|G=sI!|fC$kX-C=t*noP*Dpj-Q4` zqag=81jje!_)mc zhu|O7EItw>c8kfy_(+doV(w0YHJ|N$N_l6CKuUS!nJQoJO6wHfP9o0J6%onN{WNx@${>~3q;Y&HsK@y(n2lLq)aTw*j7+MIkAx+u zsTp?Q$|nJ?Vk(eS^wkeR;EZ$L%=J2I>7^5}9sBv@&We+$%3~y?5A%@afRC{4E4A7> z`z@akFcG4>JnIJSfz2wwe!$(%+QMQVuK7P~d7#d6=z?Bln|tr9d6kgcwpGAWH_xLj zZPedAGDFWO;V2IPgrQ)H7je83W-*Tg6oZ^z5My85z;l-s$Q6Tm@Cuuak%c z1Aw#ad#=b3>#>IE7iVN-zzTk=1{1j3p-h&;B zCKT0@rDlSmu!B2K0xom0`8;{@WbMn$GM{tPQzp7N5y){HG-)!((25*mxjt|8F=YN& zB`*e^qZxcWv<1C{iPJWp+Pe50-@q-)uAo<9Ss zjbQ~cOUW&>_S^1K$W~m|sEoAt^6H$excMw1J(f_x+?TCX_qdvri(3@%cMS~Gfjntk@4QaPsO^dG-rj0`g>vw(FC5O@^O41IVCA8trpB%< zBbLMc`+_RjjWj-&VF>}}stqKxHKE9a&oQtb4L*9Ed4HD={r;b{EuBuBPh}b&Iwvr! z6cvXcu;SeI3doejrv+|WcOez-g&T6-y?axe_U-G=yjp0YkdbbdDqvsA_<*u3J-e3= zVWUv~W@PN`oh56^z9Tywe0sexC0J=ek&#z){`mPf;)=Ff!Gs=}HnOo!6jKgfU1PNDx}79;K^Vz}j{;amD+X?kT;LN#Zf1kN zFFj>IaPYi)K>Lm%y8`649=OJbzkex!6btc?T(AlXMjSuJl4WN3Fq%pu1ev9H>_nX2 z5d>PZ-n{wdQ_9zG-|WU_@2~R<%zbdk0oi8d&C-$FOK6Mk9>ATg1%|;30V@CcJ=)h% zPJ}A#zjd;0I2&xzdst_>W-A&jBb-6bor_WiZQKEoT#m`r7#2TBX;XTNL@Kl0@cH-e zAKYWLsQbRpr9*WWL}No0{z00f`~0~u9LU@2LG}&~=8kJn;T)ud)4?Gx+mitTyXf(7 zDBPjL<_?fyEQ63qGmVp;m;}6(?O1tKeGo+UB@3JT`N~XzBBcpG&?HzShGb;P<5tR# z52s`v?R9vX~L9E zoT5}ZwMbw;g$$&na|p@TwB$r8`BSx_aLfWE&j1IR%_Werp}uVL-H(WuEb3r-w$Q^Ixdffx)d-e6F zAO|g`XXP$-&+b*)6;6xXYBi{&Qutp%KLLtmt!$k_ z9N255e3efXWaj%pbGus{9ut7ssa^?#ayh!C8||VP-2yYpY|eS$%~p)DpIzO;8v~)T z(frqH^PC%A7jW)fhrNF@8 zjmtcEVD9&IOfA7k!}RLBu^o46VMnZR&GWcVAB=!zGTiwVmd1S!W4o#6&|!TP=acw*51vaCn!Nn)*S^Z-CF77EM6Z_(-(I{E7pvlm9WyJzXVc8`6vHXTM)T zJHC~f52FICCHQN>7XU`>G}(gL!REv+A9?Md5wen_&4iN?xXpk)xo?mM>ci9CLB17$ z?q3Hn%X-Z^?LQ7IVRsD4HIpl751I1>9@kC|z!*tJ{-gxd)@m_bZt|G)jxLeE0P+Mm zVQ1@^eo>gEPWm>;})^dDZbZ-86S}iesb+ zo&p6A;dn`BC@m?eTIEhHKpwPQ`pN{>@R|^EO+OWJhuJ{$Z>lapug=3vAbeg|w)qFp z^%6Yy&0)Jj^t`X)3M=wizG>jW4{AH{$@U7U%IH}UsAEP{dG6ZK?0emZ4{5 z)LEYb0D(=nf2$?-ADl;u?*;(zgbIty?P8F(WqbP->gP*cM7Ydl>**NCOB^y)d=xv1 zwi(Rx^;8_6`hwt$G@7HlmrVtGCcaQzS62+^FiSaQtC02;ygdOhiqS}|w6xSl;>YV) zakubI8Po=<=jmEzR+jmjnWAAEBd7ZjrkEKpD7hYZt81A>8y>L7_VV-e3^Lqq$c__t zE6OSf&Z6^*-RDrd}=^LD8xu4l@$%vY5})imo*OT_e6bu#2=i zm;Q1Akl|mixPPZqR`~ln-t;K+QCN5BNkty?e9b*dH)T zTjdOYj*q;UX3U>4x{`WFTM}W4yrsgO~-ZBewR+*|oo? 
zZfw|3q0U~iE7M?-j@Y?<+j$Qj*nU=??YM*lie#4(i)0S9&n0@f46LUP$&)A8fCYp# zLw^{4W>2BLY*i$*;iwhVPFJP;f0xsi@Uix~&>{m?^^>TOJwQ}Cwm=UD9IIA9A+-J`1yO;SB#Uj=V?eK z?PrKtZbGkxky1%TCHuz@=-fOC*a{tyT$9jJD(Vtw|uv)4xZk|VzW%kUp+BzxV_ zwXflCfXQRJ4M7!m4g>%9P={;Qb+al?+;j@(x8rR7fv<(vES%y*42!XDN8 zdqge3m5Vx_Ylth`bynoA2y0c`PV)2jr{lNy2UiCsYGTm*1I$n)p&ce7=5kB#7|lF@ zrE8jxYpwxS0eAzT4u%>zzry4 z1Sobj3*i=|#cWVurVgD?ptU9^C22vZf}en5_D$yf^$OYpfS-W<784zf^9cavTuMoa z5D3q5a;`%A(ow7g9+JOEX~uI-_Xu|U(WN*i*HdpP&_B}shCV|Px>iwX?AAqOTk*u; zpr1n1;DyosIj`bTA-BPL{fYz9J}L=Za8m_+z*mBQZTWWghcOh(pYTVP7ETu#wP&KF zzNe7$v1Yp3vq4d%A}^i1qaO397V1a4JIH8Qf8J5+2tX|WB~tBgMmyd7ej5RytWqH7bN<}B5fa9SRKkUBRvci)@A)W5>>X#aPx!T&E;Sn)j15GdXT z2Qy$T9{boRL0cx#sz%+f zInIzf|U6EBXRZUb+_vf9fs@xv#E%WqPur_+;a#^Mk-mB_lKcD;Fq<;y4 z$4kO^Phir^$OF;4T6Z;hIf6Vql7t2B_JXaa7&?d2C!}3(Cb%7${X*>=G~HXt-)0D6 zJ-gjFXt&Q;0DH!!&t`fUdOK#kkk}69*8tZHjb(2C`WuwT;gCh4W7w7apTGQoJGotSufilH8P^(67oz6VRJoB*ol^1qkgRbH(Xj>%m=bHi2!4*H{x#Ek!oUU;7P2eDFbd*fJ(Cback}gYZXK2!V%QU|^G>`=M`L>}|)R>F9{C z{XMiNK79=bB@yyFC@^(LpDj_`Y((&4*1Lca>sr$+i#YHvMK*+T_9H68ZN4&(48MMS zW_)GxQbmF%j}VTh%NYUOfnpRYI;{zsZbMcA+ak^s@@JPeZki!IJVu#Q4LqcYGjelr zPQR+egG-yV9Z3w-uUnn(F4(K2zd_l}984WBZy&u&k=aNF(p5+Zotkv_(wFtK8f-IC zCsly7JzIt07N0So+r^d4 zEHmG<^)qsCU#g0Vp)IqJ@6nd&{UK#>uzR1<|Ix8>dqMk{!-)e=m+EIf#DqMi+c$iC ze4Xh;obHTAP)B%?VJWQ6wz!;+*AM>89&-OL;XaNb{QRz&Ze9?k#b2q`TY_7=@BlhX z#4K?x`WlNn23`Vmn+ru*<6WDMUTZ^yEL{f*m~1RKaA!kn7j!}N%A8nz02+l-MDgqW znF;ZLx>tZLU;m&=(le_NvV}xSf@TI-?Vv4^fIl=G_KcrCP!Q&cVN@1`V%z9oYy`e4 zgdcz`DenJaUJvO?h)R@>fvG&M6BZbYhaE9aAo8hEJfClm(ozrisR2~`;)gm?EJ-{P9a|6jH&ZPPevX!3POXY! ztaiC5NTiu7_mi%6Rv}{EEm5+3hXX}PJLapY8EbfOB#5GPj47Lab@&dqK#AgAu0{n# zFq@9)ZwFWPFYrK)HFpJnjA}D*Oznc|HBOin!$*mcRO(|2T5cSkrD9ReJ+UeKVpDsu zV!W5V5<+AMp*BV~H$`$yw21I2?(I_&eK|Wer3_F@q$s<2;=n8;&x~Df zn_aii8>b{KFj_1ZdG2*7_pce| zO|lzGP>~erW72RRA&EM%|D>SGC+8+`Zpu{_7P)nNB6qz)Z>-mN>-QEi;`-Fy9YU3t zv$L{ph*go273r(lvQ_-Xo3dZts1}3t$~M03O_u#Hxb64pkKzM~#D{Nk38x3;#HCoVqE_V$+9?rnBW-_(UgC!fLYZ@hPlT8Ri0s_~&}zG!#|FTiZ;G>vW!PiT`2wx` z#Yj*b%j#VWs|Nu&kkCjU2g!GsCcEwfqzRb*(SALFX$q;l43gB%jg3;dJHlea@Ba~x z%z6zOR9oe?#{RzFW5P<7nV!%P?N1cEb4b#f;RMEqJ|Q1Aw+{8*HF>N(G?rvFSfP0Q zzys{K7qfC{!ByGI2u^|zD?4H3`nLv<;wu~2j@xfT#4?2M{E_OYjTZ09PM3LGSG4=V zn<>S=@-k5OSkYS&!1V71Q1x{~w76Z(y$V+g|CbNl%gI|~XV%u%hK7bd?2E)VPe4}O zYW14Fs5JWV=1AnQS!GU4tGHzMV*@$jh0gCaKtcuk-#-R4II%AnY9S$+w+r>d;6v_<59Yoaw?iljP@gQ(_VVad{ZYZE0+D z0j3!Y>No-l>rB7cK`E0{xeucOAv8m?fb_U;%qZv+?Hfr7bbMF3vOZDJdPy~S@HzJH zfo(gBjgs{)B_%V4Kw#}5+By*3-mteId1ZF-h}|vReeiONj;23`4b`l#Rwf1sKd1I2dnFN4hPWqg^sAkr#e)kdx0`Bh z6rmvm72vPSTN4B}P9v>r8<$`WC>V_>yKml8qn|caOeDvRs7K#znp1c`YNS?SoQ#nK zv0~%cCCPeH@(0A_{(u$gT6p8`E`jgBBzOkt7`MF^EL=?{?KJ~c)4@& z^dw66FM-*V%CYir)v<*>O*^;4U+S9aO2VH%VS%^HHUm<*#3y?3yFdD=Wfm>0$|MZ( zVH&2@UA(Fz@vMPhPekXew8+&OHTNB)(8j^^5tsI)w8=Z<*!Ibj;=s0Y3k7J~joXbs|DC|lhvOl-dGC5f954-X;x|}kx z;NTr)y1+4lsaU4R+D1k}lV{^ShYU9M^Xau;J8gPO>o&UBs~++1?6e|+<@}x8gOx7` z+FcLhNw5*p3*4$H)`v*h8$AOVzligf9-2x_5yNY);ah{>e$lrZy*aaPC!_lHrCz*C z*AfW}+=KJudadQ8--L!{0Y7^moqUaQc`XcL9As@-wjO1U;-eEk{~p#glrn^KUGurM zUnj<+J^uQ!j9%c1+h*F%c&x*(T3t9Pwf7E_YNW_DCU%d*00o%8HK<>E^N|AGtF~+B zlAK*-{C;L_retE;V$hF@<3DLtD3Fs@^%1pAdHbC<3IN|8-mbyR2@6bq`t`nrai^X@o?wXv|LeWI0|H*V%rcFB{%UzUEHhY= zrO65dlHS(7@26ME@}I#Oj3q+?gD|G5Xrb<}IEGZIJ$`F#slx)bRnBVNL_~5!ERLB| z1Sxsrx=U6nekU82Tn3zYDGU2;^?G6p`5?PPXa(tnwYr9wjAa~atP_kw@9hHi8tqUn z*bM2CVP|rGS88;I|1KC<%D6<=L&D<8Gtv{|&q-cwsS z;Zz&hM(($7rWOZcS+*Ybs)I2lyY?#1gWJxIBRqm|EuNiUPCHviwsvU?>M>s4LMO-= zH%AYcRUJ?E<~!teHve&R8730_Zp3N!&mUjek8xE@asIyQCO#by?2s+j3^Yl*qf(s( z@||=FOTsTMW3#gE<<4P>Q&kVIq6v7XGhp!7Cq%Q{<0e&K)9g=qC*(>`?qUDBa{)rz 
z!O*?Iw(GjFGCps{e~&<6vaE#~gY?*c!K_psRH6y#6Jro_;Kt~Z=m$rqO}*$YDPp>V z0p$+2ojr#_MDn=g2bUg!V;G(Lx*oSaC8DgbVbLxw(8;PixX;_BFMBgVz86s3>P= z{mlehkc;%8>FwlFr=M`XAH;{g$mchD-g|9y*5A2ejM;x|7JuaOTZ+W~U{|cLjJ$_+ zpj|K3zIP{7aD(|wX+74!AnhJM9M~K=I2yM}r)#rN$>Y5gaO`n65lui*+EbGy1dIR7DGIfk4d{wi`h?18QO*1|#6u8{Uyeu|+-BT(U3f{Qqi zM9l!HN$WEiFw8k+o%#F~B$hp=zg8dMSY?6r}qU;I@%QCUK;=Z&0-6(yP-@{Z~!}R_f zZ=v1~A4+nG(BtTN_4Mi4kK-O&20j$BRae?H^F7A345V_278r!yOaLNthkPZVbY4QD zeSA*8>STC@by!m6%6cN-Qh~@*S2Jem2qN=ow?qu@E$M6pI38hJ-7mZ`IIe*WP>9+L z$D#SRLXF^E#Wl8j)0xvapY={z`m(P|G5^9SfxF3|m4wCKhjfD;K)_Didc(6VwXP23 zhee&ZLKyx&ownaX#F%aWZ-a~l%maZuV>TGx6LGPh|I_IC@9U$d)!YJN??1`d_R$)d zh4@CH?#B-PprS-m#$r9M(57DPm5UjXjR#N>LidAlKPnlZA&xLfF1H_0z4(^*MBC`a zeFqs2YZc3|l(TGc`pj0Skrb*j5>KOnUxwfcDq^u0p8iL37R7rJG8g*K#D;nd&_?>}9B|phSw;k^39Ia!U$#kz-R-j;jza@(f%`0Q$OH9`H3; z^=yT{*#BUP7hp6HW{CM-Rfh>+_sPeRrSMY_7ji^|^!B&w<;}mX$W4%>s6{p$03b9R zp4I3tM~Fn87OU547v${0zm^MM!ObCtN(X+upIvEz@6~ zJ7m3Et^~JpmOM3RJo9)(XtV?_vsxhYoLB78(SRBeL|#8~8R@*6!?Q<}sPcsKMOJbP zJ4kv*00)vCucusfl*uKk;(Y%wznG#qb-9ZPipns*2RcJ!A!KFiEY|K}Rs?6_9YT~5_5x8|kv>(&&G zFTj*N$Gc8C>&gdT=PyI~wz!WX z=IlQjQe_yPll*OS(5yF7RL;!DTD=7={&hJ$vDh}3>~?0Uhub}tnK#x(AXoOqps#xi zBjU0Baev7Hjya*qPtVzcJ)J=hO|&X=1v|)$cs&zQ2x4u_IgSyczqYU@Tm4>TeQ^f8 zrdRDML#`g!zfd!%+{~KO9<8kC3oD!t*V29L<*~Zo#@||KGrpSQtKilF`aIm{?E2zL zl!MiV+=3irkjdz5qvG1$;8qE>5-~uiq-{fRuls^V)KM)KfdAw4oI7eVxtka+_Of^!1G|x=BD;)`qx?JDj_TF6%=(u_Ey`;6o{ii z9{8Hk>doi!U*c<{rO$~N6s-cc0^6!W`u6KVeD-QlmebNgOOF2i{b7eWUavAO|LpaX zApH^#x9k|d-rxIaA1kxCe)BWGDarZ094t9{q6-w1Hj#ms5)ONU5sXag8kW|c2VLS^KbNwRA+q+%Pcm74G0BbojXwnB6f)b(x2VckjswS}ki7UQ%6UN8 z`0wav+)4N$q9}OXR`dMOHC#K1$iBYkEj|GZ2y{ciV_fD#m=5P*&&=>W%kcai48NBu zFJN@eS)d7gg&P=$C7R>%Am8_z^9x9e0<#lTU>W+8^`4m#AFaZ@ zjGGgul+k*@Wh3GmMUg~I8aI+Z3X*dBFs6SQerKLM*4f=3)*6;|S(~*n#=x;8#}0H1 zbCS3*I$V8%0*=;dW~60B?6(EWl5-gg?ZG1AO72(DMINX+!gs*X#!ASt{%ebHw#T38F`CZ`xO@vh13J>lb6CC<&Ax>P)v6v;A2iv=OAtTX)!vSV!-J zgTIXdH|3_D9N797Vl6WVw8OygtR7n!OgIqp-lr=B|Qk!vH1zj55SS~@{b&UFRbaf#>qvNSWdwd zj_RGbY-aPF;f=$zsu{w#Lpp~M48xt12?DgZ1R1liA|5vG3s^cZM$;-_GHTB)e$Ts_ z$xX>t+cC#|N0{n|n@ZyVVlMGy{UsioGPn%1WccscNZzRmzp#GVhe*tl_7q>j!5_eP z#%QbK*$1=_`vQM$bb)JFU8HR|h^eue_25cI2MJFtMg<88TLYXt#BxLh7|9~cZ*iU~ zXs=#vle8P@8(y5a?l-|N^4-l_%oHOjb~Bs?m;s9wY6vMK@H+ee=aM*Rz^JV2!4H_ zs~J+gYtnSLAa*?xHWXp=?PmP_hVbRD7hnfWKI#t*MQY%Nbx6nEPnyDeZtXqU;2%KtlvnodzSE~z*7vK3S zu1lc_hMXooel4G?Jt;W&0#Dy(VH#@T4CA+oI$&zC5!nNsOl(5RnEo(rE6;UK3o7C{ z`k{D9(GM+oqMafWJv^6wNDccbB2zFXJVUFNW)skDyKJ+8N7~13t0ypF-0ZZc#*w-9 z{cvPR5ceVFokm=2OIjF=QJlN0Z`3{6$+q_JuW|Pm*uK0;+;S#06d1AXf=C`h!gq+- z1MCk2sG%OC$KSEqH86XKeDw7)88bwsJ?JJo@HzpDmkmi3(j8LlbD>c54SWe7A%h}} zve2yVW&s@`+BbFk@t!&Pgqxwv4XuUDay!e>)8;l?(%!_8UxQ<57;~(sIIZmj zGXcUYF2~J~XPWlFU^p_t%A$F0I>#HHM7wJ9<){#&{Y5NxFrn1h!pT8u-K7)7$Jza8 zhOWF);2^uX$SOqLdjkwOK6{j=K_0+Driqo>$(@r4i`m#)2V9Ykbt%ow&D|7>CJ*e$-Ba=hQ56cbp{v-Q82QMhuQ6^|{o!yNI4Bf)Gt~QeO#KmGK7g zKRc(-Y>YzNowb5{`Cs3_Q!U1A!(AN z@38dFzowUH*3!ikg`VWJG%lAZQ4udMk79YYZ6&)vzM$SReBz7V_SeJF;X|h7o>AkK zd9WQov6oW&$IjW?C%)>lyW{nBfhOB&Yg7F7-BS=XziW}78TIq-m45knp-o8b#?-ju z`*du(aDrM+v1eMU${I(2DwNL~n1k&fH9avOT)%aDTL;t=Ej zM9!;?^?gzKnhV_`TF@q=|Lp3sp+~2^w0j~}3s+oV}|hho{=tIi$QGv%{@#>hg#dS{M`?miDu{szZn8 zv0uXV+fPGoD&{)QwH3g2kRi4sBV}tkA+-3!PSGFx zEgJLChMI9NZ?vZ}0XEw(#BL)~`*!AX2c;^W&lJBWW#{Lqy}1KmYU{R+8otJGbWmDC zt8T`f$Of=s84JShhtsscD1vn$`aCmjU!Uj`G#6>dGgezx5%=N{x9>3&b<<3!aq zP2r59+p)_haa?O;hiFxI-0C-Emyuq{Ps9QrYHK}^Bqb#kfFrVm>NA1r6MuRf^*QZ2 ztc9eh3FpFO;@Kikp%y$ zP2-crYv(@a)yqG764+e@xz?tVyHdg}jrj^r{LW^Sm#dp82h0Z*w|T&mp9bf(PZc)~07Y6>v;*Bi!$C+*Dpa)H|#Ng2SVFSX{D9miRTU2IXP~ z{CUfAt?u%T44g!ue`+{Upf4# 
z7-Z$iH`7)FE(d3F`&Pd4@adQrg>{2tZ*k-_)fH)Zzu4?G)3Wn^sqQR+^C|&3B{r94 zMX%L*BL9P}H5z9I(78a-Ep=sO{jmyvRL^&hXFE#-)*6BNMgY>4Jg`o9qSczikYM%0 z>1FyhXu^<`-gF$k3DW^^z{P~;MA}pwUT&Xd(8#m8^GS1E6Q;-+f<9%%p*PACGo%FL zD`qylU_GJ3q)B^-x*($18V#AVwoXcpiMMB~KyN1h!@TV)sU1)u;5qq8>(J_jzUqN( zd3OeT)SO#Dl!Ff_fdZtK(9rZ0LQw%an5$vNx`W9#v*8*yA+ZlAsP$#-QR*j)aj zz|LJ{N~R3UA{OOqY!QMX`dzZ-1b5hR%F?FoCrectm0e25lqTV!m#K+0U&49{R6Me# z>{me|l>{jLY&+&pBgoC!zVU)K$L~)wU?RuHJoS^Z3!# zuGgLOn&)Mz#Qr09;KYW&z-w3ht2qIvDZhXSH+}wUrL?de5KlX^!5K0w&i$@ls+Qm^ zj)HDZelMs@qqD2O8lsl6(LoT=w1w@FTbP#V9xPRqAML<1R8j7}Ts)46mm73ZgfBSS zFi?{@Co6G$LQ(g2-Pz~Fd^BGK$h})d1$M^2r8s0O!T+F;KeKSR0orT}hX}J`Ku2HZi4AMbL#@gR7Zf zmcInT4MpVq-2(jQB%Wyx&5@LAWoe(EU0X_Sc4=cLdz;{2Dl`eJC(&{S^~N)arQKSx z_p!a5si)7@tg(+H!rSI+U(y?Y?iENVlgimtkoP`M*E%(M59Al-N~WEvm4|P?dK80fEQ9WAlne;SVT@xnmF6ENDN|)074PZ6MsjIQM z3O0+e!g_nY{gpAPvs?36aSp~hbW1_Cal@{_AZL40IJZ5W*c6O_?|xktmh-q~@| zt>6^JMZ6Y8Il&@~{BSe_?jntI6=0?)gMjg62~-{Rz%bNKZ!;!iu*6iF1|++bLtfSV8o?i@j^cEZv>Gv1b5Lxd0Qgiu#RcEiRoh>vS;n<={Lf zjtty@`>WKC>^IHcUZ!_;kgC|2B}cLeTe{jCWx~yg2|GojI3K9bdYDS$FAUVq#a3;S z#e;-J_2fe0hUnN_O?#}9np5i!x}h3}h3~a7(rng~IP@71=@Rhq$`Q3H_|dD?Z7d#X zPf^+s6hdn2lnFg?9#$AI16=vA-bPsnWzUdlN^EP~mwP-5AG(l-Nk4dKTxr=*SS1gP!?q77S zQijt0rkbN392E3|cbdc8|1vMJTrRbQ%<6P8o4y#c0U#g=My~!ZiAC(vaV5yw=1|Rn z6#|VK*IG^#$;eh9vtEgeE5cQ$Qg`hQJP&bM>+h^2OR z6t~XL4s~&g>FXFD08P-<=_&fdI?0h4rVrTOtqpW9zqIY_T0k0RD&5#jE7No+9>24b z4l{$8+YN%>c#ape!dRVCCN)}&w6)7U;q;NYSN1E*2ez(>+|3X27U^{P3}*Li-a%&$ z?~jQhipZueoVqETp+Qhd;Xka3a{}=v=rz0igG-_t&HyK%><5k{24ZE65>Z=k1$6ut zG8ZoQC@oxkw;(kYOfed#K9huuu;YJRX4hWNu8lfEt6_KW291w^D&N?vw`{cn?KPLM zli44s)0z5LLxt;pZSD*e!$b)DiBZ~))e4t7hB{HW*R1ldsYO^{$*&VI!rPOdRr1Gn zKtr&3zfa8@;-Y=n7_ZyX2BScP_l4Yo*b?MJW*r|^K6^aR_Pjn{yilZ=TOEG@MjI*R6dX3RzRr&Q?Oeojy? zEsx$0NW%hhqWk@tyu%(`SnZf(Bp1?AS)lCTTf$hK4{RQ5FHCz#b++|`2Acow%ctI4 zN*z)>myKDlCipThFx) zttSj`_s{^pV}G>DMc6*9_d7RHwU=kYPNO*YPo*0tiy+645G;J$#ahQnW8tf>;Vy?# zBvb#LR~<=!tat}z)GfK0+SZAbcXkstgVtbs>qsyJ(F@60oAZ6!vz_`FHJwRrxBF0G zEbgnP_pA3(fW`n+Ey-@Oe;EH#$sUs=Kne?hFfG?x1(xxKL!VCG0Z<>QDreO5t=6x| zD)Z4&D}5;`^byIdT%=s&1l@-7hVz{NzXBzAd!T&g>eLYM-a2M`j&*)sKo)#Or+Vwa zq^o{$s6?m64TZ-YLaQQLzTL6kEY8#o^h6m*5bVp>VPa&&E%u&`9^8j+l*C)S4FM^V zj*dItXXk-##%XUug!bSH~G6k2+uh<64<;+x~&0 zZ0H=gR0CzVhW86k&rxpTy%!WPzEDw7&Hk~iJb1h!C9B36 z?Aq)a0r=WE&)okWm_k65LEVBb0Cz?004a8V$F-*cKemR+;Fvg-GQ^9KU;~@eJUE7jPqu>QynqF_x(RBB-)Hg z4{+6=7)YE^9%S!T7CqEJNtv2W{*{Tzqf5!r5z!p>HjV|ncZv-vc&=SDfE;H?93?!Z zJfLDvduG-^rh4K%D?OlxmBe0Ua<^JFb2!nxFw3-cp$7jeuB+x5Opjl|*QgRtp1R`k z-!Q6d=9 z7(GtPndSO$cwm(G@|Q@1o6o?n%>L0_2E*^Sudo(4hiqGkPVp={c#x@aj-#4b}Pi^XJLQ zNx)dSL$sdHw%g`VJ7O!aq69@Un%|tbHyG37|GQeN#*dO0lKTKHbmi79dEhb2oGfY9 z?3_CG54|($8|Bpf&Y2v{ORKHx#72IdLE`pxX9b{UA#U>dqivK0?qJ+&y`&z)VV_!A z=(hW&7~fb4<_&TR_)%LRdP+%P6~FOBz6n~L{0%wLK-@Ji8px|zjgZaC2Ncw$TCPP& z%<^!h*me*2R?oR2^C9_gZL*Q@VBj?$1Hf`sjpQ%tFTHdYl{`Q<0C238B)ej4_ACtx zz@b~6(_TXWJ7Qtv0VxoG8~wSfuLEf}4VdM&S5_v0E;R+oS%7txCY8O!BepsxIO2R0 z{n|qS=5rUTD@qMq&M_NIHUtBZ?Y-ZrIKf3CSMf*^Qh*k_DQu8DNEH7g`(miX!hJg0 z=q3jT88ZR2DcN3mp4K@W;LtMq6N!Wz5E`Z`1=4r#{hYVs9i-o10{k{n8u{@mDS&L> z9BJTmrV*~aW^jD)1#kucoMigy6)71RYBpL>pAn_FaIyIHIthMva-=UdCPv79q4Y)V zo99J_1XZOP&%c=G!!Xz;ixY*_ws*g&EUv6vtAuPgLVC+UAz;PlEh?vyjgTR3J(-X^ zLbx9Y7LX?_@Jy#X@E5XMtHIb46}(8IiSO`aM=)pwG9kp=aR=KN_e9Fy9OZbRbdv*O zWkFg{?ZBxPnBf6x^lWI6t$uuo1Aa`QcNH z9aaX}H2MlZm(O{!r9Xo!}E`M^$-E>ZV8}VjimZb9$ zVY)XE((;cABFy(O}nUo^TzBe znv*{+iLHt2)MYYuQl(-(xE%BUS*Z8Z50v$ofBdKb{LfaP1)t%Ait7lmysSaI59T>y zm>b>D)C4&zW`>6O&W~s)FCQcaa=ppq>W73z$SMMHcrVI2>C9097n~?X+Xwq6_-6%X z^VNt->oMgDNUYJ`!8o|NO>_uux4Z4WEC?{yKLfgjiL@N}(mWEJGQLoh@T 
zzL5Mf|5`OuTg^l@)2t79psHw|+ziDcegap5++Fzo93Ji+H4Uf62JkARvl){j$5<#f z_*}8)WUKyESml_So8uNO+$(?tm)Ug#EhATRM|e4S*1THyV3diY8J}Ifltd}SGNL}NC4}νr+LePxTGoXZ|d+w2(IZlYM^wlFikC_#t=S%u82i=5@K;l!ePyu)F|M=XRi zLE^V?*yUO_sBXJ8Bn|AKF(hMxzm|3pQW_KK) zX)l0$;vyq6e=;2VxYKYPbvDuYRc<5-Zj6C}0l&(G*X57x!CzEhj5Ta=`!g-;kgl*7 zAT5xaf4Kf`kizt)13zDPGmFEhQ)Z*V-nKh_+xL*0xE==A$8+MEB7J5q#F-!Q~+@rNtRUG0JTqU6Jp_H;;v*gM`uX52PRae~&$}!uPj3 zRzG}K#_i{JPGr*(b^!-VEOa0XQd>KIX4Qf+K(?+k?-r`F#?j$IuXJ&#RGsYc%2$^`klbXl>_#-xGnH_)o9Yg{mYYo!5 z$bDGCvLK*vI8;=6`uf~= zry@rZu)x@olamAZcL;t}(ABY9gZ4P72Vz54YUMl`WCh-X=>ZCL*a%~If`}93AV=Ui zaqe<5GM8Yehi>xw_ixw;6M(p&S2f3pw7(H9S}C@vnq6E~b%^cB28EQ_1!V+$4#JUQ zo5`~bI_Jp}^q!$mRR;}yeSHG_{LY8TG!l*|EC2{u5z8P*4M!&zlX&0CrkKnX1BAODC`!bah1MCIiv%#w%X+`8q&u3v>rgh61URR9@q-et@;!IZ~ zg=HZZSG(BcLVUtrVQp$*sy3YP?^l8#>8-q6sGS8E83d?$$}{p5i{CkZuOdOO>ubLW zXo6ERCp5>W!QK3xgB#e;+}S()d;fBXxVR&HG8#M{m? zJb6CnG6*;qErbipMa+xW3LqAr^5z1e7|s=>_AigYwK+1y{46=)o#1nG7nBD4htE~B zz+G=JI6tH>vfUb=Q%j^)!i$AW!6{P290~B}OJAck``ptW;gqpgKM3knzEPLv3G#ZB zE&bE1cWyybRZ$TX$Hw08UbPTnqY!yGd9mz z_~iw)D*qE?1^@jgYV)b-tWeFJN;~m`8GdH;D|7R$Qwe(WXL-Xjl#da02ule$j$44v z)a(SYHooWEl=Xum(2gZq{I3F^J*T+{?|4+pk3==%;HkLl{NT|E$E!DgpMI9mBKFAR zhgwaLaOUUEx$40?Jp&~<(CA2*TpyDLM@E(a*S0~07|Pr#=l?EXvNwd%^o4S@77v7% z9rNcz{_jFmDn1T+Sed3`AcHk#UDkcp0dAt|5}{7T@13M3d}HI>Yr;uz{BPBF^y`H8 z{@yU*)&Eq>|8?3pT_+cul++GB@Vn+IDv3()F*@_0Pl-zkl?*W~655?JMZ6+ANjwU2 z{k8m3NSPtcv}^9Y)=T*4Ts@(PJdh~`_Ekj%>g0u+TB0q#H-Sb!j?Uh&7{){*F_YT3$EYw*2P$j8X(#@r%%Wu6rEM%K()M zGeY$w>^&fK9U|MV#_fv@-B~*L9lFfwd%Dy1m>&7#)W6`-CwqmeH+kh-$*l)Z3??USg(^nYWJe5W62o}P{#f3QjB~Z{Z23g|gVlYe z-l%6{CZI;6#vSRA<*M{J&~a6`u52P7d2jU>O=3^;*39wo5R$WrBICXF-n47~oR6+* zKrQWLCo(!E_dLhsySqw+SoOD31YW|cdkFMHN#ty@s_CmV|1@#BrYKv1yex2+uYLae zQ7<&$Uc+Vf4jTyCOrG3)?pYX;oOS1r4Ta5TU99G0`Gx3JufTnO_w>1ZkOjdcc{38s zH&5e$`VEpgb1u@g5T47;FEeb4i|2+{)mVZ&zG$1@!zqF?qO6)`Y~Elj{T4&rZ^m-g z?d^E&A^0tBCK1+phDFZ2U#AWmvW_j;HLBuPN8Q&=E{OEEk#F{y?RQ7zgTFp2A9;R^j>l!!dU{#vcy&|im5W~|G~OM3FA4wwafm6~RMds*$pR<|jb zR9NZs{F#&=XwV{7Iwbi(?_lw+Pmq19HS|fdN9knj(ymhb2Rmpu&e*p9W-3bA zABO7XotuijPqWD#;&!b$!;PY*WN{%?SOm&)b57FKfHnBJ6)}-`cPC-ExB_>Lg2Wj( zW|g)3+#g4_1=3j;(SOkqq}yqs)GiEfUP;%%JLY%GlJe*s=$jO++dOoiaqvE7)p$Qc zQG!%WKb(HpcAGdqQu@97EgrjRc$%HG_Z;p;#DZf3`Z_98;X-80#Oy|H!pkG@B?(fv zZJW@a%V!!(M3+?Do8q&3(p40g%*YF=h=}xP|8~-z-d_H@gaf!pCU)^5$IF~J9s4$! zZjo502ioK8u@u%TxdnF)`Oh(#G;{?vozoGBTNBmoSZN$7&0p_2g?S4$58oEpuE=$7 zp@d53L-G97Ue-O<)@73%Pcr>bbN!GTGr?{eE5wmBjl-_0fFyO3IWeJ-eYP z?-zc@h^}NDjTt`@xQFtkD$u5{?A$JVn0tsB8W1!w$ylP_xO>GMyyBJ(wZ7uuXZ#Qq5Q@H+tdxW9fn$r`Q#7w zcE)|}hc0ptU7Ep|a(CA_L8Bp)m4-gq6t!<4{3-9ePWDCnwW^bzO1NRW@7pgt!e5!h za`KJ-LzG0r^7pE1!Dip}b)N)ve@?|G1?%QzQG>m&oEA*x{zDsedkah`ese{GZw33x z0y<xVlh9j_yeCeL`bKVjmYmDQ#=365&LsxXXC{i%7xYZb$69`5`; zPs@AhKh4mYG{kCI?tCTrdRES*!BkW&y9l?D%=hq7W``;nI9U2W31jWTWgc9CGSjUH z;bvOXzQC9-*P^lsU3K1J+~;!ft(C+=nlcs}7UPf1O7@f%=7m~?W~OE8F1vakw8FZ< zV8dYH=HlkS_OI3Zz?=6qZtj-~jCf*4zJaf-$v*Zp`=hs&lW?l_?JHb@4LP5N#>4Ik z`l_B@gmK9`2b0$B?A}Cgs)v<#>d?hpZYjoEf9`D$a9CbiOiY|#V72Rf9vP#4jEFMz zuig3`)hIWWG^MvW_clA<3CF6ryBn(W;28@WWDl|0JyF^ZFLKLY%n^1Z>f@N0`~#!1 zy?q;d#fw~~Em0qxnq8T>^B6S}w)a(YOJ}WJi;HhWk>%sd z`AV}3J{8$Ov8b7MFx-f@wyobr>Da}K{@MQYwfSr$cW;B$cJEWTEzN&#>(6A%33anyyR24M;YtQ$B0B_k1SG+f1M;%2qi5tAxh zy)3|As3Mx1p$J{kUAVoS>e}H)dcjva>-f!uZzC`Ormih(+aw3=-4g8aT@C!n{*sg# ztRi;1eS6TdZ`dFUgV2dM54L`9kjZXB7mOaV1`S2Cxj)mqnlu)Ah;)PTH32~`e@WO? 
z%aeYHB5%zv2$~oD$m$`uQIsj*Lcl>*g$r zNGcmORZoC(HCQNrIJ+=kEjVj3FKDFy5_)i4i2aBFwQCnE^A8x5F3Uj$QzhQbB8+lV z&nEE23oFa)e|@09Mc2oiIX!ld7OlWe9ei-8xpc5Ex;y$$^vDnPm-1Zqau}JwdAe2H z^W5Ta;ywDBejJXC8NGLLvZ`Cu;VJ%p-OmTi=zE7_kILd7n~91ojfO<{7ehr04{GAp zYLyip%8iGzx!@Ehjk6r`R`>T8`ViHrRq1Z{%8Wq%W{1Vk%g@#;!h}`NDz=@$X&?WO z;rpsWeqZzb+1gM_z3(lH&>j%EyTvVh8eFRTY*J986ijNAe>}}o**J~M9@`}26qtYX z^s)&CjfZ`)n5l9(h{kaN{!C0r~Q?GKdsq! zRlgbqy|hHp<*<7nQ8qZ(-8njLlJmHTv)#qXOG;9C-N4YHxLCH@pZNwRl;S6~#Cv>S zp*KZhaE%{SvDSDRwhtk;*~S;Y`HD#_?+;Xf56Y*nVyO?|){>SfG=v2m^<~MUDcriW zf(Eb2?~q@En{B`RxZ~J+d=134);1-7Z?I&N*KD4vIc*~{60+}dIP1ba(A;xDbXE59 zJAI=Qin0b#>0C2&uPlr!hLc83Ev=^~-_${oa1S`Ru0F34$%KZ6zLx4X*}XE&EqT`I zZ8hKOk$2ZKlShrcnMt%w3wDruC^j}A;$sa@yO}o(8KWWl`xYa2_lN8AopGuYp}ZTZ zh0=-gBkmZRH!6)sN}z%YYxntfS031 zS^)E=%F0ULZZDa84hKDZhwErc7>XHRpe}=xO!Bw6b5+UuIgG zR0Tm*`t01Wxx*Jzcr_~us1^DZ&eJfU9Q`c3_dm{eDgTEV-GAP|(R3WXNu==KP7D4D z8=oLl_5t|quuJ#vBn;zgPnaH}p1|rD7g#q>9;YTns9tnTTPHiutyNSp$&+2LoS>m3@m~sx*owtaM#@ z2Ah}^q8mYb`ntNh+%oY%Q;kzJg4cF-c9BonnWdEE;#&-D6cq#H;{icuR5SpM7!n$hQFS zKu_62l{K`t(?|eBtiz<(`=uh>UDwgwU7{Ctent*FF{+VA64&;@jd#9}y@qxK2Kp3v z1(5Q&)jxlmSJ`An!3HH+9|Lf7v#6tuy41)zP3UL}&4m6d2#;Ql!OfEU zNc!)$#R@2q1BLTB=QIh-fRDNxy2(bjnX_b=>gOW~-I5_#dPY+czh&OxSFx{v23L}l zlmu+RrA}%IKrq4*c=kXS)bZ2{2=US7Dz_o`NrlE204w_Q+i^-_yHezn6N}$jAy?(q z-BZlKBBdT*UxPZ%Ir6uYh1?EG=`VedE;f7IG61?o^bK46wVyR|flr^e)zsdepPg+} zF#;--zqnw5oQ82|`wVfNvXW@J>>Tc+#lt7(^ApBz7PGqc3)_H@<52b)=n4V^Tj23n z<=L}_`_j_Vwj5+$dt8w5LD{kk>+!(v$+L!CB)-^I>-c9TCRmW(xQhOml~pjbh_x?* zHDPJqa1L(n$hU1z*~dDmkom9+y@U;Dh5^FM!4R6kkw+>15o%`g$_|5xXa}_9sVfa5 zZy-)LII9Z+mny%OX1S(g&J4C4XR1v?ey+Xpt$*G$Nli(kx2u7AN1TsDv zYd7Y)QZ;4O8>;_!g0ZsW>#B5GKS%%>30*;TSc#upFU9XB;)CBF9Jo5LgEYXccAiUT z0GX+qKL(p)VaJQTU{Qbe?NJYs8Fj~_1WQ4YCk1};L>FEY^;FX%a|g1)q*JY6xA|B` zq53V5ZEXl*7K%5A|3X97E3mCHvEYi7Bmd{F#nT}-7tx>XCdCn9VNvoKuz4}k$IHu$ zbR0YBQ`h-HuPsin>jSJ3i!X~4tiD7F*J2qk^5Aktu;G*5HP?OeUb1qcy8vv>*RX#a zdt$^c15)=K9LA`W2 zqaH-sfQ-)1O$T%`zm$d1D$OO^=9ZR&DQ@>0XwYGU{-Aq91+E_n(tvX0 zXNcco;ynm*2YVl@fwlR1@QW;|ZqWeHH!nwghs!b@jq>G6i;WCPNgTVVR93rV7g#qICG4=orv$t?_w5SIrbt3m)OD9Hpg-XARQiQs(erAB2Tv+v zQeoS&d}N7eE`+vRJG7jn%R&UK+1`QlFRu-@*k$R8gb7Ix`KAqe(h zp`Jm9@TGylEzPgDSXk<8yoZMgXy_(U6pXdN;*RVS;XS)*vI&YZk5yHtOKxK9#7l;) z<-Q5f<^awUh=P3HP*WQ&F@p)i4zgh}9bW0pr8+?Wd`}O$HzHAnSP4?T&eOO7FDzrr z&dv_kMN-0wHlbs6D1Z&t#{Yg!J391@2eI~siMrl2+-n^uFQy8foXQ2N&`qaPT+LpI($Er!(7}Z<8_8($T%WRTsI3bOqdaqfQ@RV(E-A z#8p{SP8OXOb$PbC$PDAnza^;vH}tgLEpkgew-;JDSz@t_gfC><<$^E?X@T9mYI`Ky zET|=1MLSM~t4#*N_FM2`NsreZr0844xO>bb$e~{J!@C#djjT4~OxeV+pArE}8~yZo zu4-uGGcB#xcWDWa`q5}`(O&TS&6{hks`1Xl4vXRCcNc&%jpPP{*OdFlZJTZn#f@Z+ z8F(F0WB=gEq{Y2U_7&P(ns=Z@YOV{}090C#)XnQW*qiVpdn2$oT&7I!N+%*nPhK<- z7aGdqzR`+u+v`efIxRf(J|izZhDYC9KBHTD8aXtM+sH(~V48B1k9NH^%{8m%(}!%K zbqLjO^6Z)e+}iIs+qtG^bM^$lJ!F#aGAm^s^~oSO}~PFO_P6*U=xUv=cD*!uj`dqVf3ue*B| zR|3`_q=7zayAe_qopJ|5DstX5b=bWJvcWnTxoBj4|-o%fape>u9sq@?21K^mBFf>x3~+R!Iqr1M%DY z-a68$JUfKdOh~B(7%JCG6$#Sfiio{9vHu)Vxd34gXt`53*D@Eybgmm~LR^iR$(PR% z=TAU5;RT7EUjl2ZmTm%FRe0RKgfDnPBt?qsfcr%tbV$c+g;4vEE(29ACGz%YO0e?~ zQUuXw*y=KX*nxa(gO!_`8w_`j%IsB0_f{K_wST6e@oC9c`8+EEQS;>s$f*nM9J~TJ z>snt0#6$kcFVx88RR`_P@@1q1++`-goA=;ySt^?u)*cHNYcLwS;Wr-psDIJEOtHyS zHh3`C7+Ajv^*!i@`O<5Wp?@4)vV^;l5NP(z2q>rKU`MMfU?m2M&@ny<=Lgo(!XTCH zVS+*_YjIr|W`^hy2)lVU15K_AT>h7rA=nqZtEHznR+seC%#Y;cgon<}MZP}6jRwPI zzM=-VgM3GD_M~Fo2OV%ZK&U|?z2i#`lM=T4Y>-Bo-7a03%^n$`-gX73TNA4Uw5sah z$CpMRlGwWA_wVt|kU?cvi1SLQXCU`!={!R$dBT1X`f65|a-_LY$3j8bAI3Qx`#PlTlLXK!g;Q+fLIYSE<{|4bO>B?V;^70IhTfctsF?N(>6rC-pw+;MO9M+R+9iR_&*ARp&u^+r~*sd&!_!1EqE|D>|%htf95_ 
zY-QLD0ONx*FX0PBJU&UX3K+o}9J7IgKhHYQgast^wg^CdA~`jCd&n^(`tXo+gcw9M zEL;Yrz~x~rYEPa7zyAl~B8v0C5&m)F$W1W@F7-tTdNJ4jG0Tr6P+YIco~f(P4)Mx< z%E}5ZcmgvuMa4E?Otp`ndE1Ai&Y;he_sM-U~-AKSWUuco5XNNNnOvm5slCgw~H zc+R0kRVGIE3l}ebifmYE2(5kbF+2NoVFUEtvg;t{1koX02N}XBW$=&>S+D!0Tz|Ck zz*}2IMMa=m6vm?gI=LrdFgI$51V7UG7W~|j8)2WXgOl7xA51$C%?n!%sU(jgnt9$g|%z~U@VC{>3h$8_RDv^BWF)x})&hrmo zqT-6p%B-|sImspv-&my&Z4d)fM58IY*n!u$Rtq;4flIV!yy5o`7mN_6_qE>`@kWE? z_$P5KK@?er35ghqyg98Epc>XSxP-gdNfZPuBe{F4sMGfbae`n4x&3tU6xdKj& zjx@oSA{_tj=)8OocB5BecrEg z|DvtcrVTqcKp>D!=gywK1c9s>gg{n%uU`vZ`7Qi62Kdja@Jm)FA*JoRCcyvv?03TE z1O!r%q_pI{2K@go*U!3yLm;YKmjAA5-6w=YAd0+mr%znI={Y6)rCQGk%avw@pVmI^ z6wz|)cISSTzOTD|t;r{1PP{ntYvb=fufL#ue2c-3$B7~5;cE#a8^DF*D z?3&vB``W3X@Wir=v7m6lt)u(wvnqc1`8V2@;NPB_yc|fq7%J~g3nwV#0{oRWSC1Z7 zo>Xnvq`ki=@XC=Q$F?_$Hy+YoRnXkza0kyPvqyBa>=v7n;WmpmT1_s=V-w|NY{Lt~ z{sU3@;|=NY`D-AJ&f_fblOT{r4mPTFuTITtj9#A04MQ6nH(_Tz1me71Vz_eu(1Y+x zLm9KGI?@dr#P!mTZ7xK3LG+hsKVx?N`2DsvC(rJsb~9*ze8HkDfVyz|V0ziWQtxOP zo}cbAhi}OD-f1;FX%SPFpB=C`LG;f`uSr=2xn)Tb1w@&RXtp&`#fRH`JMbaGDeIvD zcl6-8^ct$*S(_Pbsed$6F_n}NrD!1-3E+}YKc-?nJw9O9qayLjM~n~&Cke~CgEVe7 z(;MOI;8QA{4lN`)2zuSWzde#38 zpitsRU}3&LP-;54ctB_!u>5K7^c^0g*fTajj%TdKavG+kw?>y#-Fw|{%y-<4o4qpn zw1{h#vGUjk?9LVWx{d#$Lpa-i@TdCE^+z%gG)c4fVa&tj`JvSusz1S>{scK5_R21H z%0WNaQ&DAdxB2_w93<1AA9*3$ZuRmIUP|T%MxNI+e}eQ`#9}!f0yzyMO;e?1 zM|ThvkrFUhzQK5lD?;xHY{O?K{mVjoi=%|HwP@L(P9T=poU&@uEGN-Iz&9Leojn zzB9a>$OF7o_I5~lgGf?+moOu1Q|HPm${#n}XV&>=wn|1Km)9-?vQ_vlxxZN&35~wt z{B|hbf3P)XG;%E@n9u;@zjtcT({&F$RULw~5Sp$?_0}&pziZf;&aRu__G@W1EVho_ zZJtCu=w&Wt_i(SMzlK1Jxp$IYMsyY1vB3LNb>ijwtw=VuQJ#I}$Kp97TG(?}8LuY; z%)g!xqvW4GQsXQqQ4f3Pp9Ex*rfWB8maSa{2`;8@JWcS)UiyHv=Ep$(U z=))M*K=-WRQdTnORkc-!`tmD~a-Z1d`-FyXg1eUE9uGM=)z%?!C@M;Y|4Hbr4oHn;3RCHg-TmFc+{gtS<9T(k|E%X-+V;IAULs z_VQjQ^vx?Prgvhby%E&nPmugC|D4lqlT=&5r_XsmN!#@)S4;Zm5ACIl-_dE?qZPp& z<%JC98Hk76ngQQhXa0?nL6Cmpd*4pm^^kKOl}~1%{*}b&965PZ zKdSDOaGjt|S6Ba;sGzMI9oRIG0>1i5Qf)Rg(cacWn{)FHORFB6#7i=&$PViH^&Xp+ z0~7=jUwa6N4#NtccPJ;czJ>=5OmraG4TK+>T{Do?U%sWwPY1x~V?dRz zthHMg;;m3F9HW}U%fmy)zu!+D-9ARxu9Ea|$v)!^$^ljwzZOzFP;3{wltlegSM%9o zuJIM%zwtZ2SY1kY5TH{-@$)G)@o~Ne`0jl1Tj|HVvok*%9ff!y!SDFj%?uJGKF&w? z39V2uYzN!Ce;wc}`cz_Lk-jNZcjvR_T=Et5F*5jT?clGCHixcHXt_F5fa+YA3@^udpHUavy&jaPo7gACWUQ#bA3C~y!NeKm)?>ya8LnYzkY>u&`+nDjXmB;%7 zK-rB#vE!}f7cZo!lEMN610u++5pbb~yyXkMvtQ^!O)yKsAC~^}P*&G+Bxw5MunVrc zH&D3paN2x861FwY`^gJkcwi7IIwW>w?8i$k=Y>2c+ELwKpN&|dN{l;D$`I!)7lKGa zXgK!27s1CH+s(A|qCaD8HzhP(jkM1w;tRn5s2kEt9OO}z*Z=QD@M$k%gGpYrtg7P| zrT^E{nTo#{@82JC@#r26l-95K6-ZwI{z=zT*LKzTIPc0j%pp=O2Nt>-()blr++HV| ziSEW{JtI$x@L4~s^&7<@U2t?h#M}NwF)T0^b*Ekr0ug4+2M`B@u>Z$Ja4LQ0QePXY znomD0|NVla7O8=JVn~H?C^jqXal)n1`SES=2JQtHlM(>BOCk7HO^QWyCu-R+MYT~I z$c(&W&Pgr=G#K62E4{xuZw~~ie?wL1n%Z)P^ChpN`vTkxReOV5v3)zLH?@kK$BxBu z34ybD+ZqJKOQVB|LmtZ3E@LooMc>l6Zhby{IPc$aqS<+NP}K;I3Zv6;>56xh=YiiA zCQ5`~PdEGPDeA_|KGa)AWzf!LAE#(sPF{3>K<4deZtyVTv6fSeyKBA_j&uca*)L^# zI)bNTT8FfHvSdqjgxX`f@LO}p9lXk7m<{I1WkLx!!D3-}?JwZ`j0?x7a{~%4Cbap9 zPc17bfSS^UnQnk@QThAhWGl0R{e+fQt&Hde_iUu*Mpeg zjxf;c@IvK-<-eOhcb8^!1E!@O$`#UoZWsKn)etpl4~#`RI}-NUKZ(fd=*bZf*!$+Z zeaoBMBgxj)@V2d)>NDzr8Ew;dmi<%tHnB~>bhP+clg}X+MD?a>y;)p)cJLB7sH6x}8v6NuY?D)yCe>ZnDv% zZD?4e(ZZE)UgD;1HLBP3Sf zdCCkcrUp2pgJB`(!+R%`Hz>Wkie(yilK%m&+SeVeR{eEki|UBtl%;A~>++-;8t3_? 
zguCDQpW>FTRRK?Pq%oje?qUVL?@xZ;mSZ+RdbpQ4I2BW}Q6Yum__x1g{0f@qX2v+{ zgJ_lUBrAR(Aed{-#9|!w*C}U~QO9R!GLvc=J>p^nxZoSl*Z+~XoE^uW=NBQ+*hH?EELJ0ISbkhMeQuaA-()GHLY zS>PN}dSJTN?UTmrT~mH8GQz%gJZ`eSB^@-L@y9aNKLhc>`zRX6K6)-MrFVNnH!HpK zwTdbUT1=+;#HiWbkGHc>UAj|~Jc0mKMOMD}WDtMCCAOX<$OhGAqgR(8oAt}${_(*O z6j3K#bC1d|?9~|oYV3uS%-(ut?IFCw+CGSuT|h{*(9(PNi()8eEk`9fL-)YsjtDP( zxN_~$CI9o$1YBOsc-$M))!UyMl{Or|^6s?d5;fAQLZJ*PN3UEW*IzW4VQHXlma!|lp>d{OOP0P>EBL@PGF@yNs4msG8Yi2-#C#9VO z!_`TSNr0b`wMHEVAugibfX@?bZzyeldWdRCMoNDOt5VZQTI3OL0By(@r?Pv_1Qwwv z4nDnLM3$`s&F2NPs-G0RHwGPR=y_kmWG8GMG~gAfh>pI(vhlrh(S^rS0CAtMO@?LS zI-fDN%KR1im_AuAu!VgiC5(bQc$}{KC@9I|!6;GOvkN*UJuP4wS_99~2N}hvi!D%k zkuln0o6H;v#AAi1;hIyaSLUB_u8})fbAaSN^5s@bSwb?b#kOYd2y@O*i>rd{zuUPW z6s6+i_SK#!!wWzgnU*ur`!e>v$66aZMv?JBoK)hIuc&z-&&!b(JaEAcj8`b zvJHAtpuFl(82&}q@(%A8rFRbhIE6+B38%I2*MuJgW+(w9Jc&XWsGlwL>xl0Un|D>S z8V0;q%oL4!W@)K$hgMKlyQA|XUJaoN@qzv-**sS_{#_MqAMvis(ePf2c*anV@)FCe zBemi0AtV05?PPvTBPT#%5thI!-pO9_bN#8MHCOLLulu{lW&^5iME+0CJeL-hf@C4d zFB#Egu_W{SAS3O%gy|}r)G|OiFy3IqFC5|3L24MUq6<+|QWUvlTz*%l&5bMm2(GPb z?XkVqVorlVKT3L2ZSPLMNv82Xj51W^M$yqLFouY6HLvKEd*6&S@fY4y(-cOM!k{`a zZ=fsa&&k7%{EGUrNBpN7V~U^3Jw?KT zj>5!E!mB_#B`S`bIk;|4_{UUr1g1uM)-9q`b>0Fi+j#4VEa1G5E{L{qUax6JlO=if zo6q6Iv+GhC{GD~`d5G=9jPm83DCrU#D?~J=2`G7Psh2_JaYK2Cai)FpO7YELtPU68w^VkDP1Z|^fYH5qt9I5gt*8OEqeECy|5L7nz7?Js&uCZ{1e%qm;sj z#1V0hZ_=d{JyX&6jxNU?qTzl;P>h7uORMKoA~>3n6!{V6$7e1?&o%sy|ZOw)~Nv&((WtEw%QU&4kN;?feo!dY0pT8TAV{zjZU zGZkwMBuCF{upz~ci@^sZ!9Fz$;boRdAJMKJGLH}9+mqUy_K%PYjj0alP~7JS%hm^5 zY8mUlb1qzFO&C~Wh_w*5+lQ@h%WOJiu~lrt`iIh&;hLmS*Oj2N#Mze+$lDQo!bw|< zjo6K7fvReXe#+**Zt~o5Al<3^SRt!i73z_BrVZ1aNSq*HYhNXtqu|cBc|7$%?wl;)X0KJ|g@a-Vjbv?kUD2r=>4MJ_4w6!W%iSI8<4Fe7?2aWP1jY71=a zBJ?olfkRQBAAinJW}Kd+;lz5l;b(idGmLQ}aU6?2)?-V3QN9Vx?93GXT6g0<03cWMwvreJHcWTbSV$K5BdONB0P!Qx z<>9F5DT6q)1w-_xJ8Nrz1ifo-dTGkLP)e63bL<=(5Opc`;N&|)pU5v^DC8O7GBXwx zYu%l)6Z#eIbe_GadMm82W856O);L2D%aJ^DMZ2P8r&PJLGqT@r-CsC!Uidsi49`bS z6%>??T)OP8$j$J?dhN2fai)neIGwk?3@`KbGv3Wv8=XnAQR(SDJbto~I<1tPqW?$h z%{pgYiT-7oIi{jR7`2JyaZ%jngjto%|Jqf_>Z#QCVQ%h^dc}<4kOL3KILmzuI4a~# z$<%hPYNhl+jr^oh-D5GMa0U>mbKB1YxIN_Y*Efqeetbg;=Q5$pAS)l}$#T_qJ6ld@ zdtYPcpFY`Sm}KA{aIm_6qHmF`VKO!I-j21IXC}HQ$jyPZZw^(bSJH7@eH_k% zzL+f`TdC68w$rkBp-{VQ>)FXBm4l2&Fo zAacy{{q5FFf5${8Y;{J57>km5>Y0^9CrYV!fvI54p-M$o*V25M$5xfU=x6!9K9}!7 zhx;f4SIU!G2r<>0W=;Jwbl(*DmaG2mq3Tul;$BOY>U!q6yAA(zuCWK2pCiZeP75Y7 z%KR;iF7W!$VIw`B!96dCryPle^EEuS2e)`!dedNcs@EL2@!3!SyiR0&Y!~OOQ<`zdJJx zp3hmho?#C1kxNR@*w~}(-MOg8Y;QGedY|t-h53ZgFCpwa;P)|cD=1-B4DCF(t*SA^)v#Sy6TGBBFyJyc60K` zxo6m@<3Z6znaHfIi?VzH1COUiaSebl8Lak!{F0>CWho-Z*Xje#Gl$w25}r4DbM@2% zzY#pK5oen_EUZk%wRn~tl(=BewS{$PIvG+E51L+kKKw*-NF#$k(@)rQy7et-7#KzX z6$tiKSA<@bz-=CZ5W9rCGE{`>oZm~%Gu9nDiZ%^A0v3NsSeJe|u6ByN&UmQ!_>2h+hXP~P z9`darG5sRr=H3zeBN;D@R0bn|*Dd2g*e#l8QR|Qhw!#AN?)CJaY;+&WJ-x1vy4Y^5 zk(nv}14Ds6jW+Wt8~yVg8O4TG+@tcxYEDI5y1J=ft2-MAkQzJpz=H19pzLB#7Hoa! zD*E;TTE!j3S|*}y)Q{t)4tnpt`LJ)Ss^b}3_Nnbl1To7D8ZjA)r^ znxh)nMwpiqP#R{z_)>y}*+5x>{*Bf{CEEh-*QRkZE%_8a^*S|x>(MLkZ+zNaz0lcr5G@fUoBc7Ij zBI|Xh1$w%wn#st~3%2!1nb&N*>9!P}u>2FY*R}?g!Qnf&ecl?oTL90+!5@+r$)@s~ zX?EDdx<4mv87wSCrkf}hoD}L{V5{?AT>cD-E&V2IenM@$(= zsRw+3cLI3PI5aQcgO4j5DHu^QqF%hlHsOpm>N~t^z`$Dcsi`B*&3;#8XD{#QnK>`4 z98=x9lUAarpl_FYYETS|KUvd5W8LTJ?hGyM=cQocr21TbbC67RCI3co-y4$zhyDgY zC$MNvx?{j<8MhL9>RiAc?kFx=Dxt!_)PO(KLP)S=eIIi;??U?x?tZ`BdoO2QE9}i_ zpqg)PnjV#AYj}a>`3sjjW7STe!35yC3(T39^6Iz0P{T4-%W2#E}l>> zR3Y!VZ+Zc+mwXW&FHpGQsG{Qos@xMXF(pAaPcw2ipHzK{(?q%ajZo~icPo6BV7re~ zAY_W{iEri;Y%gM#sK1j@chO6Ax4JF!hKLh=0VK=eo8&$0m^rn&Pe}J>^1B0K7qdY! 
zvU+%7Yj)`Z3bh!?*-KYEo}}js?_TB$2ZFLXLDOCVF0T+oHT2{5lgL= zJo30eFVgn#H>I>IbI+LHVS!dQzE8scP^e9q`GJ!nqvkw|>PvNtXxk_cnb(;St=CBz z1S>a5qSa2~Mj=mW%G50as#dfRl6rywAng^OlkA!EuW%yE%MBg)C)%R7m%d7iXDu<@ z)Tfq)ZKcp=enlD28n0oO>-x8UrcshX>2KTr#?oH<%gHE1z1YNW++6Y;eXO9(dg<+{ z{(!ddsT#m$24I&FI0FKQxFKH0x+PqmnDZqt)t}44@T=$WcET!FARvz!Ok|U2tDcF7 zI&E6CF(c6b3`VyfOX5hq<}D7i$SZnmaK#tygZp|daKp5?xV)p37s_fvxs#mR ztg_xisai3`X0RrB}s$%WznhA1;%gB8* zIeb58jFeoq(PPx0)CV}OsA(Ftss4n%2^es~RPK+k15&|~M-=9p|FSPWMd%2jv$ZZi zYU%&G9!$gacoh#}Hi2-VDJ*4X8{R(f?&3u8>`kv!_)gD0-)vgr{xTq8hnYt#6A2B> zQF?B0q40+ebL1^mydYocL-PCge*{w|URp6YsnIDXcqWc;7hU1Yv;0n@0rk31Xi5a% zE#ZSu>h(dN{fE8O#RF7XUBS3#<@8rdrugx2D1rL5a7-zM8P=p#R+p$ZsWtT$C39B` zk*=4)a{`jXwwc-B6MG%6lcpr6BbB(&fLe(p2+$T~`ks!9!s}|a+6IkRa0RQG5)SlW zV0$)3T>?EH%sxQIZQ2a_$t4pDJzx5v*D&Cs?yl};24c*VW!x%Y_s;1}|0<}lrlbCT zO8@3iw=-t#e^e})@}I&l1Pn>2cJXna59f^NFolD;VdqzhLMC#OmL&6fz0Ez)W|UFS zkUM76YUo>=MMzO3JoAdC^3<$VlM?=fYagV|^u_WPpL zX(ikA%Ah}=j;hhiLC1ay+8}$aD}CwjuP}&6L#dB9I7VZcH?cfjbgjyL#f69xXnd`) zWt?;6(0EP1<92C(KthVQMzh#|=+tjmYUDtT<9a*#ePaapB4Jgn2E7fRi)`*Ycm7)?q_ z-ZunQ@!-07GhB*&Mi}wT0TVJSu>MCC-Zxzvd)S|bBy+ExSr-udYHpneoQu@lK^b*h zNQj6uG<^{j`H;NTUd5g@op3RhvvrSU=9^I#`s9_W3uKhk6h0G$96x2kw1YXDJZC|`x@qd=F4r(!z{1!m zxR9LW&<($bZL{-xPQOF-eT9Thx6ZU`?&046kYv?za@jq}U2r)C*fFSVZr-)K&yfNj zwsi&lgq!olzayxiI^(_1#5Yrxu=vMZiWDLshuT+;*({mdvmYqrE^ z$M!*Mx2J;%59a*xi_aT&?7k1<%6AO*z@4f7?(Lx)V%?dw7q#5}d6XC`+EEDOirVD6 zj!%6@?kbWdGO37J{+IrVLPW7QZ%=E^*dDZN!s~oQ36jVYJJI9`!A=x>fh*XuE z@Lw^JttNfDWA9GlhoO-nlj;1^;HJI9%t2+uj8Nd)`L1$fjYefdUy^<|%4%A94y(N? zVLUz5J3-ggmRq%c)lB-5V?Mrs!I!LyO&>f_)4E|vT7KmnNTmqc%COAZHXmHNC&(3{2M zMrD4KxnD#^@XQKn`Fl@DDzaK2rq9gQ(`q^#5e^%c z&>q{%1oa=(Ik2xC%0ca+b$tPC>ZG}7=(h>J_GyfFTCO9AudDd7Ed=w4S< zR4D04d8Fr)uKMrbfziqDrVefqV*GJZ(16W@9}%S|0w&M7xXuB{KAl-G7kQcNyX%I@ z<00amntU~EtSjq8%9r!}HEF#pYUq|_4FGFNs#qiLCt%`?c3cXA&dEt0go5$J&Kw|@ z8C^}wdswuvl4FnclOLiL3APcyICdvOD3aE3&nzzh3(?Zp53hAUKGi*jFBP>=csj&( zO{1-kYIML_8?WkLg*0nAZNt|mc z2JLL4mj7F#{MAFVZ*POfvZM9i+XkJjo-#GjFu?O; zM+PpUie)D0WSee%QT^cuA6z-_ST5EHFr3YwOpm67MLe!v$094um)d$pJl1WB>?}U! 
z9OyY>ao#XgV~O$IpXx)jXWf6}P%Z#H7%(lI43gfnsPK2&XEsPhXZUHzM2}J$u1Q@_ zrMR@deSjFf!mly{>o^XqEF5h&B$^TLfO7Ij~NwpB>8L--gxg-xB*cY4jlSCi`eyiq3%+ z-5m1Z7!si7@7NYu4I(=eqD;&}{hk1QOKwBWi4x4}5lC+uLzhW}3|Av=1 znB52##*A=`G~W^Puu650=m@vw6WV2ts*kS2(luwW=Oa!a?@E19uk$@H?;KDAY{h(q z)S*qZ-2S&a5mim*M0YETsf=cAUscMt;QhZLdd#T_kE_ zX_ck8;sM{%Wpm!BvttuZe%H2&*MN5Bhgq(f^^Xzl2uT=6Yl`|D1AjRmQUAR3Wcg58 z0PoCk8z)vdJ>g343K_#MGSu;{{!t~X9wolLJ$XXHs zGQ#ClYVE_K)2Q+X*j{~Kfc#smYEjMvu(j_bW4NX$1z_Dozw|i5W0letqndoa*M*Cc z%)>+7>PN=lIIp+(;!n#--Rj|BS8%qODLurGogMW(Xx+e!c4zF1yZ~S#rMdyS)5O%+ zt*){~VVp!(O`;Ewhf4oi;~g=j{=U?XjTsveCmdc$6u^CNynfZ7 zeoR>nSeaWgC1FFV8!%=8YS@fGbFRoWnx7 zZQ<_Bu%db^ovG?zLP!!nFT|@uhm#o1JnYVzG)0sPFV$zup6Y$yd~BK%9ibPx$*g2I z<03M2lFEWd|`clMqGCXLfaA()F@FHC$(%-eLG7S|vW`E_h&8%kETOI>7-l!lHoEQfBGCm{D29V8F#!NCWN zKrF=4X(>ED_ZuL(NI%)l#Sm**r%=; z*v^_qD8kdygd(3I-z2D^3c5ZWcKx3UZfKLG#2N6!3NwFXbpI-aS#U&S@-ZI4K|*1l%ZdtCl}tw%i77pF=wm-ihOgf(lLSu0#| zj=F&AI5sEz)vRkogASP8MTVHAmIs^Z40Yl@lI{59xv{GWFY)&>dn=P<@`Oj6i4LKr z9X4}_C;gZ%w6~L$z*V8ro#kQr=K}O^4ybvbxS47H#oaPoFDYsII(yHx*0*fh+{bk- z{U83KEW-+~{*P{&tr0}{>)hGQt8RvNc<-(E^{;KC;L;P}omR}s)P!?pxV9VBYM?^8 z46Ia>VLOSKXdjxh{n!Y&rht!}twp8&I8%G(4S;ofqkj?6F+yOj6> z)E!LGoBpWhrC+nS%292_c4oYKzrWXfIecTN$}_Nre!I_S$@nyiz(%=X(?5m3tjy`~ z;Zuy`_Rb9T+ z1bv3R^byUkI1$H*Sy`_MFPE{DgJXKFlopNYcBLU&PN7FUaakJS=Eh z)9GN`YPAq-Rr#C#Kyy!cOMem<(66iVNXlaJmWQ^4!XMNWXH5Lg7a!m;`#8itM7SS@_vXVE-uXC%x_<#-k2Dm%?nB7fxL zXf-LBZy>R@7AAlU-FD3!6wq7xWH(RA%UpEC6U-#CeP39VbygI|w^T8PJ5H>qijwFn zTF|hecMz8l^kHM<%h@cLX*?j_zr0XXUqs?q^wOeB;TVAr(deprPdRY1&|8XahCbu_ z8ii=lY|h6n@+X2+o<#Sqd-0lfHUM_BIc^zU(g!1@JBm=V3gQ(8dJT|zg6VEp!JoL+ z%N#5D+_4(_lq(V7_TmO&%cUm%g8iz9p&Uvdbk&sV1N$faDyR6;YFkw6(entiNyg8L zy!@}5K!^ghM=zEgs3y!M)OJglU-|8;YSeB?#at8>0LcCbd!_|>#mQmYx$wDVj)AAf}z8@2b zK4F*3-CvJS3vTM)U+{@K=X9c%n^(5D7}?oz;Ah|@^^u7k(}N#CECVSS^XgsHo9+f9 zITYG61W!e;tVIaqet;Rm(5xVe>QHJ@|tgyzTBx2|8X+M z*a;Xzfw(!y22$8qtzZB5BBXqxwf0fvryNSbLV!~G-UDX>J(+_;U@`V>!&&B_>gngD z^Yg=K@Gu5*;_Ydnznhbyz=AeZ6^?u6%G4MNWLN6SBc;NevA1YfhJKy({i>*s&MiA54lX2c^#ORX5&4h%_hLbusa zw)OEB=c^yQz15DaMAbg|JhpUVBZbfb30LUx{-?@RyEqL+v zz=6@pL{B`i4CnZzf7ZpXs=3=~G!ha2*(%__yi6 zf2K%h8B|wSdGK-eMs%PrDKaD+(l`quwM2PeQPau8E}q>jfxLlH&_Y3~>Sply@Km$G z>_twAQ(Uu8<;`yL|Gu-r(k^qFO+JTR%ms^cv~*Gh$F%0y%G=jrPrBSAnbF?wUg&C) zN;pSrj38c-;Q93mj(lief)%RBxE=K`$@r7q*?{RV@w1Gwq1oPY{|lo@oXg;C+r=&a z2Sl|yMv4{_sXJ62{Y$s@wCMbRHzO9Vo)?06`vxWYRe6C;4_Rkm%X$d!Fvw&pazTLR zzpFnUpSuat*e4|pY{V{*l>J_yXH6M32FcYSki~8R%a|Z1`^DZZ1>J_)xbh(}?u4$F zvU}^|)?76(540u)v_=McfejqX{2Iu;d$0OI^ZwTr0jVJxvKzjA1To)kD^vm5{KmKM zirC&t$gN25JX4iAiTa;A>wnUK=J@Pnh?!3FKS@f4M^Slcn6sgnBf zT|vnY{CZg+UMFZ7UFP$}tfrri9DA{h!MlWgP1M#{yyKz-lHDMeWg)4s+n@v@-emNGsuOX_RQp*MMZW!*`x485!e0%);hO3YJoY13Iic;F#05x3x1!cUUSxqy1#x8* zn6B|NJUl3)%eJ;{MTLz|8~#9|XRs@LeEYwAd~zxg0?7|5`++O}xH{mt@`9Ho?f??K zrCmdy{dM}fllm%1erTE9aH4o?SkznAkJgH{v6dSwXyyk_3NFpPly+&>f*v))x6F2}*t-|Et&v%KO+_{2X? 
z@MYwScfg*4-r;}6r}_P{AY{3lVM787cu}O72yO3m;o-i$e)glUW=H=W2OPK+XJk%y z4q!%vO@GWATk;AbdrY}AulL{|gK?^S5$x_*S?6Gf#qVHygu}8mR)LRb9h>+M*x&c< z4hUO|c%HN0%AFfFG3%bsf8uR~^ufR{TH)Y}|L?w!W$A7el9yXT`jc*66c+NJo0ZrZ zJ4zCI3yTq+PLM_s2VA$f7O_bvMofSIe*S-YO_pE2&>g2!L;R%w4G|`HXaiVMTD zcTe08wtCZCL9*TJPx{~I=~hsXQy;a~sx#1yu-olQKGi@(872_)d>2m@9{X>0x3$joAH2LDdQ|XflwF1kb6(Kg0z7%fDNksn&JfM|t?4Qw+|g=Pw1edT#lATMSMgM9Kqj6JXPn z&%y!WJ3~9LQ0JAZW63B$u7SL2yA9)eim}JTRH3K<@T?Bb3>?^;924>IeP-3_bg=- z898($UDacT)i!UU2yxzz_%l5}za-k<6Fp>=xa+^xs=JxbW`~k#XZxKD z--3mrkaknqi8iGUv#Q!rOS|A@V6{z9FMK+h#1*EGCTP9QS!`_!Mad=X(lkp2J7{Nq zm|^Kqt~hXKdI|yeV6-qgNDWMOE(fhtb(rQ;v05y1uwrVg>OMOGJEbu0T)PR{#3 zn1)8Ewo}5NAYNDg>+<_HU@|h0?DV?+$jGn`{>3a&2VuK-rWN1uUsIj*h|Wrr-YUo; zl^>%nTlxS0=l^38FdGff(Xv@=>$fY%?Y2aPGpDp65TCg`J}fmyKKHWUZsmJ|4|^=( zbn-qjEoaD=nC{JwY3C|`AHuP40s|2VNyWU-J}L!-gAC1UWXM z^|HT0mzNVPL}%{gXl8@}(uOOqpcWxn)XU8x11jgb1NB4i@SW-@$=3#5{C!UcivLR; zrk^u|q=x}pa=^}!L!#H7X&lS$ddw5A41w32Rf|< z7CEj#9hISU=fK|ps_)9L6;Z_#@9W|p*pFB^=%`%$Z|pCqNwMQ?>fEbn$)#Ajbeo{1 zH+7we@jTl;8&?b+<%?d8FZULC{dV~{NqZic4XPEDR-$MZD^x#XX_KXYol}eYhfHq% zOUFDl<-Ophe{66aioZOfp>Zf8aO`j%x3rwrb1gTLb%R4$oL^)H?HuLi5-8MS=j{)s zA?3i*{5hL$?IM~J%_uITRRt+MS=!;q5QIvix1e1~OnDKiGy@)pCcC%U1PzX@CwCsY8harV#YVY8Af>lgE!$ zE;j-^dg5@j?B~x5N^KPhg^WXj=I{A4s;x#*K)}mr;93@1Z#Y%A$bZ<()K<6mE}5PW zwBSOwk6QzJU8#XfvqPL=KQ-D_7i?kQh<#u$m!;~2-pP}nd{^3J%r7F_)C4p@y!w~s zKh~=a*N{?>Swf{xbU|oBR%(BM-f!*OZ~ZbGwI8)QcBX!U>l`(sT5A-Qr+35q&4h9$ zK?C99nB5o^@zS{%G!~nhN3x*u?HlzFYk4CNh|?3lpCc@X>f~%UMjiG z9N0NrPAHmPY{V6fTQkDCa4JS9Y<}+WW7*3d72;UMH1#q)W7aco^hF5WDSp>#$^yzh zR`=m}gQ#jd--N0t@J?mckTMipsC3SfG*>+}utvH)c`l+WN0t=*ETv02=L3YKOa zp{KL*jdBgD_!Em*SaLp=BRkHVo9|D8jcs{UQ;bt@3~~M@IZgKmsjM>NCl=RAze+Z0 zFUU|#p@CURHqf3`b#U_F#Gc#tJI|HKZiO_xbJ3O}v2zN`iQh+|nwY6IRkZQTV>KJ< zZdNfa#(H1n9_bME#@R;WZ9p>A4%GN7(TCP7%(w-|5eqmpQ6Bt`c4yY}gX}NADy;qN zC>7}ux=btpSm0`|8qn?evnmM%SJq1yyygMI6WZI1fHU?PQP_Axt=Qm+2QXFPHm7n; zpifZpzEXP0qmhFaGyOU>6C9&oRsihq1&x%rG@`Ak1gbu8U+(x zDcnFj`M&?L;OquH?AU4_I$Pe|dzU@!mV%`rz`C6sPkJGxdoI9e%yBQaxfnSQK+uew zu}x&Vg4&kMKLAAOWc@Gn#UG-HCTA+X-ji>6u|D%pK-rHQ*SxP>TXw)p|Gidy1Vq+|`L)cf>KQVnr+q#;jkS#{*%(oZhy?VPc`r z+k8L4U%JetI?*~B?YTG^Ij34JsYR)wr2aca4d}%=MRp*7+h3A^u*xg4p>U2fM(9x1 z1CB$0*$L(XZonYZ-cS7E-h#H@3DMzSqxJi$Eqt{QEF@ugZ?~-+epZ%0xnSM;#;2&6aW7S2FA8l?vkZ?@?zBHcJ`LYH( zW!O^JHv`m_gjZ^7?G1#|ra39p@j-MHQG=eyOE8l3Bl^hO%a-DOpCRGD7B3oi-bC+eAeLleVq@gtEb=Dosv?x)-CJ@A z*KLhLEh*8`sGBU>F`jfHE|UPGh=fHQui=|@pCPC_8ROPi^|Bipz=H2YB}_@=JPmZ0 zMQs9-c#0?KVIMjWQl|*b+?gqAb~Vb+EHNj5L%qW1h=yS3IuD-F-?EcM7uWgH z-!z9EtRT)isax`0B*!T@SCt0Z+!zjju&jdt5?)ldzJTQUMBP{}i6sPzFBApQNQPOZ zg(%=)dZ{O?ycG1JiGyj8zXB1|aU&>R`zrfHb#=Ds7LDN6*oaP-?Hc~A7?pGUcO6x# z&M1x1H;aj-CE>H{f@K5E=$7*zHakgd1A}A1zA|(<%VCV&lUA_0S|eR#(NUAsf^b}S zWm!e=N>;aDVS=n!2d6ew@k-D)%BJI8u8Hp#l<4W*%~a7|!>U4o@j|x5e5A{EJ-f7yKhlWMdAjT{Zuq@8iP2@`fv+#|CaTV7UpkW5CwG=!Wv|H-DaRh> zpzHTHSyv}Ae24(aleDymLJ&qfOqLA%-)4HU{Z4?T*pUvc)c(-bJ36XnX*}}y{C$fd zu?IbfBf8m}n$AON?2$$KyK&kd1%q}cr-nLAMW%X`QH`rv7bWfe!CF&Nge*9f22FQ# z(+UxyI)$$$wtRuSzVQu0wHs03Z`@+ zeK1;TTW@k(@v1#nkkfcj*nPVsVp|L?zv_Veceq{kLt&+c53mZ`6KGdFRe1oF8E((I z&G}-)FTnmorB|)V=V0js^9AvOsPI^S5hz>Il3%IdbWOQ-K6Mb3>@}^PkHnlXnQ@Ku zBDXG3t<{`_5pEkUcSX7#06Ld_#?pS_clv#5j`QNn3Fj+`>x>A4mJbJ0ITN}uyE5-K zjC)>lljht8k48anF+hH4G`xUgY4P?LArkw(1hSBoEYB9>O8)D=EZY|26~DEUflhv5 zN6l!_$oJ;7W#7;uYcF?D`*&So2ZMgLUg@p(A|{KORFa4h!?)Z*hQ2eQ*8uQEU*(w zePP}qBW31%D|wzjni|E#{bL)H6@C(+qFBx%sycr)I)-T+L=jbvLG~gQPC2qW@6Gmg z;Ll2^X#+otj%O=Rm`|yw_dFam z7NiTC4^AHEe^1ZJqD08+T$sO?X& zjD}u3WU&&y3=(J1J0*B6Gipu;=MAlTCk0AkP1Nl`Y&TP+qb@UQibmjfi~>b@>0wh9 
z^`n?q<*FQeEsyyBi@o=bYBFp4Mx&0yj0I*MK}MyFI*N1)MQX-T#0Cf`C>Ww5QUlTw zLI_}G1OW?zf)ME{y(FO}K~bVah@k}%ASyzDBm@W~K-#$z=6RlZpYwk2ch*_!e19EQ z7VB2-?0fI)zV@|$SJ$VEq`?^}p*s^s_I_LRr@C{km*X3RTDxmM0*86bnQNw)(df+z zDKhcog=nI|k42Bx5(qwpH}WnxkoTgvXBb@vLcoPDNOL`Xv;FC%V>Kbr2|5sb#7|GF zlQosYo}!^u0chsH4IJN)7ZJP_CVj+sD;%TMzYWuK;Dpj0dTQxWGCOOR8D1ISt15n* zZj+Jg!6I&};g!wzeVMY-U_u-X;5-{2)Z<1Ks`jc@O{z<4 zy@-$QI>$2xR6k;_Eh3L{_Ej-(Reqw<{M=G$3uA1C_v!_~tm_kQ2BYnOIfTB0`_j;O z5?90WzU?4(!a(T9h@l&+BsNQUI4uF5vpiPKdo|&YeY(yP_AdMVelZL*TH_#k7_H8d?hDOZ zrhn57%mGMUeb>s^Pwm5WtZrTW&TF2<>G^^A$^}1REgv4)xp;K~6 zxg}u#a;vIZLE5q(YubxQg!wVCVa%hqMGj(p0E4mr5wDxvj{u|vD|D)j3D{P2XE6s* z=n_AUo8al{-G!Wj09PP1MR&rlR^=k=$G%TV{U)f+qyfZ1&N&vczbA}exyDyF`a8In z?~KY2uImHtEbH!>OQqYQpTlE1?Z0OA&9y~+eePFzm)A01*Jn<3=)<9njZsfCq3QNO zSo!UTdzMeVcvQ7yw7BW)Hke9o+~h6)T(Zpud`Em(j?vsxb@^b~5|Hi5e#*8TKTq`& z7N_6@1zd^PB;rnW&)IV1a!EG))t1r?p7q47RlB#+iS)X*`gqEf(h9%xZ+EamA^Zh) zm;DrYL6*HkuB7Up$ajEG{HUqIA7o{jTl9CUPYh*S~48$=$VpAv2@96!ikUoMP! z0i+O9TB20Pt9+L;hx0h~=;M$(S)Au{k!Q^4`x&#wFiT^6W{vPdQ59Gq{sX$PRr7u@ zt^#a%u3nDsNfvH$fxVJhF0cMIR$eQDu-ef;34_KgmLpeKzO*3#>S{J!z*8;Hushw$ z9l~-|PV3%@?v6F8b(UQr-GPy_S9A9d_?27jNl~pHvr7rpL|r`7{#k7v#=9uG%fQw4 zq^f>Hx7m@(Gj%P(B(f2Jwo8F@TyTzqJs!%Mkq=+16*R9B7L@GkzE@0DzG#=yko{;bnp_z zUdRkIZAxF1UHg+Lnv&4tEf@vp!#z{WJ3`M(EiBrw`UY=RkEf~ud0sFI+dW9@EmhE*<;g8S;mVcrGQvh;B*_iHmBI-WYUiqdko-I<9A9uVjP zN=b=dPs7xvGsm#(Q#ey2vKPv^WHibNTs*diPYY+(!63v9STG76SgfgAhH2_3J&XUPnIueEacTnf{NZBWDJqE3G^D>pwAZ!WiR%g)P>_3o`gH%1 zPE;Ra>VTVIEjD>81d(A<$7-*CIiUliSN5K!*~Nx=pE0V;FF?)9Z~>C7*mqO^sDP$U z5Fj++L?Q6MBgldU`vZPk;^YM~$Es)tGH&7t_Wi5}*>s`QnmSM3o8VmJSKYQS;s6Ap zXD9HGDSV1FV$l9jPyO0~7Xh%W;E~9U*vI3=yfMbmrc=r5Ks}-N%ZkEs)NrSIFTBvqo_$AwJ(W%o`X-0AhlE*F(M z(Vtd*H1Y4e{@$dtcH-S#M!R#^2I7HK>(w=!5sNhc(rwr<)-|eW*;tZsZ0*^o&PdmI z2A0PF7`Fl)7Ud6QHbWg&@!DXw1=*8-;?6QZK#%i27<+_yiwP#nA(;z6oFVkvfys0= zG@>k^k<0dnK<}K&H^q{l=kr=N$aa*Dx0I$2v(-sb_GC_wkPPtowc;?6)*EO1Rq)m~ z&L|rCZCK8BE~@axi6i8Ji8q8J;)G8BW1~i8toHfSQQ%N|@^n)V1XfpmquD3|T_C#_ zaNUbft2s`#(#9~;qAlW|ZniRU41xaQK;|?JiTx678iy}TVKL_s=g)$E*2yR()@53}~U>Cxa4hjcyc9TT>}vYU+^593?S^m@u3 z@SJ$l#Em==Wh>(Ow3!sjbYE^E>#(y1NBpu+u;R@!a^cL_zBw59+Y^r-BILxEp=sR? zTWGuyz}sd+z88ZW_`DMp029B|0HlU_foP%l&N@EK_0Vv<1 zH(XXY`V`CxO94AC`E^s_iv`@8rnjbtd083oK?$mZlMTPbhNbhC6Oe@9k*4p3=p)_| zYvf_7Ik(}kaGRLuAWpEUa67^WqT+@S>tUAQP<5EM(_zY|_5>Y90Vf;TrJIPYRe6U~ zeb=MnJ<7bv>0MNtIxQqXR4%ae4Ji$vs{A#NbHiAx=sM@HA1Muc&d+}h-qImE*Nr1^ zYvC|=inC=9i})oE>Oq}8d04oU;F6oxPea(E1s9IMnDjZ;L;B{`l86CQqxw@pn%b-& z;`?g7TPFr9ms?&OEDU@}Xm(a#m$3h4U^V#$V5VeD95~&N2#{Z4m2LO}&Kj#zpT(g4 zAbK?SiIY5urd?VWCCn7XyYzN$bJ80TbZ(4Q4( zfELqE0~jP^+pIW_7j}6PEwp`yyKY|8IekLzMRriYk>up*G9#QO zG2v+`bV2%rw3|j1#!M9YK1sWqjj-B1_$B`7e4RFNPbbUSh4RX;e|C`Q6M3&s%tftZ zcH#aErN7HVFf3p;WC1VKR}3XUrN^MnOX&P`ZF*Rti4dJ4JOrpPnfd*|X?c?U<5*lP zQSZc{9y35ZB{{;@pow8-&Yj5L_O5HLW8C!1iuftLU8G|FPA9||{k}uRP1ic1J1og? 
zd2gT2Tgfo{F!+{`0Hag|X5LKX4S0lK3vnNR8HG*dU(3Ya6DC3;A7IyEITln4;3Q?> z!y4FGeRX!>P+I6W(vWi4X#aT z!zuH{vbtX3(;7WFnp1D!mxmTCaOx-axzx+x+2)$9Wna^a&B>>z`~%kCJBm}oC`2BQ z^n^zIM-6!x7am7CUsttc|7*}To0hBDhX(@j74)Bkb^W+hTM~>%45C8K2!JLitgBa8 z;2J_`Xl)=EQFqh$WEl)vusE7*`rXu%SH1jY%Cumnp-6tLZT`TQs<8)_l0qH+JQ1L0p=h(Olampb}Va|9!s~SS}i$Qc}j* z0H*!}i|CY6W2ol=Og1<5s7S*3J%b6G|Jl$XQaA*oLSii$|Ie}Rdkzocu(Ru8*WY56h+ zsI`}wtoV^2=YMeR@#R$Q&7-10zXjB7XA{b-huCe<(Tr6qKjQG)Mm%QhVdaemZK{AX z__#89u7^g&N>GIhIPzZOBaBZ36{^IismgeLLQm!q@biRgjhDi^y;@QS|Z}2NU+Z@*C z^^WL$l2;TNl8`rusL-X^7mro5cXbqCwq2Z*9D2nEeQ{}McD}(|MM?J@FT07ikHbCB z_uJ!OmRkBHS$eHs=Y7YbpV#5V_2k;}%W9jawC0;&C!xR5P|=Agi)j}pA{gr*y;##z z+u3uyZ-z30aJ@o48-(}8Ff6#%O98&6+l-Y5eDGUnNRZf}r$2teCOZIJ_LSWssmE4v zu-j+Fsqz-XpiQj1Q~oRunyg|r#AZ_N%AN+3ll#eYdIm7&QNcXpU9rIwV~%=; zBOgeu5(Wm#^O=3y>P)ee%1NRAXyyh8-98>;s?XjpJXdbfnqNT7r*Kr8tw%(2nf2XmL@%2#`C5v#X9GR`(s% z-H`BR81a+A=+V*1o5A4w!iz8H3p#*MRqMs-?neF8FHf4${k+GzI%CaayMb4bg(gNt z)Io^K$2lT#c8BcE5#|SzM+8uJ#1b5GE3?nJ*<}m<%6S!sX89f%YydI$b4^cbrc%;k zQ^7R%`dqAP^6>aBSndU$E&EHUolCuap0FoCn{vE(xawrV=uw&wX~KKCcu6RLEx>XD z2KTH!MSitxCq6%6{l#cB0Ic7%{1!(%N%3f(w9Xsc7YZZEzExK(lYdON!M#m-82gt)p&-SPu9^e)9PB0O?YcTd!SWlh! zc&Akji%78-tsXB~^GZIm!HJ&Kssjo(`6NvOH@(1VN`cpv@WwtPw>16EV4A-5gyB>x z!^HV5B2%Dsj%ji#L^5Y`lW>}2uwMg?%<$7zm%heTY!o}&?|?Va=7bwXl3MsICIvUo z^F2Inm4Ol32HL#r_3yyEtuMYR_t-Z0h*B9qg!I(ieogF+v#5?`gPRET`9i+l$$eajo1lRD`XO!~BhcJ?h6R?@4LPQW3VM$M*u_1yVF zfi5+z>SJ9cws$v#Np%#HaFh2fx~)apITRrt?d;9k?z;W_UB;`cvR!C$q|J0KJ|$P! zayP)cPZ$_PucYP&#PIBUM2-D2rRQn78hv~JH;E(pXI9dpFR29BNZR~DK}x_=Nep@| z6lF3&1@p|$Fqg$klUk)Ice27^+Z=lE%vK%p0@1=xtGFyZE5JrSkEIzjfa|FhEx5y% zSJcbqFcrDH6sB&crJn!Q`#pN7)%Ncs38#R9ZC@roRF8sq02uVS!lpz;Q zw=DO&a~C@bwB5cit6`WjtECyhS~aEw?z}?yS#q%-UR|5Rx>8Z#U_ap9K|2awy0)7> zMKr7CuP>O0DV;FKrV|3TcIf0t>v^)PVy%tA?n0|(?Z-GGbTruBnk?Yc;_9c(#TjVU z%A1=kSV+Nz56Y85SvX;@f9(5tF0m$nwDF?l0PzpnKBcm1l*&#&usn#lIq_taOf$1h z_2+0=z@!gIBeZ>8R46$Z3<2ArjQCB{3uH;9Z=Icg&E0-TL3p%n*eLxtv#km>StW%V z%+`lM+I9v(ZO~GvpuS#9HdI$sVFy&;h=w&I%Y#@8;)?cJrn3YmU=z(@rmo`PgjMi6 z)=9Mjx>gid#jxoGE$P(;x=QR1;!9^r_nS`*bj-cfF1XpFJ_xgY2Va{QU89!`(KN)F zx-&8+U%LGH0%O!OVIyPsdB5!ycRTSOafNqHE(1}rdx@W{!=iNhJt%*!duZ_Ml-7}& zg^dnULh_9rMBd7=37Xa(Dg~!2Ul>a?uAMGepY8XB~l6 zWlZjge==UDA8mF~pS!9i-L7G3je3o)E##b9z8XfFXd3&|1q&1?h4?O~;3ZI;wmgLd zn!M~H>2o;>B;K0555WvX?+-2Q>nHVRFNc$coS5*{UvQ>%eRD5wHw(|1xQh}<* zdl<&2q*$=qdqX|x<%@X@fAb6M6xV!j%3w6T!OpYX3aA1X;BA32c@gEr3`=4}x>jWM zQ!8&4_?lDk|ct-lvJ1?#wTPDWp@@^ejSI(|F285X7U9T3R^ z`dtrlD{K9S?De0CH=2@r{2G#51l{#K%Z8@Yx{op)*Je8z{A0j$-MlHwX!Sre)SSQ-s2Jd4kX#7 z1gGMPn2YN*gk4BTy((cq^zMdJQ(Ig}Q1Ch+@&&W}xVtn;qvs?`aW3|_m3JrTkDUu) zmj*KI-9i$$32VKE!jUk2r|t zjB1eaySypBx`BmbSg7hnutpGf^t)akV$2#)^GBh+>(70vd~t2Vz0wTUEjQh>>!!2g zw^U*r+Rpwda=5_M&64%#^spxnuV3!I)L~K2+{y4%Pz&_KAeoZ!;+LQ7juw~vbcufl zf4=D4uU>iENhPEdt~Qc!2mNjbXu!cBFZ0GLnKASfV>Ny`C!6(CEi{5LaHGk1B=f}t zFQvgX;X;9FRn~Q4J!aTv84Pt+4*q=|g7orqnH)d71!j7L+5WQzic;0Xv2$IO;FF!1 ztYvTxSjTXmg*tQ9^PFXSjYb%$BtCdzU#~E~m@43V&Jxv|7Q*QB*i(K(wYFt3w!feQ zZLoK7`tRV%@&Er{{|711nxWM^dAg3PMVS0lW4pAbV3z%(;Dy@%?&CtY1txh^Gq=@f ztMg_~o9)#9{5c@}e{=utUDi*Z>OBL(vyi?2^T(fy1m<(^-pZUjpm0ciWot~PokIW zYsiP2Kzw3*zW)~oq8Fblzr6v( zVat`*_jXzza-F-VxD~WpzbW&q;yN+>h~AzPI?61L*54fHwaV+gX%pZ5`&koS9xF5W zIrsS<ssdZzr&&8w-{b#GIlG- z_qS*6p02ahiA7o`k%lonh_MN$B>Sf_J3W`n>dQBt8@a>>j=DxcO_}``|FMF-u1&sn zn$)()W#Y%_JEYl9SY$-7-}>nj@yP$xHx3yzlnR7v5O``b(Nd?$V(dJ!_vzs93~+5e z#*x{@+NI+x0`t^|Yxe#(&pSkHD7)OWj)T0W2c7BSLpj=f;LZ(>Vb)PbD{n#-^+&)X zB|AKM@;o43{0W@)YPHl=PWBico-I3~EHLd&$@_`1LhaqZ<(6UUs?Noa=2xj1H)u{0 z=+jPm4r@nk1IG3=0bF%4cyQAnIZBRcy|s8bnzz%B@m=~$t84gXZJz_A${Wkqm)1^Z 
zaN-^L?pUsOx9ZMTU;p{z>&<7t3+5r<#w`71d9$h#BWgCBmHwOACLaXy|L?UP=WaWn9BY8 zVUAz_NA}=t6|D@u%5OVXY*>|ftLs_Vj=bHc?@YT`D!xm!&|Ftf`oG6i-*;2O8}Gko z&+WF|z3-env+nw}H{Jj{r@(qXyK?p=Rz`b%S@*NcUo)K+sH@Mk-nR5_`sbV9c0g;S z133{J_q+zyVB(b_KSD3wy0tiIU44#E_{QtYZhjL~-q*3$x3S76YTsJ#=)}0+hn4}4 z=zYNbY`55{og`!3I<9iZb9*rTNuzz%ne*c31R0@xI&dmVIaa9Q$t0 zx6`+tPE_ChssB{wZ8>}2E$jXzxm{1LT>D;EFN z+Z7ylTW+iG!M?)VtSoDn%RaXgffX+@x}x51{YpfaueDgSXoKjT+CrP8e$SQ1-*tR* z4fEbB@UHubT~vGS`!CyoM>;Idl__HXb|dG0#?QL@S$ntboMFJa+rhvjcgx?fJ=eDd z7eH&*f+-;t;f;5?-gI>x(VBMs{e`|4fA@V@XFadBG5T#-&Q+rl@$w_ Date: Wed, 13 Jul 2022 16:42:49 +0800 Subject: [PATCH 15/19] docs: add notes for the conda environment recipe --- README.md | 13 +++++++++++++ docs/index.rst | 13 +++++++++++++ 2 files changed, 26 insertions(+) diff --git a/README.md b/README.md index a81c66a6..b15ed1b9 100644 --- a/README.md +++ b/README.md @@ -221,6 +221,19 @@ cd TorchOpt pip3 install . ``` +We provide a [conda](https://github.com/conda/conda) environment recipe to install the build toolchain such as `cmake`, `g++`, and `nvcc`: + +```bash +git clone https://github.com/metaopt/TorchOpt.git +cd TorchOpt + +# Use `CONDA_OVERRIDE_CUDA` if conda fails to detect the NVIDIA driver (e.g. WSL2 on Windows) +CONDA_OVERRIDE_CUDA=11.7 conda env create --file conda-recipe.yaml + +conda activate torchopt +pip3 install . +``` + -------------------------------------------------------------------------------- ## Future Plan diff --git a/docs/index.rst b/docs/index.rst index e4ffd624..23fb3250 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -28,6 +28,19 @@ You can also build shared libraries from source, use: cd TorchOpt pip3 install . +We provide a `conda `_ environment recipe to install the build toolchain such as `cmake`, `g++`, and `nvcc`: + +.. code-block:: bash + + git clone https://github.com/metaopt/TorchOpt.git + cd TorchOpt + + # Use `CONDA_OVERRIDE_CUDA` if conda fails to detect the NVIDIA driver (e.g. WSL2 on Windows) + CONDA_OVERRIDE_CUDA=11.7 conda env create --file conda-recipe.yaml + + conda activate torchopt + pip3 install . 
+ The Team -------- From 8c67893983089aed8982c00ac77d0d54394f40b0 Mon Sep 17 00:00:00 2001 From: Bo Liu Date: Wed, 20 Jul 2022 01:46:57 +0800 Subject: [PATCH 16/19] docs: update readthedocs (#22) Co-authored-by: Xuehai Pan --- .clang-format | 2 +- .dockerignore | 1 + .github/ISSUE_TEMPLATE/bug_report.md | 12 +- .github/PULL_REQUEST_TEMPLATE.md | 4 +- .github/workflows/lint.yml | 45 +- .github/workflows/tests.yml | 11 +- .gitignore | 294 ++++++++- .pre-commit-config.yaml | 46 ++ .pylintrc | 593 ++++++++++++++++++ CMakeLists.txt | 2 +- CONTRIBUTING.md | 6 + Dockerfile | 87 +++ Makefile | 62 +- README.md | 4 +- conda-recipe.yaml | 16 +- docker/dev.dockerfile | 25 - docs/Makefile | 4 +- docs/_static/js/copybutton.js | 64 -- docs/conf.py | 95 --- docs/requirements.txt | 21 + docs/{ => source}/_static/css/style.css | 50 +- .../_static/images/logo-large.png} | Bin .../_static/images/logo-torchopt.pdf | Bin .../_static/images/logo.png} | Bin docs/source/_static/images/maml-accs.png | Bin 0 -> 33854 bytes docs/source/api/api.rst | 224 +++++++ docs/source/bibtex.json | 7 + docs/source/conf.py | 210 +++++++ docs/source/developer/contributing.rst | 72 +++ docs/source/developer/contributor.rst | 7 + docs/source/examples/MAML.rst | 277 ++++++++ docs/{ => source}/index.rst | 44 +- docs/source/references.bib | 19 + docs/source/spelling_wordlist.txt | 69 ++ docs/source/torchopt101/torchopt-101.rst | 9 + docs/spelling_wordlist.txt | 0 examples/L2R/helper/argument.py | 5 +- examples/L2R/helper/model.py | 21 +- examples/L2R/helper/utils.py | 34 +- examples/L2R/l2r.py | 35 +- examples/LOLA/helper/agent.py | 4 +- examples/LOLA/helper/env.py | 16 +- examples/LOLA/helper/utils.py | 11 +- examples/LOLA/lola_dice.py | 36 +- examples/LOLA/visualize.py | 6 +- examples/MAML-RL/helpers/__init__.py | 7 +- examples/MAML-RL/helpers/tabular_mdp.py | 15 +- examples/MAML-RL/maml.py | 34 +- examples/MGRL/mgrl.py | 7 +- examples/few-shot/maml_omniglot.py | 25 +- examples/few-shot/support/omniglot_loaders.py | 91 +-- examples/requirements.txt | 13 + examples/visualize.py | 13 +- include/adam_op/adam_op.h | 2 +- include/adam_op/adam_op_impl.cuh | 2 +- include/adam_op/adam_op_impl.h | 2 +- setup.cfg | 15 +- setup.py | 110 ++-- src/adam_op/adam_op.cpp | 6 +- src/adam_op/adam_op_impl.cpp | 4 +- src/adam_op/adam_op_impl.cu | 4 +- tests/requirements.txt | 4 +- .../high_level/test_high_level_inplace.py | 1 - .../unit/low_level/test_low_level_inplace.py | 1 - tests/unit/test_clip.py | 6 +- tests/unit/test_schedule.py | 13 +- torchopt/__init__.py | 42 +- torchopt/_lib/adam_op.pyi | 70 +-- torchopt/_src/accelerated_op/__init__.py | 7 +- .../_src/accelerated_op/adam_op/adam_op.py | 39 +- torchopt/_src/alias.py | 183 +++--- torchopt/_src/base.py | 91 ++- torchopt/_src/clip.py | 29 +- torchopt/_src/combine.py | 18 +- torchopt/_src/hook.py | 6 +- torchopt/_src/optimizer/adam.py | 41 +- torchopt/_src/optimizer/base.py | 52 +- torchopt/_src/optimizer/meta/adam.py | 42 +- torchopt/_src/optimizer/meta/base.py | 45 +- torchopt/_src/optimizer/meta/rmsprop.py | 48 +- torchopt/_src/optimizer/meta/sgd.py | 33 +- torchopt/_src/optimizer/rmsprop.py | 49 +- torchopt/_src/optimizer/sgd.py | 36 +- torchopt/_src/schedule.py | 54 +- torchopt/_src/transform.py | 73 +-- torchopt/_src/update.py | 30 +- torchopt/_src/utils.py | 102 +-- torchopt/_src/visual.py | 109 ++-- torchopt/version.py | 2 +- tutorials/1_Functional_Optimizer.ipynb | 9 +- tutorials/2_Visualization.ipynb | 7 + tutorials/3_Meta_Optimizer.ipynb | 7 + tutorials/4_Stop_Gradient.ipynb | 7 + 
tutorials/requirements.txt | 8 + 94 files changed, 3128 insertions(+), 1036 deletions(-) create mode 120000 .dockerignore create mode 100644 .pre-commit-config.yaml create mode 100644 .pylintrc create mode 100644 CONTRIBUTING.md create mode 100644 Dockerfile delete mode 100644 docker/dev.dockerfile delete mode 100644 docs/_static/js/copybutton.js delete mode 100644 docs/conf.py create mode 100644 docs/requirements.txt rename docs/{ => source}/_static/css/style.css (61%) rename docs/{_static/images/logod-07.png => source/_static/images/logo-large.png} (100%) rename docs/{ => source}/_static/images/logo-torchopt.pdf (100%) rename docs/{_static/images/logod-05.png => source/_static/images/logo.png} (100%) create mode 100644 docs/source/_static/images/maml-accs.png create mode 100644 docs/source/api/api.rst create mode 100644 docs/source/bibtex.json create mode 100644 docs/source/conf.py create mode 100644 docs/source/developer/contributing.rst create mode 100644 docs/source/developer/contributor.rst create mode 100644 docs/source/examples/MAML.rst rename docs/{ => source}/index.rst (57%) create mode 100644 docs/source/references.bib create mode 100644 docs/source/spelling_wordlist.txt create mode 100644 docs/source/torchopt101/torchopt-101.rst delete mode 100644 docs/spelling_wordlist.txt create mode 100644 examples/requirements.txt create mode 100644 tutorials/requirements.txt diff --git a/.clang-format b/.clang-format index 3d22e0a8..f6cb8ad9 100644 --- a/.clang-format +++ b/.clang-format @@ -1 +1 @@ -BasedOnStyle: Google +BasedOnStyle: Google diff --git a/.dockerignore b/.dockerignore new file mode 120000 index 00000000..3e4e48b0 --- /dev/null +++ b/.dockerignore @@ -0,0 +1 @@ +.gitignore \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 9520f2ee..55dacf60 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -23,9 +23,9 @@ Please use the markdown code blocks for both code and stack traces. import torchopt ``` -```bash +```pytb Traceback (most recent call last): - File ... + File ... ``` ## Expected behavior @@ -33,14 +33,16 @@ Traceback (most recent call last): A clear and concise description of what you expected to happen. ## Screenshots + If applicable, add screenshots to help explain your problem. ## System info Describe the characteristic of your environment: - * Describe how the library was installed (pip, source, ...) - * Python version - * Versions of any other relevant libraries + +- Describe how the library was installed (pip, source, ...) +- Python version +- Versions of any other relevant libraries ```python import torchopt, numpy, sys diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index b19443c7..4225daaf 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -8,7 +8,7 @@ Why is this change required? What problem does it solve? If it fixes an open issue, please link to the issue here. You can use the syntax `close #15213` if this solves the issue #15213 -- [ ] I have raised an issue to propose this change ([required](https://torchopt.readthedocs.io/en/latest/pages/contributing.html) for new features and bug fixes) +- [ ] I have raised an issue to propose this change ([required](https://github.com/metaopt/TorchOpt/issues) for new features and bug fixes) ## Types of changes @@ -32,7 +32,7 @@ What types of changes does your code introduce? 
Put an `x` in all the boxes that Go over all the following points, and put an `x` in all the boxes that apply. If you are unsure about any of these, don't hesitate to ask. We are here to help! -- [ ] I have read the [CONTRIBUTION](https://torchopt.readthedocs.io/en/latest/pages/contributing.html) guide (**required**) +- [ ] I have read the [CONTRIBUTION](https://torchopt.readthedocs.io/en/latest/developer/contributing.html) guide (**required**) - [ ] My change requires a change to the documentation. - [ ] I have updated the tests accordingly (*required for a bug fix or a new feature*). - [ ] I have updated the documentation accordingly. diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 1e91dde2..35b9d2a1 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -2,6 +2,8 @@ name: Lint on: push: + branches: + - main pull_request: permissions: @@ -23,7 +25,7 @@ jobs: submodules: "recursive" fetch-depth: 1 - - name: Set up Python 3.7 # the lowest version we support + - name: Set up Python 3.7 # the lowest version we support uses: actions/setup-python@v4 with: python-version: "3.7" @@ -37,9 +39,15 @@ jobs: method: network sub-packages: '["nvcc"]' - run: | - echo "Installed CUDA version is: ${{steps.cuda-toolkit.outputs.cuda}}" + CUDA_VERSION="${{steps.cuda-toolkit.outputs.cuda}}" + echo "CUDA_VERSION='${CUDA_VERSION}'" >> "${GITHUB_ENV}" + TORCH_INDEX_URL="https://download.pytorch.org/whl/cu$(echo "${CUDA_VERSION}" | cut -d'.' -f-2 | tr -d '.')" + echo "TORCH_INDEX_URL='${TORCH_INDEX_URL}'" >> "${GITHUB_ENV}" + + echo "Installed CUDA version is: ${CUDA_VERSION}" echo "CUDA install location: ${{steps.cuda-toolkit.outputs.CUDA_PATH}}" nvcc -V + echo "Torch index URL: ${TORCH_INDEX_URL}" - name: Upgrade pip run: | @@ -47,20 +55,37 @@ jobs: - name: Install dependencies run: | - python -m pip install -r tests/requirements.txt + python -m pip install --extra-index-url "${TORCH_INDEX_URL}" \ + -r tests/requirements.txt -r docs/requirements.txt - name: Install TorchOpt run: | python -m pip install -e . + - name: pre-commit + run: | + make pre-commit + - name: flake8 run: | make flake8 - - name: isort and yapf + - name: pylint + run: | + make pylint + + - name: isort and black run: | make py-format + - name: cpplint + run: | + make cpplint + + - name: clang-format + run: | + make clang-format + - name: addlicense run: | make addlicense @@ -69,10 +94,10 @@ jobs: run: | make mypy - # - name: docstyle - # run: | - # make docstyle + - name: docstyle + run: | + make docstyle - # - name: spelling - # run: | - # make spelling + - name: spelling + run: | + make spelling diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index ab319544..f67d9b46 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -39,9 +39,15 @@ jobs: method: network sub-packages: '["nvcc"]' - run: | - echo "Installed CUDA version is: ${{steps.cuda-toolkit.outputs.cuda}}" + CUDA_VERSION="${{steps.cuda-toolkit.outputs.cuda}}" + echo "CUDA_VERSION='${CUDA_VERSION}'" >> "${GITHUB_ENV}" + TORCH_INDEX_URL="https://download.pytorch.org/whl/cu$(echo "${CUDA_VERSION}" | cut -d'.' 
-f-2 | tr -d '.')" + echo "TORCH_INDEX_URL='${TORCH_INDEX_URL}'" >> "${GITHUB_ENV}" + + echo "Installed CUDA version is: ${CUDA_VERSION}" echo "CUDA install location: ${{steps.cuda-toolkit.outputs.CUDA_PATH}}" nvcc -V + echo "Torch index URL: ${TORCH_INDEX_URL}" - name: Upgrade pip run: | @@ -49,7 +55,8 @@ jobs: - name: Install dependencies run: | - python -m pip install -r tests/requirements.txt + python -m pip install --extra-index-url "${TORCH_INDEX_URL}" \ + -r tests/requirements.txt - name: Install TorchOpt run: | diff --git a/.gitignore b/.gitignore index 87e9b834..5deaf2bb 100644 --- a/.gitignore +++ b/.gitignore @@ -1,20 +1,4 @@ -.vscode -.idea -build -__pycache__ -torchopt/**/*.so -torchopt.egg-info -dist -**/.ipynb_checkpoints/* - -# Sphinx documentation -docs/_build/ - - -# mkdocs documentation -/site - - +##### Python.gitignore ##### # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] @@ -28,7 +12,6 @@ __pycache__/ build/ develop-eggs/ dist/ -wheelhouse/ downloads/ eggs/ .eggs/ @@ -88,6 +71,7 @@ instance/ # Sphinx documentation docs/_build/ +docs/source/_build/ # PyBuilder .pybuilder/ @@ -103,7 +87,7 @@ ipython_config.py # pyenv # For a library or package, you might want to ignore these files since the code is # intended to run in multiple environments; otherwise, check them in: -# .python-version +.python-version # pipenv # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. @@ -112,7 +96,22 @@ ipython_config.py # install all needed dependencies. #Pipfile.lock -# PEP 582; used by e.g. github.com/David-OConnor/pyflow +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm __pypackages__/ # Celery stuff @@ -125,6 +124,7 @@ celerybeat.pid # Environments .env .venv +env/ venv/ ENV/ env.bak/ @@ -153,3 +153,257 @@ dmypy.json # Cython debug symbols cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. 
+.idea/ + + +##### macOS.gitignore ##### +# General +.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + + +##### Linux.gitignore ##### +*~ + +# Temporary files which can be created if a process still has a handle open of a deleted file +.fuse_hidden* + +# KDE directory preferences +.directory + +# Linux trash folder which might appear on any partition or disk +.Trash-* + +# .nfs files are created when an open file is removed but is still being accessed +.nfs* + + +##### Windows.gitignore ##### +# Windows thumbnail cache files +Thumbs.db +Thumbs.db:encryptable +ehthumbs.db +ehthumbs_vista.db + +# Dump file +*.stackdump + +# Folder config file +[Dd]esktop.ini + +# Recycle Bin used on file shares +$RECYCLE.BIN/ + +# Windows Installer files +*.cab +*.msi +*.msix +*.msm +*.msp + +# Windows shortcuts +*.lnk + + +##### Archives.gitignore ##### +# It's better to unpack these files and commit the raw source because +# git has its own built in compression methods. +*.7z +*.jar +*.rar +*.zip +*.gz +*.gzip +*.tgz +*.bzip +*.bzip2 +*.bz2 +*.xz +*.lzma +*.cab +*.xar + +# Packing-only formats +*.iso +*.tar + +# Package management formats +*.dmg +*.xpi +*.gem +*.egg +*.deb +*.rpm +*.msi +*.msm +*.msp +*.txz + + +##### Xcode.gitignore ##### +# Xcode +# +# gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore + +## User settings +xcuserdata/ + +## Compatibility with Xcode 8 and earlier (ignoring not required starting Xcode 9) +*.xcscmblueprint +*.xccheckout + +## Compatibility with Xcode 3 and earlier (ignoring not required starting Xcode 4) +build/ +DerivedData/ +*.moved-aside +*.pbxuser +!default.pbxuser +*.mode1v3 +!default.mode1v3 +*.mode2v3 +!default.mode2v3 +*.perspectivev3 +!default.perspectivev3 + +## Gcc Patch +/*.gcno + + +##### JetBrains.gitignore ##### +# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm +# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 + +# User settings +.idea/* + +# User-specific stuff +.idea/**/workspace.xml +.idea/**/tasks.xml +.idea/**/usage.statistics.xml +.idea/**/dictionaries +.idea/**/shelf + +# Generated files +.idea/**/contentModel.xml + +# Sensitive or high-churn files +.idea/**/dataSources/ +.idea/**/dataSources.ids +.idea/**/dataSources.local.xml +.idea/**/sqlDataSources.xml +.idea/**/dynamic.xml +.idea/**/uiDesigner.xml +.idea/**/dbnavigator.xml + +# Gradle +.idea/**/gradle.xml +.idea/**/libraries + +# Gradle and Maven with auto-import +# When using Gradle or Maven with auto-import, you should exclude module files, +# since they will be recreated, and may cause churn. Uncomment if using +# auto-import. 
+# .idea/artifacts +# .idea/compiler.xml +# .idea/jarRepositories.xml +# .idea/modules.xml +# .idea/*.iml +# .idea/modules +# *.iml +# *.ipr + +# CMake +cmake-build-*/ + +# Mongo Explorer plugin +.idea/**/mongoSettings.xml + +# File-based project format +*.iws + +# IntelliJ +out/ + +# mpeltonen/sbt-idea plugin +.idea_modules/ + +# JIRA plugin +atlassian-ide-plugin.xml + +# Cursive Clojure plugin +.idea/replstate.xml + +# Crashlytics plugin (for Android Studio and IntelliJ) +com_crashlytics_export_strings.xml +crashlytics.properties +crashlytics-build.properties +fabric.properties + +# Editor-based Rest Client +.idea/httpRequests + +# Android studio 3.1+ serialized cache file +.idea/caches/build_file_checksums.ser + + +##### VisualStudioCode.gitignore ##### +.vscode/* +# !.vscode/settings.json +# !.vscode/tasks.json +# !.vscode/launch.json +!.vscode/extensions.json +*.code-workspace + +# Local History for Visual Studio Code +.history/ + + +##### Vim.gitignore ##### +# Swap +[._]*.s[a-v][a-z] +!*.svg # comment out if you don't need vector files +[._]*.sw[a-p] +[._]s[a-rt-v][a-z] +[._]ss[a-gi-z] +[._]sw[a-p] + +# Session +Session.vim +Sessionx.vim + +# Temporary +.netrwhist +*~ +# Auto-generated tag files +tags +# Persistent undo +[._]*.un~ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..e1fda090 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,46 @@ +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.3.0 + hooks: + - id: check-symlinks + - id: destroyed-symlinks + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml + - id: check-toml + - id: check-ast + - id: check-added-large-files + - id: check-merge-conflict + - id: check-executables-have-shebangs + - id: check-shebang-scripts-are-executable + - id: detect-private-key + - id: debug-statements + - id: double-quote-string-fixer + - repo: https://github.com/PyCQA/isort + rev: 5.10.1 + hooks: + - id: isort + stages: [commit, push, manual] + - repo: https://github.com/psf/black + rev: 22.6.0 + hooks: + - id: black + args: [--safe, -S, -t, py37, -l, '100'] + stages: [commit, push, manual] + - repo: local + hooks: + - id: pylint + name: pylint + entry: pylint + language: system + types: [python] + require_serial: true + stages: [commit, push, manual] + exclude: | + (?x)( + ^examples/| + ^tests/| + ^setup.py$ + ) diff --git a/.pylintrc b/.pylintrc new file mode 100644 index 00000000..81c22e5d --- /dev/null +++ b/.pylintrc @@ -0,0 +1,593 @@ +[MASTER] + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. +extension-pkg-allow-list= + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. (This is an alternative name to extension-pkg-allow-list +# for backward compatibility.) +extension-pkg-whitelist= + +# Return non-zero exit code if any of these messages/categories are detected, +# even if score is above --fail-under value. Syntax same as enable. Messages +# specified are enabled, while categories only check already-enabled messages. +fail-on= + +# Specify a score threshold to be exceeded before program exits with error. +fail-under=10.0 + +# Files or directories to be skipped. 
They should be base names, not paths. +ignore=CVS,.vscode,.history, + examples, + tests + +# Add files or directories matching the regex patterns to the ignore-list. The +# regex matches against paths and can be in Posix or Windows format. +ignore-paths= + +# Files or directories matching the regex patterns are skipped. The regex +# matches against base names, not paths. The default value ignores emacs file +# locks +ignore-patterns=^\.# + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the +# number of processors available to use. +jobs=1 + +# Control the amount of potential inferred values when inferring a single +# object. This can help the performance when dealing with large functions or +# complex, nested conditions. +limit-inference-results=100 + +# List of plugins (as comma separated values of python module names) to load, +# usually to register additional checkers. +load-plugins= + +# Pickle collected data for later comparisons. +persistent=yes + +# Minimum Python version to use for version dependent checks. Will default to +# the version used to run pylint. +py-version=3.7 + +# Discover python modules and packages in the file system subtree. +recursive=no + +# When enabled, pylint would attempt to guess common misconfiguration and emit +# user-friendly hints instead of false-positive error messages. +suggestion-mode=yes + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, +# UNDEFINED. +confidence= + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once). You can also use "--disable=all" to +# disable everything first and then re-enable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use "--disable=all --enable=classes +# --disable=W". +disable=missing-module-docstring, + duplicate-code, + consider-using-from-import + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +enable=c-extension-no-member + + +[REPORTS] + +# Python expression which should return a score less than or equal to 10. You +# have access to the variables 'error', 'warning', 'refactor', and 'convention' +# which contain the number of messages in each category, as well as 'statement' +# which is the total number of statements analyzed. This score is used by the +# global evaluation report (RP0004). +evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details. 
+#msg-template= + +# Set the output format. Available formats are text, parseable, colorized, json +# and msvs (visual studio). You can also give a reporter class, e.g. +# mypackage.mymodule.MyReporterClass. +output-format=text + +# Tells whether to display a full report or only the messages. +reports=no + +# Activate the evaluation score. +score=yes + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + +# Complete name of functions that never returns. When checking for +# inconsistent-return-statements if a never returning function is called then +# it will be considered as an explicit return statement and no message will be +# printed. +never-returning-functions=sys.exit,argparse.parse_error + + +[STRING] + +# This flag controls whether inconsistent-quotes generates a warning when the +# character used as a quote delimiter is used inconsistently within a module. +check-quote-consistency=no + +# This flag controls whether the implicit-str-concat should generate a warning +# on implicit string concatenation in sequences defined over several lines. +check-str-concat-over-line-jumps=no + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME, + XXX, + TODO + +# Regular expression of note tags to take in consideration. +#notes-rgx= + + +[SPELLING] + +# Limits count of emitted suggestions for spelling mistakes. +max-spelling-suggestions=4 + +# Spelling dictionary name. Available dictionaries: none. To make it work, +# install the 'python-enchant' package. +spelling-dict= + +# List of comma separated words that should be considered directives if they +# appear and the beginning of a comment and should not be checked. +spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy: + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains the private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to the private dictionary (see the +# --spelling-private-dict-file option) instead of raising a message. +spelling-store-unknown-words=no + + +[BASIC] + +# Naming style matching correct argument names. +argument-naming-style=snake_case + +# Regular expression matching correct argument names. Overrides argument- +# naming-style. If left empty, argument names will be checked with the set +# naming style. +#argument-rgx= + +# Naming style matching correct attribute names. +attr-naming-style=snake_case + +# Regular expression matching correct attribute names. Overrides attr-naming- +# style. If left empty, attribute names will be checked with the set naming +# style. +#attr-rgx= + +# Bad variable names which should always be refused, separated by a comma. +bad-names=foo, + bar, + baz, + toto, + tutu, + tata + +# Bad variable names regexes, separated by a comma. If names match any regex, +# they will always be refused +bad-names-rgxs= + +# Naming style matching correct class attribute names. +class-attribute-naming-style=any + +# Regular expression matching correct class attribute names. Overrides class- +# attribute-naming-style. If left empty, class attribute names will be checked +# with the set naming style. +#class-attribute-rgx= + +# Naming style matching correct class constant names. +class-const-naming-style=UPPER_CASE + +# Regular expression matching correct class constant names. Overrides class- +# const-naming-style. 
If left empty, class constant names will be checked with +# the set naming style. +#class-const-rgx= + +# Naming style matching correct class names. +class-naming-style=PascalCase + +# Regular expression matching correct class names. Overrides class-naming- +# style. If left empty, class names will be checked with the set naming style. +#class-rgx= + +# Naming style matching correct constant names. +const-naming-style=UPPER_CASE + +# Regular expression matching correct constant names. Overrides const-naming- +# style. If left empty, constant names will be checked with the set naming +# style. +#const-rgx= + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + +# Naming style matching correct function names. +function-naming-style=snake_case + +# Regular expression matching correct function names. Overrides function- +# naming-style. If left empty, function names will be checked with the set +# naming style. +#function-rgx= + +# Good variable names which should always be accepted, separated by a comma. +good-names=i, + j, + k, + ex, + Run, + _, + op, + fn, + f, + g, + p, + u, + t, + lr, + mu, + nu + +# Good variable names regexes, separated by a comma. If names match any regex, +# they will always be accepted +good-names-rgxs= + +# Include a hint for the correct naming format with invalid-name. +include-naming-hint=no + +# Naming style matching correct inline iteration names. +inlinevar-naming-style=any + +# Regular expression matching correct inline iteration names. Overrides +# inlinevar-naming-style. If left empty, inline iteration names will be checked +# with the set naming style. +#inlinevar-rgx= + +# Naming style matching correct method names. +method-naming-style=snake_case + +# Regular expression matching correct method names. Overrides method-naming- +# style. If left empty, method names will be checked with the set naming style. +#method-rgx= + +# Naming style matching correct module names. +module-naming-style=snake_case + +# Regular expression matching correct module names. Overrides module-naming- +# style. If left empty, module names will be checked with the set naming style. +#module-rgx= + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=^_ + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. +# These decorators are taken in consideration only for invalid-name. +property-classes=abc.abstractproperty + +# Regular expression matching correct type variable names. If left empty, type +# variable names will be checked with the set naming style. +#typevar-rgx= + +# Naming style matching correct variable names. +variable-naming-style=snake_case + +# Regular expression matching correct variable names. Overrides variable- +# naming-style. If left empty, variable names will be checked with the set +# naming style. +#variable-rgx= + + +[LOGGING] + +# The type of string formatting that logging methods do. `old` means using % +# formatting, `new` is for `{}` formatting. +logging-format-style=old + +# Logging modules to check that the string format arguments are in logging +# function parameter format. 
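+# Illustrative example (the variables are hypothetical, not from this
+# repository; assumes `import logging`): with `logging-format-style=old`,
+# lazy %-style arguments pass, while eager f-string interpolation is flagged:
+#
+#   logging.info('loss at step %d: %.4f', step, loss)   # accepted
+#   logging.info(f'loss at step {step}: {loss:.4f}')    # logging-fstring-interpolation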
+logging-modules=logging + + +[VARIABLES] + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid defining new builtins when possible. +additional-builtins= + +# Tells whether unused global variables should be treated as a violation. +allow-global-unused-variables=yes + +# List of names allowed to shadow builtins +allowed-redefined-builtins= + +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_, + _cb + +# A regular expression matching the name of dummy variables (i.e. expected to +# not be used). +dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ + +# Argument names that match this expression will be ignored. Default to name +# with leading underscore. +ignored-argument-names=_.*|^ignored_|^unused_ + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io + + +[TYPECHECK] + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members=numpy.*, + torch.* + +# Tells whether missing members accessed in mixin class should be ignored. A +# class is considered mixin if its name matches the mixin-class-rgx option. +ignore-mixin-members=yes + +# Tells whether to warn about missing members when the owner of the attribute +# is inferred to be None. +ignore-none=yes + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference +# can return multiple potential results while evaluating a Python object, but +# some branches might not be evaluated, which results in partial inference. In +# that case, it might be useful to still emit no-member and other checks for +# the rest of the inferred objects. +ignore-on-opaque-inference=yes + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis). It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= + +# Show a hint with possible names when a member name was not found. The aspect +# of finding the hint is based on edit distance. +missing-member-hint=yes + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. +missing-member-hint-distance=1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. 
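+# Illustrative sketch (hypothetical code, not from this repository): with the
+# hint settings above, a near-miss such as calling `optim.steps()` on an
+# object that only defines `step()` is reported with at most one suggestion:
+#
+#   E1101: Instance of 'SGD' has no 'steps' member; maybe 'step'?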
+missing-member-max-choices=1
+
+# Regex pattern to define which classes are considered mixins when
+# ignore-mixin-members is set to 'yes'.
+mixin-class-rgx=.*[Mm]ixin
+
+# List of decorators that change the signature of a decorated function.
+signature-mutators=
+
+
+[FORMAT]
+
+# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
+expected-line-ending-format=
+
+# Regexp for a line that is allowed to be longer than the limit.
+ignore-long-lines=^\s*(# )?<?https?://\S+>?$
+
+# Number of spaces of indent required inside a hanging or continued line.
+indent-after-paren=4
+
+# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
+# tab).
+indent-string='    '
+
+# Maximum number of characters on a single line.
+max-line-length=100
+
+# Maximum number of lines in a module.
+max-module-lines=1000
+
+# Allow the body of a class to be on the same line as the declaration if body
+# contains single statement.
+single-line-class-stmt=no
+
+# Allow the body of an if to be on the same line as the test if there is no
+# else.
+single-line-if-stmt=no
+
+
+[SIMILARITIES]
+
+# Comments are removed from the similarity computation
+ignore-comments=yes
+
+# Docstrings are removed from the similarity computation
+ignore-docstrings=yes
+
+# Imports are removed from the similarity computation
+ignore-imports=no
+
+# Signatures are removed from the similarity computation
+ignore-signatures=no
+
+# Minimum lines number of a similarity.
+min-similarity-lines=4
+
+
+[CLASSES]
+
+# Warn about protected attribute access inside special methods
+check-protected-access-in-special-methods=no
+
+# List of method names used to declare (i.e. assign) instance attributes.
+defining-attr-methods=__init__,
+                      __new__,
+                      setUp,
+                      __post_init__
+
+# List of member names, which should be excluded from the protected access
+# warning.
+exclude-protected=_asdict,
+                  _fields,
+                  _replace,
+                  _source,
+                  _make
+
+# List of valid names for the first argument in a class method.
+valid-classmethod-first-arg=cls
+
+# List of valid names for the first argument in a metaclass class method.
+valid-metaclass-classmethod-first-arg=cls
+
+
+[DESIGN]
+
+# List of regular expressions of class ancestor names to ignore when counting
+# public methods (see R0903)
+exclude-too-few-public-methods=
+
+# List of qualified class names to ignore when counting class parents (see
+# R0901)
+ignored-parents=
+
+# Maximum number of arguments for function / method.
+max-args=5
+
+# Maximum number of attributes for a class (see R0902).
+max-attributes=7
+
+# Maximum number of boolean expressions in an if statement (see R0916).
+max-bool-expr=5
+
+# Maximum number of branch for function / method body.
+max-branches=12
+
+# Maximum number of locals for function / method body.
+max-locals=15
+
+# Maximum number of parents for a class (see R0901).
+max-parents=7
+
+# Maximum number of public methods for a class (see R0904).
+max-public-methods=20
+
+# Maximum number of return / yield for function / method body.
+max-returns=6
+
+# Maximum number of statements in function / method body.
+max-statements=50
+
+# Minimum number of public methods for a class (see R0903).
+min-public-methods=2
+
+
+[IMPORTS]
+
+# List of modules that can be imported at any level, not just the top level
+# one.
+allow-any-import-level=
+
+# Allow wildcard imports from modules that define __all__.
+allow-wildcard-with-all=no
+
+# Analyse import fallback blocks.
This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + +# Deprecated modules which should not be used, separated by a comma. +deprecated-modules= + +# Output a graph (.gv or any supported image format) of external dependencies +# to the given file (report RP0402 must not be disabled). +ext-import-graph= + +# Output a graph (.gv or any supported image format) of all (i.e. internal and +# external) dependencies to the given file (report RP0402 must not be +# disabled). +import-graph= + +# Output a graph (.gv or any supported image format) of internal dependencies +# to the given file (report RP0402 must not be disabled). +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + +# Couples of modules and preferred modules, separated by a comma. +preferred-modules= + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when being caught. Defaults to +# "BaseException, Exception". +overgeneral-exceptions=BaseException, + Exception diff --git a/CMakeLists.txt b/CMakeLists.txt index f5aaa5f8..523dc849 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -152,5 +152,5 @@ message(STATUS "Detected Torch libraries: \"${TORCH_LIBRARIES}\"") add_definitions(-D_GLIBCXX_USE_CXX11_ABI=0) -include_directories(include) +include_directories(${CMAKE_SOURCE_DIR}) add_subdirectory(src) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..9cc25a3e --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,6 @@ + + + +# Contributing to TorchOpt + +Please refer to [torchopt.readthedocs.io/en/latest/developer/contributing.html](https://torchopt.readthedocs.io/en/latest/developer/contributing.html) diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..e38d6fa4 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,87 @@ +# Dockerfile for TorchOpt +# +# $ docker build --target base --tag torchopt:latest . +# +# or +# +# $ docker build --target devel --tag torchopt-devel:latest . +# + +ARG cuda_docker_tag="11.6.2-cudnn8-devel-ubuntu20.04" +FROM nvidia/cuda:"${cuda_docker_tag}" AS builder + +ENV DEBIAN_FRONTEND=noninteractive +SHELL ["/bin/bash", "-c"] + +# Install packages +RUN apt-get update && \ + apt-get install -y sudo ca-certificates openssl \ + git ssh build-essential gcc-10 g++-10 cmake make \ + python3.9-dev python3.9-venv graphviz && \ + rm -rf /var/lib/apt/lists/* + +ENV LANG C.UTF-8 +ENV CC=gcc-10 CXX=g++-10 + +# Add a new user +RUN useradd -m -s /bin/bash torchopt && \ + echo "torchopt ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers +USER torchopt +RUN echo "export PS1='[\[\e[1;33m\]\u\[\e[0m\]:\[\e[1;35m\]\w\[\e[0m\]]\$ '" >> ~/.bashrc + +# Setup virtual environment +RUN /usr/bin/python3.9 -m venv --upgrade-deps ~/venv && rm -rf ~/.pip/cache +RUN TORCH_INDEX_URL="https://download.pytorch.org/whl/cu$(echo "${CUDA_VERSION}" | cut -d'.' 
-f-2 | tr -d '.')" && \ + echo "export TORCH_INDEX_URL='${TORCH_INDEX_URL}'" >> ~/venv/bin/activate && \ + echo "source /home/torchopt/venv/bin/activate" >> ~/.bashrc + +# Install dependencies +WORKDIR /home/torchopt/TorchOpt +COPY --chown=torchopt requirements.txt requirements.txt +RUN source ~/venv/bin/activate && \ + python -m pip install --extra-index-url "${TORCH_INDEX_URL}" -r requirements.txt && \ + rm -rf ~/.pip/cache ~/.cache/pip + +#################################################################################################### + +FROM builder AS devel-builder + +# Install extra dependencies +RUN sudo apt-get update && \ + sudo apt-get install -y golang-1.16 clang-format clang-tidy && \ + sudo chown -R "$(whoami):$(whoami)" /usr/lib/go-1.16 && \ + sudo rm -rf /var/lib/apt/lists/* + +# Install addlicense +ENV GOPATH="/usr/lib/go-1.16" +ENV GOBIN="${GOPATH}/bin" +ENV GOROOT="${GOPATH}" +ENV PATH="${GOBIN}:${PATH}" +RUN go install github.com/google/addlicense@latest + +# Install extra PyPI dependencies +COPY --chown=torchopt tests/requirements.txt tests/requirements.txt +COPY --chown=torchopt tutorials/requirements.txt tutorials/requirements.txt +RUN source ~/venv/bin/activate && \ + python -m pip install --extra-index-url "${TORCH_INDEX_URL}" \ + -r tests/requirements.txt -r tutorials/requirements.txt && \ + rm -rf ~/.pip/cache ~/.cache/pip + +#################################################################################################### + +FROM builder AS base + +COPY --chown=torchopt . . + +# Install TorchOpt +RUN source ~/venv/bin/activate && \ + python -m pip install -e . && \ + rm -rf .eggs *.egg-info ~/.pip/cache ~/.cache/pip + +ENTRYPOINT [ "/bin/bash", "--login" ] + +#################################################################################################### + +FROM devel-builder AS devel + +COPY --from=base /home/torchopt/TorchOpt . diff --git a/Makefile b/Makefile index ebac34fd..f6de2a06 100644 --- a/Makefile +++ b/Makefile @@ -1,9 +1,9 @@ print-% : ; @echo $* = $($*) PROJECT_NAME = torchopt COPYRIGHT = "MetaOPT Team. All Rights Reserved." -PROJECT_PATH = ${PROJECT_NAME} +PROJECT_PATH = $(PROJECT_NAME) SHELL = /bin/bash -SOURCE_FOLDERS = $(PROJECT_PATH) examples include src tests +SOURCE_FOLDERS = $(PROJECT_PATH) examples include src tests docs PYTHON_FILES = $(shell find $(SOURCE_FOLDERS) -type f -name "*.py" -o -name "*.pyi") CXX_FILES = $(shell find $(SOURCE_FOLDERS) -type f -name "*.h" -o -name "*.cpp" -o -name "*.cuh" -o -name "*.cu") COMMIT_HASH = $(shell git log -1 --format=%h) @@ -16,27 +16,46 @@ default: install install: $(PYTHON) -m pip install . 
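+
+# Usage sketch (illustrative; ./dist/ is the default output directory of
+# `python -m build`, not something configured in this Makefile):
+#
+#   make install   # plain `pip install .` of the current checkout
+#   make build     # see the `build` target below: sdist + wheel into ./dist/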
+build: + $(PYTHON) -m pip install --upgrade pip + $(PYTHON) -m pip install --upgrade setuptools wheel build + $(PYTHON) -m build + # Tools Installation check_pip_install = $(PYTHON) -m pip show $(1) &>/dev/null || (cd && $(PYTHON) -m pip install $(1) --upgrade) check_pip_install_extra = $(PYTHON) -m pip show $(1) &>/dev/null || (cd && $(PYTHON) -m pip install $(2) --upgrade) +pylint-install: + $(call check_pip_install,pylint) + flake8-install: $(call check_pip_install,flake8) $(call check_pip_install_extra,bugbear,flake8_bugbear) py-format-install: $(call check_pip_install,isort) - $(call check_pip_install,yapf) + $(call check_pip_install,black) mypy-install: $(call check_pip_install,mypy) +pre-commit-install: + $(call check_pip_install,pre-commit) + $(PYTHON) -m pre_commit install --install-hooks + docs-install: $(call check_pip_install,pydocstyle) $(call check_pip_install,doc8) $(call check_pip_install,sphinx) - $(call check_pip_install,sphinx_rtd_theme) + $(call check_pip_install,sphinx-rtd-theme) + $(call check_pip_install,sphinx-autoapi) + $(call check_pip_install,sphinx-autobuild) + $(call check_pip_install,sphinx-copybutton) + $(call check_pip_install,sphinxcontrib-katex) + $(call check_pip_install,sphinxcontrib-bibtex) + $(call check_pip_install,sphinx-autodoc-typehints) + $(call check_pip_install,myst_nb) $(call check_pip_install_extra,sphinxcontrib.spelling,sphinxcontrib.spelling pyenchant) pytest-install: @@ -63,21 +82,27 @@ addlicense-install: go-install # Tests pytest: pytest-install - cd tests && $(PYTHON) -m pytest unit --cov ${PROJECT_PATH} --durations 0 -v --cov-report term-missing --color=yes + cd tests && $(PYTHON) -m pytest unit --cov $(PROJECT_PATH) --durations 0 -v --cov-report term-missing --color=yes test: pytest # Python linters +pylint: pylint-install + $(PYTHON) -m pylint $(PROJECT_PATH) + flake8: flake8-install $(PYTHON) -m flake8 $(PYTHON_FILES) --count --select=E9,F63,F7,F82,E225,E251 --show-source --statistics py-format: py-format-install $(PYTHON) -m isort --project torchopt --check $(PYTHON_FILES) && \ - $(PYTHON) -m yapf --in-place --recursive $(PYTHON_FILES) + $(PYTHON) -m black --safe -l 100 -t py37 -S --check $(PYTHON_FILES) mypy: mypy-install - $(PYTHON) -m mypy $(PROJECT_NAME) + $(PYTHON) -m mypy $(PROJECT_PATH) + +pre-commit: pre-commit-install + $(PYTHON) -m pre_commit run --all-files # C++ linters @@ -93,10 +118,10 @@ addlicense: addlicense-install addlicense -c $(COPYRIGHT) -l apache -y 2022 -check $(SOURCE_FOLDERS) docstyle: docs-install - $(PYTHON) -m pydocstyle $(PROJECT_NAME) && doc8 docs && make -C docs html SPHINXOPTS="-W" + $(PYTHON) -m pydocstyle $(PROJECT_PATH) && doc8 docs && make -C docs html SPHINXOPTS="-W" docs: docs-install - make -C docs html && cd _build/html && $(PYTHON) -m http.server + $(PYTHON) -m sphinx_autobuild --watch $(PROJECT_PATH) --open-browser docs/source docs/build spelling: docs-install make -C docs spelling SPHINXOPTS="-W" @@ -106,11 +131,11 @@ clean-docs: # Utility functions -lint: flake8 py-format mypy clang-format cpplint addlicense +lint: flake8 py-format mypy clang-format cpplint docstyle spelling format: py-format-install clang-format-install addlicense-install $(PYTHON) -m isort --project torchopt $(PYTHON_FILES) - $(PYTHON) -m yapf --in-place --recursive $(PYTHON_FILES) + $(PYTHON) -m black --safe -l 100 -t py37 -S $(PYTHON_FILES) clang-format -style=file -i $(CXX_FILES) addlicense -c $(COPYRIGHT) -l apache -y 2022 $(SOURCE_FOLDERS) @@ -124,3 +149,18 @@ clean-build: rm -rf *.egg-info .eggs clean: clean-py 
clean-build clean-docs
+
+# Build docker images
+
+docker-base:
+	docker build --target base --tag $(PROJECT_NAME):$(COMMIT_HASH) --file Dockerfile .
+	@echo Successfully built docker image with tag $(PROJECT_NAME):$(COMMIT_HASH)
+
+docker-devel:
+	docker build --target devel --tag $(PROJECT_NAME)-devel:$(COMMIT_HASH) --file Dockerfile .
+	@echo Successfully built docker image with tag $(PROJECT_NAME)-devel:$(COMMIT_HASH)
+
+docker: docker-base docker-devel
+
+docker-run-devel: docker-devel
+	docker run --network=host --gpus=all -v /:/host -h ubuntu -it $(PROJECT_NAME)-devel:$(COMMIT_HASH)
diff --git a/README.md b/README.md
index b15ed1b9..9db7e70a 100644
--- a/README.md
+++ b/README.md
@@ -227,11 +227,11 @@ We provide a [conda](https://github.com/conda/conda) environment recipe to insta
 git clone https://github.com/metaopt/TorchOpt.git
 cd TorchOpt
 
-# Use `CONDA_OVERRIDE_CUDA` if conda fails to detect the NVIDIA driver (e.g. WSL2 on Windows)
+# You may need `CONDA_OVERRIDE_CUDA` if conda fails to detect the NVIDIA driver (e.g. in docker or WSL2)
 CONDA_OVERRIDE_CUDA=11.7 conda env create --file conda-recipe.yaml
 
 conda activate torchopt
-pip3 install .
+pip3 install -e .
 ```
 
--------------------------------------------------------------------------------
diff --git a/conda-recipe.yaml b/conda-recipe.yaml
index 98e37eda..f7f0917a 100644
--- a/conda-recipe.yaml
+++ b/conda-recipe.yaml
@@ -22,6 +22,8 @@ dependencies:
   - pytorch::pytorch-mutex = *=*cuda*
   - pip:
     - functorch
+    - torchviz
+    - sphinxcontrib-katex  # for documentation
     - jax
     - jaxlib >= 0.3=*cuda*
     - optax
@@ -51,17 +53,24 @@ dependencies:
 
   # Documentation
   - sphinx
-  - sphinxcontrib-spelling
+  - sphinx_rtd_theme
   - sphinx-autobuild
   - sphinx-copybutton
-  - sphinx_rtd_theme
+  - sphinxcontrib-spelling
+  - sphinxcontrib-bibtex
+  - sphinx-autodoc-typehints
+  - myst-nb
+  - ipykernel
+  - pandoc
+  - docutils
 
   # Testing
   - pytest
   - pytest-cov
   - pytest-xdist
   - isort
-  - yapf
+  - conda-forge::black >= 22.6.0
+  - pylint
   - mypy
   - flake8
   - flake8-bugbear
@@ -71,3 +80,4 @@ dependencies:
   - clang-format
   - clang-tools  # clang-tidy
   - cpplint
+  - pre-commit
diff --git a/docker/dev.dockerfile b/docker/dev.dockerfile
deleted file mode 100644
index f8d26b24..00000000
--- a/docker/dev.dockerfile
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/bash
-
-CPU_PARENT=ubuntu:18.04
-GPU_PARENT=nvidia/cuda:10.1-cudnn7-runtime-ubuntu18.04
-
-TAG=metaopt/TorchOpt
-VERSION=$(shell git log -1 --format=%h)
-
-if [[ ${USE_GPU} == "True" ]]; then
-  PARENT=${GPU_PARENT}
-  PYTORCH_DEPS="cudatoolkit=10.1"
-else
-  PARENT=${CPU_PARENT}
-  PYTORCH_DEPS="cpuonly"
-  TAG="${TAG}-cpu"
-fi
-
-echo "docker build --build-arg PARENT_IMAGE=${PARENT} --build-arg PYTORCH_DEPS=${PYTORCH_DEPS} -t ${TAG}:${VERSION} ."
-docker build --build-arg PARENT_IMAGE=${PARENT} --build-arg PYTORCH_DEPS=${PYTORCH_DEPS} -t ${TAG}:${VERSION} .
-docker tag ${TAG}:${VERSION} ${TAG}:latest
-
-if [[ ${RELEASE} == "True" ]]; then
-  docker push ${TAG}:${VERSION}
-  docker push ${TAG}:latest
-fi
diff --git a/docs/Makefile b/docs/Makefile
index d4bb2cbb..d0c3cbf1 100644
--- a/docs/Makefile
+++ b/docs/Makefile
@@ -5,8 +5,8 @@
 # from the environment for the first two.
 SPHINXOPTS    ?=
 SPHINXBUILD   ?= sphinx-build
-SOURCEDIR     = .
-BUILDDIR      = _build
+SOURCEDIR     = source
+BUILDDIR      = build
 
 # Put it first so that "make" without argument is like "make help".
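# Usage sketch with the relocated layout (an illustration; the paths follow
# the SOURCEDIR and BUILDDIR values above): `make -C docs html` now reads
# docs/source/ and writes the rendered pages to docs/build/html/.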
 help:
diff --git a/docs/_static/js/copybutton.js b/docs/_static/js/copybutton.js
deleted file mode 100644
index f5960d26..00000000
--- a/docs/_static/js/copybutton.js
+++ /dev/null
@@ -1,64 +0,0 @@
-$(document).ready(function() {
-    /* Add a [>>>] button on the top-right corner of code samples to hide
-     * the >>> and ... prompts and the output and thus make the code
-     * copyable. */
-    var div = $('.highlight-python .highlight,' +
-                '.highlight-python3 .highlight,' +
-                '.highlight-pycon .highlight,' +
-                '.highlight-default .highlight');
-    var pre = div.find('pre');
-
-    // get the styles from the current theme
-    pre.parent().parent().css('position', 'relative');
-    var hide_text = 'Hide the prompts and output';
-    var show_text = 'Show the prompts and output';
-    var border_width = pre.css('border-top-width');
-    var border_style = pre.css('border-top-style');
-    var border_color = pre.css('border-top-color');
-    var button_styles = {
-        'cursor':'pointer', 'position': 'absolute', 'top': '0', 'right': '0',
-        'border-color': border_color, 'border-style': border_style,
-        'border-width': border_width, 'color': border_color, 'text-size': '75%',
-        'font-family': 'monospace', 'padding-left': '0.2em', 'padding-right': '0.2em',
-        'border-radius': '0 3px 0 0'
-    }
-
-    // create and add the button to all the code blocks that contain >>>
-    div.each(function(index) {
-        var jthis = $(this);
-        if (jthis.find('.gp').length > 0) {
-            var button = $('<span class="copybutton">&gt;&gt;&gt;</span>');
-            button.css(button_styles)
-            button.attr('title', hide_text);
-            button.data('hidden', 'false');
-            jthis.prepend(button);
-        }
-        // tracebacks (.gt) contain bare text elements that need to be
-        // wrapped in a span to work with .nextUntil() (see later)
-        jthis.find('pre:has(.gt)').contents().filter(function() {
-            return ((this.nodeType == 3) && (this.data.trim().length > 0));
-        }).wrap('<span>');
-    });
-
-    // define the behavior of the button when it's clicked
-    $('.copybutton').click(function(e){
-        e.preventDefault();
-        var button = $(this);
-        if (button.data('hidden') === 'false') {
-            // hide the code output
-            button.parent().find('.go, .gp, .gt').hide();
-            button.next('pre').find('.gt').nextUntil('.gp, .go').css('visibility', 'hidden');
-            button.css('text-decoration', 'line-through');
-            button.attr('title', show_text);
-            button.data('hidden', 'true');
-        } else {
-            // show the code output
-            button.parent().find('.go, .gp, .gt').show();
-            button.next('pre').find('.gt').nextUntil('.gp, .go').css('visibility', 'visible');
-            button.css('text-decoration', 'none');
-            button.attr('title', hide_text);
-            button.data('hidden', 'false');
-        }
-    });
-});
-
diff --git a/docs/conf.py b/docs/conf.py
deleted file mode 100644
index 101ac54d..00000000
--- a/docs/conf.py
+++ /dev/null
@@ -1,95 +0,0 @@
-# Configuration file for the Sphinx documentation builder.
-#
-# This file only contains a selection of the most common options. For a full
-# list see the documentation:
-# https://www.sphinx-doc.org/en/master/usage/configuration.html
-
-# -- Path setup --------------------------------------------------------------
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-# -# import os -# import sys -# sys.path.insert(0, os.path.abspath('.')) - -import pathlib -import sys - -import sphinx_rtd_theme - - -HERE = pathlib.Path(__file__).absolute().parent -PROJECT_ROOT = HERE.parent - - -def get_version() -> str: - sys.path.insert(0, str(PROJECT_ROOT / 'torchopt')) - import version # noqa - return version.__version__ - - -# -- Project information ----------------------------------------------------- - -project = "TorchOpt" -copyright = "2022 MetaOPT Team" -author = "TorchOpt Contributors" - -# The full version, including alpha/beta/rc tags -release = get_version() - -# -- General configuration --------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", -] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] -source_suffix = [".rst"] - -# The root document. -root_doc = "index" - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. -exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] -spelling_exclude_patterns = [""] - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = "sphinx_rtd_theme" -html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -html_logo = "_static/images/logo.png" - - -def setup(app): - app.add_js_file("js/copybutton.js") - app.add_css_file("css/style.css") - - -# -- Extension configuration ------------------------------------------------- - -# -- Options for intersphinx extension --------------------------------------- - -# Example configuration for intersphinx: refer to the Python standard library. -# intersphinx_mapping = {'https://docs.python.org/3/': None} - -# -- Options for todo extension ---------------------------------------------- - -# If true, `todo` and `todoList` produce output, else they produce nothing. -# todo_include_todos = False diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 00000000..61b877af --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,21 @@ +--extra-index-url https://download.pytorch.org/whl/cu116 +torch == 1.12 +torchvision +functorch + +--requirement ../requirements.txt + +sphinx >= 5.0 +sphinx-autoapi +sphinx-autobuild +sphinx-copybutton +sphinx-rtd-theme +sphinxcontrib-katex +sphinxcontrib-bibtex +sphinx-autodoc-typehints +IPython +ipykernel +pandoc +myst_nb +docutils +matplotlib diff --git a/docs/_static/css/style.css b/docs/source/_static/css/style.css similarity index 61% rename from docs/_static/css/style.css rename to docs/source/_static/css/style.css index b37cead2..df73d696 100644 --- a/docs/_static/css/style.css +++ b/docs/source/_static/css/style.css @@ -1,10 +1,34 @@ +/** + * Copyright 2022 MetaOPT Team. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 body {
-  font-family: "Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;
+  font-family: "Lato", "proxima-nova", "Helvetica Neue", Arial, sans-serif;
 }
 
 /* Default header fonts are ugly */
-h1, h2, .rst-content .toctree-wrapper p.caption, h3, h4, h5, h6, legend, p.caption {
-  font-family: "Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;
+h1,
+h2,
+.rst-content .toctree-wrapper p.caption,
+h3,
+h4,
+h5,
+h6,
+legend,
+p.caption {
+  font-family: "Lato", "proxima-nova", "Helvetica Neue", Arial, sans-serif;
 }
 
 /* Use white for docs background */
@@ -16,7 +40,8 @@ h1, h2, .rst-content .toctree-wrapper p.caption, h3, h4, h5, h6, legend, p.capti
   max-width: 1200px !important;
 }
 
-.wy-nav-content-wrap, .wy-menu li.current > a {
+.wy-nav-content-wrap,
+.wy-menu li.current>a {
   background-color: #fff;
 }
 
@@ -45,7 +70,7 @@
   color: #333;
 }
 
-.wy-nav-top > a {
+.wy-nav-top>a {
   display: none;
 }
 
@@ -66,12 +91,18 @@
 }
 
 /* Use our red for literals (it's very similar to the original color) */
-.rst-content tt.literal, .rst-content tt.literal, .rst-content code.literal {
+.rst-content tt.literal,
+.rst-content tt.literal,
+.rst-content code.literal {
   color: #4692BC;
 }
 
-.rst-content tt.xref, a .rst-content tt, .rst-content tt.xref,
-.rst-content code.xref, a .rst-content tt, a .rst-content code {
+.rst-content tt.xref,
+a .rst-content tt,
+.rst-content tt.xref,
+.rst-content code.xref,
+a .rst-content tt,
+a .rst-content code {
   color: #404040;
 }
 
@@ -104,7 +135,8 @@ footer {
 }
 
 footer .rst-footer-buttons {
-  font-size: 125%; /* revert footer settings - 1/80% = 125% */
+  font-size: 125%;
+  /* revert footer settings - 1/80% = 125% */
 }
 
 footer p {
diff --git a/docs/_static/images/logod-07.png b/docs/source/_static/images/logo-large.png
similarity index 100%
rename from docs/_static/images/logod-07.png
rename to docs/source/_static/images/logo-large.png
diff --git a/docs/_static/images/logo-torchopt.pdf b/docs/source/_static/images/logo-torchopt.pdf
similarity index 100%
rename from docs/_static/images/logo-torchopt.pdf
rename to docs/source/_static/images/logo-torchopt.pdf
diff --git a/docs/_static/images/logod-05.png b/docs/source/_static/images/logo.png
similarity index 100%
rename from docs/_static/images/logod-05.png
rename to docs/source/_static/images/logo.png
diff --git a/docs/source/_static/images/maml-accs.png b/docs/source/_static/images/maml-accs.png
new file mode 100644
index 0000000000000000000000000000000000000000..a3a0f4cea0c71bd1ad42c434f136ffc6e5d30fda
Binary files /dev/null and b/docs/source/_static/images/maml-accs.png differ
zibUAqAZo+SS15Z3yeo4~7#y_E#V=Phq;a>euTL@8xzs45Mu4$S^l})+!;lw|9wk%^ z$?$O?t9orcX_H-Sy;2rs%UEX$Vh7nT=%B`LoO(i`Dq%^{J>Fr7(<}Z|qVbV4$kd0MhXY#1> z$0Ab9K>-Mp9vmy>*~{+{L+h%f?L|89gyHV#H!7jC|D2`YMmctQ&8Tsa^_;-<9~|OB zf@+x7k`03YazFEf3Pg>(?Y%G7wWrT!*T#6fe>A>-gI0U~ de(m8=ev_y(Q^j8z!e0;Wf2)eZZFyby{{w&koWTG9 literal 0 HcmV?d00001 diff --git a/docs/source/api/api.rst b/docs/source/api/api.rst new file mode 100644 index 00000000..44da5b93 --- /dev/null +++ b/docs/source/api/api.rst @@ -0,0 +1,224 @@ +TorchOpt Optimizer +================== + +.. currentmodule:: torchopt + +.. autosummary:: + + Optimizer + MetaOptimizer + +Optimizer +~~~~~~~~~ + +.. autoclass:: Optimizer + :members: + +MetaOptimizer +~~~~~~~~~~~~~ + +.. autoclass:: MetaOptimizer + :members: + +------ + +Functional Optimizers +===================== + +.. currentmodule:: torchopt + +.. autosummary:: + + adam + sgd + rmsprop + +Functional Adam Optimizer +~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. autofunction:: adam + +Functional SGD Optimizer +~~~~~~~~~~~~~~~~~~~~~~~~ + +.. autofunction:: sgd + +Functional RMSProp Optimizer +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. autofunction:: rmsprop + +------ + +Classic Optimizers +================== + +.. currentmodule:: torchopt + +.. autosummary:: + + Adam + SGD + RMSProp + +Classic Adam Optimizer +~~~~~~~~~~~~~~~~~~~~~~ + +.. autoclass:: Adam + +Classic SGD Optimizer +~~~~~~~~~~~~~~~~~~~~~ + +.. autoclass:: SGD + +Classic RMSProp Optimizer +~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. autoclass:: RMSProp + +------ + +Differentiable Meta-Optimizers +============================== + +.. currentmodule:: torchopt + +.. autosummary:: + + MetaAdam + MetaSGD + MetaRMSProp + +Differentiable Meta-Adam Optimizer +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. autoclass:: MetaAdam + +Differentiable Meta-SGD Optimizer +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. autoclass:: MetaSGD + +Differentiable Meta-RMSProp Optimizer +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. autoclass:: MetaRMSProp + +------ + +Optimizer Hooks +=============== + +.. currentmodule:: torchopt._src.hook + +.. autosummary:: + + register_hook + zero_nan_hook + +Hook +~~~~ + +.. autofunction:: register_hook +.. autofunction:: zero_nan_hook + +Gradient Transformation +======================= + +.. currentmodule:: torchopt._src.clip + +.. autosummary:: + + clip_grad_norm + +Transforms +~~~~~~~~~~ + +.. autofunction:: clip_grad_norm + +Optimizer Schedules +=================== + +.. currentmodule:: torchopt._src.schedule + +.. autosummary:: + + linear_schedule + polynomial_schedule + +Schedules +~~~~~~~~~ + +.. autofunction:: linear_schedule +.. autofunction:: polynomial_schedule + + +Apply Parameter Updates +======================= + +.. currentmodule:: torchopt + +.. autosummary:: + + apply_updates + +Apply Updates +~~~~~~~~~~~~~ + +.. autofunction:: apply_updates + +Combining Optimizers +==================== + +.. currentmodule:: torchopt._src.combine + +.. autosummary:: + + chain + +Chain +~~~~~ + +.. autofunction:: chain + + +General Utilities +================= + +.. currentmodule:: torchopt + +.. autosummary:: + + extract_state_dict + recover_state_dict + stop_gradient + +Extract State Dict +~~~~~~~~~~~~~~~~~~ + +.. autofunction:: extract_state_dict + +Recover State Dict +~~~~~~~~~~~~~~~~~~ + +.. autofunction:: recover_state_dict + +Stop Gradient +~~~~~~~~~~~~~ + +.. autofunction:: stop_gradient + + +Visualizing Gradient Flow +========================= + +.. currentmodule:: torchopt._src.visual + +.. autosummary:: + + make_dot + +Make Dot +~~~~~~~~ + +.. 
autofunction:: make_dot
diff --git a/docs/source/bibtex.json b/docs/source/bibtex.json
new file mode 100644
index 00000000..c2aa9165
--- /dev/null
+++ b/docs/source/bibtex.json
@@ -0,0 +1,7 @@
+{
+    "cited": {
+        "examples/MAML": [
+            "MAML"
+        ]
+    }
+}
diff --git a/docs/source/conf.py b/docs/source/conf.py
new file mode 100644
index 00000000..da11e3b7
--- /dev/null
+++ b/docs/source/conf.py
@@ -0,0 +1,210 @@
+# Copyright 2022 MetaOPT Team. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Configuration file for the Sphinx documentation builder."""
+# This file only contains a selection of the most common options. For a full
+# list see the documentation:
+# https://www.sphinx-doc.org/en/master/usage/configuration.html
+
+# pylint: disable=all
+
+# -- Path setup --------------------------------------------------------------
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#
+import inspect
+import os
+import pathlib
+import sys
+
+import sphinxcontrib.katex as katex
+
+import torchopt
+
+
+HERE = pathlib.Path(__file__).absolute().parent
+PROJECT_ROOT = HERE.parent.parent
+
+
+def get_version() -> str:
+    sys.path.insert(0, str(PROJECT_ROOT / 'torchopt'))
+    import version  # noqa
+
+    return version.__version__
+
+
+# -- Project information -----------------------------------------------------
+
+project = 'TorchOpt'
+copyright = '2022 MetaOPT Team'
+author = 'TorchOpt Contributors'
+
+# The full version, including alpha/beta/rc tags
+release = get_version()
+
+# -- General configuration ---------------------------------------------------
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+    'sphinx.ext.napoleon',
+    'sphinx.ext.autodoc',
+    'sphinx.ext.autosummary',
+    'sphinx.ext.doctest',
+    'sphinx.ext.intersphinx',
+    'sphinx.ext.todo',
+    'sphinx.ext.coverage',
+    'sphinx.ext.mathjax',
+    'sphinx.ext.ifconfig',
+    'sphinx.ext.viewcode',
+    'sphinx.ext.githubpages',
+    'sphinx.ext.extlinks',
+    'sphinx_copybutton',
+    'sphinx_rtd_theme',
+    'sphinxcontrib.bibtex',
+    'sphinxcontrib.spelling',
+    'sphinxcontrib.katex',
+    'sphinx_autodoc_typehints',
+    'myst_nb',  # This is used for the .ipynb notebooks
+]
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffixes as a list of strings:
+#
+# source_suffix = ['.rst', '.md']
+source_suffix = '.rst'
+
+# The master toctree document.
+master_doc = 'index'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = 'en' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ['_build', 'build', 'Thumbs.db', '.DS_Store'] +spelling_exclude_patterns = [''] +spelling_word_list_filename = ['spelling_wordlist.txt'] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'default' + +# -- Options for autodoc ----------------------------------------------------- + +autodoc_default_options = { + 'member-order': 'bysource', + 'undoc-members': True, + 'special-members': True, + 'show-inheritance': True, + 'exclude-members': '__module__, __dict__, __repr__, __str__, __weakref__', +} +autoclass_content = 'both' + +# -- Options for bibtex ----------------------------------------------------- + +bibtex_bibfiles = ['references.bib'] + +# -- Options for myst ------------------------------------------------------- + +nb_execution_mode = 'force' +nb_execution_allow_errors = False + +# -- Options for katex ------------------------------------------------------ + +# See: https://sphinxcontrib-katex.readthedocs.io/en/0.4.1/macros.html +latex_macros = r""" + \def \d #1{\operatorname{#1}} +""" + +# Translate LaTeX macros to KaTeX and add to options for HTML builder +katex_macros = katex.latex_defs_to_katex_macros(latex_macros) +katex_options = 'macros: {' + katex_macros + '}' + +# Add LaTeX macros for LATEX builder +latex_elements = {'preamble': latex_macros} + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'sphinx_rtd_theme' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +# html_theme_options = {} + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] +html_css_files = ['style.css'] +html_logo = '_static/images/logo.png' + + +def setup(app): + app.add_js_file('https://cdn.jsdelivr.net/npm/vega@5.20.2') + app.add_js_file('https://cdn.jsdelivr.net/npm/vega-lite@5.1.0') + app.add_js_file('https://cdn.jsdelivr.net/npm/vega-embed@6.17.0') + + app.add_css_file('css/style.css') + + +# Custom sidebar templates, must be a dictionary that maps document names +# to template names. +# +# The default sidebars (for documents that don't match any pattern) are +# defined by theme itself. Builtin themes are using these templates by +# default: ``['localtoc.html', 'relations.html', 'sourcelink.html', +# 'searchbox.html']``. 
+#
+# html_sidebars = {}
+
+# -- Source code links -------------------------------------------------------
+
+extlinks = {
+    'gitcode': ('https://github.com/metaopt/TorchOpt/blob/HEAD/%s', '%s'),
+    'issue': ('https://github.com/metaopt/TorchOpt/issues/%s', 'issue %s'),
+}
+
+# -- Extension configuration -------------------------------------------------
+
+# -- Options for napoleon extension ------------------------------------------
+
+napoleon_include_init_with_doc = True
+napoleon_include_private_with_doc = False
+napoleon_include_special_with_doc = True
+
+# -- Options for intersphinx extension ---------------------------------------
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {'python': ('https://docs.python.org/3', None)}
+
+# -- Options for todo extension ----------------------------------------------
+
+# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = True
diff --git a/docs/source/developer/contributing.rst b/docs/source/developer/contributing.rst
new file mode 100644
index 00000000..656736a3
--- /dev/null
+++ b/docs/source/developer/contributing.rst
@@ -0,0 +1,72 @@
+Contributing to TorchOpt
+========================
+
+
+Install Develop Version
+-----------------------
+
+To install TorchOpt in editable mode, run
+
+.. code-block:: bash
+
+    pip install -e .
+
+in the main directory. This installation can be removed by
+
+.. code-block:: bash
+
+    python setup.py develop --uninstall
+
+
+Lint Check
+----------
+
+We use several tools to ensure code quality, including:
+
+ * PEP8 code style: ``black``, ``isort``, ``pylint``, ``flake8``
+ * Type hint check: ``mypy``
+ * C++ Google-style: ``cpplint``, ``clang-format``
+ * License: ``addlicense``
+ * Documentation: ``pydocstyle``, ``doc8``
+
+To make things easier, we provide several shortcuts, listed below.
+
+To automatically format the code, run:
+
+.. code-block:: bash
+
+    make format
+
+To check if everything conforms to the specification, run:
+
+.. code-block:: bash
+
+    make lint
+
+
+Test Locally
+------------
+
+The following command runs the automated tests in the main directory:
+
+.. code-block:: bash
+
+    $ make test
+
+
+Documentation
+-------------
+
+Documentation is written under the :gitcode:`docs/source` directory as reStructuredText (``.rst``) files. ``index.rst`` is the main page. A tutorial on reStructuredText can be found `here `_.
+
+API references are automatically generated by `Sphinx `_ according to the outlines under the :gitcode:`docs/source/api` directory, and should be updated whenever the code changes.
+
+To compile the documentation into web pages, run
+
+.. code-block:: bash
+
+    $ make docs
+
+The generated pages are located under the ``docs/build`` directory, and a browser window is opened automatically after the build finishes.
+
+Detailed documentation is hosted online at https://torchopt.readthedocs.io.
diff --git a/docs/source/developer/contributor.rst b/docs/source/developer/contributor.rst
new file mode 100644
index 00000000..e47a7c12
--- /dev/null
+++ b/docs/source/developer/contributor.rst
@@ -0,0 +1,7 @@
+Contributor
+===========
+
+We always welcome contributions to help make TorchOpt better. Below is an incomplete list of our contributors (find more on `this page `_).
+
+* Xuehai Pan (`XuehaiPan `_)
+* Yao Fu (`future-xy `_)
diff --git a/docs/source/examples/MAML.rst b/docs/source/examples/MAML.rst
new file mode 100644
index 00000000..9b38feb7
--- /dev/null
+++ b/docs/source/examples/MAML.rst
@@ -0,0 +1,277 @@
+Model-Agnostic Meta-Learning
+============================
+
+Meta-learning has achieved significant success in various applications, and
+**Model-Agnostic Meta-Learning** (MAML) :cite:`MAML` is the pioneering approach.
+In this tutorial, we show step by step how to train MAML on few-shot Omniglot classification with TorchOpt.
+The full script is at :gitcode:`examples/few-shot/maml_omniglot.py`.
+
+In contrast to existing differentiable optimizer libraries such as `higher `_, which follow PyTorch's stateful design and therefore expose a less flexible API, TorchOpt provides an easy way to construct differentiable optimizers directly at the code level.
+
+
+Overview
+--------
+
+There are six steps in the MAML training pipeline:
+
+1. Load Dataset: load the Omniglot dataset;
+2. Build the Network: build the neural network architecture of the model;
+3. Train: meta-train;
+4. Test: meta-test;
+5. Plot: plot the results;
+6. Pipeline: combine steps 3 to 5 together.
+
+
+In the following sections, we load the dataset, build the neural network, define the train and test loops, and plot the results, which together make up the full MAML training and evaluation pipeline.
+
+
+Load Dataset
+------------
+
+In your Python code, simply import ``torch`` and load the dataset. The full data-loading code is at :gitcode:`examples/few-shot/support/omniglot_loaders.py`:
+
+.. code-block:: python
+
+    from .support.omniglot_loaders import OmniglotNShot
+    import torch
+
+    device = torch.device('cuda:0')
+    db = OmniglotNShot(
+        '/tmp/omniglot-data',
+        batchsz=args.task_num,
+        n_way=args.n_way,
+        k_shot=args.k_spt,
+        k_query=args.k_qry,
+        imgsz=28,
+        rng=rng,
+        device=device,
+    )
+
+The goal is to train a model for few-shot Omniglot classification.
+
+Build the Network
+-----------------
+
+TorchOpt supports any user-defined PyTorch networks. Here is an example:
+
+.. code-block:: python
+
+    import torch
+    from torch import nn
+    import torch.optim as optim
+
+    net = nn.Sequential(
+        nn.Conv2d(1, 64, 3),
+        nn.BatchNorm2d(64, momentum=1., affine=True),
+        nn.ReLU(inplace=False),
+        nn.MaxPool2d(2, 2),
+        nn.Conv2d(64, 64, 3),
+        nn.BatchNorm2d(64, momentum=1., affine=True),
+        nn.ReLU(inplace=False),
+        nn.MaxPool2d(2, 2),
+        nn.Conv2d(64, 64, 3),
+        nn.BatchNorm2d(64, momentum=1., affine=True),
+        nn.ReLU(inplace=False),
+        nn.MaxPool2d(2, 2),
+        nn.Flatten(),
+        nn.Linear(64, args.n_way),
+    ).to(device)
+
+    # We will use Adam to (meta-)optimize the initial parameters
+    # to be adapted.
+    meta_opt = optim.Adam(net.parameters(), lr=1e-3)
+
+Train
+-----
+
+Define the ``train`` function:
+
+.. code-block:: python
+
+    def train(db, net, meta_opt, epoch, log):
+        net.train()
+        n_train_iter = db.x_train.shape[0] // db.batchsz
+        inner_opt = torchopt.MetaSGD(net, lr=1e-1)
+
+        for batch_idx in range(n_train_iter):
+            start_time = time.time()
+            # Sample a batch of support and query images and labels.
+            x_spt, y_spt, x_qry, y_qry = db.next()
+
+            task_num, setsz, c_, h, w = x_spt.size()
+            querysz = x_qry.size(1)
+
+            # TODO: Maybe pull this out into a separate module so it
+            # doesn't have to be duplicated between `train` and `test`?
+
+            # Initialize the inner optimizer to adapt the parameters to
+            # the support set.
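+            # Note: TorchOpt's MetaSGD performs *differentiable* parameter
+            # updates, so the meta-gradient can later flow back through the
+            # whole inner loop when qry_loss.backward() is called below.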
+            n_inner_iter = 5
+
+            qry_losses = []
+            qry_accs = []
+            meta_opt.zero_grad()
+
+            net_state_dict = torchopt.extract_state_dict(net)
+            optim_state_dict = torchopt.extract_state_dict(inner_opt)
+            for i in range(task_num):
+                # Optimize the likelihood of the support set by taking
+                # gradient steps w.r.t. the model's parameters.
+                # This adapts the model's meta-parameters to the task.
+                # TorchOpt keeps these updates differentiable, so no manual
+                # copies of the network's parameters are needed.
+                for _ in range(n_inner_iter):
+                    spt_logits = net(x_spt[i])
+                    spt_loss = F.cross_entropy(spt_logits, y_spt[i])
+                    inner_opt.step(spt_loss)
+
+                # The final set of adapted parameters will induce some
+                # final loss and accuracy on the query dataset.
+                # These will be used to update the model's meta-parameters.
+                qry_logits = net(x_qry[i])
+                qry_loss = F.cross_entropy(qry_logits, y_qry[i])
+                qry_losses.append(qry_loss.detach())
+                qry_acc = (qry_logits.argmax(dim=1) == y_qry[i]).sum().item() / querysz
+                qry_accs.append(qry_acc)
+
+                # Update the model's meta-parameters to optimize the query
+                # losses across all of the tasks sampled in this batch.
+                # This unrolls through the gradient steps.
+                qry_loss.backward()
+
+                torchopt.recover_state_dict(net, net_state_dict)
+                torchopt.recover_state_dict(inner_opt, optim_state_dict)
+
+            meta_opt.step()
+            qry_losses = sum(qry_losses) / task_num
+            qry_accs = 100. * sum(qry_accs) / task_num
+            i = epoch + float(batch_idx) / n_train_iter
+            iter_time = time.time() - start_time
+
+            print(
+                f'[Epoch {i:.2f}] Train Loss: {qry_losses:.2f} | Acc: {qry_accs:.2f} | Time: {iter_time:.2f}'
+            )
+
+            log.append(
+                {
+                    'epoch': i,
+                    'loss': qry_losses,
+                    'acc': qry_accs,
+                    'mode': 'train',
+                    'time': time.time(),
+                }
+            )
+
+Test
+----
+
+Define the ``test`` function:
+
+.. code-block:: python
+
+    def test(db, net, epoch, log):
+        # Crucially, in our testing procedure here we do *not* fine-tune
+        # the model during testing, for simplicity.
+        # Most research papers using MAML for this task do an extra
+        # stage of fine-tuning here that should be added if you are
+        # adapting this code for research.
+        net.train()
+        n_test_iter = db.x_test.shape[0] // db.batchsz
+        inner_opt = torchopt.MetaSGD(net, lr=1e-1)
+
+        qry_losses = []
+        qry_accs = []
+
+        for batch_idx in range(n_test_iter):
+            x_spt, y_spt, x_qry, y_qry = db.next('test')
+
+            task_num, setsz, c_, h, w = x_spt.size()
+            querysz = x_qry.size(1)
+
+            # TODO: Maybe pull this out into a separate module so it
+            # doesn't have to be duplicated between `train` and `test`?
+            n_inner_iter = 5
+
+            net_state_dict = torchopt.extract_state_dict(net)
+            optim_state_dict = torchopt.extract_state_dict(inner_opt)
+            for i in range(task_num):
+                # Optimize the likelihood of the support set by taking
+                # gradient steps w.r.t. the model's parameters.
+                # This adapts the model's meta-parameters to the task.
+                for _ in range(n_inner_iter):
+                    spt_logits = net(x_spt[i])
+                    spt_loss = F.cross_entropy(spt_logits, y_spt[i])
+                    inner_opt.step(spt_loss)
+
+                # The query loss and acc induced by these parameters.
+                qry_logits = net(x_qry[i]).detach()
+                qry_loss = F.cross_entropy(qry_logits, y_qry[i], reduction='none')
+                qry_losses.append(qry_loss.detach())
+                qry_accs.append((qry_logits.argmax(dim=1) == y_qry[i]).detach())
+
+            torchopt.recover_state_dict(net, net_state_dict)
+            torchopt.recover_state_dict(inner_opt, optim_state_dict)
+
+        qry_losses = torch.cat(qry_losses).mean().item()
+        qry_accs = 100.
* torch.cat(qry_accs).float().mean().item()
+        print(f'[Epoch {epoch+1:.2f}] Test Loss: {qry_losses:.2f} | Acc: {qry_accs:.2f}')
+        log.append(
+            {
+                'epoch': epoch + 1,
+                'loss': qry_losses,
+                'acc': qry_accs,
+                'mode': 'test',
+                'time': time.time(),
+            }
+        )
+
+Plot
+----
+
+Finally, we define a helper function that plots the training and test accuracy curves collected in ``log``:
+
+.. code-block:: python
+
+    def plot(log):
+        # Generally you should pull your plotting code out of your training
+        # script but we are doing it here for brevity.
+        df = pd.DataFrame(log)
+
+        fig, ax = plt.subplots(figsize=(6, 4))
+        train_df = df[df['mode'] == 'train']
+        test_df = df[df['mode'] == 'test']
+        ax.plot(train_df['epoch'], train_df['acc'], label='Train')
+        ax.plot(test_df['epoch'], test_df['acc'], label='Test')
+        ax.set_xlabel('Epoch')
+        ax.set_ylabel('Accuracy')
+        ax.set_ylim(70, 100)
+        fig.legend(ncol=2, loc='lower right')
+        fig.tight_layout()
+        fname = 'maml-accs.png'
+        print(f'--- Plotting accuracy to {fname}')
+        fig.savefig(fname)
+        plt.close(fig)
+
+
+Pipeline
+--------
+
+We can now combine all the components together and run the full pipeline:
+
+.. code-block:: python
+
+    log = []
+    for epoch in range(10):
+        train(db, net, meta_opt, epoch, log)
+        test(db, net, epoch, log)
+        plot(log)
+
+.. image:: /_static/images/maml-accs.png
+    :align: center
+    :height: 300
+
+
+.. rubric:: References
+
+.. bibliography:: /references.bib
+    :style: unsrtalpha
diff --git a/docs/index.rst b/docs/source/index.rst
similarity index 57%
rename from docs/index.rst
rename to docs/source/index.rst
index 23fb3250..50ccb0fb 100644
--- a/docs/index.rst
+++ b/docs/source/index.rst
@@ -1,4 +1,4 @@
-:github_url: https://github.com/metaopt/TorchOpt/tree/main/docs
+:github_url: https://github.com/metaopt/TorchOpt/tree/HEAD/docs
 
 TorchOpt
 --------
@@ -6,7 +6,7 @@ TorchOpt
 **TorchOpt** is a high-performance optimizer library built upon `PyTorch `_ for easy implementation of functional optimization and gradient-based meta-learning. It consists of two main features:
 
 * TorchOpt provides functional optimizer which enables `JAX-like `_ composable functional optimizer for PyTorch. With TorchOpt, one can easily conduct neural network optimization in PyTorch with functional style optimizer, similar to `Optax `_ in JAX.
-* With the desgin of functional programing, TorchOpt provides efficient, flexible, and easy-to-implement differentiable optimizer for gradient-based meta-learning research. It largely reduces the efforts required to implement sophisticated meta-learning algorithms.
+* With the design of functional programming, TorchOpt provides efficient, flexible, and easy-to-implement differentiable optimizer for gradient-based meta-learning research. It largely reduces the efforts required to implement sophisticated meta-learning algorithms.
 
 Installation
 ------------
@@ -35,16 +35,50 @@ We provide a `conda `_ environment recipe to ins
 
     git clone https://github.com/metaopt/TorchOpt.git
     cd TorchOpt
-    # Use `CONDA_OVERRIDE_CUDA` if conda fails to detect the NVIDIA driver (e.g. WSL2 on Windows)
+    # You may need `CONDA_OVERRIDE_CUDA` if conda fails to detect the NVIDIA driver (e.g. in docker or WSL2)
     CONDA_OVERRIDE_CUDA=11.7 conda env create --file conda-recipe.yaml
 
    conda activate torchopt
-    pip3 install .
+    pip3 install -e .
+
+
+.. toctree::
+    :caption: Getting Started
+    :maxdepth: 1
+
+    torchopt101/torchopt-101.rst
+
+
+..
toctree:: + :caption: Examples + :maxdepth: 1 + + examples/MAML.rst + + +.. toctree:: + :caption: Developer Documentation + :maxdepth: 1 + + developer/contributing.rst + developer/contributor.rst + +.. toctree:: + :caption: API Documentation + :maxdepth: 2 + + api/api.rst The Team -------- -TorchOpt is a work by Jie Ren, Xidong Feng, Bo Liu, `Luo Mai `_ and `Yaodong Yang `_. +TorchOpt is a work by + +* Jie Ren (`JieRen98 `_) +* Xidong Feng (`waterhorse1 `_) +* Bo Liu (`Benjamin-eecs `_) +* Luo Mai (`luomai `_) +* Yaodong Yang (`PKU-YYang `_). Support ------- diff --git a/docs/source/references.bib b/docs/source/references.bib new file mode 100644 index 00000000..9e7910f3 --- /dev/null +++ b/docs/source/references.bib @@ -0,0 +1,19 @@ +@inproceedings{MAML, + author = {Chelsea Finn and + Pieter Abbeel and + Sergey Levine}, + editor = {Doina Precup and + Yee Whye Teh}, + title = {Model-Agnostic Meta-Learning for Fast Adaptation of Deep Networks}, + booktitle = {Proceedings of the 34th International Conference on Machine Learning, + {ICML} 2017, Sydney, NSW, Australia, 6-11 August 2017}, + series = {Proceedings of Machine Learning Research}, + volume = {70}, + pages = {1126--1135}, + publisher = {{PMLR}}, + year = {2017}, + url = {http://proceedings.mlr.press/v70/finn17a.html}, + timestamp = {Thu, 21 Jan 2021 17:37:24 +0100}, + biburl = {https://dblp.org/rec/conf/icml/FinnAL17.bib}, + bibsource = {dblp computer science bibliography, https://dblp.org} +} diff --git a/docs/source/spelling_wordlist.txt b/docs/source/spelling_wordlist.txt new file mode 100644 index 00000000..24ee9124 --- /dev/null +++ b/docs/source/spelling_wordlist.txt @@ -0,0 +1,69 @@ +TorchOpt +torchopt +Kingma +Sutskever +Pieter +Abbeel +Sergey +Doina +Precup +Tieleman +Yee +Whye +Teh +Jie +Ren +Xidong +Feng +Bo +Liu +Luo +Mai +Yaodong +Yang +Xuehai +Pan +Yao +Fu +Jupyter +Colaboratory +Omniglot +differentiable +Dataset +dataset +Optimizers +optimizers +lr +eps +nesterov +et +al +rescaling +rescale +composable +momentums +addlicense +webpage +Omniglot +differentiable +toolchain +init +fn +inplace +impl +params +iterable +nan +param +Graphviz +autograd +attrs +GradientTransformations +args +chainable +adam +Adam +rmsprop +RMSProp +sgd +SGD diff --git a/docs/source/torchopt101/torchopt-101.rst b/docs/source/torchopt101/torchopt-101.rst new file mode 100644 index 00000000..87bffd4c --- /dev/null +++ b/docs/source/torchopt101/torchopt-101.rst @@ -0,0 +1,9 @@ +Get Started with Jupyter Notebook +================================= + +In this tutorial, we will use Google Colaboratory to show you the most basic usages of TorchOpt. 
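+
+Before opening the notebooks, here is a minimal sketch of the functional optimizer workflow that the first notebook walks through, using the Optax-style ``init``/``update``/``apply_updates`` API listed in the API reference. The toy model, tensor shapes, and learning rate below are made up for illustration; the notebook itself is the authoritative reference:
+
+.. code-block:: python
+
+    import torch
+    import torchopt
+
+    # A toy model; any PyTorch module works.
+    net = torch.nn.Linear(4, 1)
+    params = tuple(net.parameters())
+
+    # A functional optimizer is a stateless gradient transformation;
+    # its state is created explicitly and carried by the user.
+    optimizer = torchopt.adam(lr=1e-3)
+    opt_state = optimizer.init(params)
+
+    loss = net(torch.randn(8, 4)).pow(2).mean()
+    grads = torch.autograd.grad(loss, params)
+
+    # Transform the raw gradients into updates, then apply them
+    # to the parameters.
+    updates, opt_state = optimizer.update(grads, opt_state)
+    params = torchopt.apply_updates(params, updates)
+
+The notebooks below cover this workflow, and much more, in detail: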
+ +- 1: `Functional Optimizer `_ +- 2: `Visualization `_ +- 3: `Meta Optimizer `_ +- 4: `Stop Gradient `_ diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt deleted file mode 100644 index e69de29b..00000000 diff --git a/examples/L2R/helper/argument.py b/examples/L2R/helper/argument.py index 34bd8502..5df9f314 100644 --- a/examples/L2R/helper/argument.py +++ b/examples/L2R/helper/argument.py @@ -24,7 +24,10 @@ def parse_args(): parser.add_argument('--lr', type=float, default=1e-3, help='learning rate') parser.add_argument( - '--pos_ratio', type=float, default=0.995, help='Ratio of positive examples in training' + '--pos_ratio', + type=float, + default=0.995, + help='Ratio of positive examples in training', ) parser.add_argument('--ntest', type=int, default=500, help='Number of testing examples') parser.add_argument('--ntrain', type=int, default=5000, help='Number of testing examples') diff --git a/examples/L2R/helper/model.py b/examples/L2R/helper/model.py index d3a0beac..80fae8ac 100644 --- a/examples/L2R/helper/model.py +++ b/examples/L2R/helper/model.py @@ -34,18 +34,25 @@ class LeNet5(nn.Module): - def __init__(self, args): super(LeNet5, self).__init__() self.model = nn.Sequential( - nn.Conv2d(1, 16, 5), nn.ReLU(), nn.MaxPool2d(2), nn.Conv2d(16, 32, 5), nn.ReLU(), - nn.MaxPool2d(2), nn.Flatten(), nn.Linear(512, 128), nn.ReLU(), nn.Linear(128, 1), - nn.Sigmoid() + nn.Conv2d(1, 16, 5), + nn.ReLU(), + nn.MaxPool2d(2), + nn.Conv2d(16, 32, 5), + nn.ReLU(), + nn.MaxPool2d(2), + nn.Flatten(), + nn.Linear(512, 128), + nn.ReLU(), + nn.Linear(128, 1), + nn.Sigmoid(), ) self.args = args - self.meta_weights = torch.zeros( - self.args.batch_size, requires_grad=True - ).to(self.args.device) + self.meta_weights = torch.zeros(self.args.batch_size, requires_grad=True).to( + self.args.device + ) self.criterion = nn.BCELoss() def forward(self, x): diff --git a/examples/L2R/helper/utils.py b/examples/L2R/helper/utils.py index 0fb01ad4..954b27b2 100644 --- a/examples/L2R/helper/utils.py +++ b/examples/L2R/helper/utils.py @@ -24,7 +24,14 @@ def get_imbalance_dataset( - mnist_train, mnist_test, pos_ratio=0.9, ntrain=5000, nval=10, ntest=500, class_0=4, class_1=9 + mnist_train, + mnist_test, + pos_ratio=0.9, + ntrain=5000, + nval=10, + ntest=500, + class_0=4, + class_1=9, ): ratio = 1 - pos_ratio @@ -48,14 +55,14 @@ def get_imbalance_dataset( ntrain_small_neg = int(np.floor(ntrain * ratio)) - nval_small_neg x_val_0 = x_train_0[:nval_small_neg] # 450 4 in validation. - x_train_0 = x_train_0[nval_small_neg:nval_small_neg + ntrain_small_neg] # 500 4 in training. + x_train_0 = x_train_0[nval_small_neg : nval_small_neg + ntrain_small_neg] # 500 4 in training. print('Number of train negative classes', ntrain_small_neg) print('Number of val negative classes', nval_small_neg) idx = np.arange(x_test_0.shape[0]) np.random.shuffle(idx) - x_test_0 = x_test_0[:int(np.floor(ntest * ratio_test))] # 450 4 in testing. + x_test_0 = x_test_0[: int(np.floor(ntest * ratio_test))] # 450 4 in testing. x_train_1 = x_train[y_train == class_1] x_test_1 = x_test[y_test == class_1] @@ -69,12 +76,12 @@ def get_imbalance_dataset( ntrainsmall_pos = int(np.floor(ntrain * (1 - ratio))) - nvalsmall_pos x_val_1 = x_train_1[:nvalsmall_pos] # 50 9 in validation. - x_train_1 = x_train_1[nvalsmall_pos:nvalsmall_pos + ntrainsmall_pos] # 4500 9 in training. + x_train_1 = x_train_1[nvalsmall_pos : nvalsmall_pos + ntrainsmall_pos] # 4500 9 in training. 
idx = np.arange(x_test_1.shape[0]) np.random.shuffle(idx) x_test_1 = x_test_1[idx] - x_test_1 = x_test_1[:int(np.floor(ntest * (1 - ratio_test)))] # 500 9 in testing. + x_test_1 = x_test_1[: int(np.floor(ntest * (1 - ratio_test)))] # 500 9 in testing. print('Number of train positive classes', ntrainsmall_pos) print('Number of val positive classes', nvalsmall_pos) @@ -109,14 +116,19 @@ def get_imbalance_dataset( x_test_subset = x_test_subset[idx].astype(np.float32) y_test_subset = y_test_subset[idx].astype(np.float32) - x_train_subset, y_train_subset, x_val_subset, y_val_subset, x_test_subset, y_test_subset = ( - torch.tensor(x_train_subset), torch.tensor(y_train_subset), torch.tensor(x_val_subset), - torch.tensor(y_val_subset), torch.tensor(x_test_subset), torch.tensor(y_test_subset) + (x_train_subset, y_train_subset, x_val_subset, y_val_subset, x_test_subset, y_test_subset,) = ( + torch.tensor(x_train_subset), + torch.tensor(y_train_subset), + torch.tensor(x_val_subset), + torch.tensor(y_val_subset), + torch.tensor(x_test_subset), + torch.tensor(y_test_subset), ) train_set, val_set, test_set = ( - TensorDataset(x_train_subset, y_train_subset), TensorDataset(x_val_subset, y_val_subset), - TensorDataset(x_test_subset, y_test_subset) + TensorDataset(x_train_subset, y_train_subset), + TensorDataset(x_val_subset, y_val_subset), + TensorDataset(x_test_subset, y_test_subset), ) return train_set, val_set, test_set @@ -150,7 +162,7 @@ def plot(baseline, l2r): import seaborn as sns sns.set(style='darkgrid') - sns.set_theme(style="darkgrid") + sns.set_theme(style='darkgrid') plt.plot(baseline, label='baseline') plt.plot(l2r, label='l2r') plt.legend() diff --git a/examples/L2R/l2r.py b/examples/L2R/l2r.py index c04a90e1..9262c0c2 100644 --- a/examples/L2R/l2r.py +++ b/examples/L2R/l2r.py @@ -57,10 +57,15 @@ def run_baseline(args, mnist_train, mnist_test): with open('./result/baseline/config.json', 'w') as f: json.dump(args.__dict__, f) - args.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") + args.device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu') train_set, val_set, test_set = get_imbalance_dataset( - mnist_train, mnist_test, pos_ratio=pos_ratio, ntrain=ntrain, nval=nval, ntest=ntest + mnist_train, + mnist_test, + pos_ratio=pos_ratio, + ntrain=ntrain, + nval=nval, + ntest=ntest, ) train_loader = DataLoader(train_set, batch_size=args.batch_size, shuffle=True, num_workers=4) valid_loader = DataLoader(val_set, batch_size=args.batch_size, shuffle=True, num_workers=1) @@ -87,7 +92,7 @@ def run_baseline(args, mnist_train, mnist_test): if step % 10 == 0 and step > 0: running_train_mean = np.mean(np.array(running_train_loss)) - print("EPOCH: {}, BATCH: {}, LOSS: {}".format(_epoch, idx, running_train_mean)) + print('EPOCH: {}, BATCH: {}, LOSS: {}'.format(_epoch, idx, running_train_mean)) writer.add_scalar('running_train_loss', running_train_mean, step) running_train_loss = [] @@ -102,7 +107,7 @@ def run_baseline(args, mnist_train, mnist_test): writer.add_scalar('train_acc', train_acc, _epoch) writer.add_scalar('test_acc', test_acc, _epoch) test_acc_result.append(test_acc) - print("EPOCH: {}, TRAIN_ACC: {}, TEST_ACC: {}".format(_epoch, train_acc, test_acc)) + print('EPOCH: {}, TRAIN_ACC: {}, TEST_ACC: {}'.format(_epoch, train_acc, test_acc)) return test_acc_result @@ -121,10 +126,15 @@ def run_L2R(args, mnist_train, mnist_test): with open('./result/l2r/config.json', 'w') as f: json.dump(args.__dict__, f) - args.device = torch.device("cuda:0" if 
torch.cuda.is_available() else "cpu") + args.device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu') train_set, val_set, test_set = get_imbalance_dataset( - mnist_train, mnist_test, pos_ratio=pos_ratio, ntrain=ntrain, nval=nval, ntest=ntest + mnist_train, + mnist_test, + pos_ratio=pos_ratio, + ntrain=ntrain, + nval=nval, + ntest=ntest, ) train_loader = DataLoader(train_set, batch_size=args.batch_size, shuffle=True, num_workers=2) valid_loader = DataLoader(val_set, batch_size=args.batch_size, shuffle=True, num_workers=1) @@ -148,8 +158,10 @@ def run_L2R(args, mnist_train, mnist_test): valid = iter(valid_loader) valid_x, valid_label = valid.next() train_x, train_label, valid_x, valid_label = ( - train_x.to(args.device), train_label.to(args.device), valid_x.to(args.device), - valid_label.to(args.device) + train_x.to(args.device), + train_label.to(args.device), + valid_x.to(args.device), + valid_label.to(args.device), ) # reset meta-parameter weights @@ -164,8 +176,7 @@ def run_L2R(args, mnist_train, mnist_test): # caclulate outer_loss, deirve meta-gradient and normalise outer_loss = model.outer_loss(valid_x, valid_label) - model.meta_weights = - \ - torch.autograd.grad(outer_loss, model.meta_weights)[0] + model.meta_weights = -torch.autograd.grad(outer_loss, model.meta_weights)[0] model.meta_weights = torch.nn.ReLU()(model.meta_weights) model.normalise() @@ -192,7 +203,7 @@ def run_L2R(args, mnist_train, mnist_test): running_valid_mean = np.mean(np.array(running_valid_loss)) running_train_mean = np.mean(np.array(running_train_loss)) print( - "EPOCH: {}, BATCH: {}, WEIGHTED_TRAIN_LOSS: {}, VALID_LOSS: {}".format( + 'EPOCH: {}, BATCH: {}, WEIGHTED_TRAIN_LOSS: {}, VALID_LOSS: {}'.format( _epoch, idx, running_train_mean, running_valid_mean ) ) @@ -212,7 +223,7 @@ def run_L2R(args, mnist_train, mnist_test): writer.add_scalar('train_acc', train_acc, _epoch) writer.add_scalar('test_acc', test_acc, _epoch) test_acc_result.append(test_acc) - print("EPOCH: {}, TRAIN_ACC: {}, TEST_ACC: {}".format(_epoch, train_acc, test_acc)) + print('EPOCH: {}, TRAIN_ACC: {}, TEST_ACC: {}'.format(_epoch, train_acc, test_acc)) return test_acc_result diff --git a/examples/LOLA/helper/agent.py b/examples/LOLA/helper/agent.py index 58fdae7c..8b30a983 100644 --- a/examples/LOLA/helper/agent.py +++ b/examples/LOLA/helper/agent.py @@ -23,14 +23,12 @@ class theta_model(nn.Module): - def __init__(self, theta): super().__init__() self.theta = nn.Parameter(torch.tensor(theta.detach(), requires_grad=True)) -class Agent(): - +class Agent: def __init__(self, args): self.args = args diff --git a/examples/LOLA/helper/env.py b/examples/LOLA/helper/env.py index 1367d845..f1ef6e6f 100644 --- a/examples/LOLA/helper/env.py +++ b/examples/LOLA/helper/env.py @@ -30,20 +30,22 @@ def __init__(self, n): self.n = n def sample(self): - return np.random.multinomial(1, [1. 
/ self.n] * self.n) + return np.random.multinomial(1, [1.0 / self.n] * self.n) def contains(self, x): - return isinstance(x, np.ndarray) and \ - x.shape == (self.n, ) and \ - np.all(np.logical_or(x == 0, x == 1)) and \ - np.sum(x) == 1 + return ( + isinstance(x, np.ndarray) + and x.shape == (self.n,) + and np.all(np.logical_or(x == 0, x == 1)) + and np.sum(x) == 1 + ) @property def shape(self): return (self.n,) def __repr__(self): - return "OneHot(%d)" % self.n + return 'OneHot(%d)' % self.n def __eq__(self, other): return self.n == other.n @@ -91,6 +93,6 @@ def step(self, action): s1 = self.states[ac1, ac0] observation = [s0, s1] reward = [r0, r1] - done = (self.step_count == self.max_steps) + done = self.step_count == self.max_steps info = [{'available_actions': aa} for aa in self.available_actions] return observation, reward, done, info diff --git a/examples/LOLA/helper/utils.py b/examples/LOLA/helper/utils.py index 8cdd3396..afa9e609 100644 --- a/examples/LOLA/helper/utils.py +++ b/examples/LOLA/helper/utils.py @@ -43,8 +43,7 @@ def magic_box(x): # replay buffer -class Memory(): - +class Memory: def __init__(self, args): self.self_logprobs = [] self.other_logprobs = [] @@ -65,9 +64,9 @@ def dice_objective(self, use_baseline=True): rewards = torch.stack(self.rewards, dim=1) # apply discount: - cum_discount = torch.cumprod( - self.args.gamma * torch.ones(*rewards.size()), dim=1 - ) / self.args.gamma + cum_discount = ( + torch.cumprod(self.args.gamma * torch.ones(*rewards.size()), dim=1) / self.args.gamma + ) discounted_rewards = rewards * cum_discount discounted_values = values * cum_discount @@ -92,7 +91,7 @@ def dice_objective(self, use_baseline=True): def value_loss(self): values = torch.stack(self.values, dim=1) rewards = torch.stack(self.rewards, dim=1) - return torch.mean((rewards - values)**2) + return torch.mean((rewards - values) ** 2) def act(batch_states, theta, values): diff --git a/examples/LOLA/lola_dice.py b/examples/LOLA/lola_dice.py index 82d585d6..7384244b 100644 --- a/examples/LOLA/lola_dice.py +++ b/examples/LOLA/lola_dice.py @@ -31,7 +31,7 @@ def main(args): agent1_copy, agent2_copy = Agent(args), Agent(args) n_lookaheads = args.n_lookaheads joint_scores = [] - print("start iterations with", n_lookaheads, "lookaheads:") + print('start iterations with', n_lookaheads, 'lookaheads:') for update in range(args.n_update): # reset virtual update @@ -41,8 +41,10 @@ def main(args): # agent 2 assumes that agent 1 conducts n-step lookahead for _ in range(n_lookaheads): memory1, memory2 = sample( - ipd, [agent1.virtual_theta.theta, agent2.theta], [agent1.values, agent2.values], - args + ipd, + [agent1.virtual_theta.theta, agent2.theta], + [agent1.values, agent2.values], + args, ) inner_loss = memory1.dice_objective(use_baseline=args.use_baseline) agent1.virtual_optimiser.step(inner_loss) @@ -50,15 +52,20 @@ def main(args): # agent 1 assumes that agent 2 conducts n-step lookahead for _ in range(n_lookaheads): memory1, memory2 = sample( - ipd, [agent1.theta, agent2.virtual_theta.theta], [agent1.values, agent2.values], - args + ipd, + [agent1.theta, agent2.virtual_theta.theta], + [agent1.values, agent2.values], + args, ) inner_loss = memory2.dice_objective(use_baseline=args.use_baseline) agent2.virtual_optimiser.step(inner_loss) # update agent 1 memory1, memory2 = sample( - ipd, [agent1.theta, agent2.virtual_theta.theta], [agent1.values, agent2.values], args + ipd, + [agent1.theta, agent2.virtual_theta.theta], + [agent1.values, agent2.values], + args, ) outer_loss = 
memory1.dice_objective(use_baseline=args.use_baseline) agent1.theta_optimizer.zero_grad() @@ -71,7 +78,10 @@ def main(args): # update agent 2 memory1, memory2 = sample( - ipd, [agent1.virtual_theta.theta, agent2.theta], [agent1.values, agent2.values], args + ipd, + [agent1.virtual_theta.theta, agent2.theta], + [agent1.values, agent2.values], + args, ) outer_loss = memory2.dice_objective(use_baseline=args.use_baseline) agent2.theta_optimizer.zero_grad() @@ -91,16 +101,18 @@ def main(args): p1 = [p.item() for p in torch.sigmoid(agent1.theta)] p2 = [p.item() for p in torch.sigmoid(agent2.theta)] print( - 'update', update, 'score (%.3f,%.3f)' % (score[0], score[1]), - 'policy (agent1) = {%.3f, %.3f, %.3f, %.3f, %.3f}' % - (p1[0], p1[1], p1[2], p1[3], p1[4]), - ' (agent2) = {%.3f, %.3f, %.3f, %.3f, %.3f}' % (p2[0], p2[1], p2[2], p2[3], p2[4]) + 'update', + update, + 'score (%.3f,%.3f)' % (score[0], score[1]), + 'policy (agent1) = {%.3f, %.3f, %.3f, %.3f, %.3f}' + % (p1[0], p1[1], p1[2], p1[3], p1[4]), + ' (agent2) = {%.3f, %.3f, %.3f, %.3f, %.3f}' % (p2[0], p2[1], p2[2], p2[3], p2[4]), ) return joint_scores -if __name__ == "__main__": +if __name__ == '__main__': args = parse_args() joint_score = dict() for nla in range(3): diff --git a/examples/LOLA/visualize.py b/examples/LOLA/visualize.py index 26b53f1e..6dc54ddf 100755 --- a/examples/LOLA/visualize.py +++ b/examples/LOLA/visualize.py @@ -1,3 +1,5 @@ +#!/usr/bin/env python3 + # Copyright 2022 MetaOPT Team. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -21,7 +23,7 @@ def plot(file): data = np.load('result.npy', allow_pickle=True).tolist() sns.set(style='darkgrid') - sns.set_theme(style="darkgrid") + sns.set_theme(style='darkgrid') for step in range(3): plt.plot(data[step], label='Step ' + str(step)) plt.legend() @@ -31,5 +33,5 @@ def plot(file): # plot progress: -if __name__ == "__main__": +if __name__ == '__main__': plot('result.npy') diff --git a/examples/MAML-RL/helpers/__init__.py b/examples/MAML-RL/helpers/__init__.py index 213c216b..9855e0b3 100644 --- a/examples/MAML-RL/helpers/__init__.py +++ b/examples/MAML-RL/helpers/__init__.py @@ -22,10 +22,5 @@ register( 'TabularMDP-v0', entry_point='helpers.tabular_mdp:TabularMDPEnv', - kwargs={ - 'num_states': 10, - 'num_actions': 5, - 'max_episode_steps': 10, - 'seed': 1 - } + kwargs={'num_states': 10, 'num_actions': 5, 'max_episode_steps': 10, 'seed': 1}, ) diff --git a/examples/MAML-RL/helpers/tabular_mdp.py b/examples/MAML-RL/helpers/tabular_mdp.py index 1df07599..5f8dcc17 100644 --- a/examples/MAML-RL/helpers/tabular_mdp.py +++ b/examples/MAML-RL/helpers/tabular_mdp.py @@ -52,7 +52,11 @@ def __init__(self, num_states, num_actions, max_episode_steps, seed, task={}): self._task = task self._transitions = task.get( 'transitions', - np.full((num_states, num_actions, num_states), 1.0 / num_states, dtype=np.float32) + np.full( + (num_states, num_actions, num_states), + 1.0 / num_states, + dtype=np.float32, + ), ) self._rewards_mean = task.get( 'rewards_mean', np.zeros((num_states, num_actions), dtype=np.float32) @@ -68,16 +72,15 @@ def seed(self, seed=None): def sample_tasks(self, num_tasks): transitions = self.np_random.dirichlet( - np.ones(self.num_states), size=(num_tasks, self.num_states, self.num_actions) + np.ones(self.num_states), + size=(num_tasks, self.num_states, self.num_actions), ) rewards_mean = self.np_random.normal( 1.0, 1.0, size=(num_tasks, self.num_states, self.num_actions) ) tasks = [ - { - 'transitions': transition, - 
'rewards_mean': reward_mean - } for (transition, reward_mean) in zip(transitions, rewards_mean) + {'transitions': transition, 'rewards_mean': reward_mean} + for (transition, reward_mean) in zip(transitions, rewards_mean) ] return tasks diff --git a/examples/MAML-RL/maml.py b/examples/MAML-RL/maml.py index 3e7571d2..e6a149e6 100644 --- a/examples/MAML-RL/maml.py +++ b/examples/MAML-RL/maml.py @@ -71,7 +71,13 @@ def sample_traj(env, task, policy): rews_buf[step][batch] = rew gammas_buf[step][batch] = done * GAMMA ob = next_ob - return Traj(obs=obs_buf, acs=acs_buf, next_obs=next_obs_buf, rews=rews_buf, gammas=gammas_buf) + return Traj( + obs=obs_buf, + acs=acs_buf, + next_obs=next_obs_buf, + rews=rews_buf, + gammas=gammas_buf, + ) def a2c_loss(traj, policy, value_coef): @@ -82,8 +88,9 @@ def a2c_loss(traj, policy, value_coef): returns = [] g = next_values[-1, :] for i in reversed(range(next_values.shape[0])): - g = traj.rews[i, :] + traj.gammas[i, :] * \ - ((1 - lambdas[i, :]) * next_values[i, :] + lambdas[i, :] * g) + g = traj.rews[i, :] + traj.gammas[i, :] * ( + (1 - lambdas[i, :]) * next_values[i, :] + lambdas[i, :] * g + ) returns.insert(0, g) lambda_returns = torch.from_numpy(np.array(returns)) pi, values = policy(torch.from_numpy(traj.obs)) @@ -106,8 +113,8 @@ def evaluate(env, seed, task_num, policy): num_states=STATE_DIM, num_actions=ACTION_DIM, max_episode_steps=TRAJ_LEN, - seed=args.seed - ) + seed=args.seed, + ), ) tasks = env.sample_tasks(num_tasks=task_num) policy_state_dict = torchopt.extract_state_dict(policy) @@ -140,8 +147,8 @@ def main(args): num_states=STATE_DIM, num_actions=ACTION_DIM, max_episode_steps=TRAJ_LEN, - seed=args.seed - ) + seed=args.seed, + ), ) # Policy policy = CategoricalMLPPolicy(input_size=STATE_DIM, output_size=ACTION_DIM) @@ -185,16 +192,15 @@ def main(args): test_post_reward.append(sum(test_post_reward_ls) / TASK_NUM) print('Train_iters', i) - print("train_pre_reward", sum(train_pre_reward_ls) / TASK_NUM) - print("train_post_reward", sum(train_post_reward_ls) / TASK_NUM) - print("test_pre_reward", sum(test_pre_reward_ls) / TASK_NUM) - print("test_post_reward", sum(test_post_reward_ls) / TASK_NUM) + print('train_pre_reward', sum(train_pre_reward_ls) / TASK_NUM) + print('train_post_reward', sum(train_post_reward_ls) / TASK_NUM) + print('test_pre_reward', sum(test_pre_reward_ls) / TASK_NUM) + print('test_post_reward', sum(test_post_reward_ls) / TASK_NUM) -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser( - description='Reinforcement learning with ' - 'Model-Agnostic Meta-Learning (MAML) - Train' + description='Reinforcement learning with Model-Agnostic Meta-Learning (MAML) - Train' ) parser.add_argument('--seed', type=int, default=1, help='random seed (default: 1)') args = parser.parse_args() diff --git a/examples/MGRL/mgrl.py b/examples/MGRL/mgrl.py index 4f0feeb3..152e4177 100644 --- a/examples/MGRL/mgrl.py +++ b/examples/MGRL/mgrl.py @@ -21,9 +21,7 @@ def test_gamma(): - class Rollout: - @staticmethod def get(): out = torch.empty(5, 2) @@ -41,7 +39,6 @@ def rollout(trajectory, gamma): return torch.hstack(out).view(10, 1) class ValueNetwork(nn.Module): - def __init__(self): super().__init__() self.fc = nn.Linear(10, 1) @@ -77,8 +74,8 @@ def forward(self, x): torchopt.recover_state_dict(net, net_state) if i % 100 == 0: with torch.no_grad(): - print(f"epoch {i} | gamma: {torch.sigmoid(gamma)}") + print(f'epoch {i} | gamma: {torch.sigmoid(gamma)}') -if __name__ == "__main__": +if __name__ == '__main__': test_gamma() 
diff --git a/examples/few-shot/maml_omniglot.py b/examples/few-shot/maml_omniglot.py index f651f127..1d561ef7 100644 --- a/examples/few-shot/maml_omniglot.py +++ b/examples/few-shot/maml_omniglot.py @@ -29,7 +29,7 @@ # See the License for the specific language governing permissions and # limitations under the License. """ -This example shows how to use higher to do Model Agnostic Meta Learning (MAML) +This example shows how to use TorchOpt to do Model Agnostic Meta Learning (MAML) for few-shot Omniglot classification. For more details see the original MAML paper: https://arxiv.org/abs/1703.03400 @@ -96,11 +96,20 @@ def main(): # and the parameters needed to be manually updated and copied # for the updates. net = nn.Sequential( - nn.Conv2d(1, 64, 3), nn.BatchNorm2d(64, momentum=1., affine=True), nn.ReLU(inplace=False), - nn.MaxPool2d(2, 2), nn.Conv2d(64, 64, 3), nn.BatchNorm2d(64, momentum=1., affine=True), - nn.ReLU(inplace=False), nn.MaxPool2d(2, 2), nn.Conv2d(64, 64, 3), - nn.BatchNorm2d(64, momentum=1., affine=True), nn.ReLU(inplace=False), nn.MaxPool2d(2, 2), - nn.Flatten(), nn.Linear(64, args.n_way) + nn.Conv2d(1, 64, 3), + nn.BatchNorm2d(64, momentum=1.0, affine=True), + nn.ReLU(inplace=False), + nn.MaxPool2d(2, 2), + nn.Conv2d(64, 64, 3), + nn.BatchNorm2d(64, momentum=1.0, affine=True), + nn.ReLU(inplace=False), + nn.MaxPool2d(2, 2), + nn.Conv2d(64, 64, 3), + nn.BatchNorm2d(64, momentum=1.0, affine=True), + nn.ReLU(inplace=False), + nn.MaxPool2d(2, 2), + nn.Flatten(), + nn.Linear(64, args.n_way), ).to(device) # We will use Adam to (meta-)optimize the initial parameters @@ -170,7 +179,7 @@ def train(db, net, meta_opt, epoch, log): meta_opt.step() qry_losses = sum(qry_losses) / task_num - qry_accs = 100. * sum(qry_accs) / task_num + qry_accs = 100.0 * sum(qry_accs) / task_num i = epoch + float(batch_idx) / n_train_iter iter_time = time.time() - start_time @@ -233,7 +242,7 @@ def test(db, net, epoch, log): torchopt.recover_state_dict(inner_opt, optim_state_dict) qry_losses = torch.cat(qry_losses).mean().item() - qry_accs = 100. * torch.cat(qry_accs).float().mean().item() + qry_accs = 100.0 * torch.cat(qry_accs).float().mean().item() print(f'[Epoch {epoch+1:.2f}] Test Loss: {qry_losses:.2f} | Acc: {qry_accs:.2f}') log.append( { diff --git a/examples/few-shot/support/omniglot_loaders.py b/examples/few-shot/support/omniglot_loaders.py index 731c41be..d857d386 100644 --- a/examples/few-shot/support/omniglot_loaders.py +++ b/examples/few-shot/support/omniglot_loaders.py @@ -30,17 +30,19 @@ class Omniglot(data.Dataset): """ - The items are (filename,category). The index of all the categories can be found in self.idx_classes + The items are ``(filename, category)``. The index of all the categories can be found in + :attr:`idx_classes`. 
+ Args: - - root: the directory where the dataset will be stored - - transform: how to transform the input - - target_transform: how to transform the target - - download: need to download the dataset + root: the directory where the dataset will be stored + transform: how to transform the input + target_transform: how to transform the target + download: need to download the dataset """ urls = [ 'https://github.com/brendenlake/omniglot/raw/master/python/images_background.zip', - 'https://github.com/brendenlake/omniglot/raw/master/python/images_evaluation.zip' + 'https://github.com/brendenlake/omniglot/raw/master/python/images_evaluation.zip', ] raw_folder = 'raw' processed_folder = 'processed' @@ -77,8 +79,9 @@ def __len__(self): return len(self.all_items) def _check_exists(self): - return os.path.exists(os.path.join(self.root, self.processed_folder, "images_evaluation")) and \ - os.path.exists(os.path.join(self.root, self.processed_folder, "images_background")) + return os.path.exists( + os.path.join(self.root, self.processed_folder, 'images_evaluation') + ) and os.path.exists(os.path.join(self.root, self.processed_folder, 'images_background')) def download(self): import zipfile @@ -106,22 +109,22 @@ def download(self): with open(file_path, 'wb') as f: f.write(data.read()) file_processed = os.path.join(self.root, self.processed_folder) - print("== Unzip from " + file_path + " to " + file_processed) + print('== Unzip from ' + file_path + ' to ' + file_processed) zip_ref = zipfile.ZipFile(file_path, 'r') zip_ref.extractall(file_processed) zip_ref.close() - print("Download finished.") + print('Download finished.') def find_classes(root_dir): retour = [] for (root, dirs, files) in os.walk(root_dir): for f in files: - if (f.endswith("png")): + if f.endswith('png'): r = root.split('/') lr = len(r) - retour.append((f, r[lr - 2] + "/" + r[lr - 1], root)) - print("== Found %d items " % len(retour)) + retour.append((f, r[lr - 2] + '/' + r[lr - 1], root)) + print('== Found %d items ' % len(retour)) return retour @@ -130,12 +133,11 @@ def index_classes(items): for i in items: if i[1] not in idx: idx[i[1]] = len(idx) - print("== Found %d classes" % len(idx)) + print('== Found %d classes' % len(idx)) return idx class OmniglotNShot: - def __init__(self, root, batchsz, n_way, k_shot, k_query, imgsz, rng, device=None): """ Different from mnistNShot, the @@ -157,15 +159,17 @@ def __init__(self, root, batchsz, n_way, k_shot, k_query, imgsz, rng, device=Non download=True, transform=transforms.Compose( [ - lambda x: Image.open(x).convert('L'), lambda x: x.resize((imgsz, imgsz)), + lambda x: Image.open(x).convert('L'), + lambda x: x.resize((imgsz, imgsz)), lambda x: np.reshape(x, (imgsz, imgsz, 1)), - lambda x: np.transpose(x, [2, 0, 1]), lambda x: x / 255. + lambda x: np.transpose(x, [2, 0, 1]), + lambda x: x / 255.0, ] ), ) - temp = dict( - ) # {label:img1, img2..., 20 imgs, label2: img1, img2,... in total, 1623 label} + # {label: [img1, img2..., img20], label2: [img1, img2, ...], ... 
1623 labels in total}
+        temp = {}
         for (img, label) in self.x:
             if label in temp.keys():
                 temp[label].append(img)
@@ -173,7 +177,10 @@ def __init__(self, root, batchsz, n_way, k_shot, k_query, imgsz, rng, device=Non
                 temp[label] = [img]
 
         self.x = []
-        for label, imgs in temp.items():  # labels info deserted , each label contains 20imgs
+        for (
+            label,
+            imgs,
+        ) in temp.items():  # label info is discarded; each label contains 20 imgs
             self.x.append(np.array(imgs))
 
         # as different class may have different number of imgs
@@ -203,19 +210,22 @@ def __init__(self, root, batchsz, n_way, k_shot, k_query, imgsz, rng, device=Non
         assert (k_shot + k_query) <= 20
 
         # save pointer of current read batch in total cache
-        self.indexes = {"train": 0, "test": 0}
-        self.datasets = {"train": self.x_train, "test": self.x_test}  # original data cached
-        print("DB: train", self.x_train.shape, "test", self.x_test.shape)
+        self.indexes = {'train': 0, 'test': 0}
+        self.datasets = {
+            'train': self.x_train,
+            'test': self.x_test,
+        }  # original data cached
+        print('DB: train', self.x_train.shape, 'test', self.x_test.shape)
 
         self.datasets_cache = {
-            "train": self.load_data_cache(self.datasets["train"]),  # current epoch data cached
-            "test": self.load_data_cache(self.datasets["test"])
+            'train': self.load_data_cache(self.datasets['train']),  # current epoch data cached
+            'test': self.load_data_cache(self.datasets['test']),
         }
 
     def normalization(self):
         """
-            Normalizes our data, to have a mean of 0 and sdt of 1
-            """
+        Normalizes our data to have a mean of 0 and std of 1
+        """
         self.mean = np.mean(self.x_train)
         self.std = np.std(self.x_train)
         self.max = np.max(self.x_train)
@@ -257,20 +267,21 @@ def load_data_cache(self, data_pack):
                     selected_img = self.rng.choice(20, self.k_shot + self.k_query, False)
 
                     # meta-training and meta-test
-                    x_spt.append(data_pack[cur_class][selected_img[:self.k_shot]])
-                    x_qry.append(data_pack[cur_class][selected_img[self.k_shot:]])
+                    x_spt.append(data_pack[cur_class][selected_img[: self.k_shot]])
+                    x_qry.append(data_pack[cur_class][selected_img[self.k_shot :]])
                     y_spt.append([j for _ in range(self.k_shot)])
                     y_qry.append([j for _ in range(self.k_query)])
 
                 # shuffle inside a batch
                 perm = self.rng.permutation(self.n_way * self.k_shot)
-                x_spt = np.array(x_spt) \
-                    .reshape(self.n_way * self.k_shot, 1, self.resize, self.resize)[perm]
-                y_spt = np.array(y_spt) \
-                    .reshape(self.n_way * self.k_shot)[perm]
+                x_spt = np.array(x_spt).reshape(
+                    self.n_way * self.k_shot, 1, self.resize, self.resize
+                )[perm]
+                y_spt = np.array(y_spt).reshape(self.n_way * self.k_shot)[perm]
                 perm = self.rng.permutation(self.n_way * self.k_query)
-                x_qry = np.array(x_qry) \
-                    .reshape(self.n_way * self.k_query, 1, self.resize, self.resize)[perm]
+                x_qry = np.array(x_qry).reshape(
+                    self.n_way * self.k_query, 1, self.resize, self.resize
+                )[perm]
                 y_qry = np.array(y_qry).reshape(self.n_way * self.k_query)[perm]
 
                 # append [sptsz, 1, 84, 84] => [b, setsz, 1, 84, 84]
@@ -280,12 +291,14 @@ def load_data_cache(self, data_pack):
                 y_qrys.append(y_qry)
 
             # [b, setsz, 1, 84, 84]
-            x_spts = np.array(x_spts, dtype=np.float32) \
-                .reshape(self.batchsz, setsz, 1, self.resize, self.resize)
+            x_spts = np.array(x_spts, dtype=np.float32).reshape(
+                self.batchsz, setsz, 1, self.resize, self.resize
+            )
             y_spts = np.array(y_spts, dtype=np.int).reshape(self.batchsz, setsz)
             # [b, qrysz, 1, 84, 84]
-            x_qrys = np.array(x_qrys, dtype=np.float32) \
-                .reshape(self.batchsz, querysz, 1, self.resize, self.resize)
+            x_qrys = np.array(x_qrys, dtype=np.float32).reshape(
self.batchsz, querysz, 1, self.resize, self.resize + ) y_qrys = np.array(y_qrys, dtype=np.int).reshape(self.batchsz, querysz) x_spts, y_spts, x_qrys, y_qrys = [ diff --git a/examples/requirements.txt b/examples/requirements.txt new file mode 100644 index 00000000..eaf947df --- /dev/null +++ b/examples/requirements.txt @@ -0,0 +1,13 @@ +--extra-index-url https://download.pytorch.org/whl/cu116 +torch == 1.12 +torchvision +functorch + +--requirement ../requirements.txt + +gym < 1.0.0.a0 +matplotlib +pandas +seaborn +torchviz +pillow diff --git a/examples/visualize.py b/examples/visualize.py index 7360dc3b..56de2bd5 100644 --- a/examples/visualize.py +++ b/examples/visualize.py @@ -22,7 +22,6 @@ class Net(nn.Module): - def __init__(self, dim): super().__init__() self.fc = nn.Linear(dim, 1) @@ -34,7 +33,7 @@ def forward(self, x, meta_param): def draw_torchviz(): net = Net(dim).cuda() optimizer = torchopt.MetaAdam(net, lr=1e-3, use_accelerated_op=False) - meta_param = torch.tensor(1., requires_grad=True) + meta_param = torch.tensor(1.0, requires_grad=True) xs = torch.ones(batch_size, dim).cuda() @@ -45,13 +44,13 @@ def draw_torchviz(): pred = net(xs, meta_param) loss = F.mse_loss(pred, torch.ones_like(pred)) # draw computation graph - torchviz.make_dot(loss).render("torchviz_graph", format="svg") + torchviz.make_dot(loss).render('torchviz_graph', format='svg') def draw_torchopt(): net = Net(dim).cuda() optimizer = torchopt.MetaAdam(net, lr=1e-3, use_accelerated_op=True) - meta_param = torch.tensor(1., requires_grad=True) + meta_param = torch.tensor(1.0, requires_grad=True) xs = torch.ones(batch_size, dim).cuda() @@ -66,10 +65,8 @@ def draw_torchopt(): pred = net(xs, meta_param) loss = F.mse_loss(pred, torch.ones_like(pred)) # draw computation graph - torchopt.visual.make_dot(loss, [net_state_0, net_state_1, { - meta_param: "meta_param" - }]).render( - "torchopt_graph", format="svg" + torchopt.visual.make_dot(loss, [net_state_0, net_state_1, {meta_param: 'meta_param'}]).render( + 'torchopt_graph', format='svg' ) diff --git a/include/adam_op/adam_op.h b/include/adam_op/adam_op.h index 33aa53b7..38ebd0cc 100644 --- a/include/adam_op/adam_op.h +++ b/include/adam_op/adam_op.h @@ -18,7 +18,7 @@ #include -#include "common.h" +#include "include/common.h" namespace torchopt { TensorArray<3> adamForwardInplace(const torch::Tensor& updates, diff --git a/include/adam_op/adam_op_impl.cuh b/include/adam_op/adam_op_impl.cuh index bc29171f..c9dcba85 100644 --- a/include/adam_op/adam_op_impl.cuh +++ b/include/adam_op/adam_op_impl.cuh @@ -18,7 +18,7 @@ #include -#include "common.h" +#include "include/common.h" namespace torchopt { TensorArray<3> adamForwardInplaceCUDA(const torch::Tensor &updates, diff --git a/include/adam_op/adam_op_impl.h b/include/adam_op/adam_op_impl.h index 2514aa48..87562fb1 100644 --- a/include/adam_op/adam_op_impl.h +++ b/include/adam_op/adam_op_impl.h @@ -18,7 +18,7 @@ #include -#include "common.h" +#include "include/common.h" namespace torchopt { TensorArray<3> adamForwardInplaceCPU(const torch::Tensor& updates, diff --git a/setup.cfg b/setup.cfg index 4f82d935..c3438afe 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,11 +1,7 @@ -[yapf] -based_on_style = yapf -indent_width = 4 -continuation_indent_width = 4 -column_limit = 100 -spaces_before_comment = 2 -dedent_closing_brackets = true -blank_lines_between_top_level_imports_and_variables = 2 +[black] +line-length = 100 +skip-string-normalization = True +target_version = ["py37"] [flake8] exclude = @@ -17,6 +13,7 @@ convention = google 
[isort] profile = black +py_version=37 indent = 4 line_length = 100 lines_after_imports = 2 @@ -42,4 +39,4 @@ warn_unused_configs = True warn_unused_ignores = True [doc8] -max-line-length = 200 +max-line-length = 1000 diff --git a/setup.py b/setup.py index 67f83c37..f399e14d 100644 --- a/setup.py +++ b/setup.py @@ -26,15 +26,11 @@ def __init__(self, name, source_dir='.', **kwargs): class cmake_build_ext(build_ext): - def copy(self, extdir): - for op_path in pathlib.Path(extdir).iterdir(): - if not op_path.is_dir(): - continue - for file in op_path.iterdir(): - if str(file).rpartition('.')[-1] == 'so': - shutil.copy(file, HERE / 'torchopt' / '_lib') - - def build_extensions(self): + def build_extension(self, ext): + if not isinstance(ext, CMakeExtension): + super().build_extension(ext) + return + import pybind11 from torch.utils import cpp_extension @@ -47,41 +43,40 @@ def build_extensions(self): config = 'Debug' if self.debug else 'Release' - for ext in self.extensions: - extdir = os.path.abspath(os.path.dirname(self.get_ext_fullpath(ext.name))) - print(self.get_ext_fullpath(ext.name)) - - PYTHON_INCLUDE_DIR = ';'.join(self.include_dirs) - TORCH_INCLUDE_PATH = ';'.join(cpp_extension.include_paths()) - TORCH_LIBRARY_PATH = ';'.join(cpp_extension.library_paths()) - - cmake_args = [ - f'-DCMAKE_BUILD_TYPE={config}', - f'-DCMAKE_LIBRARY_OUTPUT_DIRECTORY_{config.upper()}={extdir}', - f'-DCMAKE_ARCHIVE_OUTPUT_DIRECTORY_{config.upper()}={self.build_temp}', - f'-DPYTHON_EXECUTABLE={sys.executable}', - f'-DPYBIND11_CMAKE_DIR={pybind11.get_cmake_dir()}', - f'-DPYTHON_INCLUDE_DIR={PYTHON_INCLUDE_DIR}', - f'-DTORCH_INCLUDE_PATH={TORCH_INCLUDE_PATH}', - f'-DTORCH_LIBRARY_PATH={TORCH_LIBRARY_PATH}', - ] - - build_args = ['--config', config] - - if ( - 'CMAKE_BUILD_PARALLEL_LEVEL' not in os.environ - and hasattr(self, 'parallel') and self.parallel - ): - build_args.append(f'-j{self.parallel}') - - try: - os.chdir(build_temp) - self.spawn(['cmake', ext.source_dir] + cmake_args) - if not self.dry_run: - self.spawn(['cmake', '--build', '.'] + build_args) - self.copy(extdir) - finally: - os.chdir(HERE) + extdir = os.path.abspath(os.path.dirname(self.get_ext_fullpath(ext.name))) + print(self.get_ext_fullpath(ext.name)) + + PYTHON_INCLUDE_DIR = ';'.join(self.include_dirs) + TORCH_INCLUDE_PATH = ';'.join(cpp_extension.include_paths()) + TORCH_LIBRARY_PATH = ';'.join(cpp_extension.library_paths()) + + cmake_args = [ + f'-DCMAKE_BUILD_TYPE={config}', + f'-DCMAKE_LIBRARY_OUTPUT_DIRECTORY_{config.upper()}={extdir}', + f'-DCMAKE_ARCHIVE_OUTPUT_DIRECTORY_{config.upper()}={self.build_temp}', + f'-DPYTHON_EXECUTABLE={sys.executable}', + f'-DPYBIND11_CMAKE_DIR={pybind11.get_cmake_dir()}', + f'-DPYTHON_INCLUDE_DIR={PYTHON_INCLUDE_DIR}', + f'-DTORCH_INCLUDE_PATH={TORCH_INCLUDE_PATH}', + f'-DTORCH_LIBRARY_PATH={TORCH_LIBRARY_PATH}', + ] + + build_args = ['--config', config] + + if ( + 'CMAKE_BUILD_PARALLEL_LEVEL' not in os.environ + and hasattr(self, 'parallel') + and self.parallel + ): + build_args.append(f'-j{self.parallel}') + + try: + os.chdir(build_temp) + self.spawn(['cmake', ext.source_dir] + cmake_args) + if not self.dry_run: + self.spawn(['cmake', '--build', '.'] + build_args) + finally: + os.chdir(HERE) setup( @@ -90,6 +85,8 @@ def build_extensions(self): author='TorchOpt Contributors', author_email='jieren9806@gmail.com, xidong.feng.20@ucl.ac.uk, benjaminliu.eecs@gmail.com', description='A Jax-style optimizer for PyTorch.', + long_description=open('README.md', encoding='utf8').read(), + 
long_description_content_type='text/markdown', license='Apache License Version 2.0', keywords='Meta-Learning, PyTorch, Optimizer', url='https://github.com/metaopt/TorchOpt', @@ -97,9 +94,7 @@ def build_extensions(self): package_data={'sharedlib': ['_lib/*.so']}, include_package_data=True, cmdclass={'build_ext': cmake_build_ext}, - ext_modules=[ - CMakeExtension('torchopt._lib.adam_op', source_dir=HERE) - ], + ext_modules=[CMakeExtension('torchopt._lib.adam_op', source_dir=HERE)], setup_requires=[ # for `torch.utils.cpp_extension` 'torch == 1.12', 'numpy', @@ -112,5 +107,24 @@ def build_extensions(self): 'graphviz', 'typing-extensions', ], - python_requires='>= 3.7' + python_requires='>= 3.7', + classifiers=[ + # How mature is this project? Common values are + # 3 - Alpha + # 4 - Beta + # 5 - Production/Stable + 'Development Status :: 4 - Beta', + # Indicate who your project is intended for + 'Intended Audience :: Science/Research', + 'Intended Audience :: Developers', + 'Topic :: Scientific/Engineering :: Artificial Intelligence', + # Pick your license as you wish (should match "license" above) + 'License :: OSI Approved :: Apache Software License', + # Specify the Python versions you support here. In particular, ensure + # that you indicate whether you support Python 2, Python 3 or both. + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', + ], ) diff --git a/src/adam_op/adam_op.cpp b/src/adam_op/adam_op.cpp index 130e3a27..a11c0116 100644 --- a/src/adam_op/adam_op.cpp +++ b/src/adam_op/adam_op.cpp @@ -13,13 +13,13 @@ // limitations under the License. // ============================================================================== -#include "adam_op/adam_op.h" +#include "include/adam_op/adam_op.h" #include #include -#include "adam_op/adam_op_impl.cuh" -#include "adam_op/adam_op_impl.h" +#include "include/adam_op/adam_op_impl.cuh" +#include "include/adam_op/adam_op_impl.h" namespace torchopt { TensorArray<3> adamForwardInplace(const torch::Tensor& updates, diff --git a/src/adam_op/adam_op_impl.cpp b/src/adam_op/adam_op_impl.cpp index ba3e4c7a..16be5251 100644 --- a/src/adam_op/adam_op_impl.cpp +++ b/src/adam_op/adam_op_impl.cpp @@ -13,14 +13,14 @@ // limitations under the License. 
// ============================================================================== -#include "adam_op/adam_op_impl.h" +#include "include/adam_op/adam_op_impl.h" #include #include #include -#include "utils.h" +#include "include/utils.h" namespace torchopt { using std::size_t; diff --git a/src/adam_op/adam_op_impl.cu b/src/adam_op/adam_op_impl.cu index c32f1ad3..b10942eb 100644 --- a/src/adam_op/adam_op_impl.cu +++ b/src/adam_op/adam_op_impl.cu @@ -17,8 +17,8 @@ #include -#include "adam_op/adam_op_impl.cuh" -#include "utils.h" +#include "include/adam_op/adam_op_impl.cuh" +#include "include/utils.h" namespace torchopt { diff --git a/tests/requirements.txt b/tests/requirements.txt index 2bbfedbe..6cf7a2a1 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -9,7 +9,8 @@ pytest pytest-cov pytest-xdist isort -yapf +black >= 22.6.0 +pylint mypy flake8 flake8-bugbear @@ -17,3 +18,4 @@ doc8 pydocstyle pyenchant cpplint +pre-commit diff --git a/tests/unit/high_level/test_high_level_inplace.py b/tests/unit/high_level/test_high_level_inplace.py index 69a7ff18..03e206d9 100644 --- a/tests/unit/high_level/test_high_level_inplace.py +++ b/tests/unit/high_level/test_high_level_inplace.py @@ -26,7 +26,6 @@ class HighLevelInplace(unittest.TestCase): - @classmethod def setUpClass(cls): torch.manual_seed(0) diff --git a/tests/unit/low_level/test_low_level_inplace.py b/tests/unit/low_level/test_low_level_inplace.py index 538642cc..09f39ec9 100644 --- a/tests/unit/low_level/test_low_level_inplace.py +++ b/tests/unit/low_level/test_low_level_inplace.py @@ -27,7 +27,6 @@ class LowLevelInplace(unittest.TestCase): - @classmethod def setUpClass(cls): torch.manual_seed(0) diff --git a/tests/unit/test_clip.py b/tests/unit/test_clip.py index 5967c9f4..7907b9a9 100644 --- a/tests/unit/test_clip.py +++ b/tests/unit/test_clip.py @@ -26,7 +26,6 @@ class HighLevelInplace(unittest.TestCase): - @classmethod def setUpClass(cls): torch.manual_seed(0) @@ -39,7 +38,7 @@ def setUpClass(cls): cls.loader = data.DataLoader(cls.dataset, cls.batch_size, False) cls.lr = 1e0 - cls.max_norm = 10. + cls.max_norm = 10.0 def setUp(self) -> None: torch.manual_seed(0) @@ -48,7 +47,8 @@ def setUp(self) -> None: def test_sgd(self) -> None: chain = torchopt.combine.chain( - torchopt.clip.clip_grad_norm(max_norm=self.max_norm), torchopt.sgd(lr=self.lr) + torchopt.clip.clip_grad_norm(max_norm=self.max_norm), + torchopt.sgd(lr=self.lr), ) optim = torchopt.Optimizer(self.model.parameters(), chain) optim_ref = torch.optim.SGD(self.model_ref.parameters(), self.lr) diff --git a/tests/unit/test_schedule.py b/tests/unit/test_schedule.py index 66950050..b1681949 100644 --- a/tests/unit/test_schedule.py +++ b/tests/unit/test_schedule.py @@ -19,11 +19,10 @@ class TestSchedule(unittest.TestCase): - @classmethod def setUpClass(cls): - cls.init_value = 1. - cls.end_value = 0. 
+ cls.init_value = 1.0 + cls.end_value = 0.0 cls.gap_value = cls.init_value - cls.end_value cls.transition_steps = 10 cls.transition_begin = 1 @@ -36,12 +35,14 @@ def test_linear(self) -> None: init_value=self.init_value, end_value=self.end_value, transition_steps=self.transition_steps, - transition_begin=self.transition_begin + transition_begin=self.transition_begin, ) for i in range(self.transition_begin, self.transition_steps): lr = schedule(i) - lr_gt = self.init_value - self.gap_value * \ - (i - self.transition_begin) / self.transition_steps + lr_gt = ( + self.init_value + - self.gap_value * (i - self.transition_begin) / self.transition_steps + ) self.assertEqual(lr, lr_gt) diff --git a/torchopt/__init__.py b/torchopt/__init__.py index b9ac2730..3f94afeb 100644 --- a/torchopt/__init__.py +++ b/torchopt/__init__.py @@ -24,25 +24,25 @@ __all__ = [ - "accelerated_op_available", - "clip", - "combine", - "hook", - "schedule", - "visual", - "adam", - "rmsprop", - "sgd", - "Optimizer", - "SGD", - "Adam", - "RMSProp", - "MetaOptimizer", - "MetaSGD", - "MetaAdam", - "MetaRMSProp", - "apply_updates", - "extract_state_dict", - "recover_state_dict", - "stop_gradient", + 'accelerated_op_available', + 'clip', + 'combine', + 'hook', + 'schedule', + 'visual', + 'adam', + 'rmsprop', + 'sgd', + 'Optimizer', + 'SGD', + 'Adam', + 'RMSProp', + 'MetaOptimizer', + 'MetaSGD', + 'MetaAdam', + 'MetaRMSProp', + 'apply_updates', + 'extract_state_dict', + 'recover_state_dict', + 'stop_gradient', ] diff --git a/torchopt/_lib/adam_op.pyi b/torchopt/_lib/adam_op.pyi index ca10e621..47f04d2b 100644 --- a/torchopt/_lib/adam_op.pyi +++ b/torchopt/_lib/adam_op.pyi @@ -13,45 +13,45 @@ # limitations under the License. # ==============================================================================\ +# isort: off + from typing import Tuple import torch - def forward_( - updates: torch.Tensor, mu: torch.Tensor, nu: torch.Tensor, b1: float, b2: float, eps: float, - eps_root: float, count: int -) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: - ... - - -def forwardMu(updates: torch.Tensor, mu: torch.Tensor, b1: float) -> torch.Tensor: - ... - - -def forwardNu(updates: torch.Tensor, nu: torch.Tensor, b2: float) -> torch.Tensor: - ... - - + updates: torch.Tensor, + mu: torch.Tensor, + nu: torch.Tensor, + b1: float, + b2: float, + eps: float, + eps_root: float, + count: int, +) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: ... +def forwardMu(updates: torch.Tensor, mu: torch.Tensor, b1: float) -> torch.Tensor: ... +def forwardNu(updates: torch.Tensor, nu: torch.Tensor, b2: float) -> torch.Tensor: ... def forwardUpdates( - new_mu: torch.Tensor, new_nu: torch.Tensor, b1: float, b2: float, eps: float, eps_root: float, - count: int -) -> torch.Tensor: - ... - - -def backwardMu(dmu: torch.Tensor, updates: torch.Tensor, mu: torch.Tensor, - b1: float) -> Tuple[torch.Tensor, torch.Tensor]: - ... - - -def backwardNu(dnu: torch.Tensor, updates: torch.Tensor, nu: torch.Tensor, - b2: float) -> Tuple[torch.Tensor, torch.Tensor]: - ... - - + new_mu: torch.Tensor, + new_nu: torch.Tensor, + b1: float, + b2: float, + eps: float, + eps_root: float, + count: int, +) -> torch.Tensor: ... +def backwardMu( + dmu: torch.Tensor, updates: torch.Tensor, mu: torch.Tensor, b1: float +) -> Tuple[torch.Tensor, torch.Tensor]: ... +def backwardNu( + dnu: torch.Tensor, updates: torch.Tensor, nu: torch.Tensor, b2: float +) -> Tuple[torch.Tensor, torch.Tensor]: ... 
def backwardUpdates( - dupdates: torch.Tensor, updates: torch.Tensor, new_mu: torch.Tensor, new_nu: torch.Tensor, - b1: float, b2: float, count: int -) -> Tuple[torch.Tensor, torch.Tensor]: - ... + dupdates: torch.Tensor, + updates: torch.Tensor, + new_mu: torch.Tensor, + new_nu: torch.Tensor, + b1: float, + b2: float, + count: int, +) -> Tuple[torch.Tensor, torch.Tensor]: ... diff --git a/torchopt/_src/accelerated_op/__init__.py b/torchopt/_src/accelerated_op/__init__.py index 70a22322..3eb1d44f 100644 --- a/torchopt/_src/accelerated_op/__init__.py +++ b/torchopt/_src/accelerated_op/__init__.py @@ -19,17 +19,18 @@ def accelerated_op_available(devices=None): + """Check the availability of accelerated optimizer.""" op = AdamOp() if devices is None: - devices = [torch.device("cuda"), torch.device("cpu")] + devices = [torch.device('cuda'), torch.device('cpu')] elif isinstance(devices, torch.device): devices = [devices] try: for device in devices: - updates = torch.tensor(1., device=device) + updates = torch.tensor(1.0, device=device) op(updates, updates, updates, 1) return True - except BaseException: + except BaseException: # pylint: disable=broad-except return False diff --git a/torchopt/_src/accelerated_op/adam_op/adam_op.py b/torchopt/_src/accelerated_op/adam_op/adam_op.py index 94098520..cde05f73 100644 --- a/torchopt/_src/accelerated_op/adam_op/adam_op.py +++ b/torchopt/_src/accelerated_op/adam_op/adam_op.py @@ -13,6 +13,8 @@ # limitations under the License. # ============================================================================== +# pylint: disable=c-extension-no-member,invalid-name + from typing import Any import torch @@ -20,16 +22,20 @@ from torchopt._lib import adam_op -class AdamOp(object): +class AdamOp: # pylint: disable=too-few-public-methods + """Fused accelerated Adam operators.""" - class MuOp(torch.autograd.Function): + class MuOp(torch.autograd.Function): # pylint: disable=abstract-method + """Bias-corrected first moment estimate.""" @staticmethod def jvp(ctx: Any, *grad_inputs: Any) -> Any: - pass + # pylint: disable=line-too-long + """Defines a formula for differentiating the operation with forward mode automatic differentiation.""" @staticmethod def forward(ctx: Any, *args: Any, **kwargs: Any) -> Any: + """Performs the operation.""" updates, mu, b1 = args new_mu = adam_op.forwardMu(updates, mu, b1) ctx.save_for_backward(updates, mu) @@ -38,20 +44,25 @@ def forward(ctx: Any, *args: Any, **kwargs: Any) -> Any: @staticmethod def backward(ctx: Any, *args: Any) -> Any: + # pylint: disable=line-too-long + """Defines a formula for differentiating the operation with backward mode automatic differentiation (alias to the :meth:`vjp` method).""" dmu = args[0] updates, mu = ctx.saved_tensors b1 = ctx.b1 result = adam_op.backwardMu(dmu, updates, mu, b1) return result[0], result[1], None - class NuOp(torch.autograd.Function): + class NuOp(torch.autograd.Function): # pylint: disable=abstract-method + """Bias-corrected second raw moment estimate.""" @staticmethod def jvp(ctx: Any, *grad_inputs: Any) -> Any: - pass + # pylint: disable=line-too-long + """Defines a formula for differentiating the operation with forward mode automatic differentiation.""" @staticmethod def forward(ctx: Any, *args: Any, **kwargs: Any) -> Any: + """Performs the operation.""" updates, nu, b2 = args new_nu = adam_op.forwardNu(updates, nu, b2) ctx.save_for_backward(updates, nu) @@ -60,20 +71,25 @@ def forward(ctx: Any, *args: Any, **kwargs: Any) -> Any: @staticmethod def backward(ctx: Any, *args: Any) -> 
Any: + # pylint: disable=line-too-long + """Defines a formula for differentiating the operation with backward mode automatic differentiation (alias to the :meth:`vjp` function).""" dnu = args[0] updates, nu = ctx.saved_tensors b2 = ctx.b2 result = adam_op.backwardNu(dnu, updates, nu, b2) return result[0], result[1], None - class UpdatesOp(torch.autograd.Function): + class UpdatesOp(torch.autograd.Function): # pylint: disable=abstract-method + """Adam updates.""" @staticmethod def jvp(ctx: Any, *grad_inputs: Any) -> Any: - pass + # pylint: disable=line-too-long + """Defines a formula for differentiating the operation with forward mode automatic differentiation.""" @staticmethod def forward(ctx: Any, *args: Any, **kwargs: Any) -> Any: + """Performs the operation.""" new_mu, new_nu, (b1, b2, eps, eps_root, count) = args new_updates = adam_op.forwardUpdates(new_mu, new_nu, b1, b2, eps, eps_root, count) ctx.save_for_backward(new_updates, new_mu, new_nu) @@ -82,13 +98,17 @@ def forward(ctx: Any, *args: Any, **kwargs: Any) -> Any: @staticmethod def backward(ctx: Any, *args: Any) -> Any: + # pylint: disable=line-too-long + """Defines a formula for differentiating the operation with backward mode automatic differentiation (alias to the :meth:`vjp` function).""" dupdates = args[0] updates, new_mu, new_nu = ctx.saved_tensors - b1, b2, eps, eps_root, count = ctx.others + b1, b2, _, _, count = ctx.others result = adam_op.backwardUpdates(dupdates, updates, new_mu, new_nu, b1, b2, count) return result[0], result[1], None - def __init__(self, b1=0.9, b2=0.999, eps=1e-8, eps_root=0., inplace=True): + # pylint: disable=too-many-arguments + def __init__(self, b1=0.9, b2=0.999, eps=1e-8, eps_root=0.0, inplace=True): + """The :meth:`__init__` function.""" self.b1 = b1 self.b2 = b2 self.eps = eps @@ -96,6 +116,7 @@ def __init__(self, b1=0.9, b2=0.999, eps=1e-8, eps_root=0., inplace=True): self.inplace = inplace def __call__(self, mu, nu, updates, count): + """The :meth:`__call__` function.""" if updates is None: return mu, nu, None if updates.is_cuda: diff --git a/torchopt/_src/alias.py b/torchopt/_src/alias.py index 8c3fb571..f27f5c3a 100644 --- a/torchopt/_src/alias.py +++ b/torchopt/_src/alias.py @@ -30,6 +30,8 @@ # limitations under the License. # ============================================================================== +# pylint: disable=invalid-name + from typing import Optional import jax @@ -39,20 +41,20 @@ def _scale_by_lr(lr: ScalarOrSchedule, flip_sign=True): - m = -1 if flip_sign else 1 + sign = -1 if flip_sign else 1 if callable(lr): def schedule_wrapper(count): - def f(scaled_lr): - return m * scaled_lr + return sign * scaled_lr return jax.tree_map(f, lr(count)) # type: ignore return transform.scale_by_schedule(schedule_wrapper) - return transform.scale(m * lr) + return transform.scale(sign * lr) +# pylint: disable=too-many-arguments def adam( lr: ScalarOrSchedule, b1: float = 0.9, @@ -60,45 +62,47 @@ def adam( eps: float = 1e-8, eps_root: float = 0.0, moment_requires_grad: bool = False, - use_accelerated_op: bool = False + use_accelerated_op: bool = False, ) -> base.GradientTransformation: - """The classic Adam optimizer. + """The functional Adam optimizer. - Adam is an SGD variant with learning rate adaptation. The `lr` - used for each weight is computed from estimates of first- and second-order - moments of the gradients (using suitable exponential moving averages). + Adam is an SGD variant with learning rate adaptation. 
The *learning rate* used for each weight + is computed from estimates of first- and second-order moments of the gradients (using suitable + exponential moving averages). References: - Kingma et al, 2014: https://arxiv.org/abs/1412.6980 + - Kingma et al, 2014: https://arxiv.org/abs/1412.6980 Args: - lr: - This is a fixed global scaling factor. - b1: - The exponential decay rate to track the first moment of past gradients. - b2: - The exponential decay rate to track the second moment of past gradients. + lr: This is a fixed global scaling factor. + b1: The exponential decay rate to track the first moment of past gradients. + b2: The exponential decay rate to track the second moment of past gradients. eps: - A small constant applied to denominator outside of the square root - (as in the Adam paper) to avoid dividing by zero when rescaling. - eps_root: (default `0`) - A small constant applied to denominator inside the square root (as - in RMSProp), to avoid dividing by zero when rescaling. This is needed - for example when computing (meta-)gradients through Adam. - moment_requires_grad: (default `False`) - If True the momentums will be created with flag `requires_grad=True`, - this flag is often used in Meta Learning algorithms. - use_accelerated_op: (default `False`) - If True use our implemented fused operator. + A small constant applied to denominator outside of the square root (as in the Adam + paper) to avoid dividing by zero when rescaling. + eps_root: (default: :data:`0.0`) + A small constant applied to denominator inside the square root (as in RMSProp), to avoid + dividing by zero when rescaling. This is needed for example when computing + (meta-)gradients through Adam. + moment_requires_grad: (default: :data:`False`) + If :data:`True` the momentums will be created with flag ``requires_grad=True``, this + flag is often used in Meta Learning algorithms. + use_accelerated_op: (default: :data:`False`) + If :data:`True` use our implemented fused operator. Returns: - The corresponding `GradientTransformation` instance. + The corresponding :class:`GradientTransformation` instance. """ - - adam_inst = transform.scale_by_accelerated_adam if use_accelerated_op else transform.scale_by_adam + adam_inst = ( + transform.scale_by_accelerated_adam if use_accelerated_op else transform.scale_by_adam + ) return combine.chain( adam_inst( - b1=b1, b2=b2, eps=eps, eps_root=eps_root, moment_requires_grad=moment_requires_grad + b1=b1, + b2=b2, + eps=eps, + eps_root=eps_root, + moment_requires_grad=moment_requires_grad, ), _scale_by_lr(lr), ) @@ -110,96 +114,101 @@ def sgd( nesterov: bool = False, moment_requires_grad: bool = False, ) -> base.GradientTransformation: - """A canonical Stochastic Gradient Descent optimizer. + """The functional version of the canonical Stochastic Gradient Descent optimizer. - This implements stochastic gradient descent. It also includes support for - momentum, and nesterov acceleration, as these are standard practice when - using stochastic gradient descent to train deep neural networks. + This implements stochastic gradient descent. It also includes support for momentum, and nesterov + acceleration, as these are standard practice when using stochastic gradient descent to train + deep neural networks. References: - Sutskever et al, 2013: http://proceedings.mlr.press/v28/sutskever13.pdf + - Sutskever et al, 2013: http://proceedings.mlr.press/v28/sutskever13.pdf Args: - lr: - This is a fixed global scaling factor. 
- momentum: (default `None`) - The `decay` rate used by the momentum term, when it is set to `None`, - then momentum is not used at all. - nesterov (default `False`): - Whether nesterov momentum is used. - moment_requires_grad: (default `False`) - If True the momentums will be created with flag `requires_grad=True`, - this flag is often used in Meta-Learning algorithms. + lr: This is a fixed global scaling factor. + momentum: (default: :data:`None`) + The ``decay`` rate used by the momentum term, when it is set to :data:`None`, then + momentum is not used at all. + nesterov: (default: :data:`False`) + Whether the nesterov momentum is used. + moment_requires_grad: (default: :data:`False`) + If :data:`True` the momentums will be created with flag ``requires_grad=True``, this + flag is often used in Meta-Learning algorithms. Returns: - A `GradientTransformation` instance. + A :class:`GradientTransformation` instance. """ - return combine.chain( ( transform.trace( - decay=momentum, nesterov=nesterov, moment_requires_grad=moment_requires_grad - ) if momentum is not None else base.identity() - ), _scale_by_lr(lr) + decay=momentum, + nesterov=nesterov, + moment_requires_grad=moment_requires_grad, + ) + if momentum is not None + else base.identity() + ), + _scale_by_lr(lr), ) +# pylint: disable=too-many-arguments def rmsprop( lr: ScalarOrSchedule, decay: float = 0.9, eps: float = 1e-8, - initial_scale: float = 0., + initial_scale: float = 0.0, centered: bool = False, momentum: Optional[float] = None, - nesterov: bool = False + nesterov: bool = False, ) -> base.GradientTransformation: - """A flexible RMSProp optimizer. - RMSProp is an SGD variant with learning rate adaptation. The `learning_rate` - used for each weight is scaled by a suitable estimate of the magnitude of the - gradients on previous steps. Several variants of RMSProp can be found - in the literature. This alias provides an easy to configure RMSProp - optimizer that can be used to switch between several of these variants. + """The functional version of the RMSProp optimizer. + + RMSProp is an SGD variant with learning rate adaptation. The *learning rate* used for each + weight is scaled by a suitable estimate of the magnitude of the gradients on previous steps. + Several variants of RMSProp can be found in the literature. This alias provides an easy to + configure RMSProp optimizer that can be used to switch between several of these variants. References: - Tieleman and Hinton, 2012: http://www.cs.toronto.edu/~hinton/coursera/lecture6/lec6.pdf - Graves, 2013: https://arxiv.org/abs/1308.0850 + - Tieleman and Hinton, 2012: http://www.cs.toronto.edu/~hinton/coursera/lecture6/lec6.pdf + - Graves, 2013: https://arxiv.org/abs/1308.0850 Args: - learning_rate: - This is a fixed global scaling factor. - decay: - The decay used to track the magnitude of previous gradients. - eps: - A small numerical constant to avoid dividing by zero when rescaling. - initial_scale: (default `0.`) - Initialization of accumulators tracking the magnitude of previous - updates. PyTorch uses `0`, TF1 uses `1`. When reproducing results - from a paper, verify the value used by the authors. - centered: (default `False`) - Whether the second moment or the variance of the past gradients is - used to rescale the latest gradients. - momentum: (default `None`) - The `decay` rate used by the momentum term, when it is set to `None`, - then momentum is not used at all. - nesterov (default `False`): - Whether nesterov momentum is used. 
+ lr: This is a fixed global scaling factor. + decay: The decay used to track the magnitude of previous gradients. + eps: A small numerical constant to avoid dividing by zero when rescaling. + initial_scale: (default: :data:`0.0`) + Initialization of accumulators tracking the magnitude of previous updates. PyTorch uses + :data:`0.0`, TensorFlow 1.x uses :data:`1.0`. When reproducing results from a paper, + verify the value used by the authors. + centered: (default: :data:`False`) + Whether the second moment or the variance of the past gradients is used to rescale the + latest gradients. + momentum: (default: :data:`None`) + The ``decay`` rate used by the momentum term, when it is set to :data:`None`, then + momentum is not used at all. + nesterov: (default: :data:`False`) + Whether the nesterov momentum is used. Returns: - The corresponding `GradientTransformation` instance. + The corresponding :class:`GradientTransformation` instance. """ - if centered: return combine.chain( transform.scale_by_stddev(decay=decay, eps=eps, initial_scale=initial_scale), - _scale_by_lr(lr), ( + _scale_by_lr(lr), + ( transform.trace(decay=momentum, nesterov=nesterov) - if momentum is not None else base.identity() - ) + if momentum is not None + else base.identity() + ), ) + return combine.chain( - transform.scale_by_rms(decay=decay, eps=eps, initial_scale=initial_scale), _scale_by_lr(lr), + transform.scale_by_rms(decay=decay, eps=eps, initial_scale=initial_scale), + _scale_by_lr(lr), ( transform.trace(decay=momentum, nesterov=nesterov) - if momentum is not None else base.identity() - ) + if momentum is not None + else base.identity() + ), ) diff --git a/torchopt/_src/base.py b/torchopt/_src/base.py index 24d3c8a0..d725d607 100644 --- a/torchopt/_src/base.py +++ b/torchopt/_src/base.py @@ -35,27 +35,27 @@ from typing_extensions import Protocol -from torchopt._src import typing +from torchopt._src.typing import Numeric, TensorTree -OptState = typing.TensorTree # States are arbitrary nests of `torch.Tensor`. +OptState = TensorTree # States are arbitrary nests of `torch.Tensor`. # Parameters are arbitrary nests of `torch.Tensor`. -Params = typing.TensorTree +Params = TensorTree Updates = Params # Gradient updates are of the same type as parameters. -Schedule = Callable[[typing.Numeric], typing.Numeric] +Schedule = Callable[[Numeric], Numeric] class EmptyState(NamedTuple): """An empty state for the simplest stateless transformations.""" -class TransformInitFn(Protocol): - """A callable type for the `init` step of a `GradientTransformation`. +class TransformInitFn(Protocol): # pylint: disable=too-few-public-methods + """A callable type for the :func:`init` step of a :class:`GradientTransformation`. - The `init` step takes a tree of `params` and uses these to construct an - arbitrary structured initial `state` for the gradient transformation. This - may hold statistics of the past updates or any other non static information. + The :func:`init` step takes a tree of ``params`` and uses these to construct an arbitrary + structured initial ``state`` for the gradient transformation. This may hold statistics of the + past updates or any other non static information. """ @abstractmethod @@ -71,64 +71,57 @@ def __call__(self, params: Params) -> OptState: """ -class TransformUpdateFn(Protocol): - """A callable type for the `update` step of a `GradientTransformation`. 
+class TransformUpdateFn(Protocol):  # pylint: disable=too-few-public-methods
+    """A callable type for the :func:`update` step of a :class:`GradientTransformation`.
 
-    The `update` step takes a tree of candidate parameter `updates` (e.g. their
-    gradient with respect to some loss), an arbitrary structured `state`, and the
-    current `params` of the model being optimized. The `params` argument is
-    optional, it must however be provided when using transformations that require
-    access to the current values of the parameters.
+    The :func:`update` step takes a tree of candidate parameter ``updates`` (e.g. their gradient
+    with respect to some loss) and an arbitrary structured ``state``, and returns the transformed
+    updates together with the new state. The optional ``inplace`` flag controls whether the
+    updates and state are modified in place.
     """
 
     @abstractmethod
-    def __call__(self,
-                 updates: Updates,
-                 state: OptState,
-                 inplace: bool = True) -> Tuple[Updates, OptState]:
+    def __call__(
+        self, updates: Updates, state: OptState, inplace: bool = True
+    ) -> Tuple[Updates, OptState]:
         """The :func:`update` function.
 
         Args:
-            updates:
-                A tree of candidate updates.
-            state:
-                The state of the gradient transformation.
+            updates: A tree of candidate updates.
+            state: The state of the gradient transformation.
             inplace: (optional)
-                If true, modify updates and state using inplace operations.
+                If :data:`True`, modify updates and state using inplace operations.
 
         Returns:
-            The transformed updates, and the updated state.
+            The transformed ``updates``, and the updated ``state``.
         """
 
 
 class GradientTransformation(NamedTuple):
     """A pair of pure functions implementing a gradient transformation.
 
-    TorchOpt optimizers are all implemented as _gradient transformations_ like
-    Optax. A gradient transformation is defined to be a pair of pure functions,
-    which are combined together in a `NamedTuple` so that they can be referred
-    to by name.
+    TorchOpt optimizers are all implemented as *gradient transformations* like Optax. A gradient
+    transformation is defined to be a pair of pure functions, which are combined together in a
+    :class:`NamedTuple` so that they can be referred to by name.
 
-    Since gradient transformations do not contain any internal state, all stateful
-    optimizer properties (such as the current step count when using optimizer
-    schedules, or momentum values) are passed through gradient transformations by
-    using the optimizer _state_ pytree. Each time a gradient transformation is
-    applied, the state is computed and returned, ready to be passed to the next
-    call to the gradient transformation.
+    Since gradient transformations do not contain any internal state, all stateful optimizer
+    properties (such as the current step count when using optimizer schedules, or momentum values)
+    are passed through gradient transformations by using the optimizer *state* ``pytree``. Each time
+    a gradient transformation is applied, the state is computed and returned, ready to be passed to
+    the next call to the gradient transformation.
 
     Attributes:
         init:
-            A pure function which, when called with an example instance of the
-            parameters whose gradients will be transformed, returns a pytree
-            containing the initial value for the optimizer state.
-        update:
-            A pure function which takes as input a pytree of updates (with the
-            same tree structure as the original params pytree passed to init),
-            the previous optimizer state (which may have been initialized using
-            the init function), and optionally the inplace flag. The update
-            function then returns the computed gradient updates, and a updates
-            optimizer state. If the inplace flag is true, the output results are
-            the same instance as the input.
+            A pure function which, when called with an example instance of the parameters whose
+            gradients will be transformed, returns a ``pytree`` containing the initial value for the
+            optimizer state.
+        update:
+            A pure function which takes as input a pytree of updates (with the same tree structure
+            as the original params ``pytree`` passed to :attr:`init`), the previous optimizer state
+            (which may have been initialized using the :attr:`init` function), and optionally the
+            ``inplace`` flag. The :attr:`update` function then returns the computed gradient
+            updates, and an updated optimizer state. If the ``inplace`` flag is :data:`True`, the
+            output results are the same instance as the input.
     """
 
     init: TransformInitFn
@@ -141,13 +134,13 @@ def identity() -> GradientTransformation:
     This function passes through the *gradient updates* unchanged.
 
     Returns:
-        An (init_fn, update_fn) tuple.
+        An ``(init_fn, update_fn)`` tuple.
     """
 
     def init_fn(_):
         return EmptyState()
 
-    def update_fn(updates, state, inplace=False):
+    def update_fn(updates, state, inplace=False):  # pylint: disable=unused-argument
         return updates, state
 
     return GradientTransformation(init_fn, update_fn)
diff --git a/torchopt/_src/clip.py b/torchopt/_src/clip.py
index 52e164f0..58500ee7 100644
--- a/torchopt/_src/clip.py
+++ b/torchopt/_src/clip.py
@@ -27,18 +27,15 @@
 
 
 def clip_grad_norm(
-    max_norm: float,
-    norm_type: float = 2.,
-    error_if_nonfinite: bool = False
+    max_norm: float, norm_type: float = 2.0, error_if_nonfinite: bool = False
 ) -> base.GradientTransformation:
     """Clips gradient norm of an iterable of parameters.
 
     Args:
-        max_delta:
-            The maximum absolute value for each element in the update.
+        max_norm: The maximum allowed norm of the gradients.
+        norm_type: The type of the used p-norm. (default: :data:`2.0`)
+        error_if_nonfinite: If :data:`True`, an error is raised if the total norm of the gradients
+            is ``nan``, ``inf``, or ``-inf``. (default: :data:`False`)
 
     Returns:
-        An (init_fn, update_fn) tuple.
+        An ``(init_fn, update_fn)`` tuple.
     """
 
     def init_fn(params):
@@ -51,7 +48,7 @@ def update_fn(updates, state, inplace=True):
             if g is not None:
                 available_updates.append(g)
         if len(available_updates) == 0:
-            return torch.tensor(0.)
+            return torch.tensor(0.0)
         device = available_updates[0].device
         with torch.no_grad():
             if norm_type == inf:
@@ -60,24 +57,24 @@
             else:
                 total_norm = torch.norm(
                     torch.stack([torch.norm(p, norm_type).to(device) for p in available_updates]),
-                    norm_type
+                    norm_type,
                 )
             if error_if_nonfinite and torch.logical_or(total_norm.isnan(), total_norm.isinf()):
                 raise RuntimeError(
-                    f'The total norm of order {norm_type} for gradients from '
-                    '`parameters` is non-finite, so it cannot be clipped. To disable '
-                    'this error and scale the gradients by the non-finite norm anyway, '
-                    'set `error_if_nonfinite=False`'
+                    f'The total norm of order {norm_type} for gradients from `parameters` is '
+                    f'non-finite, so it cannot be clipped. To disable this error and scale the '
+                    f'gradients by the non-finite norm anyway, set `error_if_nonfinite=False`'
                 )
         clip_coef = max_norm / (float(total_norm) + 1e-6)
-        # Note: multiplying by the clamped coef is redundant when the coef is clamped to 1, but doing so
-        # avoids a `if clip_coef < 1:` conditional which can require a CPU <=> device synchronization
-        # when the gradients do not reside in CPU memory.
-        clip_coef_clamped = min(clip_coef, 1.)
+        # Note: multiplying by the clamped coef is redundant when the coef is clamped to 1, but
+        # doing so avoids a `if clip_coef < 1:` conditional which can require a CPU <=> device
+        # synchronization when the gradients do not reside in CPU memory.
+        clip_coef_clamped = min(clip_coef, 1.0)
         if inplace:
 
             def f(g):
                 return g.mul_(clip_coef_clamped) if g is not None else None
+
         else:
 
             def f(g):
diff --git a/torchopt/_src/combine.py b/torchopt/_src/combine.py
index 081421c9..c7b4b237 100644
--- a/torchopt/_src/combine.py
+++ b/torchopt/_src/combine.py
@@ -36,19 +36,17 @@
 def chain(*args: base.GradientTransformation) -> base.GradientTransformation:
     """Applies a list of chainable update transformations.
 
-    Given a sequence of chainable transforms, `chain` returns an `init_fn`
-    that constructs a `state` by concatenating the states of the individual
-    transforms, and returns an `update_fn` which chains the update transformations
-    feeding the appropriate state to each.
+    Given a sequence of chainable transforms, :func:`chain` returns an :func:`init_fn` that
+    constructs a ``state`` by concatenating the states of the individual transforms, and returns an
+    :func:`update_fn` which chains the update transformations feeding the appropriate state to each.
 
     Args:
         *args:
-            A sequence of chainable (init_fn, update_fn) tuples.
+            A sequence of chainable ``(init_fn, update_fn)`` tuples.
 
     Returns:
-        A single (init_fn, update_fn) tuple.
+        A single ``(init_fn, update_fn)`` tuple.
     """
-
     init_fns, update_fns = zip(*args)
 
     def init_fn(params):
@@ -57,11 +55,11 @@ def init_fn(params):
     def update_fn(updates, state, inplace=True):
         if len(update_fns) != len(state):
             raise ValueError(
-                'The number of updates and states has to be the same in '
-                'chain! Make sure you have called init first!'
+                'The number of updates and states has to be the same in chain! Make sure you have '
+                'called init first!'
             )
         new_state = []
-        for s, fn in zip(state, update_fns):
+        for s, fn in zip(state, update_fns):  # pylint: disable=invalid-name
             updates, new_s = fn(updates, s, inplace)
             new_state.append(new_s)
         return updates, tuple(new_state)
diff --git a/torchopt/_src/hook.py b/torchopt/_src/hook.py
index 77ae1bd0..a0081991 100644
--- a/torchopt/_src/hook.py
+++ b/torchopt/_src/hook.py
@@ -20,6 +20,7 @@
 
 
 def zero_nan_hook(g: torch.Tensor) -> torch.Tensor:
+    """A zero-NaN hook that replaces NaN gradients with zeros."""
     return torch.where(torch.isnan(g), torch.zeros_like(g), g)
 
 
@@ -29,14 +30,13 @@ def register_hook(hook) -> GradientTransformation:
 
     This function passes through the *gradient updates* unchanged.
 
     Returns:
-        An (init_fn, update_fn) tuple.
+        An ``(init_fn, update_fn)`` tuple.
""" def init_fn(_): return EmptyState() - def update_fn(updates, state, inplace=False): - + def update_fn(updates, state, inplace=False): # pylint: disable=unused-argument def f(g): return g.register_hook(hook) if g is not None else None diff --git a/torchopt/_src/optimizer/adam.py b/torchopt/_src/optimizer/adam.py index 9e1552fe..ff861334 100644 --- a/torchopt/_src/optimizer/adam.py +++ b/torchopt/_src/optimizer/adam.py @@ -13,34 +13,51 @@ # limitations under the License. # ============================================================================== +from typing import Iterable + +import torch + from torchopt._src.alias import adam from torchopt._src.optimizer.base import Optimizer from torchopt._src.typing import ScalarOrSchedule class Adam(Optimizer): - """The classic Adam optimizer.""" + """The classic Adam optimizer. + See Also: + - The functional Adam optimizer: :func:`torchopt.adam`. + - The differentiable meta-Adam optimizer: :class:`torchopt.MetaAdam`. + """ + + # pylint: disable=too-many-arguments def __init__( self, - params, + params: Iterable[torch.Tensor], lr: ScalarOrSchedule, b1: float = 0.9, b2: float = 0.999, eps: float = 1e-8, eps_root: float = 0.0, - use_accelerated_op: bool = False + use_accelerated_op: bool = False, ): - """The `init` function. + r"""The :meth:`init` function. Args: - params (iterable): - An iterable of `torch.Tensor`s. Specifies what Tensors should be - optimized. - args: - Other arguments see `alias.adam`. + params (iterable of torch.Tensor): An iterable of :class:`torch.Tensor`\s. Specifies + what tensors should be optimized. + lr: This is a fixed global scaling factor. + b1: The exponential decay rate to track the first moment of past gradients. + b2: The exponential decay rate to track the second moment of past gradients. + eps: A small constant applied to denominator outside of the square root (as in the Adam + paper) to avoid dividing by zero when rescaling. + eps_root: (default: :data:`0.0`) + A small constant applied to denominator inside the square root (as in RMSProp), to + avoid dividing by zero when rescaling. This is needed for example when computing + (meta-)gradients through Adam. + use_accelerated_op: (default: :data:`False`) + If :data:`True` use our implemented fused operator. """ - super().__init__( params, adam( @@ -50,6 +67,6 @@ def __init__( eps=eps, eps_root=eps_root, moment_requires_grad=False, - use_accelerated_op=use_accelerated_op - ) + use_accelerated_op=use_accelerated_op, + ), ) diff --git a/torchopt/_src/optimizer/base.py b/torchopt/_src/optimizer/base.py index 82f5284b..428ba198 100644 --- a/torchopt/_src/optimizer/base.py +++ b/torchopt/_src/optimizer/base.py @@ -22,24 +22,21 @@ from torchopt._src.update import apply_updates -class Optimizer(object): - """A high-level base class that has the similar with `torch.optim.Optimizer`.""" +class Optimizer: + """A base class for classic optimizers that similar to :class:`torch.optim.Optimizer`.""" - def __init__(self, params: Iterable, impl: GradientTransformation): - """The `init` function. + def __init__(self, params: Iterable[torch.Tensor], impl: GradientTransformation): + r"""The :meth:`init` function. Args: - params (iterable): - An iterable of `torch.Tensor`s. Specifies what Tensors should be - optimized. - impl (GradientTransformation): - A low level optimizer function, it could be a optimizer function - provided by `alias.py` or a customized `chain` provided by - `combine.py`. 
- Note that use `MetaOptimizer(sgd())` or `MetaOptimizer(chain(sgd()))` - is equivalent to `SGD`. + params (iterable of torch.Tensor): An iterable of :class:`torch.Tensor`\s. Specifies + what tensors should be optimized. + impl (GradientTransformation): A low level optimizer function, it could be a optimizer + function provided by ``alias.py`` or a customized ``chain`` provided by + ``combine.py``. + Note that using ``Optimizer(sgd())`` or ``Optimizer(chain(sgd()))`` is equivalent to + :class:`torchopt.SGD`. """ - if not isinstance(params, list): params = list(params) self.impl = impl @@ -49,61 +46,53 @@ def __init__(self, params: Iterable, impl: GradientTransformation): self.add_param_group(params) def zero_grad(self, set_to_none: bool = False): - """Sets the gradients of all optimized `torch.Tensor`s to zero. + r"""Sets the gradients of all optimized :class:`torch.Tensor`\s to zero. - The behavior is similar to `torch.optim.Optimizer.zero_grad`. + The behavior is similar to :meth:`torch.optim.Optimizer.zero_grad`. Args: - set_to_none (bool): - Instead of setting to zero, set the grads to None. + set_to_none (bool): Instead of setting to zero, set the ``grads`` to :data:`None`. """ - for group in self.param_groups: if set_to_none: def f(p): p.grad = None - return None else: def f(p): if p.grad is None: - return None + return if p.grad.grad_fn is not None: p.grad.detach_() else: p.grad.requires_grad_(False) p.grad.zero_() - return None jax.tree_map(f, group) def state_dict(self): """Returns the state of the optimizer.""" - return self.state_groups def load_state_dict(self, state_dict): """Loads the optimizer state. Args: - state_dict (dict): - Optimizer state. Should be an object returned from a call to :meth:`state_dict`. + state_dict (dict): Optimizer state. Should be an object returned from a call to + :meth:`state_dict`. """ - self.state_groups = state_dict def step(self, closure=None): - """Performs a single optimization step (parameter update). + """Performs a single optimization step. - The behavior is similar to `torch.optim.Optimizer.step`. + The behavior is similar to :meth:`torch.optim.Optimizer.step`. Args: - closure (callable, optional): - A closure that reevaluates the model and returns the loss. + closure (callable, optional): A closure that reevaluates the model and returns the loss. """ - loss = None if closure is not None: with torch.enable_grad(): @@ -120,6 +109,7 @@ def f(p): return loss def add_param_group(self, params): + """Add a param group to the optimizer's :attr:`param_groups`.""" params, tree = jax.tree_flatten(params) params = tuple(params) self.param_groups.append(params) diff --git a/torchopt/_src/optimizer/meta/adam.py b/torchopt/_src/optimizer/meta/adam.py index d699b3b5..43d5f334 100644 --- a/torchopt/_src/optimizer/meta/adam.py +++ b/torchopt/_src/optimizer/meta/adam.py @@ -13,35 +13,53 @@ # limitations under the License. # ============================================================================== +import torch.nn as nn + from torchopt._src.alias import adam from torchopt._src.optimizer.meta.base import MetaOptimizer from torchopt._src.typing import ScalarOrSchedule class MetaAdam(MetaOptimizer): - """The classic Adam optimizer.""" + """The differentiable Adam optimizer. + + See Also: + - The functional Adam optimizer: :func:`torchopt.adam`. + - The classic Adam optimizer: :class:`torchopt.Adam`. 
+ """ + # pylint: disable=too-many-arguments def __init__( self, - net, + net: nn.Module, lr: ScalarOrSchedule, b1: float = 0.9, b2: float = 0.999, eps: float = 1e-8, eps_root: float = 0.0, moment_requires_grad: bool = True, - use_accelerated_op: bool = False + use_accelerated_op: bool = False, ): - """The `init` function. + """The :meth:`init` function. Args: - net (nn.Module): - A network whose parameters should be optimized. - args: - Other arguments see `alias.adam`, here we set `moment_requires_grad=True` - to make tensors like momentum be differentiable. + net (nn.Module): A network whose parameters should be optimized. + args: Other arguments see also :func:`torchopt.adam`, + lr: This is a fixed global scaling factor. + b1: The exponential decay rate to track the first moment of past gradients. + b2: The exponential decay rate to track the second moment of past gradients. + eps: A small constant applied to denominator outside of the square root (as in the Adam + paper) to avoid dividing by zero when rescaling. + eps_root: (default: :data:`0.0`) + A small constant applied to denominator inside the square root (as in RMSProp), to + avoid dividing by zero when rescaling. This is needed for example when computing + (meta-)gradients through Adam. + moment_requires_grad: (default: :data:`True`) + Here we set ``moment_requires_grad=True`` to make tensors like momentum be + differentiable. + use_accelerated_op: (default: :data:`False`) + If :data:`True` use our implemented fused operator. """ - super().__init__( net, adam( @@ -51,6 +69,6 @@ def __init__( eps=eps, eps_root=eps_root, moment_requires_grad=moment_requires_grad, - use_accelerated_op=use_accelerated_op - ) + use_accelerated_op=use_accelerated_op, + ), ) diff --git a/torchopt/_src/optimizer/meta/base.py b/torchopt/_src/optimizer/meta/base.py index 2d6bbd4b..ac54bbf7 100644 --- a/torchopt/_src/optimizer/meta/base.py +++ b/torchopt/_src/optimizer/meta/base.py @@ -21,23 +21,21 @@ from torchopt._src.update import apply_updates -class MetaOptimizer(object): - """A high-level optimizer base class for meta learning.""" +class MetaOptimizer: + """The base class for high-level differentiable optimizers.""" def __init__(self, net: nn.Module, impl: GradientTransformation): - """ + """The :meth:`init` function. + Args: - net (nn.Module): - A network whose parameters should be optimized. - impl (GradientTransformation): - A low level optimizer function, it could be a optimizer function - provided by `alias.py` or a customized `chain` provided by - `combine.py`. - Note that use `MetaOptimizer(sgd(moment_requires_grad=True))` - or `MetaOptimizer(chain(sgd(moment_requires_grad=True))) is - equivalent to `MetaSGD`. + net (torch.nn.Module): A network whose parameters should be optimized. + impl (GradientTransformation): A low level optimizer function, it could be a optimizer + function provided by ``alias.py`` or a customized ``chain`` provided by + ``combine.py``. + Note that using ``MetaOptimizer(sgd(moment_requires_grad=True))`` or + ``MetaOptimizer(chain(sgd(moment_requires_grad=True)))`` is equivalent to + :class:`torchopt.MetaSGD`. """ - self.impl = impl self.param_containers_groups = [] # type: ignore self.state_groups = [] # type: ignore @@ -47,15 +45,14 @@ def __init__(self, net: nn.Module, impl: GradientTransformation): def step(self, loss: torch.Tensor): """Compute the gradients of the loss to the network parameters and update network parameters. 
-        Graph of the derivative will be constructed, allowing to compute higher order derivative products.
-        We use the differentiable optimizer (pass argument inplace=False) to scale the gradients and update
-        the network parameters without modifying tensors in-place.
+        The graph of the derivative will be constructed, allowing the computation of higher-order
+        derivative products. We use the differentiable optimizer (pass argument ``inplace=False``)
+        to scale the gradients and update the network parameters without modifying tensors in-place.
 
         Args:
-            loss (torch.Tensor):
-                The loss that is used to compute the gradients to the network parameters.
-        """
-
+            loss (torch.Tensor): The loss that is used to compute the gradients with respect to the
+                network parameters.
+        """  # pylint: disable=line-too-long
         # step parameter only
         for idx, (state, param_containers) in enumerate(
             zip(self.state_groups, self.param_containers_groups)
@@ -71,6 +68,8 @@ def step(self, loss: torch.Tensor):
             container.update(unflatten_param)
 
     def add_param_group(self, net):
+        """Add a param group to the optimizer's :attr:`state_groups`."""
+        # pylint: disable=import-outside-toplevel,cyclic-import
         from torchopt._src.utils import _extract_container
 
         net_container = _extract_container(net, with_buffer=False)
@@ -83,12 +82,12 @@ def add_param_group(self, net):
     def state_dict(self):
         """Extract the references of the optimizer states.
 
-        Note that the states are references, so any in-place operations will
-        change the states inside `MetaOptimizer` at the same time.
+        Note that the states are references, so any in-place operations will change the states
+        inside :class:`MetaOptimizer` at the same time.
         """
-
         out_groups = tuple(group for group in self.state_groups)
         return out_groups
 
     def load_state_dict(self, state_dict):
+        """Load the references of the optimizer states."""
         self.state_groups = list(group for group in state_dict)
diff --git a/torchopt/_src/optimizer/meta/rmsprop.py b/torchopt/_src/optimizer/meta/rmsprop.py
index eb742b04..313acac1 100644
--- a/torchopt/_src/optimizer/meta/rmsprop.py
+++ b/torchopt/_src/optimizer/meta/rmsprop.py
@@ -13,7 +13,9 @@
 # limitations under the License.
 # ==============================================================================
 
-from typing import Union
+from typing import Optional
+
+import torch.nn as nn
 
 from torchopt._src.alias import rmsprop
 from torchopt._src.optimizer.meta.base import MetaOptimizer
@@ -21,29 +23,45 @@
 
 
 class MetaRMSProp(MetaOptimizer):
-    """The classic RMSProp optimizer."""
+    """The differentiable RMSProp optimizer.
+
+    See Also:
+        - The functional RMSProp optimizer: :func:`torchopt.rmsprop`.
+        - The classic RMSProp optimizer: :class:`torchopt.RMSProp`.
+    """
 
+    # pylint: disable=too-many-arguments
     def __init__(
         self,
-        net,
+        net: nn.Module,
         lr: ScalarOrSchedule,
         decay: float = 0.9,
         eps: float = 1e-8,
-        initial_scale: float = 0.,
+        initial_scale: float = 0.0,
         centered: bool = False,
-        momentum: Union[float, None] = None,
-        nesterov: bool = False
+        momentum: Optional[float] = None,
+        nesterov: bool = False,
     ):
-        """The `init` function.
+        """The :meth:`init` function.
 
         Args:
-            net (nn.Module):
-                A network whose parameters should be optimized.
-            args:
-                Other arguments see `alias.adam`, here we set `moment_requires_grad=True`
-                to make tensors like momentum be differentiable.
+            net (nn.Module): A network whose parameters should be optimized.
+            lr: This is a fixed global scaling factor.
+            decay: The decay used to track the magnitude of previous gradients.
+            eps: A small numerical constant to avoid dividing by zero when rescaling.
+            initial_scale: (default: :data:`0.0`)
+                Initialization of accumulators tracking the magnitude of previous updates. PyTorch
+                uses :data:`0.0`, TensorFlow 1.x uses :data:`1.0`. When reproducing results from a
+                paper, verify the value used by the authors.
+            centered: (default: :data:`False`)
+                Whether the second moment or the variance of the past gradients is
+                used to rescale the latest gradients.
+            momentum: (default: :data:`None`)
+                The ``decay`` rate used by the momentum term, when it is set to :data:`None`, then
+                momentum is not used at all.
+            nesterov: (default: :data:`False`)
+                Whether the nesterov momentum is used.
         """
-
         super().__init__(
             net,
             rmsprop(
@@ -53,6 +71,6 @@ def __init__(
                 initial_scale=initial_scale,
                 centered=centered,
                 momentum=momentum,
-                nesterov=nesterov
-            )
+                nesterov=nesterov,
+            ),
         )
diff --git a/torchopt/_src/optimizer/meta/sgd.py b/torchopt/_src/optimizer/meta/sgd.py
index bbd57b46..f1686fc7 100644
--- a/torchopt/_src/optimizer/meta/sgd.py
+++ b/torchopt/_src/optimizer/meta/sgd.py
@@ -13,7 +13,7 @@
 # limitations under the License.
 # ==============================================================================
 
-from typing import Union
+from typing import Optional
 
 import torch.nn as nn
 
@@ -23,32 +23,39 @@
 
 
 class MetaSGD(MetaOptimizer):
-    """A canonical Stochastic Gradient Descent optimizer."""
+    """The differentiable Stochastic Gradient Descent optimizer.
 
+    See Also:
+        - The functional SGD optimizer: :func:`torchopt.sgd`.
+        - The classic SGD optimizer: :class:`torchopt.SGD`.
+    """
+
+    # pylint: disable=too-many-arguments
     def __init__(
         self,
         net: nn.Module,
         lr: ScalarOrSchedule,
-        momentum: Union[float, None] = None,
+        momentum: Optional[float] = None,
         nesterov: bool = False,
-        moment_requires_grad: bool = True
+        moment_requires_grad: bool = True,
     ):
-        """The `init` function.
+        """The :meth:`init` function.
 
         Args:
-            net (nn.Module):
-                A network whose parameters should be optimized.
-            args:
-                Other arguments see `alias.sgd`, here we set `moment_requires_grad=True`
-                to make tensors like momentum be differentiable.
+            net: A network whose parameters should be optimized.
+            lr: This is a fixed global scaling factor.
+            momentum: The ``decay`` rate used by the momentum term, when it is set to :data:`None`,
+                then momentum is not used at all.
+            nesterov: Whether the nesterov momentum is used.
+            moment_requires_grad: Here we set ``moment_requires_grad=True`` to make tensors like
+                momentum be differentiable.
         """
-
         super().__init__(
             net,
             sgd(
                 lr=lr,
                 momentum=momentum,
                 nesterov=nesterov,
-                moment_requires_grad=moment_requires_grad
-            )
+                moment_requires_grad=moment_requires_grad,
+            ),
         )
diff --git a/torchopt/_src/optimizer/rmsprop.py b/torchopt/_src/optimizer/rmsprop.py
index d1aaf278..c264ab06 100644
--- a/torchopt/_src/optimizer/rmsprop.py
+++ b/torchopt/_src/optimizer/rmsprop.py
@@ -13,7 +13,9 @@
 # limitations under the License.
 # ==============================================================================
 
-from typing import Union
+from typing import Iterable, Optional
+
+import torch
 
 from torchopt._src.alias import rmsprop
 from torchopt._src.optimizer.base import Optimizer
@@ -21,29 +23,46 @@
 
 
 class RMSProp(Optimizer):
-    """An RMSProp optimizer."""
+    """The classic RMSProp optimizer.
+
+    See Also:
+        - The functional RMSProp optimizer: :func:`torchopt.rmsprop`.
+        - The differentiable meta-RMSProp optimizer: :class:`torchopt.MetaRMSProp`.
+ """ + # pylint: disable=too-many-arguments def __init__( self, - params, + params: Iterable[torch.Tensor], lr: ScalarOrSchedule, decay: float = 0.9, eps: float = 1e-8, - initial_scale: float = 0., + initial_scale: float = 0.0, centered: bool = False, - momentum: Union[float, None] = None, - nesterov: bool = False + momentum: Optional[float] = None, + nesterov: bool = False, ): - """The `init` function. + r"""The `init` function. Args: - params (iterable): - An iterable of `torch.Tensor`s. Specifies what Tensors should be - optimized. - args: - Other arguments see `alias.sgd`. + params (iterable of torch.Tensor): An iterable of :class:`torch.Tensor`\s. Specifies + what Tensors should be optimized. + lr: This is a fixed global scaling factor. + decay: The decay used to track the magnitude of previous gradients. + eps: A small numerical constant to avoid dividing by zero when rescaling. + initial_scale: (default: :data:`0.0`) + Initialization of accumulators tracking the magnitude of previous updates. PyTorch + uses :data:`0.0`, TensorFlow 1.x uses :data:`1.0`. When reproducing results from a + paper, verify the value used by the authors. + centered: (default: :data:`False`) + Whether the second moment or the variance of the past gradients is used to rescale + the latest gradients. + momentum: (default: :data:`None`) + The ``decay`` rate used by the momentum term, when it is set to :data:`None`, then + momentum is not used at all. + nesterov: (default: :data:`False`) + Whether the nesterov momentum is used. """ - super().__init__( params, rmsprop( @@ -53,6 +72,6 @@ def __init__( initial_scale=initial_scale, centered=centered, momentum=momentum, - nesterov=nesterov - ) + nesterov=nesterov, + ), ) diff --git a/torchopt/_src/optimizer/sgd.py b/torchopt/_src/optimizer/sgd.py index 9e3e1c98..51cc63a6 100644 --- a/torchopt/_src/optimizer/sgd.py +++ b/torchopt/_src/optimizer/sgd.py @@ -13,7 +13,9 @@ # limitations under the License. # ============================================================================== -from typing import Union +from typing import Iterable, Optional + +import torch from torchopt._src.alias import sgd from torchopt._src.optimizer.base import Optimizer @@ -21,25 +23,33 @@ class SGD(Optimizer): - """The classic SGD optimizer.""" + """The classic SGD optimizer. + + See Also: + - The functional SGD optimizer: :func:`torchopt.sgd`. + - The differentiable meta-SGD optimizer: :class:`torchopt.MetaSGD`. + """ def __init__( self, - params, + params: Iterable[torch.Tensor], lr: ScalarOrSchedule, - momentum: Union[float, None] = None, - nesterov: bool = False + momentum: Optional[float] = None, + nesterov: bool = False, ): - """The `init` function. + r"""The :meth:`init` function. Args: - params (iterable): - An iterable of `torch.Tensor`s. Specifies what Tensors should be - optimized. - args: - Other arguments see `alias.adam`. + params (iterable of torch.Tensor): An iterable of :class:`torch.Tensor`\s. Specifies + what tensors should be optimized. + lr: This is a fixed global scaling factor. + momentum: (default: :data:`None`) + The ``decay`` rate used by the momentum term, when it is set to :data:`None`, then + momentum is not used at all. + nesterov: (default: :data:`False`) + Whether the nesterov momentum is used. 
""" - super().__init__( - params, sgd(lr=lr, momentum=momentum, nesterov=nesterov, moment_requires_grad=False) + params, + sgd(lr=lr, momentum=momentum, nesterov=nesterov, moment_requires_grad=False), ) diff --git a/torchopt/_src/schedule.py b/torchopt/_src/schedule.py index 864afb69..d20eb18e 100644 --- a/torchopt/_src/schedule.py +++ b/torchopt/_src/schedule.py @@ -34,56 +34,52 @@ import numpy as np from absl import logging -from torchopt._src import base, typing +from torchopt._src import base +from torchopt._src.typing import Scalar def polynomial_schedule( - init_value: typing.Scalar, - end_value: typing.Scalar, - power: typing.Scalar, + init_value: Scalar, + end_value: Scalar, + power: Scalar, transition_steps: int, - transition_begin: int = 0 + transition_begin: int = 0, ) -> base.Schedule: """Constructs a schedule with polynomial transition from init to end value. Args: - init_value: - Initial value for the scalar to be annealed. - end_value: - End value of the scalar to be annealed. - power: - The power of the polynomial used to transition from init to end. + init_value: Initial value for the scalar to be annealed. + end_value: End value of the scalar to be annealed. + power: The power of the polynomial used to transition from ``init`` to ``end``. transition_steps: - Number of steps over which annealing takes place, the scalar starts - changing at `transition_begin` steps and completes the transition - by `transition_begin + transition_steps` steps. - If `transition_steps <= 0`, then the entire annealing process is - disabled and the value is held fixed at `init_value`. + Number of steps over which annealing takes place, the scalar starts changing at + ``transition_begin`` steps and completes the transition by + ``transition_begin + transition_steps`` steps. + If ``transition_steps <= 0``, then the entire annealing process is disabled and the + value is held fixed at ``init_value``. transition_begin: - Must be positive. After how many steps to start annealing (before - this many steps the scalar value is held fixed at `init_value`). + Must be *positive*. After how many steps to start annealing (before this many steps the + scalar value is held fixed at ``init_value``). Returns: schedule: A function that maps step counts to values. """ - if transition_steps <= 0: logging.info( - 'A polynomial schedule was set with a non-positive `transition_steps` ' - 'value; this results in a constant schedule with value `init_value`.' + 'A polynomial schedule was set with a non-positive `transition_steps` value; this ' + 'results in a constant schedule with value `init_value`.' ) return lambda count: init_value if transition_begin < 0: logging.info( - 'An exponential schedule was set with a negative `transition_begin` ' - 'value; this will result in `transition_begin` falling back to `0`.' + 'An exponential schedule was set with a negative `transition_begin` value; this will ' + 'result in `transition_begin` falling back to `0`.' ) transition_begin = 0 def schedule(count): - def impl(count): count = np.clip(count - transition_begin, 0, transition_steps) frac = 1 - count / transition_steps @@ -96,16 +92,16 @@ def impl(count): # Alias polynomial schedule to linear schedule for convenience. 
def linear_schedule( - init_value: typing.Scalar, - end_value: typing.Scalar, + init_value: Scalar, + end_value: Scalar, transition_steps: int, - transition_begin: int = 0 + transition_begin: int = 0, ) -> base.Schedule: - + """Alias polynomial schedule to linear schedule for convenience.""" return polynomial_schedule( init_value=init_value, end_value=end_value, power=1, transition_steps=transition_steps, - transition_begin=transition_begin + transition_begin=transition_begin, ) diff --git a/torchopt/_src/transform.py b/torchopt/_src/transform.py index af123a98..a04d49b5 100644 --- a/torchopt/_src/transform.py +++ b/torchopt/_src/transform.py @@ -30,19 +30,22 @@ # limitations under the License. # ============================================================================== -from typing import List, NamedTuple, Tuple, Union +# pylint: disable=invalid-name + +from typing import NamedTuple, Tuple import jax import torch from torchopt._src import base -from torchopt._src.typing import ScalarOrSchedule, Schedule +from torchopt._src.typing import Schedule ScaleState = base.EmptyState def inc_count(updates, count: Tuple[int]) -> Tuple[int]: + """Increments int counter by one.""" def f(c, g): return c + 1 if g is not None else c @@ -51,14 +54,13 @@ def f(c, g): def scale(step_size: float) -> base.GradientTransformation: - """Scale updates by some fixed scalar `step_size`. + """Scale updates by some fixed scalar ``step_size``. Args: - step_size: - A scalar corresponding to a fixed scaling factor for updates. + step_size: A scalar corresponding to a fixed scaling factor for updates. Returns: - An (init_fn, update_fn) tuple. + An ``(init_fn, update_fn)`` tuple. """ def init_fn(params): @@ -70,6 +72,7 @@ def update_fn(updates, state, inplace=True): def f(g): return g.mul_(step_size) if g is not None else None + else: def f(g): @@ -88,15 +91,15 @@ class ScaleByScheduleState(NamedTuple): def scale_by_schedule(step_size_fn: Schedule) -> base.GradientTransformation: - """Scale updates using a custom schedule for the `step_size`. + """Scale updates using a custom schedule for the ``step_size``. Args: step_size_fn: - A function that takes an update count as input and proposes the - step_size to multiply the updates by. + A function that takes an update count as input and proposes the ``step_size`` to + multiply the updates by. Returns: - An (init_fn, update_fn) tuple. + An ``(init_fn, update_fn)`` tuple. """ def init_fn(params): @@ -114,12 +117,12 @@ def update_fn(updates, state, inplace=True): def _update_moment(updates, moments, decay, order, inplace=True): - """Compute the exponential moving average of the `order`-th moment.""" - + """Compute the exponential moving average of the ``order``-th moment.""" if inplace: def f(g, t): return t.mul_(decay).add_(g**order, alpha=1 - decay) if g is not None else t + else: def f(g, t): @@ -130,11 +133,11 @@ def f(g, t): def _update_moment_per_elem_norm(updates, moments, decay, order, inplace=True): """Compute the EMA of the `order`-th moment of the element-wise norm.""" - if inplace: def f(g, t): return t.mul_(decay).add_(g**order, alpha=1 - decay) if g is not None else t + else: def f(g, t): @@ -153,11 +156,11 @@ class ScaleByAdamState(NamedTuple): def _bias_correction(moment, decay, count, inplace=True): """Perform bias correction. 
This becomes a no-op as count goes to infinity.""" - if inplace: def f(t, c): return t.div_(1 - decay**c) + else: def f(t, c): @@ -197,11 +200,11 @@ def scale_by_adam( def init_fn(params): mu = jax.tree_map( # First moment - lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), - params) + lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), params + ) nu = jax.tree_map( # Second moment - lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), - params) + lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), params + ) return ScaleByAdamState(count=tuple(0 for _ in range(len(mu))), mu=tuple(mu), nu=tuple(nu)) def update_fn(updates, state, inplace=True): @@ -214,6 +217,7 @@ def update_fn(updates, state, inplace=True): def f(g, m, v): return m.div_(torch.sqrt_(v.add_(eps_root)).add_(eps)) if g is not None else None + else: def f(g, m, v): @@ -255,16 +259,15 @@ def scale_by_accelerated_adam( Returns: An (init_fn, update_fn) tuple. """ - - from .accelerated_op import AdamOp + from torchopt._src.accelerated_op import AdamOp # pylint: disable=import-outside-toplevel def init_fn(params): mu = jax.tree_map( # First moment - lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), - params) + lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), params + ) nu = jax.tree_map( # Second moment - lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), - params) + lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), params + ) return ScaleByAdamState(count=tuple(0 for _ in range(len(params))), mu=mu, nu=nu) def update_fn(updates, state, inplace=True): @@ -313,13 +316,15 @@ def trace( """ def init_fn(params): - if decay == 0.: + if decay == 0.0: return TraceState(trace=()) - else: - return TraceState( - trace=jax. - tree_map(lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), params) + + return TraceState( + trace=jax.tree_map( + lambda t: torch.zeros_like(t, requires_grad=moment_requires_grad), + params, ) + ) def update_fn(updates, state, inplace=True): if nesterov: @@ -369,9 +374,7 @@ class ScaleByRmsState(NamedTuple): def scale_by_rms( - decay: float = 0.9, - eps: float = 1e-8, - initial_scale: float = 0. + decay: float = 0.9, eps: float = 1e-8, initial_scale: float = 0.0 ) -> base.GradientTransformation: """Rescale updates by the root of the exp. moving avg of the square. @@ -400,6 +403,7 @@ def update_fn(updates, state, inplace=True): def f(g, n): return g.mul_(torch.rsqrt(n.add(eps))) + else: def f(g, n): @@ -426,9 +430,7 @@ class ScaleByRStdDevState(NamedTuple): def scale_by_stddev( - decay: float = 0.9, - eps: float = 1e-8, - initial_scale: float = 0. + decay: float = 0.9, eps: float = 1e-8, initial_scale: float = 0.0 ) -> base.GradientTransformation: """Rescale updates by the root of the centered exp. moving average of squares. 
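The transformations above are the building blocks that the `alias` optimizers are assembled from. A hedged sketch of driving them by hand, assuming `chain` from `torchopt._src.combine` and the internal `torchopt._src.transform` module as laid out in this diff (an Adam-style update is `scale_by_adam` followed by scaling with the negative learning rate):

```python
import torch

from torchopt._src import combine, transform

# Compose an Adam-like update rule from the primitives above.
adam_like = combine.chain(
    transform.scale_by_adam(b1=0.9, b2=0.999, eps=1e-8),
    transform.scale(-1e-3),  # negative step size: descend along the gradient
)

params = (torch.zeros(3, requires_grad=True),)
grads = (torch.ones(3),)

state = adam_like.init(params)  # the `(init_fn, update_fn)` contract
updates, state = adam_like.update(grads, state, inplace=False)
```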
@@ -459,6 +461,7 @@ def update_fn(updates, state, inplace=True):
 
             def f(g, m, n):
                 return g.mul_(torch.rsqrt(n.sub(m**2).add(eps)))
+
         else:
 
             def f(g, m, n):
diff --git a/torchopt/_src/update.py b/torchopt/_src/update.py
index 2d17adb7..1f05f90c 100644
--- a/torchopt/_src/update.py
+++ b/torchopt/_src/update.py
@@ -32,32 +32,30 @@
 
 import jax
 
-from torchopt._src import base
+from torchopt._src import base  # pylint: disable=unused-import
 
 
-def apply_updates(params: base.Params, updates: base.Updates, inplace: bool = True) -> base.Params:
+def apply_updates(
+    params: 'base.Params', updates: 'base.Updates', inplace: bool = True
+) -> 'base.Params':
     """Applies an update to the corresponding parameters.
 
-    This is a utility functions that applies an update to a set of parameters,
-    and then returns the updated parameters to the caller. As an example, the
-    update may be a gradient transformed by a sequence of`GradientTransformations`.
-    This function is exposed for convenience, but it just adds updates and parameters;
-    you may also apply updates to parameters manually, using `tree_map` (e.g. if
-    you want to manipulate updates in custom ways before applying them).
+    This is a utility function that applies an update to a set of parameters, and then returns the
+    updated parameters to the caller. As an example, the update may be a gradient transformed by a
+    sequence of :class:`GradientTransformations`. This function is exposed for convenience, but it
+    just adds updates and parameters; you may also apply updates to parameters manually, using
+    :func:`tree_map` (e.g. if you want to manipulate updates in custom ways before applying them).
 
     Args:
-        params:
-            A tree of parameters.
+        params: A tree of parameters.
         updates:
-            A tree of updates, the tree structure and the shape of the leaf
-            nodes must match that of `params`.
-        inplace:
-            If true, will update params in a inplace manner.
+            A tree of updates; the tree structure and the shape of the leaf nodes must match that
+            of ``params``.
+        inplace: If :data:`True`, will update params in an in-place manner.
 
     Returns:
-        Updated parameters, with same structure, shape and type as `params`.
+        Updated parameters, with the same structure, shape and type as ``params``.
     """
-
     if inplace:
 
         def f(p, u):
diff --git a/torchopt/_src/utils.py b/torchopt/_src/utils.py
index 79921916..5c904e45 100644
--- a/torchopt/_src/utils.py
+++ b/torchopt/_src/utils.py
@@ -19,8 +19,6 @@
 import torch
 import torch.nn as nn
 
-from torchopt._src.optimizer.meta import MetaOptimizer
-
 
 class _ModuleState(NamedTuple):
     params: List[Dict]
@@ -31,29 +29,28 @@ class _ModuleState(NamedTuple):
 def stop_gradient(target):
     """Stop the gradient for the input object.
 
-    Since a tensor use `grad_fn` to connect itself with the previous computation
-    graph, the back-propagated gradient will flow over the tensor and continue
-    flow to the tensors that is connected by `grad_fn`. Some algorithms requires
-    manually detaching tensors from the computation graph.
+    Since a tensor uses :attr:`grad_fn` to connect itself with the previous computation graph, the
+    back-propagated gradient will flow over the tensor and continue to flow to the tensors that
+    are connected by :attr:`grad_fn`. Some algorithms require manually detaching tensors from the
+    computation graph.
 
-    Note that the stop_gradient operation is in-place.
+    Note that the :func:`stop_gradient` operation is in-place.
 
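With `apply_updates` in place, a complete functional training step looks roughly as follows. This sketch assumes the package-root exports `torchopt.adam` and `torchopt.apply_updates`; the model and data are placeholders.

```python
import torch
import torchopt

net = torch.nn.Linear(4, 1)
params = tuple(net.parameters())

optim = torchopt.adam(lr=1e-3)  # a functional optimizer, i.e. a GradientTransformation
opt_state = optim.init(params)

xs, ys = torch.randn(8, 4), torch.randn(8, 1)
loss = ((net(xs) - ys) ** 2).mean()

grads = torch.autograd.grad(loss, params)
updates, opt_state = optim.update(grads, opt_state, inplace=False)
torchopt.apply_updates(params, updates)  # adds the transformed updates onto the parameters
```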
     Args:
-        target:
-            The target that to be detached from the computation graph, it could
-            be a `nn.Module`, `torchopt.MetaOptimizer`, state of the
-            `torchopt.MetaOptimizer`, or just a plain list of tensors.
-        inplace:
-            If true, the target will be detached in-place. if false, this function
-            will return a detached copy of the target. The in-place operation is
-            fast and memory efficient but may raise back-propagation error.
+        target: The target to be detached from the computation graph. It could be a
+            :class:`nn.Module`, :class:`torchopt.MetaOptimizer`, state of the
+            :class:`torchopt.MetaOptimizer`, or just a plain list of tensors.
+        inplace: If :data:`True`, the target will be detached in-place. If :data:`False`, this
+            function will return a detached copy of the target. The in-place operation is fast and
+            memory efficient but may raise a back-propagation error.
     """
+    # pylint: disable=import-outside-toplevel
+    from torchopt._src.optimizer.meta.base import MetaOptimizer
 
     def f(obj):
         if isinstance(obj, torch.Tensor):
             requires_grad = obj.requires_grad
             obj.detach_().requires_grad_(requires_grad)
-            return None
 
     if isinstance(target, _ModuleState):
         true_target = target.params
@@ -67,39 +64,40 @@ def f(obj):
     jax.tree_map(f, true_target)
 
 
+# pylint: disable=too-many-branches,too-many-locals
 def extract_state_dict(mod, copy=False, *, with_buffer=True, enable_visual=False, visual_prefix=''):
     """Extract target state.
 
-    Since a tensor use `grad_fn` to connect itself with the previous computation
-    graph, the back-propagated gradient will flow over the tensor and continue
-    flow to the tensors that is connected by `grad_fn`. Some algorithms requires
-    manually detaching tensors from the computation graph.
+    Since a tensor uses :attr:`grad_fn` to connect itself with the previous computation graph, the
+    back-propagated gradient will flow over the tensor and continue to flow to the tensors that
+    are connected by :attr:`grad_fn`. Some algorithms require manually detaching tensors from the
+    computation graph.
 
-    Note that the extracted state is a reference, which means any in-place operator
-    will affect the target that the state is extracted from.
+    Note that the extracted state is a reference, which means any in-place operator will affect the
+    target that the state is extracted from.
 
     Args:
-        mod:
-            It could be a `nn.Module` or `torchopt.MetaOptimizer`.
+        mod: It could be a :class:`nn.Module` or :class:`torchopt.MetaOptimizer`.
         with_buffer:
-            Extract buffer together with parameters, this argument is only used
-            if the input target is `nn.Module`.
+            Extract buffers together with parameters; this argument is only used if the input
+            target is :class:`nn.Module`.
         enable_visual:
-            Add additional annotations, which could be used in computation graph
-            visualization. Currently, this flag only has effect on `nn.Module` but
-            we will support `torchopt.MetaOptimizer` later.
-        visual_prefix:
-            Prefix for the visualization annotations.
+            Add additional annotations, which could be used in computation graph visualization.
+            Currently, this flag only has an effect on :class:`nn.Module` but we will support
+            :class:`torchopt.MetaOptimizer` later.
+        visual_prefix: Prefix for the visualization annotations.
 
     Returns:
         State extracted of the input object.
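These utilities exist mainly to support inner-loop bookkeeping in meta-learning. A hedged sketch of the intended pattern (the `MetaSGD` usage and both objectives are placeholders; the runnable versions live under `examples/` and the tutorial notebooks):

```python
import torch
import torchopt

net = torch.nn.Linear(2, 1)
inner_optim = torchopt.MetaSGD(net, lr=0.1)

# Snapshot (references to) the parameters and optimizer state before adaptation.
net_state = torchopt.extract_state_dict(net)
optim_state = torchopt.extract_state_dict(inner_optim)

inner_loss = net(torch.randn(4, 2)).mean()  # placeholder inner objective
inner_optim.step(inner_loss)                # differentiable update

outer_loss = net(torch.randn(4, 2)).mean()  # placeholder outer objective
outer_loss.backward()

# Option A (MAML-style): roll back to the snapshot for the next task.
torchopt.recover_state_dict(net, net_state)
torchopt.recover_state_dict(inner_optim, optim_state)

# Option B (MGRL-style): keep the adapted parameters but truncate the graph in-place.
# torchopt.stop_gradient(net)
# torchopt.stop_gradient(inner_optim)
```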
""" + # pylint: disable=import-outside-toplevel + from torchopt._src.optimizer.meta.base import MetaOptimizer - if isinstance(mod, nn.Module): + if isinstance(mod, nn.Module): # pylint: disable=no-else-return if enable_visual: visual_contents = {} - for k, v in mod.named_parameters(): + for k, v in mod.named_parameters(): # pylint: disable=invalid-name if v.grad_fn is not None: visual_contents.update({v.grad_fn: (visual_prefix + k, v)}) else: @@ -109,17 +107,18 @@ def extract_state_dict(mod, copy=False, *, with_buffer=True, enable_visual=False params = [] - def get_v(v): + def get_v(v): # pylint: disable=invalid-name if copy: requires_grad = v.requires_grad return v.clone().detach_().requires_grad_(requires_grad) - else: - return v + + return v def _update(term): if len(term) != 0: params.append({k: get_v(v) for k, v in term.items()}) + # pylint: disable=protected-access _update(mod._parameters) if with_buffer: _update(mod._buffers) @@ -130,12 +129,13 @@ def _update(term): if with_buffer: _update(module._buffers) return _ModuleState(params=tuple(params), visual_contents=visual_contents) + elif isinstance(mod, MetaOptimizer): state = mod.state_dict() if copy: flatten_state, state_tree = jax.tree_flatten(state) - def get_v(v): + def get_v(v): # pylint: disable=invalid-name if not isinstance(v, torch.Tensor): return v requires_grad = v.requires_grad @@ -143,11 +143,10 @@ def get_v(v): flatten_state = jax.tree_map(get_v, flatten_state) return state_tree.unflatten(flatten_state) - else: - return state - else: - raise RuntimeError(f"Unexpected class of {mod}") + return state + + raise RuntimeError(f'Unexpected class of {mod}') def _extract_container(mod, with_buffer=True): @@ -158,6 +157,7 @@ def _update(term): if len(term) != 0: containers.append(term) + # pylint: disable=protected-access _update(mod._parameters) if with_buffer: _update(mod._buffers) @@ -168,24 +168,24 @@ def _update(term): if with_buffer: _update(module._buffers) return tuple(containers) - else: - raise RuntimeError(f"Unexpected class of {mod}") + + raise RuntimeError(f'Unexpected class of {mod}') def recover_state_dict(mod, state): """Recover state. - This function is compatible for the `extract_state`. + This function is compatible for the ``extract_state``. - Note that the recovering process is not in-place, so the tensors of the object - will not be modified. + Note that the recovering process is not in-place, so the tensors of the object will not be + modified. Args: - mod: - Target that need to recover. - state: - The recovering state. + mod: Target that need to recover. + state: The recovering state. """ + # pylint: disable=import-outside-toplevel + from torchopt._src.optimizer.meta.base import MetaOptimizer if isinstance(mod, nn.Module): target_container = _extract_container(mod) @@ -194,4 +194,4 @@ def recover_state_dict(mod, state): elif isinstance(mod, MetaOptimizer): mod.load_state_dict(state) else: - raise RuntimeError(f"Unexpected class of {mod}") + raise RuntimeError(f'Unexpected class of {mod}') diff --git a/torchopt/_src/visual.py b/torchopt/_src/visual.py index 1f508f37..68d96903 100644 --- a/torchopt/_src/visual.py +++ b/torchopt/_src/visual.py @@ -28,82 +28,85 @@ Node = namedtuple('Node', ('name', 'inputs', 'attr', 'op')) # Saved attrs for grad_fn (incl. 
saved variables) begin with `._saved_*` -SAVED_PREFIX = "_saved_" +SAVED_PREFIX = '_saved_' def get_fn_name(fn, show_attrs, max_attr_chars): + """Returns function name.""" name = str(type(fn).__name__) if not show_attrs: return name - attrs = dict() + attrs = {} for attr in dir(fn): if not attr.startswith(SAVED_PREFIX): continue val = getattr(fn, attr) - attr = attr[len(SAVED_PREFIX):] + attr = attr[len(SAVED_PREFIX) :] if torch.is_tensor(val): - attrs[attr] = "[saved tensor]" + attrs[attr] = '[saved tensor]' elif isinstance(val, tuple) and any(torch.is_tensor(t) for t in val): - attrs[attr] = "[saved tensors]" + attrs[attr] = '[saved tensors]' else: attrs[attr] = str(val) if not attrs: return name max_attr_chars = max(max_attr_chars, 3) - col1width = max(len(k) for k in attrs.keys()) + col1width = max(map(len, attrs)) col2width = min(max(len(str(v)) for v in attrs.values()), max_attr_chars) - sep = "-" * max(col1width + col2width + 2, len(name)) + sep = '-' * max(col1width + col2width + 2, len(name)) attrstr = '%-' + str(col1width) + 's: %' + str(col2width) + 's' - def truncate(s): - return s[:col2width - 3] + "..." if len(s) > col2width else s + def truncate(s): # pylint: disable=invalid-name + return s[: col2width - 3] + '...' if len(s) > col2width else s params = '\n'.join(attrstr % (k, truncate(str(v))) for (k, v) in attrs.items()) return name + '\n' + sep + '\n' + params # mypy: ignore-errors -def make_dot(var, params=None, show_attrs=False, show_saved=False, max_attr_chars=50) -> Digraph: +# pylint: disable=too-many-branches,too-many-statements,too-many-locals +def make_dot( + var: torch.Tensor, params=None, show_attrs=False, show_saved=False, max_attr_chars=50 +) -> Digraph: """Produces Graphviz representation of PyTorch autograd graph. - If a node represents a backward function, it is gray. Otherwise, the node - represents a tensor and is either blue, orange, or green: - - Blue: reachable leaf tensors that requires grad (tensors whose `.grad` - fields will be populated during `.backward()`) - - Orange: saved tensors of custom autograd functions as well as those - saved by built-in backward nodes - - Green: tensor passed in as outputs - - Dark green: if any output is a view, we represent its base tensor with - a dark green node. + If a node represents a backward function, it is gray. Otherwise, the node represents a tensor + and is either blue, orange, or green: + + - **Blue** + Reachable leaf tensors that requires grad (tensors whose :attr:`grad` fields will be + populated during :meth:`backward`). + - **Orange** + Saved tensors of custom autograd functions as well as those saved by built-in backward + nodes. + - **Green** + Tensor passed in as outputs. + - **Dark green** + If any output is a view, we represent its base tensor with a dark green node. Args: - var: - Output tensor. + var: Output tensor. params: ([dict of (name, tensor) or state_dict]) Parameters to add names to node that requires grad. - show_attrs: - Whether to display non-tensor attributes of backward nodes + show_attrs: Whether to display non-tensor attributes of backward nodes + (Requires PyTorch version >= 1.9) + show_saved: Whether to display saved tensor nodes that are not by custom autograd + functions. Saved tensor nodes for custom functions, if present, are always displayed. (Requires PyTorch version >= 1.9) - show_saved: - Whether to display saved tensor nodes that are not by custom - autograd functions. Saved tensor nodes for custom functions, if - present, are always displayed. 
(Requires PyTorch version >= 1.9) - max_attr_chars: - If show_attrs is `True`, sets max number of characters - to display for any given attribute. + max_attr_chars: If ``show_attrs`` is :data:`True`, sets max number of characters to display + for any given attribute. """ - - if (parse_version(torch.__version__) < parse_version("1.9") and (show_attrs or show_saved)): + if parse_version(torch.__version__) < parse_version('1.9') and (show_attrs or show_saved): warnings.warn( - "make_dot: showing grad_fn attributes and saved variables " - "requires PyTorch version >= 1.9. (This does NOT apply to " - "saved tensors saved by custom autograd functions.)" + 'make_dot: showing grad_fn attributes and saved variables ' + 'requires PyTorch version >= 1.9. (This does NOT apply to ' + 'saved tensors saved by custom autograd functions.)' ) param_map = {} if params is not None: - from torchopt._src.utils import _ModuleState + from torchopt._src.utils import _ModuleState # pylint: disable=import-outside-toplevel if isinstance(params, _ModuleState): param_map.update(params.visual_contents) @@ -127,24 +130,23 @@ def make_dot(var, params=None, show_attrs=False, show_saved=False, max_attr_char fontsize='10', ranksep='0.1', height='0.2', - fontname='monospace' + fontname='monospace', ) - dot = Digraph(node_attr=node_attr, graph_attr=dict(size="12,12")) + dot = Digraph(node_attr=node_attr, graph_attr=dict(size='12,12')) seen = set() def size_to_str(size): - return '(' + (', ').join(['%d' % v for v in size]) + ')' + return '(' + (', ').join(map(str, size)) + ')' def get_var_name(var, name=None): if not name: name = param_map[var] if var in param_map else '' - return '%s\n %s' % (name, size_to_str(var.size())) + return f'{name}\n{size_to_str(var.size())}' def get_var_name_with_flag(var): if var in param_map: - return '%s\n %s' % (param_map[var][0], size_to_str(param_map[var][1].size())) - else: - return None + return f'{param_map[var][0]}\n{size_to_str(param_map[var][1].size())}' + return None def add_nodes(fn): assert not torch.is_tensor(fn) @@ -158,15 +160,15 @@ def add_nodes(fn): continue val = getattr(fn, attr) seen.add(val) - attr = attr[len(SAVED_PREFIX):] + attr = attr[len(SAVED_PREFIX) :] if torch.is_tensor(val): - dot.edge(str(id(fn)), str(id(val)), dir="none") + dot.edge(str(id(fn)), str(id(val)), dir='none') dot.node(str(id(val)), get_var_name(val, attr), fillcolor='orange') if isinstance(val, tuple): for i, t in enumerate(val): if torch.is_tensor(t): - name = attr + '[%s]' % str(i) - dot.edge(str(id(fn)), str(id(t)), dir="none") + name = f'{attr}[{i}]' + dot.edge(str(id(fn)), str(id(t)), dir='none') dot.node(str(id(t)), get_var_name(t, name), fillcolor='orange') if hasattr(fn, 'variable'): @@ -180,7 +182,7 @@ def add_nodes(fn): fn_fillcolor = None var_name = get_var_name_with_flag(fn) if var_name is not None: - fn_name = '%s\n %s' % (fn_name, var_name) + fn_name = f'{fn_name}\n{var_name}' fn_fillcolor = 'lightblue' # add the node for this grad_fn @@ -206,16 +208,17 @@ def add_base_tensor(var, color='darkolivegreen1'): return seen.add(var) dot.node(str(id(var)), get_var_name(var), fillcolor=color) - if (var.grad_fn): + if var.grad_fn: add_nodes(var.grad_fn) dot.edge(str(id(var.grad_fn)), str(id(var))) + # pylint: disable=protected-access if var._is_view(): add_base_tensor(var._base, color='darkolivegreen3') - dot.edge(str(id(var._base)), str(id(var)), style="dotted") + dot.edge(str(id(var._base)), str(id(var)), style='dotted') # handle multiple outputs if isinstance(var, tuple): - for v in var: + for v 
in var: # pylint: disable=invalid-name add_base_tensor(v) else: add_base_tensor(var) @@ -227,12 +230,12 @@ def add_base_tensor(var, color='darkolivegreen1'): def resize_graph(dot, size_per_element=0.5, min_size=12): """Resize the graph according to how much content it contains. + Modify the graph in place. """ - # Get the approximate number of nodes and edges num_rows = len(dot.body) content_size = num_rows * size_per_element size = max(min_size, content_size) - size_str = str(size) + "," + str(size) + size_str = str(size) + ',' + str(size) dot.graph_attr.update(size=size_str) diff --git a/torchopt/version.py b/torchopt/version.py index 4359b2e3..89c0c4c4 100644 --- a/torchopt/version.py +++ b/torchopt/version.py @@ -14,4 +14,4 @@ # ============================================================================== """TorchOpt: a high-performance optimizer library built upon PyTorch.""" -__version__ = "0.4.1" +__version__ = '0.4.1' diff --git a/tutorials/1_Functional_Optimizer.ipynb b/tutorials/1_Functional_Optimizer.ipynb index 467791c1..f4194835 100644 --- a/tutorials/1_Functional_Optimizer.ipynb +++ b/tutorials/1_Functional_Optimizer.ipynb @@ -7,6 +7,13 @@ "# TorchOpt as Functional Optimizer" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "[](https://colab.research.google.com/drive/1yfi-ETyIptlIM7WFYWF_IFhX4WF3LldP?usp=sharing)" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -54,7 +61,7 @@ "\n", "\n", "def mse(inputs, targets):\n", - " return ((inputs - targets) ** 2).sum()" + " return ((inputs - targets) ** 2).mean()" ] }, { diff --git a/tutorials/2_Visualization.ipynb b/tutorials/2_Visualization.ipynb index 61f2b489..f1af008f 100644 --- a/tutorials/2_Visualization.ipynb +++ b/tutorials/2_Visualization.ipynb @@ -7,6 +7,13 @@ "# Visualization in TorchOpt" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "[](https://colab.research.google.com/drive/1Uoo2epqZKmJNQOiO0EU8DGd33AVKBlAq?usp=sharing)" + ] + }, { "cell_type": "markdown", "metadata": {}, diff --git a/tutorials/3_Meta_Optimizer.ipynb b/tutorials/3_Meta_Optimizer.ipynb index 76d43132..aaca9e3f 100644 --- a/tutorials/3_Meta_Optimizer.ipynb +++ b/tutorials/3_Meta_Optimizer.ipynb @@ -7,6 +7,13 @@ "# TorchOpt as Meta-Optimizer" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "[](https://colab.research.google.com/drive/1lo9q2gQz073urYln-4Yub5s8APUoHvQJ?usp=sharing)" + ] + }, { "cell_type": "markdown", "metadata": {}, diff --git a/tutorials/4_Stop_Gradient.ipynb b/tutorials/4_Stop_Gradient.ipynb index 4e3d3053..604196ca 100644 --- a/tutorials/4_Stop_Gradient.ipynb +++ b/tutorials/4_Stop_Gradient.ipynb @@ -7,6 +7,13 @@ "# `torchopt.stop_gradient` in Meta-Learning" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "[](https://colab.research.google.com/drive/1jp_oPHIG6aaQMYGNxG72FSuWjABk1DHo?usp=sharing)" + ] + }, { "cell_type": "markdown", "metadata": {}, diff --git a/tutorials/requirements.txt b/tutorials/requirements.txt new file mode 100644 index 00000000..00cb5228 --- /dev/null +++ b/tutorials/requirements.txt @@ -0,0 +1,8 @@ +--extra-index-url https://download.pytorch.org/whl/cu116 +torch == 1.12 +torchvision +functorch + +--requirement ../requirements.txt + +ipykernel From 7455286cf40f760e03cfdccb902c2caf5ba41bc3 Mon Sep 17 00:00:00 2001 From: Bo Liu Date: Fri, 22 Jul 2022 15:37:10 +0800 Subject: [PATCH 17/19] fix(examples, README.md): minor fix (#40) Co-authored-by: Xuehai Pan --- README.md | 6 +++--- examples/L2R/l2r.py | 8 +++++--- 
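For completeness, a short sketch of the visualization entry point documented above, assuming `make_dot` stays reachable as `torchopt.visual.make_dot` (rendering additionally needs the system Graphviz binaries):

```python
import torch
import torchopt

net = torch.nn.Linear(3, 1)
loss = (net(torch.randn(2, 3)) ** 2).mean()

# Passing named parameters labels the blue leaf nodes in the rendered graph.
dot = torchopt.visual.make_dot(loss, params=dict(net.named_parameters()))
dot.render('graph', format='svg')
```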
examples/LOLA/lola_dice.py | 10 ++++++---- examples/MAML-RL/maml.py | 3 ++- examples/few-shot/maml_omniglot.py | 3 ++- examples/requirements.txt | 2 +- torchopt/_src/accelerated_op/adam_op/adam_op.py | 2 +- 7 files changed, 20 insertions(+), 14 deletions(-) diff --git a/README.md b/README.md index 9db7e70a..37477bf9 100644 --- a/README.md +++ b/README.md @@ -112,8 +112,8 @@ In contrast to them, TorchOpt realizes differentiable optimizer with functional ### Meta-Learning API -- We design a base class `torchopt.MetaOptimizer` for managing network updates in Meta-Learning. The constructor of `MetaOptimizer` takes as input the network rather than network parameters. `MetaOptimizer` exposed interface `step(loss)` takes as input the loss for step the network parameter. Refer to the tutorial notebook [Meta Optimizer](tutorials/2_Meta_Optimizer.ipynb) for more details. -- We offer `torchopt.chain` which can apply a list of chainable update transformations. Combined with `MetaOptimizer`, it can help you conduct gradient transformation such as gradient clip before the Meta optimizer steps. Refer to the tutorial notebook [Meta Optimizer](tutorials/2_Meta_Optimizer.ipynb) for more details. +- We design a base class `torchopt.MetaOptimizer` for managing network updates in Meta-Learning. The constructor of `MetaOptimizer` takes as input the network rather than network parameters. `MetaOptimizer` exposed interface `step(loss)` takes as input the loss for step the network parameter. Refer to the tutorial notebook [Meta Optimizer](tutorials/3_Meta_Optimizer.ipynb) for more details. +- We offer `torchopt.chain` which can apply a list of chainable update transformations. Combined with `MetaOptimizer`, it can help you conduct gradient transformation such as gradient clip before the Meta optimizer steps. Refer to the tutorial notebook [Meta Optimizer](tutorials/3_Meta_Optimizer.ipynb) for more details. - We observe that different Meta-Learning algorithms vary in inner-loop parameter recovery. TorchOpt provides basic functions for users to extract or recover network parameters and optimizer states anytime anywhere they want. - Some algorithms such as MGRL ([arXiv:1805.09801](https://arxiv.org/abs/1805.09801)) initialize the inner-loop parameters inherited from previous inner-loop process when conducting a new bi-level process. TorchOpt also provides a finer function `stop_gradient` for manipulating the gradient graph, which is helpful for this kind of algorithms. Refer to the notebook [Stop Gradient](tutorials/4_Stop_Gradient.ipynb) for more details. @@ -190,7 +190,7 @@ Notably, the operator fusion not only increases performance but also help simpli ## Visualization -Complex gradient flow in meta-learning brings in a great challenge for managing the gradient flow and verifying the correctness of it. TorchOpt provides a visualization tool that draw variable (e.g. network parameters or meta parameters) names on the gradient graph for better analyzing. The visualization tool is modified from [`torchviz`](https://github.com/szagoruyko/pytorchviz). We provide an example using the [visualization code](examples/visualize.py). Also refer to the notebook [Visualization](tutorials/3_Visualization.ipynb) for more details. +Complex gradient flow in meta-learning brings in a great challenge for managing the gradient flow and verifying the correctness of it. TorchOpt provides a visualization tool that draw variable (e.g. network parameters or meta parameters) names on the gradient graph for better analyzing. 
The visualization tool is modified from [`torchviz`](https://github.com/szagoruyko/pytorchviz). We provide an example using the [visualization code](examples/visualize.py). Also refer to the notebook [Visualization](tutorials/2_Visualization.ipynb) for more details. The figure below show the visualization result. Compared with [`torchviz`](https://github.com/szagoruyko/pytorchviz), TorchOpt fuses the operations within the `Adam` together (orange) to reduce the complexity and provide simpler visualization. diff --git a/examples/L2R/l2r.py b/examples/L2R/l2r.py index 9262c0c2..cd093313 100644 --- a/examples/L2R/l2r.py +++ b/examples/L2R/l2r.py @@ -37,9 +37,11 @@ import torchopt -from .helper.argument import parse_args -from .helper.model import LeNet5 -from .helper.utils import get_imbalance_dataset, plot, set_seed + +# isort: off +from helper.argument import parse_args +from helper.model import LeNet5 +from helper.utils import get_imbalance_dataset, plot, set_seed def run_baseline(args, mnist_train, mnist_test): diff --git a/examples/LOLA/lola_dice.py b/examples/LOLA/lola_dice.py index 7384244b..61d2e22c 100644 --- a/examples/LOLA/lola_dice.py +++ b/examples/LOLA/lola_dice.py @@ -19,10 +19,12 @@ import numpy as np import torch -from .helper.agent import Agent -from .helper.argument import parse_args -from .helper.env import IPD -from .helper.utils import sample, step + +# isort: off +from helper.agent import Agent +from helper.argument import parse_args +from helper.env import IPD +from helper.utils import sample, step def main(args): diff --git a/examples/MAML-RL/maml.py b/examples/MAML-RL/maml.py index e6a149e6..8734e000 100644 --- a/examples/MAML-RL/maml.py +++ b/examples/MAML-RL/maml.py @@ -23,7 +23,8 @@ import torchopt -from .helpers.policy import CategoricalMLPPolicy + +from helpers.policy import CategoricalMLPPolicy # isort: skip TASK_NUM = 40 diff --git a/examples/few-shot/maml_omniglot.py b/examples/few-shot/maml_omniglot.py index 1d561ef7..3f7a7f0f 100644 --- a/examples/few-shot/maml_omniglot.py +++ b/examples/few-shot/maml_omniglot.py @@ -53,7 +53,8 @@ import torchopt -from .support.omniglot_loaders import OmniglotNShot + +from support.omniglot_loaders import OmniglotNShot # isort: skip mpl.use('Agg') diff --git a/examples/requirements.txt b/examples/requirements.txt index eaf947df..9e2e108e 100644 --- a/examples/requirements.txt +++ b/examples/requirements.txt @@ -5,7 +5,7 @@ functorch --requirement ../requirements.txt -gym < 1.0.0.a0 +gym >= 0.20.0, < 0.24.0a0 matplotlib pandas seaborn diff --git a/torchopt/_src/accelerated_op/adam_op/adam_op.py b/torchopt/_src/accelerated_op/adam_op/adam_op.py index cde05f73..dac0697b 100644 --- a/torchopt/_src/accelerated_op/adam_op/adam_op.py +++ b/torchopt/_src/accelerated_op/adam_op/adam_op.py @@ -19,7 +19,7 @@ import torch -from torchopt._lib import adam_op +from torchopt._lib import adam_op # pylint: disable=no-name-in-module class AdamOp: # pylint: disable=too-few-public-methods From bdf15588fa26f9ed69932c8199c01c42f3e8d55a Mon Sep 17 00:00:00 2001 From: Bo Liu Date: Tue, 26 Jul 2022 13:23:42 +0800 Subject: [PATCH 18/19] feat(docs): add .readthedocs.yaml (#34) Co-authored-by: Xuehai Pan --- .github/ISSUE_TEMPLATE/bug_report.md | 2 +- .github/ISSUE_TEMPLATE/feature_request.md | 2 +- .readthedocs.yaml | 32 ++++++++++ conda-recipe.yaml | 2 +- docs/conda-recipe.yaml | 73 +++++++++++++++++++++++ docs/source/conf.py | 7 +-- 6 files changed, 111 insertions(+), 7 deletions(-) create mode 100644 .readthedocs.yaml create mode 100644 
docs/conda-recipe.yaml diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 55dacf60..86dcfbcb 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -2,7 +2,7 @@ name: Bug report about: Create a report to help us improve title: "[BUG]" -labels: '' +labels: ["bug"] assignees: Benjamin-eecs --- diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index 0e6ca2a8..b61aa154 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -2,7 +2,7 @@ name: Feature request about: Suggest an idea for this project title: "[Feature Request]" -labels: '' +labels: ["enhancement"] assignees: Benjamin-eecs --- diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 00000000..88b7a202 --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,32 @@ +# .readthedocs.yaml +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +# Set the version of Python and other tools you might need +build: + os: ubuntu-20.04 + tools: + python: mambaforge-4.10 + +# Optionally declare the Python requirements required to build your docs +conda: + environment: docs/conda-recipe.yaml + +# If using Sphinx, optionally build your docs in additional formats such as PDF +formats: + - pdf + +# Build documentation in the docs/ directory with Sphinx +sphinx: + builder: html + configuration: docs/source/conf.py + fail_on_warning: true + +# Optionally declare the Python requirements required to build your docs +python: + install: + - method: pip + path: . diff --git a/conda-recipe.yaml b/conda-recipe.yaml index f7f0917a..625a236c 100644 --- a/conda-recipe.yaml +++ b/conda-recipe.yaml @@ -59,6 +59,7 @@ dependencies: - sphinxcontrib-spelling - sphinxcontrib-bibtex - sphinx-autodoc-typehints + - pyenchant - myst-nb - ipykernel - pandoc @@ -76,7 +77,6 @@ dependencies: - flake8-bugbear - doc8 - pydocstyle - - pyenchant - clang-format - clang-tools # clang-tidy - cpplint diff --git a/docs/conda-recipe.yaml b/docs/conda-recipe.yaml new file mode 100644 index 00000000..d55c0f19 --- /dev/null +++ b/docs/conda-recipe.yaml @@ -0,0 +1,73 @@ +# Copyright 2022 MetaOPT Team. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +# +# Create virtual environment with command: +# +# $ CONDA_OVERRIDE_CUDA=11.7 conda env create --file docs/conda-recipe.yaml +# + +name: torchopt-docs + +channels: + - pytorch + - defaults + - conda-forge + +dependencies: + - python = 3.8 + - pip + + # Learning + - pytorch::pytorch = 1.12 + - pytorch::torchvision + - pytorch::pytorch-mutex = *=*cpu* + - pip: + - jax[cpu] >= 0.3 + - functorch + - torchviz + - sphinxcontrib-katex # for documentation + - tensorboard + - wandb + + # Build toolchain + - cmake >= 3.4 + - make + - cxx-compiler + - gxx = 10 + - nvidia/label/cuda-11.6.2::cuda-nvcc + - nvidia/label/cuda-11.6.2::cuda-cudart-dev + - pybind11 + + # Misc + - typing-extensions + - numpy + - matplotlib-base + - seaborn + - python-graphviz + - pillow + + # Documentation + - sphinx + - sphinx_rtd_theme + - sphinx-autobuild + - sphinx-copybutton + - sphinxcontrib-spelling + - sphinxcontrib-bibtex + - sphinx-autodoc-typehints + - pyenchant + - myst-nb + - ipykernel + - pandoc + - docutils diff --git a/docs/source/conf.py b/docs/source/conf.py index da11e3b7..25159cb0 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -25,15 +25,12 @@ # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # -import inspect import os import pathlib import sys import sphinxcontrib.katex as katex -import torchopt - HERE = pathlib.Path(__file__).absolute().parent PROJECT_ROOT = HERE.parent.parent @@ -76,12 +73,14 @@ def get_version() -> str: 'sphinx_copybutton', 'sphinx_rtd_theme', 'sphinxcontrib.bibtex', - 'sphinxcontrib.spelling', 'sphinxcontrib.katex', 'sphinx_autodoc_typehints', 'myst_nb', # This is used for the .ipynb notebooks ] +if not os.getenv('READTHEDOCS', None): + extensions.append('sphinxcontrib.spelling') + # Add any paths that contain templates here, relative to this directory. 
templates_path = ['_templates'] From e0bb3cdbb3538c503e73a19ae6f6f994163f23de Mon Sep 17 00:00:00 2001 From: Xuehai Pan Date: Tue, 26 Jul 2022 15:44:35 +0800 Subject: [PATCH 19/19] ver: bump version to 0.4.2 (#31) * ver: bump version to 0.4.2 * docs(CHANGELOG): add CHANGELOG.md * docs: update dictionary * docs: update authors * feat(workflows): test wheels before release * fix(workflows): remove single-quotes in env files * chore(workflows): update triggers * chore: install torchopt in conda environment * chore(.gitignore): ignore wheelhouse * style: update pylint magic comments * chore(conda-recipe): add patchelf * chore(workflows): update triggers * feat: manylinux wheels * docs: update install instruction * chore(workflows): list wheels with size * chore(workflows): use pypa/gh-action-pypi-publish to upload packages * docs: add badges * fix(workflows): show wheels one-by-one * docs: update contribution guide line * chore(pyproject): use pyproject.toml * chore: support str for accelerated_op_available * chore(workflows): test wheels with CPU build of torch * docs: update CHANGELOG * fix(accelerated_op): skip checking op on cuda devices when CUDA is not available * chore: update conda-recipe.yaml * docs: update CHANGELOG * docs: update badges * chore: remove pip edit install in conda recipe --- .github/workflows/build.yml | 202 ++++++++++++++++++ .github/workflows/lint.yml | 13 +- .github/workflows/release.yml | 98 --------- .github/workflows/tests.yml | 24 ++- .gitignore | 3 +- .pre-commit-config.yaml | 1 - CHANGELOG.md | 57 +++++ CITATION.cff | 8 +- Makefile | 4 +- README.md | 20 +- conda-recipe.yaml | 7 +- docs/source/developer/contributing.rst | 28 +++ docs/source/developer/contributor.rst | 1 - docs/source/index.rst | 15 +- docs/source/spelling_wordlist.txt | 2 + pyproject.toml | 101 +++++++++ setup.cfg | 42 ---- setup.py | 42 ---- torchopt/_src/accelerated_op/__init__.py | 11 +- .../_src/accelerated_op/adam_op/adam_op.py | 14 +- torchopt/version.py | 2 +- 21 files changed, 475 insertions(+), 220 deletions(-) create mode 100644 .github/workflows/build.yml delete mode 100644 .github/workflows/release.yml create mode 100644 CHANGELOG.md create mode 100644 pyproject.toml delete mode 100644 setup.cfg diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 00000000..8b26e861 --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,202 @@ +name: Build + +on: + push: + branches: + - main # allow to trigger the workflow with tag push event + pull_request: + paths: + - setup.py + - setup.cfg + - pyproject.toml + - MANIFEST.in + - CMakeLists.txt + - include/** + - src/** + - torchopt/version.py + - .github/workflow/build.yml + release: + types: + - published + # Allow to trigger the workflow manually + workflow_dispatch: + +permissions: + contents: read + +concurrency: + group: "${{ github.workflow }}-${{ github.ref }}" + cancel-in-progress: ${{ github.event_name == 'pull_request' }} + +jobs: + build: + runs-on: ubuntu-18.04 + if: github.repository == 'metaopt/TorchOpt' && (github.event_name != 'push' || startsWith(github.ref, 'refs/tags/')) + timeout-minutes: 45 + steps: + - name: Checkout + uses: actions/checkout@v3 + with: + submodules: "recursive" + fetch-depth: 1 + + - name: Set up Python 3.7 + id: py37 + uses: actions/setup-python@v4 + with: + python-version: "3.7" + update-environment: false + + - name: Set up Python 3.8 + id: py38 + uses: actions/setup-python@v4 + with: + python-version: "3.8" + update-environment: false + + - name: Set up 
Python 3.9 + id: py39 + uses: actions/setup-python@v4 + with: + python-version: "3.9" + update-environment: false + + - name: Set up Python 3.10 + id: py310 + uses: actions/setup-python@v4 + with: + python-version: "3.10" + update-environment: false + + - name: Set up Python executable paths + run: | + echo "${{ steps.py37.outputs.python-path }}" > .python-paths + echo "${{ steps.py38.outputs.python-path }}" >> .python-paths + echo "${{ steps.py39.outputs.python-path }}" >> .python-paths + echo "${{ steps.py310.outputs.python-path }}" >> .python-paths + + - name: Setup CUDA Toolkit + uses: Jimver/cuda-toolkit@v0.2.7 + id: cuda-toolkit + with: + cuda: "11.6.2" + method: network + sub-packages: '["nvcc"]' + - run: | + CUDA_VERSION="${{steps.cuda-toolkit.outputs.cuda}}" + echo "CUDA_VERSION=${CUDA_VERSION}" >> "${GITHUB_ENV}" + TORCH_INDEX_URL="https://download.pytorch.org/whl/cu$(echo "${CUDA_VERSION}" | cut -d'.' -f-2 | tr -d '.')" + echo "TORCH_INDEX_URL=${TORCH_INDEX_URL}" >> "${GITHUB_ENV}" + + echo "Installed CUDA version is: ${CUDA_VERSION}" + echo "CUDA install location: ${{steps.cuda-toolkit.outputs.CUDA_PATH}}" + nvcc -V + echo "Torch index URL: ${TORCH_INDEX_URL}" + + - name: Build sdist and wheels + run: | + DEFAULT_PYTHON="$(head -n 1 .python-paths)" + + while read -r PYTHON; do + echo "Building wheel with Python: ${PYTHON} ($("${PYTHON}" --version))" + "${PYTHON}" -m pip install --upgrade pip setuptools wheel build + "${PYTHON}" -m pip install --extra-index-url "${TORCH_INDEX_URL}" \ + -r requirements.txt + if [[ "${PYTHON}" == "${DEFAULT_PYTHON}" ]]; then + "${PYTHON}" -m build + else + "${PYTHON}" -m build --wheel + fi + done < .python-paths + + - name: List built sdist and wheels + run: | + if [[ -n "$(find dist -maxdepth 0 -not -empty -print 2>/dev/null)" ]]; then + echo "Built sdist and wheels:" + ls -lh dist/ + else + echo "No sdist and wheels are built." + exit 1 + fi + + - name: Audit and repair wheels + run: | + while read -r PYTHON; do + PYVER="cp$("${PYTHON}" --version | cut -d ' ' -f2 | cut -d '.' -f-2 | tr -d '.')" + echo "Audit and repair wheel for Python: ${PYTHON} (${PYVER})" + LIBTORCH_PATH="$("${PYTHON}" -c 'import os, site; print(os.path.join(site.getsitepackages()[0], "torch", "lib"))')" + "${PYTHON}" -m pip install --upgrade git+https://github.com/XuehaiPan/auditwheel.git@torchopt + ( + export LD_LIBRARY_PATH="${LIBTORCH_PATH}${LD_LIBRARY_PATH:+:${LD_LIBRARY_PATH}}" + echo "LD_LIBRARY_PATH=${LD_LIBRARY_PATH}" + "${PYTHON}" -m auditwheel show dist/torchopt-*-${PYVER}-*.whl && + "${PYTHON}" -m auditwheel repair --plat manylinux2014_x86_64 --wheel-dir wheelhouse dist/torchopt-*-${PYVER}-*.whl + ) + done < .python-paths + + rm dist/torchopt-*.whl + mv wheelhouse/torchopt-*manylinux*.whl dist/ + + - name: List built sdist and wheels + run: | + if [[ -n "$(find dist -maxdepth 0 -not -empty -print 2>/dev/null)" ]]; then + echo "Built sdist and wheels:" + ls -lh dist/ + else + echo "No sdist and wheels are built." + exit 1 + fi + + - name: Test sdist and wheels + run: | + DEFAULT_PYTHON="$(head -n 1 .python-paths)" + while read -r PYTHON; do + PYVER="cp$("${PYTHON}" --version | cut -d ' ' -f2 | cut -d '.' 
-f-2 | tr -d '.')" + mkdir -p "temp-${PYVER}" + pushd "temp-${PYVER}" + if [[ "${PYTHON}" == "${DEFAULT_PYTHON}" ]]; then + echo "Testing sdist with Python: ${PYTHON} (${PYVER})" + "${PYTHON}" -m pip uninstall torch torchopt -y + "${PYTHON}" -m pip install --extra-index-url https://download.pytorch.org/whl/cpu \ + ../dist/torchopt-*.tar.gz + "${PYTHON}" -c 'import torchopt' + fi + echo "Testing wheel with Python: ${PYTHON} (${PYVER})" + "${PYTHON}" -m pip uninstall torch torchopt -y + "${PYTHON}" -m pip install --extra-index-url https://download.pytorch.org/whl/cpu \ + ../dist/torchopt-*-${PYVER}-*.whl + "${PYTHON}" -c 'import torchopt' + "${PYTHON}" -m pip uninstall torch torchopt -y + popd + done < .python-paths + + - name: Check consistency between the package version and release tag + if: startsWith(github.ref, 'refs/tags/') + run: | + RELEASE_TAG="${GITHUB_REF#refs/*/}" + PACKAGE_VER="v$(python setup.py --version)" + if [[ "${PACKAGE_VER}" != "${RELEASE_TAG}" ]]; then + echo "package ver. (${PACKAGE_VER}) != release tag. (${RELEASE_TAG})" + exit 1 + fi + + - name: Publish to TestPyPI + if: startsWith(github.ref, 'refs/tags/') || github.event_name == 'workflow_dispatch' + uses: pypa/gh-action-pypi-publish@v1.5.0 + with: + user: __token__ + password: ${{ secrets.TESTPYPI_UPLOAD_TOKEN }} + repository_url: https://test.pypi.org/legacy/ + verbose: true + print_hash: true + skip_existing: true + + - name: Publish to PyPI + if: startsWith(github.ref, 'refs/tags/') || github.event_name == 'workflow_dispatch' + uses: pypa/gh-action-pypi-publish@v1.5.0 + with: + user: __token__ + password: ${{ secrets.PYPI_UPLOAD_TOKEN }} + verbose: true + print_hash: true + skip_existing: true diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 35b9d2a1..f2393c77 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -9,16 +9,15 @@ on: permissions: contents: read +concurrency: + group: "${{ github.workflow }}-${{ github.ref }}" + cancel-in-progress: ${{ github.event_name == 'pull_request' }} + jobs: lint: runs-on: ubuntu-latest timeout-minutes: 30 steps: - - name: Cancel previous run - uses: styfle/cancel-workflow-action@0.10.0 - with: - access_token: ${{ github.token }} - - name: Checkout uses: actions/checkout@v3 with: @@ -40,9 +39,9 @@ jobs: sub-packages: '["nvcc"]' - run: | CUDA_VERSION="${{steps.cuda-toolkit.outputs.cuda}}" - echo "CUDA_VERSION='${CUDA_VERSION}'" >> "${GITHUB_ENV}" + echo "CUDA_VERSION=${CUDA_VERSION}" >> "${GITHUB_ENV}" TORCH_INDEX_URL="https://download.pytorch.org/whl/cu$(echo "${CUDA_VERSION}" | cut -d'.' 
-f-2 | tr -d '.')" - echo "TORCH_INDEX_URL='${TORCH_INDEX_URL}'" >> "${GITHUB_ENV}" + echo "TORCH_INDEX_URL=${TORCH_INDEX_URL}" >> "${GITHUB_ENV}" echo "Installed CUDA version is: ${CUDA_VERSION}" echo "CUDA install location: ${{steps.cuda-toolkit.outputs.CUDA_PATH}}" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml deleted file mode 100644 index ceb45f06..00000000 --- a/.github/workflows/release.yml +++ /dev/null @@ -1,98 +0,0 @@ -name: PyPI - -on: - release: - types: [created] - -jobs: - deploy: - runs-on: ubuntu-latest - timeout-minutes: 30 - steps: - - name: Checkout - uses: actions/checkout@v3 - with: - submodules: "recursive" - fetch-depth: 1 - - - name: Set up Python 3.7 - id: py37 - uses: actions/setup-python@v4 - with: - python-version: "3.7" - update-environment: false - - - name: Set up Python 3.8 - id: py38 - uses: actions/setup-python@v4 - with: - python-version: "3.8" - update-environment: false - - - name: Set up Python 3.9 - id: py39 - uses: actions/setup-python@v4 - with: - python-version: "3.9" - update-environment: false - - - name: Set up Python 3.10 - id: py310 - uses: actions/setup-python@v4 - with: - python-version: "3.10" - update-environment: false - - - name: Set up Python executable paths - run: | - DEFAULT_PYTHON="${{ steps.py37.outputs.python-path }}" - echo "DEFAULT_PYTHON='${DEFAULT_PYTHON}'" >> "${GITHUB_ENV}" - - PYTHON_EXECUTABLES="${{ steps.py37.outputs.python-path }}" - PYTHON_EXECUTABLES="${PYTHON_EXECUTABLES}:${{ steps.py38.outputs.python-path }}" - PYTHON_EXECUTABLES="${PYTHON_EXECUTABLES}:${{ steps.py39.outputs.python-path }}" - PYTHON_EXECUTABLES="${PYTHON_EXECUTABLES}:${{ steps.py310.outputs.python-path }}" - echo "PYTHON_EXECUTABLES='${PYTHON_EXECUTABLES}'" >> "${GITHUB_ENV}" - - - name: Check consistency between the package version and release tag - run: | - RELEASE_VER="${GITHUB_REF#refs/*/}" - PACKAGE_VER="v$(python setup.py --version)" - if [[ "${RELEASE_VER}" != "${PACKAGE_VER}" ]]; then - echo "package ver. (${PACKAGE_VER}) != release ver. 
(${RELEASE_VER})" - exit 1 - fi - - - name: Setup CUDA Toolkit - uses: Jimver/cuda-toolkit@v0.2.7 - id: cuda-toolkit - with: - cuda: "11.6.2" - method: network - sub-packages: '["nvcc"]' - - run: | - echo "Installed CUDA version is: ${{steps.cuda-toolkit.outputs.cuda}}" - echo "CUDA install location: ${{steps.cuda-toolkit.outputs.CUDA_PATH}}" - nvcc -V - - - name: Build sdist and wheels - run: | - while IFS='' read -rd':' PYTHON || [[ -n "${PYTHON}" ]]; do - [[ -z "${PYTHON}" ]] && continue - echo "Building wheel with Python: ${PYTHON} ($("${PYTHON}" --version))" - "${PYTHON}" -m pip install --upgrade pip setuptools wheel build - if [[ "${PYTHON}" == "${DEFAULT_PYTHON}" ]]; then - "${PYTHON}" -m build - else - "${PYTHON}" -m build --wheel - fi - done <<< "${PYTHON_EXECUTABLES}" - - - name: Publish to PyPI - env: - TWINE_USERNAME: "__token__" - TWINE_PASSWORD: ${{ secrets.PYPI_UPLOAD_TOKEN }} - run: | - "${DEFAULT_PYTHON}" -m pip install --upgrade twine - "${DEFAULT_PYTHON}" -m twine upload --repository testpypi dist/* - "${DEFAULT_PYTHON}" -m twine upload --repository pypi dist/* diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index f67d9b46..5c62ff1b 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -5,20 +5,30 @@ on: branches: - main pull_request: + paths: + - setup.py + - setup.cfg + - pyproject.toml + - MANIFEST.in + - CMakeLists.txt + - include/** + - src/** + - tests/** + - torchopt/** + - .github/workflows/tests.yml permissions: contents: read +concurrency: + group: "${{ github.workflow }}-${{ github.ref }}" + cancel-in-progress: ${{ github.event_name == 'pull_request' }} + jobs: test: runs-on: ubuntu-latest timeout-minutes: 30 steps: - - name: Cancel previous run - uses: styfle/cancel-workflow-action@0.10.0 - with: - access_token: ${{ github.token }} - - name: Checkout uses: actions/checkout@v3 with: @@ -40,9 +50,9 @@ jobs: sub-packages: '["nvcc"]' - run: | CUDA_VERSION="${{steps.cuda-toolkit.outputs.cuda}}" - echo "CUDA_VERSION='${CUDA_VERSION}'" >> "${GITHUB_ENV}" + echo "CUDA_VERSION=${CUDA_VERSION}" >> "${GITHUB_ENV}" TORCH_INDEX_URL="https://download.pytorch.org/whl/cu$(echo "${CUDA_VERSION}" | cut -d'.' -f-2 | tr -d '.')" - echo "TORCH_INDEX_URL='${TORCH_INDEX_URL}'" >> "${GITHUB_ENV}" + echo "TORCH_INDEX_URL=${TORCH_INDEX_URL}" >> "${GITHUB_ENV}" echo "Installed CUDA version is: ${CUDA_VERSION}" echo "CUDA install location: ${{steps.cuda-toolkit.outputs.CUDA_PATH}}" diff --git a/.gitignore b/.gitignore index 5deaf2bb..e195bfa9 100644 --- a/.gitignore +++ b/.gitignore @@ -21,6 +21,7 @@ parts/ sdist/ var/ wheels/ +wheelhouse/ share/python-wheels/ *.egg-info/ .installed.cfg @@ -169,7 +170,7 @@ cython_debug/ .LSOverride # Icon must end with two \r -Icon +Icon # Thumbnails ._* diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e1fda090..9849236f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -27,7 +27,6 @@ repos: rev: 22.6.0 hooks: - id: black - args: [--safe, -S, -t, py37, -l, '100'] stages: [commit, push, manual] - repo: local hooks: diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 00000000..70cbe2e8 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,57 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+ +------ + +## [Unreleased] + +------ + +## [0.4.2] - 2022-07-26 + +### Added + +- Read the Docs integration by [@Benjamin-eecs](https://github.com/Benjamin-eecs) and [@XuehaiPan](https://github.com/XuehaiPan) in [#34](https://github.com/metaopt/TorchOpt/pull/34). +- Update documentation and code styles by [@Benjamin-eecs](https://github.com/Benjamin-eecs) and [@XuehaiPan](https://github.com/XuehaiPan) in [#22](https://github.com/metaopt/TorchOpt/pull/22). +- Update tutorial notebooks by [@XuehaiPan](https://github.com/XuehaiPan) in [#27](https://github.com/metaopt/TorchOpt/pull/27). +- Bump PyTorch version to 1.12 by [@XuehaiPan](https://github.com/XuehaiPan) in [#25](https://github.com/metaopt/TorchOpt/pull/25). +- Support custom Python executable path in `CMakeLists.txt` by [@XuehaiPan](https://github.com/XuehaiPan) in [#18](https://github.com/metaopt/TorchOpt/pull/18). +- Add citation information by [@waterhorse1](https://github.com/waterhorse1) in [#14](https://github.com/metaopt/TorchOpt/pull/14) and [@Benjamin-eecs](https://github.com/Benjamin-eecs) in [#15](https://github.com/metaopt/TorchOpt/pull/15). +- Implement RMSProp optimizer by [@future-xy](https://github.com/future-xy) in [#8](https://github.com/metaopt/TorchOpt/pull/8). + +### Changed + +- Use `pyproject.toml` for packaging and update GitHub Action workflows by [@XuehaiPan](https://github.com/XuehaiPan) in [#31](https://github.com/metaopt/TorchOpt/pull/31). +- Rename the package from `TorchOpt` to `torchopt` by [@XuehaiPan](https://github.com/XuehaiPan) in [#20](https://github.com/metaopt/TorchOpt/pull/20). + +### Fixed + +- Fixed errors while building from the source and add `conda` environment recipe by [@XuehaiPan](https://github.com/XuehaiPan) in [#24](https://github.com/metaopt/TorchOpt/pull/24). + +------ + +## [0.4.1] - 2022-04-15 + +### Fixed + +- Fix set devices bug for multi-GPUs. + +------ + +## [0.4.0] - 2022-04-09 + +### Added + +- The first beta release of TorchOpt. +- TorchOpt with L2R, LOLA, MAML-RL, MGRL, and few-shot examples. 
+ +------ + +[Unreleased]: https://github.com/olivierlacan/keep-a-changelog/compare/v0.4.2...HEAD +[0.4.2]: https://github.com/olivierlacan/keep-a-changelog/compare/v0.4.1...v0.4.2 +[0.4.1]: https://github.com/olivierlacan/keep-a-changelog/compare/v0.4.0...v0.4.1 +[0.4.0]: https://github.com/olivierlacan/keep-a-changelog/releases/tag/v0.4.0 diff --git a/CITATION.cff b/CITATION.cff index 5c239556..60c65cb3 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -16,6 +16,10 @@ authors: email: benjaminliu.eecs@gmail.com affiliation: Peking University orcid: 'https://orcid.org/0000-0001-5426-515X' + - given-names: Xuehai + family-names: Pan + email: xuehaipan@pku.edu.cn + affiliation: Peking University - given-names: Luo family-names: Mai email: luo.mai@ed.ac.uk @@ -24,7 +28,7 @@ authors: family-names: Yang affiliation: Peking University email: yaodong.yang@pku.edu.cn -version: 0.4.1 -date-released: "2022-04-09" +version: 0.4.2 +date-released: "2022-07-26" license: Apache-2.0 repository-code: "https://github.com/metaopt/TorchOpt" diff --git a/Makefile b/Makefile index f6de2a06..f050cf1f 100644 --- a/Makefile +++ b/Makefile @@ -96,7 +96,7 @@ flake8: flake8-install py-format: py-format-install $(PYTHON) -m isort --project torchopt --check $(PYTHON_FILES) && \ - $(PYTHON) -m black --safe -l 100 -t py37 -S --check $(PYTHON_FILES) + $(PYTHON) -m black --check $(PYTHON_FILES) mypy: mypy-install $(PYTHON) -m mypy $(PROJECT_PATH) @@ -135,7 +135,7 @@ lint: flake8 py-format mypy clang-format cpplint docstyle spelling format: py-format-install clang-format-install addlicense-install $(PYTHON) -m isort --project torchopt $(PYTHON_FILES) - $(PYTHON) -m black --safe -l 100 -t py37 -S $(PYTHON_FILES) + $(PYTHON) -m black $(PYTHON_FILES) clang-format -style=file -i $(CXX_FILES) addlicense -c $(COPYRIGHT) -l apache -y 2022 $(SOURCE_FOLDERS) diff --git a/README.md b/README.md index 37477bf9..c73ae163 100644 --- a/README.md +++ b/README.md @@ -5,6 +5,15 @@
  • +![Python 3.7+](https://img.shields.io/badge/Python-3.7%2B-brightgreen.svg) +[![PyPI](https://img.shields.io/pypi/v/torchopt?label=PyPI)](https://pypi.org/project/torchopt) +![Status](https://img.shields.io/pypi/status/torchopt?label=Status) +![GitHub Workflow Status](https://img.shields.io/github/workflow/status/metaopt/TorchOpt/Tests?label=tests&logo=github) +[![Documentation Status](https://readthedocs.org/projects/torchopt/badge/?version=latest)](https://torchopt.readthedocs.io/en/latest/?badge=latest) +[![Downloads](https://static.pepy.tech/personalized-badge/torchopt?period=month&left_color=grey&right_color=blue&left_text=Downloads/month)](https://pepy.tech/project/torchopt) +[![GitHub Repo Stars](https://img.shields.io/github/stars/metaopt/torchopt?label=Stars&logo=github&color=brightgreen)](https://github.com/metaopt/torchopt/stargazers) +[![License](https://img.shields.io/github/license/metaopt/TorchOpt?label=License)](#license) + **TorchOpt** is a high-performance optimizer library built upon [PyTorch](https://pytorch.org/) for easy implementation of functional optimization and gradient-based meta-learning. It consists of two main features: - TorchOpt provides functional optimizer which enables [JAX-like](https://github.com/google/jax) composable functional optimizer for PyTorch. With TorchOpt, one can easily conduct neural network optimization in PyTorch with functional style optimizer, similar to [Optax](https://github.com/deepmind/optax) in JAX. @@ -25,6 +34,7 @@ The README is organized as follows: - [Visualization](#visualization) - [Installation](#installation) - [Future Plan](#future-plan) +- [Changelog](#changelog) - [The Team](#the-team) - [Citing TorchOpt](#citing-torchopt) @@ -209,6 +219,8 @@ Requirements - (Optional) For visualizing computation graphs - [Graphviz](https://graphviz.org/download/) (for Linux users use `apt/yum install graphviz` or `conda install -c anaconda python-graphviz`) +Please follow the instructions at https://pytorch.org to install PyTorch in your Python environment first. Then run the following command to install TorchOpt from PyPI ([![PyPI](https://img.shields.io/pypi/v/torchopt?label=PyPI)](https://pypi.org/project/torchopt) / ![Status](https://img.shields.io/pypi/status/torchopt?label=Status)): + ```bash pip3 install torchopt ``` @@ -242,11 +254,15 @@ pip3 install -e . - [ ] Support more optimizers such as AdamW, RMSProp - [ ] CPU-accelerated optimizer +## Changelog + +See [CHANGELOG.md](CHANGELOG.md). + -------------------------------------------------------------------------------- ## The Team -TorchOpt is a work by Jie Ren, Xidong Feng, [Bo Liu](https://github.com/Benjamin-eecs/), [Luo Mai](https://luomai.github.io/) and [Yaodong Yang](https://www.yangyaodong.com/). +TorchOpt is a work by Jie Ren, Xidong Feng, [Bo Liu](https://github.com/Benjamin-eecs), [Xuehai Pan](https://github.com/XuehaiPan), [Luo Mai](https://luomai.github.io/) and [Yaodong Yang](https://www.yangyaodong.com/). ## Citing TorchOpt @@ -254,7 +270,7 @@ If you find TorchOpt useful, please cite it in your publications.
```bibtex @software{TorchOpt, - author = {Jie Ren and Xidong Feng and Bo Liu and Luo Mai and Yaodong Yang}, + author = {Jie Ren and Xidong Feng and Bo Liu and Xuehai Pan and Luo Mai and Yaodong Yang}, title = {TorchOpt}, year = {2022}, publisher = {GitHub}, diff --git a/conda-recipe.yaml b/conda-recipe.yaml index 625a236c..3c10a3ed 100644 --- a/conda-recipe.yaml +++ b/conda-recipe.yaml @@ -26,21 +26,22 @@ dependencies: - sphinxcontrib-katex # for documentation - jax - jaxlib >= 0.3=*cuda* - - optax - - tensorboard + - optax # for tutorials + - tensorboard # for examples - wandb # Device select - nvidia::cudatoolkit = 11.6 - cudnn - # Build toolkit + # Build toolchain - cmake >= 3.4 - make - cxx-compiler - gxx = 10 - nvidia/label/cuda-11.6.2::cuda-nvcc - nvidia/label/cuda-11.6.2::cuda-cudart-dev + - patchelf >= 0.9 - pybind11 # Misc diff --git a/docs/source/developer/contributing.rst b/docs/source/developer/contributing.rst index 656736a3..278e2900 100644 --- a/docs/source/developer/contributing.rst +++ b/docs/source/developer/contributing.rst @@ -1,6 +1,34 @@ Contributing to TorchOpt ======================== +Before contributing to TorchOpt, please follow the instructions below to set up your development environment. + +1. Fork TorchOpt (`fork <https://github.com/metaopt/TorchOpt/fork>`_) on GitHub and clone the repository. + +.. code-block:: bash + + git clone git@github.com:<your-username>/TorchOpt.git # use the SSH protocol + cd TorchOpt + + git remote add upstream git@github.com:metaopt/TorchOpt.git + +2. Set up a development environment via `conda `_: + +.. code-block:: bash + + # You may need `CONDA_OVERRIDE_CUDA` if conda fails to detect the NVIDIA driver (e.g. in docker or WSL2) + CONDA_OVERRIDE_CUDA=11.7 conda env create --file conda-recipe.yaml + + conda activate torchopt + +3. Set up the `pre-commit <https://pre-commit.com>`_ hooks: + +.. code-block:: bash + + pre-commit install --install-hooks + +Then you are ready to rock. Thanks for contributing to TorchOpt! + Install Develop Version ----------------------- diff --git a/docs/source/developer/contributor.rst b/docs/source/developer/contributor.rst index e47a7c12..0f08d38a 100644 --- a/docs/source/developer/contributor.rst +++ b/docs/source/developer/contributor.rst @@ -3,5 +3,4 @@ Contributor We always welcome contributions to help make TorchOpt better. Below is an incomplete list of our contributors (find more on `this page `_). -* Xuehai Pan (`XuehaiPan `_) * Yao Fu (`future-xy `_) diff --git a/docs/source/index.rst b/docs/source/index.rst index 50ccb0fb..892a1090 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -13,8 +13,12 @@ Installation Requirements -(Optional) For visualizing computation graphs -`Graphviz `_ (for Linux users use ``apt/yum install graphviz`` or ``conda install -c anaconda python-graphviz``) +- PyTorch +- JAX +- (Optional) For visualizing computation graphs + - `Graphviz <https://graphviz.org/download/>`_ (for Linux users use ``apt/yum install graphviz`` or ``conda install -c anaconda python-graphviz``) + +Please follow the instructions at https://pytorch.org to install PyTorch in your Python environment first. Then run the following command to install TorchOpt from PyPI: .. code-block:: bash @@ -39,7 +43,6 @@ We provide a `conda `_ environment recipe to ins CONDA_OVERRIDE_CUDA=11.7 conda env create --file conda-recipe.yaml conda activate torchopt - pip3 install -e . .. toctree:: @@ -77,6 +80,7 @@ TorchOpt is a work by * Jie Ren (`JieRen98 `_) * Xidong Feng (`waterhorse1 `_) * Bo Liu (`Benjamin-eecs `_) +* Xuehai Pan (`XuehaiPan <https://github.com/XuehaiPan>`_) * Luo Mai (`luomai `_) * Yaodong Yang (`PKU-YYang `_).
@@ -86,6 +90,11 @@ Support If you are having issues, please let us know by filing an issue on our `issue tracker `_. +Changelog +--------- + +See :gitcode:`CHANGELOG.md`. + License ------- diff --git a/docs/source/spelling_wordlist.txt b/docs/source/spelling_wordlist.txt index 24ee9124..db1e67a1 100644 --- a/docs/source/spelling_wordlist.txt +++ b/docs/source/spelling_wordlist.txt @@ -67,3 +67,5 @@ rmsprop RMSProp sgd SGD +CHANGELOG +Changelog diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..d76dd3dc --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,101 @@ +[build-system] +requires = ["setuptools", "torch == 1.12", "numpy", "pybind11"] +build-backend = "setuptools.build_meta" + +[project] +name = "torchopt" +description = "A JAX-style optimizer for PyTorch." +readme = "README.md" +requires-python = ">= 3.7" +authors = [ + {name = "TorchOpt Contributors"}, + {name = "Xuehai Pan", email = "XuehaiPan@pku.edu.cn"}, + {name = "Jie Ren", email = "jieren9806@gmail.com"}, + {name = "Xidong Feng", email = "xidong.feng.20@ucl.ac.uk"}, + {name = "Bo Liu", email = "benjaminliu.eecs@gmail.com"}, +] +license = {file = "LICENSE"} +keywords = [ + "PyTorch", + "functorch", + "JAX", + "Meta-Learning", + "Optimizer", + "Differentiable Optimizer", + "Functional Programming", +] +classifiers = [ + "Development Status :: 4 - Beta", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Operating System :: POSIX :: Linux", + "Environment :: GPU", + "Environment :: GPU :: NVIDIA CUDA", + "Intended Audience :: Developers", + "Intended Audience :: Education", + "Intended Audience :: Science/Research", + "Topic :: Scientific/Engineering", + "Topic :: Scientific/Engineering :: Artificial Intelligence", +] +dependencies = [ + "torch == 1.12", + "jax[cpu] >= 0.3", + "numpy", + "graphviz", + "typing-extensions", +] +dynamic = [ + "version", +] + +[project.urls] +Homepage = "https://github.com/metaopt/TorchOpt" +Repository = "https://github.com/metaopt/TorchOpt" +Documentation = "https://torchopt.readthedocs.io" +"Bug Report" = "https://github.com/metaopt/TorchOpt/issues" + +[tool.setuptools.packages.find] +include = ["torchopt", "torchopt.*"] + +[tool.black] +safe = true +line-length = 100 +skip-string-normalization = true +target-version = ["py37", "py38", "py39", "py310"] + +[tool.isort] +profile = "black" +src_paths = ["torchopt", "examples", "tests"] +indent = 4 +line_length = 100 +lines_after_imports = 2 +multi_line_output = 3 + +[tool.mypy] +allow_redefinition = true +check_untyped_defs = true +disallow_incomplete_defs = false +disallow_untyped_defs = false +ignore_missing_imports = true +no_implicit_optional = true +pretty = true +show_error_codes = true +show_error_context = true +show_traceback = true +strict_equality = true +strict_optional = true +warn_no_return = true +warn_redundant_casts = true +warn_unreachable = true +warn_unused_configs = true +warn_unused_ignores = true + +[tool.pydocstyle] +convention = "google" + +[tool.doc8] +max-line-length = 500 diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index c3438afe..00000000 --- a/setup.cfg +++ /dev/null @@ -1,42 +0,0 @@ -[black] -line-length = 100 -skip-string-normalization = True -target_version = ["py37"] - -[flake8] -exclude = - .git -indent_size = 4 - -[pydocstyle] -convention = 
google - -[isort] -profile = black -py_version=37 -indent = 4 -line_length = 100 -lines_after_imports = 2 -multi_line_output = 3 - -[mypy] -allow_redefinition = True -check_untyped_defs = True -disallow_incomplete_defs = False -disallow_untyped_defs = False -ignore_missing_imports = True -no_implicit_optional = True -pretty = True -show_error_codes = True -show_error_context = True -show_traceback = True -strict_equality = True -strict_optional = True -warn_no_return = True -warn_redundant_casts = True -warn_unreachable = True -warn_unused_configs = True -warn_unused_ignores = True - -[doc8] -max-line-length = 1000 diff --git a/setup.py b/setup.py index f399e14d..169a767c 100644 --- a/setup.py +++ b/setup.py @@ -80,51 +80,9 @@ def build_extension(self, ext): setup( - name='torchopt', version=version.__version__, - author='TorchOpt Contributors', - author_email='jieren9806@gmail.com, xidong.feng.20@ucl.ac.uk, benjaminliu.eecs@gmail.com', - description='A Jax-style optimizer for PyTorch.', - long_description=open('README.md', encoding='utf8').read(), - long_description_content_type='text/markdown', - license='Apache License Version 2.0', - keywords='Meta-Learning, PyTorch, Optimizer', - url='https://github.com/metaopt/TorchOpt', - packages=find_packages(include=['torchopt', 'torchopt.*']), package_data={'sharedlib': ['_lib/*.so']}, include_package_data=True, cmdclass={'build_ext': cmake_build_ext}, ext_modules=[CMakeExtension('torchopt._lib.adam_op', source_dir=HERE)], - setup_requires=[ # for `torch.utils.cpp_extension` - 'torch == 1.12', - 'numpy', - 'pybind11', - ], - install_requires=[ - 'torch == 1.12', - 'jax[cpu] >= 0.3', - 'numpy', - 'graphviz', - 'typing-extensions', - ], - python_requires='>= 3.7', - classifiers=[ - # How mature is this project? Common values are - # 3 - Alpha - # 4 - Beta - # 5 - Production/Stable - 'Development Status :: 4 - Beta', - # Indicate who your project is intended for - 'Intended Audience :: Science/Research', - 'Intended Audience :: Developers', - 'Topic :: Scientific/Engineering :: Artificial Intelligence', - # Pick your license as you wish (should match "license" above) - 'License :: OSI Approved :: Apache Software License', - # Specify the Python versions you support here. In particular, ensure - # that you indicate whether you support Python 2, Python 3 or both. - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', - ], ) diff --git a/torchopt/_src/accelerated_op/__init__.py b/torchopt/_src/accelerated_op/__init__.py index 3eb1d44f..4c7f1cd9 100644 --- a/torchopt/_src/accelerated_op/__init__.py +++ b/torchopt/_src/accelerated_op/__init__.py @@ -13,12 +13,16 @@ # limitations under the License. 
# ============================================================================== +from typing import Iterable, Optional, Union + import torch from torchopt._src.accelerated_op.adam_op import AdamOp -def accelerated_op_available(devices=None): +def accelerated_op_available( + devices: Optional[Union[str, torch.device, Iterable[Union[str, torch.device]]]] = None +) -> bool: """Check the availability of accelerated optimizer.""" op = AdamOp() @@ -26,9 +30,14 @@ def accelerated_op_available(devices=None): devices = [torch.device('cuda'), torch.device('cpu')] elif isinstance(devices, torch.device): devices = [devices] + elif isinstance(devices, str): + devices = [torch.device(devices)] try: for device in devices: + device = torch.device(device) + if device.type == 'cuda' and not torch.cuda.is_available(): + return False updates = torch.tensor(1.0, device=device) op(updates, updates, updates, 1) return True diff --git a/torchopt/_src/accelerated_op/adam_op/adam_op.py b/torchopt/_src/accelerated_op/adam_op/adam_op.py index dac0697b..a59b00e6 100644 --- a/torchopt/_src/accelerated_op/adam_op/adam_op.py +++ b/torchopt/_src/accelerated_op/adam_op/adam_op.py @@ -30,7 +30,7 @@ class MuOp(torch.autograd.Function): # pylint: disable=abstract-method @staticmethod def jvp(ctx: Any, *grad_inputs: Any) -> Any: - # pylint: disable=line-too-long + # pylint: disable-next=line-too-long """Defines a formula for differentiating the operation with forward mode automatic differentiation.""" @staticmethod @@ -44,7 +44,7 @@ def forward(ctx: Any, *args: Any, **kwargs: Any) -> Any: @staticmethod def backward(ctx: Any, *args: Any) -> Any: - # pylint: disable=line-too-long + # pylint: disable-next=line-too-long """Defines a formula for differentiating the operation with backward mode automatic differentiation (alias to the :meth:`vjp` method).""" dmu = args[0] updates, mu = ctx.saved_tensors @@ -57,7 +57,7 @@ class NuOp(torch.autograd.Function): # pylint: disable=abstract-method @staticmethod def jvp(ctx: Any, *grad_inputs: Any) -> Any: - # pylint: disable=line-too-long + # pylint: disable-next=line-too-long """Defines a formula for differentiating the operation with forward mode automatic differentiation.""" @staticmethod @@ -71,7 +71,7 @@ def forward(ctx: Any, *args: Any, **kwargs: Any) -> Any: @staticmethod def backward(ctx: Any, *args: Any) -> Any: - # pylint: disable=line-too-long + # pylint: disable-next=line-too-long """Defines a formula for differentiating the operation with backward mode automatic differentiation (alias to the :meth:`vjp` function).""" dnu = args[0] updates, nu = ctx.saved_tensors @@ -84,7 +84,7 @@ class UpdatesOp(torch.autograd.Function): # pylint: disable=abstract-method @staticmethod def jvp(ctx: Any, *grad_inputs: Any) -> Any: - # pylint: disable=line-too-long + # pylint: disable-next=line-too-long """Defines a formula for differentiating the operation with forward mode automatic differentiation.""" @staticmethod @@ -98,7 +98,7 @@ def forward(ctx: Any, *args: Any, **kwargs: Any) -> Any: @staticmethod def backward(ctx: Any, *args: Any) -> Any: - # pylint: disable=line-too-long + # pylint: disable-next=line-too-long """Defines a formula for differentiating the operation with backward mode automatic differentiation (alias to the :meth:`vjp` function).""" dupdates = args[0] updates, new_mu, new_nu = ctx.saved_tensors @@ -106,7 +106,7 @@ def backward(ctx: Any, *args: Any) -> Any: result = adam_op.backwardUpdates(dupdates, updates, new_mu, new_nu, b1, b2, count) return result[0], result[1], None - 
# pylint: disable=too-many-arguments + # pylint: disable-next=too-many-arguments def __init__(self, b1=0.9, b2=0.999, eps=1e-8, eps_root=0.0, inplace=True): """The :meth:`__init__` function.""" self.b1 = b1 diff --git a/torchopt/version.py b/torchopt/version.py index 89c0c4c4..784a9a63 100644 --- a/torchopt/version.py +++ b/torchopt/version.py @@ -14,4 +14,4 @@ # ============================================================================== """TorchOpt: a high-performance optimizer library built upon PyTorch.""" -__version__ = '0.4.1' +__version__ = '0.4.2' pFad - Phonifier reborn

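The README hunk above describes TorchOpt's Optax-like functional optimizer API. A minimal sketch of that workflow, assuming the `torchopt` 0.4.x functional interface (`torchopt.adam`, `torchopt.apply_updates`); the toy linear model, random batch, and learning rate below are hypothetical placeholders:

```python
import torch
import torch.nn.functional as F

import torchopt

# Hypothetical toy model and batch, for illustration only.
model = torch.nn.Linear(4, 1)
params = tuple(model.parameters())

optimizer = torchopt.adam(1e-3)     # an Optax-like composable gradient transformation
opt_state = optimizer.init(params)  # optimizer state is an explicit value, not hidden attributes

x, y = torch.randn(8, 4), torch.randn(8, 1)
loss = F.mse_loss(model(x), y)

grads = torch.autograd.grad(loss, params)                # compute gradients functionally
updates, opt_state = optimizer.update(grads, opt_state)  # transform gradients into updates
params = torchopt.apply_updates(params, updates)         # apply the updates to the parameters
```

Because the optimizer state is threaded through explicitly rather than mutated behind the scenes, the transformation stays composable, mirroring Optax's design.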
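The `accelerated_op_available` hunk above also extends the `devices` argument to accept plain strings. A short usage sketch under the same 0.4.x assumptions (the helper is assumed to remain exported at the package top level):

```python
import torchopt

# Probe the CPU explicitly; with no argument the helper probes the
# default devices (CUDA and CPU), per the patched function above.
if torchopt.accelerated_op_available('cpu'):
    print('Accelerated Adam operator is available on CPU.')

# The patched helper itself returns False when a CUDA device is
# requested but torch.cuda.is_available() is False, so no extra guard
# is needed here.
if torchopt.accelerated_op_available('cuda'):
    print('Accelerated Adam operator is available on CUDA.')
```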