diff --git a/.circleci/.gitignore b/.circleci/.gitignore deleted file mode 100644 index 485dee64bcf..00000000000 --- a/.circleci/.gitignore +++ /dev/null @@ -1 +0,0 @@ -.idea diff --git a/.circleci/config.yml b/.circleci/config.yml deleted file mode 100644 index 7e754846023..00000000000 --- a/.circleci/config.yml +++ /dev/null @@ -1,1239 +0,0 @@ -version: 2.1 - -# How to test the Linux jobs: -# - Install CircleCI local CLI: https://circleci.com/docs/2.0/local-cli/ -# - circleci config process .circleci/config.yml > gen.yml && circleci local execute -c gen.yml --job binary_linux_wheel_py3.7 -# - Replace binary_linux_wheel_py3.7 with the name of the job you want to test. -# Job names are 'name:' key. - -orbs: - win: circleci/windows@2.0.0 - -executors: - windows-gpu-prototype: - machine: - resource_class: windows.gpu.small.prototype - image: windows-server-2019-nvidia:201908-28 - shell: bash.exe - -commands: - checkout_merge: - description: "checkout merge branch" - steps: - - checkout -# - run: -# name: Checkout merge branch -# command: | -# set -ex -# BRANCH=$(git rev-parse --abbrev-ref HEAD) -# if [[ "$BRANCH" != "master" ]]; then -# git fetch --force origin ${CIRCLE_BRANCH}/merge:merged/${CIRCLE_BRANCH} -# git checkout "merged/$CIRCLE_BRANCH" -# fi - -binary_common: &binary_common - parameters: - # Edit these defaults to do a release` - build_version: - description: "version number of release binary; by default, build a nightly" - type: string - default: "" - pytorch_version: - description: "PyTorch version to build against; by default, use a nightly" - type: string - default: "" - # Don't edit these - python_version: - description: "Python version to build against (e.g., 3.7)" - type: string - cu_version: - description: "CUDA version to build against, in CU format (e.g., cpu or cu100)" - type: string - unicode_abi: - description: "Python 2.7 wheel only: whether or not we are cp27mu (default: no)" - type: string - default: "" - wheel_docker_image: - description: "Wheel only: what docker image to use" - type: string - default: "soumith/manylinux-cuda101" - environment: - PYTHON_VERSION: << parameters.python_version >> - BUILD_VERSION: << parameters.build_version >> - PYTORCH_VERSION: << parameters.pytorch_version >> - UNICODE_ABI: << parameters.unicode_abi >> - CU_VERSION: << parameters.cu_version >> - -jobs: - circleci_consistency: - docker: - - image: circleci/python:3.7 - steps: - - checkout - - run: - command: | - pip install --user --progress-bar off jinja2 pyyaml - python .circleci/regenerate.py - git diff --exit-code || (echo ".circleci/config.yml not in sync with config.yml.in! 
Run .circleci/regenerate.py to update config"; exit 1) - - binary_linux_wheel: - <<: *binary_common - docker: - - image: << parameters.wheel_docker_image >> - resource_class: 2xlarge+ - steps: - - checkout_merge - - run: packaging/build_wheel.sh - - store_artifacts: - path: dist - - persist_to_workspace: - root: dist - paths: - - "*" - - binary_linux_conda: - <<: *binary_common - docker: - - image: "soumith/conda-cuda" - resource_class: 2xlarge+ - steps: - - checkout_merge - - run: packaging/build_conda.sh - - store_artifacts: - path: /opt/conda/conda-bld/linux-64 - - persist_to_workspace: - root: /opt/conda/conda-bld/linux-64 - paths: - - "*" - - binary_linux_conda_cuda: - <<: *binary_common - machine: - image: ubuntu-1604:201903-01 - resource_class: gpu.medium - steps: - - checkout_merge - - run: - name: Setup environment - command: | - set -e - - curl -L https://packagecloud.io/circleci/trusty/gpgkey | sudo apt-key add - - curl -L https://dl.google.com/linux/linux_signing_key.pub | sudo apt-key add - - - sudo apt-get update - - sudo apt-get install \ - apt-transport-https \ - ca-certificates \ - curl \ - gnupg-agent \ - software-properties-common - - curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add - - - sudo add-apt-repository \ - "deb [arch=amd64] https://download.docker.com/linux/ubuntu \ - $(lsb_release -cs) \ - stable" - - sudo apt-get update - export DOCKER_VERSION="5:19.03.2~3-0~ubuntu-xenial" - sudo apt-get install docker-ce=${DOCKER_VERSION} docker-ce-cli=${DOCKER_VERSION} containerd.io=1.2.6-3 - - # Add the package repositories - distribution=$(. /etc/os-release;echo $ID$VERSION_ID) - curl -s -L https://nvidia.github.io/nvidia-docker/gpgkey | sudo apt-key add - - curl -s -L https://nvidia.github.io/nvidia-docker/$distribution/nvidia-docker.list | sudo tee /etc/apt/sources.list.d/nvidia-docker.list - - export NVIDIA_CONTAINER_VERSION="1.0.3-1" - sudo apt-get update && sudo apt-get install -y nvidia-container-toolkit=${NVIDIA_CONTAINER_VERSION} - sudo systemctl restart docker - - DRIVER_FN="NVIDIA-Linux-x86_64-410.104.run" - wget "https://s3.amazonaws.com/ossci-linux/nvidia_driver/$DRIVER_FN" - sudo /bin/bash "$DRIVER_FN" -s --no-drm || (sudo cat /var/log/nvidia-installer.log && false) - nvidia-smi - - - run: - name: Pull docker image - command: | - set -e - export DOCKER_IMAGE=soumith/conda-cuda - echo Pulling docker image $DOCKER_IMAGE - docker pull $DOCKER_IMAGE >/dev/null - - - run: - name: Build and run tests - command: | - set -e - - cd ${HOME}/project/ - - export DOCKER_IMAGE=soumith/conda-cuda - export VARS_TO_PASS="-e PYTHON_VERSION -e BUILD_VERSION -e PYTORCH_VERSION -e UNICODE_ABI -e CU_VERSION" - - docker run --gpus all --ipc=host -v $(pwd):/remote -w /remote ${VARS_TO_PASS} ${DOCKER_IMAGE} ./packaging/build_conda.sh - - binary_win_conda: - <<: *binary_common - executor: - name: win/default - shell: bash.exe - steps: - - checkout_merge - - run: - command: | - choco install miniconda3 - (& "C:\tools\miniconda3\Scripts\conda.exe" "shell.powershell" "hook") | Out-String | Invoke-Expression - conda activate base - conda install -yq conda-build "conda-package-handling!=1.5.0" - bash packaging/build_conda.sh - shell: powershell.exe - - binary_win_conda_cuda: - <<: *binary_common - executor: windows-gpu-prototype - steps: - - checkout_merge - - run: - command: | - choco install miniconda3 - (& "C:\tools\miniconda3\Scripts\conda.exe" "shell.powershell" "hook") | Out-String | Invoke-Expression - conda activate base - conda install -yq conda-build 
"conda-package-handling!=1.5.0" - bash packaging/build_conda.sh - shell: powershell.exe - - binary_macos_wheel: - <<: *binary_common - macos: - xcode: "9.0" - steps: - - checkout_merge - - run: - # Cannot easily deduplicate this as source'ing activate - # will set environment variables which we need to propagate - # to build_wheel.sh - command: | - curl -o conda.sh https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh - sh conda.sh -b - source $HOME/miniconda3/bin/activate - packaging/build_wheel.sh - - store_artifacts: - path: dist - - persist_to_workspace: - root: dist - paths: - - "*" - - binary_macos_conda: - <<: *binary_common - macos: - xcode: "9.0" - steps: - - checkout_merge - - run: - command: | - curl -o conda.sh https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh - sh conda.sh -b - source $HOME/miniconda3/bin/activate - conda install -yq conda-build - packaging/build_conda.sh - - store_artifacts: - path: /Users/distiller/miniconda3/conda-bld/osx-64 - - persist_to_workspace: - root: /Users/distiller/miniconda3/conda-bld/osx-64 - paths: - - "*" - - # Requires org-member context - binary_conda_upload: - docker: - - image: continuumio/miniconda - steps: - - attach_workspace: - at: ~/workspace - - run: - command: | - # Prevent credential from leaking - conda install -yq anaconda-client - set +x - anaconda login \ - --username "$PYTORCH_BINARY_PJH5_CONDA_USERNAME" \ - --password "$PYTORCH_BINARY_PJH5_CONDA_PASSWORD" - set -x - anaconda upload ~/workspace/*.tar.bz2 -u pytorch-nightly --label main --no-progress --force - - # Requires org-member context - binary_wheel_upload: - parameters: - subfolder: - description: "What whl subfolder to upload to, e.g., blank or cu100/ (trailing slash is important)" - type: string - docker: - - image: circleci/python:3.7 - steps: - - attach_workspace: - at: ~/workspace - - checkout - - run: - command: | - pip install --user awscli - export PATH="$HOME/.local/bin:$PATH" - # Prevent credential from leaking - set +x - export AWS_ACCESS_KEY_ID="${PYTORCH_BINARY_AWS_ACCESS_KEY_ID}" - export AWS_SECRET_ACCESS_KEY="${PYTORCH_BINARY_AWS_SECRET_ACCESS_KEY}" - set -x - for pkg in ~/workspace/*.whl; do - aws s3 cp "$pkg" "s3://pytorch/whl/nightly/<< parameters.subfolder >>" --acl public-read - done - - -workflows: - build: - jobs: - - circleci_consistency - - binary_linux_wheel: - cu_version: cpu - name: binary_linux_wheel_py2.7_cpu - python_version: '2.7' - - binary_linux_wheel: - cu_version: cpu - name: binary_linux_wheel_py2.7u_cpu - python_version: '2.7' - unicode_abi: '1' - - binary_linux_wheel: - cu_version: cu92 - name: binary_linux_wheel_py2.7_cu92 - python_version: '2.7' - wheel_docker_image: soumith/manylinux-cuda92 - - binary_linux_wheel: - cu_version: cu92 - name: binary_linux_wheel_py2.7u_cu92 - python_version: '2.7' - unicode_abi: '1' - wheel_docker_image: soumith/manylinux-cuda92 - - binary_linux_wheel: - cu_version: cu100 - name: binary_linux_wheel_py2.7_cu100 - python_version: '2.7' - wheel_docker_image: soumith/manylinux-cuda100 - - binary_linux_wheel: - cu_version: cu100 - name: binary_linux_wheel_py2.7u_cu100 - python_version: '2.7' - unicode_abi: '1' - wheel_docker_image: soumith/manylinux-cuda100 - - binary_linux_wheel: - cu_version: cu101 - name: binary_linux_wheel_py2.7_cu101 - python_version: '2.7' - - binary_linux_wheel: - cu_version: cu101 - name: binary_linux_wheel_py2.7u_cu101 - python_version: '2.7' - unicode_abi: '1' - - binary_linux_wheel: - cu_version: cpu - name: binary_linux_wheel_py3.5_cpu - 
python_version: '3.5' - - binary_linux_wheel: - cu_version: cu92 - name: binary_linux_wheel_py3.5_cu92 - python_version: '3.5' - wheel_docker_image: soumith/manylinux-cuda92 - - binary_linux_wheel: - cu_version: cu100 - name: binary_linux_wheel_py3.5_cu100 - python_version: '3.5' - wheel_docker_image: soumith/manylinux-cuda100 - - binary_linux_wheel: - cu_version: cu101 - name: binary_linux_wheel_py3.5_cu101 - python_version: '3.5' - - binary_linux_wheel: - cu_version: cpu - name: binary_linux_wheel_py3.6_cpu - python_version: '3.6' - - binary_linux_wheel: - cu_version: cu92 - name: binary_linux_wheel_py3.6_cu92 - python_version: '3.6' - wheel_docker_image: soumith/manylinux-cuda92 - - binary_linux_wheel: - cu_version: cu100 - name: binary_linux_wheel_py3.6_cu100 - python_version: '3.6' - wheel_docker_image: soumith/manylinux-cuda100 - - binary_linux_wheel: - cu_version: cu101 - name: binary_linux_wheel_py3.6_cu101 - python_version: '3.6' - - binary_linux_wheel: - cu_version: cpu - name: binary_linux_wheel_py3.7_cpu - python_version: '3.7' - - binary_linux_wheel: - cu_version: cu92 - name: binary_linux_wheel_py3.7_cu92 - python_version: '3.7' - wheel_docker_image: soumith/manylinux-cuda92 - - binary_linux_wheel: - cu_version: cu100 - name: binary_linux_wheel_py3.7_cu100 - python_version: '3.7' - wheel_docker_image: soumith/manylinux-cuda100 - - binary_linux_wheel: - cu_version: cu101 - name: binary_linux_wheel_py3.7_cu101 - python_version: '3.7' - - binary_macos_wheel: - cu_version: cpu - name: binary_macos_wheel_py2.7_cpu - python_version: '2.7' - - binary_macos_wheel: - cu_version: cpu - name: binary_macos_wheel_py2.7u_cpu - python_version: '2.7' - unicode_abi: '1' - - binary_macos_wheel: - cu_version: cpu - name: binary_macos_wheel_py3.5_cpu - python_version: '3.5' - - binary_macos_wheel: - cu_version: cpu - name: binary_macos_wheel_py3.6_cpu - python_version: '3.6' - - binary_macos_wheel: - cu_version: cpu - name: binary_macos_wheel_py3.7_cpu - python_version: '3.7' - - binary_linux_conda: - cu_version: cpu - name: binary_linux_conda_py2.7_cpu - python_version: '2.7' - - binary_linux_conda: - cu_version: cu92 - name: binary_linux_conda_py2.7_cu92 - python_version: '2.7' - wheel_docker_image: soumith/manylinux-cuda92 - - binary_linux_conda: - cu_version: cu100 - name: binary_linux_conda_py2.7_cu100 - python_version: '2.7' - wheel_docker_image: soumith/manylinux-cuda100 - - binary_linux_conda: - cu_version: cu101 - name: binary_linux_conda_py2.7_cu101 - python_version: '2.7' - - binary_linux_conda: - cu_version: cpu - name: binary_linux_conda_py3.5_cpu - python_version: '3.5' - - binary_linux_conda: - cu_version: cu92 - name: binary_linux_conda_py3.5_cu92 - python_version: '3.5' - wheel_docker_image: soumith/manylinux-cuda92 - - binary_linux_conda: - cu_version: cu100 - name: binary_linux_conda_py3.5_cu100 - python_version: '3.5' - wheel_docker_image: soumith/manylinux-cuda100 - - binary_linux_conda: - cu_version: cu101 - name: binary_linux_conda_py3.5_cu101 - python_version: '3.5' - - binary_linux_conda: - cu_version: cpu - name: binary_linux_conda_py3.6_cpu - python_version: '3.6' - - binary_linux_conda: - cu_version: cu92 - name: binary_linux_conda_py3.6_cu92 - python_version: '3.6' - wheel_docker_image: soumith/manylinux-cuda92 - - binary_linux_conda: - cu_version: cu100 - name: binary_linux_conda_py3.6_cu100 - python_version: '3.6' - wheel_docker_image: soumith/manylinux-cuda100 - - binary_linux_conda: - cu_version: cu101 - name: binary_linux_conda_py3.6_cu101 - python_version: 
'3.6' - - binary_linux_conda: - cu_version: cpu - name: binary_linux_conda_py3.7_cpu - python_version: '3.7' - - binary_linux_conda: - cu_version: cu92 - name: binary_linux_conda_py3.7_cu92 - python_version: '3.7' - wheel_docker_image: soumith/manylinux-cuda92 - - binary_linux_conda: - cu_version: cu100 - name: binary_linux_conda_py3.7_cu100 - python_version: '3.7' - wheel_docker_image: soumith/manylinux-cuda100 - - binary_linux_conda: - cu_version: cu101 - name: binary_linux_conda_py3.7_cu101 - python_version: '3.7' - - binary_macos_conda: - cu_version: cpu - name: binary_macos_conda_py2.7_cpu - python_version: '2.7' - - binary_macos_conda: - cu_version: cpu - name: binary_macos_conda_py3.5_cpu - python_version: '3.5' - - binary_macos_conda: - cu_version: cpu - name: binary_macos_conda_py3.6_cpu - python_version: '3.6' - - binary_macos_conda: - cu_version: cpu - name: binary_macos_conda_py3.7_cpu - python_version: '3.7' - - binary_linux_conda_cuda: - name: torchvision_linux_py3.7_cu100 - python_version: "3.7" - cu_version: "cu100" - - binary_win_conda: - name: torchvision_win_py3.6_cpu - python_version: "3.6" - cu_version: "cpu" - - binary_win_conda_cuda: - name: torchvision_win_py3.6_cu101 - python_version: "3.6" - cu_version: "cu101" - - nightly: - jobs: - - circleci_consistency - - binary_linux_wheel: - cu_version: cpu - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py2.7_cpu - python_version: '2.7' - - binary_wheel_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py2.7_cpu_upload - requires: - - nightly_binary_linux_wheel_py2.7_cpu - subfolder: cpu/ - - binary_linux_wheel: - cu_version: cpu - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py2.7u_cpu - python_version: '2.7' - unicode_abi: '1' - - binary_wheel_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py2.7u_cpu_upload - requires: - - nightly_binary_linux_wheel_py2.7u_cpu - subfolder: cpu/ - - binary_linux_wheel: - cu_version: cu92 - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py2.7_cu92 - python_version: '2.7' - wheel_docker_image: soumith/manylinux-cuda92 - - binary_wheel_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py2.7_cu92_upload - requires: - - nightly_binary_linux_wheel_py2.7_cu92 - subfolder: cu92/ - - binary_linux_wheel: - cu_version: cu92 - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py2.7u_cu92 - python_version: '2.7' - unicode_abi: '1' - wheel_docker_image: soumith/manylinux-cuda92 - - binary_wheel_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py2.7u_cu92_upload - requires: - - nightly_binary_linux_wheel_py2.7u_cu92 - subfolder: cu92/ - - binary_linux_wheel: - cu_version: cu100 - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py2.7_cu100 - python_version: '2.7' - wheel_docker_image: soumith/manylinux-cuda100 - - binary_wheel_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py2.7_cu100_upload - requires: - - nightly_binary_linux_wheel_py2.7_cu100 - subfolder: cu100/ - - binary_linux_wheel: - cu_version: cu100 - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py2.7u_cu100 - python_version: '2.7' - unicode_abi: '1' - wheel_docker_image: soumith/manylinux-cuda100 - - binary_wheel_upload: - 
context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py2.7u_cu100_upload - requires: - - nightly_binary_linux_wheel_py2.7u_cu100 - subfolder: cu100/ - - binary_linux_wheel: - cu_version: cu101 - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py2.7_cu101 - python_version: '2.7' - - binary_wheel_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py2.7_cu101_upload - requires: - - nightly_binary_linux_wheel_py2.7_cu101 - subfolder: cu101/ - - binary_linux_wheel: - cu_version: cu101 - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py2.7u_cu101 - python_version: '2.7' - unicode_abi: '1' - - binary_wheel_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py2.7u_cu101_upload - requires: - - nightly_binary_linux_wheel_py2.7u_cu101 - subfolder: cu101/ - - binary_linux_wheel: - cu_version: cpu - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py3.5_cpu - python_version: '3.5' - - binary_wheel_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py3.5_cpu_upload - requires: - - nightly_binary_linux_wheel_py3.5_cpu - subfolder: cpu/ - - binary_linux_wheel: - cu_version: cu92 - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py3.5_cu92 - python_version: '3.5' - wheel_docker_image: soumith/manylinux-cuda92 - - binary_wheel_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py3.5_cu92_upload - requires: - - nightly_binary_linux_wheel_py3.5_cu92 - subfolder: cu92/ - - binary_linux_wheel: - cu_version: cu100 - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py3.5_cu100 - python_version: '3.5' - wheel_docker_image: soumith/manylinux-cuda100 - - binary_wheel_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py3.5_cu100_upload - requires: - - nightly_binary_linux_wheel_py3.5_cu100 - subfolder: cu100/ - - binary_linux_wheel: - cu_version: cu101 - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py3.5_cu101 - python_version: '3.5' - - binary_wheel_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py3.5_cu101_upload - requires: - - nightly_binary_linux_wheel_py3.5_cu101 - subfolder: cu101/ - - binary_linux_wheel: - cu_version: cpu - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py3.6_cpu - python_version: '3.6' - - binary_wheel_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py3.6_cpu_upload - requires: - - nightly_binary_linux_wheel_py3.6_cpu - subfolder: cpu/ - - binary_linux_wheel: - cu_version: cu92 - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py3.6_cu92 - python_version: '3.6' - wheel_docker_image: soumith/manylinux-cuda92 - - binary_wheel_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py3.6_cu92_upload - requires: - - nightly_binary_linux_wheel_py3.6_cu92 - subfolder: cu92/ - - binary_linux_wheel: - cu_version: cu100 - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py3.6_cu100 - python_version: '3.6' - wheel_docker_image: soumith/manylinux-cuda100 - - binary_wheel_upload: - context: org-member - filters: - branches: - 
only: nightly - name: nightly_binary_linux_wheel_py3.6_cu100_upload - requires: - - nightly_binary_linux_wheel_py3.6_cu100 - subfolder: cu100/ - - binary_linux_wheel: - cu_version: cu101 - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py3.6_cu101 - python_version: '3.6' - - binary_wheel_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py3.6_cu101_upload - requires: - - nightly_binary_linux_wheel_py3.6_cu101 - subfolder: cu101/ - - binary_linux_wheel: - cu_version: cpu - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py3.7_cpu - python_version: '3.7' - - binary_wheel_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py3.7_cpu_upload - requires: - - nightly_binary_linux_wheel_py3.7_cpu - subfolder: cpu/ - - binary_linux_wheel: - cu_version: cu92 - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py3.7_cu92 - python_version: '3.7' - wheel_docker_image: soumith/manylinux-cuda92 - - binary_wheel_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py3.7_cu92_upload - requires: - - nightly_binary_linux_wheel_py3.7_cu92 - subfolder: cu92/ - - binary_linux_wheel: - cu_version: cu100 - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py3.7_cu100 - python_version: '3.7' - wheel_docker_image: soumith/manylinux-cuda100 - - binary_wheel_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py3.7_cu100_upload - requires: - - nightly_binary_linux_wheel_py3.7_cu100 - subfolder: cu100/ - - binary_linux_wheel: - cu_version: cu101 - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py3.7_cu101 - python_version: '3.7' - - binary_wheel_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_wheel_py3.7_cu101_upload - requires: - - nightly_binary_linux_wheel_py3.7_cu101 - subfolder: cu101/ - - binary_macos_wheel: - cu_version: cpu - filters: - branches: - only: nightly - name: nightly_binary_macos_wheel_py2.7_cpu - python_version: '2.7' - - binary_wheel_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_macos_wheel_py2.7_cpu_upload - requires: - - nightly_binary_macos_wheel_py2.7_cpu - subfolder: '' - - binary_macos_wheel: - cu_version: cpu - filters: - branches: - only: nightly - name: nightly_binary_macos_wheel_py2.7u_cpu - python_version: '2.7' - unicode_abi: '1' - - binary_wheel_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_macos_wheel_py2.7u_cpu_upload - requires: - - nightly_binary_macos_wheel_py2.7u_cpu - subfolder: '' - - binary_macos_wheel: - cu_version: cpu - filters: - branches: - only: nightly - name: nightly_binary_macos_wheel_py3.5_cpu - python_version: '3.5' - - binary_wheel_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_macos_wheel_py3.5_cpu_upload - requires: - - nightly_binary_macos_wheel_py3.5_cpu - subfolder: '' - - binary_macos_wheel: - cu_version: cpu - filters: - branches: - only: nightly - name: nightly_binary_macos_wheel_py3.6_cpu - python_version: '3.6' - - binary_wheel_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_macos_wheel_py3.6_cpu_upload - requires: - - nightly_binary_macos_wheel_py3.6_cpu - subfolder: '' - - binary_macos_wheel: - 
cu_version: cpu - filters: - branches: - only: nightly - name: nightly_binary_macos_wheel_py3.7_cpu - python_version: '3.7' - - binary_wheel_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_macos_wheel_py3.7_cpu_upload - requires: - - nightly_binary_macos_wheel_py3.7_cpu - subfolder: '' - - binary_linux_conda: - cu_version: cpu - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py2.7_cpu - python_version: '2.7' - - binary_conda_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py2.7_cpu_upload - requires: - - nightly_binary_linux_conda_py2.7_cpu - - binary_linux_conda: - cu_version: cu92 - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py2.7_cu92 - python_version: '2.7' - wheel_docker_image: soumith/manylinux-cuda92 - - binary_conda_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py2.7_cu92_upload - requires: - - nightly_binary_linux_conda_py2.7_cu92 - - binary_linux_conda: - cu_version: cu100 - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py2.7_cu100 - python_version: '2.7' - wheel_docker_image: soumith/manylinux-cuda100 - - binary_conda_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py2.7_cu100_upload - requires: - - nightly_binary_linux_conda_py2.7_cu100 - - binary_linux_conda: - cu_version: cu101 - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py2.7_cu101 - python_version: '2.7' - - binary_conda_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py2.7_cu101_upload - requires: - - nightly_binary_linux_conda_py2.7_cu101 - - binary_linux_conda: - cu_version: cpu - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py3.5_cpu - python_version: '3.5' - - binary_conda_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py3.5_cpu_upload - requires: - - nightly_binary_linux_conda_py3.5_cpu - - binary_linux_conda: - cu_version: cu92 - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py3.5_cu92 - python_version: '3.5' - wheel_docker_image: soumith/manylinux-cuda92 - - binary_conda_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py3.5_cu92_upload - requires: - - nightly_binary_linux_conda_py3.5_cu92 - - binary_linux_conda: - cu_version: cu100 - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py3.5_cu100 - python_version: '3.5' - wheel_docker_image: soumith/manylinux-cuda100 - - binary_conda_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py3.5_cu100_upload - requires: - - nightly_binary_linux_conda_py3.5_cu100 - - binary_linux_conda: - cu_version: cu101 - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py3.5_cu101 - python_version: '3.5' - - binary_conda_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py3.5_cu101_upload - requires: - - nightly_binary_linux_conda_py3.5_cu101 - - binary_linux_conda: - cu_version: cpu - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py3.6_cpu - python_version: '3.6' - - binary_conda_upload: - context: org-member - filters: - branches: - only: nightly - name: 
nightly_binary_linux_conda_py3.6_cpu_upload - requires: - - nightly_binary_linux_conda_py3.6_cpu - - binary_linux_conda: - cu_version: cu92 - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py3.6_cu92 - python_version: '3.6' - wheel_docker_image: soumith/manylinux-cuda92 - - binary_conda_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py3.6_cu92_upload - requires: - - nightly_binary_linux_conda_py3.6_cu92 - - binary_linux_conda: - cu_version: cu100 - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py3.6_cu100 - python_version: '3.6' - wheel_docker_image: soumith/manylinux-cuda100 - - binary_conda_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py3.6_cu100_upload - requires: - - nightly_binary_linux_conda_py3.6_cu100 - - binary_linux_conda: - cu_version: cu101 - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py3.6_cu101 - python_version: '3.6' - - binary_conda_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py3.6_cu101_upload - requires: - - nightly_binary_linux_conda_py3.6_cu101 - - binary_linux_conda: - cu_version: cpu - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py3.7_cpu - python_version: '3.7' - - binary_conda_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py3.7_cpu_upload - requires: - - nightly_binary_linux_conda_py3.7_cpu - - binary_linux_conda: - cu_version: cu92 - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py3.7_cu92 - python_version: '3.7' - wheel_docker_image: soumith/manylinux-cuda92 - - binary_conda_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py3.7_cu92_upload - requires: - - nightly_binary_linux_conda_py3.7_cu92 - - binary_linux_conda: - cu_version: cu100 - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py3.7_cu100 - python_version: '3.7' - wheel_docker_image: soumith/manylinux-cuda100 - - binary_conda_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py3.7_cu100_upload - requires: - - nightly_binary_linux_conda_py3.7_cu100 - - binary_linux_conda: - cu_version: cu101 - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py3.7_cu101 - python_version: '3.7' - - binary_conda_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_linux_conda_py3.7_cu101_upload - requires: - - nightly_binary_linux_conda_py3.7_cu101 - - binary_macos_conda: - cu_version: cpu - filters: - branches: - only: nightly - name: nightly_binary_macos_conda_py2.7_cpu - python_version: '2.7' - - binary_conda_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_macos_conda_py2.7_cpu_upload - requires: - - nightly_binary_macos_conda_py2.7_cpu - - binary_macos_conda: - cu_version: cpu - filters: - branches: - only: nightly - name: nightly_binary_macos_conda_py3.5_cpu - python_version: '3.5' - - binary_conda_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_macos_conda_py3.5_cpu_upload - requires: - - nightly_binary_macos_conda_py3.5_cpu - - binary_macos_conda: - cu_version: cpu - filters: - branches: - only: nightly - name: nightly_binary_macos_conda_py3.6_cpu - python_version: '3.6' - - 
binary_conda_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_macos_conda_py3.6_cpu_upload - requires: - - nightly_binary_macos_conda_py3.6_cpu - - binary_macos_conda: - cu_version: cpu - filters: - branches: - only: nightly - name: nightly_binary_macos_conda_py3.7_cpu - python_version: '3.7' - - binary_conda_upload: - context: org-member - filters: - branches: - only: nightly - name: nightly_binary_macos_conda_py3.7_cpu_upload - requires: - - nightly_binary_macos_conda_py3.7_cpu \ No newline at end of file diff --git a/.circleci/config.yml.in b/.circleci/config.yml.in deleted file mode 100644 index 999904576b9..00000000000 --- a/.circleci/config.yml.in +++ /dev/null @@ -1,317 +0,0 @@ -version: 2.1 - -# How to test the Linux jobs: -# - Install CircleCI local CLI: https://circleci.com/docs/2.0/local-cli/ -# - circleci config process .circleci/config.yml > gen.yml && circleci local execute -c gen.yml --job binary_linux_wheel_py3.7 -# - Replace binary_linux_wheel_py3.7 with the name of the job you want to test. -# Job names are 'name:' key. - -orbs: - win: circleci/windows@2.0.0 - -executors: - windows-gpu-prototype: - machine: - resource_class: windows.gpu.small.prototype - image: windows-server-2019-nvidia:201908-28 - shell: bash.exe - -commands: - checkout_merge: - description: "checkout merge branch" - steps: - - checkout -# - run: -# name: Checkout merge branch -# command: | -# set -ex -# BRANCH=$(git rev-parse --abbrev-ref HEAD) -# if [[ "$BRANCH" != "master" ]]; then -# git fetch --force origin ${CIRCLE_BRANCH}/merge:merged/${CIRCLE_BRANCH} -# git checkout "merged/$CIRCLE_BRANCH" -# fi - -binary_common: &binary_common - parameters: - # Edit these defaults to do a release` - build_version: - description: "version number of release binary; by default, build a nightly" - type: string - default: "" - pytorch_version: - description: "PyTorch version to build against; by default, use a nightly" - type: string - default: "" - # Don't edit these - python_version: - description: "Python version to build against (e.g., 3.7)" - type: string - cu_version: - description: "CUDA version to build against, in CU format (e.g., cpu or cu100)" - type: string - unicode_abi: - description: "Python 2.7 wheel only: whether or not we are cp27mu (default: no)" - type: string - default: "" - wheel_docker_image: - description: "Wheel only: what docker image to use" - type: string - default: "soumith/manylinux-cuda101" - environment: - PYTHON_VERSION: << parameters.python_version >> - BUILD_VERSION: << parameters.build_version >> - PYTORCH_VERSION: << parameters.pytorch_version >> - UNICODE_ABI: << parameters.unicode_abi >> - CU_VERSION: << parameters.cu_version >> - -jobs: - circleci_consistency: - docker: - - image: circleci/python:3.7 - steps: - - checkout - - run: - command: | - pip install --user --progress-bar off jinja2 pyyaml - python .circleci/regenerate.py - git diff --exit-code || (echo ".circleci/config.yml not in sync with config.yml.in! 
Run .circleci/regenerate.py to update config"; exit 1) - - binary_linux_wheel: - <<: *binary_common - docker: - - image: << parameters.wheel_docker_image >> - resource_class: 2xlarge+ - steps: - - checkout_merge - - run: packaging/build_wheel.sh - - store_artifacts: - path: dist - - persist_to_workspace: - root: dist - paths: - - "*" - - binary_linux_conda: - <<: *binary_common - docker: - - image: "soumith/conda-cuda" - resource_class: 2xlarge+ - steps: - - checkout_merge - - run: packaging/build_conda.sh - - store_artifacts: - path: /opt/conda/conda-bld/linux-64 - - persist_to_workspace: - root: /opt/conda/conda-bld/linux-64 - paths: - - "*" - - binary_linux_conda_cuda: - <<: *binary_common - machine: - image: ubuntu-1604:201903-01 - resource_class: gpu.medium - steps: - - checkout_merge - - run: - name: Setup environment - command: | - set -e - - curl -L https://packagecloud.io/circleci/trusty/gpgkey | sudo apt-key add - - curl -L https://dl.google.com/linux/linux_signing_key.pub | sudo apt-key add - - - sudo apt-get update - - sudo apt-get install \ - apt-transport-https \ - ca-certificates \ - curl \ - gnupg-agent \ - software-properties-common - - curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add - - - sudo add-apt-repository \ - "deb [arch=amd64] https://download.docker.com/linux/ubuntu \ - $(lsb_release -cs) \ - stable" - - sudo apt-get update - export DOCKER_VERSION="5:19.03.2~3-0~ubuntu-xenial" - sudo apt-get install docker-ce=${DOCKER_VERSION} docker-ce-cli=${DOCKER_VERSION} containerd.io=1.2.6-3 - - # Add the package repositories - distribution=$(. /etc/os-release;echo $ID$VERSION_ID) - curl -s -L https://nvidia.github.io/nvidia-docker/gpgkey | sudo apt-key add - - curl -s -L https://nvidia.github.io/nvidia-docker/$distribution/nvidia-docker.list | sudo tee /etc/apt/sources.list.d/nvidia-docker.list - - export NVIDIA_CONTAINER_VERSION="1.0.3-1" - sudo apt-get update && sudo apt-get install -y nvidia-container-toolkit=${NVIDIA_CONTAINER_VERSION} - sudo systemctl restart docker - - DRIVER_FN="NVIDIA-Linux-x86_64-410.104.run" - wget "https://s3.amazonaws.com/ossci-linux/nvidia_driver/$DRIVER_FN" - sudo /bin/bash "$DRIVER_FN" -s --no-drm || (sudo cat /var/log/nvidia-installer.log && false) - nvidia-smi - - - run: - name: Pull docker image - command: | - set -e - export DOCKER_IMAGE=soumith/conda-cuda - echo Pulling docker image $DOCKER_IMAGE - docker pull $DOCKER_IMAGE >/dev/null - - - run: - name: Build and run tests - command: | - set -e - - cd ${HOME}/project/ - - export DOCKER_IMAGE=soumith/conda-cuda - export VARS_TO_PASS="-e PYTHON_VERSION -e BUILD_VERSION -e PYTORCH_VERSION -e UNICODE_ABI -e CU_VERSION" - - docker run --gpus all --ipc=host -v $(pwd):/remote -w /remote ${VARS_TO_PASS} ${DOCKER_IMAGE} ./packaging/build_conda.sh - - binary_win_conda: - <<: *binary_common - executor: - name: win/default - shell: bash.exe - steps: - - checkout_merge - - run: - command: | - choco install miniconda3 - (& "C:\tools\miniconda3\Scripts\conda.exe" "shell.powershell" "hook") | Out-String | Invoke-Expression - conda activate base - conda install -yq conda-build "conda-package-handling!=1.5.0" - bash packaging/build_conda.sh - shell: powershell.exe - - binary_win_conda_cuda: - <<: *binary_common - executor: windows-gpu-prototype - steps: - - checkout_merge - - run: - command: | - choco install miniconda3 - (& "C:\tools\miniconda3\Scripts\conda.exe" "shell.powershell" "hook") | Out-String | Invoke-Expression - conda activate base - conda install -yq conda-build 
"conda-package-handling!=1.5.0" - bash packaging/build_conda.sh - shell: powershell.exe - - binary_macos_wheel: - <<: *binary_common - macos: - xcode: "9.0" - steps: - - checkout_merge - - run: - # Cannot easily deduplicate this as source'ing activate - # will set environment variables which we need to propagate - # to build_wheel.sh - command: | - curl -o conda.sh https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh - sh conda.sh -b - source $HOME/miniconda3/bin/activate - packaging/build_wheel.sh - - store_artifacts: - path: dist - - persist_to_workspace: - root: dist - paths: - - "*" - - binary_macos_conda: - <<: *binary_common - macos: - xcode: "9.0" - steps: - - checkout_merge - - run: - command: | - curl -o conda.sh https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh - sh conda.sh -b - source $HOME/miniconda3/bin/activate - conda install -yq conda-build - packaging/build_conda.sh - - store_artifacts: - path: /Users/distiller/miniconda3/conda-bld/osx-64 - - persist_to_workspace: - root: /Users/distiller/miniconda3/conda-bld/osx-64 - paths: - - "*" - - # Requires org-member context - binary_conda_upload: - docker: - - image: continuumio/miniconda - steps: - - attach_workspace: - at: ~/workspace - - run: - command: | - # Prevent credential from leaking - conda install -yq anaconda-client - set +x - anaconda login \ - --username "$PYTORCH_BINARY_PJH5_CONDA_USERNAME" \ - --password "$PYTORCH_BINARY_PJH5_CONDA_PASSWORD" - set -x - anaconda upload ~/workspace/*.tar.bz2 -u pytorch-nightly --label main --no-progress --force - - # Requires org-member context - binary_wheel_upload: - parameters: - subfolder: - description: "What whl subfolder to upload to, e.g., blank or cu100/ (trailing slash is important)" - type: string - docker: - - image: circleci/python:3.7 - steps: - - attach_workspace: - at: ~/workspace - - checkout - - run: - command: | - pip install --user awscli - export PATH="$HOME/.local/bin:$PATH" - # Prevent credential from leaking - set +x - export AWS_ACCESS_KEY_ID="${PYTORCH_BINARY_AWS_ACCESS_KEY_ID}" - export AWS_SECRET_ACCESS_KEY="${PYTORCH_BINARY_AWS_SECRET_ACCESS_KEY}" - set -x - for pkg in ~/workspace/*.whl; do - aws s3 cp "$pkg" "s3://pytorch/whl/nightly/<< parameters.subfolder >>" --acl public-read - done - - -workflows: - build: -{%- if True %} - jobs: - - circleci_consistency - {{ workflows() }} - - binary_linux_conda_cuda: - name: torchvision_linux_py3.7_cu100 - python_version: "3.7" - cu_version: "cu100" - - binary_win_conda: - name: torchvision_win_py3.6_cpu - python_version: "3.6" - cu_version: "cpu" - - binary_win_conda_cuda: - name: torchvision_win_py3.6_cu101 - python_version: "3.6" - cu_version: "cu101" - - nightly: -{%- endif %} - jobs: - - circleci_consistency - {{ workflows(prefix="nightly_", filter_branch="nightly", upload=True) }} diff --git a/.circleci/regenerate.py b/.circleci/regenerate.py deleted file mode 100755 index e7d85d2f911..00000000000 --- a/.circleci/regenerate.py +++ /dev/null @@ -1,105 +0,0 @@ -#!/usr/bin/env python3 - -""" -This script should use a very simple, functional programming style. -Avoid Jinja macros in favor of native Python functions. - -Don't go overboard on code generation; use Python only to generate -content that can't be easily declared statically using CircleCI's YAML API. - -Data declarations (e.g. the nested loops for defining the configuration matrix) -should be at the top of the file for easy updating. 
- -See this comment for design rationale: -https://github.com/pytorch/vision/pull/1321#issuecomment-531033978 -""" - -import jinja2 -import yaml -import os.path - - -def workflows(prefix='', filter_branch=None, upload=False, indentation=6): - w = [] - for btype in ["wheel", "conda"]: - for os_type in ["linux", "macos"]: - for python_version in ["2.7", "3.5", "3.6", "3.7"]: - for cu_version in (["cpu", "cu92", "cu100", "cu101"] if os_type == "linux" else ["cpu"]): - for unicode in ([False, True] if btype == "wheel" and python_version == "2.7" else [False]): - w += workflow_pair( - btype, os_type, python_version, cu_version, - unicode, prefix, upload, filter_branch=filter_branch) - - return indent(indentation, w) - - -def workflow_pair(btype, os_type, python_version, cu_version, unicode, prefix='', upload=False, *, filter_branch=None): - - w = [] - unicode_suffix = "u" if unicode else "" - base_workflow_name = f"{prefix}binary_{os_type}_{btype}_py{python_version}{unicode_suffix}_{cu_version}" - - w.append(generate_base_workflow( - base_workflow_name, python_version, cu_version, - unicode, os_type, btype, filter_branch=filter_branch)) - - if upload: - w.append(generate_upload_workflow(base_workflow_name, os_type, btype, cu_version, filter_branch=filter_branch)) - - return w - - -def generate_base_workflow(base_workflow_name, python_version, cu_version, - unicode, os_type, btype, *, filter_branch=None): - - d = { - "name": base_workflow_name, - "python_version": python_version, - "cu_version": cu_version, - } - - if unicode: - d["unicode_abi"] = '1' - - if cu_version == "cu92": - d["wheel_docker_image"] = "soumith/manylinux-cuda92" - elif cu_version == "cu100": - d["wheel_docker_image"] = "soumith/manylinux-cuda100" - - if filter_branch is not None: - d["filters"] = {"branches": {"only": filter_branch}} - - return {f"binary_{os_type}_{btype}": d} - - -def generate_upload_workflow(base_workflow_name, os_type, btype, cu_version, *, filter_branch=None): - d = { - "name": f"{base_workflow_name}_upload", - "context": "org-member", - "requires": [base_workflow_name], - } - - if btype == 'wheel': - d["subfolder"] = "" if os_type == 'macos' else cu_version + "/" - - if filter_branch is not None: - d["filters"] = {"branches": {"only": filter_branch}} - - return {f"binary_{btype}_upload": d} - - -def indent(indentation, data_list): - return ("\n" + " " * indentation).join( - yaml.dump(data_list, default_flow_style=False).splitlines()) - - -if __name__ == "__main__": - d = os.path.dirname(__file__) - env = jinja2.Environment( - loader=jinja2.FileSystemLoader(d), - lstrip_blocks=True, - autoescape=False, - ) - - with open(os.path.join(d, 'config.yml'), 'w') as f: - f.write(env.get_template('config.yml.in').render(workflows=workflows)) diff --git a/.clang-format b/.clang-format index 6d0ab740db4..9f20a44fe9b 100644 --- a/.clang-format +++ b/.clang-format @@ -1,88 +1,5 @@ --- -AccessModifierOffset: -1 -AlignAfterOpenBracket: AlwaysBreak -AlignConsecutiveAssignments: false -AlignConsecutiveDeclarations: false -AlignEscapedNewlinesLeft: true -AlignOperands: false -AlignTrailingComments: false -AllowAllParametersOfDeclarationOnNextLine: false -AllowShortBlocksOnASingleLine: false -AllowShortCaseLabelsOnASingleLine: false -AllowShortFunctionsOnASingleLine: Empty -AllowShortIfStatementsOnASingleLine: false -AllowShortLoopsOnASingleLine: false -AlwaysBreakAfterReturnType: None -AlwaysBreakBeforeMultilineStrings: true -AlwaysBreakTemplateDeclarations: true -BinPackArguments: false -BinPackParameters: false 
-BraceWrapping: - AfterClass: false - AfterControlStatement: false - AfterEnum: false - AfterFunction: false - AfterNamespace: false - AfterObjCDeclaration: false - AfterStruct: false - AfterUnion: false - BeforeCatch: false - BeforeElse: false - IndentBraces: false -BreakBeforeBinaryOperators: None -BreakBeforeBraces: Attach -BreakBeforeTernaryOperators: true -BreakConstructorInitializersBeforeComma: false -BreakAfterJavaFieldAnnotations: false -BreakStringLiterals: false -ColumnLimit: 80 -CommentPragmas: '^ IWYU pragma:' -#CompactNamespaces: false -ConstructorInitializerAllOnOneLineOrOnePerLine: true -ConstructorInitializerIndentWidth: 4 -ContinuationIndentWidth: 4 -Cpp11BracedListStyle: true -DerivePointerAlignment: false -DisableFormat: false -ForEachMacros: [ FOR_EACH_RANGE, FOR_EACH, ] -IncludeCategories: - - Regex: '^<.*\.h(pp)?>' - Priority: 1 - - Regex: '^<.*' - Priority: 2 - - Regex: '.*' - Priority: 3 -IndentCaseLabels: true -IndentWidth: 2 -IndentWrappedFunctionNames: false -KeepEmptyLinesAtTheStartOfBlocks: false -MacroBlockBegin: '' -MacroBlockEnd: '' -MaxEmptyLinesToKeep: 1 -NamespaceIndentation: None -ObjCBlockIndentWidth: 2 -ObjCSpaceAfterProperty: false -ObjCSpaceBeforeProtocolList: false -PenaltyBreakBeforeFirstCallParameter: 1 -PenaltyBreakComment: 300 -PenaltyBreakFirstLessLess: 120 -PenaltyBreakString: 1000 -PenaltyExcessCharacter: 1000000 -PenaltyReturnTypeOnItsOwnLine: 2000000 -PointerAlignment: Left -ReflowComments: true -SortIncludes: true -SpaceAfterCStyleCast: false -SpaceBeforeAssignmentOperators: true -SpaceBeforeParens: ControlStatements -SpaceInEmptyParentheses: false -SpacesBeforeTrailingComments: 1 -SpacesInAngles: false -SpacesInContainerLiterals: true -SpacesInCStyleCastParentheses: false -SpacesInParentheses: false -SpacesInSquareBrackets: false -Standard: Cpp11 -TabWidth: 8 -UseTab: Never +Language: ObjC +DisableFormat: true +SortIncludes: false ... diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index c765e471155..00000000000 --- a/.coveragerc +++ /dev/null @@ -1,7 +0,0 @@ -[run] -branch = True - -[paths] -source = - torchvision - /**/site-packages/torchvision diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 00000000000..5e88f5b9bb7 --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,13 @@ +# This file keeps git blame clean. 
+# See https://docs.github.com/en/repositories/working-with-files/using-files/viewing-a-file#ignore-commits-in-the-blame-view + +# Add ufmt (usort + black) as code formatter (#4384) +5f0edb97b46e5bff71dc19dedef05c5396eeaea2 +# update python syntax >=3.6 (#4585) +d367a01a18a3ae6bee13d8be3b63fd6a581ea46f +# Upgrade usort to 1.0.2 and black to 22.3.0 (#5106) +6ca9c76adb6daf2695d603ad623a9cf1c4f4806f +# Fix unnecessary exploded black formatting (#7709) +a335d916db0694770e8152f41e19195de3134523 +# Renaming: `BoundingBox` -> `BoundingBoxes` (#7778) +332bff937c6711666191880fab57fa2f23ae772e diff --git a/.gitattributes b/.gitattributes index a476e7afb59..22d0452f8d7 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1 +1,8 @@ *.pkl binary +# Jupyter notebook + +# For text count +# *.ipynb text + +# To ignore it use below +*.ipynb linguist-documentation diff --git a/.github/ISSUE_TEMPLATE/bug-report.yml b/.github/ISSUE_TEMPLATE/bug-report.yml new file mode 100644 index 00000000000..ba811554c43 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug-report.yml @@ -0,0 +1,60 @@ +name: 🐛 Bug Report +description: Create a report to help us reproduce and fix the bug + +body: +- type: markdown + attributes: + value: > + #### Before submitting a bug, please make sure the issue hasn't been already addressed by searching through [the existing and past issues](https://github.com/pytorch/vision/issues?q=is%3Aissue+sort%3Acreated-desc+). +- type: textarea + attributes: + label: 🐛 Describe the bug + description: | + Please provide a clear and concise description of what the bug is. + + If relevant, add a minimal example so that we can reproduce the error by running the code. It is very important for the snippet to be as succinct (minimal) as possible, so please take time to trim down any irrelevant code to help us debug efficiently. We are going to copy-paste your code and we expect to get the same result as you did: avoid any external data, and include the relevant imports, etc. For example: + + ```python + # All necessary imports at the beginning + import torch + import torchvision + from torchvision.ops import nms + + # A succinct reproducing example trimmed down to the essential parts: + N = 5 + boxes = torch.rand(N, 4) # Note: the bug is here, we should enforce that x1 < x2 and y1 < y2! + scores = torch.rand(N) + nms(boxes, scores, iou_threshold=.9) + ``` + + If the code is too long (hopefully, it isn't), feel free to put it in a public gist and link it in the issue: https://gist.github.com. + + Please also paste or describe the results you observe instead of the expected results. If you observe an error, please paste the error message including the **full** traceback of the exception. It may be relevant to wrap error messages in ```` ```triple quotes blocks``` ````. + placeholder: | + A clear and concise description of what the bug is. + + ```python + Sample code to reproduce the problem + ``` + + ``` + The error message you got, with the full traceback. + ```` + validations: + required: true +- type: textarea + attributes: + label: Versions + description: | + Please run the following and paste the output below. + ```sh + wget https://raw.githubusercontent.com/pytorch/pytorch/main/torch/utils/collect_env.py + # For security purposes, please check the contents of collect_env.py before running it. + python collect_env.py + ``` + validations: + required: true +- type: markdown + attributes: + value: > + Thanks for contributing 🎉! 
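The `.git-blame-ignore-revs` file introduced a few hunks above is honored automatically by GitHub's blame view (per the URL in its header comment), but a local `git blame` has to be pointed at it explicitly. A minimal per-clone opt-in sketch, assuming git 2.23 or newer; the example path is just any tracked file:

```sh
# One-time configuration so `git blame` skips the formatting-only
# commits listed in .git-blame-ignore-revs.
git config blame.ignoreRevsFile .git-blame-ignore-revs

# Or supply the file for a single invocation:
git blame --ignore-revs-file .git-blame-ignore-revs -- torchvision/ops/boxes.py
```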
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 00000000000..bdb6d3614f3 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,5 @@ +blank_issues_enabled: true +contact_links: + - name: Usage questions + url: https://discuss.pytorch.org/ + about: Ask questions and discuss with other torchvision community members diff --git a/.github/ISSUE_TEMPLATE/documentation.yml b/.github/ISSUE_TEMPLATE/documentation.yml new file mode 100644 index 00000000000..a7fb6c04c63 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/documentation.yml @@ -0,0 +1,20 @@ +name: 📚 Documentation +description: Report an issue related to https://pytorch.org/vision/stable/index.html + +body: +- type: textarea + attributes: + label: 📚 The doc issue + description: > + A clear and concise description of what content in https://pytorch.org/vision/stable/index.html is an issue. If this has to do with the general https://pytorch.org website, please file an issue at https://github.com/pytorch/pytorch.github.io/issues/new/choose instead. If this has to do with https://pytorch.org/tutorials, please file an issue at https://github.com/pytorch/tutorials/issues/new. + validations: + required: true +- type: textarea + attributes: + label: Suggest a potential alternative/fix + description: > + Tell us how we could improve the documentation in this regard. +- type: markdown + attributes: + value: > + Thanks for contributing 🎉! diff --git a/.github/ISSUE_TEMPLATE/feature-request.yml b/.github/ISSUE_TEMPLATE/feature-request.yml new file mode 100644 index 00000000000..85c727dbcf5 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature-request.yml @@ -0,0 +1,32 @@ +name: 🚀 Feature request +description: Submit a proposal/request for a new torchvision feature + +body: +- type: textarea + attributes: + label: 🚀 The feature + description: > + A clear and concise description of the feature proposal + validations: + required: true +- type: textarea + attributes: + label: Motivation, pitch + description: > + Please outline the motivation for the proposal. Is your feature request related to a specific problem? e.g., *"I'm working on X and would like Y to be possible"*. If this is related to another GitHub issue, please link here too. + validations: + required: true +- type: textarea + attributes: + label: Alternatives + description: > + A description of any alternative solutions or features you've considered, if any. +- type: textarea + attributes: + label: Additional context + description: > + Add any other context or screenshots about the feature request. +- type: markdown + attributes: + value: > + Thanks for contributing 🎉! diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 00000000000..f267cc7da50 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1 @@ + diff --git a/.github/failed_schedule_issue_template.md b/.github/failed_schedule_issue_template.md new file mode 100644 index 00000000000..5e2d77550ac --- /dev/null +++ b/.github/failed_schedule_issue_template.md @@ -0,0 +1,13 @@ +--- +title: Scheduled workflow failed +labels: + - bug + - "module: datasets" +--- + +Oh no, something went wrong in the scheduled workflow {{ env.WORKFLOW }}/{{ env.JOB }}. +Please look into it: + +https://github.com/{{ env.REPO }}/actions/runs/{{ env.ID }} + +Feel free to close this if this was just a one-off error. 
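The `process_commit.py` script added next is, per its docstring, driven by `.github/workflows/pr-labels.yml`, which is not part of this diff, so the exact calling convention is an assumption. A plausible sketch of the invoking step, given that the script takes a commit SHA as its single argument and prints `@<merger>` only when the associated PR lacks a primary or secondary label:

```sh
# Hypothetical caller for .github/process_commit.py; the real workflow
# file is not shown in this diff.
commit_sha="$(git rev-parse HEAD)"
ping="$(python .github/process_commit.py "${commit_sha}")"
if [ -n "${ping}" ]; then
  # Non-empty output is "@<merger>"; surfacing it pings the merger.
  echo "${ping} this PR is missing a primary and/or secondary label."
fi
```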
diff --git a/.github/process_commit.py b/.github/process_commit.py new file mode 100644 index 00000000000..e1e534d98f4 --- /dev/null +++ b/.github/process_commit.py @@ -0,0 +1,81 @@ +""" +This script finds the merger responsible for labeling a PR by a commit SHA. It is used by the workflow in +'.github/workflows/pr-labels.yml'. If there exists no PR associated with the commit or the PR is properly labeled, +this script is a no-op. + +Note: we ping the merger only, not the reviewers, as the reviewers can sometimes be external to torchvision +with no labeling responsibility, so we don't want to bother them. +""" + +import sys +from typing import Any, Optional, Set, Tuple + +import requests + +# For a PR to be properly labeled it should have one primary label and one secondary label +PRIMARY_LABELS = { + "new feature", + "bug", + "code quality", + "enhancement", + "bc-breaking", + "deprecation", + "other", + "prototype", +} + +SECONDARY_LABELS = { + "dependency issue", + "module: c++ frontend", + "module: ci", + "module: datasets", + "module: documentation", + "module: io", + "module: models.quantization", + "module: models", + "module: onnx", + "module: ops", + "module: reference scripts", + "module: rocm", + "module: tests", + "module: transforms", + "module: utils", + "module: video", + "Perf", + "Revert(ed)", + "topic: build", +} + + +def query_torchvision(cmd: str, *, accept) -> Any: + response = requests.get(f"https://api.github.com/repos/pytorch/vision/{cmd}", headers=dict(Accept=accept)) + return response.json() + + +def get_pr_number(commit_hash: str) -> Optional[int]: + # See https://docs.github.com/en/rest/reference/repos#list-pull-requests-associated-with-a-commit + data = query_torchvision(f"commits/{commit_hash}/pulls", accept="application/vnd.github.groot-preview+json") + if not data: + return None + return data[0]["number"] + + +def get_pr_merger_and_labels(pr_number: int) -> Tuple[str, Set[str]]: + # See https://docs.github.com/en/rest/reference/pulls#get-a-pull-request + data = query_torchvision(f"pulls/{pr_number}", accept="application/vnd.github.v3+json") + merger = data["merged_by"]["login"] + labels = {label["name"] for label in data["labels"]} + return merger, labels + + +if __name__ == "__main__": + commit_hash = sys.argv[1] + pr_number = get_pr_number(commit_hash) + if not pr_number: + sys.exit(0) + + merger, labels = get_pr_merger_and_labels(pr_number) + is_properly_labeled = bool(PRIMARY_LABELS.intersection(labels) and SECONDARY_LABELS.intersection(labels)) + + if not is_properly_labeled: + print(f"@{merger}") diff --git a/.github/pytorch-probot.yml b/.github/pytorch-probot.yml new file mode 100644 index 00000000000..1a3402466f4 --- /dev/null +++ b/.github/pytorch-probot.yml @@ -0,0 +1,10 @@ +tracking_issue: 2447 + +# List of workflows that will be re-run in case of failures +# https://github.com/pytorch/test-infra/blob/main/torchci/lib/bot/retryBot.ts +retryable_workflows: +- Build Linux +- Build Macos +- Build M1 +- Build Windows +- Tests diff --git a/.github/scripts/cmake.sh b/.github/scripts/cmake.sh new file mode 100755 index 00000000000..4217a9d24be --- /dev/null +++ b/.github/scripts/cmake.sh @@ -0,0 +1,107 @@ +#!/usr/bin/env bash + +set -euxo pipefail + +./.github/scripts/setup-env.sh + +# Activate conda environment +set +x && eval "$($(which conda) shell.bash hook)" && conda deactivate && conda activate ci && set -x + +# Setup the OS_TYPE environment variable that should be used for conditions involving the OS below. 
+case $(uname) in + Linux) + OS_TYPE=linux + ;; + Darwin) + OS_TYPE=macos + ;; + MSYS*) + OS_TYPE=windows + ;; + *) + echo "Unknown OS type:" $(uname) + exit 1 + ;; +esac + +if [[ $OS_TYPE == macos ]]; then + JOBS=$(sysctl -n hw.logicalcpu) +else + JOBS=$(nproc) +fi + +if [[ $OS_TYPE == linux ]]; then + export LD_LIBRARY_PATH="${CONDA_PREFIX}/lib:${LD_LIBRARY_PATH}" +fi + +TORCH_PATH=$(python -c "import pathlib, torch; print(pathlib.Path(torch.__path__[0]))") +if [[ $OS_TYPE == windows ]]; then + PACKAGING_DIR="${PWD}/packaging" + export PATH="${TORCH_PATH}/lib:${PATH}" +fi + +Torch_DIR="${TORCH_PATH}/share/cmake/Torch" +if [[ "${GPU_ARCH_TYPE}" == "cuda" ]]; then + WITH_CUDA=1 +else + WITH_CUDA=0 +fi + +echo '::group::Prepare CMake builds' +mkdir -p cpp_build + +pushd examples/cpp +python script_model.py +mkdir -p build +mv resnet18.pt fasterrcnn_resnet50_fpn.pt build +popd + +# This was only needed for the tracing above +pip uninstall -y torchvision +echo '::endgroup::' + +echo '::group::Build and install libtorchvision' +pushd cpp_build + + +# On macOS, CMake is looking for the library (*.dylib) and the header (*.h) separately. By default, it prefers to load +# the header from other packages that install the library. This easily leads to a mismatch if the library installed +# from conda doesn't have the exact same version. Thus, we need to explicitly set CMAKE_FIND_FRAMEWORK=NEVER to force +# it to not load anything from other installed frameworks. Resources: +# https://stackoverflow.com/questions/36523911/osx-homebrew-cmake-libpng-version-mismatch-issue +# https://cmake.org/cmake/help/latest/variable/CMAKE_FIND_FRAMEWORK.html +cmake .. -DTorch_DIR="${Torch_DIR}" -DWITH_CUDA="${WITH_CUDA}" \ + -DCMAKE_PREFIX_PATH="${CONDA_PREFIX}" \ + -DCMAKE_FIND_FRAMEWORK=NEVER \ + -DCMAKE_INSTALL_PREFIX="${CONDA_PREFIX}" +if [[ $OS_TYPE == windows ]]; then + "${PACKAGING_DIR}/windows/internal/vc_env_helper.bat" "${PACKAGING_DIR}/windows/internal/build_cmake.bat" $JOBS +else + make -j$JOBS + make install +fi + +popd +echo '::endgroup::' + +echo '::group::Build and run C++ example' +pushd examples/cpp/build + +cmake .. -DTorch_DIR="${Torch_DIR}" \ + -DCMAKE_PREFIX_PATH="${CONDA_PREFIX}" \ + -DCMAKE_FIND_FRAMEWORK=NEVER \ + -DUSE_TORCHVISION=ON # Needed for faster-rcnn since it's using torchvision ops like NMS. +if [[ $OS_TYPE == windows ]]; then + "${PACKAGING_DIR}/windows/internal/vc_env_helper.bat" "${PACKAGING_DIR}/windows/internal/build_cpp_example.bat" $JOBS + cd Release + cp ../resnet18.pt . + cp ../fasterrcnn_resnet50_fpn.pt . 
+else + make -j$JOBS +fi + +./run_model resnet18.pt +./run_model fasterrcnn_resnet50_fpn.pt + +popd +echo '::endgroup::' diff --git a/.github/scripts/export_IS_M1_CONDA_BUILD_JOB.sh b/.github/scripts/export_IS_M1_CONDA_BUILD_JOB.sh new file mode 100755 index 00000000000..1cca56ddc56 --- /dev/null +++ b/.github/scripts/export_IS_M1_CONDA_BUILD_JOB.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +export IS_M1_CONDA_BUILD_JOB=1 diff --git a/travis-scripts/run-clang-format/run-clang-format.py b/.github/scripts/run-clang-format.py similarity index 62% rename from travis-scripts/run-clang-format/run-clang-format.py rename to .github/scripts/run-clang-format.py index 3f16c833b63..670fd97833a 100755 --- a/travis-scripts/run-clang-format/run-clang-format.py +++ b/.github/scripts/run-clang-format.py @@ -1,5 +1,28 @@ #!/usr/bin/env python -"""A wrapper script around clang-format, suitable for linting multiple files +""" +MIT License + +Copyright (c) 2017 Guillaume Papin + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +A wrapper script around clang-format, suitable for linting multiple files and to use for continuous integration. This is an alternative API for the clang-format command line. @@ -8,20 +31,15 @@ """ -from __future__ import print_function, unicode_literals - import argparse -import codecs import difflib import fnmatch -import io import multiprocessing import os import signal import subprocess import sys import traceback - from functools import partial try: @@ -30,7 +48,7 @@ DEVNULL = open(os.devnull, "wb") -DEFAULT_EXTENSIONS = 'c,h,C,H,cpp,hpp,cc,hh,c++,h++,cxx,hxx' +DEFAULT_EXTENSIONS = "c,h,C,H,cpp,hpp,cc,hh,c++,h++,cxx,hxx,cu,mm" class ExitStatus: @@ -54,14 +72,8 @@ def list_files(files, recursive=False, extensions=None, exclude=None): # os.walk() supports trimming down the dnames list # by modifying it in-place, # to avoid unnecessary directory listings. 
- dnames[:] = [ - x for x in dnames - if - not fnmatch.fnmatch(os.path.join(dirpath, x), pattern) - ] - fpaths = [ - x for x in fpaths if not fnmatch.fnmatch(x, pattern) - ] + dnames[:] = [x for x in dnames if not fnmatch.fnmatch(os.path.join(dirpath, x), pattern)] + fpaths = [x for x in fpaths if not fnmatch.fnmatch(x, pattern)] for f in fpaths: ext = os.path.splitext(f)[1][1:] if ext in extensions: @@ -74,22 +86,20 @@ def list_files(files, recursive=False, extensions=None, exclude=None): def make_diff(file, original, reformatted): return list( difflib.unified_diff( - original, - reformatted, - fromfile='{}\t(original)'.format(file), - tofile='{}\t(reformatted)'.format(file), - n=3)) + original, reformatted, fromfile=f"{file}\t(original)", tofile=f"{file}\t(reformatted)", n=3 + ) + ) class DiffError(Exception): def __init__(self, message, errs=None): - super(DiffError, self).__init__(message) + super().__init__(message) self.errs = errs or [] class UnexpectedError(Exception): def __init__(self, message, exc=None): - super(UnexpectedError, self).__init__(message) + super().__init__(message) self.formatted_traceback = traceback.format_exc() self.exc = exc @@ -101,15 +111,14 @@ def run_clang_format_diff_wrapper(args, file): except DiffError: raise except Exception as e: - raise UnexpectedError('{}: {}: {}'.format(file, e.__class__.__name__, - e), e) + raise UnexpectedError(f"{file}: {e.__class__.__name__}: {e}", e) def run_clang_format_diff(args, file): try: - with io.open(file, 'r', encoding='utf-8') as f: + with open(file, encoding="utf-8") as f: original = f.readlines() - except IOError as exc: + except OSError as exc: raise DiffError(str(exc)) invocation = [args.clang_format_executable, file] @@ -129,33 +138,16 @@ def run_clang_format_diff(args, file): # > Each translation completely replaces the format string # > for the diagnostic. # > -- http://clang.llvm.org/docs/InternalsManual.html#internals-diag-translation - # - # It's not pretty, due to Python 2 & 3 compatibility. 
- encoding_py3 = {} - if sys.version_info[0] >= 3: - encoding_py3['encoding'] = 'utf-8' try: proc = subprocess.Popen( - invocation, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - universal_newlines=True, - **encoding_py3) - except OSError as exc: - raise DiffError( - "Command '{}' failed to start: {}".format( - subprocess.list2cmdline(invocation), exc - ) + invocation, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, encoding="utf-8" ) + except OSError as exc: + raise DiffError(f"Command '{subprocess.list2cmdline(invocation)}' failed to start: {exc}") proc_stdout = proc.stdout proc_stderr = proc.stderr - if sys.version_info[0] < 3: - # make the pipes compatible with Python 3, - # reading lines should output unicode - encoding = 'utf-8' - proc_stdout = codecs.getreader(encoding)(proc_stdout) - proc_stderr = codecs.getreader(encoding)(proc_stderr) + # hopefully the stderr pipe won't get full and block the process outs = list(proc_stdout.readlines()) errs = list(proc_stderr.readlines()) @@ -171,30 +163,30 @@ def run_clang_format_diff(args, file): def bold_red(s): - return '\x1b[1m\x1b[31m' + s + '\x1b[0m' + return "\x1b[1m\x1b[31m" + s + "\x1b[0m" def colorize(diff_lines): def bold(s): - return '\x1b[1m' + s + '\x1b[0m' + return "\x1b[1m" + s + "\x1b[0m" def cyan(s): - return '\x1b[36m' + s + '\x1b[0m' + return "\x1b[36m" + s + "\x1b[0m" def green(s): - return '\x1b[32m' + s + '\x1b[0m' + return "\x1b[32m" + s + "\x1b[0m" def red(s): - return '\x1b[31m' + s + '\x1b[0m' + return "\x1b[31m" + s + "\x1b[0m" for line in diff_lines: - if line[:4] in ['--- ', '+++ ']: + if line[:4] in ["--- ", "+++ "]: yield bold(line) - elif line.startswith('@@ '): + elif line.startswith("@@ "): yield cyan(line) - elif line.startswith('+'): + elif line.startswith("+"): yield green(line) - elif line.startswith('-'): + elif line.startswith("-"): yield red(line) else: yield line @@ -203,61 +195,50 @@ def red(s): def print_diff(diff_lines, use_color): if use_color: diff_lines = colorize(diff_lines) - if sys.version_info[0] < 3: - sys.stdout.writelines((l.encode('utf-8') for l in diff_lines)) - else: - sys.stdout.writelines(diff_lines) + sys.stdout.writelines(diff_lines) def print_trouble(prog, message, use_colors): - error_text = 'error:' + error_text = "error:" if use_colors: error_text = bold_red(error_text) - print("{}: {} {}".format(prog, error_text, message), file=sys.stderr) + print(f"{prog}: {error_text} {message}", file=sys.stderr) def main(): parser = argparse.ArgumentParser(description=__doc__) parser.add_argument( - '--clang-format-executable', - metavar='EXECUTABLE', - help='path to the clang-format executable', - default='clang-format') - parser.add_argument( - '--extensions', - help='comma separated list of file extensions (default: {})'.format( - DEFAULT_EXTENSIONS), - default=DEFAULT_EXTENSIONS) + "--clang-format-executable", + metavar="EXECUTABLE", + help="path to the clang-format executable", + default="clang-format", + ) parser.add_argument( - '-r', - '--recursive', - action='store_true', - help='run recursively over directories') - parser.add_argument('files', metavar='file', nargs='+') + "--extensions", + help=f"comma separated list of file extensions (default: {DEFAULT_EXTENSIONS})", + default=DEFAULT_EXTENSIONS, + ) + parser.add_argument("-r", "--recursive", action="store_true", help="run recursively over directories") + parser.add_argument("files", metavar="file", nargs="+") + parser.add_argument("-q", "--quiet", action="store_true") parser.add_argument( - '-q', - 
'--quiet', - action='store_true') - parser.add_argument( - '-j', - metavar='N', + "-j", + metavar="N", type=int, default=0, - help='run N clang-format jobs in parallel' - ' (default number of cpus + 1)') + help="run N clang-format jobs in parallel (default number of cpus + 1)", + ) parser.add_argument( - '--color', - default='auto', - choices=['auto', 'always', 'never'], - help='show colored diff (default: auto)') + "--color", default="auto", choices=["auto", "always", "never"], help="show colored diff (default: auto)" + ) parser.add_argument( - '-e', - '--exclude', - metavar='PATTERN', - action='append', + "-e", + "--exclude", + metavar="PATTERN", + action="append", default=[], - help='exclude paths matching the given glob-like pattern(s)' - ' from recursive search') + help="exclude paths matching the given glob-like pattern(s) from recursive search", + ) args = parser.parse_args() @@ -274,14 +255,14 @@ def main(): colored_stdout = False colored_stderr = False - if args.color == 'always': + if args.color == "always": colored_stdout = True colored_stderr = True - elif args.color == 'auto': + elif args.color == "auto": colored_stdout = sys.stdout.isatty() colored_stderr = sys.stderr.isatty() - version_invocation = [args.clang_format_executable, str("--version")] + version_invocation = [args.clang_format_executable, "--version"] try: subprocess.check_call(version_invocation, stdout=DEVNULL) except subprocess.CalledProcessError as e: @@ -290,19 +271,15 @@ def main(): except OSError as e: print_trouble( parser.prog, - "Command '{}' failed to start: {}".format( - subprocess.list2cmdline(version_invocation), e - ), + f"Command '{subprocess.list2cmdline(version_invocation)}' failed to start: {e}", use_colors=colored_stderr, ) return ExitStatus.TROUBLE retcode = ExitStatus.SUCCESS files = list_files( - args.files, - recursive=args.recursive, - exclude=args.exclude, - extensions=args.extensions.split(',')) + args.files, recursive=args.recursive, exclude=args.exclude, extensions=args.extensions.split(",") + ) if not files: return @@ -319,8 +296,7 @@ def main(): pool = None else: pool = multiprocessing.Pool(njobs) - it = pool.imap_unordered( - partial(run_clang_format_diff_wrapper, args), files) + it = pool.imap_unordered(partial(run_clang_format_diff_wrapper, args), files) while True: try: outs, errs = next(it) @@ -351,5 +327,5 @@ def main(): return retcode -if __name__ == '__main__': +if __name__ == "__main__": sys.exit(main()) diff --git a/.github/scripts/setup-env.sh b/.github/scripts/setup-env.sh new file mode 100755 index 00000000000..24e7aa97986 --- /dev/null +++ b/.github/scripts/setup-env.sh @@ -0,0 +1,113 @@ +#!/usr/bin/env bash + +set -euxo pipefail + +# Prepare conda +set +x && eval "$($(which conda) shell.bash hook)" && set -x + +# Setup the OS_TYPE environment variable that should be used for conditions involving the OS below. 
+case $(uname) in + Linux) + OS_TYPE=linux + ;; + Darwin) + OS_TYPE=macos + ;; + MSYS*) + OS_TYPE=windows + ;; + *) + echo "Unknown OS type:" $(uname) + exit 1 + ;; +esac + +echo '::group::Create build environment' +# See https://github.com/pytorch/vision/issues/7296 for ffmpeg +conda create \ + --name ci \ + --quiet --yes \ + python="${PYTHON_VERSION}" pip \ + ninja cmake \ + libpng \ + libwebp \ + 'ffmpeg<4.3' +conda activate ci +conda install --quiet --yes libjpeg-turbo -c pytorch +pip install --progress-bar=off --upgrade setuptools==72.1.0 + +# See https://github.com/pytorch/vision/issues/6790 +if [[ "${PYTHON_VERSION}" != "3.11" ]]; then + pip install --progress-bar=off av!=10.0.0 +fi + +echo '::endgroup::' + +if [[ "${OS_TYPE}" == windows && "${GPU_ARCH_TYPE}" == cuda ]]; then + echo '::group::Install VisualStudio CUDA extensions on Windows' + if [[ "${VC_YEAR:-}" == "2022" ]]; then + TARGET_DIR="/c/Program Files (x86)/Microsoft Visual Studio/2022/BuildTools/MSBuild/Microsoft/VC/v170/BuildCustomizations" + else + TARGET_DIR="/c/Program Files (x86)/Microsoft Visual Studio/2019/BuildTools/MSBuild/Microsoft/VC/v160/BuildCustomizations" + fi + mkdir -p "${TARGET_DIR}" + cp -r "${CUDA_HOME}/MSBuildExtensions/"* "${TARGET_DIR}" + echo '::endgroup::' +fi + +echo '::group::Install PyTorch' +# TODO: Can we maybe have this as an environment variable in the job template? For example, `IS_RELEASE`. +if [[ (${GITHUB_EVENT_NAME} = 'pull_request' && (${GITHUB_BASE_REF} = 'release'*)) || (${GITHUB_REF} = 'refs/heads/release'*) ]]; then + CHANNEL=test +else + CHANNEL=nightly +fi + +case $GPU_ARCH_TYPE in + cpu) + GPU_ARCH_ID="cpu" + ;; + cuda) + VERSION_WITHOUT_DOT=$(echo "${GPU_ARCH_VERSION}" | sed 's/\.//') + GPU_ARCH_ID="cu${VERSION_WITHOUT_DOT}" + ;; + *) + echo "Unknown GPU_ARCH_TYPE=${GPU_ARCH_TYPE}" + exit 1 + ;; +esac +PYTORCH_WHEEL_INDEX="https://download.pytorch.org/whl/${CHANNEL}/${GPU_ARCH_ID}" +pip install --progress-bar=off --pre torch --index-url="${PYTORCH_WHEEL_INDEX}" + +if [[ $GPU_ARCH_TYPE == 'cuda' ]]; then + python -c "import torch; exit(not torch.cuda.is_available())" +fi +echo '::endgroup::' + +echo '::group::Install third party dependencies prior to TorchVision install' +# Installing with `easy_install`, e.g. `python setup.py install` or `python setup.py develop`, has some quirks when +# pulling in third-party dependencies. For example: +# - On Windows, we often hit an SSL error although `pip` can install just fine. +# - It happily pulls in pre-releases, which can lead to more problems down the line. +# `pip` does not unless explicitly told to do so. +# Thus, we use `setup.py egg_info` below to extract the third-party dependencies and install them upfront with `pip`. +python setup.py egg_info +# The requires.txt cannot be used with `pip install -r` directly. The requirements are listed at the top and the +# optional dependencies come in non-standard syntax after a blank line. Thus, we just extract the header.
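+# As a rough illustration (hypothetical contents), a generated requires.txt could look like: +# numpy +# requests +# +# [scipy] +# scipy +# The `sed` call below deletes everything from the first blank line to the end of the file, so only the mandatory requirements at the top survive.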
+sed -e '/^$/,$d' *.egg-info/requires.txt | tee requirements.txt +pip install --progress-bar=off -r requirements.txt +echo '::endgroup::' + +echo '::group::Install TorchVision' +python setup.py develop +echo '::endgroup::' + +echo '::group::Install torchvision-extra-decoders' +# This can be done after torchvision was built +pip install torchvision-extra-decoders +echo '::endgroup::' + +echo '::group::Collect environment information' +conda list +python -m torch.utils.collect_env +echo '::endgroup::' diff --git a/.github/scripts/unittest.sh b/.github/scripts/unittest.sh new file mode 100755 index 00000000000..da8a06928ea --- /dev/null +++ b/.github/scripts/unittest.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env bash + +set -euo pipefail + +./.github/scripts/setup-env.sh + +# Activate conda environment +eval "$($(which conda) shell.bash hook)" && conda deactivate && conda activate ci + +echo '::group::Install testing utilities' +# TODO: remove the <8 constraint on pytest when https://github.com/pytorch/vision/issues/8238 is closed +pip install --progress-bar=off "pytest<8" pytest-mock pytest-cov expecttest!=0.2.0 requests +echo '::endgroup::' + +python test/smoke_test.py + +# We explicitly ignore the video tests until we resolve https://github.com/pytorch/vision/issues/8162 +pytest --ignore-glob="*test_video*" --junit-xml="${RUNNER_TEST_RESULTS_DIR}/test-results.xml" -v --durations=25 diff --git a/.github/workflows/build-cmake.yml b/.github/workflows/build-cmake.yml new file mode 100644 index 00000000000..9cee3bfc26d --- /dev/null +++ b/.github/workflows/build-cmake.yml @@ -0,0 +1,84 @@ +name: CMake + +on: + pull_request: + push: + branches: + - nightly + - main + - release/* + workflow_dispatch: + +jobs: + linux: + strategy: + matrix: + include: + - runner: linux.12xlarge + gpu-arch-type: cpu + - runner: linux.g5.4xlarge.nvidia.gpu + gpu-arch-type: cuda + gpu-arch-version: "11.8" + fail-fast: false + uses: pytorch/test-infra/.github/workflows/linux_job_v2.yml@main + with: + repository: pytorch/vision + runner: ${{ matrix.runner }} + gpu-arch-type: ${{ matrix.gpu-arch-type }} + gpu-arch-version: ${{ matrix.gpu-arch-version }} + test-infra-ref: main + script: | + set -euo pipefail + + export PYTHON_VERSION=3.9 + export GPU_ARCH_TYPE=${{ matrix.gpu-arch-type }} + export GPU_ARCH_VERSION=${{ matrix.gpu-arch-version }} + ./.github/scripts/cmake.sh + + macos: + strategy: + matrix: + include: + - runner: macos-m1-stable + fail-fast: false + uses: pytorch/test-infra/.github/workflows/macos_job.yml@main + with: + repository: pytorch/vision + runner: ${{ matrix.runner }} + test-infra-ref: main + script: | + set -euo pipefail + + export PYTHON_VERSION=3.9 + export GPU_ARCH_TYPE=cpu + export GPU_ARCH_VERSION='' + + ${CONDA_RUN} ./.github/scripts/cmake.sh + + windows: + strategy: + matrix: + include: + - runner: windows.4xlarge + gpu-arch-type: cpu + - runner: windows.g5.4xlarge.nvidia.gpu + gpu-arch-type: cuda + gpu-arch-version: "11.8" + fail-fast: false + uses: pytorch/test-infra/.github/workflows/windows_job.yml@main + with: + repository: pytorch/vision + runner: ${{ matrix.runner }} + gpu-arch-type: ${{ matrix.gpu-arch-type }} + gpu-arch-version: ${{ matrix.gpu-arch-version }} + test-infra-ref: main + script: | + set -euo pipefail + + export PYTHON_VERSION=3.9 + export VC_YEAR=2022 + export VSDEVCMD_ARGS="" + export GPU_ARCH_TYPE=${{ matrix.gpu-arch-type }} + export GPU_ARCH_VERSION=${{ matrix.gpu-arch-version }} + + ./.github/scripts/cmake.sh diff --git a/.github/workflows/build-conda-linux.yml 
b/.github/workflows/build-conda-linux.yml new file mode 100644 index 00000000000..a445ef9af25 --- /dev/null +++ b/.github/workflows/build-conda-linux.yml @@ -0,0 +1,52 @@ +name: Build Linux Conda + +on: + pull_request: + push: + branches: + - nightly + - main + - release/* + tags: + # NOTE: Binary build pipelines should only get triggered on release candidate builds + # Release candidate tags look like: v1.11.0-rc1 + - v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+ + workflow_dispatch: + +jobs: + generate-matrix: + uses: pytorch/test-infra/.github/workflows/generate_binary_build_matrix.yml@main + with: + package-type: conda + os: linux + test-infra-repository: pytorch/test-infra + test-infra-ref: main + build: + needs: generate-matrix + strategy: + fail-fast: false + matrix: + include: + - repository: pytorch/vision + pre-script: "" + post-script: "" + conda-package-directory: packaging/torchvision + smoke-test-script: test/smoke_test.py + package-name: torchvision + name: ${{ matrix.repository }} + uses: pytorch/test-infra/.github/workflows/build_conda_linux.yml@main + with: + conda-package-directory: ${{ matrix.conda-package-directory }} + repository: ${{ matrix.repository }} + ref: "" + test-infra-repository: pytorch/test-infra + test-infra-ref: main + build-matrix: ${{ needs.generate-matrix.outputs.matrix }} + pre-script: ${{ matrix.pre-script }} + post-script: ${{ matrix.post-script }} + package-name: ${{ matrix.package-name }} + smoke-test-script: ${{ matrix.smoke-test-script }} + trigger-event: ${{ github.event_name }} + secrets: + CONDA_PYTORCHBOT_TOKEN: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }} + CONDA_PYTORCHBOT_TOKEN_TEST: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }} diff --git a/.github/workflows/build-conda-m1.yml b/.github/workflows/build-conda-m1.yml new file mode 100644 index 00000000000..e8f6546a678 --- /dev/null +++ b/.github/workflows/build-conda-m1.yml @@ -0,0 +1,54 @@ +name: Build M1 Conda + +on: + pull_request: + push: + branches: + - nightly + - main + - release/* + tags: + # NOTE: Binary build pipelines should only get triggered on release candidate builds + # Release candidate tags look like: v1.11.0-rc1 + - v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+ + workflow_dispatch: + +jobs: + generate-matrix: + uses: pytorch/test-infra/.github/workflows/generate_binary_build_matrix.yml@main + with: + package-type: conda + os: macos-arm64 + test-infra-repository: pytorch/test-infra + test-infra-ref: main + build: + needs: generate-matrix + strategy: + fail-fast: false + matrix: + include: + - repository: pytorch/vision + pre-script: "" + post-script: "" + conda-package-directory: packaging/torchvision + smoke-test-script: test/smoke_test.py + package-name: torchvision + name: ${{ matrix.repository }} + uses: pytorch/test-infra/.github/workflows/build_conda_macos.yml@main + with: + conda-package-directory: ${{ matrix.conda-package-directory }} + repository: ${{ matrix.repository }} + ref: "" + test-infra-repository: pytorch/test-infra + test-infra-ref: main + build-matrix: ${{ needs.generate-matrix.outputs.matrix }} + env-var-script: ./.github/scripts/export_IS_M1_CONDA_BUILD_JOB.sh + pre-script: ${{ matrix.pre-script }} + post-script: ${{ matrix.post-script }} + package-name: ${{ matrix.package-name }} + smoke-test-script: ${{ matrix.smoke-test-script }} + runner-type: macos-m1-stable + trigger-event: ${{ github.event_name }} + secrets: + CONDA_PYTORCHBOT_TOKEN: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }} + CONDA_PYTORCHBOT_TOKEN_TEST: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }} diff --git 
a/.github/workflows/build-conda-windows.yml b/.github/workflows/build-conda-windows.yml new file mode 100644 index 00000000000..f404c06b888 --- /dev/null +++ b/.github/workflows/build-conda-windows.yml @@ -0,0 +1,53 @@ +name: Build Windows Conda + +on: + pull_request: + push: + branches: + - nightly + - main + - release/* + tags: + # NOTE: Binary build pipelines should only get triggered on release candidate builds + # Release candidate tags look like: v1.11.0-rc1 + - v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+ + workflow_dispatch: + +jobs: + generate-matrix: + uses: pytorch/test-infra/.github/workflows/generate_binary_build_matrix.yml@main + with: + package-type: conda + os: windows + test-infra-repository: pytorch/test-infra + test-infra-ref: main + build: + needs: generate-matrix + strategy: + fail-fast: false + matrix: + include: + - repository: pytorch/vision + pre-script: "" + post-script: "" + env-script: packaging/windows/internal/vc_env_helper.bat + conda-package-directory: packaging/torchvision + smoke-test-script: test/smoke_test.py + package-name: torchvision + name: ${{ matrix.repository }} + uses: pytorch/test-infra/.github/workflows/build_conda_windows.yml@main + with: + conda-package-directory: ${{ matrix.conda-package-directory }} + repository: ${{ matrix.repository }} + ref: "" + test-infra-repository: pytorch/test-infra + test-infra-ref: main + build-matrix: ${{ needs.generate-matrix.outputs.matrix }} + pre-script: ${{ matrix.pre-script }} + post-script: ${{ matrix.post-script }} + package-name: ${{ matrix.package-name }} + smoke-test-script: ${{ matrix.smoke-test-script }} + trigger-event: ${{ github.event_name }} + secrets: + CONDA_PYTORCHBOT_TOKEN: ${{ secrets.CONDA_PYTORCHBOT_TOKEN }} + CONDA_PYTORCHBOT_TOKEN_TEST: ${{ secrets.CONDA_PYTORCHBOT_TOKEN_TEST }} diff --git a/.github/workflows/build-wheels-aarch64-linux.yml b/.github/workflows/build-wheels-aarch64-linux.yml new file mode 100644 index 00000000000..05c83991d5b --- /dev/null +++ b/.github/workflows/build-wheels-aarch64-linux.yml @@ -0,0 +1,54 @@ +name: Build Aarch64 Linux Wheels + +on: + pull_request: + push: + branches: + - nightly + - main + - release/* + tags: + # NOTE: Binary build pipelines should only get triggered on release candidate builds + # Release candidate tags look like: v1.11.0-rc1 + - v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+ + workflow_dispatch: + +permissions: + id-token: write + contents: read + +jobs: + generate-matrix: + uses: pytorch/test-infra/.github/workflows/generate_binary_build_matrix.yml@main + with: + package-type: wheel + os: linux-aarch64 + test-infra-repository: pytorch/test-infra + test-infra-ref: main + with-cuda: disable + build: + needs: generate-matrix + strategy: + fail-fast: false + matrix: + include: + - repository: pytorch/vision + pre-script: packaging/pre_build_script.sh + post-script: packaging/post_build_script.sh + smoke-test-script: test/smoke_test.py + package-name: torchvision + name: ${{ matrix.repository }} + uses: pytorch/test-infra/.github/workflows/build_wheels_linux.yml@main + with: + repository: ${{ matrix.repository }} + ref: "" + test-infra-repository: pytorch/test-infra + test-infra-ref: main + build-matrix: ${{ needs.generate-matrix.outputs.matrix }} + pre-script: ${{ matrix.pre-script }} + post-script: ${{ matrix.post-script }} + package-name: ${{ matrix.package-name }} + smoke-test-script: ${{ matrix.smoke-test-script }} + trigger-event: ${{ github.event_name }} + architecture: aarch64 + setup-miniconda: false diff --git a/.github/workflows/build-wheels-linux.yml 
b/.github/workflows/build-wheels-linux.yml new file mode 100644 index 00000000000..818f32c102b --- /dev/null +++ b/.github/workflows/build-wheels-linux.yml @@ -0,0 +1,52 @@ +name: Build Linux Wheels + +on: + pull_request: + push: + branches: + - nightly + - main + - release/* + tags: + # NOTE: Binary build pipelines should only get triggered on release candidate builds + # Release candidate tags look like: v1.11.0-rc1 + - v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+ + workflow_dispatch: + +permissions: + id-token: write + contents: read + +jobs: + generate-matrix: + uses: pytorch/test-infra/.github/workflows/generate_binary_build_matrix.yml@main + with: + package-type: wheel + os: linux + test-infra-repository: pytorch/test-infra + test-infra-ref: main + with-xpu: enable + build: + needs: generate-matrix + strategy: + fail-fast: false + matrix: + include: + - repository: pytorch/vision + pre-script: packaging/pre_build_script.sh + post-script: packaging/post_build_script.sh + smoke-test-script: test/smoke_test.py + package-name: torchvision + name: ${{ matrix.repository }} + uses: pytorch/test-infra/.github/workflows/build_wheels_linux.yml@main + with: + repository: ${{ matrix.repository }} + ref: "" + test-infra-repository: pytorch/test-infra + test-infra-ref: main + build-matrix: ${{ needs.generate-matrix.outputs.matrix }} + pre-script: ${{ matrix.pre-script }} + post-script: ${{ matrix.post-script }} + package-name: ${{ matrix.package-name }} + smoke-test-script: ${{ matrix.smoke-test-script }} + trigger-event: ${{ github.event_name }} diff --git a/.github/workflows/build-wheels-m1.yml b/.github/workflows/build-wheels-m1.yml new file mode 100644 index 00000000000..76709b755e8 --- /dev/null +++ b/.github/workflows/build-wheels-m1.yml @@ -0,0 +1,52 @@ +name: Build M1 Wheels + +on: + pull_request: + push: + branches: + - nightly + - main + - release/* + tags: + # NOTE: Binary build pipelines should only get triggered on release candidate builds + # Release candidate tags look like: v1.11.0-rc1 + - v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+ + workflow_dispatch: + +permissions: + id-token: write + contents: read + +jobs: + generate-matrix: + uses: pytorch/test-infra/.github/workflows/generate_binary_build_matrix.yml@main + with: + package-type: wheel + os: macos-arm64 + test-infra-repository: pytorch/test-infra + test-infra-ref: main + build: + needs: generate-matrix + strategy: + fail-fast: false + matrix: + include: + - repository: pytorch/vision + pre-script: packaging/pre_build_script.sh + post-script: packaging/post_build_script.sh + smoke-test-script: test/smoke_test.py + package-name: torchvision + name: ${{ matrix.repository }} + uses: pytorch/test-infra/.github/workflows/build_wheels_macos.yml@main + with: + repository: ${{ matrix.repository }} + ref: "" + test-infra-repository: pytorch/test-infra + test-infra-ref: main + build-matrix: ${{ needs.generate-matrix.outputs.matrix }} + pre-script: ${{ matrix.pre-script }} + post-script: ${{ matrix.post-script }} + package-name: ${{ matrix.package-name }} + runner-type: macos-m1-stable + smoke-test-script: ${{ matrix.smoke-test-script }} + trigger-event: ${{ github.event_name }} diff --git a/.github/workflows/build-wheels-windows.yml b/.github/workflows/build-wheels-windows.yml new file mode 100644 index 00000000000..a269aea2604 --- /dev/null +++ b/.github/workflows/build-wheels-windows.yml @@ -0,0 +1,54 @@ +name: Build Windows Wheels + +on: + pull_request: + push: + branches: + - nightly + - main + - release/* + tags: + # NOTE: Binary build pipelines should only 
get triggered on release candidate builds + # Release candidate tags look like: v1.11.0-rc1 + - v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+ + workflow_dispatch: + +permissions: + id-token: write + contents: read + +jobs: + generate-matrix: + uses: pytorch/test-infra/.github/workflows/generate_binary_build_matrix.yml@main + with: + package-type: wheel + os: windows + test-infra-repository: pytorch/test-infra + test-infra-ref: main + with-xpu: enable + build: + needs: generate-matrix + strategy: + fail-fast: false + matrix: + include: + - repository: pytorch/vision + pre-script: packaging/pre_build_script.sh + env-script: packaging/windows/internal/vc_env_helper.bat + post-script: "python packaging/wheel/relocate.py" + smoke-test-script: test/smoke_test.py + package-name: torchvision + name: ${{ matrix.repository }} + uses: pytorch/test-infra/.github/workflows/build_wheels_windows.yml@main + with: + repository: ${{ matrix.repository }} + ref: "" + test-infra-repository: pytorch/test-infra + test-infra-ref: main + build-matrix: ${{ needs.generate-matrix.outputs.matrix }} + pre-script: ${{ matrix.pre-script }} + env-script: ${{ matrix.env-script }} + post-script: ${{ matrix.post-script }} + package-name: ${{ matrix.package-name }} + smoke-test-script: ${{ matrix.smoke-test-script }} + trigger-event: ${{ github.event_name }} diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 00000000000..f6ec4201da3 --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,129 @@ +name: Docs + +on: + pull_request: + push: + branches: + - nightly + - main + - release/* + tags: + - v[0-9]+.[0-9]+.[0-9] + - v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+ + workflow_dispatch: + +jobs: + build: + uses: pytorch/test-infra/.github/workflows/linux_job_v2.yml@main + with: + repository: pytorch/vision + upload-artifact: docs + test-infra-ref: main + script: | + set -euo pipefail + + export PYTHON_VERSION=3.10 + export GPU_ARCH_TYPE=cpu + export GPU_ARCH_VERSION='' + ./.github/scripts/setup-env.sh + + # Prepare conda + CONDA_PATH=$(which conda) + eval "$(${CONDA_PATH} shell.bash hook)" + conda activate ci + # FIXME: not sure why we need this. `ldd torchvision/video_reader.so` shows that it + # already links against the one pulled from conda. However, at runtime it pulls from + # /lib64 + # Should we maybe always do this in `./.github/scripts/setup-env.sh` so that we don't + # have to pay attention in all other workflows? + export LD_LIBRARY_PATH="${CONDA_PREFIX}/lib:${LD_LIBRARY_PATH}" + + cd docs + + echo '::group::Install doc requirements' + pip install --progress-bar=off -r requirements.txt + echo '::endgroup::' + + if [[ ${{ github.event_name }} == push && (${{ github.ref_type }} == tag || (${{ github.ref_type }} == branch && ${{ github.ref_name }} == release/*)) ]]; then + echo '::group::Enable version string sanitization' + # This environment variable just has to exist and must not be empty. The actual value is arbitrary. + # See docs/source/conf.py for details + export TORCHVISION_SANITIZE_VERSION_STR_IN_DOCS=1 + echo '::endgroup::' + fi + + # The runner does not have sufficient memory to run with as many processes as there are + # cores (`-j auto`). Thus, we limit to a single process (`-j 1`) here. + sed -i -e 's/-j auto/-j 1/' Makefile + make html + + # Below is an imperfect way for us to add "try on Colab" links to all of our gallery examples. 
+ # sphinx-gallery will convert all gallery examples to .ipynb notebooks and store them in + # build/html/_downloads/<hash>/<notebook_name>.ipynb + # We copy all those ipynb files into a more convenient folder so that we can more easily link to them. + mkdir build/html/_generated_ipynb_notebooks + for file in `find build/html/_downloads`; do + if [[ $file == *.ipynb ]]; then + cp $file build/html/_generated_ipynb_notebooks/ + fi + done + + cp -r build/html "${RUNNER_ARTIFACT_DIR}" + + # On PRs we also want to upload the docs into our S3 bucket for preview. + if [[ ${{ github.event_name == 'pull_request' }} ]]; then + cp -r build/html/* "${RUNNER_DOCS_DIR}" + fi + + upload: + needs: build + if: github.repository == 'pytorch/vision' && github.event_name == 'push' && + ((github.ref_type == 'branch' && github.ref_name == 'main') || github.ref_type == 'tag') + permissions: + contents: write + uses: pytorch/test-infra/.github/workflows/linux_job_v2.yml@main + with: + repository: pytorch/vision + download-artifact: docs + ref: gh-pages + test-infra-ref: main + script: | + set -euo pipefail + + REF_TYPE=${{ github.ref_type }} + REF_NAME=${{ github.ref_name }} + + if [[ "${REF_TYPE}" == branch ]]; then + TARGET_FOLDER="${REF_NAME}" + elif [[ "${REF_TYPE}" == tag ]]; then + case "${REF_NAME}" in + *-rc*) + echo "Aborting upload since this is an RC tag: ${REF_NAME}" + exit 0 + ;; + *) + # Strip the leading "v" as well as the trailing patch version. For example: + # 'v0.15.2' -> '0.15' + TARGET_FOLDER=$(echo "${REF_NAME}" | sed 's/v\([0-9]\+\)\.\([0-9]\+\)\.[0-9]\+/\1.\2/') + ;; + esac + fi + echo "Target Folder: ${TARGET_FOLDER}" + + mkdir -p "${TARGET_FOLDER}" + rm -rf "${TARGET_FOLDER}"/* + mv "${RUNNER_ARTIFACT_DIR}"/html/* "${TARGET_FOLDER}" + git add "${TARGET_FOLDER}" || true + + if [[ "${TARGET_FOLDER}" == main ]]; then + mkdir -p _static + rm -rf _static/* + cp -r "${TARGET_FOLDER}"/_static/* _static + git add _static || true + fi + + git config user.name 'pytorchbot' + git config user.email 'soumith+bot@pytorch.org' + git config http.postBuffer 524288000 + git commit -m "auto-generating sphinx docs" || true + git push diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 00000000000..22ada97fba8 --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,102 @@ +name: Lint + +on: + pull_request: + push: + branches: + - nightly + - main + - release/* + workflow_dispatch: + +jobs: + python-source-and-configs: + uses: pytorch/test-infra/.github/workflows/linux_job_v2.yml@main + with: + repository: pytorch/vision + test-infra-ref: main + script: | + set -euo pipefail + + echo '::group::Setup environment' + CONDA_PATH=$(which conda) + eval "$(${CONDA_PATH} shell.bash hook)" + conda create --name ci --quiet --yes python=3.9 pip + conda activate ci + echo '::endgroup::' + + echo '::group::Install lint tools' + pip install --progress-bar=off pre-commit + echo '::endgroup::' + + set +e + pre-commit run --all-files + + if [ $?
-ne 0 ]; then + git --no-pager diff + exit 1 + fi + + c-source: + uses: pytorch/test-infra/.github/workflows/linux_job_v2.yml@main + with: + repository: pytorch/vision + test-infra-ref: main + script: | + set -euo pipefail + + echo '::group::Setup environment' + CONDA_PATH=$(which conda) + eval "$(${CONDA_PATH} shell.bash hook)" + conda create --name ci --quiet --yes -c conda-forge python=3.9 clang-format + conda activate ci + echo '::endgroup::' + + + echo '::group::Lint C source' + set +e + ./.github/scripts/run-clang-format.py -r torchvision/csrc --exclude "torchvision/csrc/io/image/cpu/giflib/*" + + if [ $? -ne 0 ]; then + git --no-pager diff + exit 1 + fi + echo '::endgroup::' + + + python-types: + uses: pytorch/test-infra/.github/workflows/linux_job_v2.yml@main + with: + repository: pytorch/vision + test-infra-ref: main + script: | + set -euo pipefail + + export PYTHON_VERSION=3.11 + export GPU_ARCH_TYPE=cpu + export GPU_ARCH_VERSION='' + + ./.github/scripts/setup-env.sh + + CONDA_PATH=$(which conda) + eval "$(${CONDA_PATH} shell.bash hook)" + conda activate ci + + echo '::group::Install lint tools' + pip install --progress-bar=off "mypy==1.13.0" + echo '::endgroup::' + + echo '::group::Lint Python types' + mypy --install-types --non-interactive --config-file mypy.ini + echo '::endgroup::' + + bc: + if: github.event.pull_request + runs-on: ubuntu-latest + steps: + - name: Run BC Lint Action + uses: pytorch/test-infra/.github/actions/bc-lint@main + with: + repo: ${{ github.event.pull_request.head.repo.full_name }} + base_sha: ${{ github.event.pull_request.base.sha }} + head_sha: ${{ github.event.pull_request.head.sha }} diff --git a/.github/workflows/pr-labels.yml b/.github/workflows/pr-labels.yml new file mode 100644 index 00000000000..bf6349ab02e --- /dev/null +++ b/.github/workflows/pr-labels.yml @@ -0,0 +1,40 @@ +name: pr-labels + +on: + push: + branches: + - main + +jobs: + is-properly-labeled: + runs-on: ubuntu-latest + permissions: + pull-requests: write + + steps: + - name: Set up python + uses: actions/setup-python@v5 + + - name: Install requests + run: pip install requests + + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Process commit and find merger responsible for labeling + id: commit + run: | + MERGER=$(python .github/process_commit.py ${{ github.sha }}) + echo "merger=${MERGER}" | tee --append $GITHUB_OUTPUT + + - name: Ping merger responsible for labeling if necessary + if: ${{ steps.commit.outputs.merger != '' }} + uses: mshick/add-pr-comment@v2 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + message: | + Hey ${{ steps.commit.outputs.merger }}! + + You merged this PR, but no labels were added. + The list of valid labels is available at https://github.com/pytorch/vision/blob/main/.github/process_commit.py diff --git a/.github/workflows/prototype-tests-linux-gpu.yml b/.github/workflows/prototype-tests-linux-gpu.yml new file mode 100644 index 00000000000..e1d6498761b --- /dev/null +++ b/.github/workflows/prototype-tests-linux-gpu.yml @@ -0,0 +1,57 @@ +name: Prototype tests on Linux + +# IMPORTANT: This workflow has been manually disabled from the GitHub interface +# in June 2024. The file is kept for reference in case we ever put this back. 
+ +on: + pull_request: + +jobs: + unittests-prototype: + strategy: + matrix: + python-version: + - "3.9" + - "3.10" + - "3.11" + - "3.12" + runner: ["linux.12xlarge"] + gpu-arch-type: ["cpu"] + include: + - python-version: "3.9" + runner: linux.g5.4xlarge.nvidia.gpu + gpu-arch-type: cuda + gpu-arch-version: "11.8" + fail-fast: false + uses: pytorch/test-infra/.github/workflows/linux_job_v2.yml@main + with: + repository: pytorch/vision + runner: ${{ matrix.runner }} + gpu-arch-type: ${{ matrix.gpu-arch-type }} + gpu-arch-version: ${{ matrix.gpu-arch-version }} + timeout: 120 + script: | + set -euo pipefail + + export PYTHON_VERSION=${{ matrix.python-version }} + export GPU_ARCH_TYPE=${{ matrix.gpu-arch-type }} + export GPU_ARCH_VERSION=${{ matrix.gpu-arch-version }} + ./.github/scripts/setup-env.sh + + # Prepare conda + CONDA_PATH=$(which conda) + eval "$(${CONDA_PATH} shell.bash hook)" + conda activate ci + + echo '::group::Install testing utilities' + pip install --progress-bar=off pytest pytest-mock pytest-cov + echo '::endgroup::' + + # We don't want to run the prototype datasets tests. Since the positional glob passed to `pytest`, i.e. + # `test/test_prototype*.py`, takes the highest priority, neither `--ignore` nor `--ignore-glob` can help us here. + rm test/test_prototype_datasets*.py + pytest \ + -v --durations=25 \ + --cov=torchvision/prototype --cov-report=term-missing \ + --junit-xml="${RUNNER_TEST_RESULTS_DIR}/test-results.xml" \ + test/test_prototype_*.py diff --git a/.github/workflows/tests-schedule.yml b/.github/workflows/tests-schedule.yml new file mode 100644 index 00000000000..3cba2ef59d8 --- /dev/null +++ b/.github/workflows/tests-schedule.yml @@ -0,0 +1,60 @@ +name: tests + +on: + pull_request: + paths: + - "test/test_datasets_download.py" + - ".github/failed_schedule_issue_template.md" + - ".github/workflows/tests-schedule.yml" + + schedule: + - cron: "0 9 * * *" + +jobs: + download: + runs-on: ubuntu-latest + + steps: + - name: Set up python + uses: actions/setup-python@v2 + with: + python-version: 3.9 + + - name: Upgrade system packages + run: python -m pip install --upgrade pip setuptools wheel + + - name: SSL + run: python -c 'import ssl; print(ssl.OPENSSL_VERSION)' + + - name: Checkout repository + uses: actions/checkout@v2 + + - name: TODO REMOVE THIS! Install non pre-release version of mpmath. + run: pip install "mpmath<1.4" + + - name: Install torch nightly build + run: pip install --pre torch -f https://download.pytorch.org/whl/nightly/cpu/torch_nightly.html + + - name: Install torchvision + run: pip install --no-build-isolation --editable .
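+ # Note: `--no-build-isolation` makes pip build torchvision against the torch nightly installed in the previous step, instead of resolving a separate torch inside an isolated build environment.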
+ + - name: Install all optional dataset requirements + run: pip install scipy pycocotools lmdb gdown + + - name: Install tests requirements + run: pip install pytest + + - name: Run tests + run: pytest -ra -v test/test_datasets_download.py + + - uses: JasonEtco/create-an-issue@v2.4.0 + name: Create issue if download tests failed + if: failure() && github.event_name == 'schedule' + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + REPO: ${{ github.repository }} + WORKFLOW: ${{ github.workflow }} + JOB: ${{ github.job }} + ID: ${{ github.run_id }} + with: + filename: .github/failed_schedule_issue_template.md diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 00000000000..b4a74733967 --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,170 @@ +name: Tests + +on: + pull_request: + push: + branches: + - nightly + - main + - release/* + workflow_dispatch: + +jobs: + unittests-linux: + strategy: + matrix: + python-version: + - "3.9" + - "3.10" + - "3.11" + - "3.12" + runner: ["linux.12xlarge"] + gpu-arch-type: ["cpu"] + include: + - python-version: 3.9 + runner: linux.g5.4xlarge.nvidia.gpu + gpu-arch-type: cuda + gpu-arch-version: "11.8" + fail-fast: false + uses: pytorch/test-infra/.github/workflows/linux_job_v2.yml@main + with: + repository: pytorch/vision + runner: ${{ matrix.runner }} + gpu-arch-type: ${{ matrix.gpu-arch-type }} + gpu-arch-version: ${{ matrix.gpu-arch-version }} + timeout: 120 + test-infra-ref: main + script: | + set -euo pipefail + + export PYTHON_VERSION=${{ matrix.python-version }} + export GPU_ARCH_TYPE=${{ matrix.gpu-arch-type }} + export GPU_ARCH_VERSION=${{ matrix.gpu-arch-version }} + + ./.github/scripts/unittest.sh + + unittests-macos: + strategy: + matrix: + python-version: + - "3.9" + - "3.10" + - "3.11" + - "3.12" + runner: ["macos-m1-stable"] + fail-fast: false + uses: pytorch/test-infra/.github/workflows/macos_job.yml@main + with: + repository: pytorch/vision + timeout: 240 + runner: ${{ matrix.runner }} + test-infra-ref: main + script: | + set -euo pipefail + + export PYTHON_VERSION=${{ matrix.python-version }} + export GPU_ARCH_TYPE=cpu + export GPU_ARCH_VERSION='' + + ${CONDA_RUN} ./.github/scripts/unittest.sh + + unittests-windows: + strategy: + matrix: + python-version: + - "3.9" + - "3.10" + - "3.11" + - "3.12" + runner: ["windows.4xlarge"] + gpu-arch-type: ["cpu"] + include: + - python-version: "3.9" + runner: windows.g5.4xlarge.nvidia.gpu + gpu-arch-type: cuda + gpu-arch-version: "11.8" + fail-fast: false + uses: pytorch/test-infra/.github/workflows/windows_job.yml@main + with: + repository: pytorch/vision + runner: ${{ matrix.runner }} + gpu-arch-type: ${{ matrix.gpu-arch-type }} + gpu-arch-version: ${{ matrix.gpu-arch-version }} + timeout: 120 + test-infra-ref: main + script: | + set -euxo pipefail + + export PYTHON_VERSION=${{ matrix.python-version }} + export VC_YEAR=2019 + export VSDEVCMD_ARGS="" + export GPU_ARCH_TYPE=${{ matrix.gpu-arch-type }} + export GPU_ARCH_VERSION=${{ matrix.gpu-arch-version }} + + ./.github/scripts/unittest.sh + + onnx: + uses: pytorch/test-infra/.github/workflows/linux_job_v2.yml@main + with: + repository: pytorch/vision + test-infra-ref: main + script: | + set -euo pipefail + + export PYTHON_VERSION=3.10 + export GPU_ARCH_TYPE=cpu + export GPU_ARCH_VERSION='' + + ./.github/scripts/setup-env.sh + + # Prepare conda + CONDA_PATH=$(which conda) + eval "$(${CONDA_PATH} shell.bash hook)" + conda activate ci + + echo '::group::Install ONNX' + pip install --progress-bar=off 
onnx onnxruntime + echo '::endgroup::' + + echo '::group::Install testing utilities' + pip install --progress-bar=off pytest "numpy<2" + echo '::endgroup::' + + echo '::group::Run ONNX tests' + pytest --junit-xml="${RUNNER_TEST_RESULTS_DIR}/test-results.xml" -v --durations=25 test/test_onnx.py + echo '::endgroup::' + + unittests-extended: + uses: pytorch/test-infra/.github/workflows/linux_job_v2.yml@main + if: contains(github.event.pull_request.labels.*.name, 'run-extended') + with: + repository: pytorch/vision + test-infra-ref: main + script: | + set -euo pipefail + + export PYTHON_VERSION=3.9 + export GPU_ARCH_TYPE=cpu + export GPU_ARCH_VERSION='' + + ./.github/scripts/setup-env.sh + + # Prepare conda + CONDA_PATH=$(which conda) + eval "$(${CONDA_PATH} shell.bash hook)" + conda activate ci + + echo '::group::Pre-download model weights' + pip install --progress-bar=off aiohttp aiofiles tqdm + python scripts/download_model_urls.py + echo '::endgroup::' + + echo '::group::Install testing utilities' + # TODO: remove the <8 constraint on pytest when https://github.com/pytorch/vision/issues/8238 is closed + pip install --progress-bar=off "pytest<8" + echo '::endgroup::' + + echo '::group::Run extended unittests' + export PYTORCH_TEST_WITH_EXTENDED=1 + pytest --junit-xml="${RUNNER_TEST_RESULTS_DIR}/test-results.xml" -v --durations=25 test/test_extended_*.py + echo '::endgroup::' diff --git a/.gitignore b/.gitignore index 5f483c84327..c2d4d2a1c42 100644 --- a/.gitignore +++ b/.gitignore @@ -9,7 +9,17 @@ torchvision/version.py */**/**/*.pyc */**/*~ *~ + docs/build +# sphinx-gallery +docs/source/auto_examples/ +docs/source/gen_modules/ +docs/source/generated/ +docs/source/models/generated/ +docs/source/sg_execution_times.rst +# pytorch-sphinx-theme gets installed here +docs/src + .coverage htmlcov .*.swp @@ -20,3 +30,18 @@ htmlcov *.swp *.swo gen.yml +.mypy_cache +.vscode/ +.idea/ +*.orig +*-checkpoint.ipynb +*.venv + +## Xcode User settings +xcuserdata/ + +# direnv +.direnv +.envrc + +scripts/release_notes/data.json diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000000..762ebf6fce0 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,32 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.0.1 + hooks: + - id: check-docstring-first + - id: check-toml + - id: check-yaml + exclude: packaging/.* + args: + - --allow-multiple-documents + - id: mixed-line-ending + args: [--fix=lf] + - id: end-of-file-fixer + + - repo: https://github.com/omnilib/ufmt + rev: v1.3.3 + hooks: + - id: ufmt + additional_dependencies: + - black == 22.3.0 + - usort == 1.0.2 + + - repo: https://github.com/PyCQA/flake8 + rev: 5.0.4 + hooks: + - id: flake8 + args: [--config=setup.cfg] + + - repo: https://github.com/PyCQA/pydocstyle + rev: 6.1.1 + hooks: + - id: pydocstyle diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 69c34f0f690..00000000000 --- a/.travis.yml +++ /dev/null @@ -1,86 +0,0 @@ -language: python - -dist: xenial -matrix: - include: - - env: FORMAT_CHECK - language: cpp - addons: - apt: - sources: - - llvm-toolchain-xenial-7 - packages: - - clang-7 - - clang-format-7 - before_install: skip - install: skip - script: ./travis-scripts/run-clang-format/run-clang-format.py -r torchvision/csrc - - env: LINT_CHECK - python: "2.7" - install: pip install flake8 typing - script: flake8 --exclude .circleci - after_success: [] - - env: LINT_CHECK - python: "3.6" - install: pip install flake8 typing - script: flake8 .circleci - 
after_success: [] - - python: "2.7" - env: IMAGE_BACKEND=Pillow-SIMD - - python: "2.7" - - python: "3.6" - env: IMAGE_BACKEND=Pillow-SIMD - - python: "3.6" - -before_install: - - sudo apt-get update - - wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh; - - bash miniconda.sh -b -p $HOME/miniconda - - export PATH="$HOME/miniconda/bin:$PATH" - - hash -r - - conda config --set always_yes yes --set changeps1 no - # Useful for debugging any issues with conda - - conda info -a - - - conda create -q -n test-environment python=$TRAVIS_PYTHON_VERSION pytorch scipy -c pytorch-nightly - - source activate test-environment - - | - if [[ "$IMAGE_BACKEND" == "Pillow-SIMD" ]]; then - pip uninstall -y pillow && CC="cc -march=native" pip install --force-reinstall pillow-simd - fi - - pip install future - - pip install pytest pytest-cov codecov - - pip install mock - - pip install typing - - | - if [[ $TRAVIS_PYTHON_VERSION == 3.6 ]]; then - pip install onnxruntime - fi - - conda install av -c conda-forge - - -install: - # Using pip instead of setup.py ensures we install a non-compressed version of the package - # (as opposed to an egg), which is necessary to collect coverage. - # We still get the benefit of testing an installed version over the - # test version to iron out installation file-inclusion bugs but can - # also collect coverage. - - pip install . - # Move to home dir, otherwise we'll end up with the path to the - # package in $PWD rather than the installed v - - | - cd $HOME - export TV_INSTALL_PATH="$(python -c 'import os; import torchvision; print(os.path.dirname(os.path.abspath(torchvision.__file__)))')" - echo "$TV_INSTALL_PATH" - cd - - -script: - - pytest --cov-config .coveragerc --cov torchvision --cov $TV_INSTALL_PATH -k 'not TestVideoReader and not TestVideoTransforms' test - - pytest test/test_hub.py - -after_success: - # Necessary to run coverage combine to rewrite paths from - # /travis/env/path/site-packages/torchvision to actual path - - coverage combine .coverage - - coverage report - - codecov diff --git a/CITATION.cff b/CITATION.cff new file mode 100644 index 00000000000..37db28b2bad --- /dev/null +++ b/CITATION.cff @@ -0,0 +1,14 @@ +cff-version: 1.2.0 +title: "TorchVision: PyTorch's Computer Vision library" +message: >- + If you find TorchVision useful in your work, please + consider citing the following BibTeX entry. +type: software +authors: + - given-names: TorchVision maintainers and contributors +url: "https://github.com/pytorch/vision" +license: "BSD-3-Clause" +date-released: "2016-11-06" +journal: "GitHub repository" +publisher: "GitHub" +key: "torchvision2016" diff --git a/CMakeLists.txt b/CMakeLists.txt index df77482c870..f2430559909 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,16 +1,180 @@ -cmake_minimum_required(VERSION 2.8) +cmake_minimum_required(VERSION 3.18) project(torchvision) -set(CMAKE_CXX_STANDARD 11) +set(CMAKE_CXX_STANDARD 17) +file(STRINGS version.txt TORCHVISION_VERSION) + +option(WITH_CUDA "Enable CUDA support" OFF) +option(WITH_MPS "Enable MPS support" OFF) +option(WITH_PNG "Enable features requiring LibPNG." ON) +option(WITH_JPEG "Enable features requiring LibJPEG." ON) +# Libwebp is disabled by default, which means enabling it from cmake is largely +# untested. Since building from cmake is very low pri anyway, this is OK. If +# you're a user and you need this, please open an issue (and a PR!). +option(WITH_WEBP "Enable features requiring LibWEBP." 
OFF) +# Same here +option(WITH_AVIF "Enable features requiring LibAVIF." OFF) + +if(WITH_CUDA) + enable_language(CUDA) + add_definitions(-D__CUDA_NO_HALF_OPERATORS__) + add_definitions(-DWITH_CUDA) + set(CMAKE_CUDA_FLAGS "${CMAKE_CUDA_FLAGS} --expt-relaxed-constexpr") +endif() + +if(WITH_MPS) + enable_language(OBJC OBJCXX) + add_definitions(-DWITH_MPS) +endif() find_package(Torch REQUIRED) -file(GLOB HEADERS torchvision/csrc/vision.h) -file(GLOB MODELS_HEADERS torchvision/csrc/models/*.h) -file(GLOB MODELS_SOURCES torchvision/csrc/models/*.h torchvision/csrc/models/*.cpp) +if (WITH_PNG) + add_definitions(-DPNG_FOUND) + find_package(PNG REQUIRED) +endif() + +if (WITH_JPEG) + add_definitions(-DJPEG_FOUND) + find_package(JPEG REQUIRED) +endif() + +if (WITH_WEBP) + add_definitions(-DWEBP_FOUND) + find_package(WEBP REQUIRED) +endif() + +if (WITH_AVIF) + add_definitions(-DAVIF_FOUND) + find_package(AVIF REQUIRED) +endif() + +function(CUDA_CONVERT_FLAGS EXISTING_TARGET) + get_property(old_flags TARGET ${EXISTING_TARGET} PROPERTY INTERFACE_COMPILE_OPTIONS) + if(NOT "${old_flags}" STREQUAL "") + string(REPLACE ";" "," CUDA_flags "${old_flags}") + set_property(TARGET ${EXISTING_TARGET} PROPERTY INTERFACE_COMPILE_OPTIONS + "$<$<BUILD_INTERFACE:$<COMPILE_LANGUAGE:CXX>>:${old_flags}>$<$<BUILD_INTERFACE:$<COMPILE_LANGUAGE:CUDA>>:-Xcompiler=${CUDA_flags}>" + ) + endif() +endfunction() + +if(MSVC) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4819") + if(WITH_CUDA) + set(CMAKE_CUDA_FLAGS "${CMAKE_CUDA_FLAGS} -Xcompiler=/wd4819") + foreach(diag cc_clobber_ignored integer_sign_change useless_using_declaration + set_but_not_used field_without_dll_interface + base_class_has_different_dll_interface + dll_interface_conflict_none_assumed + dll_interface_conflict_dllexport_assumed + implicit_return_from_non_void_function + unsigned_compare_with_zero + declared_but_not_referenced + bad_friend_decl) + string(APPEND CMAKE_CUDA_FLAGS " -Xcudafe --diag_suppress=${diag}") + endforeach() + CUDA_CONVERT_FLAGS(torch_cpu) + if(TARGET torch_cuda) + CUDA_CONVERT_FLAGS(torch_cuda) + endif() + if(TARGET torch_cuda_cu) + CUDA_CONVERT_FLAGS(torch_cuda_cu) + endif() + if(TARGET torch_cuda_cpp) + CUDA_CONVERT_FLAGS(torch_cuda_cpp) + endif() + endif() +endif() + +include(GNUInstallDirs) +include(CMakePackageConfigHelpers) + +set(TVCPP torchvision/csrc) +list(APPEND ALLOW_LISTED ${TVCPP} ${TVCPP}/io/image ${TVCPP}/io/image/cpu ${TVCPP}/io/image/cpu/giflib ${TVCPP}/models ${TVCPP}/ops + ${TVCPP}/ops/autograd ${TVCPP}/ops/cpu ${TVCPP}/io/image/cuda) +if(WITH_CUDA) + list(APPEND ALLOW_LISTED ${TVCPP}/ops/cuda ${TVCPP}/ops/autocast) +endif() +if(WITH_MPS) + list(APPEND ALLOW_LISTED ${TVCPP}/ops/mps) +endif() + +FOREACH(DIR ${ALLOW_LISTED}) + file(GLOB ALL_SOURCES ${ALL_SOURCES} ${DIR}/*.*) +ENDFOREACH() + +add_library(${PROJECT_NAME} SHARED ${ALL_SOURCES}) +target_link_libraries(${PROJECT_NAME} PRIVATE ${TORCH_LIBRARIES}) + +if(WITH_MPS) + find_library(metal NAMES Metal) + find_library(foundation NAMES Foundation) + target_link_libraries(${PROJECT_NAME} PRIVATE ${metal} ${foundation}) +endif() + +if (WITH_PNG) + target_link_libraries(${PROJECT_NAME} PRIVATE ${PNG_LIBRARY}) +endif() + +if (WITH_JPEG) + target_link_libraries(${PROJECT_NAME} PRIVATE ${JPEG_LIBRARIES}) +endif() + +if (WITH_WEBP) + target_link_libraries(${PROJECT_NAME} PRIVATE ${WEBP_LIBRARIES}) +endif() + +if (WITH_AVIF) + target_link_libraries(${PROJECT_NAME} PRIVATE ${AVIF_LIBRARIES}) +endif() + +set_target_properties(${PROJECT_NAME} PROPERTIES + EXPORT_NAME TorchVision + INSTALL_RPATH ${TORCH_INSTALL_PREFIX}/lib) +
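+# For reference, a minimal downstream CMakeLists.txt could consume the exported target roughly like this (a sketch; `my_app` and its sources are placeholders): +# find_package(TorchVision REQUIRED) +# add_executable(my_app main.cpp) +# target_link_libraries(my_app PRIVATE TorchVision::TorchVision)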
+include_directories(torchvision/csrc) + +if (WITH_PNG) + include_directories(${PNG_INCLUDE_DIRS}) +endif() + +if (WITH_JPEG) + include_directories(${JPEG_INCLUDE_DIRS}) +endif() + +if (WITH_WEBP) + include_directories(${WEBP_INCLUDE_DIRS}) +endif() + +if (WITH_AVIF) + include_directories(${AVIF_INCLUDE_DIRS}) +endif() + +set(TORCHVISION_CMAKECONFIG_INSTALL_DIR "share/cmake/TorchVision" CACHE STRING "install path for TorchVisionConfig.cmake") + +configure_package_config_file(cmake/TorchVisionConfig.cmake.in + "${CMAKE_CURRENT_BINARY_DIR}/TorchVisionConfig.cmake" + INSTALL_DESTINATION ${TORCHVISION_CMAKECONFIG_INSTALL_DIR}) + +write_basic_package_version_file(${CMAKE_CURRENT_BINARY_DIR}/TorchVisionConfigVersion.cmake + VERSION ${TORCHVISION_VERSION} + COMPATIBILITY AnyNewerVersion) + +install(FILES ${CMAKE_CURRENT_BINARY_DIR}/TorchVisionConfig.cmake + ${CMAKE_CURRENT_BINARY_DIR}/TorchVisionConfigVersion.cmake + DESTINATION ${TORCHVISION_CMAKECONFIG_INSTALL_DIR}) + +install(TARGETS ${PROJECT_NAME} + EXPORT TorchVisionTargets + LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} + ) -add_library (${PROJECT_NAME} SHARED ${MODELS_SOURCES}) -target_link_libraries(${PROJECT_NAME} PUBLIC "${TORCH_LIBRARIES}") +install(EXPORT TorchVisionTargets + NAMESPACE TorchVision:: + DESTINATION ${TORCHVISION_CMAKECONFIG_INSTALL_DIR}) -install(TARGETS ${PROJECT_NAME} DESTINATION ${CMAKE_INSTALL_PREFIX}/lib) -install(FILES ${HEADERS} DESTINATION ${CMAKE_INSTALL_PREFIX}/include/${PROJECT_NAME}) -install(FILES ${MODELS_HEADERS} DESTINATION ${CMAKE_INSTALL_PREFIX}/include/${PROJECT_NAME}/models) +FOREACH(INPUT_DIR ${ALLOW_LISTED}) + string(REPLACE "${TVCPP}" "${CMAKE_INSTALL_INCLUDEDIR}/${PROJECT_NAME}" OUTPUT_DIR ${INPUT_DIR}) + file(GLOB INPUT_FILES ${INPUT_DIR}/*.*) + install(FILES ${INPUT_FILES} DESTINATION ${OUTPUT_DIR}) +ENDFOREACH() diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 00000000000..b91e23b17c0 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,76 @@ +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to make participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, sex characteristics, gender identity and expression, +level of experience, education, socio-economic status, nationality, personal +appearance, race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or +advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic +address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a +professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies within all project spaces, and it also applies when +an individual is representing the project or its community in public spaces. +Examples of representing a project or community include using an official +project e-mail address, posting via an official social media account, or acting +as an appointed representative at an online or offline event. Representation of +a project may be further defined and clarified by project maintainers. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at . All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see +https://www.contributor-covenant.org/faq diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000000..41ecd860055 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,234 @@ +# Contributing to Torchvision + +We want to make contributing to this project as easy and transparent as possible. + +## TL;DR + +We appreciate all contributions. If you are interested in contributing to Torchvision, there are many ways to help out. 
+Your contributions may fall into the following categories:
+
+- It helps the project if you:
+  - Report issues you're facing
+  - Give a :+1: on issues that others reported and that are relevant to you
+
+- Answering queries on the issue tracker and investigating bugs are very valuable contributions to the project.
+
+- You would like to improve the documentation. This is no less important than improving the library itself!
+If you find a typo in the documentation, do not hesitate to submit a GitHub pull request.
+
+- If you would like to fix a bug:
+  - please pick one from the [list of open issues labelled as "help wanted"](https://github.com/pytorch/vision/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
+  - comment on the issue to say that you want to work on it
+  - send a PR with your fix, see below.
+
+- If you plan to contribute new features, utility functions or extensions, please first open an issue and discuss the feature with us.
+
+## Issues
+
+We use GitHub issues to track public bugs. Please ensure your description is
+clear and has sufficient instructions to be able to reproduce the issue.
+
+## Development installation
+
+### Dependencies
+
+Start by installing the **nightly** build of PyTorch following the [official
+instructions](https://pytorch.org/get-started/locally/). Note that the official
+instructions may ask you to install torchvision itself. If you are doing development
+on torchvision, you should not install prebuilt torchvision packages.
+
+**Optionally**, install `libpng` and `libjpeg-turbo` if you want to enable
+support for native encoding / decoding of PNG and JPEG formats in
+[torchvision.io](https://pytorch.org/vision/stable/io.html#image):
+
+```bash
+conda install libpng libjpeg-turbo -c pytorch
+```
+
+Note: you can use the `TORCHVISION_INCLUDE` and `TORCHVISION_LIBRARY`
+environment variables to tell the build system where to find those libraries if
+they are in specific locations. Take a look at
+[setup.py](https://github.com/pytorch/vision/blob/main/setup.py) for more
+details.
+
+### Clone and install torchvision
+
+```bash
+git clone https://github.com/pytorch/vision.git
+cd vision
+python setup.py develop  # use install instead of develop if you don't care about development.
+# or, for OSX
+# MACOSX_DEPLOYMENT_TARGET=10.9 CC=clang CXX=clang++ python setup.py develop
+# for C++ debugging, use DEBUG=1
+# DEBUG=1 python setup.py develop
+```
+
+By default, GPU support is built if CUDA is found and `torch.cuda.is_available()` is true. It's possible to force
+building GPU support by setting the `FORCE_CUDA=1` environment variable, which is useful when building a docker image.
+
+We don't officially support building from source using `pip`, but _if_ you do, you'll need to use the
+`--no-build-isolation` flag.
+
+#### Other development dependencies (some of these are needed to run tests):
+
+```
+pip install expecttest flake8 typing mypy pytest pytest-mock scipy requests
+```
+
+## Development Process
+
+If you plan to modify the code or documentation, please follow the steps below:
+
+1. Fork the repository and create your branch from `main`.
+2. If you have modified the code (new feature or bug-fix), please add unit tests.
+3. If you have changed APIs, update the documentation. Make sure the documentation builds.
+4. Ensure the test suite passes.
+5. Make sure your code passes the formatting checks (see below).
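+
+For steps 4 and 5, a quick local sanity check might look like this (a sketch;
+point pytest at whichever test file covers your change):
+
+```bash
+# check that torchvision is imported from your checkout, not a prebuilt wheel
+python -c "import torchvision; print(torchvision.__version__, torchvision.__file__)"
+# run a small, fast subset of the test suite
+pytest test/test_transforms.py -vvv -k test_center_crop
+```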
+
+For more details about pull requests,
+please read [GitHub's guides](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request).
+
+If you would like to contribute a new model, please see [here](#New-architecture-or-improved-model-weights).
+
+If you would like to contribute a new dataset, please see [here](#New-dataset).
+
+### Code formatting and typing
+
+#### Formatting
+
+The torchvision code is formatted by [black](https://black.readthedocs.io/en/stable/),
+and checked against pep8 compliance with [flake8](https://flake8.pycqa.org/en/latest/).
+Instead of relying directly on `black`, however, we rely on
+[ufmt](https://github.com/omnilib/ufmt), for compatibility reasons with Facebook
+internal infrastructure.
+
+To format your code, install `ufmt` with `pip install ufmt==1.3.3 black==22.3.0 usort==1.0.2` and use e.g.:
+
+```bash
+ufmt format torchvision
+```
+
+For the vast majority of cases, this is all you should need to run. To make the
+formatting a bit faster, you can also choose to only apply `ufmt` to the
+files that were edited in your PR with e.g.:
+
+```bash
+ufmt format `git diff main --name-only`
+```
+
+Similarly, you can check for `flake8` errors with `flake8 torchvision`, although
+they should be fairly rare considering that most of the errors are automatically
+taken care of by `ufmt` already.
+
+##### Pre-commit hooks
+
+For convenience and **purely optionally**, you can rely on [pre-commit
+hooks](https://pre-commit.com/) which will run both `ufmt` and `flake8` prior to
+every commit.
+
+First install the `pre-commit` package with `pip install pre-commit`, and then
+run `pre-commit install` at the root of the repo for the hooks to be set up -
+that's it.
+
+Feel free to read the [pre-commit docs](https://pre-commit.com/#usage) to learn
+more and improve your workflow. You'll see for example that `pre-commit run
+--all-files` will run both `ufmt` and `flake8` without the need for you to
+commit anything, and that the `--no-verify` flag can be added to `git commit` to
+temporarily deactivate the hooks.
+
+#### Type annotations
+
+The codebase has type annotations; please make sure to add type hints if required. We use the `mypy` tool for type checking:
+```bash
+mypy --config-file mypy.ini
+```
+
+### Unit tests
+
+Before running tests make sure to install [test dependencies](#other-development-dependencies-some-of-these-are-needed-to-run-tests).
+
+If you have modified the code by adding a new feature or a bug-fix, please add unit tests for that. To run a specific
+test:
+```bash
+pytest test/<test-module.py> -vvv -k <test_myfunc>
+# e.g. pytest test/test_transforms.py -vvv -k test_center_crop
+```
+
+If you would like to run all tests:
+```bash
+pytest test -vvv
+```
+
+Tests that require internet access should be in
+`test/test_internet.py`.
+
+### Documentation
+
+Torchvision uses [Google style](http://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html)
+for formatting docstrings. Lines inside docstring blocks must be limited to 120 characters.
+
+Please follow the instructions below to build and deploy the documentation locally.
+
+#### Install requirements
+
+```bash
+cd docs
+pip install -r requirements.txt
+```
+
+#### Build
+
+```bash
+cd docs
+make html-noplot
+```
+
+Then open `docs/build/html/index.html` in your favorite browser.
+
+The docs are also automatically built when you submit a PR. The job that
+builds the docs is named `build_docs`. You can access the rendered docs by
+clicking on that job and then going to the "Artifacts" tab.
+
+You can clean the built docs and re-start the build from scratch by doing ``make
+clean``.
+
+#### Building the example gallery - or not
+
+In most cases, running `make html-noplot` is enough to build the docs for your
+specific use-case. The `noplot` part tells sphinx **not** to build the examples
+in the [gallery](https://pytorch.org/vision/stable/auto_examples/index.html),
+which saves a lot of building time.
+
+If you need to build all the examples in the gallery, then you can use `make
+html`.
+
+You can also choose to only build a subset of the examples by using the
+``EXAMPLES_PATTERN`` env variable, which accepts a regular expression. For
+example, ``EXAMPLES_PATTERN="transforms" make html`` will only build the examples
+with "transforms" in their name.
+
+### New architecture or improved model weights
+
+Please refer to the guidelines in [Contributing to Torchvision - Models](https://github.com/pytorch/vision/blob/main/CONTRIBUTING_MODELS.md).
+
+### New dataset
+
+Please do not send any PR with a new dataset without discussing
+it in an issue first, as it will most likely not be accepted.
+
+### Pull Request
+
+If all previous checks (flake8, mypy, unit tests) are passing, please send a PR. The submitted PR will then be tested on
+different operating systems, Python versions and hardware.
+
+For more details about the pull request workflow,
+please read [GitHub's guides](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request).
+
+## License
+
+By contributing to Torchvision, you agree that your contributions will be licensed
+under the LICENSE file in the root directory of this source tree.
+
+Contributors are also required to [sign our Contributor License Agreement](https://code.facebook.com/cla).
diff --git a/CONTRIBUTING_MODELS.md b/CONTRIBUTING_MODELS.md
new file mode 100644
index 00000000000..390a25a0f89
--- /dev/null
+++ b/CONTRIBUTING_MODELS.md
@@ -0,0 +1,65 @@
+# Contributing to Torchvision - Models
+
+- [New Model Architectures - Overview](#new-model-architectures---overview)
+
+- [New Weights for Existing Model Architectures](#new-weights-for-existing-model-architectures)
+
+## New Model Architectures - Overview
+
+Anyone interested in adding a model architecture is also expected to train it, so here are a few important considerations:
+
+- Training big models requires lots of resources and the cost quickly adds up
+
+- Reproducing models is fun but also risky, as you might not always get the results reported in the paper. It might require a huge amount of effort to close the gap
+
+- The contribution might not get merged if it significantly lags behind in terms of accuracy, speed, etc.
+
+- Including new models in TorchVision might not be the best approach, so other options such as releasing the model through [PyTorch Hub](https://pytorch.org/hub/) should be considered
+
+So, before starting any work and submitting a PR, there are a few critical things that need to be taken into account in order to make sure the planned contribution is within the scope of TorchVision, and that the requirements and expectations are discussed beforehand. If this step is skipped and a PR is submitted without prior discussion, it will almost certainly be rejected.
+
+### 1. Preparation work
+
+- Start by looking into this [issue](https://github.com/pytorch/vision/issues/2707) in order to have an idea of the models that are being considered, express your willingness to add a new model and discuss with the community whether this model should be included in TorchVision. It is very important at this stage to make sure that there is an agreement on the value of having this model in TorchVision and that no one else is already working on it.
+
+- If the decision is to include the new model, then please create a new ticket which will be used for all design and implementation discussions prior to the PR. One of the TorchVision maintainers will reach out at this stage; they will be your point of contact (POC) from this point onwards, providing support, guidance and regular feedback.
+
+### 2. Implement the model
+
+Please take a look at existing models in TorchVision to get familiar with the idioms. Also, please look at recent contributions for new models. If in doubt about any design decisions, you can ask for feedback on the issue created in step 1. Examples of things to take into account:
+
+- The implementation should be as close as possible to the canonical implementation/paper
+- The PR must include the code implementation, documentation and tests
+- It should also extend the existing reference scripts used to train the model
+- The weights need to closely reproduce the results of the paper in terms of accuracy, even though the final weights to be deployed will be those trained by the TorchVision maintainers
+- The PR description should include the commands/configuration used to train the model, so that the TorchVision maintainers can easily run them to verify the implementation and generate the final model to be released
+- Make sure we re-use existing components as much as possible (inheritance)
+- New primitives (transforms, losses, etc.) can be added if necessary, but the final location will be determined after discussion with the dedicated maintainer
+- Please take a look at the detailed [implementation and documentation guidelines](https://github.com/pytorch/vision/issues/5319) for a fine-grained list of things not to be missed
+
+### 3. Train the model with reference scripts
+
+To validate the new model against the common benchmark, as well as to generate pre-trained weights, you must use TorchVision’s reference scripts to train the model.
+
+Make sure all logs and a final (or best) checkpoint are saved, because a submission is expected to show that the model has been successfully trained and that the results are in line with the original paper/repository. This will allow the reviewers to quickly check the validity of the submission, but please note that the final model to be released will be re-trained by the maintainers in order to verify reproducibility, ensure that the changes made during the PR review did not introduce any bugs, and avoid moving around a large amount of data (including all checkpoints and logs).
+
+### 4. Submit a PR
+
+Submit a PR and tag the assigned maintainer. This PR should:
+
+- Link to the original ticket
+- Provide a link to the original paper and the original repository if available
+- Highlight the important test metrics and how they compare to the original paper
+- Highlight any design choices that deviate from the original paper/implementation and the rationale for these choices
+
+## New Weights for Existing Model Architectures
+
+The process of improving existing models, for instance improving accuracy by retraining the model with a different set of hyperparameters or augmentations, is the following:
+
+1. Open a ticket and discuss with the community and maintainers whether this improvement should be added to TorchVision. Note that to add new weights the improvement should be significant.
+
+2. Train the model using TorchVision reference scripts. You can add new primitives (transforms, losses, etc.) when necessary, but the final location will be determined after discussion with the dedicated maintainer.
+
+3. Open a PR with the new weights, together with the training logs and the chosen checkpoint, so the reviewers can verify the submission. Details on how the model was trained, i.e., the training command using the reference scripts, should be included in the PR.
+
+4. The PR reviewers should replicate the results on their side to verify the submission, and if all goes well the new weights should be ready to be released!
diff --git a/MANIFEST.in b/MANIFEST.in
index 75f238c0a2c..9e45188df35 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,4 +1,4 @@
-include README.rst
+include README.md
 include LICENSE
 recursive-exclude * __pycache__
diff --git a/README.md b/README.md
new file mode 100644
index 00000000000..1076a7a186d
--- /dev/null
+++ b/README.md
@@ -0,0 +1,128 @@
+# torchvision
+
+[![total torchvision downloads](https://pepy.tech/badge/torchvision)](https://pepy.tech/project/torchvision)
+[![documentation](https://img.shields.io/badge/dynamic/json.svg?label=docs&url=https%3A%2F%2Fpypi.org%2Fpypi%2Ftorchvision%2Fjson&query=%24.info.version&colorB=brightgreen&prefix=v)](https://pytorch.org/vision/stable/index.html)
+
+The torchvision package consists of popular datasets, model architectures, and common image transformations for computer
+vision.
+
+## Installation
+
+Please refer to the [official
+instructions](https://pytorch.org/get-started/locally/) to install the stable
+versions of `torch` and `torchvision` on your system.
+
+To build from source, refer to our [contributing
+page](https://github.com/pytorch/vision/blob/main/CONTRIBUTING.md#development-installation).
+
+The following table shows the corresponding `torchvision` versions and supported Python
+versions.
+
+| `torch`            | `torchvision`      | Python              |
+| ------------------ | ------------------ | ------------------- |
+| `main` / `nightly` | `main` / `nightly` | `>=3.9`, `<=3.12`   |
+| `2.5`              | `0.20`             | `>=3.9`, `<=3.12`   |
+| `2.4`              | `0.19`             | `>=3.8`, `<=3.12`   |
+| `2.3`              | `0.18`             | `>=3.8`, `<=3.12`   |
+| `2.2`              | `0.17`             | `>=3.8`, `<=3.11`   |
+| `2.1`              | `0.16`             | `>=3.8`, `<=3.11`   |
+| `2.0`              | `0.15`             | `>=3.8`, `<=3.11`   |
+
+<details>
+    <summary>older versions</summary>
+
+| `torch` | `torchvision`     | Python                    |
+|---------|-------------------|---------------------------|
+| `1.13`  | `0.14`            | `>=3.7.2`, `<=3.10`       |
+| `1.12`  | `0.13`            | `>=3.7`, `<=3.10`         |
+| `1.11`  | `0.12`            | `>=3.7`, `<=3.10`         |
+| `1.10`  | `0.11`            | `>=3.6`, `<=3.9`          |
+| `1.9`   | `0.10`            | `>=3.6`, `<=3.9`          |
+| `1.8`   | `0.9`             | `>=3.6`, `<=3.9`          |
+| `1.7`   | `0.8`             | `>=3.6`, `<=3.9`          |
+| `1.6`   | `0.7`             | `>=3.6`, `<=3.8`          |
+| `1.5`   | `0.6`             | `>=3.5`, `<=3.8`          |
+| `1.4`   | `0.5`             | `==2.7`, `>=3.5`, `<=3.8` |
+| `1.3`   | `0.4.2` / `0.4.3` | `==2.7`, `>=3.5`, `<=3.7` |
+| `1.2`   | `0.4.1`           | `==2.7`, `>=3.5`, `<=3.7` |
+| `1.1`   | `0.3`             | `==2.7`, `>=3.5`, `<=3.7` |
+| `<=1.0` | `0.2`             | `==2.7`, `>=3.5`, `<=3.7` |
+
+</details>
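+
+To check which `torch` / `torchvision` pair you actually have installed (a
+minimal sketch):
+
+```bash
+python -c "import torch, torchvision; print(torch.__version__, torchvision.__version__)"
+```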
+
+## Image Backends
+
+Torchvision currently supports the following image backends:
+
+- torch tensors
+- PIL images:
+  - [Pillow](https://python-pillow.org/)
+  - [Pillow-SIMD](https://github.com/uploadcare/pillow-simd) - a **much faster** drop-in replacement for Pillow with SIMD.
+
+Read more in our [docs](https://pytorch.org/vision/stable/transforms.html).
+
+## [UNSTABLE] Video Backend
+
+Torchvision currently supports the following video backends:
+
+- [pyav](https://github.com/PyAV-Org/PyAV) (default) - Pythonic binding for ffmpeg libraries.
+- video_reader - This needs ffmpeg to be installed and torchvision to be built from source. There shouldn't be any
+  conflicting version of ffmpeg installed. Currently, this is only supported on Linux.
+
+```
+conda install -c conda-forge 'ffmpeg<4.3'
+python setup.py install
+```
+
+## Using the models in C++
+
+Refer to [example/cpp](https://github.com/pytorch/vision/tree/main/examples/cpp).
+
+**DISCLAIMER**: the `libtorchvision` library includes the torchvision
+custom ops as well as most of the C++ torchvision APIs. Those APIs do not come
+with any backward-compatibility guarantees and may change from one version to
+the next. Only the Python APIs are stable and come with backward-compatibility
+guarantees. So, if you need stability within a C++ environment, your best bet is
+to export the Python APIs via TorchScript.
+
+## Documentation
+
+You can find the API documentation on the pytorch website:
+<https://pytorch.org/vision/stable/index.html>
+
+## Contributing
+
+See the [CONTRIBUTING](CONTRIBUTING.md) file for how to help out.
+
+## Disclaimer on Datasets
+
+This is a utility library that downloads and prepares public datasets. We do not host or distribute these datasets,
+vouch for their quality or fairness, or claim that you have license to use the dataset. It is your responsibility to
+determine whether you have permission to use the dataset under the dataset's license.
+
+If you're a dataset owner and wish to update any part of it (description, citation, etc.), or do not want your dataset
+to be included in this library, please get in touch through a GitHub issue. Thanks for your contribution to the ML
+community!
+
+## Pre-trained Model License
+
+The pre-trained models provided in this library may have their own licenses or terms and conditions derived from the
+dataset used for training. It is your responsibility to determine whether you have permission to use the models for your
+use case.
+
+More specifically, SWAG models are released under the CC-BY-NC 4.0 license. See
+[SWAG LICENSE](https://github.com/facebookresearch/SWAG/blob/main/LICENSE) for additional details.
+
+## Citing TorchVision
+
+If you find TorchVision useful in your work, please consider citing the following BibTeX entry:
+
+```bibtex
+@software{torchvision2016,
+    title        = {TorchVision: PyTorch's Computer Vision library},
+    author       = {TorchVision maintainers and contributors},
+    year         = 2016,
+    journal      = {GitHub repository},
+    publisher    = {GitHub},
+    howpublished = {\url{https://github.com/pytorch/vision}}
+}
+```
diff --git a/README.rst b/README.rst
deleted file mode 100644
index 30dce8b4639..00000000000
--- a/README.rst
+++ /dev/null
@@ -1,89 +0,0 @@
-torchvision
-===========
-
-.. image:: https://travis-ci.org/pytorch/vision.svg?branch=master
-   :target: https://travis-ci.org/pytorch/vision
-
-.. image:: https://codecov.io/gh/pytorch/vision/branch/master/graph/badge.svg
-   :target: https://codecov.io/gh/pytorch/vision
-
-..
image:: https://pepy.tech/badge/torchvision - :target: https://pepy.tech/project/torchvision - -.. image:: https://img.shields.io/badge/dynamic/json.svg?label=docs&url=https%3A%2F%2Fpypi.org%2Fpypi%2Ftorchvision%2Fjson&query=%24.info.version&colorB=brightgreen&prefix=v - :target: https://pytorch.org/docs/stable/torchvision/index.html - - -The torchvision package consists of popular datasets, model architectures, and common image transformations for computer vision. - -Installation -============ - -TorchVision requires PyTorch 1.2 or newer. - -Anaconda: - -.. code:: bash - - conda install torchvision -c pytorch - -pip: - -.. code:: bash - - pip install torchvision - -From source: - -.. code:: bash - - python setup.py install - # or, for OSX - # MACOSX_DEPLOYMENT_TARGET=10.9 CC=clang CXX=clang++ python setup.py install - -By default, GPU support is built if CUDA is found and ``torch.cuda.is_available()`` is true. -It's possible to force building GPU support by setting ``FORCE_CUDA=1`` environment variable, -which is useful when building a docker image. - -Image Backend -============= -Torchvision currently supports the following image backends: - -* `Pillow`_ (default) - -* `Pillow-SIMD`_ - a **much faster** drop-in replacement for Pillow with SIMD. If installed will be used as the default. - -* `accimage`_ - if installed can be activated by calling :code:`torchvision.set_image_backend('accimage')` - -.. _Pillow : https://python-pillow.org/ -.. _Pillow-SIMD : https://github.com/uploadcare/pillow-simd -.. _accimage: https://github.com/pytorch/accimage - -C++ API -======= -TorchVision also offers a C++ API that contains C++ equivalent of python models. - -Installation From source: - -.. code:: bash - - mkdir build - cd build - cmake .. - make - make install - -Documentation -============= -You can find the API documentation on the pytorch website: http://pytorch.org/docs/master/torchvision/ - -Contributing -============ -We appreciate all contributions. If you are planning to contribute back bug-fixes, please do so without any further discussion. If you plan to contribute new features, utility functions or extensions, please first open an issue and discuss the feature with us. - -Disclaimer on Datasets -====================== - -This is a utility library that downloads and prepares public datasets. We do not host or distribute these datasets, vouch for their quality or fairness, or claim that you have license to use the dataset. It is your responsibility to determine whether you have permission to use the dataset under the dataset's license. - -If you're a dataset owner and wish to update any part of it (description, citation, etc.), or do not want your dataset to be included in this library, please get in touch through a GitHub issue. Thanks for your contribution to the ML community! diff --git a/android/.gitignore b/android/.gitignore new file mode 100644 index 00000000000..adcfad04c91 --- /dev/null +++ b/android/.gitignore @@ -0,0 +1,6 @@ +local.properties +**/*.iml +.gradle +.idea/* +.externalNativeBuild +build diff --git a/android/README.md b/android/README.md new file mode 100644 index 00000000000..788c83f26de --- /dev/null +++ b/android/README.md @@ -0,0 +1,3 @@ +## Status + +The Android demo of TorchVision is currently unmaintained, untested and likely out-of-date. 
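+
+If you nevertheless want to try building it, the entry point is the Gradle
+wrapper in this directory; a rough sketch (untested, per the status above):
+
+```bash
+cd android
+./gradlew :ops:assembleDebug   # build only the torchvision_ops library module
+```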
diff --git a/android/build.gradle b/android/build.gradle new file mode 100644 index 00000000000..f7995a07f5b --- /dev/null +++ b/android/build.gradle @@ -0,0 +1,40 @@ +allprojects { + buildscript { + ext { + minSdkVersion = 21 + targetSdkVersion = 28 + compileSdkVersion = 28 + buildToolsVersion = '28.0.3' + + coreVersion = "1.2.0" + extJUnitVersion = "1.1.1" + runnerVersion = "1.2.0" + rulesVersion = "1.2.0" + junitVersion = "4.12" + + androidSupportAppCompatV7Version = "28.0.0" + fbjniJavaOnlyVersion = "0.0.3" + soLoaderNativeLoaderVersion = "0.10.5" + pytorchAndroidVersion = "1.12" + } + + repositories { + google() + mavenCentral() + } + + dependencies { + classpath 'com.android.tools.build:gradle:4.1.2' + classpath 'com.vanniktech:gradle-maven-publish-plugin:0.14.2' + } + } + + repositories { + google() + mavenCentral() + } +} + +ext.deps = [ + jsr305: 'com.google.code.findbugs:jsr305:3.0.1', +] diff --git a/android/gradle.properties b/android/gradle.properties new file mode 100644 index 00000000000..8204b73b051 --- /dev/null +++ b/android/gradle.properties @@ -0,0 +1,24 @@ +ABI_FILTERS=armeabi-v7a,arm64-v8a,x86,x86_64 + +VERSION_NAME=0.15.0-SNAPSHOT +GROUP=org.pytorch +MAVEN_GROUP=org.pytorch +SONATYPE_STAGING_PROFILE=orgpytorch +POM_URL=https://github.com/pytorch/vision/ +POM_SCM_URL=https://github.com/pytorch/vision.git +POM_SCM_CONNECTION=scm:git:https://github.com/pytorch/vision +POM_SCM_DEV_CONNECTION=scm:git:git@github.com:pytorch/vision.git +POM_LICENSE_NAME=BSD 3-Clause +POM_LICENSE_URL=https://github.com/pytorch/vision/blob/main/LICENSE +POM_ISSUES_URL=https://github.com/pytorch/vision/issues +POM_LICENSE_DIST=repo +POM_DEVELOPER_ID=pytorch +POM_DEVELOPER_NAME=pytorch + +# Gradle internals +android.useAndroidX=true +android.enableJetifier=true + +testAppAllVariantsEnabled=false + +org.gradle.jvmargs=-Xmx12g diff --git a/android/gradle/wrapper/gradle-wrapper.jar b/android/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 00000000000..94336fcae91 Binary files /dev/null and b/android/gradle/wrapper/gradle-wrapper.jar differ diff --git a/android/gradle/wrapper/gradle-wrapper.properties b/android/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 00000000000..442d9132ea3 --- /dev/null +++ b/android/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,5 @@ +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-6.8.3-bin.zip +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists diff --git a/android/gradle_scripts/android_tasks.gradle b/android/gradle_scripts/android_tasks.gradle new file mode 100644 index 00000000000..6bba126b2f6 --- /dev/null +++ b/android/gradle_scripts/android_tasks.gradle @@ -0,0 +1,11 @@ +afterEvaluate { project -> + if (POM_PACKAGING == 'aar') { + task headersJar(type: Jar) { + archiveClassifier.set('headers') + from("$rootDir/cxx/") { + include '**/*.h' + } + } + artifacts.add('archives', headersJar) + } +} diff --git a/android/gradle_scripts/release.gradle b/android/gradle_scripts/release.gradle new file mode 100644 index 00000000000..ada97f33964 --- /dev/null +++ b/android/gradle_scripts/release.gradle @@ -0,0 +1,3 @@ +apply from: rootProject.file('gradle_scripts/android_tasks.gradle') + +apply plugin: 'com.vanniktech.maven.publish' diff --git a/android/gradlew b/android/gradlew new file mode 100755 index 00000000000..cccdd3d517f --- /dev/null +++ b/android/gradlew @@ -0,0 +1,172 @@ +#!/usr/bin/env sh + 
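+# Illustrative usage notes (not part of the generated wrapper script): run this
+# from the android/ directory, e.g. `./gradlew assembleDebug`; properties from
+# gradle.properties such as ABI_FILTERS can be overridden on the command line,
+# e.g. `./gradlew -PABI_FILTERS=arm64-v8a assembleDebug`.
+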
+############################################################################## +## +## Gradle start up script for UN*X +## +############################################################################## + +# Attempt to set APP_HOME +# Resolve links: $0 may be a link +PRG="$0" +# Need this for relative symlinks. +while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG=`dirname "$PRG"`"/$link" + fi +done +SAVED="`pwd`" +cd "`dirname \"$PRG\"`/" >/dev/null +APP_HOME="`pwd -P`" +cd "$SAVED" >/dev/null + +APP_NAME="Gradle" +APP_BASE_NAME=`basename "$0"` + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS="" + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD="maximum" + +warn () { + echo "$*" +} + +die () { + echo + echo "$*" + echo + exit 1 +} + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +nonstop=false +case "`uname`" in + CYGWIN* ) + cygwin=true + ;; + Darwin* ) + darwin=true + ;; + MINGW* ) + msys=true + ;; + NONSTOP* ) + nonstop=true + ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD="java" + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then + MAX_FD_LIMIT=`ulimit -H -n` + if [ $? -eq 0 ] ; then + if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then + MAX_FD="$MAX_FD_LIMIT" + fi + ulimit -n $MAX_FD + if [ $? 
-ne 0 ] ; then + warn "Could not set maximum file descriptor limit: $MAX_FD" + fi + else + warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" + fi +fi + +# For Darwin, add options to specify how the application appears in the dock +if $darwin; then + GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" +fi + +# For Cygwin, switch paths to Windows format before running java +if $cygwin ; then + APP_HOME=`cygpath --path --mixed "$APP_HOME"` + CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` + JAVACMD=`cygpath --unix "$JAVACMD"` + + # We build the pattern for arguments to be converted via cygpath + ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` + SEP="" + for dir in $ROOTDIRSRAW ; do + ROOTDIRS="$ROOTDIRS$SEP$dir" + SEP="|" + done + OURCYGPATTERN="(^($ROOTDIRS))" + # Add a user-defined pattern to the cygpath arguments + if [ "$GRADLE_CYGPATTERN" != "" ] ; then + OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" + fi + # Now convert the arguments - kludge to limit ourselves to /bin/sh + i=0 + for arg in "$@" ; do + CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` + CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option + + if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition + eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` + else + eval `echo args$i`="\"$arg\"" + fi + i=$((i+1)) + done + case $i in + (0) set -- ;; + (1) set -- "$args0" ;; + (2) set -- "$args0" "$args1" ;; + (3) set -- "$args0" "$args1" "$args2" ;; + (4) set -- "$args0" "$args1" "$args2" "$args3" ;; + (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; + (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; + (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; + (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; + (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; + esac +fi + +# Escape application args +save () { + for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done + echo " " +} +APP_ARGS=$(save "$@") + +# Collect all arguments for the java command, following the shell quoting and substitution rules +eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" + +# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong +if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then + cd "$(dirname "$0")" +fi + +exec "$JAVACMD" "$@" diff --git a/android/gradlew.bat b/android/gradlew.bat new file mode 100644 index 00000000000..f9553162f12 --- /dev/null +++ b/android/gradlew.bat @@ -0,0 +1,84 @@ +@if "%DEBUG%" == "" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%" == "" set DIRNAME=. +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 
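+@rem For example, to raise the JVM heap for the wrapper (illustrative only):
+@rem set DEFAULT_JVM_OPTS="-Xmx2g"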
+set DEFAULT_JVM_OPTS= + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if "%ERRORLEVEL%" == "0" goto init + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto init + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:init +@rem Get command-line arguments, handling Windows variants + +if not "%OS%" == "Windows_NT" goto win9xME_args + +:win9xME_args +@rem Slurp the command line arguments. +set CMD_LINE_ARGS= +set _SKIP=2 + +:win9xME_args_slurp +if "x%~1" == "x" goto execute + +set CMD_LINE_ARGS=%* + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% + +:end +@rem End local scope for the variables with windows NT shell +if "%ERRORLEVEL%"=="0" goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! +if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 +exit /b 1 + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/android/ops/CMakeLists.txt b/android/ops/CMakeLists.txt new file mode 100644 index 00000000000..fb8d4348e8e --- /dev/null +++ b/android/ops/CMakeLists.txt @@ -0,0 +1,47 @@ +cmake_minimum_required(VERSION 3.4.1) +set(TARGET torchvision_ops) +project(${TARGET} CXX) +set(CMAKE_CXX_STANDARD 17) + +string(APPEND CMAKE_CXX_FLAGS " -DMOBILE") + +set(build_DIR ${CMAKE_SOURCE_DIR}/build) +set(root_DIR ${CMAKE_CURRENT_LIST_DIR}/..) 
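+
+# Note: the pytorch_android*.aar directories globbed below are not checked in;
+# they are unpacked into ${build_DIR} by the extractAARForNativeBuild task in
+# ops/build.gradle, which copies the headers/ and jni/ trees out of the
+# org.pytorch:pytorch_android dependency.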
+ +file(GLOB VISION_SRCS + ../../torchvision/csrc/ops/cpu/*.h + ../../torchvision/csrc/ops/cpu/*.cpp + ../../torchvision/csrc/ops/*.h + ../../torchvision/csrc/ops/*.cpp) + +add_library(${TARGET} SHARED + ${VISION_SRCS} +) + +file(GLOB PYTORCH_INCLUDE_DIRS "${build_DIR}/pytorch_android*.aar/headers") +file(GLOB PYTORCH_INCLUDE_DIRS_CSRC "${build_DIR}/pytorch_android*.aar/headers/torch/csrc/api/include") +file(GLOB PYTORCH_LINK_DIRS "${build_DIR}/pytorch_android*.aar/jni/${ANDROID_ABI}") + +target_compile_options(${TARGET} PRIVATE + -fexceptions +) + +set(BUILD_SUBDIR ${ANDROID_ABI}) + +find_library(PYTORCH_LIBRARY pytorch_jni + PATHS ${PYTORCH_LINK_DIRS} + NO_CMAKE_FIND_ROOT_PATH) + +find_library(FBJNI_LIBRARY fbjni + PATHS ${PYTORCH_LINK_DIRS} + NO_CMAKE_FIND_ROOT_PATH) + +target_include_directories(${TARGET} PRIVATE + ${PYTORCH_INCLUDE_DIRS} + ${PYTORCH_INCLUDE_DIRS_CSRC} +) + +target_link_libraries(${TARGET} PRIVATE + ${PYTORCH_LIBRARY} + ${FBJNI_LIBRARY} +) diff --git a/android/ops/build.gradle b/android/ops/build.gradle new file mode 100644 index 00000000000..bfa2c393833 --- /dev/null +++ b/android/ops/build.gradle @@ -0,0 +1,93 @@ +apply plugin: 'com.android.library' +apply plugin: 'maven' + +repositories { + mavenCentral() + maven { + url "https://oss.sonatype.org/content/repositories/snapshots" + } + flatDir { + dirs 'aars' + } +} + +android { + configurations { + extractForNativeBuild + } + compileSdkVersion rootProject.compileSdkVersion + buildToolsVersion rootProject.buildToolsVersion + + + defaultConfig { + minSdkVersion rootProject.minSdkVersion + targetSdkVersion rootProject.targetSdkVersion + versionCode 0 + versionName "0.1" + + testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" + ndk { + abiFilters ABI_FILTERS.split(",") + } + } + + buildTypes { + debug { + minifyEnabled false + debuggable true + } + release { + minifyEnabled false + } + } + + externalNativeBuild { + cmake { + path "CMakeLists.txt" + } + } + + useLibrary 'android.test.runner' + useLibrary 'android.test.base' + useLibrary 'android.test.mock' +} + +dependencies { + implementation 'com.android.support:appcompat-v7:' + rootProject.androidSupportAppCompatV7Version + + extractForNativeBuild "org.pytorch:pytorch_android:$pytorchAndroidVersion" + + // For testing: deps on local aar files + //implementation(name: 'pytorch_android-release', ext: 'aar') + //extractForNativeBuild(name: 'pytorch_android-release', ext: 'aar') + //implementation 'com.facebook.fbjni:fbjni-java-only:0.0.3' +} + +task extractAARForNativeBuild { + doLast { + configurations.extractForNativeBuild.files.each { + def file = it.absoluteFile + copy { + from zipTree(file) + into "$buildDir/$file.name" + include "headers/**" + include "jni/**" + } + } + } +} + +tasks.whenTaskAdded { task -> + if (task.name.contains('externalNativeBuild')) { + task.dependsOn(extractAARForNativeBuild) + } +} + +apply from: rootProject.file('gradle_scripts/release.gradle') + +task sourcesJar(type: Jar) { + from android.sourceSets.main.java.srcDirs + classifier = 'sources' +} + +artifacts.add('archives', sourcesJar) diff --git a/android/ops/gradle.properties b/android/ops/gradle.properties new file mode 100644 index 00000000000..5a4ea2f3aba --- /dev/null +++ b/android/ops/gradle.properties @@ -0,0 +1,4 @@ +POM_NAME=torchvision ops +POM_DESCRIPTION=torchvision ops +POM_ARTIFACT_ID=torchvision_ops +POM_PACKAGING=aar diff --git a/android/ops/src/main/AndroidManifest.xml b/android/ops/src/main/AndroidManifest.xml new file mode 100644 index 
00000000000..8ca386493c4 --- /dev/null +++ b/android/ops/src/main/AndroidManifest.xml @@ -0,0 +1 @@ + diff --git a/android/settings.gradle b/android/settings.gradle new file mode 100644 index 00000000000..6d34eb8d51a --- /dev/null +++ b/android/settings.gradle @@ -0,0 +1,4 @@ +include ':ops', ':test_app' + +project(':ops').projectDir = file('ops') +project(':test_app').projectDir = file('test_app/app') diff --git a/android/test_app/app/build.gradle b/android/test_app/app/build.gradle new file mode 100644 index 00000000000..84cf1d82e6b --- /dev/null +++ b/android/test_app/app/build.gradle @@ -0,0 +1,135 @@ +apply plugin: 'com.android.application' + +repositories { + mavenCentral() + maven { + url "https://oss.sonatype.org/content/repositories/snapshots" + } + flatDir { + dirs 'aars' + } +} + +android { + configurations { + extractForNativeBuild + } + compileOptions { + sourceCompatibility 1.8 + targetCompatibility 1.8 + } + compileSdkVersion rootProject.compileSdkVersion + buildToolsVersion rootProject.buildToolsVersion + defaultConfig { + applicationId "org.pytorch.testapp" + minSdkVersion rootProject.minSdkVersion + targetSdkVersion rootProject.targetSdkVersion + versionCode 1 + versionName "1.0" + ndk { + abiFilters ABI_FILTERS.split(",") + } + externalNativeBuild { + cmake { + abiFilters ABI_FILTERS.split(",") + arguments "-DANDROID_STL=c++_shared" + } + } + buildConfigField("String", "MODULE_ASSET_NAME", "\"frcnn_mnetv3.pt\"") + buildConfigField("String", "LOGCAT_TAG", "@string/app_name") + buildConfigField("long[]", "INPUT_TENSOR_SHAPE", "new long[]{3, 96, 96}") + addManifestPlaceholders([APP_NAME: "@string/app_name", MAIN_ACTIVITY: "org.pytorch.testapp.MainActivity"]) + } + buildTypes { + debug { + minifyEnabled false + debuggable true + } + release { + minifyEnabled false + } + } + flavorDimensions "model", "activity", "build" + productFlavors { + frcnnMnetv3 { + dimension "model" + applicationIdSuffix ".frcnnMnetv3" + buildConfigField("String", "MODULE_ASSET_NAME", "\"frcnn_mnetv3.pt\"") + addManifestPlaceholders([APP_NAME: "TV_FRCNN_MNETV3"]) + buildConfigField("String", "LOGCAT_TAG", "\"pytorch-frcnn-mnetv3\"") + } + camera { + dimension "activity" + addManifestPlaceholders([APP_NAME: "TV_CAMERA_FRCNN"]) + addManifestPlaceholders([MAIN_ACTIVITY: "org.pytorch.testapp.CameraActivity"]) + } + base { + dimension "activity" + } + aar { + dimension "build" + } + local { + dimension "build" + } + } + packagingOptions { + doNotStrip '**.so' + pickFirst '**.so' + } + + // Filtering for CI + if (!testAppAllVariantsEnabled.toBoolean()) { + variantFilter { variant -> + def names = variant.flavors*.name + if (names.contains("aar")) { + setIgnore(true) + } + } + } +} + +tasks.all { task -> + // Disable externalNativeBuild for all but nativeBuild variant + if (task.name.startsWith('externalNativeBuild') + && !task.name.contains('NativeBuild')) { + task.enabled = false + } +} + +dependencies { + implementation 'com.android.support:appcompat-v7:28.0.0' + implementation 'com.facebook.soloader:nativeloader:0.8.0' + localImplementation project(':ops') + + implementation "org.pytorch:pytorch_android:$pytorchAndroidVersion" + implementation "org.pytorch:pytorch_android_torchvision:$pytorchAndroidVersion" + + aarImplementation(name: 'pytorch_android-release', ext: 'aar') + aarImplementation(name: 'pytorch_android_torchvision-release', ext: 'aar') + + def camerax_version = "1.0.0-alpha05" + implementation "androidx.camera:camera-core:$camerax_version" + implementation 
"androidx.camera:camera-camera2:$camerax_version" + implementation 'com.google.android.material:material:1.0.0-beta01' +} + +task extractAARForNativeBuild { + doLast { + configurations.extractForNativeBuild.files.each { + def file = it.absoluteFile + copy { + from zipTree(file) + into "$buildDir/$file.name" + include "headers/**" + include "jni/**" + } + } + } +} + +tasks.whenTaskAdded { task -> + if (task.name.contains('externalNativeBuild')) { + task.dependsOn(extractAARForNativeBuild) + } +} diff --git a/android/test_app/app/src/main/AndroidManifest.xml b/android/test_app/app/src/main/AndroidManifest.xml new file mode 100644 index 00000000000..a83bf223bda --- /dev/null +++ b/android/test_app/app/src/main/AndroidManifest.xml @@ -0,0 +1,21 @@ + + + + + + + + + + + + + + + + diff --git a/android/test_app/app/src/main/java/org/pytorch/testapp/BBox.java b/android/test_app/app/src/main/java/org/pytorch/testapp/BBox.java new file mode 100644 index 00000000000..6fd60791864 --- /dev/null +++ b/android/test_app/app/src/main/java/org/pytorch/testapp/BBox.java @@ -0,0 +1,22 @@ +package org.pytorch.testapp; + +class BBox { + public final float score; + public final float x0; + public final float y0; + public final float x1; + public final float y1; + + public BBox(float score, float x0, float y0, float x1, float y1) { + this.score = score; + this.x0 = x0; + this.y0 = y0; + this.x1 = x1; + this.y1 = y1; + } + + @Override + public String toString() { + return String.format("Box{score=%f x0=%f y0=%f x1=%f y1=%f", score, x0, y0, x1, y1); + } +} diff --git a/android/test_app/app/src/main/java/org/pytorch/testapp/CameraActivity.java b/android/test_app/app/src/main/java/org/pytorch/testapp/CameraActivity.java new file mode 100644 index 00000000000..1c427bb82ba --- /dev/null +++ b/android/test_app/app/src/main/java/org/pytorch/testapp/CameraActivity.java @@ -0,0 +1,432 @@ +package org.pytorch.testapp; + +import android.Manifest; +import android.content.Context; +import android.content.pm.PackageManager; +import android.graphics.Bitmap; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Paint; +import android.graphics.Rect; +import android.os.Bundle; +import android.os.Handler; +import android.os.HandlerThread; +import android.os.SystemClock; +import android.util.DisplayMetrics; +import android.util.Log; +import android.util.Size; +import android.view.TextureView; +import android.view.ViewStub; +import android.widget.ImageView; +import android.widget.TextView; +import android.widget.Toast; +import androidx.annotation.Nullable; +import androidx.annotation.UiThread; +import androidx.annotation.WorkerThread; +import androidx.appcompat.app.AppCompatActivity; +import androidx.camera.core.CameraX; +import androidx.camera.core.ImageAnalysis; +import androidx.camera.core.ImageAnalysisConfig; +import androidx.camera.core.ImageProxy; +import androidx.camera.core.Preview; +import androidx.camera.core.PreviewConfig; +import androidx.core.app.ActivityCompat; +import com.facebook.soloader.nativeloader.NativeLoader; +import com.facebook.soloader.nativeloader.SystemDelegate; +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.ByteBuffer; +import java.nio.FloatBuffer; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import org.pytorch.IValue; +import org.pytorch.Module; +import org.pytorch.Tensor; + +public class CameraActivity extends 
AppCompatActivity { + + private static final float BBOX_SCORE_DRAW_THRESHOLD = 0.5f; + private static final String TAG = BuildConfig.LOGCAT_TAG; + private static final int TEXT_TRIM_SIZE = 4096; + private static final int RGB_MAX_CHANNEL_VALUE = 262143; + + private static final int REQUEST_CODE_CAMERA_PERMISSION = 200; + private static final String[] PERMISSIONS = {Manifest.permission.CAMERA}; + + static { + if (!NativeLoader.isInitialized()) { + NativeLoader.init(new SystemDelegate()); + } + NativeLoader.loadLibrary("pytorch_jni"); + NativeLoader.loadLibrary("torchvision_ops"); + } + + private Bitmap mInputTensorBitmap; + private Bitmap mBitmap; + private Canvas mCanvas; + + private long mLastAnalysisResultTime; + + protected HandlerThread mBackgroundThread; + protected Handler mBackgroundHandler; + protected Handler mUIHandler; + + private TextView mTextView; + private ImageView mCameraOverlay; + private StringBuilder mTextViewStringBuilder = new StringBuilder(); + + private Paint mBboxPaint; + + @Override + protected void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + setContentView(R.layout.activity_camera); + mTextView = findViewById(R.id.text); + mCameraOverlay = findViewById(R.id.camera_overlay); + mUIHandler = new Handler(getMainLooper()); + startBackgroundThread(); + + if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) + != PackageManager.PERMISSION_GRANTED) { + ActivityCompat.requestPermissions(this, PERMISSIONS, REQUEST_CODE_CAMERA_PERMISSION); + } else { + setupCameraX(); + } + mBboxPaint = new Paint(); + mBboxPaint.setAntiAlias(true); + mBboxPaint.setDither(true); + mBboxPaint.setColor(Color.GREEN); + } + + @Override + protected void onPostCreate(@Nullable Bundle savedInstanceState) { + super.onPostCreate(savedInstanceState); + startBackgroundThread(); + } + + protected void startBackgroundThread() { + mBackgroundThread = new HandlerThread("ModuleActivity"); + mBackgroundThread.start(); + mBackgroundHandler = new Handler(mBackgroundThread.getLooper()); + } + + @Override + protected void onDestroy() { + stopBackgroundThread(); + super.onDestroy(); + } + + protected void stopBackgroundThread() { + mBackgroundThread.quitSafely(); + try { + mBackgroundThread.join(); + mBackgroundThread = null; + mBackgroundHandler = null; + } catch (InterruptedException e) { + Log.e(TAG, "Error on stopping background thread", e); + } + } + + @Override + public void onRequestPermissionsResult( + int requestCode, String[] permissions, int[] grantResults) { + if (requestCode == REQUEST_CODE_CAMERA_PERMISSION) { + if (grantResults[0] == PackageManager.PERMISSION_DENIED) { + Toast.makeText( + this, + "You can't use image classification example without granting CAMERA permission", + Toast.LENGTH_LONG) + .show(); + finish(); + } else { + setupCameraX(); + } + } + } + + private void setupCameraX() { + final TextureView textureView = + ((ViewStub) findViewById(R.id.camera_texture_view_stub)) + .inflate() + .findViewById(R.id.texture_view); + final PreviewConfig previewConfig = new PreviewConfig.Builder().build(); + final Preview preview = new Preview(previewConfig); + preview.setOnPreviewOutputUpdateListener( + new Preview.OnPreviewOutputUpdateListener() { + @Override + public void onUpdated(Preview.PreviewOutput output) { + textureView.setSurfaceTexture(output.getSurfaceTexture()); + } + }); + + final DisplayMetrics displayMetrics = new DisplayMetrics(); + getWindowManager().getDefaultDisplay().getMetrics(displayMetrics); + + final ImageAnalysisConfig 
imageAnalysisConfig = + new ImageAnalysisConfig.Builder() + .setTargetResolution(new Size(displayMetrics.widthPixels, displayMetrics.heightPixels)) + .setCallbackHandler(mBackgroundHandler) + .setImageReaderMode(ImageAnalysis.ImageReaderMode.ACQUIRE_LATEST_IMAGE) + .build(); + final ImageAnalysis imageAnalysis = new ImageAnalysis(imageAnalysisConfig); + imageAnalysis.setAnalyzer( + new ImageAnalysis.Analyzer() { + @Override + public void analyze(ImageProxy image, int rotationDegrees) { + if (SystemClock.elapsedRealtime() - mLastAnalysisResultTime < 500) { + return; + } + + final Result result = CameraActivity.this.analyzeImage(image, rotationDegrees); + + if (result != null) { + mLastAnalysisResultTime = SystemClock.elapsedRealtime(); + CameraActivity.this.runOnUiThread( + new Runnable() { + @Override + public void run() { + CameraActivity.this.handleResult(result); + } + }); + } + } + }); + + CameraX.bindToLifecycle(this, preview, imageAnalysis); + } + + private Module mModule; + private FloatBuffer mInputTensorBuffer; + private Tensor mInputTensor; + + private static int clamp0255(int x) { + if (x > 255) { + return 255; + } + return x < 0 ? 0 : x; + } + + protected void fillInputTensorBuffer( + ImageProxy image, int rotationDegrees, FloatBuffer inputTensorBuffer) { + + if (mInputTensorBitmap == null) { + final int tensorSize = Math.min(image.getWidth(), image.getHeight()); + mInputTensorBitmap = Bitmap.createBitmap(tensorSize, tensorSize, Bitmap.Config.ARGB_8888); + } + + ImageProxy.PlaneProxy[] planes = image.getPlanes(); + ImageProxy.PlaneProxy Y = planes[0]; + ImageProxy.PlaneProxy U = planes[1]; + ImageProxy.PlaneProxy V = planes[2]; + ByteBuffer yBuffer = Y.getBuffer(); + ByteBuffer uBuffer = U.getBuffer(); + ByteBuffer vBuffer = V.getBuffer(); + final int imageWidth = image.getWidth(); + final int imageHeight = image.getHeight(); + final int tensorSize = Math.min(imageWidth, imageHeight); + + int widthAfterRtn = imageWidth; + int heightAfterRtn = imageHeight; + boolean oddRotation = rotationDegrees == 90 || rotationDegrees == 270; + if (oddRotation) { + widthAfterRtn = imageHeight; + heightAfterRtn = imageWidth; + } + + int minSizeAfterRtn = Math.min(heightAfterRtn, widthAfterRtn); + int cropWidthAfterRtn = minSizeAfterRtn; + int cropHeightAfterRtn = minSizeAfterRtn; + + int cropWidthBeforeRtn = cropWidthAfterRtn; + int cropHeightBeforeRtn = cropHeightAfterRtn; + if (oddRotation) { + cropWidthBeforeRtn = cropHeightAfterRtn; + cropHeightBeforeRtn = cropWidthAfterRtn; + } + + int offsetX = (int) ((imageWidth - cropWidthBeforeRtn) / 2.f); + int offsetY = (int) ((imageHeight - cropHeightBeforeRtn) / 2.f); + + int yRowStride = Y.getRowStride(); + int yPixelStride = Y.getPixelStride(); + int uvRowStride = U.getRowStride(); + int uvPixelStride = U.getPixelStride(); + + float scale = cropWidthAfterRtn / tensorSize; + int yIdx, uvIdx, yi, ui, vi; + final int channelSize = tensorSize * tensorSize; + for (int y = 0; y < tensorSize; y++) { + for (int x = 0; x < tensorSize; x++) { + final int centerCropX = (int) Math.floor(x * scale); + final int centerCropY = (int) Math.floor(y * scale); + int srcX = centerCropX + offsetX; + int srcY = centerCropY + offsetY; + + if (rotationDegrees == 90) { + srcX = offsetX + centerCropY; + srcY = offsetY + (minSizeAfterRtn - 1) - centerCropX; + } else if (rotationDegrees == 180) { + srcX = offsetX + (minSizeAfterRtn - 1) - centerCropX; + srcY = offsetY + (minSizeAfterRtn - 1) - centerCropY; + } else if (rotationDegrees == 270) { + srcX = offsetX + 
(minSizeAfterRtn - 1) - centerCropY; + srcY = offsetY + centerCropX; + } + + yIdx = srcY * yRowStride + srcX * yPixelStride; + uvIdx = (srcY >> 1) * uvRowStride + (srcX >> 1) * uvPixelStride; + + yi = yBuffer.get(yIdx) & 0xff; + ui = uBuffer.get(uvIdx) & 0xff; + vi = vBuffer.get(uvIdx) & 0xff; + + yi = (yi - 16) < 0 ? 0 : (yi - 16); + ui -= 128; + vi -= 128; + + int a0 = 1192 * yi; + int ri = (a0 + 1634 * vi); + int gi = (a0 - 833 * vi - 400 * ui); + int bi = (a0 + 2066 * ui); + + ri = ri > RGB_MAX_CHANNEL_VALUE ? RGB_MAX_CHANNEL_VALUE : (ri < 0 ? 0 : ri); + gi = gi > RGB_MAX_CHANNEL_VALUE ? RGB_MAX_CHANNEL_VALUE : (gi < 0 ? 0 : gi); + bi = bi > RGB_MAX_CHANNEL_VALUE ? RGB_MAX_CHANNEL_VALUE : (bi < 0 ? 0 : bi); + + final int color = + 0xff000000 | ((ri << 6) & 0xff0000) | ((gi >> 2) & 0xff00) | ((bi >> 10) & 0xff); + mInputTensorBitmap.setPixel(x, y, color); + inputTensorBuffer.put(0 * channelSize + y * tensorSize + x, clamp0255(ri >> 10) / 255.f); + inputTensorBuffer.put(1 * channelSize + y * tensorSize + x, clamp0255(gi >> 10) / 255.f); + inputTensorBuffer.put(2 * channelSize + y * tensorSize + x, clamp0255(bi >> 10) / 255.f); + } + } + }
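The loop above converts YUV_420_888 camera frames to RGB using fixed-point integer arithmetic: the constants 1192, 1634, 833, 400 and 2066 are the BT.601 conversion coefficients scaled by 2^10, RGB_MAX_CHANNEL_VALUE is (256 << 10) - 1 = 262143, and a final >> 10 recovers an 8-bit channel. A minimal Python sketch of the same per-pixel math (illustration only, not part of this diff):

```python
# Per-pixel fixed-point YUV -> RGB, mirroring the Java loop above.
RGB_MAX_CHANNEL_VALUE = (256 << 10) - 1  # 262143, i.e. 255 after >> 10

def yuv_to_rgb(y: int, u: int, v: int) -> tuple:
    yi = max(y - 16, 0)           # remove the luma offset
    ui, vi = u - 128, v - 128     # center the chroma channels
    a0 = 1192 * yi                # ~1.164 scaled by 2**10
    r = min(max(a0 + 1634 * vi, 0), RGB_MAX_CHANNEL_VALUE)
    g = min(max(a0 - 833 * vi - 400 * ui, 0), RGB_MAX_CHANNEL_VALUE)
    b = min(max(a0 + 2066 * ui, 0), RGB_MAX_CHANNEL_VALUE)
    return r >> 10, g >> 10, b >> 10  # back to 8-bit channels

print(yuv_to_rgb(235, 128, 128))  # nominal white -> (254, 254, 254) with these integer coefficients
```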
+ + public static String assetFilePath(Context context, String assetName) { + File file = new File(context.getFilesDir(), assetName); + if (file.exists() && file.length() > 0) { + return file.getAbsolutePath(); + } + + try (InputStream is = context.getAssets().open(assetName)) { + try (OutputStream os = new FileOutputStream(file)) { + byte[] buffer = new byte[4 * 1024]; + int read; + while ((read = is.read(buffer)) != -1) { + os.write(buffer, 0, read); + } + os.flush(); + } + return file.getAbsolutePath(); + } catch (IOException e) { + Log.e(TAG, "Error copying asset " + assetName + " to file path", e); + } + return null; + } + + @WorkerThread + @Nullable + protected Result analyzeImage(ImageProxy image, int rotationDegrees) { + Log.i(TAG, String.format("analyzeImage(%s, %d)", image, rotationDegrees)); + final int tensorSize = Math.min(image.getWidth(), image.getHeight()); + if (mModule == null) { + Log.i(TAG, "Loading module from asset '" + BuildConfig.MODULE_ASSET_NAME + "'"); + mInputTensorBuffer = Tensor.allocateFloatBuffer(3 * tensorSize * tensorSize); + mInputTensor = Tensor.fromBlob(mInputTensorBuffer, new long[] {3, tensorSize, tensorSize}); + final String modelFileAbsoluteFilePath = + new File(assetFilePath(this, BuildConfig.MODULE_ASSET_NAME)).getAbsolutePath(); + mModule = Module.load(modelFileAbsoluteFilePath); + } + + final long startTime = SystemClock.elapsedRealtime(); + fillInputTensorBuffer(image, rotationDegrees, mInputTensorBuffer); + + final long moduleForwardStartTime = SystemClock.elapsedRealtime(); + final IValue outputTuple = mModule.forward(IValue.listFrom(mInputTensor)); + final IValue out1 = outputTuple.toTuple()[1]; + final Map map = out1.toList()[0].toDictStringKey(); + + float[] boxesData = new float[] {}; + float[] scoresData = new float[] {}; + final List bboxes = new ArrayList<>(); + if (map.containsKey("boxes")) { + final Tensor boxesTensor = map.get("boxes").toTensor(); + final Tensor scoresTensor = map.get("scores").toTensor(); + boxesData = boxesTensor.getDataAsFloatArray(); + scoresData = scoresTensor.getDataAsFloatArray(); + final int n = scoresData.length; + for (int i = 0; i < n; i++) { + final BBox bbox = + new BBox( + scoresData[i], + boxesData[4 * i + 0], + boxesData[4 * i + 1], + boxesData[4 * i + 2], + boxesData[4 * i + 3]); + android.util.Log.i(TAG, String.format("Forward result %d: %s", i, bbox)); + bboxes.add(bbox); + } + } else { + android.util.Log.i(TAG, "Forward result empty"); + } + + final long moduleForwardDuration = SystemClock.elapsedRealtime() - moduleForwardStartTime; + final long analysisDuration = SystemClock.elapsedRealtime() - startTime; + return new Result(tensorSize, bboxes, moduleForwardDuration, analysisDuration); + } + + @UiThread + protected void handleResult(Result result) { + final int W = mCameraOverlay.getMeasuredWidth(); + final int H = mCameraOverlay.getMeasuredHeight(); + + final int size = Math.min(W, H); + final int offsetX = (W - size) / 2; + final int offsetY = (H - size) / 2; + + float scaleX = (float) size / result.tensorSize; + float scaleY = (float) size / result.tensorSize; + if (mBitmap == null) { + mBitmap = Bitmap.createBitmap(W, H, Bitmap.Config.ARGB_8888); + mCanvas = new Canvas(mBitmap); + } + + mCanvas.drawBitmap( + mInputTensorBitmap, + new Rect(0, 0, result.tensorSize, result.tensorSize), + new Rect(offsetX, offsetY, offsetX + size, offsetY + size), + null); + + for (final BBox bbox : result.bboxes) { + if (bbox.score < BBOX_SCORE_DRAW_THRESHOLD) { + continue; + } + + float c_x0 = offsetX + scaleX * bbox.x0; + float c_y0 = offsetY + scaleY * bbox.y0; + + float c_x1 = offsetX + scaleX * bbox.x1; + float c_y1 = offsetY + scaleY * bbox.y1; + + mCanvas.drawLine(c_x0, c_y0, c_x1, c_y0, mBboxPaint); + mCanvas.drawLine(c_x1, c_y0, c_x1, c_y1, mBboxPaint); + mCanvas.drawLine(c_x1, c_y1, c_x0, c_y1, mBboxPaint); + mCanvas.drawLine(c_x0, c_y1, c_x0, c_y0, mBboxPaint); + mCanvas.drawText(String.format("%.2f", bbox.score), c_x0, c_y0, mBboxPaint); + } + mCameraOverlay.setImageBitmap(mBitmap); + + String message = String.format("forwardDuration:%d", result.moduleForwardDuration); + Log.i(TAG, message); + mTextViewStringBuilder.insert(0, '\n').insert(0, message); + if (mTextViewStringBuilder.length() > TEXT_TRIM_SIZE) { + mTextViewStringBuilder.delete(TEXT_TRIM_SIZE, mTextViewStringBuilder.length()); + } + mTextView.setText(mTextViewStringBuilder.toString()); + } +}
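For reference, here is what the unpacking in analyzeImage corresponds to on the Python side. A TorchScript-ed torchvision detection model returns a (losses, detections) tuple, hence toTuple()[1], and detections holds one dict per input image, hence toList()[0]; the boxes tensor flattens to four floats per detection, which is what the 4 * i indexing above walks. A hedged sketch, illustrative only and not part of this diff (it mirrors the export script in make_assets.py further down):

```python
import torch
from torchvision.models.detection import fasterrcnn_mobilenet_v3_large_320_fpn

model = fasterrcnn_mobilenet_v3_large_320_fpn(weights="DEFAULT").eval()
scripted = torch.jit.script(model)

img = torch.rand(3, 320, 320)             # the app feeds a [3, tensorSize, tensorSize] blob
losses, detections = scripted([img])      # scripted forward -> (losses, detections) tuple
det = detections[0]                       # one dict per image: "boxes", "scores", "labels"
boxes = det["boxes"].flatten().tolist()   # detection i occupies boxes[4*i : 4*i + 4] = [x0, y0, x1, y1]
scores = det["scores"].tolist()           # same ordering as boxesData / scoresData above
```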
diff --git a/android/test_app/app/src/main/java/org/pytorch/testapp/MainActivity.java b/android/test_app/app/src/main/java/org/pytorch/testapp/MainActivity.java new file mode 100644 index 00000000000..a9c13bffa6e --- /dev/null +++ b/android/test_app/app/src/main/java/org/pytorch/testapp/MainActivity.java @@ -0,0 +1,159 @@ +package org.pytorch.testapp; + +import android.os.Bundle; +import android.os.Handler; +import android.os.HandlerThread; +import android.os.SystemClock; +import android.util.Log; +import android.widget.TextView; +import androidx.annotation.Nullable; +import androidx.annotation.UiThread; +import androidx.annotation.WorkerThread; +import androidx.appcompat.app.AppCompatActivity; +import com.facebook.soloader.nativeloader.NativeLoader; +import com.facebook.soloader.nativeloader.SystemDelegate; +import java.nio.FloatBuffer; +import java.util.Map; +import org.pytorch.IValue; +import org.pytorch.Module; +import org.pytorch.PyTorchAndroid; +import org.pytorch.Tensor; + +public class MainActivity extends AppCompatActivity { + static { + if (!NativeLoader.isInitialized()) { + NativeLoader.init(new SystemDelegate()); + } + NativeLoader.loadLibrary("pytorch_jni"); + NativeLoader.loadLibrary("torchvision_ops"); + } + + private static final String TAG = BuildConfig.LOGCAT_TAG; + private static final int TEXT_TRIM_SIZE = 4096; + + private TextView mTextView; + + protected HandlerThread mBackgroundThread; + protected Handler mBackgroundHandler; + private Module mModule; + private FloatBuffer mInputTensorBuffer; + private Tensor mInputTensor; + private StringBuilder mTextViewStringBuilder = new StringBuilder(); + + private final Runnable mModuleForwardRunnable = + new Runnable() { + @Override + public void run() { + final Result result = doModuleForward(); + runOnUiThread( + () -> { + handleResult(result); + if (mBackgroundHandler != null) { + mBackgroundHandler.post(mModuleForwardRunnable); + } + }); + } + }; + + @Override + protected void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + setContentView(R.layout.activity_main); + mTextView = findViewById(R.id.text); + startBackgroundThread(); + mBackgroundHandler.post(mModuleForwardRunnable); + } + + protected void startBackgroundThread() { + mBackgroundThread = new HandlerThread(TAG + "_bg"); + mBackgroundThread.start(); + mBackgroundHandler = new Handler(mBackgroundThread.getLooper()); + } + + @Override + protected void onDestroy() { + stopBackgroundThread(); + super.onDestroy(); + } + + protected void stopBackgroundThread() { + mBackgroundThread.quitSafely(); + try { + mBackgroundThread.join(); + mBackgroundThread = null; + mBackgroundHandler = null; + } catch (InterruptedException e) { + Log.e(TAG, "Error stopping background thread", e); + } + } + + @WorkerThread + @Nullable + protected Result doModuleForward() { + if (mModule == null) { + final long[] shape = BuildConfig.INPUT_TENSOR_SHAPE; + long numElements = 1; + for (int i = 0; i < shape.length; i++) { + numElements *= shape[i]; + } + mInputTensorBuffer = Tensor.allocateFloatBuffer((int) numElements); + mInputTensor = Tensor.fromBlob(mInputTensorBuffer, BuildConfig.INPUT_TENSOR_SHAPE); + PyTorchAndroid.setNumThreads(1); + mModule = PyTorchAndroid.loadModuleFromAsset(getAssets(), BuildConfig.MODULE_ASSET_NAME); + } + + final long startTime = SystemClock.elapsedRealtime(); + final long moduleForwardStartTime = SystemClock.elapsedRealtime(); + final IValue outputTuple = mModule.forward(IValue.listFrom(mInputTensor)); + final IValue[] outputArray = outputTuple.toTuple(); + final IValue out1 = outputArray[1]; + final Map map = out1.toList()[0].toDictStringKey(); + if (map.containsKey("boxes")) { + final Tensor boxes = map.get("boxes").toTensor(); + final Tensor scores = map.get("scores").toTensor(); + final float[] boxesData = boxes.getDataAsFloatArray(); + final float[] scoresData = scores.getDataAsFloatArray(); + final int n = scoresData.length; + for (int i = 0; i < n; i++) { + android.util.Log.i( + TAG, + String.format( + "Forward result %d: score %f box:(%f, %f, %f, %f)", + i, + scoresData[i], + boxesData[4 * i + 0], + boxesData[4 * i + 1], + boxesData[4 * i + 2], + boxesData[4 * i + 3])); + } + } else { + android.util.Log.i(TAG, "Forward result empty"); + } + + final long moduleForwardDuration = SystemClock.elapsedRealtime() - moduleForwardStartTime; + final long analysisDuration = SystemClock.elapsedRealtime() - startTime; + return new Result(new float[] {}, moduleForwardDuration, analysisDuration); + } + + static class Result { + + private final float[] scores; + private final long totalDuration; + private final long moduleForwardDuration; + + public Result(float[] scores, long moduleForwardDuration, long totalDuration) { + this.scores = scores; + this.moduleForwardDuration = moduleForwardDuration; + this.totalDuration = totalDuration; + } + } + + @UiThread + protected void handleResult(Result result) { + String message = String.format("forwardDuration:%d", result.moduleForwardDuration); + mTextViewStringBuilder.insert(0, '\n').insert(0, message); + if (mTextViewStringBuilder.length() > TEXT_TRIM_SIZE) { + mTextViewStringBuilder.delete(TEXT_TRIM_SIZE, mTextViewStringBuilder.length()); + } + mTextView.setText(mTextViewStringBuilder.toString()); + } +}
diff --git a/android/test_app/app/src/main/java/org/pytorch/testapp/Result.java b/android/test_app/app/src/main/java/org/pytorch/testapp/Result.java new file mode 100644 index 00000000000..ed7ebd006cd --- /dev/null +++ b/android/test_app/app/src/main/java/org/pytorch/testapp/Result.java @@ -0,0 +1,17 @@ +package org.pytorch.testapp; + +import java.util.List; + +class Result { + public final int tensorSize; + public final List bboxes; + public final long totalDuration; + public final long moduleForwardDuration; + + public Result(int tensorSize, List bboxes, long moduleForwardDuration, long totalDuration) { + this.tensorSize = tensorSize; + this.bboxes = bboxes; + this.moduleForwardDuration = moduleForwardDuration; + this.totalDuration = totalDuration; + } +} diff --git a/android/test_app/app/src/main/res/layout/activity_camera.xml b/android/test_app/app/src/main/res/layout/activity_camera.xml new file mode 100644 index 00000000000..7ba2e42b7c0 --- /dev/null +++ b/android/test_app/app/src/main/res/layout/activity_camera.xml @@ -0,0 +1,28 @@ + + + + + + + + + diff --git a/android/test_app/app/src/main/res/layout/activity_main.xml b/android/test_app/app/src/main/res/layout/activity_main.xml new file mode 100644 index 00000000000..556839a994c --- /dev/null +++ b/android/test_app/app/src/main/res/layout/activity_main.xml @@ -0,0 +1,17 @@ + + + + + + diff --git a/android/test_app/app/src/main/res/layout/texture_view.xml b/android/test_app/app/src/main/res/layout/texture_view.xml new file mode 100644 index 00000000000..6518c6c84c6 --- /dev/null +++ b/android/test_app/app/src/main/res/layout/texture_view.xml @@ -0,0 +1,5 @@ + + diff --git a/android/test_app/app/src/main/res/mipmap-mdpi/ic_launcher.png b/android/test_app/app/src/main/res/mipmap-mdpi/ic_launcher.png new file mode 100644 index 00000000000..64ba76f75e9 Binary files /dev/null and b/android/test_app/app/src/main/res/mipmap-mdpi/ic_launcher.png differ diff --git a/android/test_app/app/src/main/res/mipmap-mdpi/ic_launcher_round.png b/android/test_app/app/src/main/res/mipmap-mdpi/ic_launcher_round.png new file mode 100644 index 00000000000..dae5e082342 Binary files /dev/null and b/android/test_app/app/src/main/res/mipmap-mdpi/ic_launcher_round.png differ diff --git a/android/test_app/app/src/main/res/values/colors.xml b/android/test_app/app/src/main/res/values/colors.xml new file mode 100644 index 00000000000..69b22338c65 --- /dev/null +++ b/android/test_app/app/src/main/res/values/colors.xml @@ -0,0 +1,6 @@ +<?xml version="1.0" encoding="utf-8"?> +<resources> + <color name="colorPrimary">#008577</color> + <color name="colorPrimaryDark">#00574B</color> + <color name="colorAccent">#D81B60</color> +</resources> diff --git a/android/test_app/app/src/main/res/values/strings.xml b/android/test_app/app/src/main/res/values/strings.xml new file mode 100644 index 00000000000..cafbaad1511 --- /dev/null +++ b/android/test_app/app/src/main/res/values/strings.xml @@ -0,0 +1,3 @@ +<resources> + <string name="app_name">TV_FRCNN</string> +</resources> diff --git a/android/test_app/app/src/main/res/values/styles.xml b/android/test_app/app/src/main/res/values/styles.xml new file mode 100644 index 00000000000..5885930df6d --- /dev/null +++ b/android/test_app/app/src/main/res/values/styles.xml @@ -0,0 +1,11 @@ + + + + + + diff --git a/android/test_app/make_assets.py b/android/test_app/make_assets.py new file mode 100644 index 00000000000..f99933e9a9d --- /dev/null +++ b/android/test_app/make_assets.py @@ -0,0 +1,21 @@ +import torch +from torch.utils.mobile_optimizer
import optimize_for_mobile +from torchvision.models.detection import ( + fasterrcnn_mobilenet_v3_large_320_fpn, + FasterRCNN_MobileNet_V3_Large_320_FPN_Weights, +) + +print(torch.__version__) + +model = fasterrcnn_mobilenet_v3_large_320_fpn( + weights=FasterRCNN_MobileNet_V3_Large_320_FPN_Weights.DEFAULT, + box_score_thresh=0.7, + rpn_post_nms_top_n_test=100, + rpn_score_thresh=0.4, + rpn_pre_nms_top_n_test=150, +) + +model.eval() +script_model = torch.jit.script(model) +opt_script_model = optimize_for_mobile(script_model) +opt_script_model.save("app/src/main/assets/frcnn_mnetv3.pt") diff --git a/benchmarks/encoding_decoding.py b/benchmarks/encoding_decoding.py new file mode 100644 index 00000000000..0cafdb2d8a6 --- /dev/null +++ b/benchmarks/encoding_decoding.py @@ -0,0 +1,99 @@ +import os +import platform +import statistics + +import torch +import torch.utils.benchmark as benchmark +import torchvision + + +def print_machine_specs(): + print("Processor:", platform.processor()) + print("Platform:", platform.platform()) + print("Logical CPUs:", os.cpu_count()) + print(f"\nCUDA device: {torch.cuda.get_device_name()}") + print(f"Total Memory: {torch.cuda.get_device_properties(0).total_memory / 1e9:.2f} GB") + + +def get_data(): + transform = torchvision.transforms.Compose( + [ + torchvision.transforms.PILToTensor(), + ] + ) + path = os.path.join(os.getcwd(), "data") + testset = torchvision.datasets.Places365( + root="./data", download=not os.path.exists(path), transform=transform, split="val" + ) + testloader = torch.utils.data.DataLoader( + testset, batch_size=1000, shuffle=False, num_workers=1, collate_fn=lambda batch: [r[0] for r in batch] + ) + return next(iter(testloader)) + + +def run_encoding_benchmark(decoded_images): + results = [] + for device in ["cpu", "cuda"]: + decoded_images_device = [t.to(device=device) for t in decoded_images] + for size in [1, 100, 1000]: + for num_threads in [1, 12, 24]: + for stmt, strat in zip( + [ + "[torchvision.io.encode_jpeg(img) for img in decoded_images_device_trunc]", + "torchvision.io.encode_jpeg(decoded_images_device_trunc)", + ], + ["unfused", "fused"], + ): + decoded_images_device_trunc = decoded_images_device[:size] + t = benchmark.Timer( + stmt=stmt, + setup="import torchvision", + globals={"decoded_images_device_trunc": decoded_images_device_trunc}, + label="Image Encoding", + sub_label=f"{device.upper()} ({strat}): {stmt}", + description=f"{size} images", + num_threads=num_threads, + ) + results.append(t.blocked_autorange()) + compare = benchmark.Compare(results) + compare.print() + + +def run_decoding_benchmark(encoded_images): + results = [] + for device in ["cpu", "cuda"]: + for size in [1, 100, 1000]: + for num_threads in [1, 12, 24]: + for stmt, strat in zip( + [ + f"[torchvision.io.decode_jpeg(img, device='{device}') for img in encoded_images_trunc]", + f"torchvision.io.decode_jpeg(encoded_images_trunc, device='{device}')", + ], + ["unfused", "fused"], + ): + encoded_images_trunc = encoded_images[:size] + t = benchmark.Timer( + stmt=stmt, + setup="import torchvision", + globals={"encoded_images_trunc": encoded_images_trunc}, + label="Image Decoding", + sub_label=f"{device.upper()} ({strat}): {stmt}", + description=f"{size} images", + num_threads=num_threads, + ) + results.append(t.blocked_autorange()) + compare = benchmark.Compare(results) + compare.print() + + +if __name__ == "__main__": + print_machine_specs() + decoded_images = get_data() + mean_h, mean_w = statistics.mean(t.shape[-2] for t in decoded_images), statistics.mean( + 
t.shape[-1] for t in decoded_images + ) + print(f"\nMean image size: {int(mean_h)}x{int(mean_w)}") + run_encoding_benchmark(decoded_images) + encoded_images_cuda = torchvision.io.encode_jpeg([img.cuda() for img in decoded_images]) + encoded_images_cpu = [img.cpu() for img in encoded_images_cuda] + run_decoding_benchmark(encoded_images_cpu) diff --git a/cmake/TorchVisionConfig.cmake.in b/cmake/TorchVisionConfig.cmake.in new file mode 100644 index 00000000000..7f7e78817fa --- /dev/null +++ b/cmake/TorchVisionConfig.cmake.in @@ -0,0 +1,50 @@ +# TorchVisionConfig.cmake +# -------------------- +# +# Exported targets:: Vision +# + +@PACKAGE_INIT@ + +set(PN TorchVision) + +# location of include/torchvision +set(${PN}_INCLUDE_DIR "${PACKAGE_PREFIX_DIR}/@CMAKE_INSTALL_INCLUDEDIR@") + +set(${PN}_LIBRARY "") +set(${PN}_DEFINITIONS USING_${PN}) + +check_required_components(${PN}) + + +if(NOT (CMAKE_VERSION VERSION_LESS 3.0)) +#----------------------------------------------------------------------------- +# Don't include targets if this file is being picked up by another +# project which has already built this as a subproject +#----------------------------------------------------------------------------- +if(NOT TARGET ${PN}::${PN}) +include("${CMAKE_CURRENT_LIST_DIR}/${PN}Targets.cmake") + +target_include_directories(${PN}::${PN} INTERFACE "${${PN}_INCLUDE_DIR}") + +if(@WITH_CUDA@) + target_compile_definitions(${PN}::${PN} INTERFACE WITH_CUDA) +endif() + +find_package(Torch REQUIRED) +target_link_libraries(${PN}::${PN} INTERFACE torch) + +if(@WITH_PNG@) + find_package(PNG REQUIRED) + target_link_libraries(${PN}::${PN} INTERFACE ${PNG_LIBRARY}) + target_compile_definitions(${PN}::${PN} INTERFACE PNG_FOUND) +endif() + +if(@WITH_JPEG@) + find_package(JPEG REQUIRED) + target_link_libraries(${PN}::${PN} INTERFACE ${JPEG_LIBRARIES}) + target_compile_definitions(${PN}::${PN} INTERFACE JPEG_FOUND) +endif() + +endif() +endif() diff --git a/cmake/iOS.cmake b/cmake/iOS.cmake new file mode 100644 index 00000000000..935c57f11b9 --- /dev/null +++ b/cmake/iOS.cmake @@ -0,0 +1,207 @@ +# This file is based off of the Platform/Darwin.cmake and Platform/UnixPaths.cmake +# files which are included with CMake 2.8.4 +# It has been altered for iOS development + +# Options: +# +# IOS_PLATFORM = OS (default) or SIMULATOR +# This decides if SDKS will be selected from the iPhoneOS.platform or iPhoneSimulator.platform folders +# OS - the default, used to build for iPhone and iPad physical devices, which have an arm arch. +# SIMULATOR - used to build for the Simulator platforms, which have an x86 arch. +# +# CMAKE_IOS_DEVELOPER_ROOT = automatic(default) or /path/to/platform/Developer folder +# By default this location is automatically chosen based on the IOS_PLATFORM value above. +# If set manually, it will override the default location and force the use of a particular Developer Platform +# +# CMAKE_IOS_SDK_ROOT = automatic(default) or /path/to/platform/Developer/SDKs/SDK folder +# By default this location is automatically chosen based on the CMAKE_IOS_DEVELOPER_ROOT value. +# In this case it will always be the most up-to-date SDK found in the CMAKE_IOS_DEVELOPER_ROOT path.
+# If set manually, this will force the use of a specific SDK version + +# Macros: +# +# set_xcode_property (TARGET XCODE_PROPERTY XCODE_VALUE) +# A convenience macro for setting xcode specific properties on targets +# example: set_xcode_property (myioslib IPHONEOS_DEPLOYMENT_TARGET "3.1") +# +# find_host_package (PROGRAM ARGS) +# A macro used to find executable programs on the host system, not within the iOS environment. +# Thanks to the android-cmake project for providing the command + +# Standard settings +set(CMAKE_SYSTEM_NAME Darwin) +set(CMAKE_SYSTEM_VERSION 1) +set(UNIX True) +set(APPLE True) +set(IOS True) + +# Required as of cmake 2.8.10 +set(CMAKE_OSX_DEPLOYMENT_TARGET "" CACHE STRING "Force unset of the deployment target for iOS" FORCE) + +# Determine the cmake host system version so we know where to find the iOS SDKs +find_program(CMAKE_UNAME uname /bin /usr/bin /usr/local/bin) +if(CMAKE_UNAME) + exec_program(uname ARGS -r OUTPUT_VARIABLE CMAKE_HOST_SYSTEM_VERSION) + string(REGEX REPLACE "^([0-9]+)\\.([0-9]+).*$" "\\1" DARWIN_MAJOR_VERSION "${CMAKE_HOST_SYSTEM_VERSION}") +endif(CMAKE_UNAME) + +# Force the compilers to gcc for iOS +set(CMAKE_C_COMPILER /usr/bin/gcc CACHE STRING "") +set(CMAKE_CXX_COMPILER /usr/bin/g++ CACHE STRING "") +set(CMAKE_AR ar CACHE FILEPATH "" FORCE) +set(CMAKE_RANLIB ranlib CACHE FILEPATH "" FORCE) +set(PKG_CONFIG_EXECUTABLE pkg-config CACHE FILEPATH "" FORCE) + +# Setup iOS platform unless specified manually with IOS_PLATFORM +if(NOT DEFINED IOS_PLATFORM) + set(IOS_PLATFORM "OS") +endif(NOT DEFINED IOS_PLATFORM) +set(IOS_PLATFORM ${IOS_PLATFORM} CACHE STRING "Type of iOS Platform") + +# Check the platform selection and setup for developer root +if(${IOS_PLATFORM} STREQUAL "OS") + set(IOS_PLATFORM_LOCATION "iPhoneOS.platform") + set(XCODE_IOS_PLATFORM iphoneos) + + # This causes the installers to properly locate the output libraries + set(CMAKE_XCODE_EFFECTIVE_PLATFORMS "-iphoneos") +elseif(${IOS_PLATFORM} STREQUAL "SIMULATOR") + set(IOS_PLATFORM_LOCATION "iPhoneSimulator.platform") + set(XCODE_IOS_PLATFORM iphonesimulator) + + # This causes the installers to properly locate the output libraries + set(CMAKE_XCODE_EFFECTIVE_PLATFORMS "-iphonesimulator") +elseif(${IOS_PLATFORM} STREQUAL "WATCHOS") + set(IOS_PLATFORM_LOCATION "WatchOS.platform") + set(XCODE_IOS_PLATFORM watchos) + + # This causes the installers to properly locate the output libraries + set(CMAKE_XCODE_EFFECTIVE_PLATFORMS "-watchos") +else(${IOS_PLATFORM} STREQUAL "OS") + message(FATAL_ERROR + "Unsupported IOS_PLATFORM value selected. 
" + "Please choose OS, SIMULATOR, or WATCHOS.") +endif() + +# All iOS/Darwin specific settings - some may be redundant +set(CMAKE_SHARED_LIBRARY_PREFIX "lib") +set(CMAKE_SHARED_LIBRARY_SUFFIX ".dylib") +set(CMAKE_SHARED_MODULE_PREFIX "lib") +set(CMAKE_SHARED_MODULE_SUFFIX ".so") +set(CMAKE_MODULE_EXISTS 1) +set(CMAKE_DL_LIBS "") + +set(CMAKE_C_OSX_COMPATIBILITY_VERSION_FLAG "-compatibility_version ") +set(CMAKE_C_OSX_CURRENT_VERSION_FLAG "-current_version ") +set(CMAKE_CXX_OSX_COMPATIBILITY_VERSION_FLAG "${CMAKE_C_OSX_COMPATIBILITY_VERSION_FLAG}") +set(CMAKE_CXX_OSX_CURRENT_VERSION_FLAG "${CMAKE_C_OSX_CURRENT_VERSION_FLAG}") + +if(IOS_DEPLOYMENT_TARGET) + set(XCODE_IOS_PLATFORM_VERSION_FLAGS "-m${XCODE_IOS_PLATFORM}-version-min=${IOS_DEPLOYMENT_TARGET}") +endif() + +# Hidden visibility is required for cxx on iOS +set(CMAKE_C_FLAGS_INIT "${XCODE_IOS_PLATFORM_VERSION_FLAGS}") +set(CMAKE_CXX_FLAGS_INIT "${XCODE_IOS_PLATFORM_VERSION_FLAGS} -fvisibility-inlines-hidden") + +set(CMAKE_C_LINK_FLAGS "${XCODE_IOS_PLATFORM_VERSION_FLAGS} -Wl,-search_paths_first ${CMAKE_C_LINK_FLAGS}") +set(CMAKE_CXX_LINK_FLAGS "${XCODE_IOS_PLATFORM_VERSION_FLAGS} -Wl,-search_paths_first ${CMAKE_CXX_LINK_FLAGS}") + +set(CMAKE_PLATFORM_HAS_INSTALLNAME 1) +set(CMAKE_SHARED_LIBRARY_CREATE_C_FLAGS "-dynamiclib -headerpad_max_install_names") +set(CMAKE_SHARED_MODULE_CREATE_C_FLAGS "-bundle -headerpad_max_install_names") +set(CMAKE_SHARED_MODULE_LOADER_C_FLAG "-Wl,-bundle_loader,") +set(CMAKE_SHARED_MODULE_LOADER_CXX_FLAG "-Wl,-bundle_loader,") +set(CMAKE_FIND_LIBRARY_SUFFIXES ".dylib" ".so" ".a") + +# hack: if a new cmake (which uses CMAKE_INSTALL_NAME_TOOL) runs on an old build tree +# (where install_name_tool was hardcoded) and where CMAKE_INSTALL_NAME_TOOL isn't in the cache +# and still cmake didn't fail in CMakeFindBinUtils.cmake (because it isn't rerun) +# hardcode CMAKE_INSTALL_NAME_TOOL here to install_name_tool, so it behaves as it did before, Alex +if(NOT DEFINED CMAKE_INSTALL_NAME_TOOL) + find_program(CMAKE_INSTALL_NAME_TOOL install_name_tool) +endif(NOT DEFINED CMAKE_INSTALL_NAME_TOOL) + +# Setup iOS deployment target +set(IOS_DEPLOYMENT_TARGET ${IOS_DEPLOYMENT_TARGET} CACHE STRING "Minimum iOS version") + +# Setup iOS developer location unless specified manually with CMAKE_IOS_DEVELOPER_ROOT +# Note Xcode 4.3 changed the installation location, choose the most recent one available +exec_program(/usr/bin/xcode-select ARGS -print-path OUTPUT_VARIABLE CMAKE_XCODE_DEVELOPER_DIR) +set(XCODE_POST_43_ROOT "${CMAKE_XCODE_DEVELOPER_DIR}/Platforms/${IOS_PLATFORM_LOCATION}/Developer") +set(XCODE_PRE_43_ROOT "/Developer/Platforms/${IOS_PLATFORM_LOCATION}/Developer") +if(NOT DEFINED CMAKE_IOS_DEVELOPER_ROOT) + if(EXISTS ${XCODE_POST_43_ROOT}) + set(CMAKE_IOS_DEVELOPER_ROOT ${XCODE_POST_43_ROOT}) + elseif(EXISTS ${XCODE_PRE_43_ROOT}) + set(CMAKE_IOS_DEVELOPER_ROOT ${XCODE_PRE_43_ROOT}) + endif(EXISTS ${XCODE_POST_43_ROOT}) +endif(NOT DEFINED CMAKE_IOS_DEVELOPER_ROOT) +set(CMAKE_IOS_DEVELOPER_ROOT ${CMAKE_IOS_DEVELOPER_ROOT} CACHE PATH "Location of iOS Platform") + +# Find and use the most recent iOS sdk unless specified manually with CMAKE_IOS_SDK_ROOT +if(NOT DEFINED CMAKE_IOS_SDK_ROOT) + file(GLOB _CMAKE_IOS_SDKS "${CMAKE_IOS_DEVELOPER_ROOT}/SDKs/*") + if(_CMAKE_IOS_SDKS) + list(SORT _CMAKE_IOS_SDKS) + list(REVERSE _CMAKE_IOS_SDKS) + list(GET _CMAKE_IOS_SDKS 0 CMAKE_IOS_SDK_ROOT) + else(_CMAKE_IOS_SDKS) + message(FATAL_ERROR "No iOS SDK's found in default search path ${CMAKE_IOS_DEVELOPER_ROOT}. 
Manually set CMAKE_IOS_SDK_ROOT or install the iOS SDK.") + endif(_CMAKE_IOS_SDKS) + message(STATUS "Toolchain using default iOS SDK: ${CMAKE_IOS_SDK_ROOT}") +endif(NOT DEFINED CMAKE_IOS_SDK_ROOT) +set(CMAKE_IOS_SDK_ROOT ${CMAKE_IOS_SDK_ROOT} CACHE PATH "Location of the selected iOS SDK") + +# Set the sysroot default to the most recent SDK +set(CMAKE_OSX_SYSROOT ${CMAKE_IOS_SDK_ROOT} CACHE PATH "Sysroot used for iOS support") + +# set the architecture for iOS +if(IOS_PLATFORM STREQUAL "OS") + set(DEFAULT_IOS_ARCH "arm64") +elseif(IOS_PLATFORM STREQUAL "SIMULATOR") + set(DEFAULT_IOS_ARCH "x86_64") +elseif(IOS_PLATFORM STREQUAL "WATCHOS") + set(DEFAULT_IOS_ARCH "armv7k;arm64_32") +endif() + +set(IOS_ARCH ${DEFAULT_IOS_ARCH} CACHE STRING "Build architecture for iOS") +set(CMAKE_OSX_ARCHITECTURES ${IOS_ARCH} CACHE STRING "Build architecture for iOS") + +# Set the find root to the iOS developer roots and to user defined paths +set(CMAKE_FIND_ROOT_PATH ${CMAKE_IOS_DEVELOPER_ROOT} ${CMAKE_IOS_SDK_ROOT} ${CMAKE_PREFIX_PATH} CACHE STRING "iOS find search path root") + +# default to searching for frameworks first +set(CMAKE_FIND_FRAMEWORK FIRST) + +# set up the default search directories for frameworks +set(CMAKE_SYSTEM_FRAMEWORK_PATH + ${CMAKE_IOS_SDK_ROOT}/System/Library/Frameworks + ${CMAKE_IOS_SDK_ROOT}/System/Library/PrivateFrameworks + ${CMAKE_IOS_SDK_ROOT}/Developer/Library/Frameworks +) + +# only search the iOS sdks, not the remainder of the host filesystem +set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM ONLY) +set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) +set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) + +# This little macro lets you set any XCode specific property +macro(set_xcode_property TARGET XCODE_PROPERTY XCODE_VALUE) + set_property(TARGET ${TARGET} PROPERTY XCODE_ATTRIBUTE_${XCODE_PROPERTY} ${XCODE_VALUE}) +endmacro(set_xcode_property) + +# This macro lets you find executable programs on the host system +macro(find_host_package) + set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) + set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY NEVER) + set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE NEVER) + set(IOS FALSE) + + find_package(${ARGN}) + + set(IOS TRUE) + set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM ONLY) + set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) + set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) +endmacro(find_host_package) diff --git a/docs/Makefile b/docs/Makefile index 2ca4b0d71a2..f462ff22303 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -1,8 +1,12 @@ # Minimal makefile for Sphinx documentation # +ifneq ($(EXAMPLES_PATTERN),) + EXAMPLES_PATTERN_OPTS := -D sphinx_gallery_conf.filename_pattern="$(EXAMPLES_PATTERN)" +endif + # You can set these variables from the command line. -SPHINXOPTS = +SPHINXOPTS = -W -j auto $(EXAMPLES_PATTERN_OPTS) SPHINXBUILD = sphinx-build SPHINXPROJ = torchvision SOURCEDIR = source @@ -19,6 +23,18 @@ docset: html cp $(SPHINXPROJ).docset/icon.png $(SPHINXPROJ).docset/icon@2x.png convert $(SPHINXPROJ).docset/icon@2x.png -resize 16x16 $(SPHINXPROJ).docset/icon.png +html-noplot: # Avoids running the gallery examples, which may take time + $(SPHINXBUILD) -D plot_gallery=0 -b html "${SOURCEDIR}" "$(BUILDDIR)"/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 
+ +clean: + rm -rf $(BUILDDIR)/* + rm -rf $(SOURCEDIR)/auto_examples/ # sphinx-gallery + rm -rf $(SOURCEDIR)/gen_modules/ # sphinx-gallery + rm -rf $(SOURCEDIR)/generated/ # autosummary + rm -rf $(SOURCEDIR)/models/generated # autosummary + .PHONY: help Makefile docset # Catch-all target: route all unknown targets to Sphinx using the new diff --git a/docs/requirements.txt b/docs/requirements.txt index 014f642d0eb..2a50d9b8f45 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,3 +1,8 @@ -sphinx==1.7.3 -sphinxcontrib-googleanalytics --e git://github.com/snide/sphinx_rtd_theme.git#egg=sphinx_rtd_theme +matplotlib +numpy +sphinx-copybutton>=0.3.1 +sphinx-gallery>=0.11.1 +sphinx==5.0.0 +tabulate +-e git+https://github.com/pytorch/pytorch_sphinx_theme.git#egg=pytorch_sphinx_theme +pycocotools diff --git a/docs/source/_static/css/custom_torchvision.css b/docs/source/_static/css/custom_torchvision.css new file mode 100644 index 00000000000..07346d7b03f --- /dev/null +++ b/docs/source/_static/css/custom_torchvision.css @@ -0,0 +1,35 @@ +/* This rule should be removed once +https://github.com/pytorch/pytorch_sphinx_theme/issues/125 is fixed. + +We override the rule so that the links to the notebooks aren't hidden in the +gallery examples. pytorch_sphinx_theme is supposed to customize those links so +that they render nicely (look at the nice links on top of the tutorials +examples) but it doesn't work for repos that are not the tutorial repo, and in +torchvision it just hides the links. So we have to put them back here */ +article.pytorch-article .sphx-glr-download-link-note.admonition.note, +article.pytorch-article .reference.download.internal, article.pytorch-article .sphx-glr-signature { + display: block; +} + +/* These 2 rules below are for the weight tables (generated in conf.py) to look + * better. 
In particular we make their row height shorter */ +.table-weights td, .table-weights th { + margin-bottom: 0.2rem; + padding: 0 !important; + line-height: 1 !important; +} +.table-weights p { + margin-bottom: 0.2rem !important; +} + +/* Fix for Sphinx gallery 0.11 +See https://github.com/sphinx-gallery/sphinx-gallery/issues/990 +*/ +article.pytorch-article .sphx-glr-thumbnails .sphx-glr-thumbcontainer { + width: unset; + margin-right: 0; + margin-left: 0; +} +article.pytorch-article div.section div.wy-table-responsive tbody td { + width: 50%; +} diff --git a/docs/source/_static/css/pytorch_theme.css b/docs/source/_static/css/pytorch_theme.css deleted file mode 100644 index 0e54497643c..00000000000 --- a/docs/source/_static/css/pytorch_theme.css +++ /dev/null @@ -1,118 +0,0 @@ -body { - font-family: "Lato","proxima-nova","Helvetica Neue",Arial,sans-serif; -} - -/* Default header fonts are ugly */ -h1, h2, .rst-content .toctree-wrapper p.caption, h3, h4, h5, h6, legend, p.caption { - font-family: "Lato","proxima-nova","Helvetica Neue",Arial,sans-serif; -} - -/* Use white for docs background */ -.wy-side-nav-search { - background-color: #fff; -} - -.wy-nav-content-wrap, .wy-menu li.current > a { - background-color: #fff; -} - -@media screen and (min-width: 1400px) { - .wy-nav-content-wrap { - background-color: rgba(0, 0, 0, 0.0470588); - } - - .wy-nav-content { - background-color: #fff; - } -} - -/* Fixes for mobile */ -.wy-nav-top { - background-color: #fff; - background-image: url('../img/pytorch-logo-dark.svg'); - background-repeat: no-repeat; - background-position: center; - padding: 0; - margin: 0.4045em 0.809em; - color: #333; -} - -.wy-nav-top > a { - display: none; -} - -@media screen and (max-width: 768px) { - .wy-side-nav-search>a img.logo { - height: 60px; - } -} - -/* This is needed to ensure that logo above search scales properly */ -.wy-side-nav-search a { - display: block; -} - -/* This ensures that multiple constructors will remain in separate lines. 
*/ -.rst-content dl:not(.docutils) dt { - display: table; -} - -/* Use our red for literals (it's very similar to the original color) */ -.rst-content tt.literal, .rst-content tt.literal, .rst-content code.literal { - color: #F05732; -} - -.rst-content tt.xref, a .rst-content tt, .rst-content tt.xref, -.rst-content code.xref, a .rst-content tt, a .rst-content code { - color: #404040; -} - -/* Change link colors (except for the menu) */ - -a { - color: #F05732; -} - -a:hover { - color: #F05732; -} - - -a:visited { - color: #D44D2C; -} - -.wy-menu a { - color: #b3b3b3; -} - -.wy-menu a:hover { - color: #b3b3b3; -} - -/* Default footer text is quite big */ -footer { - font-size: 80%; -} - -footer .rst-footer-buttons { - font-size: 125%; /* revert footer settings - 1/80% = 125% */ -} - -footer p { - font-size: 100%; -} - -/* For hidden headers that appear in TOC tree */ -/* see http://stackoverflow.com/a/32363545/3343043 */ -.rst-content .hidden-section { - display: none; -} - -nav .hidden-section { - display: inherit; -} - -.wy-side-nav-search>div.version { - color: #000; -} diff --git a/docs/source/_static/img/pytorch-logo-flame.svg b/docs/source/_static/img/pytorch-logo-flame.svg index 22d7228b4fa..5f2fb76be77 100644 --- a/docs/source/_static/img/pytorch-logo-flame.svg +++ b/docs/source/_static/img/pytorch-logo-flame.svg @@ -30,4 +30,4 @@ style="fill:#9e529f" id="path4698" d="m 24.075479,-7.6293945e-7 c -0.5,0 -1.8,2.49999996293945 -1.8,3.59999996293945 0,1.5 1,2 1.8,2 0.8,0 1.8,-0.5 1.8,-2 -0.1,-1.1 -1.4,-3.59999996293945 -1.8,-3.59999996293945 z" - class="st1" /> \ No newline at end of file + class="st1" /> diff --git a/docs/source/_templates/class.rst b/docs/source/_templates/class.rst new file mode 100644 index 00000000000..eeb823a961f --- /dev/null +++ b/docs/source/_templates/class.rst @@ -0,0 +1,9 @@ +.. role:: hidden + :class: hidden-section +.. currentmodule:: {{ module }} + + +{{ name | underline}} + +.. autoclass:: {{ name }} + :members: diff --git a/docs/source/_templates/class_dataset.rst b/docs/source/_templates/class_dataset.rst new file mode 100644 index 00000000000..c559c6dc9b0 --- /dev/null +++ b/docs/source/_templates/class_dataset.rst @@ -0,0 +1,12 @@ +.. role:: hidden + :class: hidden-section +.. currentmodule:: {{ module }} + + +{{ name | underline}} + +.. autoclass:: {{ name }} + :members: + __getitem__, + {% if "category_name" in methods %} category_name {% endif %} + :special-members: diff --git a/docs/source/_templates/function.rst b/docs/source/_templates/function.rst new file mode 100644 index 00000000000..72abc4f50fe --- /dev/null +++ b/docs/source/_templates/function.rst @@ -0,0 +1,8 @@ +.. role:: hidden + :class: hidden-section +.. currentmodule:: {{ module }} + + +{{ name | underline}} + +.. autofunction:: {{ name }} diff --git a/docs/source/_templates/layout.html b/docs/source/_templates/layout.html new file mode 100644 index 00000000000..aaa15d56e02 --- /dev/null +++ b/docs/source/_templates/layout.html @@ -0,0 +1,8 @@ +{% extends "!layout.html" %} + +{% block sidebartitle %} + + {% include "searchbox.html" %} +{% endblock %} diff --git a/docs/source/beta_status.py b/docs/source/beta_status.py new file mode 100644 index 00000000000..8871f6debbb --- /dev/null +++ b/docs/source/beta_status.py @@ -0,0 +1,21 @@ +from docutils import nodes +from docutils.parsers.rst import Directive + + +class BetaStatus(Directive): + has_content = True + text = "The {api_name} is in Beta stage, and backward compatibility is not guaranteed." 
+ node = nodes.warning + + def run(self): + text = self.text.format(api_name=" ".join(self.content)) + return [self.node("", nodes.paragraph("", "", nodes.Text(text)))] + + +def setup(app): + app.add_directive("betastatus", BetaStatus) + return { + "version": "0.1", + "parallel_read_safe": True, + "parallel_write_safe": True, + } diff --git a/docs/source/conf.py b/docs/source/conf.py index 3c277168a70..df6cca3856a 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -1,5 +1,4 @@ #!/usr/bin/env python3 -# -*- coding: utf-8 -*- # # PyTorch documentation build configuration file, created by # sphinx-quickstart on Fri Dec 23 13:31:47 2016. @@ -20,72 +19,147 @@ # import os # import sys # sys.path.insert(0, os.path.abspath('.')) -import torch + +import os +import sys +import textwrap +from copy import copy +from pathlib import Path + +import pytorch_sphinx_theme import torchvision -import sphinx_rtd_theme +import torchvision.models as M +from sphinx_gallery.sorting import ExplicitOrder +from tabulate import tabulate +sys.path.append(os.path.abspath(".")) # -- General configuration ------------------------------------------------ -# If your documentation needs a minimal Sphinx version, state it here. -# -# needs_sphinx = '1.0' +# Required version of sphinx is set from docs/requirements.txt # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.autosummary', - 'sphinx.ext.doctest', - 'sphinx.ext.intersphinx', - 'sphinx.ext.todo', - 'sphinx.ext.coverage', - 'sphinx.ext.mathjax', - 'sphinx.ext.napoleon', - 'sphinx.ext.viewcode', - 'sphinxcontrib.googleanalytics', + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.doctest", + "sphinx.ext.intersphinx", + "sphinx.ext.todo", + "sphinx.ext.mathjax", + "sphinx.ext.napoleon", + "sphinx.ext.viewcode", + "sphinx.ext.duration", + "sphinx_gallery.gen_gallery", + "sphinx_copybutton", + "beta_status", ] +# We override sphinx-gallery's example header to prevent sphinx-gallery from +# creating a note at the top of the rendered notebook. +# https://github.com/sphinx-gallery/sphinx-gallery/blob/451ccba1007cc523f39cbcc960ebc21ca39f7b75/sphinx_gallery/gen_rst.py#L1267-L1271 +# This is because we also want to add a link to Google Colab, so we write our own note in each example. +from sphinx_gallery import gen_rst + +gen_rst.EXAMPLE_HEADER = """ +.. DO NOT EDIT. +.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "{0}" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_{1}: + +""" + + +class CustomGalleryExampleSortKey: + # See https://sphinx-gallery.github.io/stable/configuration.html#sorting-gallery-examples + # and https://github.com/sphinx-gallery/sphinx-gallery/blob/master/sphinx_gallery/sorting.py + def __init__(self, src_dir): + self.src_dir = src_dir + + transforms_subsection_order = [ + "plot_transforms_getting_started.py", + "plot_transforms_illustrations.py", + "plot_transforms_e2e.py", + "plot_cutmix_mixup.py", + "plot_custom_transforms.py", + "plot_tv_tensors.py", + "plot_custom_tv_tensors.py", + ] + + def __call__(self, filename): + if "gallery/transforms" in self.src_dir: + try: + return self.transforms_subsection_order.index(filename) + except ValueError as e: + raise ValueError( + "Looks like you added an example in gallery/transforms?
" + "You need to specify its order in docs/source/conf.py. Look for CustomGalleryExampleSortKey." + ) from e + else: + # For other subsections we just sort alphabetically by filename + return filename + + +sphinx_gallery_conf = { + "examples_dirs": "../../gallery/", # path to your example scripts + "gallery_dirs": "auto_examples", # path to where to save gallery generated output + "subsection_order": ExplicitOrder(["../../gallery/transforms", "../../gallery/others"]), + "backreferences_dir": "gen_modules/backreferences", + "doc_module": ("torchvision",), + "remove_config_comments": True, + "ignore_pattern": "helpers.py", + "within_subsection_order": CustomGalleryExampleSortKey, +} + napoleon_use_ivar = True +napoleon_numpy_docstring = False +napoleon_google_docstring = True -googleanalytics_id = 'UA-90545585-1' -googleanalytics_enabled = True # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # -# source_suffix = ['.rst', '.md'] -source_suffix = '.rst' +source_suffix = { + ".rst": "restructuredtext", +} # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = 'Torchvision' -copyright = '2017, Torch Contributors' -author = 'Torch Contributors' +project = "Torchvision" +copyright = "2017-present, Torch Contributors" +author = "Torch Contributors" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. -# -# The short X.Y version. -# TODO: change to [:2] at v1.0 -version = 'master (' + torchvision.__version__ + ' )' -# The full version, including alpha/beta/rc tags. -# TODO: verify this works as expected -release = 'master' +# version: The short X.Y version. +# release: The full version, including alpha/beta/rc tags. +if os.environ.get("TORCHVISION_SANITIZE_VERSION_STR_IN_DOCS", None): + # Turn 1.11.0aHASH into 1.11 (major.minor only) + version = release = ".".join(torchvision.__version__.split(".")[:2]) + html_title = " ".join((project, version, "documentation")) +else: + version = f"main ({torchvision.__version__})" + release = "main" + # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language = "en" # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. @@ -93,7 +167,7 @@ exclude_patterns = [] # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = True @@ -104,67 +178,65 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = 'sphinx_rtd_theme' -html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] +html_theme = "pytorch_sphinx_theme" +html_theme_path = [pytorch_sphinx_theme.get_html_theme_path()] # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. 
# html_theme_options = { - 'collapse_navigation': False, - 'display_version': True, - 'logo_only': True, + "collapse_navigation": False, + "display_version": True, + "logo_only": True, + "pytorch_project": "docs", + "navigation_with_keys": True, + "analytics_id": "GTM-T8XT4PS", } -html_logo = '_static/img/pytorch-logo-dark.svg' +html_logo = "_static/img/pytorch-logo-dark.svg" # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -# html_style_path = 'css/pytorch_theme.css' -html_context = { - 'css_files': [ - 'https://fonts.googleapis.com/css?family=Lato', - '_static/css/pytorch_theme.css' - ], -} +html_static_path = ["_static"] +# TODO: remove this once https://github.com/pytorch/pytorch_sphinx_theme/issues/125 is fixed +html_css_files = [ + "css/custom_torchvision.css", +] # -- Options for HTMLHelp output ------------------------------------------ # Output file base name for HTML help builder. -htmlhelp_basename = 'PyTorchdoc' +htmlhelp_basename = "PyTorchdoc" -# -- Options for LaTeX output --------------------------------------------- +autosummary_generate = True + +# -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. # # 'preamble': '', - # Latex figure (float) alignment # # 'figure_align': 'htbp', } + # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, 'pytorch.tex', 'torchvision Documentation', - 'Torch Contributors', 'manual'), + (master_doc, "pytorch.tex", "torchvision Documentation", "Torch Contributors", "manual"), ] @@ -172,10 +244,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -man_pages = [ - (master_doc, 'torchvision', 'torchvision Documentation', - [author], 1) -] +man_pages = [(master_doc, "torchvision", "torchvision Documentation", [author], 1)] # -- Options for Texinfo output ------------------------------------------- @@ -184,67 +253,272 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'torchvision', 'torchvision Documentation', - author, 'torchvision', 'One line description of project.', - 'Miscellaneous'), + ( + master_doc, + "torchvision", + "torchvision Documentation", + author, + "torchvision", + "One line description of project.", + "Miscellaneous", + ), ] # Example configuration for intersphinx: refer to the Python standard library. 
intersphinx_mapping = { - 'python': ('https://docs.python.org/', None), - 'numpy': ('http://docs.scipy.org/doc/numpy/', None), + "python": ("https://docs.python.org/3/", None), + "torch": ("https://pytorch.org/docs/stable/", None), + "numpy": ("https://numpy.org/doc/stable/", None), + "PIL": ("https://pillow.readthedocs.io/en/stable/", None), + "matplotlib": ("https://matplotlib.org/stable/", None), } # -- A patch that prevents Sphinx from cross-referencing ivar tags ------- # See http://stackoverflow.com/a/41184353/3343043 from docutils import nodes -from sphinx.util.docfields import TypedField from sphinx import addnodes +from sphinx.util.docfields import TypedField def patched_make_field(self, types, domain, items, **kw): # `kw` catches `env=None` needed for newer sphinx while maintaining # backwards compatibility when passed along further down! - # type: (list, unicode, tuple) -> nodes.field + # type: (list, unicode, tuple) -> nodes.field # noqa: F821 def handle_item(fieldarg, content): par = nodes.paragraph() - par += addnodes.literal_strong('', fieldarg) # Patch: this line added + par += addnodes.literal_strong("", fieldarg) # Patch: this line added # par.extend(self.make_xrefs(self.rolename, domain, fieldarg, # addnodes.literal_strong)) if fieldarg in types: - par += nodes.Text(' (') + par += nodes.Text(" (") # NOTE: using .pop() here to prevent a single type node to be # inserted twice into the doctree, which leads to # inconsistencies later when references are resolved fieldtype = types.pop(fieldarg) if len(fieldtype) == 1 and isinstance(fieldtype[0], nodes.Text): - typename = u''.join(n.astext() for n in fieldtype) - typename = typename.replace('int', 'python:int') - typename = typename.replace('long', 'python:long') - typename = typename.replace('float', 'python:float') - typename = typename.replace('type', 'python:type') - par.extend(self.make_xrefs(self.typerolename, domain, typename, - addnodes.literal_emphasis, **kw)) + typename = "".join(n.astext() for n in fieldtype) + typename = typename.replace("int", "python:int") + typename = typename.replace("long", "python:long") + typename = typename.replace("float", "python:float") + typename = typename.replace("type", "python:type") + par.extend(self.make_xrefs(self.typerolename, domain, typename, addnodes.literal_emphasis, **kw)) else: par += fieldtype - par += nodes.Text(')') - par += nodes.Text(' -- ') + par += nodes.Text(")") + par += nodes.Text(" -- ") par += content return par - fieldname = nodes.field_name('', self.label) + fieldname = nodes.field_name("", self.label) if len(items) == 1 and self.can_collapse: fieldarg, content = items[0] bodynode = handle_item(fieldarg, content) else: bodynode = self.list_type() for fieldarg, content in items: - bodynode += nodes.list_item('', handle_item(fieldarg, content)) - fieldbody = nodes.field_body('', bodynode) - return nodes.field('', fieldname, fieldbody) + bodynode += nodes.list_item("", handle_item(fieldarg, content)) + fieldbody = nodes.field_body("", bodynode) + return nodes.field("", fieldname, fieldbody) TypedField.make_field = patched_make_field + + +def inject_minigalleries(app, what, name, obj, options, lines): + """Inject a minigallery into a docstring. + + This avoids having to manually write the .. minigallery directive for every item we want a minigallery for, + as it would be easy to miss some. + + This callback is called after the .. auto directives (like ..autoclass) have been processed, + and modifies the lines parameter inplace to add the .. 
minigallery that will show which examples + are using which object. + + It's a bit hacky, but not *that* hacky when you consider that the recommended way is to do pretty much the same, + but instead with templates using autosummary (which we don't want to use): + (https://sphinx-gallery.github.io/stable/configuration.html#auto-documenting-your-api-with-links-to-examples) + + For docs on autodoc-process-docstring, see the autodoc docs: + https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html + """ + + if what in ("class", "function"): + lines.append(f".. minigallery:: {name}") + lines.append(f" :add-heading: Examples using ``{name.split('.')[-1]}``:") + # avoid heading entirely to avoid warning. As a bonus it actually renders better + lines.append(" :heading-level: 9") + lines.append("\n") + + +def inject_weight_metadata(app, what, name, obj, options, lines): + """This hook is used to generate docs for the models weights. + + Objects like ResNet18_Weights are enums with fields, where each field is a Weight object. + Enums aren't easily documented in Python so the solution we're going for is to: + + - add an autoclass directive in the model's builder docstring, e.g. + + ``` + .. autoclass:: torchvision.models.ResNet34_Weights + :members: + ``` + + (see resnet.py for an example) + - then this hook is called automatically when building the docs, and it generates the text that gets + used within the autoclass directive. + """ + + if getattr(obj, "__name__", "").endswith(("_Weights", "_QuantizedWeights")): + + if len(obj) == 0: + lines[:] = ["There are no available pre-trained weights."] + return + + lines[:] = [ + "The model builder above accepts the following values as the ``weights`` parameter.", + f"``{obj.__name__}.DEFAULT`` is equivalent to ``{obj.DEFAULT}``. You can also use strings, e.g. " + f"``weights='DEFAULT'`` or ``weights='{str(list(obj)[0]).split('.')[1]}'``.", + ] + + if obj.__doc__ is not None and obj.__doc__ != "An enumeration.": + # We only show the custom enum doc if it was overridden. The default one from Python is "An enumeration" + lines.append("") + lines.append(obj.__doc__) + + lines.append("") + + for field in obj: + meta = copy(field.meta) + + lines += [f"**{str(field)}**:", ""] + lines += [meta.pop("_docs")] + + if field == obj.DEFAULT: + lines += [f"Also available as ``{obj.__name__}.DEFAULT``."] + lines += [""] + + table = [] + metrics = meta.pop("_metrics") + for dataset, dataset_metrics in metrics.items(): + for metric_name, metric_value in dataset_metrics.items(): + table.append((f"{metric_name} (on {dataset})", str(metric_value))) + + for k, v in meta.items(): + if k in {"recipe", "license"}: + v = f"`link <{v}>`__" + elif k == "min_size": + v = f"height={v[0]}, width={v[1]}" + elif k in {"categories", "keypoint_names"} and isinstance(v, list): + max_visible = 3 + v_sample = ", ".join(v[:max_visible]) + v = f"{v_sample}, ... ({len(v)-max_visible} omitted)" if len(v) > max_visible else v_sample + elif k == "_ops": + v = f"{v:.2f}" + k = "GIPS" if obj.__name__.endswith("_QuantizedWeights") else "GFLOPS" + elif k == "_file_size": + k = "File size" + v = f"{v:.1f} MB" + + table.append((str(k), str(v))) + table = tabulate(table, tablefmt="rst") + lines += [".. rst-class:: table-weights"] # Custom CSS class, see custom_torchvision.css + lines += [".. 
table::", ""] + lines += textwrap.indent(table, " " * 4).split("\n") + lines.append("") + lines.append( + f"The inference transforms are available at ``{str(field)}.transforms`` and " + f"perform the following preprocessing operations: {field.transforms().describe()}" + ) + lines.append("") + + +def generate_weights_table(module, table_name, metrics, dataset, include_patterns=None, exclude_patterns=None): + weights_endswith = "_QuantizedWeights" if module.__name__.split(".")[-1] == "quantization" else "_Weights" + weight_enums = [getattr(module, name) for name in dir(module) if name.endswith(weights_endswith)] + weights = [w for weight_enum in weight_enums for w in weight_enum] + + if include_patterns is not None: + weights = [w for w in weights if any(p in str(w) for p in include_patterns)] + if exclude_patterns is not None: + weights = [w for w in weights if all(p not in str(w) for p in exclude_patterns)] + + ops_name = "GIPS" if "QuantizedWeights" in weights_endswith else "GFLOPS" + + metrics_keys, metrics_names = zip(*metrics) + column_names = ["Weight"] + list(metrics_names) + ["Params"] + [ops_name, "Recipe"] # Final column order + column_names = [f"**{name}**" for name in column_names] # Add bold + + content = [] + for w in weights: + row = [ + f":class:`{w} <{type(w).__name__}>`", + *(w.meta["_metrics"][dataset][metric] for metric in metrics_keys), + f"{w.meta['num_params']/1e6:.1f}M", + f"{w.meta['_ops']:.2f}", + f"`link <{w.meta['recipe']}>`__", + ] + + content.append(row) + + column_widths = ["110"] + ["18"] * len(metrics_names) + ["18"] * 2 + ["10"] + widths_table = " ".join(column_widths) + + table = tabulate(content, headers=column_names, tablefmt="rst") + + generated_dir = Path("generated") + generated_dir.mkdir(exist_ok=True) + with open(generated_dir / f"{table_name}_table.rst", "w+") as table_file: + table_file.write(".. rst-class:: table-weights\n") # Custom CSS class, see custom_torchvision.css + table_file.write(".. 
table::\n") + table_file.write(f" :widths: {widths_table} \n\n") + table_file.write(f"{textwrap.indent(table, ' ' * 4)}\n\n") + + +generate_weights_table( + module=M, table_name="classification", metrics=[("acc@1", "Acc@1"), ("acc@5", "Acc@5")], dataset="ImageNet-1K" +) +generate_weights_table( + module=M.quantization, + table_name="classification_quant", + metrics=[("acc@1", "Acc@1"), ("acc@5", "Acc@5")], + dataset="ImageNet-1K", +) +generate_weights_table( + module=M.detection, + table_name="detection", + metrics=[("box_map", "Box MAP")], + exclude_patterns=["Mask", "Keypoint"], + dataset="COCO-val2017", +) +generate_weights_table( + module=M.detection, + table_name="instance_segmentation", + metrics=[("box_map", "Box MAP"), ("mask_map", "Mask MAP")], + dataset="COCO-val2017", + include_patterns=["Mask"], +) +generate_weights_table( + module=M.detection, + table_name="detection_keypoint", + metrics=[("box_map", "Box MAP"), ("kp_map", "Keypoint MAP")], + dataset="COCO-val2017", + include_patterns=["Keypoint"], +) +generate_weights_table( + module=M.segmentation, + table_name="segmentation", + metrics=[("miou", "Mean IoU"), ("pixel_acc", "pixelwise Acc")], + dataset="COCO-val2017-VOC-labels", +) +generate_weights_table( + module=M.video, table_name="video", metrics=[("acc@1", "Acc@1"), ("acc@5", "Acc@5")], dataset="Kinetics-400" +) + + +def setup(app): + + app.connect("autodoc-process-docstring", inject_minigalleries) + app.connect("autodoc-process-docstring", inject_weight_metadata) diff --git a/docs/source/datasets.rst b/docs/source/datasets.rst index 040962edc6a..3caa7434e20 100644 --- a/docs/source/datasets.rst +++ b/docs/source/datasets.rst @@ -1,10 +1,18 @@ -torchvision.datasets -==================== +.. _datasets: + +Datasets +======== + +Torchvision provides many built-in datasets in the ``torchvision.datasets`` +module, as well as utility classes for building your own datasets. + +Built-in datasets +----------------- All datasets are subclasses of :class:`torch.utils.data.Dataset` i.e, they have ``__getitem__`` and ``__len__`` methods implemented. Hence, they can all be passed to a :class:`torch.utils.data.DataLoader` -which can load multiple samples parallelly using ``torch.multiprocessing`` workers. +which can load multiple samples in parallel using ``torch.multiprocessing`` workers. For example: :: imagenet_data = torchvision.datasets.ImageNet('path/to/imagenet_root/') @@ -13,214 +21,172 @@ For example: :: shuffle=True, num_workers=args.nThreads) -The following datasets are available: - -.. contents:: Datasets - :local: +.. currentmodule:: torchvision.datasets All the datasets have almost similar API. They all have two common arguments: ``transform`` and ``target_transform`` to transform the input and target respectively. +You can also create your own datasets using the provided :ref:`base classes `. + +.. warning:: + + When a dataset object is created with ``download=True``, the files are first + downloaded and extracted in the root directory. This download logic is not + multi-process safe, so it may lead to conflicts / race conditions if it is + run within a distributed setting. In distributed mode, we recommend creating + a dummy dataset object to trigger the download logic *before* setting up + distributed mode. + +Image classification +~~~~~~~~~~~~~~~~~~~~ + +.. 
autosummary:: + :toctree: generated/ + :template: class_dataset.rst + + Caltech101 + Caltech256 + CelebA + CIFAR10 + CIFAR100 + Country211 + DTD + EMNIST + EuroSAT + FakeData + FashionMNIST + FER2013 + FGVCAircraft + Flickr8k + Flickr30k + Flowers102 + Food101 + GTSRB + INaturalist + ImageNet + Imagenette + KMNIST + LFWPeople + LSUN + MNIST + Omniglot + OxfordIIITPet + Places365 + PCAM + QMNIST + RenderedSST2 + SEMEION + SBU + StanfordCars + STL10 + SUN397 + SVHN + USPS + +Image detection or segmentation +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. autosummary:: + :toctree: generated/ + :template: class_dataset.rst + + CocoDetection + CelebA + Cityscapes + Kitti + OxfordIIITPet + SBDataset + VOCSegmentation + VOCDetection + WIDERFace + +Optical Flow +~~~~~~~~~~~~ - -.. currentmodule:: torchvision.datasets - - -MNIST -~~~~~ - -.. autoclass:: MNIST - -Fashion-MNIST -~~~~~~~~~~~~~ - -.. autoclass:: FashionMNIST - -KMNIST -~~~~~~~~~~~~~ - -.. autoclass:: KMNIST - -EMNIST -~~~~~~ - -.. autoclass:: EMNIST - -QMNIST -~~~~~~ - -.. autoclass:: QMNIST - -FakeData -~~~~~~~~ - -.. autoclass:: FakeData - -COCO -~~~~ - -.. note :: - These require the `COCO API to be installed`_ - -.. _COCO API to be installed: https://github.com/pdollar/coco/tree/master/PythonAPI - - -Captions -^^^^^^^^ - -.. autoclass:: CocoCaptions - :members: __getitem__ - :special-members: - - -Detection -^^^^^^^^^ - -.. autoclass:: CocoDetection - :members: __getitem__ - :special-members: - -LSUN -~~~~ - -.. autoclass:: LSUN - :members: __getitem__ - :special-members: - -ImageFolder -~~~~~~~~~~~ - -.. autoclass:: ImageFolder - :members: __getitem__ - :special-members: - -DatasetFolder -~~~~~~~~~~~~~ - -.. autoclass:: DatasetFolder - :members: __getitem__ - :special-members: - - - -ImageNet +.. autosummary:: + :toctree: generated/ + :template: class_dataset.rst + + FlyingChairs + FlyingThings3D + HD1K + KittiFlow + Sintel + +Stereo Matching +~~~~~~~~~~~~~~~ + +.. autosummary:: + :toctree: generated/ + :template: class_dataset.rst + + CarlaStereo + Kitti2012Stereo + Kitti2015Stereo + CREStereo + FallingThingsStereo + SceneFlowStereo + SintelStereo + InStereo2k + ETH3DStereo + Middlebury2014Stereo + +Image pairs ~~~~~~~~~~~ -.. autoclass:: ImageNet - -.. note :: - This requires `scipy` to be installed - - -CIFAR -~~~~~ - -.. autoclass:: CIFAR10 - :members: __getitem__ - :special-members: - -.. autoclass:: CIFAR100 - -STL10 -~~~~~ - - -.. autoclass:: STL10 - :members: __getitem__ - :special-members: - -SVHN -~~~~~ - +.. autosummary:: + :toctree: generated/ + :template: class_dataset.rst -.. autoclass:: SVHN - :members: __getitem__ - :special-members: + LFWPairs + PhotoTour -PhotoTour -~~~~~~~~~ +Image captioning +~~~~~~~~~~~~~~~~ +.. autosummary:: + :toctree: generated/ + :template: class_dataset.rst -.. autoclass:: PhotoTour - :members: __getitem__ - :special-members: + CocoCaptions -SBU -~~~ +Video classification +~~~~~~~~~~~~~~~~~~~~ +.. autosummary:: + :toctree: generated/ + :template: class_dataset.rst -.. autoclass:: SBU - :members: __getitem__ - :special-members: + HMDB51 + Kinetics + UCF101 -Flickr -~~~~~~ +Video prediction +~~~~~~~~~~~~~~~~~~~~ +.. autosummary:: + :toctree: generated/ + :template: class_dataset.rst -.. autoclass:: Flickr8k - :members: __getitem__ - :special-members: - -.. autoclass:: Flickr30k - :members: __getitem__ - :special-members: - -VOC -~~~~~~ - - -.. autoclass:: VOCSegmentation - :members: __getitem__ - :special-members: - -.. 
autoclass:: VOCDetection - :members: __getitem__ - :special-members: - -Cityscapes -~~~~~~~~~~ - -.. note :: - Requires Cityscape to be downloaded. - -.. autoclass:: Cityscapes - :members: __getitem__ - :special-members: - -SBD -~~~~~~ - - -.. autoclass:: SBDataset - :members: __getitem__ - :special-members: - -USPS -~~~~~ - -.. autoclass:: USPS - :members: __getitem__ - :special-members: - - -Kinetics-400 -~~~~~~~~~~~~ + MovingMNIST -.. autoclass:: Kinetics400 - :members: __getitem__ - :special-members: +.. _base_classes_datasets: +Base classes for custom datasets +-------------------------------- -HMDB51 -~~~~~~~ +.. autosummary:: + :toctree: generated/ + :template: class.rst -.. autoclass:: HMDB51 - :members: __getitem__ - :special-members: + DatasetFolder + ImageFolder + VisionDataset +Transforms v2 +------------- -UCF101 -~~~~~~~ +.. autosummary:: + :toctree: generated/ + :template: function.rst -.. autoclass:: UCF101 - :members: __getitem__ - :special-members: + wrap_dataset_for_transforms_v2 diff --git a/docs/source/docutils.conf b/docs/source/docutils.conf new file mode 100644 index 00000000000..e2bef654a4a --- /dev/null +++ b/docs/source/docutils.conf @@ -0,0 +1,3 @@ +# Necessary for the table generated by autosummary to look decent +[html writers] +table_style: colwidths-auto diff --git a/docs/source/feature_extraction.rst b/docs/source/feature_extraction.rst new file mode 100644 index 00000000000..e83bc2fe4bc --- /dev/null +++ b/docs/source/feature_extraction.rst @@ -0,0 +1,166 @@ +Feature extraction for model inspection +======================================= + +.. currentmodule:: torchvision.models.feature_extraction + +The ``torchvision.models.feature_extraction`` package contains +feature extraction utilities that let us tap into our models to access intermediate +transformations of our inputs. This could be useful for a variety of +applications in computer vision. Just a few examples are: + +- Visualizing feature maps. +- Extracting features to compute image descriptors for tasks like facial + recognition, copy-detection, or image retrieval. +- Passing selected features to downstream sub-networks for end-to-end training + with a specific task in mind. For example, passing a hierarchy of features + to a Feature Pyramid Network with object detection heads. + +Torchvision provides :func:`create_feature_extractor` for this purpose. +It works by following roughly these steps: + +1. Symbolically tracing the model to get a graphical representation of + how it transforms the input, step by step. +2. Setting the user-selected graph nodes as outputs. +3. Removing all redundant nodes (anything downstream of the output nodes). +4. Generating python code from the resulting graph and bundling that into a + PyTorch module together with the graph itself. + +| + +The `torch.fx documentation `_ +provides a more general and detailed explanation of the above procedure and +the inner workings of the symbolic tracing. + +.. _about-node-names: + +**About Node Names** + +In order to specify which nodes should be output nodes for extracted +features, one should be familiar with the node naming convention used here +(which differs slightly from that used in ``torch.fx``). A node name is +specified as a ``.`` separated path walking the module hierarchy from top level +module down to leaf operation or leaf module. For instance ``"layer4.2.relu"`` +in ResNet-50 represents the output of the ReLU of the 2nd block of the 4th +layer of the ``ResNet`` module. 
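+For instance, a quick way to inspect these names is a minimal sketch like the
+following (the exact list printed depends on your torchvision version):
+
+.. code:: python
+
+    from torchvision.models import resnet50
+    from torchvision.models.feature_extraction import get_graph_node_names
+
+    # Node names are returned for train and eval mode; they only differ when
+    # the model contains training-dependent control flow.
+    train_nodes, eval_nodes = get_graph_node_names(resnet50())
+    print(train_nodes)  # [..., 'layer4.2.relu_2', 'avgpool', 'flatten', 'fc']
+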
Here are some finer points to keep in mind:
+
+- When specifying node names for :func:`create_feature_extractor`, you may
+  provide a truncated version of a node name as a shortcut. To see how this
+  works, try creating a ResNet-50 model and printing the node names with
+  ``train_nodes, _ = get_graph_node_names(model)`` followed by
+  ``print(train_nodes)``, as in the sketch above, and observe that the last
+  node pertaining to ``layer4`` is ``"layer4.2.relu_2"``. One may specify
+  ``"layer4.2.relu_2"`` as the return node, or just ``"layer4"`` as this, by
+  convention, refers to the last node (in order of execution) of ``layer4``.
+- If a certain module or operation is repeated more than once, node names get
+  an additional ``_{int}`` postfix to disambiguate. For instance, maybe the
+  addition (``+``) operation is used three times in the same ``forward``
+  method. Then there would be ``"path.to.module.add"``,
+  ``"path.to.module.add_1"``, ``"path.to.module.add_2"``. The counter is
+  maintained within the scope of the direct parent. So in ResNet-50 there is
+  a ``"layer4.1.add"`` and a ``"layer4.2.add"``. Because the addition
+  operations reside in different blocks, there is no need for a postfix to
+  disambiguate.
+
+
+**An Example**
+
+Here is an example of how we might extract features for MaskRCNN:
+
+.. code-block:: python
+
+    import torch
+    from torchvision.models import resnet50
+    from torchvision.models.feature_extraction import get_graph_node_names
+    from torchvision.models.feature_extraction import create_feature_extractor
+    from torchvision.models.detection.mask_rcnn import MaskRCNN
+    from torchvision.models.detection.backbone_utils import LastLevelMaxPool
+    from torchvision.ops.feature_pyramid_network import FeaturePyramidNetwork
+
+
+    # To assist you in designing the feature extractor you may want to print out
+    # the available nodes for resnet50.
+    m = resnet50()
+    train_nodes, eval_nodes = get_graph_node_names(resnet50())
+
+    # The lists returned are the names of all the graph nodes (in order of
+    # execution) for the input model traced in train mode and in eval mode
+    # respectively. You'll find that `train_nodes` and `eval_nodes` are the same
+    # for this example. But if the model contains control flow that's dependent
+    # on the training mode, they may be different.
+
+    # To specify the nodes you want to extract, you could select the final node
+    # that appears in each of the main layers:
+    return_nodes = {
+        # node_name: user-specified key for output dict
+        'layer1.2.relu_2': 'layer1',
+        'layer2.3.relu_2': 'layer2',
+        'layer3.5.relu_2': 'layer3',
+        'layer4.2.relu_2': 'layer4',
+    }
+
+    # But `create_feature_extractor` can also accept truncated node specifications
+    # like "layer1", as it will just pick the last node that's a descendant
+    # of the specification. (Tip: be careful with this, especially when a layer
+    # has multiple outputs. It's not always guaranteed that the last operation
+    # performed is the one that corresponds to the output you desire. You should
+    # consult the source code for the input model to confirm.)
+    return_nodes = {
+        'layer1': 'layer1',
+        'layer2': 'layer2',
+        'layer3': 'layer3',
+        'layer4': 'layer4',
+    }
+
+    # Now you can build the feature extractor. 
This returns a module whose forward + # method returns a dictionary like: + # { + # 'layer1': output of layer 1, + # 'layer2': output of layer 2, + # 'layer3': output of layer 3, + # 'layer4': output of layer 4, + # } + create_feature_extractor(m, return_nodes=return_nodes) + + # Let's put all that together to wrap resnet50 with MaskRCNN + + # MaskRCNN requires a backbone with an attached FPN + class Resnet50WithFPN(torch.nn.Module): + def __init__(self): + super(Resnet50WithFPN, self).__init__() + # Get a resnet50 backbone + m = resnet50() + # Extract 4 main layers (note: MaskRCNN needs this particular name + # mapping for return nodes) + self.body = create_feature_extractor( + m, return_nodes={f'layer{k}': str(v) + for v, k in enumerate([1, 2, 3, 4])}) + # Dry run to get number of channels for FPN + inp = torch.randn(2, 3, 224, 224) + with torch.no_grad(): + out = self.body(inp) + in_channels_list = [o.shape[1] for o in out.values()] + # Build FPN + self.out_channels = 256 + self.fpn = FeaturePyramidNetwork( + in_channels_list, out_channels=self.out_channels, + extra_blocks=LastLevelMaxPool()) + + def forward(self, x): + x = self.body(x) + x = self.fpn(x) + return x + + + # Now we can build our model! + model = MaskRCNN(Resnet50WithFPN(), num_classes=91).eval() + + +API Reference +------------- + +.. autosummary:: + :toctree: generated/ + :template: function.rst + + create_feature_extractor + get_graph_node_names diff --git a/docs/source/index.rst b/docs/source/index.rst index 9de82b6e7fc..dc5fdefaefb 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -1,5 +1,28 @@ torchvision =========== +This library is part of the `PyTorch +`_ project. PyTorch is an open source +machine learning framework. + +Features described in this documentation are classified by release status: + + *Stable:* These features will be maintained long-term and there should generally + be no major performance limitations or gaps in documentation. + We also expect to maintain backwards compatibility (although + breaking changes can happen and notice will be given one release ahead + of time). + + *Beta:* Features are tagged as Beta because the API may change based on + user feedback, because the performance needs to improve, or because + coverage across operators is not yet complete. For Beta features, we are + committing to seeing the feature through to the Stable classification. + We are not, however, committing to backwards compatibility. + + *Prototype:* These features are typically not available as part of + binary distributions like PyPI or Conda, except sometimes behind run-time + flags, and are at an early stage for feedback and testing. + + The :mod:`torchvision` package consists of popular datasets, model architectures, and common image transformations for computer vision. @@ -8,12 +31,39 @@ architectures, and common image transformations for computer vision. :maxdepth: 2 :caption: Package Reference - datasets - io - models - ops transforms + tv_tensors + models + datasets utils + ops + io + feature_extraction + +.. toctree:: + :maxdepth: 1 + :caption: Examples and training references + + auto_examples/index + training_references .. automodule:: torchvision :members: + +.. 
toctree::
+   :maxdepth: 1
+   :caption: PyTorch Libraries
+
+   PyTorch
+   torchaudio
+   torchtext
+   torchvision
+   TorchElastic
+   TorchServe
+   PyTorch on XLA Devices
+
+
+Indices
+-------
+
+* :ref:`genindex`
diff --git a/docs/source/io.rst b/docs/source/io.rst
index e7aeedc0716..c3f2d658014 100644
--- a/docs/source/io.rst
+++ b/docs/source/io.rst
@@ -1,16 +1,120 @@
-torchvision.io
-==============
+Decoding / Encoding images and videos
+=====================================
 
 .. currentmodule:: torchvision.io
 
-The :mod:`torchvision.io` package provides functions for performing IO
-operations. They are currently specific to reading and writing video.
+The :mod:`torchvision.io` module provides utilities for decoding and encoding
+images and videos.
+
+Image Decoding
+--------------
+
+Torchvision currently supports decoding JPEG, PNG, WEBP, GIF, AVIF, and HEIC
+images. JPEG decoding can also be done on CUDA GPUs.
+
+The main entry point is the :func:`~torchvision.io.decode_image` function, which
+you can use as an alternative to ``PIL.Image.open()``. It will decode images
+straight into image Tensors, thus saving you the conversion and allowing you to
+run transforms/preproc natively on tensors.
+
+.. code::
+
+    from torchvision.io import decode_image
+
+    img = decode_image("path_to_image", mode="RGB")
+    img.dtype  # torch.uint8
+
+    # Or
+    raw_encoded_bytes = ...  # read encoded bytes from your file system
+    img = decode_image(raw_encoded_bytes, mode="RGB")
+
+
+:func:`~torchvision.io.decode_image` will automatically detect the image format,
+and call the corresponding decoder (except for HEIC and AVIF images, see details
+in :func:`~torchvision.io.decode_avif` and :func:`~torchvision.io.decode_heic`).
+You can also use the lower-level format-specific decoders which can be more
+powerful, e.g. if you want to encode/decode JPEGs on CUDA.
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    decode_image
+    decode_jpeg
+    decode_png
+    decode_webp
+    decode_avif
+    decode_heic
+    decode_gif
+
+.. autosummary::
+    :toctree: generated/
+    :template: class.rst
+
+    ImageReadMode
+
+Obsolete decoding function:
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    read_image
+
+Image Encoding
+--------------
+
+For encoding, JPEG (cpu and CUDA) and PNG are supported.
+
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    encode_jpeg
+    write_jpeg
+    encode_png
+    write_png
+
+IO operations
+-------------
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    read_file
+    write_file
 
 Video
 -----
 
-.. autofunction:: read_video
+.. warning::
+
+    Torchvision supports video decoding through different APIs listed below,
+    some of which are still in BETA stage. In the near future, we intend to
+    centralize PyTorch's video decoding capabilities within the `torchcodec
+    `_ project. We encourage you to try
+    it out and share your feedback, as the torchvision video decoders will
+    eventually be deprecated.
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    read_video
+    read_video_timestamps
+    write_video
+
+
+**Fine-grained video API**
+
+In addition to the :func:`read_video` function, we provide a high-performance,
+lower-level API for more fine-grained control.
+It does all this whilst fully supporting torchscript.
 
-.. autofunction:: read_video_timestamps
+.. autosummary::
+    :toctree: generated/
+    :template: class.rst
 
-.. autofunction:: write_video
+
+    VideoReader
diff --git a/docs/source/models.rst b/docs/source/models.rst
index e1a141092dc..d0096aaf854 100644
--- a/docs/source/models.rst
+++ b/docs/source/models.rst
@@ -1,445 +1,577 @@
-torchvision.models
-##################
+.. _models:
+
+Models and pre-trained weights
+##############################
 
-The models subpackage contains definitions of models for addressing
+The ``torchvision.models`` subpackage contains definitions of models for addressing
 different tasks, including: image classification, pixelwise semantic
 segmentation, object detection, instance segmentation, person
-keypoint detection and video classification.
+keypoint detection, video classification, and optical flow.
 
+General information on pre-trained weights
+==========================================
 
-Classification
-==============
+TorchVision offers pre-trained weights for every provided architecture, using
+the PyTorch :mod:`torch.hub`. Instantiating a pre-trained model will download its
+weights to a cache directory. This directory can be set using the `TORCH_HOME`
+environment variable. See :func:`torch.hub.load_state_dict_from_url` for details.
+
+.. note::
+
+    The pre-trained models provided in this library may have their own licenses or
+    terms and conditions derived from the dataset used for training. It is your
+    responsibility to determine whether you have permission to use the models for
+    your use case.
+
+.. note ::
+    Backward compatibility is guaranteed for loading a serialized
+    ``state_dict`` into a model created with an older version of PyTorch.
+    In contrast, loading entire saved models or serialized
+    ``ScriptModules`` (serialized using older versions of PyTorch)
+    may not preserve the historic behaviour. Refer to the following
+    `documentation
+    `_
+
+
+Initializing pre-trained models
+-------------------------------
+
+As of v0.13, TorchVision offers a new `Multi-weight support API
+`_
+for loading different weights into the existing model builder methods:
+
+.. code:: python
+
+    from torchvision.models import resnet50, ResNet50_Weights
+
+    # Old weights with accuracy 76.130%
+    resnet50(weights=ResNet50_Weights.IMAGENET1K_V1)
+
+    # New weights with accuracy 80.858%
+    resnet50(weights=ResNet50_Weights.IMAGENET1K_V2)
 
-The models subpackage contains definitions for the following model
-architectures for image classification:
+    # Best available weights (currently alias for IMAGENET1K_V2)
+    # Note that these weights may change across versions
+    resnet50(weights=ResNet50_Weights.DEFAULT)
 
-- `AlexNet`_
-- `VGG`_
-- `ResNet`_
-- `SqueezeNet`_
-- `DenseNet`_
-- `Inception`_ v3
-- `GoogLeNet`_
-- `ShuffleNet`_ v2
-- `MobileNet`_ v2
-- `ResNeXt`_
-- `Wide ResNet`_
-- `MNASNet`_
+    # Strings are also supported
+    resnet50(weights="IMAGENET1K_V2")
 
-You can construct a model with random weights by calling its constructor:
+    # No weights - random initialization
+    resnet50(weights=None)
+
+
+Migrating to the new API is very straightforward. The following method calls between the two APIs are all equivalent:
 
 .. code:: python
 
-    import torchvision.models as models
-    resnet18 = models.resnet18()
-    alexnet = models.alexnet()
-    vgg16 = models.vgg16()
-    squeezenet = models.squeezenet1_0()
-    densenet = models.densenet161()
-    inception = models.inception_v3()
-    googlenet = models.googlenet()
-    shufflenet = models.shufflenet_v2_x1_0()
-    mobilenet = models.mobilenet_v2()
-    resnext50_32x4d = models.resnext50_32x4d()
-    wide_resnet50_2 = models.wide_resnet50_2()
-    mnasnet = models.mnasnet1_0()
-
-We provide pre-trained models, using the PyTorch :mod:`torch.utils.model_zoo`.
-These can be constructed by passing ``pretrained=True``:
+    from torchvision.models import resnet50, ResNet50_Weights
+
+    # Using pretrained weights:
+    resnet50(weights=ResNet50_Weights.IMAGENET1K_V1)
+    resnet50(weights="IMAGENET1K_V1")
+    resnet50(pretrained=True)  # deprecated
+    resnet50(True)  # deprecated
+
+    # Using no weights:
+    resnet50(weights=None)
+    resnet50()
+    resnet50(pretrained=False)  # deprecated
+    resnet50(False)  # deprecated
+
+Note that the ``pretrained`` parameter is now deprecated; using it will emit
+warnings, and it will be removed in v0.15.
+
+Using the pre-trained models
+----------------------------
+
+Before using the pre-trained models, one must preprocess the image
+(resize with the right resolution/interpolation, apply inference transforms,
+rescale the values, etc.). There is no standard way to do this as it depends on
+how a given model was trained. It can vary across model families, variants or
+even weight versions. Using the correct preprocessing method is critical and
+failing to do so may lead to decreased accuracy or incorrect outputs.
+
+All the necessary information for the inference transforms of each pre-trained
+model is provided in its weights documentation. To simplify inference, TorchVision
+bundles the necessary preprocessing transforms into each model weight. These are
+accessible via the ``weight.transforms`` attribute:
 
 .. code:: python
 
-    import torchvision.models as models
-    resnet18 = models.resnet18(pretrained=True)
-    alexnet = models.alexnet(pretrained=True)
-    squeezenet = models.squeezenet1_0(pretrained=True)
-    vgg16 = models.vgg16(pretrained=True)
-    densenet = models.densenet161(pretrained=True)
-    inception = models.inception_v3(pretrained=True)
-    googlenet = models.googlenet(pretrained=True)
-    shufflenet = models.shufflenet_v2_x1_0(pretrained=True)
-    mobilenet = models.mobilenet_v2(pretrained=True)
-    resnext50_32x4d = models.resnext50_32x4d(pretrained=True)
-    wide_resnet50_2 = models.wide_resnet50_2(pretrained=True)
-    mnasnet = models.mnasnet1_0(pretrained=True)
-
-Instancing a pre-trained model will download its weights to a cache directory.
-This directory can be set using the `TORCH_MODEL_ZOO` environment variable. See
-:func:`torch.utils.model_zoo.load_url` for details.
+    # Initialize the Weight Transforms
+    weights = ResNet50_Weights.DEFAULT
+    preprocess = weights.transforms()
+
+    # Apply it to the input image
+    img_transformed = preprocess(img)
+
 
 Some models use modules which have different training and evaluation
 behavior, such as batch normalization. To switch between these modes, use
 ``model.train()`` or ``model.eval()`` as appropriate. See
-:meth:`~torch.nn.Module.train` or :meth:`~torch.nn.Module.eval` for details.
-
-All pre-trained models expect input images normalized in the same way,
-i.e. mini-batches of 3-channel RGB images of shape (3 x H x W),
-where H and W are expected to be at least 224. 
-The images have to be loaded in to a range of [0, 1] and then normalized -using ``mean = [0.485, 0.456, 0.406]`` and ``std = [0.229, 0.224, 0.225]``. -You can use the following transform to normalize:: - - normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], - std=[0.229, 0.224, 0.225]) - -An example of such normalization can be found in the imagenet example -`here `_ - -ImageNet 1-crop error rates (224x224) - -================================ ============= ============= -Network Top-1 error Top-5 error -================================ ============= ============= -AlexNet 43.45 20.91 -VGG-11 30.98 11.37 -VGG-13 30.07 10.75 -VGG-16 28.41 9.62 -VGG-19 27.62 9.12 -VGG-11 with batch normalization 29.62 10.19 -VGG-13 with batch normalization 28.45 9.63 -VGG-16 with batch normalization 26.63 8.50 -VGG-19 with batch normalization 25.76 8.15 -ResNet-18 30.24 10.92 -ResNet-34 26.70 8.58 -ResNet-50 23.85 7.13 -ResNet-101 22.63 6.44 -ResNet-152 21.69 5.94 -SqueezeNet 1.0 41.90 19.58 -SqueezeNet 1.1 41.81 19.38 -Densenet-121 25.35 7.83 -Densenet-169 24.00 7.00 -Densenet-201 22.80 6.43 -Densenet-161 22.35 6.20 -Inception v3 22.55 6.44 -GoogleNet 30.22 10.47 -ShuffleNet V2 30.64 11.68 -MobileNet V2 28.12 9.71 -ResNeXt-50-32x4d 22.38 6.30 -ResNeXt-101-32x8d 20.69 5.47 -Wide ResNet-50-2 21.49 5.91 -Wide ResNet-101-2 21.16 5.72 -MNASNet 1.0 26.49 8.456 -================================ ============= ============= - - -.. _AlexNet: https://arxiv.org/abs/1404.5997 -.. _VGG: https://arxiv.org/abs/1409.1556 -.. _ResNet: https://arxiv.org/abs/1512.03385 -.. _SqueezeNet: https://arxiv.org/abs/1602.07360 -.. _DenseNet: https://arxiv.org/abs/1608.06993 -.. _Inception: https://arxiv.org/abs/1512.00567 -.. _GoogLeNet: https://arxiv.org/abs/1409.4842 -.. _ShuffleNet: https://arxiv.org/abs/1807.11164 -.. _MobileNet: https://arxiv.org/abs/1801.04381 -.. _ResNeXt: https://arxiv.org/abs/1611.05431 -.. _MNASNet: https://arxiv.org/abs/1807.11626 +:meth:`~torch.nn.Module.train` or :meth:`~torch.nn.Module.eval` for details. + +.. code:: python + + # Initialize model + weights = ResNet50_Weights.DEFAULT + model = resnet50(weights=weights) + + # Set model to eval mode + model.eval() + +Listing and retrieving available models +--------------------------------------- + +As of v0.14, TorchVision offers a new mechanism which allows listing and +retrieving models and weights by their names. Here are a few examples on how to +use them: + +.. code:: python + + # List available models + all_models = list_models() + classification_models = list_models(module=torchvision.models) + + # Initialize models + m1 = get_model("mobilenet_v3_large", weights=None) + m2 = get_model("quantized_mobilenet_v3_large", weights="DEFAULT") + + # Fetch weights + weights = get_weight("MobileNet_V3_Large_QuantizedWeights.DEFAULT") + assert weights == MobileNet_V3_Large_QuantizedWeights.DEFAULT + + weights_enum = get_model_weights("quantized_mobilenet_v3_large") + assert weights_enum == MobileNet_V3_Large_QuantizedWeights + + weights_enum2 = get_model_weights(torchvision.models.quantization.mobilenet_v3_large) + assert weights_enum == weights_enum2 + +Here are the available public functions to retrieve models and their corresponding weights: .. currentmodule:: torchvision.models +.. autosummary:: + :toctree: generated/ + :template: function.rst -Alexnet -------- + get_model + get_model_weights + get_weight + list_models -.. 
autofunction:: alexnet
 
+Using models from Hub
+---------------------
 
-VGG
----
+Most pre-trained models can be accessed directly via PyTorch Hub without having TorchVision installed:
 
-.. autofunction:: vgg11
-.. autofunction:: vgg11_bn
-.. autofunction:: vgg13
-.. autofunction:: vgg13_bn
-.. autofunction:: vgg16
-.. autofunction:: vgg16_bn
-.. autofunction:: vgg19
-.. autofunction:: vgg19_bn
+.. code:: python
+
+    import torch
 
-ResNet
-------
+    # Option 1: passing weights param as string
+    model = torch.hub.load("pytorch/vision", "resnet50", weights="IMAGENET1K_V2")
 
-.. autofunction:: resnet18
-.. autofunction:: resnet34
-.. autofunction:: resnet50
-.. autofunction:: resnet101
-.. autofunction:: resnet152
+    # Option 2: passing weights param as enum
+    weights = torch.hub.load(
+        "pytorch/vision",
+        "get_weight",
+        weights="ResNet50_Weights.IMAGENET1K_V2",
+    )
+    model = torch.hub.load("pytorch/vision", "resnet50", weights=weights)
 
-SqueezeNet
-----------
+You can also retrieve all the available weights of a specific model via PyTorch Hub by doing:
 
-.. autofunction:: squeezenet1_0
-.. autofunction:: squeezenet1_1
+.. code:: python
 
-DenseNet
---------
+    import torch
 
-.. autofunction:: densenet121
-.. autofunction:: densenet169
-.. autofunction:: densenet161
-.. autofunction:: densenet201
+    weight_enum = torch.hub.load("pytorch/vision", "get_model_weights", name="resnet50")
+    print([weight for weight in weight_enum])
 
-Inception v3
-------------
+The only exceptions to the above are the detection models included in
+:mod:`torchvision.models.detection`. These models require TorchVision
+to be installed because they depend on custom C++ operators.
 
-.. autofunction:: inception_v3
+Classification
+==============
+
+.. currentmodule:: torchvision.models
+
+The following classification models are available, with or without pre-trained
+weights:
+
+.. toctree::
+   :maxdepth: 1
+
+   models/alexnet
+   models/convnext
+   models/densenet
+   models/efficientnet
+   models/efficientnetv2
+   models/googlenet
+   models/inception
+   models/maxvit
+   models/mnasnet
+   models/mobilenetv2
+   models/mobilenetv3
+   models/regnet
+   models/resnet
+   models/resnext
+   models/shufflenetv2
+   models/squeezenet
+   models/swin_transformer
+   models/vgg
+   models/vision_transformer
+   models/wide_resnet
+
+|
+
+Here is an example of how to use the pre-trained image classification models:
+
+.. code:: python
+
+    from torchvision.io import decode_image
+    from torchvision.models import resnet50, ResNet50_Weights
 
-GoogLeNet
-------------
+    img = decode_image("test/assets/encode_jpeg/grace_hopper_517x606.jpg")
 
-.. autofunction:: googlenet
+    # Step 1: Initialize model with the best available weights
+    weights = ResNet50_Weights.DEFAULT
+    model = resnet50(weights=weights)
+    model.eval()
 
-ShuffleNet v2
--------------
+    # Step 2: Initialize the inference transforms
+    preprocess = weights.transforms()
 
-.. autofunction:: shufflenet_v2_x0_5
-.. autofunction:: shufflenet_v2_x1_0
-.. autofunction:: shufflenet_v2_x1_5
-.. autofunction:: shufflenet_v2_x2_0
+    # Step 3: Apply inference preprocessing transforms
+    batch = preprocess(img).unsqueeze(0)
 
-MobileNet v2
--------------
+    # Step 4: Use the model and print the predicted category
+    prediction = model(batch).squeeze(0).softmax(0)
+    class_id = prediction.argmax().item()
+    score = prediction[class_id].item()
+    category_name = weights.meta["categories"][class_id]
+    print(f"{category_name}: {100 * score:.1f}%")
 
-.. autofunction:: mobilenet_v2
+The classes of the pre-trained model outputs can be found at ``weights.meta["categories"]``.
 
-ResNext
--------
+Table of all available classification weights
+---------------------------------------------
 
-.. autofunction:: resnext50_32x4d
-.. autofunction:: resnext101_32x8d
+Accuracies are reported on ImageNet-1K using single crops:
 
-Wide ResNet
------------
+.. include:: generated/classification_table.rst
 
-.. autofunction:: wide_resnet50_2
-.. autofunction:: wide_resnet101_2
+Quantized models
+----------------
 
-MNASNet
---------
+.. currentmodule:: torchvision.models.quantization
 
-.. autofunction:: mnasnet0_5
-.. autofunction:: mnasnet0_75
-.. autofunction:: mnasnet1_0
-.. autofunction:: mnasnet1_3
+The following architectures provide support for INT8 quantized models, with or without
+pre-trained weights:
+
+.. toctree::
+   :maxdepth: 1
+
+   models/googlenet_quant
+   models/inception_quant
+   models/mobilenetv2_quant
+   models/mobilenetv3_quant
+   models/resnet_quant
+   models/resnext_quant
+   models/shufflenetv2_quant
+
+|
+
+Here is an example of how to use the pre-trained quantized image classification models:
+
+.. code:: python
+
+    from torchvision.io import decode_image
+    from torchvision.models.quantization import resnet50, ResNet50_QuantizedWeights
+
+    img = decode_image("test/assets/encode_jpeg/grace_hopper_517x606.jpg")
+
+    # Step 1: Initialize model with the best available weights
+    weights = ResNet50_QuantizedWeights.DEFAULT
+    model = resnet50(weights=weights, quantize=True)
+    model.eval()
+
+    # Step 2: Initialize the inference transforms
+    preprocess = weights.transforms()
+
+    # Step 3: Apply inference preprocessing transforms
+    batch = preprocess(img).unsqueeze(0)
+
+    # Step 4: Use the model and print the predicted category
+    prediction = model(batch).squeeze(0).softmax(0)
+    class_id = prediction.argmax().item()
+    score = prediction[class_id].item()
+    category_name = weights.meta["categories"][class_id]
+    print(f"{category_name}: {100 * score}%")
+
+The classes of the pre-trained model outputs can be found at ``weights.meta["categories"]``.
+
+
+Table of all available quantized classification weights
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Accuracies are reported on ImageNet-1K using single crops:
+
+.. include:: generated/classification_quant_table.rst
 
 Semantic Segmentation
 =====================
 
-The models subpackage contains definitions for the following model
-architectures for semantic segmentation:
+.. currentmodule:: torchvision.models.segmentation
 
-- `FCN ResNet101 `_
-- `DeepLabV3 ResNet101 `_
+.. betastatus:: segmentation module
 
-As with image classification models, all pre-trained models expect input images normalized in the same way.
-The images have to be loaded in to a range of ``[0, 1]`` and then normalized using
-``mean = [0.485, 0.456, 0.406]`` and ``std = [0.229, 0.224, 0.225]``.
-They have been trained on images resized such that their minimum size is 520.
+The following semantic segmentation models are available, with or without
+pre-trained weights:
 
-The pre-trained models have been trained on a subset of COCO train2017, on the 20 categories that are
-present in the Pascal VOC dataset. You can see more information on how the subset has been selected in
-``references/segmentation/coco_utils.py``. The classes that the pre-trained model outputs are the following,
-in order:
+.. toctree::
+   :maxdepth: 1
-
-    .. code-block:: python
+   models/deeplabv3
+   models/fcn
+   models/lraspp
 
-        ['__background__', 'aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus',
-         'car', 'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse', 'motorbike',
-         'person', 'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor']
+|
 
-The accuracies of the pre-trained models evaluated on COCO val2017 are as follows
+Here is an example of how to use the pre-trained semantic segmentation models:
 
-================================  =============  ====================
-Network                           mean IoU       global pixelwise acc
-================================  =============  ====================
-FCN ResNet101                     63.7           91.9
-DeepLabV3 ResNet101               67.4           92.4
-================================  =============  ====================
+.. code:: python
+
+    from torchvision.io.image import decode_image
+    from torchvision.models.segmentation import fcn_resnet50, FCN_ResNet50_Weights
+    from torchvision.transforms.functional import to_pil_image
 
-Fully Convolutional Networks
-----------------------------
+    img = decode_image("gallery/assets/dog1.jpg")
 
-.. autofunction:: torchvision.models.segmentation.fcn_resnet50
-.. autofunction:: torchvision.models.segmentation.fcn_resnet101
+    # Step 1: Initialize model with the best available weights
+    weights = FCN_ResNet50_Weights.DEFAULT
+    model = fcn_resnet50(weights=weights)
+    model.eval()
 
+    # Step 2: Initialize the inference transforms
+    preprocess = weights.transforms()
 
-DeepLabV3
----------
+    # Step 3: Apply inference preprocessing transforms
+    batch = preprocess(img).unsqueeze(0)
 
-.. autofunction:: torchvision.models.segmentation.deeplabv3_resnet50
-.. autofunction:: torchvision.models.segmentation.deeplabv3_resnet101
+    # Step 4: Use the model and visualize the prediction
+    prediction = model(batch)["out"]
+    normalized_masks = prediction.softmax(dim=1)
+    class_to_idx = {cls: idx for (idx, cls) in enumerate(weights.meta["categories"])}
+    mask = normalized_masks[0, class_to_idx["dog"]]
+    to_pil_image(mask).show()
 
+The classes of the pre-trained model outputs can be found at ``weights.meta["categories"]``.
+The output format of the models is illustrated in :ref:`semantic_seg_output`.
 
-Object Detection, Instance Segmentation and Person Keypoint Detection
-=====================================================================
-The models subpackage contains definitions for the following model
-architectures for detection:
+Table of all available semantic segmentation weights
+----------------------------------------------------
 
-- `Faster R-CNN ResNet-50 FPN `_
-- `Mask R-CNN ResNet-50 FPN `_
+All models are evaluated on a subset of COCO val2017, on the 20 categories that are present in the Pascal VOC dataset:
+
+.. include:: generated/segmentation_table.rst
+
+
+.. _object_det_inst_seg_pers_keypoint_det:
+
+Object Detection, Instance Segmentation and Person Keypoint Detection
 =====================================================================
 
 The pre-trained models for detection, instance segmentation and
 keypoint detection are initialized with the classification models
-in torchvision.
-
-The models expect a list of ``Tensor[C, H, W]``, in the range ``0-1``.
-The models internally resize the images so that they have a minimum size
-of ``800``. This option can be changed by passing the option ``min_size``
-to the constructor of the models.
-
-
-For object detection and instance segmentation, the pre-trained
-models return the predictions of the following classes:
+in torchvision. The models expect a list of ``Tensor[C, H, W]``.
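+For instance, a minimal sketch of this calling convention (using the
+``fasterrcnn_resnet50_fpn`` builder documented below; images in a batch may
+have different sizes):
+
+.. code:: python
+
+    import torch
+    from torchvision.models.detection import fasterrcnn_resnet50_fpn
+
+    model = fasterrcnn_resnet50_fpn(weights=None)
+    model.eval()
+
+    # A batch is a plain Python list of 3-channel image tensors
+    images = [torch.rand(3, 480, 640), torch.rand(3, 512, 512)]
+    with torch.no_grad():
+        predictions = model(images)  # one Dict[str, Tensor] per input image
+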
+Check the constructor of the models for more information. + +.. betastatus:: detection module + +Object Detection +---------------- + +.. currentmodule:: torchvision.models.detection + +The following object detection models are available, with or without pre-trained +weights: + +.. toctree:: + :maxdepth: 1 + + models/faster_rcnn + models/fcos + models/retinanet + models/ssd + models/ssdlite + +| - .. code-block:: python +Here is an example of how to use the pre-trained object detection models: - COCO_INSTANCE_CATEGORY_NAMES = [ - '__background__', 'person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', - 'train', 'truck', 'boat', 'traffic light', 'fire hydrant', 'N/A', 'stop sign', - 'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep', 'cow', - 'elephant', 'bear', 'zebra', 'giraffe', 'N/A', 'backpack', 'umbrella', 'N/A', 'N/A', - 'handbag', 'tie', 'suitcase', 'frisbee', 'skis', 'snowboard', 'sports ball', - 'kite', 'baseball bat', 'baseball glove', 'skateboard', 'surfboard', 'tennis racket', - 'bottle', 'N/A', 'wine glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', - 'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot', 'hot dog', 'pizza', - 'donut', 'cake', 'chair', 'couch', 'potted plant', 'bed', 'N/A', 'dining table', - 'N/A', 'N/A', 'toilet', 'N/A', 'tv', 'laptop', 'mouse', 'remote', 'keyboard', 'cell phone', - 'microwave', 'oven', 'toaster', 'sink', 'refrigerator', 'N/A', 'book', - 'clock', 'vase', 'scissors', 'teddy bear', 'hair drier', 'toothbrush' - ] +.. code:: python + + + from torchvision.io.image import decode_image + from torchvision.models.detection import fasterrcnn_resnet50_fpn_v2, FasterRCNN_ResNet50_FPN_V2_Weights + from torchvision.utils import draw_bounding_boxes + from torchvision.transforms.functional import to_pil_image + + img = decode_image("test/assets/encode_jpeg/grace_hopper_517x606.jpg") + + # Step 1: Initialize model with the best available weights + weights = FasterRCNN_ResNet50_FPN_V2_Weights.DEFAULT + model = fasterrcnn_resnet50_fpn_v2(weights=weights, box_score_thresh=0.9) + model.eval() + + # Step 2: Initialize the inference transforms + preprocess = weights.transforms() + + # Step 3: Apply inference preprocessing transforms + batch = [preprocess(img)] + # Step 4: Use the model and visualize the prediction + prediction = model(batch)[0] + labels = [weights.meta["categories"][i] for i in prediction["labels"]] + box = draw_bounding_boxes(img, boxes=prediction["boxes"], + labels=labels, + colors="red", + width=4, font_size=30) + im = to_pil_image(box.detach()) + im.show() -Here are the summary of the accuracies for the models trained on -the instances set of COCO train2017 and evaluated on COCO val2017. +The classes of the pre-trained model outputs can be found at ``weights.meta["categories"]``. +For details on how to plot the bounding boxes of the models, you may refer to :ref:`instance_seg_output`. 
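+As a rough sketch of what the example above relies on, each element of the
+list returned by a detection model is a ``Dict[str, Tensor]`` whose core
+fields are the following:
+
+.. code:: python
+
+    # Continuing from the example above, where prediction = model(batch)[0]:
+    # prediction["boxes"]:  FloatTensor[N, 4], (x1, y1, x2, y2) image coordinates
+    # prediction["labels"]: Int64Tensor[N], indices into weights.meta["categories"]
+    # prediction["scores"]: FloatTensor[N], confidence of each detection
+    boxes, labels, scores = prediction["boxes"], prediction["labels"], prediction["scores"]
+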
-================================ ======= ======== =========== -Network box AP mask AP keypoint AP -================================ ======= ======== =========== -Faster R-CNN ResNet-50 FPN 37.0 - - -Mask R-CNN ResNet-50 FPN 37.9 34.6 - -================================ ======= ======== =========== +Table of all available Object detection weights +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -For person keypoint detection, the accuracies for the pre-trained -models are as follows +Box MAPs are reported on COCO val2017: -================================ ======= ======== =========== -Network box AP mask AP keypoint AP -================================ ======= ======== =========== -Keypoint R-CNN ResNet-50 FPN 54.6 - 65.0 -================================ ======= ======== =========== +.. include:: generated/detection_table.rst -For person keypoint detection, the pre-trained model return the -keypoints in the following order: - .. code-block:: python +Instance Segmentation +--------------------- - COCO_PERSON_KEYPOINT_NAMES = [ - 'nose', - 'left_eye', - 'right_eye', - 'left_ear', - 'right_ear', - 'left_shoulder', - 'right_shoulder', - 'left_elbow', - 'right_elbow', - 'left_wrist', - 'right_wrist', - 'left_hip', - 'right_hip', - 'left_knee', - 'right_knee', - 'left_ankle', - 'right_ankle' - ] +.. currentmodule:: torchvision.models.detection -Runtime characteristics ------------------------ +The following instance segmentation models are available, with or without pre-trained +weights: -The implementations of the models for object detection, instance segmentation -and keypoint detection are efficient. +.. toctree:: + :maxdepth: 1 -In the following table, we use 8 V100 GPUs, with CUDA 10.0 and CUDNN 7.4 to -report the results. During training, we use a batch size of 2 per GPU, and -during testing a batch size of 1 is used. + models/mask_rcnn -For test time, we report the time for the model evaluation and postprocessing -(including mask pasting in image), but not the time for computing the -precision-recall. +| -============================== =================== ================== =========== -Network train time (s / it) test time (s / it) memory (GB) -============================== =================== ================== =========== -Faster R-CNN ResNet-50 FPN 0.2288 0.0590 5.2 -Mask R-CNN ResNet-50 FPN 0.2728 0.0903 5.4 -Keypoint R-CNN ResNet-50 FPN 0.3789 0.1242 6.8 -============================== =================== ================== =========== +For details on how to plot the masks of the models, you may refer to :ref:`instance_seg_output`. -Faster R-CNN ------------- +Table of all available Instance segmentation weights +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. autofunction:: torchvision.models.detection.fasterrcnn_resnet50_fpn +Box and Mask MAPs are reported on COCO val2017: +.. include:: generated/instance_segmentation_table.rst -Mask R-CNN ----------- +Keypoint Detection +------------------ -.. autofunction:: torchvision.models.detection.maskrcnn_resnet50_fpn +.. currentmodule:: torchvision.models.detection +The following person keypoint detection models are available, with or without +pre-trained weights: -Keypoint R-CNN --------------- +.. toctree:: + :maxdepth: 1 -.. autofunction:: torchvision.models.detection.keypointrcnn_resnet50_fpn + models/keypoint_rcnn - -Video classification +| + +The classes of the pre-trained model outputs can be found at ``weights.meta["keypoint_names"]``. 
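+In addition to the usual ``boxes``, ``labels`` and ``scores`` fields, the
+predictions of these models also carry the detected keypoints; a minimal
+sketch (using the ``keypointrcnn_resnet50_fpn`` builder documented above):
+
+.. code:: python
+
+    import torch
+    from torchvision.models.detection import keypointrcnn_resnet50_fpn
+
+    model = keypointrcnn_resnet50_fpn(weights=None).eval()
+    with torch.no_grad():
+        prediction = model([torch.rand(3, 480, 640)])[0]
+
+    # "keypoints" has shape [N, K, 3]: an (x, y, visibility) triplet for each
+    # of the K keypoints of each of the N detected instances
+    print(prediction["keypoints"].shape, prediction["keypoints_scores"].shape)
+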
+For details on how to plot the bounding boxes of the models, you may refer to :ref:`keypoint_output`.
+
+Table of all available Keypoint detection weights
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Box and Keypoint MAPs are reported on COCO val2017:
+
+.. include:: generated/detection_keypoint_table.rst
+
+
+Video Classification
 ====================
 
-We provide models for action recognition pre-trained on Kinetics-400.
-They have all been trained with the scripts provided in ``references/video_classification``.
+.. currentmodule:: torchvision.models.video
 
-All pre-trained models expect input images normalized in the same way,
-i.e. mini-batches of 3-channel RGB videos of shape (3 x T x H x W),
-where H and W are expected to be 112, and T is a number of video frames in a clip.
-The images have to be loaded in to a range of [0, 1] and then normalized
-using ``mean = [0.43216, 0.394666, 0.37645]`` and ``std = [0.22803, 0.22145, 0.216989]``.
+.. betastatus:: video module
+
+The following video classification models are available, with or without
+pre-trained weights:
 
-.. note::
-    The normalization parameters are different from the image classification ones, and correspond
-    to the mean and std from Kinetics-400.
+.. toctree::
+   :maxdepth: 1
 
-.. note::
-    For now, normalization code can be found in ``references/video_classification/transforms.py``,
-    see the ``Normalize`` function there. Note that it differs from standard normalization for
-    images because it assumes the video is 4d.
+   models/video_mvit
+   models/video_resnet
+   models/video_s3d
+   models/video_swin_transformer
+
+|
+
+Here is an example of how to use the pre-trained video classification models:
+
+.. code:: python
+
+
+    from torchvision.io.video import read_video
+    from torchvision.models.video import r3d_18, R3D_18_Weights
+
+    vid, _, _ = read_video("test/assets/videos/v_SoccerJuggling_g23_c01.avi", output_format="TCHW")
+    vid = vid[:32]  # optionally shorten duration
+
+    # Step 1: Initialize model with the best available weights
+    weights = R3D_18_Weights.DEFAULT
+    model = r3d_18(weights=weights)
+    model.eval()
+
+    # Step 2: Initialize the inference transforms
+    preprocess = weights.transforms()
+
+    # Step 3: Apply inference preprocessing transforms
+    batch = preprocess(vid).unsqueeze(0)
+
+    # Step 4: Use the model and print the predicted category
+    prediction = model(batch).squeeze(0).softmax(0)
+    label = prediction.argmax().item()
+    score = prediction[label].item()
+    category_name = weights.meta["categories"][label]
+    print(f"{category_name}: {100 * score}%")
+
+The classes of the pre-trained model outputs can be found at ``weights.meta["categories"]``.
 
-Kinetics 1-crop accuracies for clip length 16 (16x112x112)
-
-================================  =============  =============
-Network                           Clip acc@1     Clip acc@5
-================================  =============  =============
-ResNet 3D 18                      52.75          75.45
-ResNet MC 18                      53.90          76.29
-ResNet (2+1)D                     57.50          78.81
-================================  =============  =============
+Table of all available video classification weights
+---------------------------------------------------
+
+Accuracies are reported on Kinetics-400 using single crops for clip length 16:
 
-ResNet 3D
-----------
+.. include:: generated/video_table.rst
 
-.. autofunction:: torchvision.models.video.r3d_18
+Optical Flow
+============
 
-ResNet Mixed Convolution
-------------------------
+.. currentmodule:: torchvision.models.optical_flow
 
-.. autofunction:: torchvision.models.video.mc3_18
+The following Optical Flow models are available, with or without pre-trained
+weights:
 
-ResNet (2+1)D
--------------
+.. toctree::
+   :maxdepth: 1
 
-.. autofunction:: torchvision.models.video.r2plus1d_18
+   models/raft
diff --git a/docs/source/models/alexnet.rst b/docs/source/models/alexnet.rst
new file mode 100644
index 00000000000..8e94b4eeed9
--- /dev/null
+++ b/docs/source/models/alexnet.rst
@@ -0,0 +1,28 @@
+AlexNet
+=======
+
+.. currentmodule:: torchvision.models
+
+The AlexNet model was originally introduced in the
+`ImageNet Classification with Deep Convolutional Neural Networks
+`__
+paper. The implemented architecture is slightly different from the original one,
+and is based on `One weird trick for parallelizing convolutional neural networks
+`__.
+
+
+Model builders
+--------------
+
+The following model builders can be used to instantiate an AlexNet model, with or
+without pre-trained weights. All the model builders internally rely on the
+``torchvision.models.alexnet.AlexNet`` base class. Please refer to the `source
+code
+`_ for
+more details about this class.
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    alexnet
diff --git a/docs/source/models/convnext.rst b/docs/source/models/convnext.rst
new file mode 100644
index 00000000000..f484bf63d94
--- /dev/null
+++ b/docs/source/models/convnext.rst
@@ -0,0 +1,26 @@
+ConvNeXt
+========
+
+.. currentmodule:: torchvision.models
+
+The ConvNeXt model is based on the `A ConvNet for the 2020s
+`_ paper.
+
+
+Model builders
+--------------
+
+The following model builders can be used to instantiate a ConvNeXt model, with or
+without pre-trained weights. All the model builders internally rely on the
+``torchvision.models.convnext.ConvNeXt`` base class. Please refer to the `source code
+`_ for
+more details about this class.
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    convnext_tiny
+    convnext_small
+    convnext_base
+    convnext_large
diff --git a/docs/source/models/deeplabv3.rst b/docs/source/models/deeplabv3.rst
new file mode 100644
index 00000000000..e6f21686081
--- /dev/null
+++ b/docs/source/models/deeplabv3.rst
@@ -0,0 +1,28 @@
+DeepLabV3
+=========
+
+.. currentmodule:: torchvision.models.segmentation
+
+The DeepLabV3 model is based on the `Rethinking Atrous Convolution for Semantic
+Image Segmentation `__ paper.
+
+.. betastatus:: segmentation module
+
+
+Model builders
+--------------
+
+The following model builders can be used to instantiate a DeepLabV3 model with
+different backbones, with or without pre-trained weights. All the model builders
+internally rely on the ``torchvision.models.segmentation.deeplabv3.DeepLabV3`` base class. Please
+refer to the `source code
+`_
+for more details about this class.
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    deeplabv3_mobilenet_v3_large
+    deeplabv3_resnet50
+    deeplabv3_resnet101
diff --git a/docs/source/models/densenet.rst b/docs/source/models/densenet.rst
new file mode 100644
index 00000000000..ee984886925
--- /dev/null
+++ b/docs/source/models/densenet.rst
@@ -0,0 +1,27 @@
+DenseNet
+========
+
+.. currentmodule:: torchvision.models
+
+The DenseNet model is based on the `Densely Connected Convolutional Networks
+`_ paper.
+
+
+Model builders
+--------------
+
+The following model builders can be used to instantiate a DenseNet model, with or
+without pre-trained weights. All the model builders internally rely on the
+``torchvision.models.densenet.DenseNet`` base class. 
Please refer to the `source +code +`_ for +more details about this class. + +.. autosummary:: + :toctree: generated/ + :template: function.rst + + densenet121 + densenet161 + densenet169 + densenet201 diff --git a/docs/source/models/efficientnet.rst b/docs/source/models/efficientnet.rst new file mode 100644 index 00000000000..cbc9718959a --- /dev/null +++ b/docs/source/models/efficientnet.rst @@ -0,0 +1,31 @@ +EfficientNet +============ + +.. currentmodule:: torchvision.models + +The EfficientNet model is based on the `EfficientNet: Rethinking Model Scaling for Convolutional Neural Networks `__ +paper. + + +Model builders +-------------- + +The following model builders can be used to instantiate an EfficientNet model, with or +without pre-trained weights. All the model builders internally rely on the +``torchvision.models.efficientnet.EfficientNet`` base class. Please refer to the `source +code +`_ for +more details about this class. + +.. autosummary:: + :toctree: generated/ + :template: function.rst + + efficientnet_b0 + efficientnet_b1 + efficientnet_b2 + efficientnet_b3 + efficientnet_b4 + efficientnet_b5 + efficientnet_b6 + efficientnet_b7 diff --git a/docs/source/models/efficientnetv2.rst b/docs/source/models/efficientnetv2.rst new file mode 100644 index 00000000000..3066c28ebd4 --- /dev/null +++ b/docs/source/models/efficientnetv2.rst @@ -0,0 +1,26 @@ +EfficientNetV2 +============== + +.. currentmodule:: torchvision.models + +The EfficientNetV2 model is based on the `EfficientNetV2: Smaller Models and Faster Training `__ +paper. + + +Model builders +-------------- + +The following model builders can be used to instantiate an EfficientNetV2 model, with or +without pre-trained weights. All the model builders internally rely on the +``torchvision.models.efficientnet.EfficientNet`` base class. Please refer to the `source +code +`_ for +more details about this class. + +.. autosummary:: + :toctree: generated/ + :template: function.rst + + efficientnet_v2_s + efficientnet_v2_m + efficientnet_v2_l diff --git a/docs/source/models/faster_rcnn.rst b/docs/source/models/faster_rcnn.rst new file mode 100644 index 00000000000..19ec9227886 --- /dev/null +++ b/docs/source/models/faster_rcnn.rst @@ -0,0 +1,31 @@ +Faster R-CNN +============ + +.. currentmodule:: torchvision.models.detection + + +The Faster R-CNN model is based on the `Faster R-CNN: Towards Real-Time Object Detection +with Region Proposal Networks `__ +paper. + +.. betastatus:: detection module + +Model builders +-------------- + +The following model builders can be used to instantiate a Faster R-CNN model, with or +without pre-trained weights. All the model builders internally rely on the +``torchvision.models.detection.faster_rcnn.FasterRCNN`` base class. Please refer to the `source +code +`_ for +more details about this class. + +.. autosummary:: + :toctree: generated/ + :template: function.rst + + fasterrcnn_resnet50_fpn + fasterrcnn_resnet50_fpn_v2 + fasterrcnn_mobilenet_v3_large_fpn + fasterrcnn_mobilenet_v3_large_320_fpn + diff --git a/docs/source/models/fcn.rst b/docs/source/models/fcn.rst new file mode 100644 index 00000000000..efcdb37c0d5 --- /dev/null +++ b/docs/source/models/fcn.rst @@ -0,0 +1,28 @@ +FCN +=== + +.. currentmodule:: torchvision.models.segmentation + +The FCN model is based on the `Fully Convolutional Networks for Semantic +Segmentation `__ +paper. + +.. 
betastatus:: segmentation module + + +Model builders +-------------- + +The following model builders can be used to instantiate a FCN model, with or +without pre-trained weights. All the model builders internally rely on the +``torchvision.models.segmentation.FCN`` base class. Please refer to the `source +code +`_ for +more details about this class. + +.. autosummary:: + :toctree: generated/ + :template: function.rst + + fcn_resnet50 + fcn_resnet101 diff --git a/docs/source/models/fcos.rst b/docs/source/models/fcos.rst new file mode 100644 index 00000000000..085f26549b8 --- /dev/null +++ b/docs/source/models/fcos.rst @@ -0,0 +1,24 @@ +FCOS +========= + +.. currentmodule:: torchvision.models.detection + +The FCOS model is based on the `FCOS: Fully Convolutional One-Stage Object Detection +`__ paper. + +.. betastatus:: detection module + +Model builders +-------------- + +The following model builders can be used to instantiate a FCOS model, with or +without pre-trained weights. All the model builders internally rely on the +``torchvision.models.detection.fcos.FCOS`` base class. Please refer to the `source code +`_ for +more details about this class. + +.. autosummary:: + :toctree: generated/ + :template: function.rst + + fcos_resnet50_fpn diff --git a/docs/source/models/googlenet.rst b/docs/source/models/googlenet.rst new file mode 100644 index 00000000000..91ea03ddf3d --- /dev/null +++ b/docs/source/models/googlenet.rst @@ -0,0 +1,24 @@ +GoogLeNet +========= + +.. currentmodule:: torchvision.models + +The GoogleNet model is based on the `Going Deeper with Convolutions `__ +paper. + + +Model builders +-------------- + +The following model builders can be used to instantiate a GoogLeNet model, with or +without pre-trained weights. All the model builders internally rely on the +``torchvision.models.googlenet.GoogLeNet`` base class. Please refer to the `source +code +`_ for +more details about this class. + +.. autosummary:: + :toctree: generated/ + :template: function.rst + + googlenet diff --git a/docs/source/models/googlenet_quant.rst b/docs/source/models/googlenet_quant.rst new file mode 100644 index 00000000000..4358389b3e5 --- /dev/null +++ b/docs/source/models/googlenet_quant.rst @@ -0,0 +1,24 @@ +Quantized GoogLeNet +=================== + +.. currentmodule:: torchvision.models.quantization + +The Quantized GoogleNet model is based on the `Going Deeper with Convolutions `__ +paper. + + +Model builders +-------------- + +The following model builders can be used to instantiate a quantized GoogLeNet +model, with or without pre-trained weights. All the model builders internally +rely on the ``torchvision.models.quantization.googlenet.QuantizableGoogLeNet`` +base class. Please refer to the `source code +`_ +for more details about this class. + +.. autosummary:: + :toctree: generated/ + :template: function.rst + + googlenet diff --git a/docs/source/models/inception.rst b/docs/source/models/inception.rst new file mode 100644 index 00000000000..e162eef5d30 --- /dev/null +++ b/docs/source/models/inception.rst @@ -0,0 +1,23 @@ +Inception V3 +============ + +.. currentmodule:: torchvision.models + +The InceptionV3 model is based on the `Rethinking the Inception Architecture for +Computer Vision `__ paper. + + +Model builders +-------------- + +The following model builders can be used to instantiate an InceptionV3 model, with or +without pre-trained weights. All the model builders internally rely on the +``torchvision.models.inception.Inception3`` base class. 
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    inception_v3
diff --git a/docs/source/models/inception_quant.rst b/docs/source/models/inception_quant.rst
new file mode 100644
index 00000000000..d26f1ab09da
--- /dev/null
+++ b/docs/source/models/inception_quant.rst
@@ -0,0 +1,24 @@
+Quantized InceptionV3
+=====================
+
+.. currentmodule:: torchvision.models.quantization
+
+The Quantized InceptionV3 model is based on the `Rethinking the Inception Architecture for
+Computer Vision `__ paper.
+
+
+Model builders
+--------------
+
+The following model builders can be used to instantiate a quantized Inception
+model, with or without pre-trained weights. All the model builders internally
+rely on the ``torchvision.models.quantization.inception.QuantizableInception3``
+base class. Please refer to the `source code
+`_
+for more details about this class.
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    inception_v3
diff --git a/docs/source/models/keypoint_rcnn.rst b/docs/source/models/keypoint_rcnn.rst
new file mode 100644
index 00000000000..ba677c7f8f3
--- /dev/null
+++ b/docs/source/models/keypoint_rcnn.rst
@@ -0,0 +1,26 @@
+Keypoint R-CNN
+==============
+
+.. currentmodule:: torchvision.models.detection
+
+The Keypoint R-CNN model is based on the `Mask R-CNN
+`__ paper.
+
+.. betastatus:: detection module
+
+
+Model builders
+--------------
+
+The following model builders can be used to instantiate a Keypoint R-CNN model,
+with or without pre-trained weights. All the model builders internally rely on
+the ``torchvision.models.detection.KeypointRCNN`` base class. Please refer to the `source
+code
+`__
+for more details about this class.
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    keypointrcnn_resnet50_fpn
diff --git a/docs/source/models/lraspp.rst b/docs/source/models/lraspp.rst
new file mode 100644
index 00000000000..312249c53e1
--- /dev/null
+++ b/docs/source/models/lraspp.rst
@@ -0,0 +1,24 @@
+LRASPP
+======
+
+.. currentmodule:: torchvision.models.segmentation
+
+The LRASPP model is based on the `Searching for MobileNetV3 `_ paper.
+
+.. betastatus:: segmentation module
+
+Model builders
+--------------
+
+The following model builders can be used to instantiate an LRASPP model, with or
+without pre-trained weights. All the model builders internally rely on the
+``torchvision.models.segmentation.LRASPP`` base class. Please refer to the `source
+code
+`_ for
+more details about this class.
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    lraspp_mobilenet_v3_large
diff --git a/docs/source/models/mask_rcnn.rst b/docs/source/models/mask_rcnn.rst
new file mode 100644
index 00000000000..5887b6c71a6
--- /dev/null
+++ b/docs/source/models/mask_rcnn.rst
@@ -0,0 +1,27 @@
+Mask R-CNN
+==========
+
+.. currentmodule:: torchvision.models.detection
+
+The Mask R-CNN model is based on the `Mask R-CNN `__
+paper.
+
+.. betastatus:: detection module
+
+
+Model builders
+--------------
+
+The following model builders can be used to instantiate a Mask R-CNN model, with or
+without pre-trained weights. All the model builders internally rely on the
+``torchvision.models.detection.mask_rcnn.MaskRCNN`` base class. Please refer to the `source
+code
+`_ for
+more details about this class.
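+
+A minimal usage sketch (hedged; assumes the multi-weight API from torchvision
+0.13+). In eval mode, detection models take a list of 3D image tensors and
+return one dict of predictions per image:
+
+.. code:: python
+
+    import torch
+    from torchvision.models.detection import maskrcnn_resnet50_fpn
+
+    model = maskrcnn_resnet50_fpn(weights="DEFAULT")
+    model.eval()
+    predictions = model([torch.rand(3, 320, 320)])  # boxes, labels, scores, masks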
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    maskrcnn_resnet50_fpn
+    maskrcnn_resnet50_fpn_v2
diff --git a/docs/source/models/maxvit.rst b/docs/source/models/maxvit.rst
new file mode 100644
index 00000000000..29aaaaab334
--- /dev/null
+++ b/docs/source/models/maxvit.rst
@@ -0,0 +1,23 @@
+MaxVit
+===============
+
+.. currentmodule:: torchvision.models
+
+The MaxVit transformer models are based on the `MaxViT: Multi-Axis Vision Transformer `__
+paper.
+
+
+Model builders
+--------------
+
+The following model builders can be used to instantiate a MaxVit model, with or without pre-trained weights.
+All the model builders internally rely on the ``torchvision.models.maxvit.MaxVit``
+base class. Please refer to the `source code
+`_ for
+more details about this class.
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    maxvit_t
diff --git a/docs/source/models/mnasnet.rst b/docs/source/models/mnasnet.rst
new file mode 100644
index 00000000000..fd9ea511585
--- /dev/null
+++ b/docs/source/models/mnasnet.rst
@@ -0,0 +1,28 @@
+MNASNet
+=======
+
+.. currentmodule:: torchvision.models
+
+
+The MNASNet model is based on the `MnasNet: Platform-Aware Neural Architecture
+Search for Mobile `__ paper.
+
+
+Model builders
+--------------
+
+The following model builders can be used to instantiate an MNASNet model.
+All the model builders internally rely on the
+``torchvision.models.mnasnet.MNASNet`` base class. Please refer to the `source
+code
+`_ for
+more details about this class.
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    mnasnet0_5
+    mnasnet0_75
+    mnasnet1_0
+    mnasnet1_3
diff --git a/docs/source/models/mobilenetv2.rst b/docs/source/models/mobilenetv2.rst
new file mode 100644
index 00000000000..666dcce57ce
--- /dev/null
+++ b/docs/source/models/mobilenetv2.rst
@@ -0,0 +1,24 @@
+MobileNet V2
+============
+
+.. currentmodule:: torchvision.models
+
+The MobileNet V2 model is based on the `MobileNetV2: Inverted Residuals and Linear
+Bottlenecks `__ paper.
+
+
+Model builders
+--------------
+
+The following model builders can be used to instantiate a MobileNetV2 model, with or
+without pre-trained weights. All the model builders internally rely on the
+``torchvision.models.mobilenetv2.MobileNetV2`` base class. Please refer to the `source
+code
+`_ for
+more details about this class.
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    mobilenet_v2
diff --git a/docs/source/models/mobilenetv2_quant.rst b/docs/source/models/mobilenetv2_quant.rst
new file mode 100644
index 00000000000..e5397378fab
--- /dev/null
+++ b/docs/source/models/mobilenetv2_quant.rst
@@ -0,0 +1,24 @@
+Quantized MobileNet V2
+======================
+
+.. currentmodule:: torchvision.models.quantization
+
+The Quantized MobileNet V2 model is based on the `MobileNetV2: Inverted Residuals and Linear
+Bottlenecks `__ paper.
+
+
+Model builders
+--------------
+
+The following model builders can be used to instantiate a quantized MobileNetV2
+model, with or without pre-trained weights. All the model builders internally
+rely on the ``torchvision.models.quantization.mobilenetv2.QuantizableMobileNetV2``
+base class. Please refer to the `source code
+`_
+for more details about this class.
+
+.. 
autosummary:: + :toctree: generated/ + :template: function.rst + + mobilenet_v2 diff --git a/docs/source/models/mobilenetv3.rst b/docs/source/models/mobilenetv3.rst new file mode 100644 index 00000000000..4322470286d --- /dev/null +++ b/docs/source/models/mobilenetv3.rst @@ -0,0 +1,24 @@ +MobileNet V3 +============ + +.. currentmodule:: torchvision.models + +The MobileNet V3 model is based on the `Searching for MobileNetV3 `__ paper. + + +Model builders +-------------- + +The following model builders can be used to instantiate a MobileNetV3 model, with or +without pre-trained weights. All the model builders internally rely on the +``torchvision.models.mobilenetv3.MobileNetV3`` base class. Please refer to the `source +code +`_ for +more details about this class. + +.. autosummary:: + :toctree: generated/ + :template: function.rst + + mobilenet_v3_large + mobilenet_v3_small diff --git a/docs/source/models/mobilenetv3_quant.rst b/docs/source/models/mobilenetv3_quant.rst new file mode 100644 index 00000000000..fe385b493e5 --- /dev/null +++ b/docs/source/models/mobilenetv3_quant.rst @@ -0,0 +1,23 @@ +Quantized MobileNet V3 +====================== + +.. currentmodule:: torchvision.models.quantization + +The Quantized MobileNet V3 model is based on the `Searching for MobileNetV3 `__ paper. + + +Model builders +-------------- + +The following model builders can be used to instantiate a quantized MobileNetV3 +model, with or without pre-trained weights. All the model builders internally +rely on the ``torchvision.models.quantization.mobilenetv3.QuantizableMobileNetV3`` +base class. Please refer to the `source code +`_ +for more details about this class. + +.. autosummary:: + :toctree: generated/ + :template: function.rst + + mobilenet_v3_large diff --git a/docs/source/models/raft.rst b/docs/source/models/raft.rst new file mode 100644 index 00000000000..7ea477698b4 --- /dev/null +++ b/docs/source/models/raft.rst @@ -0,0 +1,25 @@ +RAFT +==== + +.. currentmodule:: torchvision.models.optical_flow + +The RAFT model is based on the `RAFT: Recurrent All-Pairs Field Transforms for +Optical Flow `__ paper. + + +Model builders +-------------- + +The following model builders can be used to instantiate a RAFT model, with or +without pre-trained weights. All the model builders internally rely on the +``torchvision.models.optical_flow.RAFT`` base class. Please refer to the `source +code +`_ for +more details about this class. + +.. autosummary:: + :toctree: generated/ + :template: function.rst + + raft_large + raft_small diff --git a/docs/source/models/regnet.rst b/docs/source/models/regnet.rst new file mode 100644 index 00000000000..aef4abd2544 --- /dev/null +++ b/docs/source/models/regnet.rst @@ -0,0 +1,37 @@ +RegNet +====== + +.. currentmodule:: torchvision.models + +The RegNet model is based on the `Designing Network Design Spaces +`_ paper. + + +Model builders +-------------- + +The following model builders can be used to instantiate a RegNet model, with or +without pre-trained weights. All the model builders internally rely on the +``torchvision.models.regnet.RegNet`` base class. Please refer to the `source code +`_ for +more details about this class. + +.. 
autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    regnet_y_400mf
+    regnet_y_800mf
+    regnet_y_1_6gf
+    regnet_y_3_2gf
+    regnet_y_8gf
+    regnet_y_16gf
+    regnet_y_32gf
+    regnet_y_128gf
+    regnet_x_400mf
+    regnet_x_800mf
+    regnet_x_1_6gf
+    regnet_x_3_2gf
+    regnet_x_8gf
+    regnet_x_16gf
+    regnet_x_32gf
diff --git a/docs/source/models/resnet.rst b/docs/source/models/resnet.rst
new file mode 100644
index 00000000000..9d777f2f6b1
--- /dev/null
+++ b/docs/source/models/resnet.rst
@@ -0,0 +1,33 @@
+ResNet
+======
+
+.. currentmodule:: torchvision.models
+
+The ResNet model is based on the `Deep Residual Learning for Image Recognition
+`_ paper.
+
+.. note::
+    TorchVision's bottleneck places the stride for downsampling at the second 3x3
+    convolution, while the original paper places it at the first 1x1 convolution.
+    This variant improves the accuracy and is known as `ResNet V1.5
+    `_.
+
+Model builders
+--------------
+
+The following model builders can be used to instantiate a ResNet model, with or
+without pre-trained weights. All the model builders internally rely on the
+``torchvision.models.resnet.ResNet`` base class. Please refer to the `source
+code
+`_ for
+more details about this class.
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    resnet18
+    resnet34
+    resnet50
+    resnet101
+    resnet152
diff --git a/docs/source/models/resnet_quant.rst b/docs/source/models/resnet_quant.rst
new file mode 100644
index 00000000000..5609990646c
--- /dev/null
+++ b/docs/source/models/resnet_quant.rst
@@ -0,0 +1,25 @@
+Quantized ResNet
+================
+
+.. currentmodule:: torchvision.models.quantization
+
+The Quantized ResNet model is based on the `Deep Residual Learning for Image Recognition
+`_ paper.
+
+
+Model builders
+--------------
+
+The following model builders can be used to instantiate a quantized ResNet
+model, with or without pre-trained weights. All the model builders internally
+rely on the ``torchvision.models.quantization.resnet.QuantizableResNet``
+base class. Please refer to the `source code
+`_
+for more details about this class.
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    resnet18
+    resnet50
diff --git a/docs/source/models/resnext.rst b/docs/source/models/resnext.rst
new file mode 100644
index 00000000000..5d8325d9b4b
--- /dev/null
+++ b/docs/source/models/resnext.rst
@@ -0,0 +1,26 @@
+ResNeXt
+=======
+
+.. currentmodule:: torchvision.models
+
+The ResNeXt model is based on the `Aggregated Residual Transformations for Deep Neural Networks `__
+paper.
+
+
+Model builders
+--------------
+
+The following model builders can be used to instantiate a ResNeXt model, with or
+without pre-trained weights. All the model builders internally rely on the
+``torchvision.models.resnet.ResNet`` base class. Please refer to the `source
+code
+`_ for
+more details about this class.
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    resnext50_32x4d
+    resnext101_32x8d
+    resnext101_64x4d
diff --git a/docs/source/models/resnext_quant.rst b/docs/source/models/resnext_quant.rst
new file mode 100644
index 00000000000..916b9e4a39a
--- /dev/null
+++ b/docs/source/models/resnext_quant.rst
@@ -0,0 +1,25 @@
+Quantized ResNeXt
+=================
+
+.. currentmodule:: torchvision.models.quantization
+
+The quantized ResNeXt model is based on the `Aggregated Residual Transformations for Deep Neural Networks `__
+paper.
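+
+A minimal usage sketch (hedged; assumes the multi-weight API and the
+``quantize`` flag of the quantization builders listed in the next section):
+
+.. code:: python
+
+    from torchvision.models.quantization import resnext101_32x8d
+
+    # quantize=True returns a quantized model with pre-trained quantized weights
+    model = resnext101_32x8d(weights="DEFAULT", quantize=True)
+    model.eval()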
+ + +Model builders +-------------- + +The following model builders can be used to instantiate a quantized ResNeXt +model, with or without pre-trained weights. All the model builders internally +rely on the ``torchvision.models.quantization.resnet.QuantizableResNet`` +base class. Please refer to the `source code +`_ +for more details about this class. + +.. autosummary:: + :toctree: generated/ + :template: function.rst + + resnext101_32x8d + resnext101_64x4d diff --git a/docs/source/models/retinanet.rst b/docs/source/models/retinanet.rst new file mode 100644 index 00000000000..910692ef3a5 --- /dev/null +++ b/docs/source/models/retinanet.rst @@ -0,0 +1,25 @@ +RetinaNet +========= + +.. currentmodule:: torchvision.models.detection + +The RetinaNet model is based on the `Focal Loss for Dense Object Detection +`__ paper. + +.. betastatus:: detection module + +Model builders +-------------- + +The following model builders can be used to instantiate a RetinaNet model, with or +without pre-trained weights. All the model builders internally rely on the +``torchvision.models.detection.retinanet.RetinaNet`` base class. Please refer to the `source code +`_ for +more details about this class. + +.. autosummary:: + :toctree: generated/ + :template: function.rst + + retinanet_resnet50_fpn + retinanet_resnet50_fpn_v2 diff --git a/docs/source/models/shufflenetv2.rst b/docs/source/models/shufflenetv2.rst new file mode 100644 index 00000000000..2cbe328ca8b --- /dev/null +++ b/docs/source/models/shufflenetv2.rst @@ -0,0 +1,27 @@ +ShuffleNet V2 +============= + +.. currentmodule:: torchvision.models + +The ShuffleNet V2 model is based on the `ShuffleNet V2: Practical Guidelines for Efficient +CNN Architecture Design `__ paper. + + +Model builders +-------------- + +The following model builders can be used to instantiate a ShuffleNetV2 model, with or +without pre-trained weights. All the model builders internally rely on the +``torchvision.models.shufflenetv2.ShuffleNetV2`` base class. Please refer to the `source +code +`_ for +more details about this class. + +.. autosummary:: + :toctree: generated/ + :template: function.rst + + shufflenet_v2_x0_5 + shufflenet_v2_x1_0 + shufflenet_v2_x1_5 + shufflenet_v2_x2_0 diff --git a/docs/source/models/shufflenetv2_quant.rst b/docs/source/models/shufflenetv2_quant.rst new file mode 100644 index 00000000000..4fa236d2565 --- /dev/null +++ b/docs/source/models/shufflenetv2_quant.rst @@ -0,0 +1,27 @@ +Quantized ShuffleNet V2 +======================= + +.. currentmodule:: torchvision.models.quantization + +The Quantized ShuffleNet V2 model is based on the `ShuffleNet V2: Practical Guidelines for Efficient +CNN Architecture Design `__ paper. + + +Model builders +-------------- + +The following model builders can be used to instantiate a quantized ShuffleNetV2 +model, with or without pre-trained weights. All the model builders internally rely +on the ``torchvision.models.quantization.shufflenetv2.QuantizableShuffleNetV2`` +base class. Please refer to the `source code +`_ +for more details about this class. + +.. autosummary:: + :toctree: generated/ + :template: function.rst + + shufflenet_v2_x0_5 + shufflenet_v2_x1_0 + shufflenet_v2_x1_5 + shufflenet_v2_x2_0 diff --git a/docs/source/models/squeezenet.rst b/docs/source/models/squeezenet.rst new file mode 100644 index 00000000000..9771e5c623a --- /dev/null +++ b/docs/source/models/squeezenet.rst @@ -0,0 +1,26 @@ +SqueezeNet +========== + +.. 
currentmodule:: torchvision.models
+
+The SqueezeNet model is based on the `SqueezeNet: AlexNet-level accuracy with
+50x fewer parameters and <0.5MB model size `__
+paper.
+
+
+Model builders
+--------------
+
+The following model builders can be used to instantiate a SqueezeNet model, with or
+without pre-trained weights. All the model builders internally rely on the
+``torchvision.models.squeezenet.SqueezeNet`` base class. Please refer to the `source
+code
+`_ for
+more details about this class.
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    squeezenet1_0
+    squeezenet1_1
diff --git a/docs/source/models/ssd.rst b/docs/source/models/ssd.rst
new file mode 100644
index 00000000000..68b0bb224df
--- /dev/null
+++ b/docs/source/models/ssd.rst
@@ -0,0 +1,26 @@
+SSD
+===
+
+.. currentmodule:: torchvision.models.detection
+
+The SSD model is based on the `SSD: Single Shot MultiBox Detector
+`__ paper.
+
+.. betastatus:: detection module
+
+
+Model builders
+--------------
+
+The following model builders can be used to instantiate an SSD model, with or
+without pre-trained weights. All the model builders internally rely on the
+``torchvision.models.detection.SSD`` base class. Please refer to the `source
+code
+`_ for
+more details about this class.
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    ssd300_vgg16
diff --git a/docs/source/models/ssdlite.rst b/docs/source/models/ssdlite.rst
new file mode 100644
index 00000000000..7701d1c9f9f
--- /dev/null
+++ b/docs/source/models/ssdlite.rst
@@ -0,0 +1,27 @@
+SSDlite
+=======
+
+.. currentmodule:: torchvision.models.detection
+
+The SSDLite model is based on the `SSD: Single Shot MultiBox Detector
+`__, `Searching for MobileNetV3
+`__ and `MobileNetV2: Inverted Residuals and Linear
+Bottlenecks `__ papers.
+
+.. betastatus:: detection module
+
+Model builders
+--------------
+
+The following model builders can be used to instantiate an SSDLite model, with or
+without pre-trained weights. All the model builders internally rely on the
+``torchvision.models.detection.ssd.SSD`` base class. Please refer to the `source
+code
+`_ for
+more details about this class.
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    ssdlite320_mobilenet_v3_large
diff --git a/docs/source/models/swin_transformer.rst b/docs/source/models/swin_transformer.rst
new file mode 100644
index 00000000000..b302f5bd79d
--- /dev/null
+++ b/docs/source/models/swin_transformer.rst
@@ -0,0 +1,32 @@
+SwinTransformer
+===============
+
+.. currentmodule:: torchvision.models
+
+The SwinTransformer models are based on the `Swin Transformer: Hierarchical Vision
+Transformer using Shifted Windows `__
+paper.
+SwinTransformer V2 models are based on the `Swin Transformer V2: Scaling Up Capacity
+and Resolution `__
+paper.
+
+
+Model builders
+--------------
+
+The following model builders can be used to instantiate a SwinTransformer model (original and V2), with or without pre-trained weights.
+All the model builders internally rely on the ``torchvision.models.swin_transformer.SwinTransformer``
+base class. Please refer to the `source code
+`_ for
+more details about this class.
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    swin_t
+    swin_s
+    swin_b
+    swin_v2_t
+    swin_v2_s
+    swin_v2_b
diff --git a/docs/source/models/vgg.rst b/docs/source/models/vgg.rst
new file mode 100644
index 00000000000..77b5686927c
--- /dev/null
+++ b/docs/source/models/vgg.rst
@@ -0,0 +1,30 @@
+VGG
+===
+
+.. currentmodule:: torchvision.models
+
+The VGG model is based on the `Very Deep Convolutional Networks for Large-Scale
+Image Recognition `_ paper.
+
+
+Model builders
+--------------
+
+The following model builders can be used to instantiate a VGG model, with or
+without pre-trained weights. All the model builders internally rely on the
+``torchvision.models.vgg.VGG`` base class. Please refer to the `source code
+`_ for
+more details about this class.
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    vgg11
+    vgg11_bn
+    vgg13
+    vgg13_bn
+    vgg16
+    vgg16_bn
+    vgg19
+    vgg19_bn
diff --git a/docs/source/models/video_mvit.rst b/docs/source/models/video_mvit.rst
new file mode 100644
index 00000000000..cd23754b7bb
--- /dev/null
+++ b/docs/source/models/video_mvit.rst
@@ -0,0 +1,27 @@
+Video MViT
+==========
+
+.. currentmodule:: torchvision.models.video
+
+The MViT model is based on the
+`MViTv2: Improved Multiscale Vision Transformers for Classification and Detection
+`__ and `Multiscale Vision Transformers
+`__ papers.
+
+
+Model builders
+--------------
+
+The following model builders can be used to instantiate an MViT v1 or v2 model, with or
+without pre-trained weights. All the model builders internally rely on the
+``torchvision.models.video.MViT`` base class. Please refer to the `source
+code
+`_ for
+more details about this class.
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    mvit_v1_b
+    mvit_v2_s
diff --git a/docs/source/models/video_resnet.rst b/docs/source/models/video_resnet.rst
new file mode 100644
index 00000000000..ecb707b4eeb
--- /dev/null
+++ b/docs/source/models/video_resnet.rst
@@ -0,0 +1,28 @@
+Video ResNet
+============
+
+.. currentmodule:: torchvision.models.video
+
+The VideoResNet model is based on the `A Closer Look at Spatiotemporal
+Convolutions for Action Recognition `__ paper.
+
+.. betastatus:: video module
+
+
+Model builders
+--------------
+
+The following model builders can be used to instantiate a VideoResNet model, with or
+without pre-trained weights. All the model builders internally rely on the
+``torchvision.models.video.resnet.VideoResNet`` base class. Please refer to the `source
+code
+`_ for
+more details about this class.
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    r3d_18
+    mc3_18
+    r2plus1d_18
diff --git a/docs/source/models/video_s3d.rst b/docs/source/models/video_s3d.rst
new file mode 100644
index 00000000000..0d66c55487c
--- /dev/null
+++ b/docs/source/models/video_s3d.rst
@@ -0,0 +1,25 @@
+Video S3D
+=========
+
+.. currentmodule:: torchvision.models.video
+
+The S3D model is based on the
+`Rethinking Spatiotemporal Feature Learning: Speed-Accuracy Trade-offs in Video Classification
+`__ paper.
+
+
+Model builders
+--------------
+
+The following model builders can be used to instantiate an S3D model, with or
+without pre-trained weights. All the model builders internally rely on the
+``torchvision.models.video.S3D`` base class. Please refer to the `source
+code
+`_ for
+more details about this class.
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    s3d
diff --git a/docs/source/models/video_swin_transformer.rst b/docs/source/models/video_swin_transformer.rst
new file mode 100644
index 00000000000..e31e69759b4
--- /dev/null
+++ b/docs/source/models/video_swin_transformer.rst
@@ -0,0 +1,27 @@
+Video SwinTransformer
+=====================
+
+.. currentmodule:: torchvision.models.video
+
+The Video SwinTransformer model is based on the `Video Swin Transformer `__ paper.
+
+.. betastatus:: video module
+
+
+Model builders
+--------------
+
+The following model builders can be used to instantiate a Video SwinTransformer model, with or
+without pre-trained weights. All the model builders internally rely on the
+``torchvision.models.video.swin_transformer.SwinTransformer3d`` base class. Please refer to the `source
+code
+`_ for
+more details about this class.
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    swin3d_t
+    swin3d_s
+    swin3d_b
diff --git a/docs/source/models/vision_transformer.rst b/docs/source/models/vision_transformer.rst
new file mode 100644
index 00000000000..914caa9311e
--- /dev/null
+++ b/docs/source/models/vision_transformer.rst
@@ -0,0 +1,28 @@
+VisionTransformer
+=================
+
+.. currentmodule:: torchvision.models
+
+The VisionTransformer model is based on the `An Image is Worth 16x16 Words:
+Transformers for Image Recognition at Scale `_ paper.
+
+
+Model builders
+--------------
+
+The following model builders can be used to instantiate a VisionTransformer model, with or
+without pre-trained weights. All the model builders internally rely on the
+``torchvision.models.vision_transformer.VisionTransformer`` base class.
+Please refer to the `source code
+`_ for
+more details about this class.
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    vit_b_16
+    vit_b_32
+    vit_l_16
+    vit_l_32
+    vit_h_14
diff --git a/docs/source/models/wide_resnet.rst b/docs/source/models/wide_resnet.rst
new file mode 100644
index 00000000000..9768355c77e
--- /dev/null
+++ b/docs/source/models/wide_resnet.rst
@@ -0,0 +1,25 @@
+Wide ResNet
+===========
+
+.. currentmodule:: torchvision.models
+
+The Wide ResNet model is based on the `Wide Residual Networks `__
+paper.
+
+
+Model builders
+--------------
+
+The following model builders can be used to instantiate a Wide ResNet model, with or
+without pre-trained weights. All the model builders internally rely on the
+``torchvision.models.resnet.ResNet`` base class. Please refer to the `source
+code
+`_ for
+more details about this class.
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    wide_resnet50_2
+    wide_resnet101_2
diff --git a/docs/source/ops.rst b/docs/source/ops.rst
index ec87d02556e..7124c85bb79 100644
--- a/docs/source/ops.rst
+++ b/docs/source/ops.rst
@@ -1,17 +1,103 @@
-torchvision.ops
-===============
+.. _ops:
+
+Operators
+=========
 
 .. currentmodule:: torchvision.ops
 
-:mod:`torchvision.ops` implements operators that are specific for Computer Vision.
+:mod:`torchvision.ops` implements operators, losses and layers that are specific to Computer Vision.
 
 .. note::
-    Those operators currently do not support TorchScript.
+    All operators have native support for TorchScript.
+
+
+Detection and Segmentation Operators
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The below operators perform pre-processing as well as post-processing required in object detection and segmentation models.
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    batched_nms
+    masks_to_boxes
+    nms
+    roi_align
+    roi_pool
+    ps_roi_align
+    ps_roi_pool
+
+.. autosummary::
+    :toctree: generated/
+    :template: class.rst
+
+    FeaturePyramidNetwork
+    MultiScaleRoIAlign
+    RoIAlign
+    RoIPool
+    PSRoIAlign
+    PSRoIPool
+
+
+Box Operators
+~~~~~~~~~~~~~
+
+These utility functions perform various operations on bounding boxes.
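+
+For instance, a quick sketch of converting box formats and computing pairwise
+IoU with the functions listed below (shapes and values are illustrative):
+
+.. code:: python
+
+    import torch
+    from torchvision.ops import box_convert, box_iou
+
+    boxes_xywh = torch.tensor([[10., 10., 20., 20.]])  # (x, y, w, h)
+    boxes_xyxy = box_convert(boxes_xywh, in_fmt="xywh", out_fmt="xyxy")
+    iou = box_iou(boxes_xyxy, boxes_xyxy)  # (N, M) pairwise IoU matrix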
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    box_area
+    box_convert
+    box_iou
+    clip_boxes_to_image
+    complete_box_iou
+    distance_box_iou
+    generalized_box_iou
+    remove_small_boxes
+
+Losses
+~~~~~~
+
+The following vision-specific loss functions are implemented:
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    complete_box_iou_loss
+    distance_box_iou_loss
+    generalized_box_iou_loss
+    sigmoid_focal_loss
+
+
+Layers
+~~~~~~
+
+TorchVision provides commonly used building blocks as layers:
+
+.. autosummary::
+    :toctree: generated/
+    :template: class.rst
+
+    Conv2dNormActivation
+    Conv3dNormActivation
+    DeformConv2d
+    DropBlock2d
+    DropBlock3d
+    FrozenBatchNorm2d
+    MLP
+    Permute
+    SqueezeExcitation
+    StochasticDepth
 
-.. autofunction:: nms
-.. autofunction:: roi_align
-.. autofunction:: roi_pool
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
 
-.. autoclass:: RoIAlign
-.. autoclass:: RoIPool
+
+    deform_conv2d
+    drop_block2d
+    drop_block3d
+    stochastic_depth
diff --git a/docs/source/training_references.rst b/docs/source/training_references.rst
new file mode 100644
index 00000000000..fc22ac5eba6
--- /dev/null
+++ b/docs/source/training_references.rst
@@ -0,0 +1,29 @@
+Training references
+===================
+
+On top of the many models, datasets, and image transforms, Torchvision also
+provides training reference scripts. These are the scripts that we use to train
+the :ref:`models ` which are then available with pre-trained weights.
+
+These scripts are not part of the core package and are instead available `on
+GitHub `_. We currently
+provide references for
+`classification `_,
+`detection `_,
+`segmentation `_,
+`similarity learning `_,
+and `video classification `_.
+
+While these scripts are largely stable, they do not offer backward compatibility
+guarantees.
+
+In general, these scripts rely on the latest (not yet released) PyTorch version
+or the latest torchvision version. This means that to use them, **you might need
+to install the latest PyTorch and torchvision versions**, with e.g.::
+
+    conda install pytorch torchvision -c pytorch-nightly
+
+If you need to rely on an older stable version of PyTorch or torchvision, e.g.
+torchvision 0.10, then it's safer to use the scripts from that corresponding
+release on GitHub, namely
+https://github.com/pytorch/vision/tree/v0.10.0/references.
diff --git a/docs/source/transforms.rst b/docs/source/transforms.rst
index 2e0c6cefb8d..d2fed552c4f 100644
--- a/docs/source/transforms.rst
+++ b/docs/source/transforms.rst
@@ -1,132 +1,676 @@
-torchvision.transforms
-======================
+.. _transforms:
+
+Transforming and augmenting images
+==================================
 
 .. currentmodule:: torchvision.transforms
 
-Transforms are common image transformations. They can be chained together using :class:`Compose`.
-Additionally, there is the :mod:`torchvision.transforms.functional` module.
-Functional transforms give fine-grained control over the transformations.
-This is useful if you have to build a more complex transformation pipeline
-(e.g. in the case of segmentation tasks).
+Torchvision supports common computer vision transformations in the
+``torchvision.transforms`` and ``torchvision.transforms.v2`` modules. Transforms
+can be used to transform or augment data for training or inference of different
+tasks (image classification, detection, segmentation, video classification).
 
-.. autoclass:: Compose
+.. code:: python
-
+
+    # Image Classification
+    import torch
+    from torchvision.transforms import v2
-Transforms on PIL Image
------------------------
+
+    H, W = 32, 32
+    img = torch.randint(0, 256, size=(3, H, W), dtype=torch.uint8)
-.. autoclass:: CenterCrop
+
+    transforms = v2.Compose([
+        v2.RandomResizedCrop(size=(224, 224), antialias=True),
+        v2.RandomHorizontalFlip(p=0.5),
+        v2.ToDtype(torch.float32, scale=True),
+        v2.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
+    ])
+    img = transforms(img)
-.. autoclass:: ColorJitter
+
+.. code:: python
-.. autoclass:: FiveCrop
+
+    # Detection (re-using imports and transforms from above)
+    from torchvision import tv_tensors
-.. autoclass:: Grayscale
+
+    img = torch.randint(0, 256, size=(3, H, W), dtype=torch.uint8)
+    boxes = torch.randint(0, H // 2, size=(3, 4))
+    boxes[:, 2:] += boxes[:, :2]
+    boxes = tv_tensors.BoundingBoxes(boxes, format="XYXY", canvas_size=(H, W))
+
+    # The same transforms can be used!
+    img, boxes = transforms(img, boxes)
+    # And you can pass arbitrary input structures
+    output_dict = transforms({"image": img, "boxes": boxes})
+
+Transforms are typically passed as the ``transform`` or ``transforms`` argument
+to the :ref:`Datasets `.
-.. autoclass:: Pad
+
+Start here
+----------
-.. autoclass:: RandomAffine
+
+Whether you're new to Torchvision transforms, or you're already experienced with
+them, we encourage you to start with
+:ref:`sphx_glr_auto_examples_transforms_plot_transforms_getting_started.py` in
+order to learn more about what can be done with the new v2 transforms.
-.. autoclass:: RandomApply
+
+Then, browse the sections below on this page for general information and
+performance tips. The available transforms and functionals are listed in the
+:ref:`API reference `.
-.. autoclass:: RandomChoice
+
+More information and tutorials can also be found in our :ref:`example gallery
+`, e.g. :ref:`sphx_glr_auto_examples_transforms_plot_transforms_e2e.py`
+or :ref:`sphx_glr_auto_examples_transforms_plot_custom_transforms.py`.
-.. autoclass:: RandomCrop
+
+.. _conventions:
-.. autoclass:: RandomGrayscale
+
+Supported input types and conventions
+-------------------------------------
-.. autoclass:: RandomHorizontalFlip
+
+Most transformations accept both `PIL `_ images
+and tensor inputs. Both CPU and CUDA tensors are supported.
+The result of both backends (PIL or Tensors) should be very
+close. In general, we recommend relying on the tensor backend :ref:`for
+performance `. The :ref:`conversion transforms
+` may be used to convert to and from PIL images, or for
+converting dtypes and ranges.
-.. autoclass:: RandomOrder
+
+Tensor images are expected to be of shape ``(C, H, W)``, where ``C`` is the
+number of channels, and ``H`` and ``W`` refer to height and width. Most
+transforms support batched tensor input. A batch of Tensor images is a tensor of
+shape ``(N, C, H, W)``, where ``N`` is the number of images in the batch. The
+:ref:`v2 ` transforms generally accept an arbitrary number of leading
+dimensions ``(..., C, H, W)`` and can handle batched images or batched videos.
-.. autoclass:: RandomPerspective
+
+.. _range_and_dtype:
-.. autoclass:: RandomResizedCrop
+
+Dtype and expected value range
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-.. autoclass:: RandomRotation
+
+The expected range of the values of a tensor image is implicitly defined by
+the tensor dtype. Tensor images with a float dtype are expected to have
+values in ``[0, 1]``. Tensor images with an integer dtype are expected to
+have values in ``[0, MAX_DTYPE]`` where ``MAX_DTYPE`` is the largest value
+that can be represented in that dtype. Typically, images of dtype
+``torch.uint8`` are expected to have values in ``[0, 255]``.
-.. autoclass:: RandomSizedCrop
+
+Use :class:`~torchvision.transforms.v2.ToDtype` to convert both the dtype and
+range of the inputs.
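+
+For instance, a small sketch of what that conversion looks like in practice:
+
+.. code:: python
+
+    import torch
+    from torchvision.transforms import v2
+
+    img_uint8 = torch.randint(0, 256, size=(3, 32, 32), dtype=torch.uint8)
+    to_float = v2.ToDtype(torch.float32, scale=True)
+    img_float = to_float(img_uint8)  # dtype float32, values scaled to [0.0, 1.0]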
-.. autoclass:: RandomVerticalFlip
+
+.. _v1_or_v2:
-.. autoclass:: Resize
+
+V1 or V2? Which one should I use?
+---------------------------------
-.. autoclass:: Scale
+
+**TL;DR** We recommend using the ``torchvision.transforms.v2`` transforms
+instead of those in ``torchvision.transforms``. They're faster and they can do
+more things. Just change the import and you should be good to go. Moving
+forward, new features and improvements will only be considered for the v2
+transforms.
-.. autoclass:: TenCrop
+
+In Torchvision 0.15 (March 2023), we released a new set of transforms available
+in the ``torchvision.transforms.v2`` namespace. These transforms have a lot of
+advantages compared to the v1 ones (in ``torchvision.transforms``):
-Transforms on torch.\*Tensor
-----------------------------
+
+- They can transform images **but also** bounding boxes, masks, or videos. This
+  provides support for tasks beyond image classification: detection, segmentation,
+  video classification, etc. See
+  :ref:`sphx_glr_auto_examples_transforms_plot_transforms_getting_started.py`
+  and :ref:`sphx_glr_auto_examples_transforms_plot_transforms_e2e.py`.
+- They support more transforms like :class:`~torchvision.transforms.v2.CutMix`
+  and :class:`~torchvision.transforms.v2.MixUp`. See
+  :ref:`sphx_glr_auto_examples_transforms_plot_cutmix_mixup.py`.
+- They're :ref:`faster `.
+- They support arbitrary input structures (dicts, lists, tuples, etc.).
+- Future improvements and features will be added to the v2 transforms only.
-.. autoclass:: LinearTransformation
+
+These transforms are **fully backward compatible** with the v1 ones, so if
+you're already using transforms from ``torchvision.transforms``, all you need to
+do is to update the import to ``torchvision.transforms.v2``. In terms of
+output, there might be negligible differences due to implementation differences.
-.. autoclass:: Normalize
-   :members: __call__
-   :special-members:
+.. _transforms_perf:
+
+Performance considerations
+--------------------------
+
+We recommend the following guidelines to get the best performance out of the
+transforms:
+
+- Rely on the v2 transforms from ``torchvision.transforms.v2``
+- Use tensors instead of PIL images
+- Use ``torch.uint8`` dtype, especially for resizing
+- Resize with bilinear or bicubic mode
-.. autoclass:: RandomErasing
+
+This is what a typical transform pipeline could look like:
-Conversion Transforms
---------------------
+
+.. code:: python
-.. autoclass:: ToPILImage
-   :members: __call__
-   :special-members:
+
+    from torchvision.transforms import v2
+    transforms = v2.Compose([
+        v2.ToImage(),  # Convert to tensor, only needed if you had a PIL image
+        v2.ToDtype(torch.uint8, scale=True),  # optional, most inputs are already uint8 at this point
+        # ...
+        v2.RandomResizedCrop(size=(224, 224), antialias=True),  # Or Resize(antialias=True)
+        # ...
+        v2.ToDtype(torch.float32, scale=True),  # Normalize expects float input
+        v2.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
+    ])
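+
+Such a pipeline plugs straight into a dataset via the ``transform`` argument; a
+minimal sketch (``FakeData`` is just an illustrative stand-in for a real dataset):
+
+.. code:: python
+
+    from torchvision import datasets
+
+    dataset = datasets.FakeData(size=10, transform=transforms)
+    img, label = dataset[0]  # the pipeline runs on each accessed sample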
+
+This pipeline should give you the best performance in a typical training environment
+that relies on the :class:`torch.utils.data.DataLoader` with ``num_workers >
+0``.
+
+Transforms tend to be sensitive to the input strides / memory format. Some
+transforms will be faster with channels-first images while others prefer
+channels-last. Like ``torch`` operators, most transforms will preserve the
+memory format of the input, but this may not always be respected due to
+implementation details. You may want to experiment a bit if you're chasing the
+very best performance. Using :func:`torch.compile` on individual transforms may
+also help factor out the memory format variable (e.g. on
+:class:`~torchvision.transforms.v2.Normalize`). Note that we're talking about
+**memory format**, not :ref:`tensor shape `.
+
+Note that resize transforms like :class:`~torchvision.transforms.v2.Resize`
+and :class:`~torchvision.transforms.v2.RandomResizedCrop` typically prefer
+channels-last input and tend **not** to benefit from :func:`torch.compile` at
+this time.
+
+.. _functional_transforms:
+
+Transform classes, functionals, and kernels
+-------------------------------------------
+
+Transforms are available as classes like
+:class:`~torchvision.transforms.v2.Resize`, but also as functionals like
+:func:`~torchvision.transforms.v2.functional.resize` in the
+``torchvision.transforms.v2.functional`` namespace.
+This is very much like the :mod:`torch.nn` package which defines both classes
+and functional equivalents in :mod:`torch.nn.functional`.
+
+The functionals support PIL images, pure tensors, or :ref:`TVTensors
+`, e.g. both ``resize(image_tensor)`` and ``resize(boxes)`` are
+valid.
+
+.. note::
+
+    Random transforms like :class:`~torchvision.transforms.v2.RandomCrop` will
+    randomly sample some parameter each time they're called. Their functional
+    counterpart (:func:`~torchvision.transforms.v2.functional.crop`) does not do
+    any kind of random sampling and thus has a slightly different
+    parametrization. The ``get_params()`` class method of the transform classes
+    can be used to perform parameter sampling when using the functional APIs.
+
+
+The ``torchvision.transforms.v2.functional`` namespace also contains what we
+call the "kernels". These are the low-level functions that implement the
+core functionalities for specific types, e.g. ``resize_bounding_boxes`` or
+``resized_crop_mask``. They are public, although not documented. Check the
+`code
+`_
+to see which ones are available (note that those starting with a leading
+underscore are **not** public!). Kernels are only really useful if you want
+:ref:`torchscript support ` for types like bounding
+boxes or masks.
+
+.. _transforms_torchscript:
+
+Torchscript support
+-------------------
+
+Most transform classes and functionals support torchscript. For composing
+transforms, use :class:`torch.nn.Sequential` instead of
+:class:`~torchvision.transforms.v2.Compose`:
-.. autoclass:: ToTensor
-   :members: __call__
-   :special-members:
+
+.. code:: python
-Generic Transforms
------------------
+
+    transforms = torch.nn.Sequential(
+        CenterCrop(10),
+        Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)),
+    )
+    scripted_transforms = torch.jit.script(transforms)
-.. autoclass:: Lambda
+
+.. 
warning:: + v2 transforms support torchscript, but if you call ``torch.jit.script()`` on + a v2 **class** transform, you'll actually end up with its (scripted) v1 + equivalent. This may lead to slightly different results between the + scripted and eager executions due to implementation differences between v1 + and v2. -Functional Transforms ---------------------- + If you really need torchscript support for the v2 transforms, we recommend + scripting the **functionals** from the + ``torchvision.transforms.v2.functional`` namespace to avoid surprises. -Functional transforms give you fine-grained control of the transformation pipeline. -As opposed to the transformations above, functional transforms don't contain a random number -generator for their parameters. -That means you have to specify/generate all parameters, but you can reuse the functional transform. -Example: -you can apply a functional transform with the same parameters to multiple images like this: +Also note that the functionals only support torchscript for pure tensors, which +are always treated as images. If you need torchscript support for other types +like bounding boxes or masks, you can rely on the :ref:`low-level kernels +`. -.. code:: python +For any custom transformations to be used with ``torch.jit.script``, they should +be derived from ``torch.nn.Module``. - import torchvision.transforms.functional as TF - import random +See also: :ref:`sphx_glr_auto_examples_others_plot_scripted_tensor_transforms.py`. - def my_segmentation_transforms(image, segmentation): - if random.random() > 0.5: - angle = random.randint(-30, 30) - image = TF.rotate(image, angle) - segmentation = TF.rotate(segmentation, angle) - # more transforms ... - return image, segmentation +.. _v2_api_ref: + +V2 API reference - Recommended +------------------------------ + +Geometry +^^^^^^^^ +Resizing +"""""""" + +.. autosummary:: + :toctree: generated/ + :template: class.rst + + v2.Resize + v2.ScaleJitter + v2.RandomShortestSize + v2.RandomResize + +Functionals + +.. autosummary:: + :toctree: generated/ + :template: function.rst + + v2.functional.resize + +Cropping +"""""""" + +.. autosummary:: + :toctree: generated/ + :template: class.rst + + v2.RandomCrop + v2.RandomResizedCrop + v2.RandomIoUCrop + v2.CenterCrop + v2.FiveCrop + v2.TenCrop + +Functionals + +.. autosummary:: + :toctree: generated/ + :template: function.rst + + v2.functional.crop + v2.functional.resized_crop + v2.functional.ten_crop + v2.functional.center_crop + v2.functional.five_crop + +Others +"""""" + +.. autosummary:: + :toctree: generated/ + :template: class.rst + + v2.RandomHorizontalFlip + v2.RandomVerticalFlip + v2.Pad + v2.RandomZoomOut + v2.RandomRotation + v2.RandomAffine + v2.RandomPerspective + v2.ElasticTransform + +Functionals + +.. autosummary:: + :toctree: generated/ + :template: function.rst + + v2.functional.horizontal_flip + v2.functional.vertical_flip + v2.functional.pad + v2.functional.rotate + v2.functional.affine + v2.functional.perspective + v2.functional.elastic + +Color +^^^^^ + +.. autosummary:: + :toctree: generated/ + :template: class.rst + + v2.ColorJitter + v2.RandomChannelPermutation + v2.RandomPhotometricDistort + v2.Grayscale + v2.RGB + v2.RandomGrayscale + v2.GaussianBlur + v2.GaussianNoise + v2.RandomInvert + v2.RandomPosterize + v2.RandomSolarize + v2.RandomAdjustSharpness + v2.RandomAutocontrast + v2.RandomEqualize + +Functionals + +.. 
autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    v2.functional.permute_channels
+    v2.functional.rgb_to_grayscale
+    v2.functional.grayscale_to_rgb
+    v2.functional.to_grayscale
+    v2.functional.gaussian_blur
+    v2.functional.gaussian_noise
+    v2.functional.invert
+    v2.functional.posterize
+    v2.functional.solarize
+    v2.functional.adjust_sharpness
+    v2.functional.autocontrast
+    v2.functional.adjust_contrast
+    v2.functional.equalize
+    v2.functional.adjust_brightness
+    v2.functional.adjust_saturation
+    v2.functional.adjust_hue
+    v2.functional.adjust_gamma
+
+
+Composition
+^^^^^^^^^^^
+
+.. autosummary::
+    :toctree: generated/
+    :template: class.rst
+
+    v2.Compose
+    v2.RandomApply
+    v2.RandomChoice
+    v2.RandomOrder
+
+Miscellaneous
+^^^^^^^^^^^^^
+
+.. autosummary::
+    :toctree: generated/
+    :template: class.rst
+
+    v2.LinearTransformation
+    v2.Normalize
+    v2.RandomErasing
+    v2.Lambda
+    v2.SanitizeBoundingBoxes
+    v2.ClampBoundingBoxes
+    v2.UniformTemporalSubsample
+    v2.JPEG
+
+Functionals
+
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    v2.functional.normalize
+    v2.functional.erase
+    v2.functional.sanitize_bounding_boxes
+    v2.functional.clamp_bounding_boxes
+    v2.functional.uniform_temporal_subsample
+    v2.functional.jpeg
+
+.. _conversion_transforms:
+
+Conversion
+^^^^^^^^^^
+
+.. note::
+    Beware, some of these conversion transforms below will scale the values
+    while performing the conversion, while some may not do any scaling. By
+    scaling, we mean e.g. that a ``uint8`` -> ``float32`` would map the [0,
+    255] range into [0, 1] (and vice-versa). See :ref:`range_and_dtype`.
+
+.. autosummary::
+    :toctree: generated/
+    :template: class.rst
+
+    v2.ToImage
+    v2.ToPureTensor
+    v2.PILToTensor
+    v2.ToPILImage
+    v2.ToDtype
+    v2.ConvertBoundingBoxFormat
+
+Functionals
+
+.. autosummary::
+    :toctree: generated/
+    :template: functional.rst
+
+    v2.functional.to_image
+    v2.functional.pil_to_tensor
+    v2.functional.to_pil_image
+    v2.functional.to_dtype
+    v2.functional.convert_bounding_box_format
+
+
+Deprecated
+
+.. autosummary::
+    :toctree: generated/
+    :template: class.rst
+
+    v2.ToTensor
+    v2.functional.to_tensor
+    v2.ConvertImageDtype
+    v2.functional.convert_image_dtype
+
+Auto-Augmentation
+^^^^^^^^^^^^^^^^^
+
+`AutoAugment `_ is a common Data Augmentation technique that can improve the accuracy of Image Classification models.
+Though the data augmentation policies are directly linked to their trained dataset, empirical studies show that
+ImageNet policies provide significant improvements when applied to other datasets.
+In TorchVision we implemented 3 policies learned on the following datasets: ImageNet, CIFAR10 and SVHN.
+The new transform can be used standalone or mixed-and-matched with existing transforms:
+
+.. autosummary::
+    :toctree: generated/
+    :template: class.rst
+
+    v2.AutoAugment
+    v2.RandAugment
+    v2.TrivialAugmentWide
+    v2.AugMix
+
+
+CutMix - MixUp
+^^^^^^^^^^^^^^
+
+CutMix and MixUp are special transforms that
+are meant to be used on batches rather than on individual images, because they
+are combining pairs of images together. These can be used after the dataloader
+(once the samples are batched), or as part of a collation function. See
+:ref:`sphx_glr_auto_examples_transforms_plot_cutmix_mixup.py` for detailed usage examples.
+
+.. autosummary::
+    :toctree: generated/
+    :template: class.rst
+
+    v2.CutMix
+    v2.MixUp
+
+Developer tools
+^^^^^^^^^^^^^^^
+
+.. autosummary::
+    :toctree: generated/
+    :template: class.rst
+
+    v2.Transform
+
+.. 
autosummary:: + :toctree: generated/ + :template: function.rst + + v2.functional.register_kernel + v2.query_size + v2.query_chw + v2.get_bounding_boxes + + +V1 API Reference +---------------- + +Geometry +^^^^^^^^ + +.. autosummary:: + :toctree: generated/ + :template: class.rst -Example: -you can use a functional transform to build transform classes with custom behavior: + Resize + RandomCrop + RandomResizedCrop + CenterCrop + FiveCrop + TenCrop + Pad + RandomRotation + RandomAffine + RandomPerspective + ElasticTransform + RandomHorizontalFlip + RandomVerticalFlip -.. code:: python - import torchvision.transforms.functional as TF - import random +Color +^^^^^ - class MyRotationTransform: - """Rotate by one of the given angles.""" +.. autosummary:: + :toctree: generated/ + :template: class.rst - def __init__(self, angles): - self.angles = angles + ColorJitter + Grayscale + RandomGrayscale + GaussianBlur + RandomInvert + RandomPosterize + RandomSolarize + RandomAdjustSharpness + RandomAutocontrast + RandomEqualize + +Composition +^^^^^^^^^^^ - def __call__(self, x): - angle = random.choice(self.angles) - return TF.rotate(x, angle) +.. autosummary:: + :toctree: generated/ + :template: class.rst + + Compose + RandomApply + RandomChoice + RandomOrder - rotation_transform = MyRotationTransform(angles=[-30, -15, 0, 15, 30]) +Miscellaneous +^^^^^^^^^^^^^ +.. autosummary:: + :toctree: generated/ + :template: class.rst -.. automodule:: torchvision.transforms.functional - :members: + LinearTransformation + Normalize + RandomErasing + Lambda + +Conversion +^^^^^^^^^^ + +.. note:: + Beware, some of these conversion transforms below will scale the values + while performing the conversion, while some may not do any scaling. By + scaling, we mean e.g. that a ``uint8`` -> ``float32`` would map the [0, + 255] range into [0, 1] (and vice-versa). See :ref:`range_and_dtype`. + +.. autosummary:: + :toctree: generated/ + :template: class.rst + + ToPILImage + ToTensor + PILToTensor + ConvertImageDtype + +Auto-Augmentation +^^^^^^^^^^^^^^^^^ + +`AutoAugment `_ is a common Data Augmentation technique that can improve the accuracy of Image Classification models. +Though the data augmentation policies are directly linked to their trained dataset, empirical studies show that +ImageNet policies provide significant improvements when applied to other datasets. +In TorchVision we implemented 3 policies learned on the following datasets: ImageNet, CIFAR10 and SVHN. +The new transform can be used standalone or mixed-and-matched with existing transforms: + +.. autosummary:: + :toctree: generated/ + :template: class.rst + + AutoAugmentPolicy + AutoAugment + RandAugment + TrivialAugmentWide + AugMix + + + +Functional Transforms +^^^^^^^^^^^^^^^^^^^^^ + +.. currentmodule:: torchvision.transforms.functional + +.. 
autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    adjust_brightness
+    adjust_contrast
+    adjust_gamma
+    adjust_hue
+    adjust_saturation
+    adjust_sharpness
+    affine
+    autocontrast
+    center_crop
+    convert_image_dtype
+    crop
+    equalize
+    erase
+    five_crop
+    gaussian_blur
+    get_dimensions
+    get_image_num_channels
+    get_image_size
+    hflip
+    invert
+    normalize
+    pad
+    perspective
+    pil_to_tensor
+    posterize
+    resize
+    resized_crop
+    rgb_to_grayscale
+    rotate
+    solarize
+    ten_crop
+    to_grayscale
+    to_pil_image
+    to_tensor
+    vflip
diff --git a/docs/source/tv_tensors.rst b/docs/source/tv_tensors.rst
new file mode 100644
index 00000000000..cb8a3c45fa9
--- /dev/null
+++ b/docs/source/tv_tensors.rst
@@ -0,0 +1,29 @@
+.. _tv_tensors:
+
+TVTensors
+==========
+
+.. currentmodule:: torchvision.tv_tensors
+
+TVTensors are :class:`torch.Tensor` subclasses which the v2 :ref:`transforms
+` use under the hood to dispatch their inputs to the appropriate
+lower-level kernels. Most users do not need to manipulate TVTensors directly.
+
+Refer to
+:ref:`sphx_glr_auto_examples_transforms_plot_transforms_getting_started.py` for
+an introduction to TVTensors, or
+:ref:`sphx_glr_auto_examples_transforms_plot_tv_tensors.py` for more advanced
+info.
+
+.. autosummary::
+    :toctree: generated/
+    :template: class.rst
+
+    Image
+    Video
+    BoundingBoxFormat
+    BoundingBoxes
+    Mask
+    TVTensor
+    set_return_type
+    wrap
diff --git a/docs/source/utils.rst b/docs/source/utils.rst
index ad2fc91c897..cda04de900a 100644
--- a/docs/source/utils.rst
+++ b/docs/source/utils.rst
@@ -1,9 +1,20 @@
-torchvision.utils
-=================
+.. _utils:
 
-.. currentmodule:: torchvision.utils
+Utils
+=====
+
+The ``torchvision.utils`` module contains various utilities, mostly :ref:`for
+visualization `.
 
-.. autofunction:: make_grid
+.. currentmodule:: torchvision.utils
 
-.. autofunction:: save_image
+.. autosummary::
+    :toctree: generated/
+    :template: function.rst
+
+    draw_bounding_boxes
+    draw_segmentation_masks
+    draw_keypoints
+    flow_to_image
+    make_grid
+    save_image
diff --git a/examples/cpp/CMakeLists.txt b/examples/cpp/CMakeLists.txt
new file mode 100644
index 00000000000..a1329b0c968
--- /dev/null
+++ b/examples/cpp/CMakeLists.txt
@@ -0,0 +1,18 @@
+cmake_minimum_required(VERSION 3.10)
+project(run_model)
+
+option(USE_TORCHVISION "Whether to link to torchvision" OFF)
+
+find_package(Torch REQUIRED)
+if(USE_TORCHVISION)
+  find_package(TorchVision REQUIRED)
+endif()
+
+add_executable(run_model run_model.cpp)
+
+target_link_libraries(run_model "${TORCH_LIBRARIES}")
+if(USE_TORCHVISION)
+  target_link_libraries(run_model TorchVision::TorchVision)
+endif()
+
+set_property(TARGET run_model PROPERTY CXX_STANDARD 17)
diff --git a/examples/cpp/README.md b/examples/cpp/README.md
new file mode 100644
index 00000000000..b2a9174c8ba
--- /dev/null
+++ b/examples/cpp/README.md
@@ -0,0 +1,101 @@
+Using torchvision models in C++
+===============================
+
+This is a minimal example of getting TorchVision models to work in C++ with
+TorchScript. The model is first scripted in Python and exported to a file, and
+then loaded in C++. For a similar tutorial, see [this
+tutorial](https://pytorch.org/tutorials/advanced/cpp_export.html).
+
+In order to successfully compile this example, make sure you have ``LibTorch``
+installed. You can either:
+
+- Install PyTorch normally
+- Or download the LibTorch C++ distribution.
+
+In both cases refer [here](https://pytorch.org/get-started/locally/) for the
+corresponding install or download instructions.
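+
+The build steps below export a scripted model with a small Python script; a
+minimal sketch of what that export looks like (the actual `trace_model.py`
+shipped with this example may differ):
+
+```python
+# Hypothetical sketch of the export step used by the build instructions below.
+import torch
+from torchvision import models
+
+model = models.resnet18(weights=None)
+model.eval()
+scripted = torch.jit.script(model)
+scripted.save("resnet18.pt")  # loaded later by run_model in C++
+```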
+
+Some torchvision models only depend on PyTorch operators, and can be used in C++
+without depending on the torchvision lib. Other models rely on torchvision's C++
+operators like NMS, RoiAlign (typically the detection models) and those need to
+be linked against the torchvision lib.
+
+We'll first see the simpler case of running a model without the torchvision lib
+dependency.
+
+Running a model that doesn't need torchvision lib
+-------------------------------------------------
+
+Create a ``build`` directory inside the current one.
+
+```bash
+mkdir build
+cd build
+```
+
+Then run `python ../trace_model.py` which should create a `resnet18.pt` file in
+the build directory. This is the scripted model that will be used in the C++
+code.
+
+We can now start building with CMake. We have to tell CMake where it can find
+the necessary PyTorch resources. If you installed PyTorch normally, you can do:
+
+```bash
+TORCH_PATH=$(python -c "import pathlib, torch; print(pathlib.Path(torch.__path__[0]))")
+Torch_DIR="${TORCH_PATH}/share/cmake/Torch" # there should be .cmake files in there
+
+cmake .. -DTorch_DIR=$Torch_DIR
+```
+
+If instead you downloaded LibTorch somewhere, you can do:
+
+```bash
+cmake .. -DCMAKE_PREFIX_PATH=/path/to/libtorch
+```
+
+Then run `cmake --build .` and you should now be able to run
+
+```bash
+./run_model resnet18.pt
+```
+
+If you try to run a model that depends on the torchvision lib, like
+`./run_model fasterrcnn_resnet50_fpn.pt`, you should get a runtime error. This is
+because the executable wasn't linked against the torchvision lib.
+
+
+Running a model that needs torchvision lib
+------------------------------------------
+
+First, we need to build the torchvision lib. To do so, go to the root of the
+torchvision project and run:
+
+```bash
+mkdir build
+cd build
+cmake .. -DCMAKE_PREFIX_PATH=/path/to/libtorch # or -DTorch_DIR= if you installed PyTorch normally, see above
+cmake --build .
+cmake --install .
+```
+
+You may want to pass `-DCMAKE_INSTALL_PREFIX=/path/to/libtorchvision` for
+cmake to copy/install the files to a specific location (e.g. `$CONDA_PREFIX`).
+
+**DISCLAIMER**: the `libtorchvision` library includes the torchvision
+custom ops as well as most of the C++ torchvision APIs. Those APIs do not come
+with any backward-compatibility guarantees and may change from one version to
+the next. Only the Python APIs are stable and covered by backward-compatibility
+guarantees. So, if you need stability within a C++ environment, your best bet is
+to export the Python APIs via torchscript.
+
+Now that libtorchvision is built and installed we can tell our project to use
+and link to it via the `-DUSE_TORCHVISION` flag. We also need to tell CMake
+where to find it, just like we did with LibTorch, e.g.:
+
+```bash
+cmake .. -DTorch_DIR=$Torch_DIR -DTorchVision_DIR=path/to/libtorchvision -DUSE_TORCHVISION=ON
+cmake --build .
+```
+
+Now the `run_model` executable should be able to run the
+`fasterrcnn_resnet50_fpn.pt` file.
diff --git a/examples/cpp/run_model.cpp b/examples/cpp/run_model.cpp
new file mode 100644
index 00000000000..11faead5dac
--- /dev/null
+++ b/examples/cpp/run_model.cpp
@@ -0,0 +1,67 @@
+#include <cstring>
+#include <iostream>
+#include <torch/script.h>
+#include <torch/torch.h>
+
+#ifdef _WIN32
+// On Windows the torchvision ops are not automatically registered when
+// linking, so pull them in explicitly.
+#include <torchvision/vision.h>
+#endif // _WIN32
+
+int main(int argc, const char* argv[]) {
+  if (argc != 2) {
+    std::cout << "Usage: run_model <path_to_scripted_model>\n";
+    return -1;
+  }
+
+  torch::jit::script::Module model;
+  try {
+    std::cout << "Loading model\n";
+    // Deserialize the ScriptModule from a file using torch::jit::load().
+    model = torch::jit::load(argv[1]);
+    std::cout << "Model loaded\n";
+  } catch (const torch::Error&) {
+    std::cout << "error loading the model.\n";
+    return -1;
+  } catch (const std::exception& e) {
+    std::cout << "Other error: " << e.what() << "\n";
+    return -1;
+  }
+
+  // TorchScript models require a List[IValue] as input
+  std::vector<torch::jit::IValue> inputs;
+
+  if (std::strstr(argv[1], "fasterrcnn") != NULL) {
+    // Faster RCNN accepts a List[Tensor] as main input
+    std::vector<torch::Tensor> images;
+    images.push_back(torch::rand({3, 256, 275}));
+    images.push_back(torch::rand({3, 256, 275}));
+    inputs.push_back(images);
+  } else {
+    inputs.push_back(torch::rand({1, 3, 10, 10}));
+  }
+  auto out = model.forward(inputs);
+  std::cout << out << "\n";
+
+  if (torch::cuda::is_available()) {
+    // Move model and inputs to GPU
+    model.to(torch::kCUDA);
+
+    // Add GPU inputs
+    inputs.clear();
+    torch::TensorOptions options = torch::TensorOptions{torch::kCUDA};
+    if (std::strstr(argv[1], "fasterrcnn") != NULL) {
+      // Faster RCNN accepts a List[Tensor] as main input
+      std::vector<torch::Tensor> images;
+      images.push_back(torch::rand({3, 256, 275}, options));
+      images.push_back(torch::rand({3, 256, 275}, options));
+      inputs.push_back(images);
+    } else {
+      inputs.push_back(torch::rand({1, 3, 10, 10}, options));
+    }
+
+    auto gpu_out = model.forward(inputs);
+    std::cout << gpu_out << "\n";
+  }
+}
diff --git a/examples/cpp/script_model.py b/examples/cpp/script_model.py
new file mode 100644
index 00000000000..e91e888e7be
--- /dev/null
+++ b/examples/cpp/script_model.py
@@ -0,0 +1,10 @@
+import torch
+from torchvision import models
+
+for model, name in (
+    (models.resnet18(weights=None), "resnet18"),
+    (models.detection.fasterrcnn_resnet50_fpn(weights=None, weights_backbone=None), "fasterrcnn_resnet50_fpn"),
+):
+    model.eval()
+    scripted_model = torch.jit.script(model)
+    scripted_model.save(f"{name}.pt")
diff --git a/examples/python/README.md b/examples/python/README.md
new file mode 100644
index 00000000000..b6597959e37
--- /dev/null
+++ b/examples/python/README.md
@@ -0,0 +1,4 @@
+# Python examples
+
+The examples in this directory have been moved online to our [gallery
+page](https://pytorch.org/vision/stable/auto_examples/index.html).
diff --git a/gallery/README.rst b/gallery/README.rst
new file mode 100644
index 00000000000..8dfea355276
--- /dev/null
+++ b/gallery/README.rst
@@ -0,0 +1,4 @@
+..
_gallery: + +Examples and tutorials +====================== diff --git a/gallery/assets/FudanPed00054.png b/gallery/assets/FudanPed00054.png new file mode 100644 index 00000000000..951682abb93 Binary files /dev/null and b/gallery/assets/FudanPed00054.png differ diff --git a/gallery/assets/FudanPed00054_mask.png b/gallery/assets/FudanPed00054_mask.png new file mode 100644 index 00000000000..4d5aa4e4020 Binary files /dev/null and b/gallery/assets/FudanPed00054_mask.png differ diff --git a/gallery/assets/astronaut.jpg b/gallery/assets/astronaut.jpg new file mode 100644 index 00000000000..9716f656269 Binary files /dev/null and b/gallery/assets/astronaut.jpg differ diff --git a/gallery/assets/basketball.mp4 b/gallery/assets/basketball.mp4 new file mode 100644 index 00000000000..16d62366068 Binary files /dev/null and b/gallery/assets/basketball.mp4 differ diff --git a/gallery/assets/coco/images/000000000001.jpg b/gallery/assets/coco/images/000000000001.jpg new file mode 120000 index 00000000000..9be80c7c273 --- /dev/null +++ b/gallery/assets/coco/images/000000000001.jpg @@ -0,0 +1 @@ +../../astronaut.jpg \ No newline at end of file diff --git a/gallery/assets/coco/images/000000000002.jpg b/gallery/assets/coco/images/000000000002.jpg new file mode 120000 index 00000000000..9f8efef9928 --- /dev/null +++ b/gallery/assets/coco/images/000000000002.jpg @@ -0,0 +1 @@ +../../dog2.jpg \ No newline at end of file diff --git a/gallery/assets/coco/instances.json b/gallery/assets/coco/instances.json new file mode 100644 index 00000000000..fe0e09270bf --- /dev/null +++ b/gallery/assets/coco/instances.json @@ -0,0 +1 @@ +{"images": [{"file_name": "000000000001.jpg", "height": 512, "width": 512, "id": 1}, {"file_name": "000000000002.jpg", "height": 500, "width": 500, "id": 2}], "annotations": [{"segmentation": [[40.0, 511.0, 26.0, 487.0, 28.0, 438.0, 17.0, 397.0, 24.0, 346.0, 38.0, 306.0, 61.0, 250.0, 111.0, 206.0, 111.0, 187.0, 120.0, 183.0, 136.0, 159.0, 159.0, 150.0, 181.0, 148.0, 182.0, 132.0, 175.0, 132.0, 168.0, 120.0, 154.0, 102.0, 153.0, 62.0, 188.0, 35.0, 191.0, 29.0, 208.0, 20.0, 210.0, 22.0, 227.0, 16.0, 240.0, 16.0, 276.0, 31.0, 285.0, 39.0, 301.0, 88.0, 297.0, 108.0, 281.0, 128.0, 273.0, 138.0, 266.0, 138.0, 264.0, 153.0, 257.0, 162.0, 256.0, 174.0, 284.0, 197.0, 300.0, 221.0, 303.0, 236.0, 337.0, 258.0, 357.0, 306.0, 361.0, 351.0, 358.0, 511.0]], "iscrowd": 0, "image_id": 1, "bbox": [17.0, 16.0, 344.0, 495.0], "category_id": 1, "id": 1}, {"segmentation": [[0.0, 411.0, 43.0, 401.0, 99.0, 395.0, 105.0, 351.0, 124.0, 326.0, 181.0, 294.0, 227.0, 280.0, 245.0, 262.0, 259.0, 234.0, 262.0, 207.0, 271.0, 140.0, 283.0, 139.0, 301.0, 162.0, 309.0, 181.0, 341.0, 175.0, 362.0, 139.0, 369.0, 139.0, 377.0, 163.0, 378.0, 203.0, 381.0, 212.0, 380.0, 220.0, 382.0, 242.0, 404.0, 264.0, 392.0, 293.0, 384.0, 295.0, 385.0, 316.0, 399.0, 343.0, 391.0, 448.0, 452.0, 475.0, 457.0, 494.0, 436.0, 498.0, 402.0, 491.0, 369.0, 488.0, 366.0, 496.0, 319.0, 496.0, 302.0, 485.0, 226.0, 469.0, 128.0, 456.0, 74.0, 458.0, 29.0, 439.0, 0.0, 445.0]], "iscrowd": 0, "image_id": 2, "bbox": [0.0, 139.0, 457.0, 359.0], "category_id": 18, "id": 2}]} diff --git a/gallery/assets/dog1.jpg b/gallery/assets/dog1.jpg new file mode 100644 index 00000000000..df29f9d9704 Binary files /dev/null and b/gallery/assets/dog1.jpg differ diff --git a/gallery/assets/dog2.jpg b/gallery/assets/dog2.jpg new file mode 100644 index 00000000000..528dfec7209 Binary files /dev/null and b/gallery/assets/dog2.jpg differ diff --git 
a/gallery/assets/imagenet_class_index.json b/gallery/assets/imagenet_class_index.json new file mode 100644 index 00000000000..2ebd2961e1d --- /dev/null +++ b/gallery/assets/imagenet_class_index.json @@ -0,0 +1 @@ +{"0": ["n01440764", "tench"], "1": ["n01443537", "goldfish"], "2": ["n01484850", "great_white_shark"], "3": ["n01491361", "tiger_shark"], "4": ["n01494475", "hammerhead"], "5": ["n01496331", "electric_ray"], "6": ["n01498041", "stingray"], "7": ["n01514668", "cock"], "8": ["n01514859", "hen"], "9": ["n01518878", "ostrich"], "10": ["n01530575", "brambling"], "11": ["n01531178", "goldfinch"], "12": ["n01532829", "house_finch"], "13": ["n01534433", "junco"], "14": ["n01537544", "indigo_bunting"], "15": ["n01558993", "robin"], "16": ["n01560419", "bulbul"], "17": ["n01580077", "jay"], "18": ["n01582220", "magpie"], "19": ["n01592084", "chickadee"], "20": ["n01601694", "water_ouzel"], "21": ["n01608432", "kite"], "22": ["n01614925", "bald_eagle"], "23": ["n01616318", "vulture"], "24": ["n01622779", "great_grey_owl"], "25": ["n01629819", "European_fire_salamander"], "26": ["n01630670", "common_newt"], "27": ["n01631663", "eft"], "28": ["n01632458", "spotted_salamander"], "29": ["n01632777", "axolotl"], "30": ["n01641577", "bullfrog"], "31": ["n01644373", "tree_frog"], "32": ["n01644900", "tailed_frog"], "33": ["n01664065", "loggerhead"], "34": ["n01665541", "leatherback_turtle"], "35": ["n01667114", "mud_turtle"], "36": ["n01667778", "terrapin"], "37": ["n01669191", "box_turtle"], "38": ["n01675722", "banded_gecko"], "39": ["n01677366", "common_iguana"], "40": ["n01682714", "American_chameleon"], "41": ["n01685808", "whiptail"], "42": ["n01687978", "agama"], "43": ["n01688243", "frilled_lizard"], "44": ["n01689811", "alligator_lizard"], "45": ["n01692333", "Gila_monster"], "46": ["n01693334", "green_lizard"], "47": ["n01694178", "African_chameleon"], "48": ["n01695060", "Komodo_dragon"], "49": ["n01697457", "African_crocodile"], "50": ["n01698640", "American_alligator"], "51": ["n01704323", "triceratops"], "52": ["n01728572", "thunder_snake"], "53": ["n01728920", "ringneck_snake"], "54": ["n01729322", "hognose_snake"], "55": ["n01729977", "green_snake"], "56": ["n01734418", "king_snake"], "57": ["n01735189", "garter_snake"], "58": ["n01737021", "water_snake"], "59": ["n01739381", "vine_snake"], "60": ["n01740131", "night_snake"], "61": ["n01742172", "boa_constrictor"], "62": ["n01744401", "rock_python"], "63": ["n01748264", "Indian_cobra"], "64": ["n01749939", "green_mamba"], "65": ["n01751748", "sea_snake"], "66": ["n01753488", "horned_viper"], "67": ["n01755581", "diamondback"], "68": ["n01756291", "sidewinder"], "69": ["n01768244", "trilobite"], "70": ["n01770081", "harvestman"], "71": ["n01770393", "scorpion"], "72": ["n01773157", "black_and_gold_garden_spider"], "73": ["n01773549", "barn_spider"], "74": ["n01773797", "garden_spider"], "75": ["n01774384", "black_widow"], "76": ["n01774750", "tarantula"], "77": ["n01775062", "wolf_spider"], "78": ["n01776313", "tick"], "79": ["n01784675", "centipede"], "80": ["n01795545", "black_grouse"], "81": ["n01796340", "ptarmigan"], "82": ["n01797886", "ruffed_grouse"], "83": ["n01798484", "prairie_chicken"], "84": ["n01806143", "peacock"], "85": ["n01806567", "quail"], "86": ["n01807496", "partridge"], "87": ["n01817953", "African_grey"], "88": ["n01818515", "macaw"], "89": ["n01819313", "sulphur-crested_cockatoo"], "90": ["n01820546", "lorikeet"], "91": ["n01824575", "coucal"], "92": ["n01828970", "bee_eater"], "93": ["n01829413", 
"hornbill"], "94": ["n01833805", "hummingbird"], "95": ["n01843065", "jacamar"], "96": ["n01843383", "toucan"], "97": ["n01847000", "drake"], "98": ["n01855032", "red-breasted_merganser"], "99": ["n01855672", "goose"], "100": ["n01860187", "black_swan"], "101": ["n01871265", "tusker"], "102": ["n01872401", "echidna"], "103": ["n01873310", "platypus"], "104": ["n01877812", "wallaby"], "105": ["n01882714", "koala"], "106": ["n01883070", "wombat"], "107": ["n01910747", "jellyfish"], "108": ["n01914609", "sea_anemone"], "109": ["n01917289", "brain_coral"], "110": ["n01924916", "flatworm"], "111": ["n01930112", "nematode"], "112": ["n01943899", "conch"], "113": ["n01944390", "snail"], "114": ["n01945685", "slug"], "115": ["n01950731", "sea_slug"], "116": ["n01955084", "chiton"], "117": ["n01968897", "chambered_nautilus"], "118": ["n01978287", "Dungeness_crab"], "119": ["n01978455", "rock_crab"], "120": ["n01980166", "fiddler_crab"], "121": ["n01981276", "king_crab"], "122": ["n01983481", "American_lobster"], "123": ["n01984695", "spiny_lobster"], "124": ["n01985128", "crayfish"], "125": ["n01986214", "hermit_crab"], "126": ["n01990800", "isopod"], "127": ["n02002556", "white_stork"], "128": ["n02002724", "black_stork"], "129": ["n02006656", "spoonbill"], "130": ["n02007558", "flamingo"], "131": ["n02009229", "little_blue_heron"], "132": ["n02009912", "American_egret"], "133": ["n02011460", "bittern"], "134": ["n02012849", "crane"], "135": ["n02013706", "limpkin"], "136": ["n02017213", "European_gallinule"], "137": ["n02018207", "American_coot"], "138": ["n02018795", "bustard"], "139": ["n02025239", "ruddy_turnstone"], "140": ["n02027492", "red-backed_sandpiper"], "141": ["n02028035", "redshank"], "142": ["n02033041", "dowitcher"], "143": ["n02037110", "oystercatcher"], "144": ["n02051845", "pelican"], "145": ["n02056570", "king_penguin"], "146": ["n02058221", "albatross"], "147": ["n02066245", "grey_whale"], "148": ["n02071294", "killer_whale"], "149": ["n02074367", "dugong"], "150": ["n02077923", "sea_lion"], "151": ["n02085620", "Chihuahua"], "152": ["n02085782", "Japanese_spaniel"], "153": ["n02085936", "Maltese_dog"], "154": ["n02086079", "Pekinese"], "155": ["n02086240", "Shih-Tzu"], "156": ["n02086646", "Blenheim_spaniel"], "157": ["n02086910", "papillon"], "158": ["n02087046", "toy_terrier"], "159": ["n02087394", "Rhodesian_ridgeback"], "160": ["n02088094", "Afghan_hound"], "161": ["n02088238", "basset"], "162": ["n02088364", "beagle"], "163": ["n02088466", "bloodhound"], "164": ["n02088632", "bluetick"], "165": ["n02089078", "black-and-tan_coonhound"], "166": ["n02089867", "Walker_hound"], "167": ["n02089973", "English_foxhound"], "168": ["n02090379", "redbone"], "169": ["n02090622", "borzoi"], "170": ["n02090721", "Irish_wolfhound"], "171": ["n02091032", "Italian_greyhound"], "172": ["n02091134", "whippet"], "173": ["n02091244", "Ibizan_hound"], "174": ["n02091467", "Norwegian_elkhound"], "175": ["n02091635", "otterhound"], "176": ["n02091831", "Saluki"], "177": ["n02092002", "Scottish_deerhound"], "178": ["n02092339", "Weimaraner"], "179": ["n02093256", "Staffordshire_bullterrier"], "180": ["n02093428", "American_Staffordshire_terrier"], "181": ["n02093647", "Bedlington_terrier"], "182": ["n02093754", "Border_terrier"], "183": ["n02093859", "Kerry_blue_terrier"], "184": ["n02093991", "Irish_terrier"], "185": ["n02094114", "Norfolk_terrier"], "186": ["n02094258", "Norwich_terrier"], "187": ["n02094433", "Yorkshire_terrier"], "188": ["n02095314", "wire-haired_fox_terrier"], "189": 
["n02095570", "Lakeland_terrier"], "190": ["n02095889", "Sealyham_terrier"], "191": ["n02096051", "Airedale"], "192": ["n02096177", "cairn"], "193": ["n02096294", "Australian_terrier"], "194": ["n02096437", "Dandie_Dinmont"], "195": ["n02096585", "Boston_bull"], "196": ["n02097047", "miniature_schnauzer"], "197": ["n02097130", "giant_schnauzer"], "198": ["n02097209", "standard_schnauzer"], "199": ["n02097298", "Scotch_terrier"], "200": ["n02097474", "Tibetan_terrier"], "201": ["n02097658", "silky_terrier"], "202": ["n02098105", "soft-coated_wheaten_terrier"], "203": ["n02098286", "West_Highland_white_terrier"], "204": ["n02098413", "Lhasa"], "205": ["n02099267", "flat-coated_retriever"], "206": ["n02099429", "curly-coated_retriever"], "207": ["n02099601", "golden_retriever"], "208": ["n02099712", "Labrador_retriever"], "209": ["n02099849", "Chesapeake_Bay_retriever"], "210": ["n02100236", "German_short-haired_pointer"], "211": ["n02100583", "vizsla"], "212": ["n02100735", "English_setter"], "213": ["n02100877", "Irish_setter"], "214": ["n02101006", "Gordon_setter"], "215": ["n02101388", "Brittany_spaniel"], "216": ["n02101556", "clumber"], "217": ["n02102040", "English_springer"], "218": ["n02102177", "Welsh_springer_spaniel"], "219": ["n02102318", "cocker_spaniel"], "220": ["n02102480", "Sussex_spaniel"], "221": ["n02102973", "Irish_water_spaniel"], "222": ["n02104029", "kuvasz"], "223": ["n02104365", "schipperke"], "224": ["n02105056", "groenendael"], "225": ["n02105162", "malinois"], "226": ["n02105251", "briard"], "227": ["n02105412", "kelpie"], "228": ["n02105505", "komondor"], "229": ["n02105641", "Old_English_sheepdog"], "230": ["n02105855", "Shetland_sheepdog"], "231": ["n02106030", "collie"], "232": ["n02106166", "Border_collie"], "233": ["n02106382", "Bouvier_des_Flandres"], "234": ["n02106550", "Rottweiler"], "235": ["n02106662", "German_shepherd"], "236": ["n02107142", "Doberman"], "237": ["n02107312", "miniature_pinscher"], "238": ["n02107574", "Greater_Swiss_Mountain_dog"], "239": ["n02107683", "Bernese_mountain_dog"], "240": ["n02107908", "Appenzeller"], "241": ["n02108000", "EntleBucher"], "242": ["n02108089", "boxer"], "243": ["n02108422", "bull_mastiff"], "244": ["n02108551", "Tibetan_mastiff"], "245": ["n02108915", "French_bulldog"], "246": ["n02109047", "Great_Dane"], "247": ["n02109525", "Saint_Bernard"], "248": ["n02109961", "Eskimo_dog"], "249": ["n02110063", "malamute"], "250": ["n02110185", "Siberian_husky"], "251": ["n02110341", "dalmatian"], "252": ["n02110627", "affenpinscher"], "253": ["n02110806", "basenji"], "254": ["n02110958", "pug"], "255": ["n02111129", "Leonberg"], "256": ["n02111277", "Newfoundland"], "257": ["n02111500", "Great_Pyrenees"], "258": ["n02111889", "Samoyed"], "259": ["n02112018", "Pomeranian"], "260": ["n02112137", "chow"], "261": ["n02112350", "keeshond"], "262": ["n02112706", "Brabancon_griffon"], "263": ["n02113023", "Pembroke"], "264": ["n02113186", "Cardigan"], "265": ["n02113624", "toy_poodle"], "266": ["n02113712", "miniature_poodle"], "267": ["n02113799", "standard_poodle"], "268": ["n02113978", "Mexican_hairless"], "269": ["n02114367", "timber_wolf"], "270": ["n02114548", "white_wolf"], "271": ["n02114712", "red_wolf"], "272": ["n02114855", "coyote"], "273": ["n02115641", "dingo"], "274": ["n02115913", "dhole"], "275": ["n02116738", "African_hunting_dog"], "276": ["n02117135", "hyena"], "277": ["n02119022", "red_fox"], "278": ["n02119789", "kit_fox"], "279": ["n02120079", "Arctic_fox"], "280": ["n02120505", "grey_fox"], "281": 
["n02123045", "tabby"], "282": ["n02123159", "tiger_cat"], "283": ["n02123394", "Persian_cat"], "284": ["n02123597", "Siamese_cat"], "285": ["n02124075", "Egyptian_cat"], "286": ["n02125311", "cougar"], "287": ["n02127052", "lynx"], "288": ["n02128385", "leopard"], "289": ["n02128757", "snow_leopard"], "290": ["n02128925", "jaguar"], "291": ["n02129165", "lion"], "292": ["n02129604", "tiger"], "293": ["n02130308", "cheetah"], "294": ["n02132136", "brown_bear"], "295": ["n02133161", "American_black_bear"], "296": ["n02134084", "ice_bear"], "297": ["n02134418", "sloth_bear"], "298": ["n02137549", "mongoose"], "299": ["n02138441", "meerkat"], "300": ["n02165105", "tiger_beetle"], "301": ["n02165456", "ladybug"], "302": ["n02167151", "ground_beetle"], "303": ["n02168699", "long-horned_beetle"], "304": ["n02169497", "leaf_beetle"], "305": ["n02172182", "dung_beetle"], "306": ["n02174001", "rhinoceros_beetle"], "307": ["n02177972", "weevil"], "308": ["n02190166", "fly"], "309": ["n02206856", "bee"], "310": ["n02219486", "ant"], "311": ["n02226429", "grasshopper"], "312": ["n02229544", "cricket"], "313": ["n02231487", "walking_stick"], "314": ["n02233338", "cockroach"], "315": ["n02236044", "mantis"], "316": ["n02256656", "cicada"], "317": ["n02259212", "leafhopper"], "318": ["n02264363", "lacewing"], "319": ["n02268443", "dragonfly"], "320": ["n02268853", "damselfly"], "321": ["n02276258", "admiral"], "322": ["n02277742", "ringlet"], "323": ["n02279972", "monarch"], "324": ["n02280649", "cabbage_butterfly"], "325": ["n02281406", "sulphur_butterfly"], "326": ["n02281787", "lycaenid"], "327": ["n02317335", "starfish"], "328": ["n02319095", "sea_urchin"], "329": ["n02321529", "sea_cucumber"], "330": ["n02325366", "wood_rabbit"], "331": ["n02326432", "hare"], "332": ["n02328150", "Angora"], "333": ["n02342885", "hamster"], "334": ["n02346627", "porcupine"], "335": ["n02356798", "fox_squirrel"], "336": ["n02361337", "marmot"], "337": ["n02363005", "beaver"], "338": ["n02364673", "guinea_pig"], "339": ["n02389026", "sorrel"], "340": ["n02391049", "zebra"], "341": ["n02395406", "hog"], "342": ["n02396427", "wild_boar"], "343": ["n02397096", "warthog"], "344": ["n02398521", "hippopotamus"], "345": ["n02403003", "ox"], "346": ["n02408429", "water_buffalo"], "347": ["n02410509", "bison"], "348": ["n02412080", "ram"], "349": ["n02415577", "bighorn"], "350": ["n02417914", "ibex"], "351": ["n02422106", "hartebeest"], "352": ["n02422699", "impala"], "353": ["n02423022", "gazelle"], "354": ["n02437312", "Arabian_camel"], "355": ["n02437616", "llama"], "356": ["n02441942", "weasel"], "357": ["n02442845", "mink"], "358": ["n02443114", "polecat"], "359": ["n02443484", "black-footed_ferret"], "360": ["n02444819", "otter"], "361": ["n02445715", "skunk"], "362": ["n02447366", "badger"], "363": ["n02454379", "armadillo"], "364": ["n02457408", "three-toed_sloth"], "365": ["n02480495", "orangutan"], "366": ["n02480855", "gorilla"], "367": ["n02481823", "chimpanzee"], "368": ["n02483362", "gibbon"], "369": ["n02483708", "siamang"], "370": ["n02484975", "guenon"], "371": ["n02486261", "patas"], "372": ["n02486410", "baboon"], "373": ["n02487347", "macaque"], "374": ["n02488291", "langur"], "375": ["n02488702", "colobus"], "376": ["n02489166", "proboscis_monkey"], "377": ["n02490219", "marmoset"], "378": ["n02492035", "capuchin"], "379": ["n02492660", "howler_monkey"], "380": ["n02493509", "titi"], "381": ["n02493793", "spider_monkey"], "382": ["n02494079", "squirrel_monkey"], "383": ["n02497673", "Madagascar_cat"], 
"384": ["n02500267", "indri"], "385": ["n02504013", "Indian_elephant"], "386": ["n02504458", "African_elephant"], "387": ["n02509815", "lesser_panda"], "388": ["n02510455", "giant_panda"], "389": ["n02514041", "barracouta"], "390": ["n02526121", "eel"], "391": ["n02536864", "coho"], "392": ["n02606052", "rock_beauty"], "393": ["n02607072", "anemone_fish"], "394": ["n02640242", "sturgeon"], "395": ["n02641379", "gar"], "396": ["n02643566", "lionfish"], "397": ["n02655020", "puffer"], "398": ["n02666196", "abacus"], "399": ["n02667093", "abaya"], "400": ["n02669723", "academic_gown"], "401": ["n02672831", "accordion"], "402": ["n02676566", "acoustic_guitar"], "403": ["n02687172", "aircraft_carrier"], "404": ["n02690373", "airliner"], "405": ["n02692877", "airship"], "406": ["n02699494", "altar"], "407": ["n02701002", "ambulance"], "408": ["n02704792", "amphibian"], "409": ["n02708093", "analog_clock"], "410": ["n02727426", "apiary"], "411": ["n02730930", "apron"], "412": ["n02747177", "ashcan"], "413": ["n02749479", "assault_rifle"], "414": ["n02769748", "backpack"], "415": ["n02776631", "bakery"], "416": ["n02777292", "balance_beam"], "417": ["n02782093", "balloon"], "418": ["n02783161", "ballpoint"], "419": ["n02786058", "Band_Aid"], "420": ["n02787622", "banjo"], "421": ["n02788148", "bannister"], "422": ["n02790996", "barbell"], "423": ["n02791124", "barber_chair"], "424": ["n02791270", "barbershop"], "425": ["n02793495", "barn"], "426": ["n02794156", "barometer"], "427": ["n02795169", "barrel"], "428": ["n02797295", "barrow"], "429": ["n02799071", "baseball"], "430": ["n02802426", "basketball"], "431": ["n02804414", "bassinet"], "432": ["n02804610", "bassoon"], "433": ["n02807133", "bathing_cap"], "434": ["n02808304", "bath_towel"], "435": ["n02808440", "bathtub"], "436": ["n02814533", "beach_wagon"], "437": ["n02814860", "beacon"], "438": ["n02815834", "beaker"], "439": ["n02817516", "bearskin"], "440": ["n02823428", "beer_bottle"], "441": ["n02823750", "beer_glass"], "442": ["n02825657", "bell_cote"], "443": ["n02834397", "bib"], "444": ["n02835271", "bicycle-built-for-two"], "445": ["n02837789", "bikini"], "446": ["n02840245", "binder"], "447": ["n02841315", "binoculars"], "448": ["n02843684", "birdhouse"], "449": ["n02859443", "boathouse"], "450": ["n02860847", "bobsled"], "451": ["n02865351", "bolo_tie"], "452": ["n02869837", "bonnet"], "453": ["n02870880", "bookcase"], "454": ["n02871525", "bookshop"], "455": ["n02877765", "bottlecap"], "456": ["n02879718", "bow"], "457": ["n02883205", "bow_tie"], "458": ["n02892201", "brass"], "459": ["n02892767", "brassiere"], "460": ["n02894605", "breakwater"], "461": ["n02895154", "breastplate"], "462": ["n02906734", "broom"], "463": ["n02909870", "bucket"], "464": ["n02910353", "buckle"], "465": ["n02916936", "bulletproof_vest"], "466": ["n02917067", "bullet_train"], "467": ["n02927161", "butcher_shop"], "468": ["n02930766", "cab"], "469": ["n02939185", "caldron"], "470": ["n02948072", "candle"], "471": ["n02950826", "cannon"], "472": ["n02951358", "canoe"], "473": ["n02951585", "can_opener"], "474": ["n02963159", "cardigan"], "475": ["n02965783", "car_mirror"], "476": ["n02966193", "carousel"], "477": ["n02966687", "carpenter's_kit"], "478": ["n02971356", "carton"], "479": ["n02974003", "car_wheel"], "480": ["n02977058", "cash_machine"], "481": ["n02978881", "cassette"], "482": ["n02979186", "cassette_player"], "483": ["n02980441", "castle"], "484": ["n02981792", "catamaran"], "485": ["n02988304", "CD_player"], "486": ["n02992211", "cello"], 
"487": ["n02992529", "cellular_telephone"], "488": ["n02999410", "chain"], "489": ["n03000134", "chainlink_fence"], "490": ["n03000247", "chain_mail"], "491": ["n03000684", "chain_saw"], "492": ["n03014705", "chest"], "493": ["n03016953", "chiffonier"], "494": ["n03017168", "chime"], "495": ["n03018349", "china_cabinet"], "496": ["n03026506", "Christmas_stocking"], "497": ["n03028079", "church"], "498": ["n03032252", "cinema"], "499": ["n03041632", "cleaver"], "500": ["n03042490", "cliff_dwelling"], "501": ["n03045698", "cloak"], "502": ["n03047690", "clog"], "503": ["n03062245", "cocktail_shaker"], "504": ["n03063599", "coffee_mug"], "505": ["n03063689", "coffeepot"], "506": ["n03065424", "coil"], "507": ["n03075370", "combination_lock"], "508": ["n03085013", "computer_keyboard"], "509": ["n03089624", "confectionery"], "510": ["n03095699", "container_ship"], "511": ["n03100240", "convertible"], "512": ["n03109150", "corkscrew"], "513": ["n03110669", "cornet"], "514": ["n03124043", "cowboy_boot"], "515": ["n03124170", "cowboy_hat"], "516": ["n03125729", "cradle"], "517": ["n03126707", "crane"], "518": ["n03127747", "crash_helmet"], "519": ["n03127925", "crate"], "520": ["n03131574", "crib"], "521": ["n03133878", "Crock_Pot"], "522": ["n03134739", "croquet_ball"], "523": ["n03141823", "crutch"], "524": ["n03146219", "cuirass"], "525": ["n03160309", "dam"], "526": ["n03179701", "desk"], "527": ["n03180011", "desktop_computer"], "528": ["n03187595", "dial_telephone"], "529": ["n03188531", "diaper"], "530": ["n03196217", "digital_clock"], "531": ["n03197337", "digital_watch"], "532": ["n03201208", "dining_table"], "533": ["n03207743", "dishrag"], "534": ["n03207941", "dishwasher"], "535": ["n03208938", "disk_brake"], "536": ["n03216828", "dock"], "537": ["n03218198", "dogsled"], "538": ["n03220513", "dome"], "539": ["n03223299", "doormat"], "540": ["n03240683", "drilling_platform"], "541": ["n03249569", "drum"], "542": ["n03250847", "drumstick"], "543": ["n03255030", "dumbbell"], "544": ["n03259280", "Dutch_oven"], "545": ["n03271574", "electric_fan"], "546": ["n03272010", "electric_guitar"], "547": ["n03272562", "electric_locomotive"], "548": ["n03290653", "entertainment_center"], "549": ["n03291819", "envelope"], "550": ["n03297495", "espresso_maker"], "551": ["n03314780", "face_powder"], "552": ["n03325584", "feather_boa"], "553": ["n03337140", "file"], "554": ["n03344393", "fireboat"], "555": ["n03345487", "fire_engine"], "556": ["n03347037", "fire_screen"], "557": ["n03355925", "flagpole"], "558": ["n03372029", "flute"], "559": ["n03376595", "folding_chair"], "560": ["n03379051", "football_helmet"], "561": ["n03384352", "forklift"], "562": ["n03388043", "fountain"], "563": ["n03388183", "fountain_pen"], "564": ["n03388549", "four-poster"], "565": ["n03393912", "freight_car"], "566": ["n03394916", "French_horn"], "567": ["n03400231", "frying_pan"], "568": ["n03404251", "fur_coat"], "569": ["n03417042", "garbage_truck"], "570": ["n03424325", "gasmask"], "571": ["n03425413", "gas_pump"], "572": ["n03443371", "goblet"], "573": ["n03444034", "go-kart"], "574": ["n03445777", "golf_ball"], "575": ["n03445924", "golfcart"], "576": ["n03447447", "gondola"], "577": ["n03447721", "gong"], "578": ["n03450230", "gown"], "579": ["n03452741", "grand_piano"], "580": ["n03457902", "greenhouse"], "581": ["n03459775", "grille"], "582": ["n03461385", "grocery_store"], "583": ["n03467068", "guillotine"], "584": ["n03476684", "hair_slide"], "585": ["n03476991", "hair_spray"], "586": ["n03478589", 
"half_track"], "587": ["n03481172", "hammer"], "588": ["n03482405", "hamper"], "589": ["n03483316", "hand_blower"], "590": ["n03485407", "hand-held_computer"], "591": ["n03485794", "handkerchief"], "592": ["n03492542", "hard_disc"], "593": ["n03494278", "harmonica"], "594": ["n03495258", "harp"], "595": ["n03496892", "harvester"], "596": ["n03498962", "hatchet"], "597": ["n03527444", "holster"], "598": ["n03529860", "home_theater"], "599": ["n03530642", "honeycomb"], "600": ["n03532672", "hook"], "601": ["n03534580", "hoopskirt"], "602": ["n03535780", "horizontal_bar"], "603": ["n03538406", "horse_cart"], "604": ["n03544143", "hourglass"], "605": ["n03584254", "iPod"], "606": ["n03584829", "iron"], "607": ["n03590841", "jack-o'-lantern"], "608": ["n03594734", "jean"], "609": ["n03594945", "jeep"], "610": ["n03595614", "jersey"], "611": ["n03598930", "jigsaw_puzzle"], "612": ["n03599486", "jinrikisha"], "613": ["n03602883", "joystick"], "614": ["n03617480", "kimono"], "615": ["n03623198", "knee_pad"], "616": ["n03627232", "knot"], "617": ["n03630383", "lab_coat"], "618": ["n03633091", "ladle"], "619": ["n03637318", "lampshade"], "620": ["n03642806", "laptop"], "621": ["n03649909", "lawn_mower"], "622": ["n03657121", "lens_cap"], "623": ["n03658185", "letter_opener"], "624": ["n03661043", "library"], "625": ["n03662601", "lifeboat"], "626": ["n03666591", "lighter"], "627": ["n03670208", "limousine"], "628": ["n03673027", "liner"], "629": ["n03676483", "lipstick"], "630": ["n03680355", "Loafer"], "631": ["n03690938", "lotion"], "632": ["n03691459", "loudspeaker"], "633": ["n03692522", "loupe"], "634": ["n03697007", "lumbermill"], "635": ["n03706229", "magnetic_compass"], "636": ["n03709823", "mailbag"], "637": ["n03710193", "mailbox"], "638": ["n03710637", "maillot"], "639": ["n03710721", "maillot"], "640": ["n03717622", "manhole_cover"], "641": ["n03720891", "maraca"], "642": ["n03721384", "marimba"], "643": ["n03724870", "mask"], "644": ["n03729826", "matchstick"], "645": ["n03733131", "maypole"], "646": ["n03733281", "maze"], "647": ["n03733805", "measuring_cup"], "648": ["n03742115", "medicine_chest"], "649": ["n03743016", "megalith"], "650": ["n03759954", "microphone"], "651": ["n03761084", "microwave"], "652": ["n03763968", "military_uniform"], "653": ["n03764736", "milk_can"], "654": ["n03769881", "minibus"], "655": ["n03770439", "miniskirt"], "656": ["n03770679", "minivan"], "657": ["n03773504", "missile"], "658": ["n03775071", "mitten"], "659": ["n03775546", "mixing_bowl"], "660": ["n03776460", "mobile_home"], "661": ["n03777568", "Model_T"], "662": ["n03777754", "modem"], "663": ["n03781244", "monastery"], "664": ["n03782006", "monitor"], "665": ["n03785016", "moped"], "666": ["n03786901", "mortar"], "667": ["n03787032", "mortarboard"], "668": ["n03788195", "mosque"], "669": ["n03788365", "mosquito_net"], "670": ["n03791053", "motor_scooter"], "671": ["n03792782", "mountain_bike"], "672": ["n03792972", "mountain_tent"], "673": ["n03793489", "mouse"], "674": ["n03794056", "mousetrap"], "675": ["n03796401", "moving_van"], "676": ["n03803284", "muzzle"], "677": ["n03804744", "nail"], "678": ["n03814639", "neck_brace"], "679": ["n03814906", "necklace"], "680": ["n03825788", "nipple"], "681": ["n03832673", "notebook"], "682": ["n03837869", "obelisk"], "683": ["n03838899", "oboe"], "684": ["n03840681", "ocarina"], "685": ["n03841143", "odometer"], "686": ["n03843555", "oil_filter"], "687": ["n03854065", "organ"], "688": ["n03857828", "oscilloscope"], "689": ["n03866082", "overskirt"], 
"690": ["n03868242", "oxcart"], "691": ["n03868863", "oxygen_mask"], "692": ["n03871628", "packet"], "693": ["n03873416", "paddle"], "694": ["n03874293", "paddlewheel"], "695": ["n03874599", "padlock"], "696": ["n03876231", "paintbrush"], "697": ["n03877472", "pajama"], "698": ["n03877845", "palace"], "699": ["n03884397", "panpipe"], "700": ["n03887697", "paper_towel"], "701": ["n03888257", "parachute"], "702": ["n03888605", "parallel_bars"], "703": ["n03891251", "park_bench"], "704": ["n03891332", "parking_meter"], "705": ["n03895866", "passenger_car"], "706": ["n03899768", "patio"], "707": ["n03902125", "pay-phone"], "708": ["n03903868", "pedestal"], "709": ["n03908618", "pencil_box"], "710": ["n03908714", "pencil_sharpener"], "711": ["n03916031", "perfume"], "712": ["n03920288", "Petri_dish"], "713": ["n03924679", "photocopier"], "714": ["n03929660", "pick"], "715": ["n03929855", "pickelhaube"], "716": ["n03930313", "picket_fence"], "717": ["n03930630", "pickup"], "718": ["n03933933", "pier"], "719": ["n03935335", "piggy_bank"], "720": ["n03937543", "pill_bottle"], "721": ["n03938244", "pillow"], "722": ["n03942813", "ping-pong_ball"], "723": ["n03944341", "pinwheel"], "724": ["n03947888", "pirate"], "725": ["n03950228", "pitcher"], "726": ["n03954731", "plane"], "727": ["n03956157", "planetarium"], "728": ["n03958227", "plastic_bag"], "729": ["n03961711", "plate_rack"], "730": ["n03967562", "plow"], "731": ["n03970156", "plunger"], "732": ["n03976467", "Polaroid_camera"], "733": ["n03976657", "pole"], "734": ["n03977966", "police_van"], "735": ["n03980874", "poncho"], "736": ["n03982430", "pool_table"], "737": ["n03983396", "pop_bottle"], "738": ["n03991062", "pot"], "739": ["n03992509", "potter's_wheel"], "740": ["n03995372", "power_drill"], "741": ["n03998194", "prayer_rug"], "742": ["n04004767", "printer"], "743": ["n04005630", "prison"], "744": ["n04008634", "projectile"], "745": ["n04009552", "projector"], "746": ["n04019541", "puck"], "747": ["n04023962", "punching_bag"], "748": ["n04026417", "purse"], "749": ["n04033901", "quill"], "750": ["n04033995", "quilt"], "751": ["n04037443", "racer"], "752": ["n04039381", "racket"], "753": ["n04040759", "radiator"], "754": ["n04041544", "radio"], "755": ["n04044716", "radio_telescope"], "756": ["n04049303", "rain_barrel"], "757": ["n04065272", "recreational_vehicle"], "758": ["n04067472", "reel"], "759": ["n04069434", "reflex_camera"], "760": ["n04070727", "refrigerator"], "761": ["n04074963", "remote_control"], "762": ["n04081281", "restaurant"], "763": ["n04086273", "revolver"], "764": ["n04090263", "rifle"], "765": ["n04099969", "rocking_chair"], "766": ["n04111531", "rotisserie"], "767": ["n04116512", "rubber_eraser"], "768": ["n04118538", "rugby_ball"], "769": ["n04118776", "rule"], "770": ["n04120489", "running_shoe"], "771": ["n04125021", "safe"], "772": ["n04127249", "safety_pin"], "773": ["n04131690", "saltshaker"], "774": ["n04133789", "sandal"], "775": ["n04136333", "sarong"], "776": ["n04141076", "sax"], "777": ["n04141327", "scabbard"], "778": ["n04141975", "scale"], "779": ["n04146614", "school_bus"], "780": ["n04147183", "schooner"], "781": ["n04149813", "scoreboard"], "782": ["n04152593", "screen"], "783": ["n04153751", "screw"], "784": ["n04154565", "screwdriver"], "785": ["n04162706", "seat_belt"], "786": ["n04179913", "sewing_machine"], "787": ["n04192698", "shield"], "788": ["n04200800", "shoe_shop"], "789": ["n04201297", "shoji"], "790": ["n04204238", "shopping_basket"], "791": ["n04204347", "shopping_cart"], "792": 
["n04208210", "shovel"], "793": ["n04209133", "shower_cap"], "794": ["n04209239", "shower_curtain"], "795": ["n04228054", "ski"], "796": ["n04229816", "ski_mask"], "797": ["n04235860", "sleeping_bag"], "798": ["n04238763", "slide_rule"], "799": ["n04239074", "sliding_door"], "800": ["n04243546", "slot"], "801": ["n04251144", "snorkel"], "802": ["n04252077", "snowmobile"], "803": ["n04252225", "snowplow"], "804": ["n04254120", "soap_dispenser"], "805": ["n04254680", "soccer_ball"], "806": ["n04254777", "sock"], "807": ["n04258138", "solar_dish"], "808": ["n04259630", "sombrero"], "809": ["n04263257", "soup_bowl"], "810": ["n04264628", "space_bar"], "811": ["n04265275", "space_heater"], "812": ["n04266014", "space_shuttle"], "813": ["n04270147", "spatula"], "814": ["n04273569", "speedboat"], "815": ["n04275548", "spider_web"], "816": ["n04277352", "spindle"], "817": ["n04285008", "sports_car"], "818": ["n04286575", "spotlight"], "819": ["n04296562", "stage"], "820": ["n04310018", "steam_locomotive"], "821": ["n04311004", "steel_arch_bridge"], "822": ["n04311174", "steel_drum"], "823": ["n04317175", "stethoscope"], "824": ["n04325704", "stole"], "825": ["n04326547", "stone_wall"], "826": ["n04328186", "stopwatch"], "827": ["n04330267", "stove"], "828": ["n04332243", "strainer"], "829": ["n04335435", "streetcar"], "830": ["n04336792", "stretcher"], "831": ["n04344873", "studio_couch"], "832": ["n04346328", "stupa"], "833": ["n04347754", "submarine"], "834": ["n04350905", "suit"], "835": ["n04355338", "sundial"], "836": ["n04355933", "sunglass"], "837": ["n04356056", "sunglasses"], "838": ["n04357314", "sunscreen"], "839": ["n04366367", "suspension_bridge"], "840": ["n04367480", "swab"], "841": ["n04370456", "sweatshirt"], "842": ["n04371430", "swimming_trunks"], "843": ["n04371774", "swing"], "844": ["n04372370", "switch"], "845": ["n04376876", "syringe"], "846": ["n04380533", "table_lamp"], "847": ["n04389033", "tank"], "848": ["n04392985", "tape_player"], "849": ["n04398044", "teapot"], "850": ["n04399382", "teddy"], "851": ["n04404412", "television"], "852": ["n04409515", "tennis_ball"], "853": ["n04417672", "thatch"], "854": ["n04418357", "theater_curtain"], "855": ["n04423845", "thimble"], "856": ["n04428191", "thresher"], "857": ["n04429376", "throne"], "858": ["n04435653", "tile_roof"], "859": ["n04442312", "toaster"], "860": ["n04443257", "tobacco_shop"], "861": ["n04447861", "toilet_seat"], "862": ["n04456115", "torch"], "863": ["n04458633", "totem_pole"], "864": ["n04461696", "tow_truck"], "865": ["n04462240", "toyshop"], "866": ["n04465501", "tractor"], "867": ["n04467665", "trailer_truck"], "868": ["n04476259", "tray"], "869": ["n04479046", "trench_coat"], "870": ["n04482393", "tricycle"], "871": ["n04483307", "trimaran"], "872": ["n04485082", "tripod"], "873": ["n04486054", "triumphal_arch"], "874": ["n04487081", "trolleybus"], "875": ["n04487394", "trombone"], "876": ["n04493381", "tub"], "877": ["n04501370", "turnstile"], "878": ["n04505470", "typewriter_keyboard"], "879": ["n04507155", "umbrella"], "880": ["n04509417", "unicycle"], "881": ["n04515003", "upright"], "882": ["n04517823", "vacuum"], "883": ["n04522168", "vase"], "884": ["n04523525", "vault"], "885": ["n04525038", "velvet"], "886": ["n04525305", "vending_machine"], "887": ["n04532106", "vestment"], "888": ["n04532670", "viaduct"], "889": ["n04536866", "violin"], "890": ["n04540053", "volleyball"], "891": ["n04542943", "waffle_iron"], "892": ["n04548280", "wall_clock"], "893": ["n04548362", "wallet"], "894": 
["n04550184", "wardrobe"], "895": ["n04552348", "warplane"], "896": ["n04553703", "washbasin"], "897": ["n04554684", "washer"], "898": ["n04557648", "water_bottle"], "899": ["n04560804", "water_jug"], "900": ["n04562935", "water_tower"], "901": ["n04579145", "whiskey_jug"], "902": ["n04579432", "whistle"], "903": ["n04584207", "wig"], "904": ["n04589890", "window_screen"], "905": ["n04590129", "window_shade"], "906": ["n04591157", "Windsor_tie"], "907": ["n04591713", "wine_bottle"], "908": ["n04592741", "wing"], "909": ["n04596742", "wok"], "910": ["n04597913", "wooden_spoon"], "911": ["n04599235", "wool"], "912": ["n04604644", "worm_fence"], "913": ["n04606251", "wreck"], "914": ["n04612504", "yawl"], "915": ["n04613696", "yurt"], "916": ["n06359193", "web_site"], "917": ["n06596364", "comic_book"], "918": ["n06785654", "crossword_puzzle"], "919": ["n06794110", "street_sign"], "920": ["n06874185", "traffic_light"], "921": ["n07248320", "book_jacket"], "922": ["n07565083", "menu"], "923": ["n07579787", "plate"], "924": ["n07583066", "guacamole"], "925": ["n07584110", "consomme"], "926": ["n07590611", "hot_pot"], "927": ["n07613480", "trifle"], "928": ["n07614500", "ice_cream"], "929": ["n07615774", "ice_lolly"], "930": ["n07684084", "French_loaf"], "931": ["n07693725", "bagel"], "932": ["n07695742", "pretzel"], "933": ["n07697313", "cheeseburger"], "934": ["n07697537", "hotdog"], "935": ["n07711569", "mashed_potato"], "936": ["n07714571", "head_cabbage"], "937": ["n07714990", "broccoli"], "938": ["n07715103", "cauliflower"], "939": ["n07716358", "zucchini"], "940": ["n07716906", "spaghetti_squash"], "941": ["n07717410", "acorn_squash"], "942": ["n07717556", "butternut_squash"], "943": ["n07718472", "cucumber"], "944": ["n07718747", "artichoke"], "945": ["n07720875", "bell_pepper"], "946": ["n07730033", "cardoon"], "947": ["n07734744", "mushroom"], "948": ["n07742313", "Granny_Smith"], "949": ["n07745940", "strawberry"], "950": ["n07747607", "orange"], "951": ["n07749582", "lemon"], "952": ["n07753113", "fig"], "953": ["n07753275", "pineapple"], "954": ["n07753592", "banana"], "955": ["n07754684", "jackfruit"], "956": ["n07760859", "custard_apple"], "957": ["n07768694", "pomegranate"], "958": ["n07802026", "hay"], "959": ["n07831146", "carbonara"], "960": ["n07836838", "chocolate_sauce"], "961": ["n07860988", "dough"], "962": ["n07871810", "meat_loaf"], "963": ["n07873807", "pizza"], "964": ["n07875152", "potpie"], "965": ["n07880968", "burrito"], "966": ["n07892512", "red_wine"], "967": ["n07920052", "espresso"], "968": ["n07930864", "cup"], "969": ["n07932039", "eggnog"], "970": ["n09193705", "alp"], "971": ["n09229709", "bubble"], "972": ["n09246464", "cliff"], "973": ["n09256479", "coral_reef"], "974": ["n09288635", "geyser"], "975": ["n09332890", "lakeside"], "976": ["n09399592", "promontory"], "977": ["n09421951", "sandbar"], "978": ["n09428293", "seashore"], "979": ["n09468604", "valley"], "980": ["n09472597", "volcano"], "981": ["n09835506", "ballplayer"], "982": ["n10148035", "groom"], "983": ["n10565667", "scuba_diver"], "984": ["n11879895", "rapeseed"], "985": ["n11939491", "daisy"], "986": ["n12057211", "yellow_lady's_slipper"], "987": ["n12144580", "corn"], "988": ["n12267677", "acorn"], "989": ["n12620546", "hip"], "990": ["n12768682", "buckeye"], "991": ["n12985857", "coral_fungus"], "992": ["n12998815", "agaric"], "993": ["n13037406", "gyromitra"], "994": ["n13040303", "stinkhorn"], "995": ["n13044778", "earthstar"], "996": ["n13052670", "hen-of-the-woods"], "997": 
["n13054560", "bolete"], "998": ["n13133613", "ear"], "999": ["n15075141", "toilet_tissue"]} diff --git a/gallery/assets/person1.jpg b/gallery/assets/person1.jpg new file mode 100644 index 00000000000..83251c84a79 Binary files /dev/null and b/gallery/assets/person1.jpg differ diff --git a/gallery/assets/repurposing_annotations_thumbnail.png b/gallery/assets/repurposing_annotations_thumbnail.png new file mode 100644 index 00000000000..367eb4ec128 Binary files /dev/null and b/gallery/assets/repurposing_annotations_thumbnail.png differ diff --git a/gallery/assets/transforms_thumbnail.png b/gallery/assets/transforms_thumbnail.png new file mode 100644 index 00000000000..f9df96c9066 Binary files /dev/null and b/gallery/assets/transforms_thumbnail.png differ diff --git a/gallery/assets/visualization_utils_thumbnail2.png b/gallery/assets/visualization_utils_thumbnail2.png new file mode 100644 index 00000000000..cf057e04207 Binary files /dev/null and b/gallery/assets/visualization_utils_thumbnail2.png differ diff --git a/gallery/others/README.rst b/gallery/others/README.rst new file mode 100644 index 00000000000..fafb007d985 --- /dev/null +++ b/gallery/others/README.rst @@ -0,0 +1,2 @@ +Others +------ diff --git a/gallery/others/plot_optical_flow.py b/gallery/others/plot_optical_flow.py new file mode 100644 index 00000000000..6296c8e667e --- /dev/null +++ b/gallery/others/plot_optical_flow.py @@ -0,0 +1,198 @@ +""" +===================================================== +Optical Flow: Predicting movement with the RAFT model +===================================================== + +.. note:: + Try on `Colab `_ + or :ref:`go to the end ` to download the full example code. + +Optical flow is the task of predicting movement between two images, usually two +consecutive frames of a video. Optical flow models take two images as input, and +predict a flow: the flow indicates the displacement of every single pixel in the +first image, and maps it to its corresponding pixel in the second image. Flows +are (2, H, W)-dimensional tensors, where the first axis corresponds to the +predicted horizontal and vertical displacements. + +The following example illustrates how torchvision can be used to predict flows +using our implementation of the RAFT model. We will also see how to convert the +predicted flows to RGB images for visualization. +""" + +import numpy as np +import torch +import matplotlib.pyplot as plt +import torchvision.transforms.functional as F + + +plt.rcParams["savefig.bbox"] = "tight" +# sphinx_gallery_thumbnail_number = 2 + + +def plot(imgs, **imshow_kwargs): + if not isinstance(imgs[0], list): + # Make a 2d grid even if there's just 1 row + imgs = [imgs] + + num_rows = len(imgs) + num_cols = len(imgs[0]) + _, axs = plt.subplots(nrows=num_rows, ncols=num_cols, squeeze=False) + for row_idx, row in enumerate(imgs): + for col_idx, img in enumerate(row): + ax = axs[row_idx, col_idx] + img = F.to_pil_image(img.to("cpu")) + ax.imshow(np.asarray(img), **imshow_kwargs) + ax.set(xticklabels=[], yticklabels=[], xticks=[], yticks=[]) + + plt.tight_layout() + +# %% +# Reading Videos Using Torchvision +# -------------------------------- +# We will first read a video using :func:`~torchvision.io.read_video`. +# Alternatively one can use the new :class:`~torchvision.io.VideoReader` API (if +# torchvision is built from source). +# The video we will use here is free of use from `pexels.com +# `_, +# credits go to `Pavel Danilyuk `_. 
+
+
+import tempfile
+from pathlib import Path
+from urllib.request import urlretrieve
+
+
+video_url = "https://download.pytorch.org/tutorial/pexelscom_pavel_danilyuk_basketball_hd.mp4"
+video_path = Path(tempfile.mkdtemp()) / "basketball.mp4"
+_ = urlretrieve(video_url, video_path)
+
+# %%
+# :func:`~torchvision.io.read_video` returns the video frames, audio frames and
+# the metadata associated with the video. In our case, we only need the video
+# frames.
+#
+# Here we will just make 2 predictions between 2 pre-selected pairs of frames,
+# namely frames (100, 101) and (150, 151). Each of these pairs corresponds to a
+# single model input.
+
+from torchvision.io import read_video
+frames, _, _ = read_video(str(video_path), output_format="TCHW")
+
+img1_batch = torch.stack([frames[100], frames[150]])
+img2_batch = torch.stack([frames[101], frames[151]])
+
+plot(img1_batch)
+
+# %%
+# The RAFT model accepts RGB images. We first get the frames from
+# :func:`~torchvision.io.read_video` and resize them to ensure their dimensions
+# are divisible by 8. Note that we explicitly use ``antialias=False``, because
+# this is how those models were trained. Then we use the transforms bundled into
+# the weights in order to preprocess the input and rescale its values to the
+# required ``[-1, 1]`` interval.
+
+from torchvision.models.optical_flow import Raft_Large_Weights
+
+weights = Raft_Large_Weights.DEFAULT
+transforms = weights.transforms()
+
+
+def preprocess(img1_batch, img2_batch):
+    img1_batch = F.resize(img1_batch, size=[520, 960], antialias=False)
+    img2_batch = F.resize(img2_batch, size=[520, 960], antialias=False)
+    return transforms(img1_batch, img2_batch)
+
+
+img1_batch, img2_batch = preprocess(img1_batch, img2_batch)
+
+print(f"shape = {img1_batch.shape}, dtype = {img1_batch.dtype}")
+
+
+# %%
+# Estimating Optical Flow using RAFT
+# ----------------------------------
+# We will use our RAFT implementation from
+# :func:`~torchvision.models.optical_flow.raft_large`, which follows the same
+# architecture as the one described in the `original paper `_.
+# We also provide the :func:`~torchvision.models.optical_flow.raft_small` model
+# builder, which is smaller and faster to run, sacrificing a bit of accuracy.
+
+from torchvision.models.optical_flow import raft_large
+
+# If you can, run this example on a GPU: it will be a lot faster.
+device = "cuda" if torch.cuda.is_available() else "cpu"
+
+model = raft_large(weights=Raft_Large_Weights.DEFAULT, progress=False).to(device)
+model = model.eval()
+
+list_of_flows = model(img1_batch.to(device), img2_batch.to(device))
+print(f"type = {type(list_of_flows)}")
+print(f"length = {len(list_of_flows)} = number of iterations of the model")
+
+# %%
+# The RAFT model outputs a list of predicted flows, where each entry is an
+# (N, 2, H, W) batch of predicted flows that corresponds to a given "iteration"
+# in the model. For more details on the iterative nature of the model, please
+# refer to the `original paper `_. Here, we
+# are only interested in the final predicted flows (they are the most accurate
+# ones), so we will just retrieve the last item in the list.
+#
+# As described above, a flow is a tensor with dimensions (2, H, W) (or (N, 2, H,
+# W) for batches of flows) where each entry corresponds to the horizontal and
+# vertical displacement of each pixel from the first image to the second image.
+# Note that the predicted flows are in "pixel" units; they are not normalized
+# w.r.t. the dimensions of the images.
+predicted_flows = list_of_flows[-1]
+print(f"dtype = {predicted_flows.dtype}")
+print(f"shape = {predicted_flows.shape} = (N, 2, H, W)")
+print(f"min = {predicted_flows.min()}, max = {predicted_flows.max()}")
+
+
+# %%
+# Visualizing predicted flows
+# ---------------------------
+# Torchvision provides the :func:`~torchvision.utils.flow_to_image` utility to
+# convert a flow into an RGB image. It also supports batches of flows.
+# Each "direction" in the flow will be mapped to a given RGB color. In the
+# images below, pixels with similar colors are assumed by the model to be moving
+# in similar directions. The model is properly able to predict the movement of
+# the ball and the player. Note in particular the different predicted direction
+# of the ball in the first image (going to the left) and in the second image
+# (going up).
+
+from torchvision.utils import flow_to_image
+
+flow_imgs = flow_to_image(predicted_flows)
+
+# The images have been mapped into [-1, 1] but for plotting we want them in [0, 1]
+img1_batch = [(img1 + 1) / 2 for img1 in img1_batch]
+
+grid = [[img1, flow_img] for (img1, flow_img) in zip(img1_batch, flow_imgs)]
+plot(grid)
+
+# %%
+# Bonus: Creating GIFs of predicted flows
+# ---------------------------------------
+# In the example above we have only shown the predicted flows of 2 pairs of
+# frames. A fun way to apply optical flow models is to run the model on an
+# entire video, and create a new video from all the predicted flows. Below is a
+# snippet that can get you started with this. We comment out the code, because
+# this example is being rendered on a machine without a GPU, and it would take
+# too long to run it.
+
+# from torchvision.io import write_jpeg
+# for i, (img1, img2) in enumerate(zip(frames, frames[1:])):
+#     # Note: it would be faster to predict batches of flows instead of individual flows
+#     img1, img2 = preprocess(img1, img2)
+
+#     list_of_flows = model(img1.to(device), img2.to(device))
+#     predicted_flow = list_of_flows[-1][0]
+#     flow_img = flow_to_image(predicted_flow).to("cpu")
+#     output_folder = "/tmp/"  # Update this to the folder of your choice
+#     write_jpeg(flow_img, output_folder + f"predicted_flow_{i}.jpg")
+
+# %%
+# Once the .jpg flow images are saved, you can convert them into a video or a
+# GIF using ffmpeg with e.g.:
+#
+# ffmpeg -f image2 -framerate 30 -i predicted_flow_%d.jpg -loop -1 flow.gif
diff --git a/gallery/others/plot_repurposing_annotations.py b/gallery/others/plot_repurposing_annotations.py
new file mode 100644
index 00000000000..2c2e10ffb2a
--- /dev/null
+++ b/gallery/others/plot_repurposing_annotations.py
@@ -0,0 +1,211 @@
+"""
+=====================================
+Repurposing masks into bounding boxes
+=====================================
+
+.. note::
+    Try on `Colab `_
+    or :ref:`go to the end ` to download the full example code.
+
+The following example illustrates the operations available in
+the :ref:`torchvision.ops ` module for repurposing
+segmentation masks into object localization annotations for different tasks
+(e.g. transforming masks used by instance and panoptic segmentation
+methods into bounding boxes used by object detection methods).
+""" + +# sphinx_gallery_thumbnail_path = "../../gallery/assets/repurposing_annotations_thumbnail.png" + +import os +import numpy as np +import torch +import matplotlib.pyplot as plt + +import torchvision.transforms.functional as F + + +ASSETS_DIRECTORY = "../assets" + +plt.rcParams["savefig.bbox"] = "tight" + + +def show(imgs): + if not isinstance(imgs, list): + imgs = [imgs] + fix, axs = plt.subplots(ncols=len(imgs), squeeze=False) + for i, img in enumerate(imgs): + img = img.detach() + img = F.to_pil_image(img) + axs[0, i].imshow(np.asarray(img)) + axs[0, i].set(xticklabels=[], yticklabels=[], xticks=[], yticks=[]) + + +# %% +# Masks +# ----- +# In tasks like instance and panoptic segmentation, masks are commonly defined, and are defined by this package, +# as a multi-dimensional array (e.g. a NumPy array or a PyTorch tensor) with the following shape: +# +# (num_objects, height, width) +# +# Where num_objects is the number of annotated objects in the image. Each (height, width) object corresponds to exactly +# one object. For example, if your input image has the dimensions 224 x 224 and has four annotated objects the shape +# of your masks annotation has the following shape: +# +# (4, 224, 224). +# +# A nice property of masks is that they can be easily repurposed to be used in methods to solve a variety of object +# localization tasks. + +# %% +# Converting Masks to Bounding Boxes +# ----------------------------------------------- +# For example, the :func:`~torchvision.ops.masks_to_boxes` operation can be used to +# transform masks into bounding boxes that can be +# used as input to detection models such as FasterRCNN and RetinaNet. +# We will take images and masks from the `PenFudan Dataset `_. + + +from torchvision.io import decode_image + +img_path = os.path.join(ASSETS_DIRECTORY, "FudanPed00054.png") +mask_path = os.path.join(ASSETS_DIRECTORY, "FudanPed00054_mask.png") +img = decode_image(img_path) +mask = decode_image(mask_path) + + +# %% +# Here the masks are represented as a PNG Image, with floating point values. +# Each pixel is encoded as different colors, with 0 being background. +# Notice that the spatial dimensions of image and mask match. + +print(mask.size()) +print(img.size()) +print(mask) + +# %% + +# We get the unique colors, as these would be the object ids. +obj_ids = torch.unique(mask) + +# first id is the background, so remove it. +obj_ids = obj_ids[1:] + +# split the color-encoded mask into a set of boolean masks. +# Note that this snippet would work as well if the masks were float values instead of ints. +masks = mask == obj_ids[:, None, None] + +# %% +# Now the masks are a boolean tensor. +# The first dimension in this case 3 and denotes the number of instances: there are 3 people in the image. +# The other two dimensions are height and width, which are equal to the dimensions of the image. +# For each instance, the boolean tensors represent if the particular pixel +# belongs to the segmentation mask of the image. + +print(masks.size()) +print(masks) + +# %% +# Let us visualize an image and plot its corresponding segmentation masks. +# We will use the :func:`~torchvision.utils.draw_segmentation_masks` to draw the segmentation masks. + +from torchvision.utils import draw_segmentation_masks + +drawn_masks = [] +for mask in masks: + drawn_masks.append(draw_segmentation_masks(img, mask, alpha=0.8, colors="blue")) + +show(drawn_masks) + +# %% +# To convert the boolean masks into bounding boxes. 
+# We will use the :func:`~torchvision.ops.masks_to_boxes` from the torchvision.ops module +# It returns the boxes in ``(xmin, ymin, xmax, ymax)`` format. + +from torchvision.ops import masks_to_boxes + +boxes = masks_to_boxes(masks) +print(boxes.size()) +print(boxes) + +# %% +# As the shape denotes, there are 3 boxes and in ``(xmin, ymin, xmax, ymax)`` format. +# These can be visualized very easily with :func:`~torchvision.utils.draw_bounding_boxes` utility +# provided in :ref:`torchvision.utils `. + +from torchvision.utils import draw_bounding_boxes + +drawn_boxes = draw_bounding_boxes(img, boxes, colors="red") +show(drawn_boxes) + +# %% +# These boxes can now directly be used by detection models in torchvision. +# Here is demo with a Faster R-CNN model loaded from +# :func:`~torchvision.models.detection.fasterrcnn_resnet50_fpn` + +from torchvision.models.detection import fasterrcnn_resnet50_fpn, FasterRCNN_ResNet50_FPN_Weights + +weights = FasterRCNN_ResNet50_FPN_Weights.DEFAULT +model = fasterrcnn_resnet50_fpn(weights=weights, progress=False) +print(img.size()) + +transforms = weights.transforms() +img = transforms(img) +target = {} +target["boxes"] = boxes +target["labels"] = labels = torch.ones((masks.size(0),), dtype=torch.int64) +detection_outputs = model(img.unsqueeze(0), [target]) + + +# %% +# Converting Segmentation Dataset to Detection Dataset +# ---------------------------------------------------- +# +# With this utility it becomes very simple to convert a segmentation dataset to a detection dataset. +# With this we can now use a segmentation dataset to train a detection model. +# One can similarly convert panoptic dataset to detection dataset. +# Here is an example where we re-purpose the dataset from the +# `PenFudan Detection Tutorial `_. + +class SegmentationToDetectionDataset(torch.utils.data.Dataset): + def __init__(self, root, transforms): + self.root = root + self.transforms = transforms + # load all image files, sorting them to + # ensure that they are aligned + self.imgs = list(sorted(os.listdir(os.path.join(root, "PNGImages")))) + self.masks = list(sorted(os.listdir(os.path.join(root, "PedMasks")))) + + def __getitem__(self, idx): + # load images and masks + img_path = os.path.join(self.root, "PNGImages", self.imgs[idx]) + mask_path = os.path.join(self.root, "PedMasks", self.masks[idx]) + + img = decode_image(img_path) + mask = decode_image(mask_path) + + img = F.convert_image_dtype(img, dtype=torch.float) + mask = F.convert_image_dtype(mask, dtype=torch.float) + + # We get the unique colors, as these would be the object ids. + obj_ids = torch.unique(mask) + + # first id is the background, so remove it. + obj_ids = obj_ids[1:] + + # split the color-encoded mask into a set of boolean masks. + masks = mask == obj_ids[:, None, None] + + boxes = masks_to_boxes(masks) + + # there is only one class + labels = torch.ones((masks.shape[0],), dtype=torch.int64) + + target = {} + target["boxes"] = boxes + target["labels"] = labels + + if self.transforms is not None: + img, target = self.transforms(img, target) + + return img, target diff --git a/gallery/others/plot_scripted_tensor_transforms.py b/gallery/others/plot_scripted_tensor_transforms.py new file mode 100644 index 00000000000..da2213347e3 --- /dev/null +++ b/gallery/others/plot_scripted_tensor_transforms.py @@ -0,0 +1,136 @@ +""" +=================== +Torchscript support +=================== + +.. note:: + Try on `Colab `_ + or :ref:`go to the end ` to download the full example code. 
+ +This example illustrates `torchscript +`_ support of the torchvision +:ref:`transforms ` on Tensor images. +""" + +# %% +from pathlib import Path + +import matplotlib.pyplot as plt + +import torch +import torch.nn as nn + +import torchvision.transforms as v1 +from torchvision.io import decode_image + +plt.rcParams["savefig.bbox"] = 'tight' +torch.manual_seed(1) + +# If you're trying to run that on Colab, you can download the assets and the +# helpers from https://github.com/pytorch/vision/tree/main/gallery/ +import sys +sys.path += ["../transforms"] +from helpers import plot +ASSETS_PATH = Path('../assets') + + +# %% +# Most transforms support torchscript. For composing transforms, we use +# :class:`torch.nn.Sequential` instead of +# :class:`~torchvision.transforms.v2.Compose`: + +dog1 = decode_image(str(ASSETS_PATH / 'dog1.jpg')) +dog2 = decode_image(str(ASSETS_PATH / 'dog2.jpg')) + +transforms = torch.nn.Sequential( + v1.RandomCrop(224), + v1.RandomHorizontalFlip(p=0.3), +) + +scripted_transforms = torch.jit.script(transforms) + +plot([dog1, scripted_transforms(dog1), dog2, scripted_transforms(dog2)]) + + +# %% +# .. warning:: +# +# Above we have used transforms from the ``torchvision.transforms`` +# namespace, i.e. the "v1" transforms. The v2 transforms from the +# ``torchvision.transforms.v2`` namespace are the :ref:`recommended +# ` way to use transforms in your code. +# +# The v2 transforms also support torchscript, but if you call +# ``torch.jit.script()`` on a v2 **class** transform, you'll actually end up +# with its (scripted) v1 equivalent. This may lead to slightly different +# results between the scripted and eager executions due to implementation +# differences between v1 and v2. +# +# If you really need torchscript support for the v2 transforms, **we +# recommend scripting the functionals** from the +# ``torchvision.transforms.v2.functional`` namespace to avoid surprises. +# +# Below we now show how to combine image transformations and a model forward +# pass, while using ``torch.jit.script`` to obtain a single scripted module. +# +# Let's define a ``Predictor`` module that transforms the input tensor and then +# applies an ImageNet model on it. 
+
+from torchvision.models import resnet18, ResNet18_Weights
+
+
+class Predictor(nn.Module):
+
+    def __init__(self):
+        super().__init__()
+        weights = ResNet18_Weights.DEFAULT
+        self.resnet18 = resnet18(weights=weights, progress=False).eval()
+        self.transforms = weights.transforms(antialias=True)
+
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        with torch.no_grad():
+            x = self.transforms(x)
+            y_pred = self.resnet18(x)
+            return y_pred.argmax(dim=1)
+
+
+# %%
+# Now, let's define scripted and non-scripted instances of ``Predictor`` and
+# apply them to multiple tensor images of the same size:
+
+device = "cuda" if torch.cuda.is_available() else "cpu"
+
+predictor = Predictor().to(device)
+scripted_predictor = torch.jit.script(predictor).to(device)
+
+batch = torch.stack([dog1, dog2]).to(device)
+
+res = predictor(batch)
+res_scripted = scripted_predictor(batch)
+
+# %%
+# We can verify that the predictions of the scripted and non-scripted models are
+# the same:
+
+import json
+
+with open(Path('../assets') / 'imagenet_class_index.json') as labels_file:
+    labels = json.load(labels_file)
+
+for i, (pred, pred_scripted) in enumerate(zip(res, res_scripted)):
+    assert pred == pred_scripted
+    print(f"Prediction for Dog {i + 1}: {labels[str(pred.item())]}")
+
+# %%
+# Since the model is scripted, it can easily be saved to disk and re-used:
+
+import tempfile
+
+with tempfile.NamedTemporaryFile() as f:
+    scripted_predictor.save(f.name)
+
+    dumped_scripted_predictor = torch.jit.load(f.name)
+    res_scripted_dumped = dumped_scripted_predictor(batch)
+assert (res_scripted_dumped == res_scripted).all()
+
+# %%
diff --git a/gallery/others/plot_video_api.py b/gallery/others/plot_video_api.py
new file mode 100644
index 00000000000..3a67e4d86d0
--- /dev/null
+++ b/gallery/others/plot_video_api.py
@@ -0,0 +1,346 @@
+"""
+=========
+Video API
+=========
+
+.. note::
+    Try on `Colab `_
+    or :ref:`go to the end ` to download the full example code.
+
+This example illustrates some of the APIs that torchvision offers for
+videos, together with examples of how to build datasets and more.
+"""
+
+# %%
+# 1. Introduction: building a new video object and examining the properties
+# --------------------------------------------------------------------------
+# First we select a video to test the object out. For the sake of argument,
+# we're using one from the kinetics400 dataset.
+# To create it, we need to define the path and the stream we want to use.
+
+# %%
+# Chosen video statistics:
+#
+# - WUzgd7C1pWA.mp4
+#     - source:
+#         - kinetics-400
+#     - video:
+#         - H-264
+#         - MPEG-4 AVC (part 10) (avc1)
+#         - fps: 29.97
+#     - audio:
+#         - MPEG AAC audio (mp4a)
+#         - sample rate: 48 kHz
+#
+
+import torch
+import torchvision
+from torchvision.datasets.utils import download_url
+torchvision.set_video_backend("video_reader")
+
+# Download the sample video
+download_url(
+    "https://github.com/pytorch/vision/blob/main/test/assets/videos/WUzgd7C1pWA.mp4?raw=true",
+    ".",
+    "WUzgd7C1pWA.mp4"
+)
+video_path = "./WUzgd7C1pWA.mp4"
+
+# %%
+# Streams are defined in a similar fashion as torch devices. We encode them as strings in a form
+# of ``stream_type:stream_id`` where ``stream_type`` is a string and ``stream_id`` a long int.
+# The constructor accepts passing a ``stream_type`` only, in which case the stream is auto-discovered.
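+# For example, both of the descriptors below select the first video stream; the
+# explicit ``"video:0"`` form only matters when a container holds several
+# streams of the same type (a quick sketch):
+
+_ = torchvision.io.VideoReader(video_path, "video")    # stream type only, auto-discovered
+_ = torchvision.io.VideoReader(video_path, "video:0")  # stream type with an explicit stream id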
+
+# %%
+# Firstly, let's get the metadata for our particular video:
+
+stream = "video"
+video = torchvision.io.VideoReader(video_path, stream)
+video.get_metadata()
+
+# %%
+# Here we can see that the video has two streams: a video and an audio stream.
+# Currently available stream types include ['video', 'audio'].
+# Each descriptor consists of two parts: stream type (e.g. 'video') and a unique stream id
+# (which are determined by video encoding).
+# In this way, if the video container contains multiple streams of the same type,
+# users can access the one they want.
+# If only stream type is passed, the decoder auto-detects the first stream of that type and returns it.
+
+# %%
+# Let's read all the frames from the audio stream (which we set below). By default, the return value of
+# ``next(video_reader)`` is a dict containing the following fields:
+#
+# - ``data``: containing a torch.tensor
+# - ``pts``: containing a float timestamp of this particular frame
+
+metadata = video.get_metadata()
+video.set_current_stream("audio")
+
+frames = []  # we are going to save the frames here.
+ptss = []  # pts is a presentation timestamp in seconds (float) of each frame
+for frame in video:
+    frames.append(frame['data'])
+    ptss.append(frame['pts'])
+
+print("PTS for first five frames ", ptss[:5])
+print("Total number of frames: ", len(frames))
+approx_nf = metadata['audio']['duration'][0] * metadata['audio']['framerate'][0]
+print("Approx total number of datapoints we can expect: ", approx_nf)
+print("Read data size: ", frames[0].size(0) * len(frames))
+
+# %%
+# But what if we only want to read a certain time segment of the video?
+# That can be done easily using the combination of our ``seek`` function, and the fact that each call
+# to ``next`` returns the presentation timestamp of the returned frame in seconds.
+#
+# Given that our implementation relies on python iterators,
+# we can leverage itertools to simplify the process and make it more pythonic.
+#
+# For example, if we wanted to read ten frames starting from the second second:
+
+
+import itertools
+video.set_current_stream("video")
+
+frames = []  # we are going to save the frames here.
+
+# We seek to the second second of the video and use islice to get the next 10 frames
+for frame in itertools.islice(video.seek(2), 10):
+    frames.append(frame['data'])
+
+print("Total number of frames: ", len(frames))
+
+# %%
+# Or if we wanted to read from the 2nd to the 5th second:
+# we seek to the second second of the video,
+# then utilize itertools' takewhile to keep reading frames
+# until their timestamp exceeds 5 seconds:
+
+video.set_current_stream("video")
+frames = []  # we are going to save the frames here.
+video = video.seek(2)
+
+for frame in itertools.takewhile(lambda x: x['pts'] <= 5, video):
+    frames.append(frame['data'])
+
+print("Total number of frames: ", len(frames))
+approx_nf = (5 - 2) * video.get_metadata()['video']['fps'][0]
+print("We can expect approx: ", approx_nf)
+print("Tensor size: ", frames[0].size())
+
+# %%
+# 2. Building a sample read_video function
+# ----------------------------------------------------------------------------------------
+# We can utilize the methods above to build a read video function that follows
+# the same API as the existing ``read_video`` function.
+
+
+def example_read_video(video_object, start=0, end=None, read_video=True, read_audio=True):
+    if end is None:
+        end = float("inf")
+    if end < start:
+        raise ValueError(
+            "end time should be larger than start time, got "
+            f"start time={start} and end time={end}"
+        )
+
+    video_frames = torch.empty(0)
+    video_pts = []
+    if read_video:
+        video_object.set_current_stream("video")
+        frames = []
+        for frame in itertools.takewhile(lambda x: x['pts'] <= end, video_object.seek(start)):
+            frames.append(frame['data'])
+            video_pts.append(frame['pts'])
+        if len(frames) > 0:
+            video_frames = torch.stack(frames, 0)
+
+    audio_frames = torch.empty(0)
+    audio_pts = []
+    if read_audio:
+        video_object.set_current_stream("audio")
+        frames = []
+        for frame in itertools.takewhile(lambda x: x['pts'] <= end, video_object.seek(start)):
+            frames.append(frame['data'])
+            audio_pts.append(frame['pts'])
+        if len(frames) > 0:
+            audio_frames = torch.cat(frames, 0)
+
+    return video_frames, audio_frames, (video_pts, audio_pts), video_object.get_metadata()
+
+
+# Total number of frames should be 327 for video and 523264 datapoints for audio
+vf, af, info, meta = example_read_video(video)
+print(vf.size(), af.size())
+
+# %%
+# 3. Building an example randomly sampled dataset (can be applied to the training dataset of kinetics400)
+# -------------------------------------------------------------------------------------------------------
+# Cool, so now we can use the same principle to build a sample dataset.
+# We suggest trying out an iterable dataset for this purpose.
+# Here, we are going to build an example dataset that reads 10 randomly selected frames from each video.
+
+# %%
+# Make sample dataset
+import os
+os.makedirs("./dataset", exist_ok=True)
+os.makedirs("./dataset/1", exist_ok=True)
+os.makedirs("./dataset/2", exist_ok=True)
+
+# %%
+# Download the videos
+from torchvision.datasets.utils import download_url
+download_url(
+    "https://github.com/pytorch/vision/blob/main/test/assets/videos/WUzgd7C1pWA.mp4?raw=true",
+    "./dataset/1", "WUzgd7C1pWA.mp4"
+)
+download_url(
+    "https://github.com/pytorch/vision/blob/main/test/assets/videos/RATRACE_wave_f_nm_np1_fr_goo_37.avi?raw=true",
+    "./dataset/1",
+    "RATRACE_wave_f_nm_np1_fr_goo_37.avi"
+)
+download_url(
+    "https://github.com/pytorch/vision/blob/main/test/assets/videos/SOX5yA1l24A.mp4?raw=true",
+    "./dataset/2",
+    "SOX5yA1l24A.mp4"
+)
+download_url(
+    "https://github.com/pytorch/vision/blob/main/test/assets/videos/v_SoccerJuggling_g23_c01.avi?raw=true",
+    "./dataset/2",
+    "v_SoccerJuggling_g23_c01.avi"
+)
+download_url(
+    "https://github.com/pytorch/vision/blob/main/test/assets/videos/v_SoccerJuggling_g24_c01.avi?raw=true",
+    "./dataset/2",
+    "v_SoccerJuggling_g24_c01.avi"
+)
+
+# %%
+# Housekeeping and utilities
+import os
+import random
+
+from torchvision.datasets.folder import make_dataset
+from torchvision import transforms as t
+
+
+def _find_classes(dir):
+    classes = [d.name for d in os.scandir(dir) if d.is_dir()]
+    classes.sort()
+    class_to_idx = {cls_name: i for i, cls_name in enumerate(classes)}
+    return classes, class_to_idx
+
+
+def get_samples(root, extensions=(".mp4", ".avi")):
+    _, class_to_idx = _find_classes(root)
+    return make_dataset(root, class_to_idx, extensions=extensions)
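+
+# %%
+# As a quick sanity check (a sketch; the exact paths depend on the files
+# downloaded above), ``get_samples`` returns a list of ``(path, class_index)``
+# pairs that we can inspect directly:
+
+print(get_samples("./dataset")[:2])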
+
+# %%
+# We are going to define the dataset and some basic arguments.
+# We assume the structure of the FolderDataset, and add the following parameters:
+#
+# - ``clip_len``: length of a clip in frames
+# - ``frame_transform``: transform for every frame individually
+# - ``video_transform``: transform on a video sequence
+#
+# .. note::
+#   We also add an epoch size parameter, since using the
+#   :class:`~torch.utils.data.IterableDataset` class allows us to naturally
+#   oversample clips or images from each video if needed.
+
+
+class RandomDataset(torch.utils.data.IterableDataset):
+    def __init__(self, root, epoch_size=None, frame_transform=None, video_transform=None, clip_len=16):
+        super().__init__()
+
+        self.samples = get_samples(root)
+
+        # Allow for temporal jittering
+        if epoch_size is None:
+            epoch_size = len(self.samples)
+        self.epoch_size = epoch_size
+
+        self.clip_len = clip_len
+        self.frame_transform = frame_transform
+        self.video_transform = video_transform
+
+    def __iter__(self):
+        for i in range(self.epoch_size):
+            # Get random sample
+            path, target = random.choice(self.samples)
+            # Get video object
+            vid = torchvision.io.VideoReader(path, "video")
+            metadata = vid.get_metadata()
+            video_frames = []  # video frame buffer
+
+            # Seek and return frames
+            max_seek = metadata["video"]['duration'][0] - (self.clip_len / metadata["video"]['fps'][0])
+            start = random.uniform(0., max_seek)
+            for frame in itertools.islice(vid.seek(start), self.clip_len):
+                video_frames.append(self.frame_transform(frame['data']))
+                current_pts = frame['pts']
+            # Stack it into a tensor
+            video = torch.stack(video_frames, 0)
+            if self.video_transform:
+                video = self.video_transform(video)
+            output = {
+                'path': path,
+                'video': video,
+                'target': target,
+                'start': start,
+                'end': current_pts}
+            yield output
+
+# %%
+# Given a path of videos in a folder structure, i.e.:
+#
+# - dataset
+#     - class 1
+#         - file 0
+#         - file 1
+#         - ...
+#     - class 2
+#         - file 0
+#         - file 1
+#         - ...
+#     - ...
+#
+# We can generate a dataloader and test the dataset.
+
+
+transforms = [t.Resize((112, 112))]
+frame_transform = t.Compose(transforms)
+
+dataset = RandomDataset("./dataset", epoch_size=None, frame_transform=frame_transform)
+
+# %%
+from torch.utils.data import DataLoader
+loader = DataLoader(dataset, batch_size=12)
+data = {"video": [], 'start': [], 'end': [], 'tensorsize': []}
+for batch in loader:
+    for i in range(len(batch['path'])):
+        data['video'].append(batch['path'][i])
+        data['start'].append(batch['start'][i].item())
+        data['end'].append(batch['end'][i].item())
+        data['tensorsize'].append(batch['video'][i].size())
+print(data)
+
+# %%
+# 4. Data Visualization
+# ----------------------------------
+# Example of visualized video
+
+import matplotlib.pyplot as plt
+
+plt.figure(figsize=(12, 12))
+for i in range(16):
+    plt.subplot(4, 4, i + 1)
+    plt.imshow(batch["video"][0, i, ...].permute(1, 2, 0))
+    plt.axis("off")
+
+# %%
+# Cleanup the video and dataset:
+import os
+import shutil
+os.remove("./WUzgd7C1pWA.mp4")
+shutil.rmtree("./dataset")
diff --git a/gallery/others/plot_visualization_utils.py b/gallery/others/plot_visualization_utils.py
new file mode 100644
index 00000000000..72c35b53717
--- /dev/null
+++ b/gallery/others/plot_visualization_utils.py
@@ -0,0 +1,522 @@
+"""
+=======================
+Visualization utilities
+=======================
+
+.. note::
+    Try on `Colab `_
+    or :ref:`go to the end ` to download the full example code.
+
+This example illustrates some of the utilities that torchvision offers for
+visualizing images, bounding boxes, segmentation masks and keypoints.
+""" + +# sphinx_gallery_thumbnail_path = "../../gallery/assets/visualization_utils_thumbnail2.png" + +import torch +import numpy as np +import matplotlib.pyplot as plt + +import torchvision.transforms.functional as F + + +plt.rcParams["savefig.bbox"] = 'tight' + + +def show(imgs): + if not isinstance(imgs, list): + imgs = [imgs] + fig, axs = plt.subplots(ncols=len(imgs), squeeze=False) + for i, img in enumerate(imgs): + img = img.detach() + img = F.to_pil_image(img) + axs[0, i].imshow(np.asarray(img)) + axs[0, i].set(xticklabels=[], yticklabels=[], xticks=[], yticks=[]) + + +# %% +# Visualizing a grid of images +# ---------------------------- +# The :func:`~torchvision.utils.make_grid` function can be used to create a +# tensor that represents multiple images in a grid. This util requires a single +# image of dtype ``uint8`` as input. + +from torchvision.utils import make_grid +from torchvision.io import decode_image +from pathlib import Path + +dog1_int = decode_image(str(Path('../assets') / 'dog1.jpg')) +dog2_int = decode_image(str(Path('../assets') / 'dog2.jpg')) +dog_list = [dog1_int, dog2_int] + +grid = make_grid(dog_list) +show(grid) + +# %% +# Visualizing bounding boxes +# -------------------------- +# We can use :func:`~torchvision.utils.draw_bounding_boxes` to draw boxes on an +# image. We can set the colors, labels, width as well as font and font size. +# The boxes are in ``(xmin, ymin, xmax, ymax)`` format. + +from torchvision.utils import draw_bounding_boxes + + +boxes = torch.tensor([[50, 50, 100, 200], [210, 150, 350, 430]], dtype=torch.float) +colors = ["blue", "yellow"] +result = draw_bounding_boxes(dog1_int, boxes, colors=colors, width=5) +show(result) + + +# %% +# Naturally, we can also plot bounding boxes produced by torchvision detection +# models. Here is a demo with a Faster R-CNN model loaded from +# :func:`~torchvision.models.detection.fasterrcnn_resnet50_fpn` +# model. For more details on the output of such models, you may +# refer to :ref:`instance_seg_output`. + +from torchvision.models.detection import fasterrcnn_resnet50_fpn, FasterRCNN_ResNet50_FPN_Weights + + +weights = FasterRCNN_ResNet50_FPN_Weights.DEFAULT +transforms = weights.transforms() + +images = [transforms(d) for d in dog_list] + +model = fasterrcnn_resnet50_fpn(weights=weights, progress=False) +model = model.eval() + +outputs = model(images) +print(outputs) + +# %% +# Let's plot the boxes detected by our model. We will only plot the boxes with a +# score greater than a given threshold. + +score_threshold = .8 +dogs_with_boxes = [ + draw_bounding_boxes(dog_int, boxes=output['boxes'][output['scores'] > score_threshold], width=4) + for dog_int, output in zip(dog_list, outputs) +] +show(dogs_with_boxes) + +# %% +# Visualizing segmentation masks +# ------------------------------ +# The :func:`~torchvision.utils.draw_segmentation_masks` function can be used to +# draw segmentation masks on images. Semantic segmentation and instance +# segmentation models have different outputs, so we will treat each +# independently. +# +# .. _semantic_seg_output: +# +# Semantic segmentation models +# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +# +# We will see how to use it with torchvision's FCN Resnet-50, loaded with +# :func:`~torchvision.models.segmentation.fcn_resnet50`. Let's start by looking +# at the output of the model. 
+
+# %%
+# Visualizing segmentation masks
+# ------------------------------
+# The :func:`~torchvision.utils.draw_segmentation_masks` function can be used to
+# draw segmentation masks on images. Semantic segmentation and instance
+# segmentation models have different outputs, so we will treat each
+# independently.
+#
+# .. _semantic_seg_output:
+#
+# Semantic segmentation models
+# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+#
+# We will see how to use it with torchvision's FCN Resnet-50, loaded with
+# :func:`~torchvision.models.segmentation.fcn_resnet50`. Let's start by looking
+# at the output of the model.
+
+from torchvision.models.segmentation import fcn_resnet50, FCN_ResNet50_Weights
+
+weights = FCN_ResNet50_Weights.DEFAULT
+transforms = weights.transforms(resize_size=None)
+
+model = fcn_resnet50(weights=weights, progress=False)
+model = model.eval()
+
+batch = torch.stack([transforms(d) for d in dog_list])
+output = model(batch)['out']
+print(output.shape, output.min().item(), output.max().item())
+
+# %%
+# As we can see above, the output of the segmentation model is a tensor of shape
+# ``(batch_size, num_classes, H, W)``. Each value is a non-normalized score, and
+# we can normalize them into ``[0, 1]`` by using a softmax. After the softmax,
+# we can interpret each value as a probability indicating how likely a given
+# pixel is to belong to a given class.
+#
+# Let's plot the masks that have been detected for the dog class and for the
+# boat class:
+
+sem_class_to_idx = {cls: idx for (idx, cls) in enumerate(weights.meta["categories"])}
+
+normalized_masks = torch.nn.functional.softmax(output, dim=1)
+
+dog_and_boat_masks = [
+    normalized_masks[img_idx, sem_class_to_idx[cls]]
+    for img_idx in range(len(dog_list))
+    for cls in ('dog', 'boat')
+]
+
+show(dog_and_boat_masks)
+
+# %%
+# As expected, the model is confident about the dog class, but not so much for
+# the boat class.
+#
+# The :func:`~torchvision.utils.draw_segmentation_masks` function can be used to
+# plot those masks on top of the original image. This function expects the
+# masks to be boolean masks, but our masks above contain probabilities in ``[0,
+# 1]``. To get boolean masks, we can do the following:
+
+class_dim = 1
+boolean_dog_masks = (normalized_masks.argmax(class_dim) == sem_class_to_idx['dog'])
+print(f"shape = {boolean_dog_masks.shape}, dtype = {boolean_dog_masks.dtype}")
+show([m.float() for m in boolean_dog_masks])
+
+
+# %%
+# The line above where we define ``boolean_dog_masks`` is a bit cryptic, but you
+# can read it as the following query: "For which pixels is 'dog' the most likely
+# class?"
+#
+# .. note::
+#     While we're using the ``normalized_masks`` here, we would have
+#     gotten the same result by using the non-normalized scores of the model
+#     directly (as the softmax operation preserves the order).
+#
+# Now that we have boolean masks, we can use them with
+# :func:`~torchvision.utils.draw_segmentation_masks` to plot them on top of the
+# original images:
+
+from torchvision.utils import draw_segmentation_masks
+
+dogs_with_masks = [
+    draw_segmentation_masks(img, masks=mask, alpha=0.7)
+    for img, mask in zip(dog_list, boolean_dog_masks)
+]
+show(dogs_with_masks)
+
+# %%
+# We can plot more than one mask per image! Remember that the model returned as
+# many masks as there are classes. Let's ask the same query as above, but this
+# time for *all* classes, not just the dog class: "For each pixel and each class
+# C, is class C the most likely class?"
+# +# This one is a bit more involved, so we'll first show how to do it with a +# single image, and then we'll generalize to the batch + +num_classes = normalized_masks.shape[1] +dog1_masks = normalized_masks[0] +class_dim = 0 +dog1_all_classes_masks = dog1_masks.argmax(class_dim) == torch.arange(num_classes)[:, None, None] + +print(f"dog1_masks shape = {dog1_masks.shape}, dtype = {dog1_masks.dtype}") +print(f"dog1_all_classes_masks = {dog1_all_classes_masks.shape}, dtype = {dog1_all_classes_masks.dtype}") + +dog_with_all_masks = draw_segmentation_masks(dog1_int, masks=dog1_all_classes_masks, alpha=.6) +show(dog_with_all_masks) + +# %% +# We can see in the image above that only 2 masks were drawn: the mask for the +# background and the mask for the dog. This is because the model thinks that +# only these 2 classes are the most likely ones across all the pixels. If the +# model had detected another class as the most likely among other pixels, we +# would have seen its mask above. +# +# Removing the background mask is as simple as passing +# ``masks=dog1_all_classes_masks[1:]``, because the background class is the +# class with index 0. +# +# Let's now do the same but for an entire batch of images. The code is similar +# but involves a bit more juggling with the dimensions. + +class_dim = 1 +all_classes_masks = normalized_masks.argmax(class_dim) == torch.arange(num_classes)[:, None, None, None] +print(f"shape = {all_classes_masks.shape}, dtype = {all_classes_masks.dtype}") +# The first dimension is the classes now, so we need to swap it +all_classes_masks = all_classes_masks.swapaxes(0, 1) + +dogs_with_masks = [ + draw_segmentation_masks(img, masks=mask, alpha=.6) + for img, mask in zip(dog_list, all_classes_masks) +] +show(dogs_with_masks) + + +# %% +# .. _instance_seg_output: +# +# Instance segmentation models +# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +# +# Instance segmentation models have a significantly different output from the +# semantic segmentation models. We will see here how to plot the masks for such +# models. Let's start by analyzing the output of a Mask-RCNN model. Note that +# these models don't require the images to be normalized, so we don't need to +# use the normalized batch. +# +# .. note:: +# +# We will here describe the output of a Mask-RCNN model. The models in +# :ref:`object_det_inst_seg_pers_keypoint_det` all have a similar output +# format, but some of them may have extra info like keypoints for +# :func:`~torchvision.models.detection.keypointrcnn_resnet50_fpn`, and some +# of them may not have masks, like +# :func:`~torchvision.models.detection.fasterrcnn_resnet50_fpn`. + +from torchvision.models.detection import maskrcnn_resnet50_fpn, MaskRCNN_ResNet50_FPN_Weights + +weights = MaskRCNN_ResNet50_FPN_Weights.DEFAULT +transforms = weights.transforms() + +images = [transforms(d) for d in dog_list] + +model = maskrcnn_resnet50_fpn(weights=weights, progress=False) +model = model.eval() + +output = model(images) +print(output) + +# %% +# Let's break this down. For each image in the batch, the model outputs some +# detections (or instances). The number of detections varies for each input +# image. Each instance is described by its bounding box, its label, its score +# and its mask. +# +# The way the output is organized is as follows: the output is a list of length +# ``batch_size``. Each entry in the list corresponds to an input image, and it +# is a dict with keys 'boxes', 'labels', 'scores', and 'masks'. 
Each value
+# associated with those keys has ``num_instances`` elements in it. In our case
+# above there are 3 instances detected in the first image, and 2 instances in
+# the second one.
+#
+# The boxes can be plotted with :func:`~torchvision.utils.draw_bounding_boxes`
+# as above, but here we're more interested in the masks. These masks are quite
+# different from the masks that we saw above for the semantic segmentation
+# models.
+
+dog1_output = output[0]
+dog1_masks = dog1_output['masks']
+print(f"shape = {dog1_masks.shape}, dtype = {dog1_masks.dtype}, "
+      f"min = {dog1_masks.min()}, max = {dog1_masks.max()}")
+
+# %%
+# Here the masks correspond to probabilities indicating, for each pixel, how
+# likely it is to belong to the predicted label of that instance. Those
+# predicted labels correspond to the 'labels' element in the same output dict.
+# Let's see which labels were predicted for the instances of the first image.
+
+print("For the first dog, the following instances were detected:")
+print([weights.meta["categories"][label] for label in dog1_output['labels']])
+
+# %%
+# Interestingly, the model detects two persons in the image. Let's go ahead and
+# plot those masks. Since :func:`~torchvision.utils.draw_segmentation_masks`
+# expects boolean masks, we need to convert those probabilities into boolean
+# values. Remember that the semantics of those masks is "How likely is this pixel
+# to belong to the predicted class?". As a result, a natural way of converting
+# those masks into boolean values is to threshold them at a probability of 0.5
+# (one could also choose a different threshold).
+
+proba_threshold = 0.5
+dog1_bool_masks = dog1_output['masks'] > proba_threshold
+print(f"shape = {dog1_bool_masks.shape}, dtype = {dog1_bool_masks.dtype}")
+
+# There's an extra dimension (1) to the masks. We need to remove it
+dog1_bool_masks = dog1_bool_masks.squeeze(1)
+
+show(draw_segmentation_masks(dog1_int, dog1_bool_masks, alpha=0.9))
+
+# %%
+# The model seems to have properly detected the dog, but it also confused trees
+# with people. Looking more closely at the scores will help us plot more
+# relevant masks:
+
+print(dog1_output['scores'])
+
+# %%
+# Clearly the model is more confident about the dog detection than it is about
+# the people detections. That's good news. When plotting the masks, we can ask
+# for only those that have a good score. Let's use a score threshold of .75
+# here, and also plot the masks of the second dog.
+
+score_threshold = .75
+
+boolean_masks = [
+    out['masks'][out['scores'] > score_threshold] > proba_threshold
+    for out in output
+]
+
+dogs_with_masks = [
+    draw_segmentation_masks(img, mask.squeeze(1))
+    for img, mask in zip(dog_list, boolean_masks)
+]
+show(dogs_with_masks)
+
+# %%
+# The two 'people' masks in the first image were not selected because they have
+# a lower score than the score threshold. Similarly, in the second image, the
+# instance with class 15 (which corresponds to 'bench') was not selected.
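+
+# %%
+# Since each detected instance also comes with a bounding box, we can combine
+# the two utilities and draw the filtered boxes on top of the masks. This is
+# just a sketch that re-uses the thresholds defined above:
+
+dog1_kept = dog1_output['scores'] > score_threshold
+dog1_combined = draw_segmentation_masks(
+    dog1_int, dog1_output['masks'][dog1_kept].squeeze(1) > proba_threshold, alpha=0.9
+)
+dog1_combined = draw_bounding_boxes(dog1_combined, dog1_output['boxes'][dog1_kept], colors="red", width=3)
+show(dog1_combined)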
+
+# %%
+# .. _keypoint_output:
+#
+# Visualizing keypoints
+# ------------------------------
+# The :func:`~torchvision.utils.draw_keypoints` function can be used to
+# draw keypoints on images. We will see how to use it with
+# torchvision's KeypointRCNN loaded with :func:`~torchvision.models.detection.keypointrcnn_resnet50_fpn`.
+# We will first have a look at the output of the model.
+#
+
+from torchvision.models.detection import keypointrcnn_resnet50_fpn, KeypointRCNN_ResNet50_FPN_Weights
+from torchvision.io import decode_image
+
+person_int = decode_image(str(Path("../assets") / "person1.jpg"))
+
+weights = KeypointRCNN_ResNet50_FPN_Weights.DEFAULT
+transforms = weights.transforms()
+
+person_float = transforms(person_int)
+
+model = keypointrcnn_resnet50_fpn(weights=weights, progress=False)
+model = model.eval()
+
+outputs = model([person_float])
+print(outputs)
+
+# %%
+# As we see, the output contains a list of dictionaries.
+# The output list is of length batch_size.
+# We currently have just a single image, so the length of the list is 1.
+# Each entry in the list corresponds to an input image,
+# and it is a dict with keys ``boxes``, ``labels``, ``scores``, ``keypoints`` and ``keypoint_scores``.
+# Each value associated with those keys has ``num_instances`` elements in it.
+# In our case above there are 2 instances detected in the image.
+
+kpts = outputs[0]['keypoints']
+scores = outputs[0]['scores']
+
+print(kpts)
+print(scores)
+
+# %%
+# The KeypointRCNN model detects that there are two instances in the image.
+# If you plot the boxes by using :func:`~draw_bounding_boxes`
+# you would recognize them as the person and the surfboard.
+# If we look at the scores, we will realize that the model is much more confident about the person than the surfboard.
+# We could now set a confidence threshold and plot only the instances which we are confident enough about.
+# Let us set a threshold of 0.75 and keep the keypoints corresponding to the person.
+
+detect_threshold = 0.75
+idx = torch.where(scores > detect_threshold)
+keypoints = kpts[idx]
+
+print(keypoints)
+
+# %%
+# Great, now we have the keypoints corresponding to the person.
+# Each keypoint is represented by x, y coordinates and the visibility.
+# We can now use the :func:`~torchvision.utils.draw_keypoints` function to draw keypoints.
+# Note that the utility expects uint8 images.
+
+from torchvision.utils import draw_keypoints
+
+res = draw_keypoints(person_int, keypoints, colors="blue", radius=3)
+show(res)
+
+# %%
+# As we see, the keypoints appear as colored circles over the image.
+# The coco keypoints for a person are ordered and represent the following list.
+
+coco_keypoints = [
+    "nose", "left_eye", "right_eye", "left_ear", "right_ear",
+    "left_shoulder", "right_shoulder", "left_elbow", "right_elbow",
+    "left_wrist", "right_wrist", "left_hip", "right_hip",
+    "left_knee", "right_knee", "left_ankle", "right_ankle",
+]
+
+# %%
+# What if we are interested in joining the keypoints?
+# This is especially useful in tasks like pose detection or action recognition.
+# We can join the keypoints easily using the ``connectivity`` parameter.
+# A close observation would reveal that we would need to join the points in the
+# order below to construct a human skeleton.
+#
+# nose -> left_eye -> left_ear. (0, 1), (1, 3)
+#
+# nose -> right_eye -> right_ear. (0, 2), (2, 4)
+#
+# nose -> left_shoulder -> left_elbow -> left_wrist. (0, 5), (5, 7), (7, 9)
+#
+# nose -> right_shoulder -> right_elbow -> right_wrist. (0, 6), (6, 8), (8, 10)
+#
+# left_shoulder -> left_hip -> left_knee -> left_ankle. (5, 11), (11, 13), (13, 15)
+#
+# right_shoulder -> right_hip -> right_knee -> right_ankle. (6, 12), (12, 14), (14, 16)
+#
+# We will create a list containing these keypoint ids to be connected.
+
+connect_skeleton = [
+    (0, 1), (0, 2), (1, 3), (2, 4), (0, 5), (0, 6), (5, 7), (6, 8),
+    (7, 9), (8, 10), (5, 11), (6, 12), (11, 13), (12, 14), (13, 15), (14, 16)
+]
+
+# %%
+# We pass the above list to the ``connectivity`` parameter to connect the keypoints.
+#
+
+res = draw_keypoints(person_int, keypoints, connectivity=connect_skeleton, colors="blue", radius=4, width=3)
+show(res)
+
+# %%
+# That looks pretty good.
+#
+# .. _draw_keypoints_with_visibility:
+#
+# Drawing Keypoints with Visibility
+# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+# Let's have a look at the results that another keypoint prediction module produced, and show the connectivity:
+
+prediction = torch.tensor(
+    [[[208.0176, 214.2409, 1.0000],
+      [000.0000, 000.0000, 0.0000],
+      [197.8246, 210.6392, 1.0000],
+      [000.0000, 000.0000, 0.0000],
+      [178.6378, 217.8425, 1.0000],
+      [221.2086, 253.8591, 1.0000],
+      [160.6502, 269.4662, 1.0000],
+      [243.9929, 304.2822, 1.0000],
+      [138.4654, 328.8935, 1.0000],
+      [277.5698, 340.8990, 1.0000],
+      [153.4551, 374.5145, 1.0000],
+      [000.0000, 000.0000, 0.0000],
+      [226.0053, 370.3125, 1.0000],
+      [221.8081, 455.5516, 1.0000],
+      [273.9723, 448.9486, 1.0000],
+      [193.6275, 546.1933, 1.0000],
+      [273.3727, 545.5930, 1.0000]]]
+)
+
+res = draw_keypoints(person_int, prediction, connectivity=connect_skeleton, colors="blue", radius=4, width=3)
+show(res)
+
+# %%
+# What happened there?
+# The model, which predicted the new keypoints,
+# can't detect the three points that are hidden on the upper left body of the skateboarder.
+# More precisely, the model predicted that ``(x, y, vis) = (0, 0, 0)`` for the left_eye, left_ear, and left_hip.
+# So we definitely don't want to display those keypoints and connections, and you don't have to.
+# Looking at the parameters of :func:`~torchvision.utils.draw_keypoints`,
+# we can see that we can pass a visibility tensor as an additional argument.
+# Given the model's prediction, we have the visibility as the third keypoint dimension; we just need to extract it.
+# Let's split the ``prediction`` into the keypoint coordinates and their respective visibility,
+# and pass both of them as arguments to :func:`~torchvision.utils.draw_keypoints`.
+
+coordinates, visibility = prediction.split([2, 1], dim=-1)
+visibility = visibility.bool()
+
+res = draw_keypoints(
+    person_int, coordinates, visibility=visibility, connectivity=connect_skeleton, colors="blue", radius=4, width=3
+)
+show(res)
+
+# %%
+# We can see that the undetected keypoints are not drawn and the invisible keypoint connections were skipped.
+# This can reduce the noise on images with multiple detections, or in cases like ours,
+# when the keypoint-prediction model missed some detections.
+# Most torch keypoint-prediction models return the visibility for every prediction, ready for you to use it.
+# The :func:`~torchvision.models.detection.keypointrcnn_resnet50_fpn` model,
+# which we used in the first case, does so too.
diff --git a/gallery/transforms/README.rst b/gallery/transforms/README.rst
new file mode 100644
index 00000000000..1b8b1b08155
--- /dev/null
+++ b/gallery/transforms/README.rst
@@ -0,0 +1,4 @@
+.. 
_transforms_gallery: + +Transforms +---------- diff --git a/gallery/transforms/helpers.py b/gallery/transforms/helpers.py new file mode 100644 index 00000000000..e94d717eb7d --- /dev/null +++ b/gallery/transforms/helpers.py @@ -0,0 +1,50 @@ +import matplotlib.pyplot as plt +import torch +from torchvision.utils import draw_bounding_boxes, draw_segmentation_masks +from torchvision import tv_tensors +from torchvision.transforms.v2 import functional as F + + +def plot(imgs, row_title=None, **imshow_kwargs): + if not isinstance(imgs[0], list): + # Make a 2d grid even if there's just 1 row + imgs = [imgs] + + num_rows = len(imgs) + num_cols = len(imgs[0]) + _, axs = plt.subplots(nrows=num_rows, ncols=num_cols, squeeze=False) + for row_idx, row in enumerate(imgs): + for col_idx, img in enumerate(row): + boxes = None + masks = None + if isinstance(img, tuple): + img, target = img + if isinstance(target, dict): + boxes = target.get("boxes") + masks = target.get("masks") + elif isinstance(target, tv_tensors.BoundingBoxes): + boxes = target + else: + raise ValueError(f"Unexpected target type: {type(target)}") + img = F.to_image(img) + if img.dtype.is_floating_point and img.min() < 0: + # Poor man's re-normalization for the colors to be OK-ish. This + # is useful for images coming out of Normalize() + img -= img.min() + img /= img.max() + + img = F.to_dtype(img, torch.uint8, scale=True) + if boxes is not None: + img = draw_bounding_boxes(img, boxes, colors="yellow", width=3) + if masks is not None: + img = draw_segmentation_masks(img, masks.to(torch.bool), colors=["green"] * masks.shape[0], alpha=.65) + + ax = axs[row_idx, col_idx] + ax.imshow(img.permute(1, 2, 0).numpy(), **imshow_kwargs) + ax.set(xticklabels=[], yticklabels=[], xticks=[], yticks=[]) + + if row_title is not None: + for row_idx in range(num_rows): + axs[row_idx, 0].set(ylabel=row_title[row_idx]) + + plt.tight_layout() diff --git a/gallery/transforms/plot_custom_transforms.py b/gallery/transforms/plot_custom_transforms.py new file mode 100644 index 00000000000..d1bd9455bfb --- /dev/null +++ b/gallery/transforms/plot_custom_transforms.py @@ -0,0 +1,200 @@ +""" +=================================== +How to write your own v2 transforms +=================================== + +.. note:: + Try on `Colab `_ + or :ref:`go to the end ` to download the full example code. + +This guide explains how to write transforms that are compatible with the +torchvision transforms V2 API. +""" + +# %% +from typing import Any, Dict, List + +import torch +from torchvision import tv_tensors +from torchvision.transforms import v2 + + +# %% +# Just create a ``nn.Module`` and override the ``forward`` method +# =============================================================== +# +# In most cases, this is all you're going to need, as long as you already know +# the structure of the input that your transform will expect. For example if +# you're just doing image classification, your transform will typically accept a +# single image as input, or a ``(img, label)`` input. So you can just hard-code +# your ``forward`` method to accept just that, e.g. +# +# .. code:: python +# +# class MyCustomTransform(torch.nn.Module): +# def forward(self, img, label): +# # Do some transformations +# return new_img, new_label +# +# .. note:: +# +# This means that if you have a custom transform that is already compatible +# with the V1 transforms (those in ``torchvision.transforms``), it will +# still work with the V2 transforms without any change! 
+#
+# We will illustrate this more completely below with a typical detection case,
+# where our samples are just images, bounding boxes and labels:
+
+class MyCustomTransform(torch.nn.Module):
+    def forward(self, img, bboxes, label):  # we assume inputs are always structured like this
+        print(
+            f"I'm transforming an image of shape {img.shape} "
+            f"with bboxes = {bboxes}\n{label = }"
+        )
+        # Do some transformations. Here, we're just passing through the input
+        return img, bboxes, label
+
+
+transforms = v2.Compose([
+    MyCustomTransform(),
+    v2.RandomResizedCrop((224, 224), antialias=True),
+    v2.RandomHorizontalFlip(p=1),
+    v2.Normalize(mean=[0, 0, 0], std=[1, 1, 1])
+])
+
+H, W = 256, 256
+img = torch.rand(3, H, W)
+bboxes = tv_tensors.BoundingBoxes(
+    torch.tensor([[0, 10, 10, 20], [50, 50, 70, 70]]),
+    format="XYXY",
+    canvas_size=(H, W)
+)
+label = 3
+
+out_img, out_bboxes, out_label = transforms(img, bboxes, label)
+# %%
+print(f"Output image shape: {out_img.shape}\nout_bboxes = {out_bboxes}\n{out_label = }")
+# %%
+# .. note::
+#     While working with TVTensor classes in your code, make sure to
+#     familiarize yourself with this section:
+#     :ref:`tv_tensor_unwrapping_behaviour`
+#
+# Supporting arbitrary input structures
+# =====================================
+#
+# In the section above, we have assumed that you already know the structure of
+# your inputs and that you're OK with hard-coding this expected structure in
+# your code. If you want your custom transforms to be as flexible as possible,
+# this can be a bit limiting.
+#
+# A key feature of the builtin Torchvision V2 transforms is that they can accept
+# arbitrary input structure and return the same structure as output (with
+# transformed entries). For example, transforms can accept a single image, or a
+# tuple of ``(img, label)``, or an arbitrary nested dictionary as input. Here's
+# an example with the built-in transform :class:`~torchvision.transforms.v2.RandomHorizontalFlip`:
+
+structured_input = {
+    "img": img,
+    "annotations": (bboxes, label),
+    "something that will be ignored": (1, "hello"),
+    "another tensor that is ignored": torch.arange(10),
+}
+structured_output = v2.RandomHorizontalFlip(p=1)(structured_input)
+
+assert isinstance(structured_output, dict)
+assert structured_output["something that will be ignored"] == (1, "hello")
+assert (structured_output["another tensor that is ignored"] == torch.arange(10)).all()
+print(f"The input bboxes are:\n{structured_input['annotations'][0]}")
+print(f"The transformed bboxes are:\n{structured_output['annotations'][0]}")
+
+# %%
+# Basics: override the ``transform()`` method
+# -------------------------------------------
+#
+# In order to support arbitrary inputs in your custom transform, you will need
+# to inherit from :class:`~torchvision.transforms.v2.Transform` and override the
+# ``.transform()`` method (not the ``forward()`` method!). Below is a basic example:
+
+
+class MyCustomTransform(v2.Transform):
+    def transform(self, inpt: Any, params: Dict[str, Any]):
+        if type(inpt) == torch.Tensor:
+            print(f"I'm transforming an image of shape {inpt.shape}")
+            return inpt + 1  # dummy transformation
+        elif isinstance(inpt, tv_tensors.BoundingBoxes):
+            print(f"I'm transforming bounding boxes! {inpt.canvas_size = }")
+            return tv_tensors.wrap(inpt + 100, like=inpt)  # dummy transformation
+
+
+my_custom_transform = MyCustomTransform()
+structured_output = my_custom_transform(structured_input)
+
+assert isinstance(structured_output, dict)
+assert structured_output["something that will be ignored"] == (1, "hello")
+assert (structured_output["another tensor that is ignored"] == torch.arange(10)).all()
+print(f"The input bboxes are:\n{structured_input['annotations'][0]}")
+print(f"The transformed bboxes are:\n{structured_output['annotations'][0]}")
+
+# %%
+# An important thing to note is that when we call ``my_custom_transform`` on
+# ``structured_input``, the input is flattened and then each individual part is
+# passed to ``transform()``. That is, ``transform()`` receives the input image,
+# then the bounding boxes, etc. Within ``transform()``, you can decide how to
+# transform each input, based on their type.
+#
+# If you're curious why the other tensor (``torch.arange()``) didn't get passed
+# to ``transform()``, see :ref:`this note ` for more
+# details.
+#
+# Advanced: The ``make_params()`` method
+# --------------------------------------
+#
+# The ``make_params()`` method is called internally before calling
+# ``transform()`` on each input. This is typically useful to generate random
+# parameter values. In the example below, we use it to randomly apply the
+# transformation with a probability of 0.5
+
+
+class MyRandomTransform(MyCustomTransform):
+    def __init__(self, p=0.5):
+        self.p = p
+        super().__init__()
+
+    def make_params(self, flat_inputs: List[Any]) -> Dict[str, Any]:
+        apply_transform = (torch.rand(size=(1,)) < self.p).item()
+        params = dict(apply_transform=apply_transform)
+        return params
+
+    def transform(self, inpt: Any, params: Dict[str, Any]):
+        if not params["apply_transform"]:
+            print("Not transforming anything!")
+            return inpt
+        else:
+            return super().transform(inpt, params)
+
+
+my_random_transform = MyRandomTransform()
+
+torch.manual_seed(0)
+_ = my_random_transform(structured_input)  # transforms
+_ = my_random_transform(structured_input)  # doesn't transform
+
+# %%
+#
+# .. note::
+#
+#     It's important for such random parameter generation to happen within
+#     ``make_params()`` and not within ``transform()``, so that for a given
+#     transform call, the same RNG applies to all the inputs in the same way. If
+#     we were to perform the RNG within ``transform()``, we would risk e.g.
+#     transforming the image while *not* transforming the bounding boxes.
+#
+# The ``make_params()`` method takes the list of all the inputs as parameter
+# (each of the elements in this list will later be passed to ``transform()``).
+# You can use ``flat_inputs`` to e.g. figure out the dimensions of the input,
+# using :func:`~torchvision.transforms.v2.query_chw` or
+# :func:`~torchvision.transforms.v2.query_size`.
+#
+# ``make_params()`` should return a dict (or actually, anything you want) that
+# will then be passed to ``transform()``.
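+
+# %%
+# For example, here is a sketch of a transform whose behaviour depends on the
+# input size, using :func:`~torchvision.transforms.v2.query_size` (the class
+# below is hypothetical and just for illustration):
+
+
+class MyResolutionAwareTransform(MyCustomTransform):
+    def make_params(self, flat_inputs: List[Any]) -> Dict[str, Any]:
+        height, width = v2.query_size(flat_inputs)  # common size of all the inputs
+        # only apply the (dummy) transformation to reasonably small inputs
+        return dict(apply_transform=height * width < 1024 ** 2)
+
+    def transform(self, inpt: Any, params: Dict[str, Any]):
+        if not params["apply_transform"]:
+            return inpt
+        return super().transform(inpt, params)
+
+
+_ = MyResolutionAwareTransform()(img, bboxes, label)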
diff --git a/gallery/transforms/plot_custom_tv_tensors.py b/gallery/transforms/plot_custom_tv_tensors.py
new file mode 100644
index 00000000000..9b113901461
--- /dev/null
+++ b/gallery/transforms/plot_custom_tv_tensors.py
@@ -0,0 +1,119 @@
+"""
+====================================
+How to write your own TVTensor class
+====================================
+
+.. note::
+    Try on `Colab `_
+    or :ref:`go to the end ` to download the full example code.
+
+This guide is intended for advanced users and downstream library maintainers. We explain how to
+write your own TVTensor class, and how to make it compatible with the built-in
+Torchvision v2 transforms. Before continuing, make sure you have read
+:ref:`sphx_glr_auto_examples_transforms_plot_tv_tensors.py`.
+"""
+
+# %%
+import torch
+from torchvision import tv_tensors
+from torchvision.transforms import v2
+
+# %%
+# We will create a very simple class that just inherits from the base
+# :class:`~torchvision.tv_tensors.TVTensor` class. It will be enough to cover
+# what you need to know to implement your more elaborate use-cases. If you need
+# to create a class that carries meta-data, take a look at how the
+# :class:`~torchvision.tv_tensors.BoundingBoxes` class is `implemented
+# `_.
+
+
+class MyTVTensor(tv_tensors.TVTensor):
+    pass
+
+
+my_dp = MyTVTensor([1, 2, 3])
+my_dp
+
+# %%
+# Now that we have defined our custom TVTensor class, we want it to be
+# compatible with the built-in torchvision transforms, and the functional API.
+# For that, we need to implement a kernel which performs the core of the
+# transformation, and then "hook" it to the functional that we want to support
+# via :func:`~torchvision.transforms.v2.functional.register_kernel`.
+#
+# We illustrate this process below: we create a kernel for the "horizontal flip"
+# operation of our MyTVTensor class, and register it to the functional API.
+
+from torchvision.transforms.v2 import functional as F
+
+
+@F.register_kernel(functional="hflip", tv_tensor_cls=MyTVTensor)
+def hflip_my_tv_tensor(my_dp, *args, **kwargs):
+    print("Flipping!")
+    out = my_dp.flip(-1)
+    return tv_tensors.wrap(out, like=my_dp)
+
+
+# %%
+# To understand why :func:`~torchvision.tv_tensors.wrap` is used, see
+# :ref:`tv_tensor_unwrapping_behaviour`. Ignore the ``*args, **kwargs`` for now,
+# we will explain it below in :ref:`param_forwarding`.
+#
+# .. note::
+#
+#     In our call to ``register_kernel`` above we used a string
+#     ``functional="hflip"`` to refer to the functional we want to hook into. We
+#     could also have used the functional *itself*, i.e.
+#     ``@register_kernel(functional=F.hflip, ...)``.
+#
+# Now that we have registered our kernel, we can call the functional API on a
+# ``MyTVTensor`` instance:
+
+my_dp = MyTVTensor(torch.rand(3, 256, 256))
+_ = F.hflip(my_dp)
+
+# %%
+# And we can also use the
+# :class:`~torchvision.transforms.v2.RandomHorizontalFlip` transform, since it relies on :func:`~torchvision.transforms.v2.functional.hflip` internally:
+t = v2.RandomHorizontalFlip(p=1)
+_ = t(my_dp)
+
+# %%
+# .. note::
+#
+#     We cannot register a kernel for a transform class, we can only register a
+#     kernel for a **functional**. The reason we can't register a transform
+#     class is because one transform may internally rely on more than one
+#     functional, so in general we can't register a single kernel for a given
+#     class.
+#
+# .. _param_forwarding:
+#
+# Parameter forwarding, and ensuring future compatibility of your kernels
+# -----------------------------------------------------------------------
+#
+# The functional API that you're hooking into is public and therefore
+# **backward** compatible: we guarantee that the parameters of these functionals
+# won't be removed or renamed without a proper deprecation cycle. However, we
+# don't guarantee **forward** compatibility, and we may add new parameters in
+# the future.
+#
+# Imagine that in a future version, Torchvision adds a new ``inplace`` parameter
+# to its :func:`~torchvision.transforms.v2.functional.hflip` functional.
If you +# already defined and registered your own kernel as + +def hflip_my_tv_tensor(my_dp): # noqa + print("Flipping!") + out = my_dp.flip(-1) + return tv_tensors.wrap(out, like=my_dp) + + +# %% +# then calling ``F.hflip(my_dp)`` will **fail**, because ``hflip`` will try to +# pass the new ``inplace`` parameter to your kernel, but your kernel doesn't +# accept it. +# +# For this reason, we recommend to always define your kernels with +# ``*args, **kwargs`` in their signature, as done above. This way, your kernel +# will be able to accept any new parameter that we may add in the future. +# (Technically, adding `**kwargs` only should be enough). diff --git a/gallery/transforms/plot_cutmix_mixup.py b/gallery/transforms/plot_cutmix_mixup.py new file mode 100644 index 00000000000..222be0ff359 --- /dev/null +++ b/gallery/transforms/plot_cutmix_mixup.py @@ -0,0 +1,150 @@ + +""" +=========================== +How to use CutMix and MixUp +=========================== + +.. note:: + Try on `Colab `_ + or :ref:`go to the end ` to download the full example code. + +:class:`~torchvision.transforms.v2.CutMix` and +:class:`~torchvision.transforms.v2.MixUp` are popular augmentation strategies +that can improve classification accuracy. + +These transforms are slightly different from the rest of the Torchvision +transforms, because they expect +**batches** of samples as input, not individual images. In this example we'll +explain how to use them: after the ``DataLoader``, or as part of a collation +function. +""" + +# %% +import torch +from torchvision.datasets import FakeData +from torchvision.transforms import v2 + + +NUM_CLASSES = 100 + +# %% +# Pre-processing pipeline +# ----------------------- +# +# We'll use a simple but typical image classification pipeline: + +preproc = v2.Compose([ + v2.PILToTensor(), + v2.RandomResizedCrop(size=(224, 224), antialias=True), + v2.RandomHorizontalFlip(p=0.5), + v2.ToDtype(torch.float32, scale=True), # to float32 in [0, 1] + v2.Normalize(mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225)), # typically from ImageNet +]) + +dataset = FakeData(size=1000, num_classes=NUM_CLASSES, transform=preproc) + +img, label = dataset[0] +print(f"{type(img) = }, {img.dtype = }, {img.shape = }, {label = }") + +# %% +# +# One important thing to note is that neither CutMix nor MixUp are part of this +# pre-processing pipeline. We'll add them a bit later once we define the +# DataLoader. Just as a refresher, this is what the DataLoader and training loop +# would look like if we weren't using CutMix or MixUp: + +from torch.utils.data import DataLoader + +dataloader = DataLoader(dataset, batch_size=4, shuffle=True) + +for images, labels in dataloader: + print(f"{images.shape = }, {labels.shape = }") + print(labels.dtype) + # + break +# %% + +# %% +# Where to use MixUp and CutMix +# ----------------------------- +# +# After the DataLoader +# ^^^^^^^^^^^^^^^^^^^^ +# +# Now let's add CutMix and MixUp. 
The simplest way to do this is right after the
+# DataLoader: the DataLoader has already batched the images and labels for us,
+# and this is exactly what these transforms expect as input:
+
+dataloader = DataLoader(dataset, batch_size=4, shuffle=True)
+
+cutmix = v2.CutMix(num_classes=NUM_CLASSES)
+mixup = v2.MixUp(num_classes=NUM_CLASSES)
+cutmix_or_mixup = v2.RandomChoice([cutmix, mixup])
+
+for images, labels in dataloader:
+    print(f"Before CutMix/MixUp: {images.shape = }, {labels.shape = }")
+    images, labels = cutmix_or_mixup(images, labels)
+    print(f"After CutMix/MixUp: {images.shape = }, {labels.shape = }")
+
+    #
+    break
+# %%
+#
+# Note how the labels were also transformed: we went from a batched label of
+# shape (batch_size,) to a tensor of shape (batch_size, num_classes). The
+# transformed labels can still be passed as-is to a loss function like
+# :func:`torch.nn.functional.cross_entropy`.
+#
+# As part of the collation function
+# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+#
+# Passing the transforms after the DataLoader is the simplest way to use CutMix
+# and MixUp, but one disadvantage is that it does not take advantage of the
+# DataLoader multi-processing. For that, we can pass those transforms as part of
+# the collation function (refer to the `PyTorch docs
+# `_ to learn
+# more about collation).
+
+from torch.utils.data import default_collate
+
+
+def collate_fn(batch):
+    return cutmix_or_mixup(*default_collate(batch))
+
+
+dataloader = DataLoader(dataset, batch_size=4, shuffle=True, num_workers=2, collate_fn=collate_fn)
+
+for images, labels in dataloader:
+    print(f"{images.shape = }, {labels.shape = }")
+    # No need to call cutmix_or_mixup, it's already been called as part of the DataLoader!
+    #
+    break
+
+# %%
+# Non-standard input format
+# -------------------------
+#
+# So far we've used a typical sample structure where we pass ``(images,
+# labels)`` as inputs. MixUp and CutMix will magically work by default with most
+# common sample structures: tuples where the second parameter is a tensor label,
+# or dicts with a "label[s]" key. Look at the documentation of the
+# ``labels_getter`` parameter for more details.
+#
+# If your samples have a different structure, you can still use CutMix and MixUp
+# by passing a callable to the ``labels_getter`` parameter. For example:
+
+batch = {
+    "imgs": torch.rand(4, 3, 224, 224),
+    "target": {
+        "classes": torch.randint(0, NUM_CLASSES, size=(4,)),
+        "some_other_key": "this is going to be passed-through"
+    }
+}
+
+
+def labels_getter(batch):
+    return batch["target"]["classes"]
+
+
+out = v2.CutMix(num_classes=NUM_CLASSES, labels_getter=labels_getter)(batch)
+print(f"{out['imgs'].shape = }, {out['target']['classes'].shape = }")
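+
+# %%
+# Finally, note that the mixed-up (soft) labels produced above can be consumed
+# directly by a standard loss, as mentioned earlier. A minimal sketch with
+# random logits standing in for a model output:
+
+import torch.nn.functional as F
+
+logits = torch.rand(4, NUM_CLASSES)  # pretend these come from a classifier
+loss = F.cross_entropy(logits, out["target"]["classes"])  # accepts class probabilities
+print(f"{loss = }")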
diff --git a/gallery/transforms/plot_transforms_e2e.py b/gallery/transforms/plot_transforms_e2e.py
new file mode 100644
index 00000000000..765d7ad51e5
--- /dev/null
+++ b/gallery/transforms/plot_transforms_e2e.py
@@ -0,0 +1,181 @@
+"""
+===============================================================
+Transforms v2: End-to-end object detection/segmentation example
+===============================================================
+
+.. note::
+    Try on `Colab `_
+    or :ref:`go to the end ` to download the full example code.
+
+Object detection and segmentation tasks are natively supported:
+``torchvision.transforms.v2`` enables jointly transforming images, videos,
+bounding boxes, and masks.
+
+This example showcases an end-to-end instance segmentation training case using
+Torchvision utils from ``torchvision.datasets``, ``torchvision.models`` and
+``torchvision.transforms.v2``. Everything covered here can be applied similarly
+to object detection or semantic segmentation tasks.
+"""
+
+# %%
+import pathlib
+
+import torch
+import torch.utils.data
+
+from torchvision import models, datasets, tv_tensors
+from torchvision.transforms import v2
+
+torch.manual_seed(0)
+
+# This loads fake data for illustration purposes of this example. In practice, you'll have
+# to replace this with the proper data.
+# If you're trying to run this on Colab, you can download the assets and the
+# helpers from https://github.com/pytorch/vision/tree/main/gallery/
+ROOT = pathlib.Path("../assets") / "coco"
+IMAGES_PATH = str(ROOT / "images")
+ANNOTATIONS_PATH = str(ROOT / "instances.json")
+from helpers import plot
+
+
+# %%
+# Dataset preparation
+# -------------------
+#
+# We start off by loading the :class:`~torchvision.datasets.CocoDetection` dataset to have a look at what it currently
+# returns.
+
+dataset = datasets.CocoDetection(IMAGES_PATH, ANNOTATIONS_PATH)
+
+sample = dataset[0]
+img, target = sample
+print(f"{type(img) = }\n{type(target) = }\n{type(target[0]) = }\n{target[0].keys() = }")
+
+
+# %%
+# Torchvision datasets preserve the data structure and types as intended by the
+# dataset authors. So by default, the output structure may not always be
+# compatible with the models or the transforms.
+#
+# To overcome that, we can use the
+# :func:`~torchvision.datasets.wrap_dataset_for_transforms_v2` function. For
+# :class:`~torchvision.datasets.CocoDetection`, this changes the target
+# structure to a single dictionary of lists:
+
+dataset = datasets.wrap_dataset_for_transforms_v2(dataset, target_keys=("boxes", "labels", "masks"))
+
+sample = dataset[0]
+img, target = sample
+print(f"{type(img) = }\n{type(target) = }\n{target.keys() = }")
+print(f"{type(target['boxes']) = }\n{type(target['labels']) = }\n{type(target['masks']) = }")
+
+# %%
+# We used the ``target_keys`` parameter to specify the kind of output we're
+# interested in. Our dataset now returns a target which is a dict where the values
+# are :ref:`TVTensors ` (all are :class:`torch.Tensor`
+# subclasses). We dropped all unnecessary keys from the previous output, but
+# if you need any of the original keys e.g. "image_id", you can still ask for
+# it.
+#
+# .. note::
+#
+#     If you just want to do detection, you don't need and shouldn't pass
+#     "masks" in ``target_keys``: if masks are present in the sample, they will
+#     be transformed, slowing down your transformations unnecessarily.
+#
+# As a baseline, let's have a look at a sample without transformations:
+
+plot([dataset[0], dataset[1]])
+
+
+# %%
+# Transforms
+# ----------
+#
+# Let's now define our pre-processing transforms. All the transforms know how
+# to handle images, bounding boxes and masks when relevant.
+#
+# Transforms are typically passed as the ``transforms`` parameter of the
+# dataset so that they can leverage multi-processing from the
+# :class:`torch.utils.data.DataLoader`.
+
+# %%
+# .. note::
+#
+#     If you just want to do detection, you don't need and shouldn't pass
+#     "masks" in ``target_keys``: if masks are present in the sample, they will
+#     be transformed, slowing down your transformations unnecessarily.
+#
+# As a baseline, let's have a look at a sample without transformations:
+
+plot([dataset[0], dataset[1]])
+
+
+# %%
+# Transforms
+# ----------
+#
+# Let's now define our pre-processing transforms. All the transforms know how
+# to handle images, bounding boxes and masks when relevant.
+#
+# Transforms are typically passed as the ``transforms`` parameter of the
+# dataset so that they can leverage multi-processing from the
+# :class:`torch.utils.data.DataLoader`.
+
+transforms = v2.Compose(
+    [
+        v2.ToImage(),
+        v2.RandomPhotometricDistort(p=1),
+        v2.RandomZoomOut(fill={tv_tensors.Image: (123, 117, 104), "others": 0}),
+        v2.RandomIoUCrop(),
+        v2.RandomHorizontalFlip(p=1),
+        v2.SanitizeBoundingBoxes(),
+        v2.ToDtype(torch.float32, scale=True),
+    ]
+)
+
+dataset = datasets.CocoDetection(IMAGES_PATH, ANNOTATIONS_PATH, transforms=transforms)
+dataset = datasets.wrap_dataset_for_transforms_v2(dataset, target_keys=["boxes", "labels", "masks"])
+
+# %%
+# A few things are worth noting here:
+#
+# - We're converting the PIL image into a
+#   :class:`~torchvision.tv_tensors.Image` object. This isn't strictly
+#   necessary, but relying on Tensors (here: a Tensor subclass) will
+#   :ref:`generally be faster `.
+# - We are calling :class:`~torchvision.transforms.v2.SanitizeBoundingBoxes` to
+#   make sure we remove degenerate bounding boxes, as well as their
+#   corresponding labels and masks.
+#   :class:`~torchvision.transforms.v2.SanitizeBoundingBoxes` should be placed
+#   at least once at the end of a detection pipeline; it is particularly
+#   critical if :class:`~torchvision.transforms.v2.RandomIoUCrop` was used.
+#
+# Let's look at how a sample appears with our augmentation pipeline in place:
+
+# sphinx_gallery_thumbnail_number = 2
+plot([dataset[0], dataset[1]])
+
+
+# %%
+# We can see that the images were color-distorted, zoomed in or out, and flipped.
+# The bounding boxes and the masks were transformed accordingly. And without any further ado, we can start training.
+#
+# Data loading and training loop
+# ------------------------------
+#
+# Below we're using Mask R-CNN, an instance segmentation model, but
+# everything we've covered in this tutorial also applies to object detection and
+# semantic segmentation tasks.
+
+data_loader = torch.utils.data.DataLoader(
+    dataset,
+    batch_size=2,
+    # We need a custom collation function here, since the object detection
+    # models expect a sequence of images and target dictionaries. The default
+    # collation function tries to torch.stack() the individual elements,
+    # which fails in general for object detection, because the number of bounding
+    # boxes varies between the images of the same batch.
+    collate_fn=lambda batch: tuple(zip(*batch)),
+)
+
+model = models.get_model("maskrcnn_resnet50_fpn_v2", weights=None, weights_backbone=None).train()
+
+for imgs, targets in data_loader:
+    loss_dict = model(imgs, targets)
+    # Put your training logic here
+
+    print(f"{[img.shape for img in imgs] = }")
+    print(f"{[type(target) for target in targets] = }")
+    for name, loss_val in loss_dict.items():
+        print(f"{name:<20}{loss_val:.3f}")
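+
+# %%
+# To make the "training logic" placeholder above concrete, here is a minimal,
+# hypothetical training step (our addition). The optimizer and learning rate
+# are illustrative assumptions, not what the reference scripts use:
+
+optimizer = torch.optim.SGD(model.parameters(), lr=0.01)  # assumed hyper-parameters
+
+for imgs, targets in data_loader:
+    loss_dict = model(imgs, targets)
+    loss = sum(loss_dict.values())  # total loss is the sum of the individual terms
+    optimizer.zero_grad()
+    loss.backward()
+    optimizer.step()
+    break  # one step is enough for this illustration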
+
+# %%
+# Training References
+# -------------------
+#
+# From there, you can check out the `torchvision references
+# `_ where you'll find
+# the actual training scripts we use to train our models.
+#
+# **Disclaimer** The code in our references is more complex than what you'll
+# need for your own use-cases: this is because we're supporting different
+# backends (PIL, tensors, TVTensors) and different transforms namespaces (v1 and
+# v2). So don't be afraid to simplify and only keep what you need.
diff --git a/gallery/transforms/plot_transforms_getting_started.py b/gallery/transforms/plot_transforms_getting_started.py
new file mode 100644
index 00000000000..2696a9e57e7
--- /dev/null
+++ b/gallery/transforms/plot_transforms_getting_started.py
@@ -0,0 +1,266 @@
+"""
+==================================
+Getting started with transforms v2
+==================================
+
+.. note::
+    Try on `Colab `_
+    or :ref:`go to the end ` to download the full example code.
+
+This example illustrates everything you need to know to get started with the
+new :mod:`torchvision.transforms.v2` API. We'll cover simple tasks like
+image classification, and more advanced ones like object detection /
+segmentation.
+"""
+
+# %%
+# First, a bit of setup
+from pathlib import Path
+import torch
+import matplotlib.pyplot as plt
+plt.rcParams["savefig.bbox"] = 'tight'
+
+from torchvision.transforms import v2
+from torchvision.io import decode_image
+
+torch.manual_seed(1)
+
+# If you're trying to run this on Colab, you can download the assets and the
+# helpers from https://github.com/pytorch/vision/tree/main/gallery/
+from helpers import plot
+img = decode_image(str(Path('../assets') / 'astronaut.jpg'))
+print(f"{type(img) = }, {img.dtype = }, {img.shape = }")
+
+# %%
+# The basics
+# ----------
+#
+# The Torchvision transforms behave like a regular :class:`torch.nn.Module` (in
+# fact, most of them are): instantiate a transform, pass an input, get a
+# transformed output:
+
+transform = v2.RandomCrop(size=(224, 224))
+out = transform(img)
+
+plot([img, out])
+
+# %%
+# I just want to do image classification
+# --------------------------------------
+#
+# If you just care about image classification, things are very simple. A basic
+# classification pipeline may look like this:
+
+transforms = v2.Compose([
+    v2.RandomResizedCrop(size=(224, 224), antialias=True),
+    v2.RandomHorizontalFlip(p=0.5),
+    v2.ToDtype(torch.float32, scale=True),
+    v2.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
+])
+out = transforms(img)
+
+plot([img, out])
+
+# %%
+# Such a transformation pipeline is typically passed as the ``transform`` argument
+# to the :ref:`Datasets `, e.g. ``ImageNet(...,
+# transform=transforms)``.
+#
+# That's pretty much all there is. From there, read through our :ref:`main docs
+# ` to learn more about recommended practices and conventions, or
+# explore more :ref:`examples ` e.g. how to use augmentation
+# transforms like :ref:`CutMix and MixUp
+# `.
+#
+# .. note::
+#
+#     If you're already relying on the ``torchvision.transforms`` v1 API,
+#     we recommend :ref:`switching to the new v2 transforms`. It's
+#     very easy: the v2 transforms are fully compatible with the v1 API, so you
+#     only need to change the import!
+#
+# Detection, Segmentation, Videos
+# -------------------------------
+#
+# The new Torchvision transforms in the ``torchvision.transforms.v2`` namespace
+# support tasks beyond image classification: they can also transform bounding
+# boxes, segmentation / detection masks, or videos.
+#
+# Let's briefly look at a detection example with bounding boxes.
+
+from torchvision import tv_tensors  # we'll describe this a bit later, bear with us
+
+boxes = tv_tensors.BoundingBoxes(
+    [
+        [15, 10, 370, 510],
+        [275, 340, 510, 510],
+        [130, 345, 210, 425]
+    ],
+    format="XYXY", canvas_size=img.shape[-2:])
+
+transforms = v2.Compose([
+    v2.RandomResizedCrop(size=(224, 224), antialias=True),
+    v2.RandomPhotometricDistort(p=1),
+    v2.RandomHorizontalFlip(p=1),
+])
+out_img, out_boxes = transforms(img, boxes)
+print(type(boxes), type(out_boxes))
+
+plot([(img, boxes), (out_img, out_boxes)])
+
+# %%
+#
+# The example above focuses on object detection. But if we had masks
+# (:class:`torchvision.tv_tensors.Mask`) for object segmentation or semantic
+# segmentation, or videos (:class:`torchvision.tv_tensors.Video`), we could have
+# passed them to the transforms in exactly the same way.
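+
+# %%
+# For instance, here is a sketch (our addition) with made-up segmentation masks
+# (three random binary instance masks, purely for illustration); they go through
+# the exact same pipeline as the image and boxes above:
+
+masks = tv_tensors.Mask(torch.randint(0, 2, size=(3, *img.shape[-2:]), dtype=torch.uint8))
+out_img, out_boxes, out_masks = transforms(img, boxes, masks)
+print(f"{type(out_masks) = }, {out_masks.shape = }")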
+
+# %%
+# By now you likely have a few questions: what are these TVTensors, how do we
+# use them, and what is the expected input/output of those transforms? We'll
+# answer these in the next sections.
+
+# %%
+#
+# .. _what_are_tv_tensors:
+#
+# What are TVTensors?
+# -------------------
+#
+# TVTensors are :class:`torch.Tensor` subclasses. The available TVTensors are
+# :class:`~torchvision.tv_tensors.Image`,
+# :class:`~torchvision.tv_tensors.BoundingBoxes`,
+# :class:`~torchvision.tv_tensors.Mask`, and
+# :class:`~torchvision.tv_tensors.Video`.
+#
+# TVTensors look and feel just like regular tensors - they **are** tensors.
+# Everything that is supported on a plain :class:`torch.Tensor` like ``.sum()``
+# or any ``torch.*`` operator will also work on a TVTensor:
+
+img_dp = tv_tensors.Image(torch.randint(0, 256, (3, 256, 256), dtype=torch.uint8))
+
+print(f"{isinstance(img_dp, torch.Tensor) = }")
+print(f"{img_dp.dtype = }, {img_dp.shape = }, {img_dp.sum() = }")
+
+# %%
+# These TVTensor classes are at the core of the transforms: in order to
+# transform a given input, the transforms first look at the **class** of the
+# object, and dispatch to the appropriate implementation accordingly.
+#
+# You don't need to know much more about TVTensors at this point, but advanced
+# users who want to learn more can refer to
+# :ref:`sphx_glr_auto_examples_transforms_plot_tv_tensors.py`.
+#
+# What do I pass as input?
+# ------------------------
+#
+# Above, we've seen two examples: one where we passed a single image as input
+# i.e. ``out = transforms(img)``, and one where we passed both an image and
+# bounding boxes, i.e. ``out_img, out_boxes = transforms(img, boxes)``.
+#
+# In fact, transforms support **arbitrary input structures**. The input can be a
+# single image, a tuple, an arbitrarily nested dictionary... pretty much
+# anything. The same structure will be returned as output. Below, we use the
+# same detection transforms, but pass a tuple (image, target_dict) as input and
+# we're getting the same structure as output:
+
+target = {
+    "boxes": boxes,
+    "labels": torch.arange(boxes.shape[0]),
+    "this_is_ignored": ("arbitrary", {"structure": "!"})
+}
+
+# Re-using the transforms and definitions from above.
+out_img, out_target = transforms(img, target)
+
+# sphinx_gallery_thumbnail_number = 4
+plot([(img, target["boxes"]), (out_img, out_target["boxes"])])
+print(f"{out_target['this_is_ignored']}")
+
+# %%
+# We passed a tuple so we get a tuple back, and the second element is the
+# transformed target dict. Transforms don't really care about the structure of
+# the input; as mentioned above, they only care about the **type** of the
+# objects and transform them accordingly.
+#
+# *Foreign* objects like strings or ints are simply passed-through. This can be
+# useful e.g. if you want to associate a path with every single sample when
+# debugging!
+#
+# .. _passthrough_heuristic:
+#
+# .. note::
+#
+#     **Disclaimer** This note is slightly advanced and can be safely skipped on
+#     a first read.
+#
+#     Pure :class:`torch.Tensor` objects are, in general, treated as images (or
+#     as videos for video-specific transforms). Indeed, you may have noticed
+#     that in the code above we haven't used the
+#     :class:`~torchvision.tv_tensors.Image` class at all, and yet our images
+#     got transformed properly. Transforms use the following logic to
+#     determine whether a pure Tensor should be treated as an image (or video),
+#     or just ignored:
+#
+#     * If there is an :class:`~torchvision.tv_tensors.Image`,
+#       :class:`~torchvision.tv_tensors.Video`,
+#       or :class:`PIL.Image.Image` instance in the input, all other pure
+#       tensors are passed-through.
+#     * If there is no :class:`~torchvision.tv_tensors.Image` or
+#       :class:`~torchvision.tv_tensors.Video` instance, only the first pure
+#       :class:`torch.Tensor` will be transformed as image or video, while all
+#       others will be passed-through. Here "first" means "first in a depth-wise
+#       traversal".
+#
+#     This is what happened in the detection example above: the first pure
+#     tensor was the image so it got transformed properly, and all other pure
+#     tensor instances like the ``labels`` were passed-through (although labels
+#     can still be transformed by some transforms like
+#     :class:`~torchvision.transforms.v2.SanitizeBoundingBoxes`!).
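+
+# %%
+# A quick sketch of this heuristic (our addition), using plain tensors only:
+# the first pure tensor is resized as an image, the second is passed through.
+
+plain_img = torch.randint(0, 256, (3, 100, 100), dtype=torch.uint8)
+plain_labels = torch.tensor([0, 1, 2])
+out_plain_img, out_plain_labels = v2.Resize(size=(32, 32), antialias=True)(plain_img, plain_labels)
+print(f"{out_plain_img.shape = }")     # transformed as an image
+print(f"{out_plain_labels.shape = }")  # passed through untouched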
+
+# %%
+# .. _transforms_datasets_intercompatibility:
+#
+# Transforms and Datasets intercompatibility
+# ------------------------------------------
+#
+# Roughly speaking, the output of the datasets must correspond to the input of
+# the transforms. How to do that depends on whether you're using the torchvision
+# :ref:`built-in datasets `, or your own custom datasets.
+#
+# Using built-in datasets
+# ^^^^^^^^^^^^^^^^^^^^^^^
+#
+# If you're just doing image classification, you don't need to do anything. Just
+# use the ``transform`` argument of the dataset, e.g. ``ImageNet(...,
+# transform=transforms)``, and you're good to go.
+#
+# Torchvision also supports datasets for object detection or segmentation like
+# :class:`torchvision.datasets.CocoDetection`. Those datasets predate
+# the existence of the :mod:`torchvision.transforms.v2` module and of the
+# TVTensors, so they don't return TVTensors out of the box.
+#
+# An easy way to force those datasets to return TVTensors and to make them
+# compatible with v2 transforms is to use the
+# :func:`torchvision.datasets.wrap_dataset_for_transforms_v2` function:
+#
+# .. code-block:: python
+#
+#     from torchvision.datasets import CocoDetection, wrap_dataset_for_transforms_v2
+#
+#     dataset = CocoDetection(..., transforms=my_transforms)
+#     dataset = wrap_dataset_for_transforms_v2(dataset)
+#     # Now the dataset returns TVTensors!
+#
+# Using your own datasets
+# ^^^^^^^^^^^^^^^^^^^^^^^
+#
+# If you have a custom dataset, then you'll need to convert your objects into
+# the appropriate TVTensor classes. Creating TVTensor instances is very easy;
+# refer to :ref:`tv_tensor_creation` for more details.
+#
+# There are two main places where you can implement that conversion logic:
+#
+# - At the end of the dataset's ``__getitem__`` method, before returning the
+#   sample (or by sub-classing the dataset).
+# - As the very first step of your transforms pipeline.
+#
+# Either way, the logic will depend on your specific dataset. A sketch of the
+# first option follows.
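+
+# %%
+# As a hedged illustration of the first option, here is a made-up toy dataset
+# (the class and its fields are assumptions for this sketch, not a torchvision
+# API) that wraps its raw outputs into TVTensors before returning them:
+
+import torch.utils.data
+
+
+class ToyDetectionDataset(torch.utils.data.Dataset):
+    """Hypothetical dataset returning an image and a target dict of TVTensors."""
+
+    def __init__(self, transforms=None):
+        self.transforms = transforms
+
+    def __len__(self):
+        return 10
+
+    def __getitem__(self, idx):
+        # In a real dataset, these would be read from disk or decoded from an
+        # annotations file.
+        img = tv_tensors.Image(torch.randint(0, 256, (3, 128, 128), dtype=torch.uint8))
+        target = {
+            "boxes": tv_tensors.BoundingBoxes(
+                [[10, 10, 50, 50]], format="XYXY", canvas_size=img.shape[-2:]
+            ),
+            "labels": torch.tensor([1]),
+        }
+        if self.transforms is not None:
+            img, target = self.transforms(img, target)
+        return img, target
+
+
+img, target = ToyDetectionDataset(transforms=v2.RandomHorizontalFlip(p=1))[0]
+print(f"{type(img) = }, {type(target['boxes']) = }")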
diff --git a/gallery/transforms/plot_transforms_illustrations.py b/gallery/transforms/plot_transforms_illustrations.py
new file mode 100644
index 00000000000..0c1f3b40021
--- /dev/null
+++ b/gallery/transforms/plot_transforms_illustrations.py
@@ -0,0 +1,331 @@
+"""
+==========================
+Illustration of transforms
+==========================
+
+.. note::
+    Try on `Colab `_
+    or :ref:`go to the end ` to download the full example code.
+
+This example illustrates some of the various transforms available in :ref:`the
+torchvision.transforms.v2 module `.
+"""
+# %%
+
+# sphinx_gallery_thumbnail_path = "../../gallery/assets/transforms_thumbnail.png"
+
+from PIL import Image
+from pathlib import Path
+import matplotlib.pyplot as plt
+
+import torch
+from torchvision.transforms import v2
+
+plt.rcParams["savefig.bbox"] = 'tight'
+
+# If you change the seed, make sure that the randomly-applied transforms
+# properly show that the image can be both transformed and *not* transformed!
+torch.manual_seed(0)
+
+# If you're trying to run this on Colab, you can download the assets and the
+# helpers from https://github.com/pytorch/vision/tree/main/gallery/
+from helpers import plot
+orig_img = Image.open(Path('../assets') / 'astronaut.jpg')
+
+# %%
+# Geometric Transforms
+# --------------------
+# Geometric image transformations alter the geometric properties of an image,
+# such as its shape, size, orientation, or position, by applying mathematical
+# operations to its pixels or coordinates.
+#
+# Pad
+# ~~~
+# The :class:`~torchvision.transforms.Pad` transform
+# (see also :func:`~torchvision.transforms.functional.pad`)
+# pads all image borders with some pixel values.
+padded_imgs = [v2.Pad(padding=padding)(orig_img) for padding in (3, 10, 30, 50)]
+plot([orig_img] + padded_imgs)
+
+# %%
+# Resize
+# ~~~~~~
+# The :class:`~torchvision.transforms.Resize` transform
+# (see also :func:`~torchvision.transforms.functional.resize`)
+# resizes an image.
+resized_imgs = [v2.Resize(size=size)(orig_img) for size in (30, 50, 100, orig_img.size)]
+plot([orig_img] + resized_imgs)
+
+# %%
+# CenterCrop
+# ~~~~~~~~~~
+# The :class:`~torchvision.transforms.CenterCrop` transform
+# (see also :func:`~torchvision.transforms.functional.center_crop`)
+# crops the given image at the center.
+center_crops = [v2.CenterCrop(size=size)(orig_img) for size in (30, 50, 100, orig_img.size)]
+plot([orig_img] + center_crops)
+
+# %%
+# FiveCrop
+# ~~~~~~~~
+# The :class:`~torchvision.transforms.FiveCrop` transform
+# (see also :func:`~torchvision.transforms.functional.five_crop`)
+# crops the given image into the four corners and the central crop.
+(top_left, top_right, bottom_left, bottom_right, center) = v2.FiveCrop(size=(100, 100))(orig_img)
+plot([orig_img] + [top_left, top_right, bottom_left, bottom_right, center])
+
+# %%
+# RandomPerspective
+# ~~~~~~~~~~~~~~~~~
+# The :class:`~torchvision.transforms.RandomPerspective` transform
+# (see also :func:`~torchvision.transforms.functional.perspective`)
+# performs a random perspective transform on an image.
+perspective_transformer = v2.RandomPerspective(distortion_scale=0.6, p=1.0)
+perspective_imgs = [perspective_transformer(orig_img) for _ in range(4)]
+plot([orig_img] + perspective_imgs)
+
+# %%
+# RandomRotation
+# ~~~~~~~~~~~~~~
+# The :class:`~torchvision.transforms.RandomRotation` transform
+# (see also :func:`~torchvision.transforms.functional.rotate`)
+# rotates an image by a random angle.
+rotater = v2.RandomRotation(degrees=(0, 180))
+rotated_imgs = [rotater(orig_img) for _ in range(4)]
+plot([orig_img] + rotated_imgs)
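+
+# %%
+# A side note (our addition): transforms like this one draw their random
+# parameters from the global RNG at each call, so re-seeding makes a given call
+# reproducible. A minimal sketch:
+from torchvision.transforms.v2 import functional as F
+
+torch.manual_seed(0)
+rotated_a = rotater(orig_img)
+torch.manual_seed(0)
+rotated_b = rotater(orig_img)
+assert (F.pil_to_tensor(rotated_a) == F.pil_to_tensor(rotated_b)).all()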
+
+# %%
+# RandomAffine
+# ~~~~~~~~~~~~
+# The :class:`~torchvision.transforms.RandomAffine` transform
+# (see also :func:`~torchvision.transforms.functional.affine`)
+# performs a random affine transform on an image.
+affine_transformer = v2.RandomAffine(degrees=(30, 70), translate=(0.1, 0.3), scale=(0.5, 0.75))
+affine_imgs = [affine_transformer(orig_img) for _ in range(4)]
+plot([orig_img] + affine_imgs)
+
+# %%
+# ElasticTransform
+# ~~~~~~~~~~~~~~~~
+# The :class:`~torchvision.transforms.ElasticTransform` transform
+# (see also :func:`~torchvision.transforms.functional.elastic_transform`)
+# randomly transforms the morphology of objects in images, producing a
+# see-through-water-like effect.
+elastic_transformer = v2.ElasticTransform(alpha=250.0)
+transformed_imgs = [elastic_transformer(orig_img) for _ in range(2)]
+plot([orig_img] + transformed_imgs)
+
+# %%
+# RandomCrop
+# ~~~~~~~~~~
+# The :class:`~torchvision.transforms.RandomCrop` transform
+# (see also :func:`~torchvision.transforms.functional.crop`)
+# crops an image at a random location.
+cropper = v2.RandomCrop(size=(128, 128))
+crops = [cropper(orig_img) for _ in range(4)]
+plot([orig_img] + crops)
+
+# %%
+# RandomResizedCrop
+# ~~~~~~~~~~~~~~~~~
+# The :class:`~torchvision.transforms.RandomResizedCrop` transform
+# (see also :func:`~torchvision.transforms.functional.resized_crop`)
+# crops an image at a random location, and then resizes the crop to a given
+# size.
+resize_cropper = v2.RandomResizedCrop(size=(32, 32))
+resized_crops = [resize_cropper(orig_img) for _ in range(4)]
+plot([orig_img] + resized_crops)
+
+# %%
+# Photometric Transforms
+# ----------------------
+# Photometric image transformations modify the photometric properties of an
+# image, such as its brightness, contrast, color, or tone. They change the
+# visual appearance of an image while preserving its geometric structure.
+#
+# Except for :class:`~torchvision.transforms.Grayscale`, the following transforms are random,
+# which means that the same transform
+# instance will produce different results each time it transforms a given image.
+#
+# Grayscale
+# ~~~~~~~~~
+# The :class:`~torchvision.transforms.Grayscale` transform
+# (see also :func:`~torchvision.transforms.functional.to_grayscale`)
+# converts an image to grayscale.
+gray_img = v2.Grayscale()(orig_img)
+plot([orig_img, gray_img], cmap='gray')
+
+# %%
+# ColorJitter
+# ~~~~~~~~~~~
+# The :class:`~torchvision.transforms.ColorJitter` transform
+# randomly changes the brightness, contrast, saturation, and hue of an image.
+jitter = v2.ColorJitter(brightness=.5, hue=.3)
+jittered_imgs = [jitter(orig_img) for _ in range(4)]
+plot([orig_img] + jittered_imgs)
+
+# %%
+# GaussianBlur
+# ~~~~~~~~~~~~
+# The :class:`~torchvision.transforms.GaussianBlur` transform
+# (see also :func:`~torchvision.transforms.functional.gaussian_blur`)
+# performs a Gaussian blur on an image.
+blurrer = v2.GaussianBlur(kernel_size=(5, 9), sigma=(0.1, 5.))
+blurred_imgs = [blurrer(orig_img) for _ in range(4)]
+plot([orig_img] + blurred_imgs)
+
+# %%
+# RandomInvert
+# ~~~~~~~~~~~~
+# The :class:`~torchvision.transforms.RandomInvert` transform
+# (see also :func:`~torchvision.transforms.functional.invert`)
+# randomly inverts the colors of the given image.
+inverter = v2.RandomInvert()
+inverted_imgs = [inverter(orig_img) for _ in range(4)]
+plot([orig_img] + inverted_imgs)
+
+# %%
+# RandomPosterize
+# ~~~~~~~~~~~~~~~
+# The :class:`~torchvision.transforms.RandomPosterize` transform
+# (see also :func:`~torchvision.transforms.functional.posterize`)
+# randomly posterizes the image by reducing the number of bits
+# of each color channel.
+posterizer = v2.RandomPosterize(bits=2)
+posterized_imgs = [posterizer(orig_img) for _ in range(4)]
+plot([orig_img] + posterized_imgs)
+
+# %%
+# RandomSolarize
+# ~~~~~~~~~~~~~~
+# The :class:`~torchvision.transforms.RandomSolarize` transform
+# (see also :func:`~torchvision.transforms.functional.solarize`)
+# randomly solarizes the image by inverting all pixel values above
+# the threshold.
+solarizer = v2.RandomSolarize(threshold=192.0)
+solarized_imgs = [solarizer(orig_img) for _ in range(4)]
+plot([orig_img] + solarized_imgs)
+
+# %%
+# RandomAdjustSharpness
+# ~~~~~~~~~~~~~~~~~~~~~
+# The :class:`~torchvision.transforms.RandomAdjustSharpness` transform
+# (see also :func:`~torchvision.transforms.functional.adjust_sharpness`)
+# randomly adjusts the sharpness of the given image.
+sharpness_adjuster = v2.RandomAdjustSharpness(sharpness_factor=2)
+sharpened_imgs = [sharpness_adjuster(orig_img) for _ in range(4)]
+plot([orig_img] + sharpened_imgs)
+
+# %%
+# RandomAutocontrast
+# ~~~~~~~~~~~~~~~~~~
+# The :class:`~torchvision.transforms.RandomAutocontrast` transform
+# (see also :func:`~torchvision.transforms.functional.autocontrast`)
+# randomly applies autocontrast to the given image.
+autocontraster = v2.RandomAutocontrast()
+autocontrasted_imgs = [autocontraster(orig_img) for _ in range(4)]
+plot([orig_img] + autocontrasted_imgs)
+
+# %%
+# RandomEqualize
+# ~~~~~~~~~~~~~~
+# The :class:`~torchvision.transforms.RandomEqualize` transform
+# (see also :func:`~torchvision.transforms.functional.equalize`)
+# randomly equalizes the histogram of the given image.
+equalizer = v2.RandomEqualize()
+equalized_imgs = [equalizer(orig_img) for _ in range(4)]
+plot([orig_img] + equalized_imgs)
+
+# %%
+# JPEG
+# ~~~~
+# The :class:`~torchvision.transforms.v2.JPEG` transform
+# (see also :func:`~torchvision.transforms.v2.functional.jpeg`)
+# applies JPEG compression to the given image, with a random
+# degree of compression.
+jpeg = v2.JPEG((5, 50))
+jpeg_imgs = [jpeg(orig_img) for _ in range(4)]
+plot([orig_img] + jpeg_imgs)
+
+# %%
+# Augmentation Transforms
+# -----------------------
+# The following transforms are combinations of multiple transforms,
+# either geometric or photometric, or both.
+#
+# AutoAugment
+# ~~~~~~~~~~~
+# The :class:`~torchvision.transforms.AutoAugment` transform
+# automatically augments data based on a given auto-augmentation policy.
+# See :class:`~torchvision.transforms.AutoAugmentPolicy` for the available policies.
+policies = [v2.AutoAugmentPolicy.CIFAR10, v2.AutoAugmentPolicy.IMAGENET, v2.AutoAugmentPolicy.SVHN]
+augmenters = [v2.AutoAugment(policy) for policy in policies]
+imgs = [
+    [augmenter(orig_img) for _ in range(4)]
+    for augmenter in augmenters
+]
+row_title = [str(policy).split('.')[-1] for policy in policies]
+plot([[orig_img] + row for row in imgs], row_title=row_title)
+
+# %%
+# RandAugment
+# ~~~~~~~~~~~
+# The :class:`~torchvision.transforms.RandAugment` transform is an alternative
+# to AutoAugment.
+augmenter = v2.RandAugment()
+imgs = [augmenter(orig_img) for _ in range(4)]
+plot([orig_img] + imgs)
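+
+# %%
+# A note from us: ``num_ops`` and ``magnitude`` are RandAugment's main knobs.
+# As a sketch with stronger, purely illustrative settings:
+strong_augmenter = v2.RandAugment(num_ops=3, magnitude=15)
+imgs = [strong_augmenter(orig_img) for _ in range(4)]
+plot([orig_img] + imgs)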
+
+# %%
+# TrivialAugmentWide
+# ~~~~~~~~~~~~~~~~~~
+# The :class:`~torchvision.transforms.TrivialAugmentWide` transform is an
+# alternative implementation of AutoAugment.
+# However, instead of transforming an image multiple times, it transforms an image only once,
+# using a random transform from a given list with a random strength.
+augmenter = v2.TrivialAugmentWide()
+imgs = [augmenter(orig_img) for _ in range(4)]
+plot([orig_img] + imgs)
+
+# %%
+# AugMix
+# ~~~~~~
+# The :class:`~torchvision.transforms.AugMix` transform interpolates between augmented versions of an image.
+augmenter = v2.AugMix()
+imgs = [augmenter(orig_img) for _ in range(4)]
+plot([orig_img] + imgs)
+
+# %%
+# Randomly-applied Transforms
+# ---------------------------
+#
+# The following transforms are applied randomly, with a given probability ``p``. That is, given ``p = 0.5``,
+# there is a 50% chance to return the original image, and a 50% chance to return the transformed image,
+# even when called with the same transform instance!
+#
+# RandomHorizontalFlip
+# ~~~~~~~~~~~~~~~~~~~~
+# The :class:`~torchvision.transforms.RandomHorizontalFlip` transform
+# (see also :func:`~torchvision.transforms.functional.hflip`)
+# performs a horizontal flip of an image, with a given probability.
+hflipper = v2.RandomHorizontalFlip(p=0.5)
+transformed_imgs = [hflipper(orig_img) for _ in range(4)]
+plot([orig_img] + transformed_imgs)
+
+# %%
+# RandomVerticalFlip
+# ~~~~~~~~~~~~~~~~~~
+# The :class:`~torchvision.transforms.RandomVerticalFlip` transform
+# (see also :func:`~torchvision.transforms.functional.vflip`)
+# performs a vertical flip of an image, with a given probability.
+vflipper = v2.RandomVerticalFlip(p=0.5)
+transformed_imgs = [vflipper(orig_img) for _ in range(4)]
+plot([orig_img] + transformed_imgs)
+
+# %%
+# RandomApply
+# ~~~~~~~~~~~
+# The :class:`~torchvision.transforms.RandomApply` transform
+# randomly applies a list of transforms, with a given probability.
+applier = v2.RandomApply(transforms=[v2.RandomCrop(size=(64, 64))], p=0.5)
+transformed_imgs = [applier(orig_img) for _ in range(4)]
+plot([orig_img] + transformed_imgs)
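+
+# %%
+# A closing note (our addition): when you need the *deterministic* counterpart
+# of a randomly-applied transform, the functional API applies the operation
+# unconditionally. A minimal sketch:
+from torchvision.transforms.v2 import functional as F
+
+always_flipped = F.horizontal_flip(orig_img)  # always flips, no randomness involved
+plot([orig_img, always_flipped])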
diff --git a/gallery/transforms/plot_tv_tensors.py b/gallery/transforms/plot_tv_tensors.py
new file mode 100644
index 00000000000..5bce37aa374
--- /dev/null
+++ b/gallery/transforms/plot_tv_tensors.py
@@ -0,0 +1,224 @@
+"""
+=============
+TVTensors FAQ
+=============
+
+.. note::
+    Try on `Colab `_
+    or :ref:`go to the end ` to download the full example code.
+
+
+TVTensors are Tensor subclasses introduced together with
+``torchvision.transforms.v2``. This example showcases what these TVTensors are
+and how they behave.
+
+.. warning::
+
+    **Intended Audience** Unless you're writing your own transforms or your own TVTensors, you
+    probably do not need to read this guide. This is a fairly low-level topic
+    that most users will not need to worry about: you do not need to understand
+    the internals of TVTensors to efficiently rely on
+    ``torchvision.transforms.v2``. It may however be useful for advanced users
+    trying to implement their own datasets or transforms, or to work directly
+    with TVTensors.
+"""
+
+# %%
+import PIL.Image
+
+import torch
+from torchvision import tv_tensors
+
+
+# %%
+# What are TVTensors?
+# -------------------
+#
+# TVTensors are zero-copy tensor subclasses:
+
+tensor = torch.rand(3, 256, 256)
+image = tv_tensors.Image(tensor)
+
+assert isinstance(image, torch.Tensor)
+assert image.data_ptr() == tensor.data_ptr()
+
+# %%
+# Under the hood, they are needed in :mod:`torchvision.transforms.v2` to correctly dispatch to the appropriate function
+# for the input data.
+#
+# :mod:`torchvision.tv_tensors` supports four types of TVTensors:
+#
+# * :class:`~torchvision.tv_tensors.Image`
+# * :class:`~torchvision.tv_tensors.Video`
+# * :class:`~torchvision.tv_tensors.BoundingBoxes`
+# * :class:`~torchvision.tv_tensors.Mask`
+#
+# What can I do with a TVTensor?
+# ------------------------------
+#
+# TVTensors look and feel just like regular tensors - they **are** tensors.
+# Everything that is supported on a plain :class:`torch.Tensor` like ``.sum()`` or
+# any ``torch.*`` operator will also work on TVTensors. See
+# :ref:`tv_tensor_unwrapping_behaviour` for a few gotchas.
+
+# %%
+# .. _tv_tensor_creation:
+#
+# How do I construct a TVTensor?
+# ------------------------------
+#
+# Using the constructor
+# ^^^^^^^^^^^^^^^^^^^^^
+#
+# Each TVTensor class takes any tensor-like data that can be turned into a :class:`~torch.Tensor`:
+
+image = tv_tensors.Image([[[[0, 1], [1, 0]]]])
+print(image)
+
+
+# %%
+# Similar to other PyTorch creation ops, the constructor also takes the ``dtype``, ``device``, and ``requires_grad``
+# parameters.
+
+float_image = tv_tensors.Image([[[0, 1], [1, 0]]], dtype=torch.float32, requires_grad=True)
+print(float_image)
+
+
+# %%
+# In addition, :class:`~torchvision.tv_tensors.Image` and :class:`~torchvision.tv_tensors.Mask` can also take a
+# :class:`PIL.Image.Image` directly:
+
+image = tv_tensors.Image(PIL.Image.open("../assets/astronaut.jpg"))
+print(image.shape, image.dtype)
+
+# %%
+# Some TVTensors require additional metadata to be passed in order to be constructed. For example,
+# :class:`~torchvision.tv_tensors.BoundingBoxes` requires the coordinate format as well as the size of the
+# corresponding image (``canvas_size``) alongside the actual values. This
+# metadata is required to properly transform the bounding boxes.
+
+bboxes = tv_tensors.BoundingBoxes(
+    [[17, 16, 344, 495], [0, 10, 0, 10]],
+    format=tv_tensors.BoundingBoxFormat.XYXY,
+    canvas_size=image.shape[-2:]
+)
+print(bboxes)
+
+# %%
+# Using ``tv_tensors.wrap()``
+# ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+#
+# You can also use the :func:`~torchvision.tv_tensors.wrap` function to wrap a tensor object
+# into a TVTensor. This is useful when you already have an object of the
+# desired type, which typically happens when writing transforms: you just want
+# to wrap the output like the input.
+
+new_bboxes = torch.tensor([0, 20, 30, 40])
+new_bboxes = tv_tensors.wrap(new_bboxes, like=bboxes)
+assert isinstance(new_bboxes, tv_tensors.BoundingBoxes)
+assert new_bboxes.canvas_size == bboxes.canvas_size
+
+# %%
+# The metadata of ``new_bboxes`` is the same as that of ``bboxes``, but you could pass
+# it as a parameter to override it.
+#
+# .. _tv_tensor_unwrapping_behaviour:
+#
+# I had a TVTensor but now I have a Tensor. Help!
+# -----------------------------------------------
+#
+# By default, operations on :class:`~torchvision.tv_tensors.TVTensor` objects
+# will return a pure Tensor:
+
+
+assert isinstance(bboxes, tv_tensors.BoundingBoxes)
+
+# Shift bboxes by 3 pixels in both H and W
+new_bboxes = bboxes + 3
+
+assert isinstance(new_bboxes, torch.Tensor)
+assert not isinstance(new_bboxes, tv_tensors.BoundingBoxes)
+
+# %%
+# .. note::
+#
+#    This behavior only affects native ``torch`` operations. If you are using
+#    the built-in ``torchvision`` transforms or functionals, you will always get
+#    as output the same type that you passed as input (pure ``Tensor`` or
+#    ``TVTensor``).
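+
+# %%
+# For instance (our addition, as a quick check of the note above), passing the
+# boxes through a built-in v2 transform preserves the TVTensor type, unlike the
+# raw ``+ 3`` above:
+
+from torchvision.transforms import v2
+
+padded_bboxes = v2.Pad(padding=3)(bboxes)
+assert isinstance(padded_bboxes, tv_tensors.BoundingBoxes)
+print(type(padded_bboxes))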
+
+# %%
+# But I want a TVTensor back!
+# ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+#
+# You can re-wrap a pure tensor into a TVTensor by just calling the TVTensor
+# constructor, or by using the :func:`~torchvision.tv_tensors.wrap` function
+# (see more details above in :ref:`tv_tensor_creation`):
+
+new_bboxes = bboxes + 3
+new_bboxes = tv_tensors.wrap(new_bboxes, like=bboxes)
+assert isinstance(new_bboxes, tv_tensors.BoundingBoxes)
+
+# %%
+# Alternatively, you can use :func:`~torchvision.tv_tensors.set_return_type`
+# as a global config setting for the whole program, or as a context manager
+# (read its docs to learn more about caveats):
+
+with tv_tensors.set_return_type("TVTensor"):
+    new_bboxes = bboxes + 3
+assert isinstance(new_bboxes, tv_tensors.BoundingBoxes)
+
+# %%
+# Why is this happening?
+# ^^^^^^^^^^^^^^^^^^^^^^
+#
+# **For performance reasons**. :class:`~torchvision.tv_tensors.TVTensor`
+# classes are Tensor subclasses, so any operation involving a
+# :class:`~torchvision.tv_tensors.TVTensor` object will go through the
+# `__torch_function__
+# `_
+# protocol. This induces a small overhead, which we want to avoid when possible.
+# This doesn't matter for built-in ``torchvision`` transforms because we can
+# avoid the overhead there, but it could be a problem in your model's
+# ``forward``.
+#
+# **The alternative isn't much better anyway.** For every operation where
+# preserving the :class:`~torchvision.tv_tensors.TVTensor` type makes
+# sense, there are just as many operations where returning a pure Tensor is
+# preferable: for example, is ``img.sum()`` still an :class:`~torchvision.tv_tensors.Image`?
+# If we were to preserve :class:`~torchvision.tv_tensors.TVTensor` types all
+# the way, even the model's logits or the output of the loss function would end up
+# being of type :class:`~torchvision.tv_tensors.Image`, and surely that's not
+# desirable.
+#
+# .. note::
+#
+#    This behavior is something we're actively seeking feedback on. If you find it surprising or if you
+#    have any suggestions on how to better support your use-cases, please reach out to us via this issue:
+#    https://github.com/pytorch/vision/issues/7319
+#
+# Exceptions
+# ^^^^^^^^^^
+#
+# There are a few exceptions to this "unwrapping" rule:
+# :meth:`~torch.Tensor.clone`, :meth:`~torch.Tensor.to`,
+# :meth:`torch.Tensor.detach`, and :meth:`~torch.Tensor.requires_grad_` retain
+# the TVTensor type.
+#
+# Inplace operations on TVTensors like ``obj.add_()`` will preserve the type of
+# ``obj``. However, the **returned** value of inplace operations will be a pure
+# tensor:
+
+image = tv_tensors.Image([[[0, 1], [1, 0]]])
+
+new_image = image.add_(1).mul_(2)
+
+# image got transformed in-place and is still a TVTensor Image, but new_image
+# is a Tensor. They share the same underlying data and they're equal, just
+# different classes.
+assert isinstance(image, tv_tensors.Image) +print(image) + +assert isinstance(new_image, torch.Tensor) and not isinstance(new_image, tv_tensors.Image) +assert (new_image == image).all() +assert new_image.data_ptr() == image.data_ptr() diff --git a/hubconf.py b/hubconf.py index f43f922e89c..637827127ca 100644 --- a/hubconf.py +++ b/hubconf.py @@ -1,14 +1,85 @@ # Optional list of dependencies required by the package -dependencies = ['torch'] +dependencies = ["torch"] +from torchvision.models import get_model_weights, get_weight from torchvision.models.alexnet import alexnet -from torchvision.models.densenet import densenet121, densenet169, densenet201, densenet161 +from torchvision.models.convnext import convnext_base, convnext_large, convnext_small, convnext_tiny +from torchvision.models.densenet import densenet121, densenet161, densenet169, densenet201 +from torchvision.models.efficientnet import ( + efficientnet_b0, + efficientnet_b1, + efficientnet_b2, + efficientnet_b3, + efficientnet_b4, + efficientnet_b5, + efficientnet_b6, + efficientnet_b7, + efficientnet_v2_l, + efficientnet_v2_m, + efficientnet_v2_s, +) +from torchvision.models.googlenet import googlenet from torchvision.models.inception import inception_v3 -from torchvision.models.resnet import resnet18, resnet34, resnet50, resnet101, resnet152,\ - resnext50_32x4d, resnext101_32x8d, wide_resnet50_2, wide_resnet101_2 +from torchvision.models.maxvit import maxvit_t +from torchvision.models.mnasnet import mnasnet0_5, mnasnet0_75, mnasnet1_0, mnasnet1_3 +from torchvision.models.mobilenetv2 import mobilenet_v2 +from torchvision.models.mobilenetv3 import mobilenet_v3_large, mobilenet_v3_small +from torchvision.models.optical_flow import raft_large, raft_small +from torchvision.models.regnet import ( + regnet_x_16gf, + regnet_x_1_6gf, + regnet_x_32gf, + regnet_x_3_2gf, + regnet_x_400mf, + regnet_x_800mf, + regnet_x_8gf, + regnet_y_128gf, + regnet_y_16gf, + regnet_y_1_6gf, + regnet_y_32gf, + regnet_y_3_2gf, + regnet_y_400mf, + regnet_y_800mf, + regnet_y_8gf, +) +from torchvision.models.resnet import ( + resnet101, + resnet152, + resnet18, + resnet34, + resnet50, + resnext101_32x8d, + resnext101_64x4d, + resnext50_32x4d, + wide_resnet101_2, + wide_resnet50_2, +) +from torchvision.models.segmentation import ( + deeplabv3_mobilenet_v3_large, + deeplabv3_resnet101, + deeplabv3_resnet50, + fcn_resnet101, + fcn_resnet50, + lraspp_mobilenet_v3_large, +) +from torchvision.models.shufflenetv2 import ( + shufflenet_v2_x0_5, + shufflenet_v2_x1_0, + shufflenet_v2_x1_5, + shufflenet_v2_x2_0, +) from torchvision.models.squeezenet import squeezenet1_0, squeezenet1_1 -from torchvision.models.vgg import vgg11, vgg13, vgg16, vgg19, vgg11_bn, vgg13_bn, vgg16_bn, vgg19_bn -from torchvision.models.segmentation import fcn_resnet101, deeplabv3_resnet101 -from torchvision.models.googlenet import googlenet -from torchvision.models.shufflenetv2 import shufflenet_v2_x0_5, shufflenet_v2_x1_0 -from torchvision.models.mobilenet import mobilenet_v2 +from torchvision.models.swin_transformer import swin_b, swin_s, swin_t, swin_v2_b, swin_v2_s, swin_v2_t +from torchvision.models.vgg import vgg11, vgg11_bn, vgg13, vgg13_bn, vgg16, vgg16_bn, vgg19, vgg19_bn +from torchvision.models.video import ( + mc3_18, + mvit_v1_b, + mvit_v2_s, + r2plus1d_18, + r3d_18, + s3d, + swin3d_b, + swin3d_s, + swin3d_t, +) +from torchvision.models.vision_transformer import vit_b_16, vit_b_32, vit_h_14, vit_l_16, vit_l_32 diff --git a/ios/CMakeLists.txt b/ios/CMakeLists.txt new file mode 100644 
index 00000000000..4201240a427
--- /dev/null
+++ b/ios/CMakeLists.txt
@@ -0,0 +1,23 @@
+cmake_minimum_required(VERSION 3.4.1)
+set(TARGET torchvision_ops)
+project(${TARGET} CXX)
+set(CMAKE_CXX_STANDARD 17)
+set(LIBTORCH_HEADER_ROOT ${LIBTORCH_HEADER_ROOT})
+set(LIBRARY_OUTPUT_PATH ../lib)
+
+file(GLOB VISION_SRCS
+  ../torchvision/csrc/ops/cpu/*.h
+  ../torchvision/csrc/ops/cpu/*.cpp
+  ../torchvision/csrc/ops/*.h
+  ../torchvision/csrc/ops/*.cpp)
+
+add_library(${TARGET} STATIC
+  ${VISION_SRCS}
+)
+
+file(GLOB PYTORCH_HEADERS "${LIBTORCH_HEADER_ROOT}")
+file(GLOB PYTORCH_HEADERS_CSRC "${LIBTORCH_HEADER_ROOT}/torch/csrc/api/include")
+target_include_directories(${TARGET} PRIVATE
+  ${PYTORCH_HEADERS}
+  ${PYTORCH_HEADERS_CSRC}
+)
diff --git a/ios/LibTorchvision.podspec b/ios/LibTorchvision.podspec
new file mode 100644
index 00000000000..b88fb70ac40
--- /dev/null
+++ b/ios/LibTorchvision.podspec
@@ -0,0 +1,24 @@
+pytorch_version = '2.0.0'
+
+Pod::Spec.new do |s|
+    s.name = 'LibTorchvision'
+    s.version = '0.15.1'
+    s.authors = 'PyTorch Team'
+    s.license = { :type => 'BSD' }
+    s.homepage = 'https://github.com/pytorch/vision'
+    s.source = { :http => "https://ossci-ios.s3.amazonaws.com/libtorchvision_ops_ios_#{s.version}.zip" }
+    s.summary = 'The C++ library of TorchVision ops for iOS'
+    s.description = <<-DESC
+        The C++ library of TorchVision ops for iOS.
+        This version (#{s.version}) requires the installation of LibTorch #{pytorch_version} or LibTorch-Lite #{pytorch_version}.
+    DESC
+    s.ios.deployment_target = '12.0'
+    s.vendored_libraries = 'install/lib/*.a'
+    s.user_target_xcconfig = {
+        'VALID_ARCHS' => 'x86_64 arm64',
+        'OTHER_LDFLAGS' => '$(inherited) -force_load "$(PODS_ROOT)/LibTorchvision/install/lib/libtorchvision_ops.a"',
+        'CLANG_CXX_LANGUAGE_STANDARD' => 'c++14',
+        'CLANG_CXX_LIBRARY' => 'libc++'
+    }
+    s.library = ['c++', 'stdc++']
+end
diff --git a/ios/README.md b/ios/README.md
new file mode 100644
index 00000000000..0b50245f1ee
--- /dev/null
+++ b/ios/README.md
@@ -0,0 +1,3 @@
+## Status
+
+The iOS demo of TorchVision is currently unmaintained, untested and likely out-of-date.
diff --git a/ios/VisionTestApp/VisionTestApp.xcodeproj/project.pbxproj b/ios/VisionTestApp/VisionTestApp.xcodeproj/project.pbxproj
new file mode 100644
index 00000000000..1c25d9d350e
--- /dev/null
+++ b/ios/VisionTestApp/VisionTestApp.xcodeproj/project.pbxproj
@@ -0,0 +1,411 @@
+// !$*UTF8*$!
+{ + archiveVersion = 1; + classes = { + }; + objectVersion = 50; + objects = { + +/* Begin PBXBuildFile section */ + 0C12EF7626163B7600B66C86 /* frcnn_mnetv3.pt in Resources */ = {isa = PBXBuildFile; fileRef = 0C12EF7526163B7600B66C86 /* frcnn_mnetv3.pt */; }; + 0CDCAE46274ED8FA006F9077 /* CoreML.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 0CDCAE45274ED8FA006F9077 /* CoreML.framework */; }; + 0CDCAE48274ED902006F9077 /* MetalPerformanceShaders.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 0CDCAE47274ED902006F9077 /* MetalPerformanceShaders.framework */; }; + 0CDCAE4A274ED909006F9077 /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 0CDCAE49274ED909006F9077 /* Accelerate.framework */; }; + 0CEB0AC026151A8800F1F7D5 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 0CEB0ABF26151A8800F1F7D5 /* AppDelegate.m */; }; + 0CEB0AC626151A8800F1F7D5 /* ViewController.mm in Sources */ = {isa = PBXBuildFile; fileRef = 0CEB0AC526151A8800F1F7D5 /* ViewController.mm */; }; + 0CEB0AC926151A8800F1F7D5 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 0CEB0AC726151A8800F1F7D5 /* Main.storyboard */; }; + 0CEB0ACB26151A8900F1F7D5 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 0CEB0ACA26151A8900F1F7D5 /* Assets.xcassets */; }; + 0CEB0ACE26151A8900F1F7D5 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 0CEB0ACC26151A8900F1F7D5 /* LaunchScreen.storyboard */; }; + 0CEB0AD126151A8900F1F7D5 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 0CEB0AD026151A8900F1F7D5 /* main.m */; }; + 0CEB0B3A26152ED900F1F7D5 /* ModelRunner.mm in Sources */ = {isa = PBXBuildFile; fileRef = 0CEB0B3926152ED900F1F7D5 /* ModelRunner.mm */; }; +/* End PBXBuildFile section */ + +/* Begin PBXFileReference section */ + 0C12EF7526163B7600B66C86 /* frcnn_mnetv3.pt */ = {isa = PBXFileReference; lastKnownFileType = file; path = frcnn_mnetv3.pt; sourceTree = ""; }; + 0CDCAE45274ED8FA006F9077 /* CoreML.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreML.framework; path = System/Library/Frameworks/CoreML.framework; sourceTree = SDKROOT; }; + 0CDCAE47274ED902006F9077 /* MetalPerformanceShaders.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = MetalPerformanceShaders.framework; path = System/Library/Frameworks/MetalPerformanceShaders.framework; sourceTree = SDKROOT; }; + 0CDCAE49274ED909006F9077 /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = System/Library/Frameworks/Accelerate.framework; sourceTree = SDKROOT; }; + 0CEB0ABB26151A8800F1F7D5 /* VisionTestApp.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = VisionTestApp.app; sourceTree = BUILT_PRODUCTS_DIR; }; + 0CEB0ABE26151A8800F1F7D5 /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = ""; }; + 0CEB0ABF26151A8800F1F7D5 /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; }; + 0CEB0AC426151A8800F1F7D5 /* ViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ViewController.h; sourceTree = ""; }; + 0CEB0AC526151A8800F1F7D5 /* ViewController.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = ViewController.mm; sourceTree = ""; }; + 0CEB0AC826151A8800F1F7D5 /* 
Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; + 0CEB0ACA26151A8900F1F7D5 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; + 0CEB0ACD26151A8900F1F7D5 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; + 0CEB0ACF26151A8900F1F7D5 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; + 0CEB0AD026151A8900F1F7D5 /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = ""; }; + 0CEB0B3826152ED900F1F7D5 /* ModelRunner.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ModelRunner.h; sourceTree = ""; }; + 0CEB0B3926152ED900F1F7D5 /* ModelRunner.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = ModelRunner.mm; sourceTree = ""; }; +/* End PBXFileReference section */ + +/* Begin PBXFrameworksBuildPhase section */ + 0CEB0AB826151A8800F1F7D5 /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + 0CDCAE4A274ED909006F9077 /* Accelerate.framework in Frameworks */, + 0CDCAE48274ED902006F9077 /* MetalPerformanceShaders.framework in Frameworks */, + 0CDCAE46274ED8FA006F9077 /* CoreML.framework in Frameworks */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXFrameworksBuildPhase section */ + +/* Begin PBXGroup section */ + 0C12EF6F26163A4C00B66C86 /* Frameworks */ = { + isa = PBXGroup; + children = ( + 0CDCAE49274ED909006F9077 /* Accelerate.framework */, + 0CDCAE47274ED902006F9077 /* MetalPerformanceShaders.framework */, + 0CDCAE45274ED8FA006F9077 /* CoreML.framework */, + ); + name = Frameworks; + sourceTree = ""; + }; + 0CEB0AB226151A8800F1F7D5 = { + isa = PBXGroup; + children = ( + 0CEB0ABD26151A8800F1F7D5 /* VisionTestApp */, + 0CEB0ABC26151A8800F1F7D5 /* Products */, + 0C12EF6F26163A4C00B66C86 /* Frameworks */, + ); + sourceTree = ""; + }; + 0CEB0ABC26151A8800F1F7D5 /* Products */ = { + isa = PBXGroup; + children = ( + 0CEB0ABB26151A8800F1F7D5 /* VisionTestApp.app */, + ); + name = Products; + sourceTree = ""; + }; + 0CEB0ABD26151A8800F1F7D5 /* VisionTestApp */ = { + isa = PBXGroup; + children = ( + 0CEB0B3826152ED900F1F7D5 /* ModelRunner.h */, + 0CEB0B3926152ED900F1F7D5 /* ModelRunner.mm */, + 0CEB0ABE26151A8800F1F7D5 /* AppDelegate.h */, + 0CEB0ABF26151A8800F1F7D5 /* AppDelegate.m */, + 0CEB0AC426151A8800F1F7D5 /* ViewController.h */, + 0CEB0AC526151A8800F1F7D5 /* ViewController.mm */, + 0CEB0AC726151A8800F1F7D5 /* Main.storyboard */, + 0CEB0ACA26151A8900F1F7D5 /* Assets.xcassets */, + 0CEB0ACC26151A8900F1F7D5 /* LaunchScreen.storyboard */, + 0CEB0ACF26151A8900F1F7D5 /* Info.plist */, + 0CEB0AD026151A8900F1F7D5 /* main.m */, + 0C12EF7526163B7600B66C86 /* frcnn_mnetv3.pt */, + ); + path = VisionTestApp; + sourceTree = ""; + }; +/* End PBXGroup section */ + +/* Begin PBXNativeTarget section */ + 0CEB0ABA26151A8800F1F7D5 /* VisionTestApp */ = { + isa = PBXNativeTarget; + buildConfigurationList = 0CEB0AEA26151A8900F1F7D5 /* Build configuration list for PBXNativeTarget "VisionTestApp" */; + buildPhases = ( + 0CEB0AB726151A8800F1F7D5 /* Sources */, + 0CEB0AB826151A8800F1F7D5 /* Frameworks */, + 0CEB0AB926151A8800F1F7D5 /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + ); + name = 
VisionTestApp; + productName = VisionTestApp; + productReference = 0CEB0ABB26151A8800F1F7D5 /* VisionTestApp.app */; + productType = "com.apple.product-type.application"; + }; +/* End PBXNativeTarget section */ + +/* Begin PBXProject section */ + 0CEB0AB326151A8800F1F7D5 /* Project object */ = { + isa = PBXProject; + attributes = { + LastUpgradeCheck = 1240; + TargetAttributes = { + 0CEB0ABA26151A8800F1F7D5 = { + CreatedOnToolsVersion = 12.4; + }; + }; + }; + buildConfigurationList = 0CEB0AB626151A8800F1F7D5 /* Build configuration list for PBXProject "VisionTestApp" */; + compatibilityVersion = "Xcode 9.3"; + developmentRegion = en; + hasScannedForEncodings = 0; + knownRegions = ( + en, + Base, + ); + mainGroup = 0CEB0AB226151A8800F1F7D5; + productRefGroup = 0CEB0ABC26151A8800F1F7D5 /* Products */; + projectDirPath = ""; + projectRoot = ""; + targets = ( + 0CEB0ABA26151A8800F1F7D5 /* VisionTestApp */, + ); + }; +/* End PBXProject section */ + +/* Begin PBXResourcesBuildPhase section */ + 0CEB0AB926151A8800F1F7D5 /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 0CEB0ACE26151A8900F1F7D5 /* LaunchScreen.storyboard in Resources */, + 0C12EF7626163B7600B66C86 /* frcnn_mnetv3.pt in Resources */, + 0CEB0ACB26151A8900F1F7D5 /* Assets.xcassets in Resources */, + 0CEB0AC926151A8800F1F7D5 /* Main.storyboard in Resources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXResourcesBuildPhase section */ + +/* Begin PBXSourcesBuildPhase section */ + 0CEB0AB726151A8800F1F7D5 /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 0CEB0AC626151A8800F1F7D5 /* ViewController.mm in Sources */, + 0CEB0AC026151A8800F1F7D5 /* AppDelegate.m in Sources */, + 0CEB0AD126151A8900F1F7D5 /* main.m in Sources */, + 0CEB0B3A26152ED900F1F7D5 /* ModelRunner.mm in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXSourcesBuildPhase section */ + +/* Begin PBXVariantGroup section */ + 0CEB0AC726151A8800F1F7D5 /* Main.storyboard */ = { + isa = PBXVariantGroup; + children = ( + 0CEB0AC826151A8800F1F7D5 /* Base */, + ); + name = Main.storyboard; + sourceTree = ""; + }; + 0CEB0ACC26151A8900F1F7D5 /* LaunchScreen.storyboard */ = { + isa = PBXVariantGroup; + children = ( + 0CEB0ACD26151A8900F1F7D5 /* Base */, + ); + name = LaunchScreen.storyboard; + sourceTree = ""; + }; +/* End PBXVariantGroup section */ + +/* Begin XCBuildConfiguration section */ + 0CEB0AE826151A8900F1F7D5 /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + 
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = dwarf; + ENABLE_BITCODE = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_TESTABILITY = YES; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_DYNAMIC_NO_PIC = NO; + GCC_NO_COMMON_BLOCKS = YES; + GCC_OPTIMIZATION_LEVEL = 0; + GCC_PREPROCESSOR_DEFINITIONS = ( + "DEBUG=1", + "$(inherited)", + ); + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + HEADER_SEARCH_PATHS = ""; + IPHONEOS_DEPLOYMENT_TARGET = 13.0; + LIBRARY_SEARCH_PATHS = ""; + MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; + MTL_FAST_MATH = YES; + ONLY_ACTIVE_ARCH = YES; + OTHER_LDFLAGS = ""; + SDKROOT = iphoneos; + }; + name = Debug; + }; + 0CEB0AE926151A8900F1F7D5 /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + ENABLE_BITCODE = NO; + ENABLE_NS_ASSERTIONS = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + HEADER_SEARCH_PATHS = ""; + IPHONEOS_DEPLOYMENT_TARGET = 13.0; + LIBRARY_SEARCH_PATHS = ""; + MTL_ENABLE_DEBUG_INFO = NO; + MTL_FAST_MATH = YES; + OTHER_LDFLAGS = ""; + SDKROOT = iphoneos; + VALIDATE_PRODUCT = YES; + }; + name = Release; + }; + 0CEB0AEB26151A8900F1F7D5 /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CODE_SIGN_STYLE = Automatic; + ENABLE_BITCODE = NO; + HEADER_SEARCH_PATHS = ( + "$(inherited)", + "$(PROJECT_DIR)/install/include", + ); + INFOPLIST_FILE = VisionTestApp/Info.plist; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + 
"@executable_path/Frameworks", + ); + LIBRARY_SEARCH_PATHS = ( + "$(inherited)", + "$(PROJECT_DIR)/VisionTestApp", + "$(PROJECT_DIR)", + "$(PROJECT_DIR)/install/lib", + ); + OTHER_LDFLAGS = ( + "$(inherited)", + "-ObjC", + "-all_load", + ); + PRODUCT_BUNDLE_IDENTIFIER = com.pytorch.ios.VisionTestApp.VisionTestApp; + PRODUCT_NAME = "$(TARGET_NAME)"; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Debug; + }; + 0CEB0AEC26151A8900F1F7D5 /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CODE_SIGN_STYLE = Automatic; + ENABLE_BITCODE = NO; + HEADER_SEARCH_PATHS = ( + "$(inherited)", + "$(PROJECT_DIR)/install/include", + ); + INFOPLIST_FILE = VisionTestApp/Info.plist; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + LIBRARY_SEARCH_PATHS = ( + "$(inherited)", + "$(PROJECT_DIR)/VisionTestApp", + "$(PROJECT_DIR)", + "$(PROJECT_DIR)/install/lib", + ); + OTHER_LDFLAGS = ( + "$(inherited)", + "-ObjC", + "-all_load", + ); + PRODUCT_BUNDLE_IDENTIFIER = com.pytorch.ios.VisionTestApp.VisionTestApp; + PRODUCT_NAME = "$(TARGET_NAME)"; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Release; + }; +/* End XCBuildConfiguration section */ + +/* Begin XCConfigurationList section */ + 0CEB0AB626151A8800F1F7D5 /* Build configuration list for PBXProject "VisionTestApp" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 0CEB0AE826151A8900F1F7D5 /* Debug */, + 0CEB0AE926151A8900F1F7D5 /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + 0CEB0AEA26151A8900F1F7D5 /* Build configuration list for PBXNativeTarget "VisionTestApp" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 0CEB0AEB26151A8900F1F7D5 /* Debug */, + 0CEB0AEC26151A8900F1F7D5 /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; +/* End XCConfigurationList section */ + }; + rootObject = 0CEB0AB326151A8800F1F7D5 /* Project object */; +} diff --git a/ios/VisionTestApp/VisionTestApp.xcodeproj/project.xcworkspace/contents.xcworkspacedata b/ios/VisionTestApp/VisionTestApp.xcodeproj/project.xcworkspace/contents.xcworkspacedata new file mode 100644 index 00000000000..919434a6254 --- /dev/null +++ b/ios/VisionTestApp/VisionTestApp.xcodeproj/project.xcworkspace/contents.xcworkspacedata @@ -0,0 +1,7 @@ + + + + + diff --git a/ios/VisionTestApp/VisionTestApp.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist b/ios/VisionTestApp/VisionTestApp.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist new file mode 100644 index 00000000000..18d981003d6 --- /dev/null +++ b/ios/VisionTestApp/VisionTestApp.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist @@ -0,0 +1,8 @@ + + + + + IDEDidComputeMac32BitWarning + + + diff --git a/ios/VisionTestApp/VisionTestApp/AppDelegate.h b/ios/VisionTestApp/VisionTestApp/AppDelegate.h new file mode 100644 index 00000000000..27716f4b6ab --- /dev/null +++ b/ios/VisionTestApp/VisionTestApp/AppDelegate.h @@ -0,0 +1,7 @@ +#import + +@interface AppDelegate : UIResponder + +@property(strong, nonatomic) UIWindow* window; + +@end diff --git a/ios/VisionTestApp/VisionTestApp/AppDelegate.m b/ios/VisionTestApp/VisionTestApp/AppDelegate.m new file mode 100644 index 00000000000..a20d3987c80 --- /dev/null +++ b/ios/VisionTestApp/VisionTestApp/AppDelegate.m @@ -0,0 +1,44 @@ + +#import "AppDelegate.h" + +@interface 
AppDelegate () + +@end + +@implementation AppDelegate + + +- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions { + // Override point for customization after application launch. + return YES; +} + + +- (void)applicationWillResignActive:(UIApplication *)application { + // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. + // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game. +} + + +- (void)applicationDidEnterBackground:(UIApplication *)application { + // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. + // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. +} + + +- (void)applicationWillEnterForeground:(UIApplication *)application { + // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background. +} + + +- (void)applicationDidBecomeActive:(UIApplication *)application { + // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. +} + + +- (void)applicationWillTerminate:(UIApplication *)application { + // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. 
+} + + +@end diff --git a/ios/VisionTestApp/VisionTestApp/Assets.xcassets/AccentColor.colorset/Contents.json b/ios/VisionTestApp/VisionTestApp/Assets.xcassets/AccentColor.colorset/Contents.json new file mode 100644 index 00000000000..eb878970081 --- /dev/null +++ b/ios/VisionTestApp/VisionTestApp/Assets.xcassets/AccentColor.colorset/Contents.json @@ -0,0 +1,11 @@ +{ + "colors" : [ + { + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/ios/VisionTestApp/VisionTestApp/Assets.xcassets/AppIcon.appiconset/Contents.json b/ios/VisionTestApp/VisionTestApp/Assets.xcassets/AppIcon.appiconset/Contents.json new file mode 100644 index 00000000000..9221b9bb1a3 --- /dev/null +++ b/ios/VisionTestApp/VisionTestApp/Assets.xcassets/AppIcon.appiconset/Contents.json @@ -0,0 +1,98 @@ +{ + "images" : [ + { + "idiom" : "iphone", + "scale" : "2x", + "size" : "20x20" + }, + { + "idiom" : "iphone", + "scale" : "3x", + "size" : "20x20" + }, + { + "idiom" : "iphone", + "scale" : "2x", + "size" : "29x29" + }, + { + "idiom" : "iphone", + "scale" : "3x", + "size" : "29x29" + }, + { + "idiom" : "iphone", + "scale" : "2x", + "size" : "40x40" + }, + { + "idiom" : "iphone", + "scale" : "3x", + "size" : "40x40" + }, + { + "idiom" : "iphone", + "scale" : "2x", + "size" : "60x60" + }, + { + "idiom" : "iphone", + "scale" : "3x", + "size" : "60x60" + }, + { + "idiom" : "ipad", + "scale" : "1x", + "size" : "20x20" + }, + { + "idiom" : "ipad", + "scale" : "2x", + "size" : "20x20" + }, + { + "idiom" : "ipad", + "scale" : "1x", + "size" : "29x29" + }, + { + "idiom" : "ipad", + "scale" : "2x", + "size" : "29x29" + }, + { + "idiom" : "ipad", + "scale" : "1x", + "size" : "40x40" + }, + { + "idiom" : "ipad", + "scale" : "2x", + "size" : "40x40" + }, + { + "idiom" : "ipad", + "scale" : "1x", + "size" : "76x76" + }, + { + "idiom" : "ipad", + "scale" : "2x", + "size" : "76x76" + }, + { + "idiom" : "ipad", + "scale" : "2x", + "size" : "83.5x83.5" + }, + { + "idiom" : "ios-marketing", + "scale" : "1x", + "size" : "1024x1024" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/ios/VisionTestApp/VisionTestApp/Assets.xcassets/Contents.json b/ios/VisionTestApp/VisionTestApp/Assets.xcassets/Contents.json new file mode 100644 index 00000000000..73c00596a7f --- /dev/null +++ b/ios/VisionTestApp/VisionTestApp/Assets.xcassets/Contents.json @@ -0,0 +1,6 @@ +{ + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/ios/VisionTestApp/VisionTestApp/Base.lproj/LaunchScreen.storyboard b/ios/VisionTestApp/VisionTestApp/Base.lproj/LaunchScreen.storyboard new file mode 100644 index 00000000000..0b64f641701 --- /dev/null +++ b/ios/VisionTestApp/VisionTestApp/Base.lproj/LaunchScreen.storyboard @@ -0,0 +1,33 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/ios/VisionTestApp/VisionTestApp/Base.lproj/Main.storyboard b/ios/VisionTestApp/VisionTestApp/Base.lproj/Main.storyboard new file mode 100644 index 00000000000..b20f277b049 --- /dev/null +++ b/ios/VisionTestApp/VisionTestApp/Base.lproj/Main.storyboard @@ -0,0 +1,79 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/ios/VisionTestApp/VisionTestApp/Info.plist b/ios/VisionTestApp/VisionTestApp/Info.plist new file mode 100644 index 00000000000..5bae3d0ded5 --- /dev/null +++ b/ios/VisionTestApp/VisionTestApp/Info.plist @@ -0,0 +1,45 @@ + + + + + 
<key>CFBundleDevelopmentRegion</key>
+	<string>$(DEVELOPMENT_LANGUAGE)</string>
+	<key>CFBundleExecutable</key>
+	<string>$(EXECUTABLE_NAME)</string>
+	<key>CFBundleIdentifier</key>
+	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
+	<key>CFBundleInfoDictionaryVersion</key>
+	<string>6.0</string>
+	<key>CFBundleName</key>
+	<string>$(PRODUCT_NAME)</string>
+	<key>CFBundlePackageType</key>
+	<string>$(PRODUCT_BUNDLE_PACKAGE_TYPE)</string>
+	<key>CFBundleShortVersionString</key>
+	<string>1.0</string>
+	<key>CFBundleVersion</key>
+	<string>1</string>
+	<key>LSRequiresIPhoneOS</key>
+	<true/>
+	<key>UIApplicationSupportsIndirectInputEvents</key>
+	<true/>
+	<key>UILaunchStoryboardName</key>
+	<string>LaunchScreen</string>
+	<key>UIMainStoryboardFile</key>
+	<string>Main</string>
+	<key>UIRequiredDeviceCapabilities</key>
+	<array>
+		<string>armv7</string>
+	</array>
+	<key>UISupportedInterfaceOrientations</key>
+	<array>
+		<string>UIInterfaceOrientationPortrait</string>
+	</array>
+	<key>UISupportedInterfaceOrientations~ipad</key>
+	<array>
+		<string>UIInterfaceOrientationPortrait</string>
+		<string>UIInterfaceOrientationPortraitUpsideDown</string>
+		<string>UIInterfaceOrientationLandscapeLeft</string>
+		<string>UIInterfaceOrientationLandscapeRight</string>
+	</array>
+</dict>
+</plist>
diff --git a/ios/VisionTestApp/VisionTestApp/ModelRunner.h b/ios/VisionTestApp/VisionTestApp/ModelRunner.h
new file mode 100644
index 00000000000..cfef3a3f347
--- /dev/null
+++ b/ios/VisionTestApp/VisionTestApp/ModelRunner.h
@@ -0,0 +1,13 @@
+
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface ModelRunner : NSObject
+
++ (NSString*)run;
++ (BOOL)setUp;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/ios/VisionTestApp/VisionTestApp/ModelRunner.mm b/ios/VisionTestApp/VisionTestApp/ModelRunner.mm
new file mode 100644
index 00000000000..dea3822df26
--- /dev/null
+++ b/ios/VisionTestApp/VisionTestApp/ModelRunner.mm
@@ -0,0 +1,73 @@
+
+#import "ModelRunner.h"
+#include <string>
+#include <vector>
+#include "ATen/ATen.h"
+#include "caffe2/core/timer.h"
+#include "caffe2/utils/string_utils.h"
+#include "torch/csrc/autograd/grad_mode.h"
+#include "torch/csrc/jit/serialization/import.h"
+#include "torch/script.h"
+
+static NSString *model_name = @"frcnn_mnetv3";
+static NSString *model_suffix = @"pt";
+static NSString *model_path = nil;
+static int warmup = 5;
+static int iter = 20;
+
+@implementation ModelRunner
+
++ (NSString *)run {
+  std::vector<std::string> logs;
+#define UI_LOG(fmt, ...)                                            \
+  {                                                                 \
+    NSString* log = [NSString stringWithFormat:fmt, __VA_ARGS__];   \
+    NSLog(@"%@", log);                                              \
+    logs.push_back(log.UTF8String);                                 \
+  }
+
+  auto module = torch::jit::load(std::string(model_path.UTF8String));
+  module.eval();
+
+  std::vector<torch::jit::IValue> inputs;
+  auto img_tensor = torch::ones({3, 224, 224}, at::ScalarType::Float);
+  inputs.push_back(c10::List<at::Tensor>(img_tensor));
+  torch::autograd::AutoGradMode guard(false);
+  at::InferenceMode nonVarTypeModeGuard(true);
+
+  UI_LOG(@"Running warmup runs...", nil);
+  for (int i = 0; i < warmup; ++i) {
+    module.forward(inputs);
+  }
+  UI_LOG(@"Warmup runs finished.\nMain runs...", nil);
+  caffe2::Timer timer;
+  auto millis = timer.MilliSeconds();
+  for (int i = 0; i < iter; ++i) {
+    module.forward(inputs);
+  }
+  millis = timer.MilliSeconds();
+  UI_LOG(@"Main run finished.\nMilliseconds per iter: %.3f", millis / iter, nil);
+  UI_LOG(@"Iters per second: %.3f", 1000.0 * iter / millis, nil);
+  UI_LOG(@"Done.", nil);
+
+  std::cout << module.forward(inputs) << std::endl;
+
+  NSString* log_text = @"";
+  for (auto& msg : logs) {
+    log_text = [log_text stringByAppendingString:[NSString stringWithUTF8String:msg.c_str()]];
+    log_text = [log_text stringByAppendingString:@"\n"];
+  }
+  return log_text;
+}
+
++ (BOOL)setUp {
+  model_path = [[NSBundle mainBundle] pathForResource:model_name ofType:model_suffix];
+  if (![[NSFileManager defaultManager] fileExistsAtPath:model_path]) {
+    NSLog(@"Invalid model path!");
+    model_path = nil;
+    return NO;
+  }
+  return YES;
+}
+
+@end
diff --git a/ios/VisionTestApp/VisionTestApp/ViewController.h b/ios/VisionTestApp/VisionTestApp/ViewController.h
new file mode 100644
index 00000000000..d29a133d373
--- /dev/null
+++ b/ios/VisionTestApp/VisionTestApp/ViewController.h
@@ -0,0 +1,6 @@
+
+#import <UIKit/UIKit.h>
+
+@interface ViewController : UIViewController
+
+@end
diff --git a/ios/VisionTestApp/VisionTestApp/ViewController.mm b/ios/VisionTestApp/VisionTestApp/ViewController.mm
new file mode 100644
index 00000000000..900005d3990
--- /dev/null
+++ b/ios/VisionTestApp/VisionTestApp/ViewController.mm
@@ -0,0 +1,44 @@
+
+#import "ViewController.h"
+#include
+#import "ModelRunner.h"
+
+@interface ViewController ()
+@property (weak, nonatomic) IBOutlet UITextView *textView;
+@end
+
+static NSString const *config_error_msg = @"Wrong model configurations... Please fix and click \"Redo\"";
+
+@implementation ViewController
+
+- (void)viewDidLoad {
+  [super viewDidLoad];
+  if ([ModelRunner setUp]) {
+    [self testModel];
+  } else {
+    self.textView.text = [config_error_msg copy];
+  }
+}
+
+
+- (IBAction)rerun:(id)sender {
+  self.textView.text = @"";
+  if (![ModelRunner setUp]) {
+    self.textView.text = [config_error_msg copy];
+    return;
+  }
+  dispatch_async(dispatch_get_main_queue(), ^{
+    [self testModel];
+  });
+}
+
+- (void)testModel {
+  dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
+    NSString *text = [ModelRunner run];
+    dispatch_async(dispatch_get_main_queue(), ^{
+      self.textView.text = [self.textView.text stringByAppendingString:text];
+    });
+  });
+}
+
+@end
diff --git a/ios/VisionTestApp/VisionTestApp/main.m b/ios/VisionTestApp/VisionTestApp/main.m
new file mode 100644
index 00000000000..1a8b57c33bc
--- /dev/null
+++ b/ios/VisionTestApp/VisionTestApp/main.m
@@ -0,0 +1,18 @@
+//
+//  main.m
+//  VisionTestApp
+//
+//  Created by Yuchen Huang on 3/31/21.
+//
+
+#import <UIKit/UIKit.h>
+#import "AppDelegate.h"
+
+int main(int argc, char * argv[]) {
+    NSString * appDelegateClassName;
+    @autoreleasepool {
+        // Setup code that might create autoreleased objects goes here.
+        appDelegateClassName = NSStringFromClass([AppDelegate class]);
+    }
+    return UIApplicationMain(argc, argv, nil, appDelegateClassName);
+}
diff --git a/ios/VisionTestApp/clean.sh b/ios/VisionTestApp/clean.sh
new file mode 100755
index 00000000000..20bedc784d9
--- /dev/null
+++ b/ios/VisionTestApp/clean.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+set -ex -o pipefail
+
+TEST_APP_PATH=$(dirname $(realpath $0))
+cd ${TEST_APP_PATH}
+
+rm -rf ./install
+rm ./VisionTestApp/*.pt
diff --git a/ios/VisionTestApp/make_assets.py b/ios/VisionTestApp/make_assets.py
new file mode 100644
index 00000000000..f14223e6a42
--- /dev/null
+++ b/ios/VisionTestApp/make_assets.py
@@ -0,0 +1,21 @@
+import torch
+from torch.utils.mobile_optimizer import optimize_for_mobile
+from torchvision.models.detection import (
+    fasterrcnn_mobilenet_v3_large_320_fpn,
+    FasterRCNN_MobileNet_V3_Large_320_FPN_Weights,
+)
+
+print(torch.__version__)
+
+model = fasterrcnn_mobilenet_v3_large_320_fpn(
+    weights=FasterRCNN_MobileNet_V3_Large_320_FPN_Weights.DEFAULT,
+    box_score_thresh=0.7,
+    rpn_post_nms_top_n_test=100,
+    rpn_score_thresh=0.4,
+    rpn_pre_nms_top_n_test=150,
+)
+
+model.eval()
+script_model = torch.jit.script(model)
+opt_script_model = optimize_for_mobile(script_model)
+opt_script_model.save("VisionTestApp/frcnn_mnetv3.pt")
diff --git a/ios/VisionTestApp/setup.sh b/ios/VisionTestApp/setup.sh
new file mode 100755
index 00000000000..3b3520d7052
--- /dev/null
+++ b/ios/VisionTestApp/setup.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+set -ex -o pipefail
+
+echo ""
+echo "DIR: $(pwd)"
+
+TEST_APP_PATH=$(dirname $(realpath $0))
+cd ${TEST_APP_PATH}
+
+PYTORCH_IOS_NIGHTLY_NAME=libtorch_ios_nightly_build.zip
+VISION_IOS_NIGHTLY_NAME=libtorchvision_ops_ios_nightly_build.zip
+
+echo "Downloading torch libs and vision libs..."
+wget https://ossci-ios-build.s3.amazonaws.com/${PYTORCH_IOS_NIGHTLY_NAME}
+wget https://ossci-ios-build.s3.amazonaws.com/${VISION_IOS_NIGHTLY_NAME}
+
+mkdir -p ./library/torch
+mkdir -p ./library/vision
+
+echo "Unzipping torch libs and vision libs..."
+unzip -d ./library/torch ./${PYTORCH_IOS_NIGHTLY_NAME}
+unzip -d ./library/vision ./${VISION_IOS_NIGHTLY_NAME}
+
+cp ./library/vision/install/lib/*.a ./library/torch/install/lib
+cp -r ./library/torch/install .
+
+rm -rf ./library
+rm -rf ./*.zip
+
+echo "Generating the vision model..."
+python ./make_assets.py
+
+echo "Finished project setup."
diff --git a/ios/build_ios.sh b/ios/build_ios.sh
new file mode 100755
index 00000000000..81ac2f2a218
--- /dev/null
+++ b/ios/build_ios.sh
@@ -0,0 +1,30 @@
+#!/bin/bash
+
+set -ex -o pipefail
+echo ""
+echo "DIR: $(pwd)"
+VISION_IOS_ROOT=$(dirname $(realpath $0))
+
+if ! [ -n "${LIBTORCH_HEADER_ROOT:-}" ]; then
+  echo "Missing parameter: LIBTORCH_HEADER_ROOT"
+  exit 1
+fi
+
+if [ -n "${IOS_ARCH:-}" ]; then
+  if [ "${IOS_ARCH:-}" == "arm64" ]; then
+    IOS_PLATFORM="OS"
+  elif [ "${IOS_ARCH:-}" == "x86_64" ]; then
+    IOS_PLATFORM="SIMULATOR"
+  fi
+fi
+
+mkdir -p ${VISION_IOS_ROOT}/lib
+mkdir -p ${VISION_IOS_ROOT}/build
+cd ${VISION_IOS_ROOT}/build
+cmake -DLIBTORCH_HEADER_ROOT=${LIBTORCH_HEADER_ROOT} \
+  -DCMAKE_TOOLCHAIN_FILE=${VISION_IOS_ROOT}/../cmake/iOS.cmake \
+  -DIOS_ARCH=${IOS_ARCH} \
+  -DIOS_PLATFORM=${IOS_PLATFORM} \
+  ..
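For reference, a quick way to sanity-check the exported asset before bundling it into the app is to load it with the same TorchScript runtime on the desktop. The following is a minimal illustrative sketch (not part of the patch): it assumes `make_assets.py` above has already been run from `ios/VisionTestApp`, so the file name `frcnn_mnetv3.pt` comes from that script, and the warmup/iteration counts mirror `ModelRunner.mm`.

```python
import time

import torch

# Load the TorchScript file that make_assets.py saved (path assumed from that script).
model = torch.jit.load("VisionTestApp/frcnn_mnetv3.pt")
model.eval()

# Scripted detection models take a list of 3xHxW tensors, matching the
# c10::List<at::Tensor> input that ModelRunner.mm builds.
inputs = [torch.ones(3, 224, 224)]

warmup, iters = 5, 20  # same counts as ModelRunner.mm
with torch.inference_mode():
    for _ in range(warmup):
        model(inputs)
    start = time.perf_counter()
    for _ in range(iters):
        model(inputs)
    elapsed_ms = (time.perf_counter() - start) * 1000.0

print(f"Milliseconds per iter: {elapsed_ms / iters:.3f}")
```

Running this on the desktop first separates model-export problems from app-packaging problems: if the timing loop works here but the app fails, the issue is in the Xcode setup rather than in the scripted model.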
+make
+rm -rf ${VISION_IOS_ROOT}/build
diff --git a/maintainer_guide.md b/maintainer_guide.md
new file mode 100644
index 00000000000..3d66a701be1
--- /dev/null
+++ b/maintainer_guide.md
@@ -0,0 +1,76 @@
+# Torchvision maintainers guide
+
+This document aims at documenting user-facing policies / principles used when
+developing and maintaining torchvision. Other maintainer info (e.g. release
+process) can be found in the meta-internal wiki.
+
+### What is public and what is private?
+
+For the Python API, torchvision largely follows the [PyTorch
+policy](https://github.com/pytorch/pytorch/wiki/Public-API-definition-and-documentation)
+which is consistent with other major packages
+([numpy](https://numpy.org/neps/nep-0023-backwards-compatibility.html),
+[scikit-learn](https://scikit-learn.org/dev/glossary.html#term-API) etc.).
+We recognize that this policy is somewhat imperfect for some edge cases, and that
+it's difficult to come up with an accurate technical definition. In broad terms,
+which are usually well understood by users, the policy is that:
+
+- modules that can be accessed without a leading underscore are public
+- objects in a public file that don't have a leading underscore are public
+- class attributes are public iff they have no leading underscore
+- the rest of the modules / objects / class attributes are considered private
+
+The public API has backward-compatibility (BC) guarantees defined in our
+deprecation policy (see below). The private API has no BC guarantees.
+
+For C++, code is private. For Meta employees: if a C++ change breaks fbcode, fix
+fbcode or revert the change. We should be careful about models running in
+production and relying on torchvision ops.
+
+The `test` folder is not importable and is **private.** Even meta-internal
+projects should *not* rely on it (it has happened in the past and is now
+programmatically impossible).
+
+The training references do not have BC guarantees. Breaking changes are
+possible, but we should make sure that the tutorials are still running properly,
+and that their intended narrative is preserved (by e.g. checking outputs,
+etc.).
+
+The rest of the folders (build, android, ios, etc.) are private and have no BC
+guarantees.
+
+### Deprecation policy.
+
+Because they're disruptive, **deprecations should only be used sparingly**.
+
+We largely follow the [PyTorch
+policy](https://github.com/pytorch/pytorch/wiki/PyTorch's-Python-Frontend-Backward-and-Forward-Compatibility-Policy):
+breaking changes require a deprecation period of at least 2 versions.
+
+Deprecations should clearly indicate their deadline in the docs and warning
+messages (see the short sketch further below). Avoid not committing to a
+deadline, or keeping deprecated APIs for too long: it gives no incentive for
+users to update their code, sends conflicting messages ("why was this API
+removed while this other one is still around?"), and accumulates debt in the
+project.
+
+### Should this attribute be public? Should this function be private?
+
+When designing an API it’s not always obvious what should be exposed as public,
+and what should be kept as a private implementation detail. The following
+guidelines can be useful:
+
+* Functional consistency throughout the library is a top priority, for users and
+  developers’ sake. In doubt and unless it’s clearly wrong, expose what other
+  similar classes expose.
+* Think really hard about the users and their use-cases, and try to expose what
+  they would need to address those use-cases. Aggressively keep everything else
+  private.
Remember that the “private -> public” direction is way smoother than + the “public -> private” one: in doubt, keep it private. +* When thinking about use-cases, the general API motto applies: make what’s + simple and common easy, and make what’s complex possible (80% / 20% rule). + There might be a ~1% left that’s not addressed: that’s OK. Also, **make what’s + wrong very hard**, if not impossible. + +As a good practice, always create new files and even classes with a leading +underscore in their name. This way, everything is private by default and the +only public surface is explicitly present in an `__init__.py` file. diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 00000000000..d8ab11d0d21 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,150 @@ +[mypy] + +files = torchvision +show_error_codes = True +pretty = True +allow_redefinition = True +no_implicit_optional = True +warn_redundant_casts = True + +[mypy-torchvision.prototype.datapoints.*] + +; untyped definitions and calls +disallow_untyped_defs = True + +; None and Optional handling +no_implicit_optional = True + +; warnings +warn_unused_ignores = True + +; miscellaneous strictness flags +allow_redefinition = True + +[mypy-torchvision.prototype.transforms.*] + +ignore_errors = True + +[mypy-torchvision.prototype.datasets.*] + +ignore_errors = True + +[mypy-torchvision.io.image.*] + +ignore_errors = True + +[mypy-torchvision.io.video.*] + +ignore_errors = True + +[mypy-torchvision.io.video_reader] + +ignore_errors = True + +[mypy-torchvision.models.densenet.*] + +ignore_errors=True + +[mypy-torchvision.models.maxvit.*] + +ignore_errors=True + +[mypy-torchvision.models.detection.anchor_utils] + +ignore_errors = True + +[mypy-torchvision.models.detection.transform] + +ignore_errors = True + +[mypy-torchvision.models.detection.roi_heads] + +ignore_errors = True + +[mypy-torchvision.models.detection.faster_rcnn] + +ignore_errors = True + +[mypy-torchvision.models.detection.mask_rcnn] + +ignore_errors = True + +[mypy-torchvision.models.detection.keypoint_rcnn] + +ignore_errors = True + +[mypy-torchvision.models.detection.retinanet] + +ignore_errors = True + +[mypy-torchvision.models.detection.ssd] + +ignore_errors = True + +[mypy-torchvision.models.detection.ssdlite] + +ignore_errors = True + +[mypy-torchvision.models.detection.fcos] + +ignore_errors = True + +[mypy-torchvision.ops.*] + +ignore_errors = True + +[mypy-torchvision.transforms._functional_pil] + +ignore_errors = True + +[mypy-torchvision.transforms.functional.*] + +ignore_errors = True + +[mypy-torchvision.transforms.transforms.*] + +ignore_errors = True + +[mypy-PIL.*] + +ignore_missing_imports = True + +[mypy-numpy.*] + +ignore_missing_imports = True + +[mypy-scipy.*] + +ignore_missing_imports = True + +[mypy-pycocotools.*] + +ignore_missing_imports = True + +[mypy-lmdb.*] + +ignore_missing_imports = True + +[mypy-accimage.*] + +ignore_missing_imports = True + +[mypy-av.*] + +ignore_missing_imports = True + +[mypy-defusedxml.*] + +ignore_missing_imports = True + +[mypy-torchdata.*] + +ignore_missing_imports = True + +[mypy-h5py.*] + +ignore_missing_imports = True + +[mypy-gdown.*] + +ignore_missing_imports = True diff --git a/packaging/README.md b/packaging/README.md deleted file mode 100644 index 7d3c5f7831b..00000000000 --- a/packaging/README.md +++ /dev/null @@ -1,90 +0,0 @@ -# Building torchvision packages for release - -## Anaconda packages - -### Linux - -```bash -nvidia-docker run -it --ipc=host --rm -v $(pwd):/remote soumith/conda-cuda bash -pushd remote/conda - 
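To illustrate the deprecation policy from `maintainer_guide.md` above, a warning that commits to a removal deadline might look like the following minimal sketch; the names `old_helper`/`new_helper` and the version numbers are hypothetical, not actual torchvision APIs.

```python
import warnings


def new_helper(x):
    return 2 * x


def old_helper(x):
    # Hypothetical deprecated shim, kept for the two-version deprecation
    # window required by the policy; the message names both the version that
    # introduced the deprecation and the version that will remove it.
    warnings.warn(
        "old_helper is deprecated since 0.15 and will be removed in 0.17. "
        "Use new_helper instead.",
        DeprecationWarning,
        stacklevel=2,
    )
    return new_helper(x)
```

Naming the removal version in the message, rather than a vague "in a future release", is what gives users a concrete incentive to migrate before the deadline.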
-./build_vision.sh 9.0 -./build_vision.sh 10.0 -./build_vision.sh cpu - -# copy packages over to /remote -# exit docker -# anaconda upload -u pytorch torchvision*.bz2 -``` - -### OSX - -```bash -# create a fresh anaconda environment / install and activate it -conda install -y conda-build anaconda-client -./build_vision.sh cpu - -# copy packages over to /remote -# exit docker -# anaconda upload -u pytorch torchvision*.bz2 -``` - -### Windows - -```bash -# Open `Git Bash` and change dir to `conda` -./build_vision.sh 9.0 -./build_vision.sh 10.0 -./build_vision.sh cpu - -# copy packages to a output directory -# anaconda upload -u pytorch torchvision*.bz2 -``` - -## Wheels - -### Linux - -pushd wheel - -```bash -nvidia-docker run -it --ipc=host --rm -v $(pwd):/remote soumith/manylinux-cuda90:latest bash -cd remote -./linux_manywheel.sh cu90 - -rm -rf /usr/local/cuda* -./linux_manywheel.sh cpu -``` - -```bash -nvidia-docker run -it --ipc=host --rm -v $(pwd):/remote soumith/manylinux-cuda100:latest bash -cd remote -./linux_manywheel.sh cu100 -``` - -wheels are in the folders `cpu`, `cu90`, `cu100`. - -You can upload the `cu90` wheels to twine with `twine upload *.whl`. -Which wheels we upload depends on which wheels PyTorch uploads as default, and right now, it's `cu90`. - -### OSX - -```bash -pushd wheel -./osx_wheel.sh -``` - -### Windows - -```cmd -set PYTORCH_REPO=pytorch - -pushd windows -call build_vision.bat 90 0.3.0 1 -call build_vision.bat 100 0.3.0 1 -call build_vision.bat cpu 0.3.0 1 -``` - -wheels are in the current folder. - -You can upload them to twine with `twine upload *.whl` diff --git a/packaging/build_conda.sh b/packaging/build_conda.sh deleted file mode 100755 index aaddf0710c8..00000000000 --- a/packaging/build_conda.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash -set -ex - -script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" -. "$script_dir/pkg_helpers.bash" - -export BUILD_TYPE=conda -setup_env 0.5.0 -export SOURCE_ROOT_DIR="$PWD" -setup_conda_pytorch_constraint -setup_conda_cudatoolkit_constraint -setup_visual_studio_constraint -conda build $CONDA_CHANNEL_FLAGS -c defaults -c conda-forge --no-anaconda-upload --python "$PYTHON_VERSION" packaging/torchvision diff --git a/packaging/build_wheel.sh b/packaging/build_wheel.sh deleted file mode 100755 index 7d37239563d..00000000000 --- a/packaging/build_wheel.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash -set -ex - -script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" -. "$script_dir/pkg_helpers.bash" - -export BUILD_TYPE=wheel -setup_env 0.5.0 -setup_wheel_python -pip_install numpy pyyaml future ninja -# TODO remove after https://github.com/pytorch/pytorch/pull/27282 gets merged -pip_install six -setup_pip_pytorch_version -python setup.py clean -IS_WHEEL=1 python setup.py bdist_wheel diff --git a/packaging/conda/build_vision.sh b/packaging/conda/build_vision.sh deleted file mode 100755 index 000f314670b..00000000000 --- a/packaging/conda/build_vision.sh +++ /dev/null @@ -1,217 +0,0 @@ -#!/usr/bin/env bash -if [[ -x "/remote/anaconda_token" ]]; then - . 
/remote/anaconda_token || true -fi - -set -ex - -# Function to retry functions that sometimes timeout or have flaky failures -retry () { - $* || (sleep 1 && $*) || (sleep 2 && $*) || (sleep 4 && $*) || (sleep 8 && $*) -} - -# Parse arguments and determmine version -########################################################### -if [[ -n "$DESIRED_CUDA" && -n "$TORCHVISION_BUILD_VERSION" && -n "$TORCHVISION_BUILD_NUMBER" ]]; then - desired_cuda="$DESIRED_CUDA" - build_version="$PYTORCH_BUILD_VERSION" - build_number="$PYTORCH_BUILD_NUMBER" -else - if [ "$#" -ne 3 ]; then - echo "Illegal number of parameters. Pass cuda version, pytorch version, build number" - echo "CUDA version should be Mm with no dot, e.g. '80'" - echo "DESIRED_PYTHON should be M.m, e.g. '2.7'" - exit 1 - fi - - desired_cuda="$1" - build_version="$2" - build_number="$3" -fi -if [[ "$desired_cuda" != cpu ]]; then - desired_cuda="$(echo $desired_cuda | tr -d cuda. )" -fi -echo "Building cuda version $desired_cuda and torchvision version: $build_version build_number: $build_number" - -if [[ "$desired_cuda" == 'cpu' ]]; then - cpu_only=1 - cuver="cpu" -else - # Switch desired_cuda to be M.m to be consistent with other scripts in - # pytorch/builder - export FORCE_CUDA=1 - cuda_nodot="$desired_cuda" - - if [[ ${#cuda_nodot} -eq 2 ]]; then - desired_cuda="${desired_cuda:0:1}.${desired_cuda:1:1}" - elif [[ ${#cuda_nodot} -eq 3 ]]; then - desired_cuda="${desired_cuda:0:2}.${desired_cuda:2:1}" - else - echo "unknown cuda version $cuda_nodot" - exit 1 - fi - - cuver="cu$cuda_nodot" -fi - -export TORCHVISION_BUILD_VERSION=$build_version -export TORCHVISION_BUILD_NUMBER=$build_number - -if [[ -z "$DESIRED_PYTHON" ]]; then - DESIRED_PYTHON=('3.5' '3.6' '3.7') -fi - -SOURCE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )" - -if [[ -z "$WIN_PACKAGE_WORK_DIR" ]]; then - WIN_PACKAGE_WORK_DIR="$(echo $(pwd -W) | tr '/' '\\')\\tmp_conda_$(date +%H%M%S)" -fi - -mkdir -p "$WIN_PACKAGE_WORK_DIR" || true -vision_rootdir="$(realpath ${WIN_PACKAGE_WORK_DIR})/torchvision-src" -git config --system core.longpaths true - -if [[ ! -d "$vision_rootdir" ]]; then - rm -rf "$vision_rootdir" - git clone "https://github.com/pytorch/vision" "$vision_rootdir" - pushd "$vision_rootdir" - git checkout $PYTORCH_BRANCH - popd -fi - -cd "$SOURCE_DIR" - -export tmp_conda="${WIN_PACKAGE_WORK_DIR}\\conda" -export miniconda_exe="${WIN_PACKAGE_WORK_DIR}\\miniconda.exe" -rm -rf "$tmp_conda" -rm -f "$miniconda_exe" -curl -sSk https://repo.continuum.io/miniconda/Miniconda3-latest-Windows-x86_64.exe -o "$miniconda_exe" -"$SOURCE_DIR/install_conda.bat" && rm "$miniconda_exe" -pushd $tmp_conda -export PATH="$(pwd):$(pwd)/Library/usr/bin:$(pwd)/Library/bin:$(pwd)/Scripts:$(pwd)/bin:$PATH" -popd -retry conda install -yq conda-build - -ANACONDA_USER=pytorch-nightly -conda config --set anaconda_upload no - - -export TORCHVISION_PACKAGE_SUFFIX="" -if [[ "$desired_cuda" == 'cpu' ]]; then - export CONDA_CUDATOOLKIT_CONSTRAINT="" - export CONDA_CPUONLY_FEATURE="- cpuonly # [not osx]" - export CUDA_VERSION="None" -else - export CONDA_CPUONLY_FEATURE="" - . 
./switch_cuda_version.sh $desired_cuda - if [[ "$desired_cuda" == "10.1" ]]; then - export CONDA_CUDATOOLKIT_CONSTRAINT="- cudatoolkit >=10.1,<10.2 # [not osx]" - elif [[ "$desired_cuda" == "10.0" ]]; then - export CONDA_CUDATOOLKIT_CONSTRAINT="- cudatoolkit >=10.0,<10.1 # [not osx]" - elif [[ "$desired_cuda" == "9.2" ]]; then - export CONDA_CUDATOOLKIT_CONSTRAINT="- cudatoolkit >=9.2,<9.3 # [not osx]" - elif [[ "$desired_cuda" == "9.0" ]]; then - export CONDA_CUDATOOLKIT_CONSTRAINT="- cudatoolkit >=9.0,<9.1 # [not osx]" - elif [[ "$desired_cuda" == "8.0" ]]; then - export CONDA_CUDATOOLKIT_CONSTRAINT="- cudatoolkit >=8.0,<8.1 # [not osx]" - else - echo "unhandled desired_cuda: $desired_cuda" - exit 1 - fi -fi - -if [[ -z "$PYTORCH_VERSION" ]]; then - export CONDA_CHANNEL_FLAGS="-c pytorch-nightly" - export PYTORCH_VERSION="$(conda search --json 'pytorch[channel=pytorch-nightly]' | \ - python -c "import os, sys, json, re; cuver = '$cuver'; \ - cuver = cuver.replace('cu', 'cuda') if cuver != 'cpu' else cuver; \ - print(re.sub(r'\\+.*$', '', \ - [x['version'] for x in json.load(sys.stdin)['pytorch'] \ - if (x['platform'] == 'darwin' or cuver in x['fn']) \ - and 'py' + os.environ['DESIRED_PYTHON'] in x['fn']][-1]))")" - if [[ -z "$PYTORCH_VERSION" ]]; then - echo "PyTorch version auto detection failed" - echo "No package found for desired_cuda=$desired_cuda and DESIRED_PYTHON=$DESIRED_PYTHON" - exit 1 - fi -else - export CONDA_CHANNEL_FLAGS="-c pytorch -c pytorch-nightly" -fi -if [[ "$desired_cuda" == 'cpu' ]]; then - export CONDA_PYTORCH_BUILD_CONSTRAINT="- pytorch==$PYTORCH_VERSION" - export CONDA_PYTORCH_CONSTRAINT="- pytorch==$PYTORCH_VERSION" -else - export CONDA_PYTORCH_BUILD_CONSTRAINT="- pytorch==${PYTORCH_VERSION}" - export CONDA_PYTORCH_CONSTRAINT="- pytorch==${PYTORCH_VERSION}" -fi - -# Loop through all Python versions to build a package for each -for py_ver in "${DESIRED_PYTHON[@]}"; do - build_string="py${py_ver}_${build_string_suffix}" - folder_tag="${build_string}_$(date +'%Y%m%d')" - - # Create the conda package into this temporary folder. 
This is so we can find - # the package afterwards, as there's no easy way to extract the final filename - # from conda-build - output_folder="out_$folder_tag" - rm -rf "$output_folder" - mkdir "$output_folder" - - export VSTOOLCHAIN_PACKAGE=vs2017 - - # We need to build the compiler activation scripts first on Windows - time VSDEVCMD_ARGS=${VSDEVCMD_ARGS[@]} \ - conda build -c "$ANACONDA_USER" \ - --no-anaconda-upload \ - --output-folder "$output_folder" \ - ../$VSTOOLCHAIN_PACKAGE - - cp ../$VSTOOLCHAIN_PACKAGE/conda_build_config.yaml ../torchvision/conda_build_config.yaml - - conda config --set anaconda_upload no - echo "Calling conda-build at $(date)" - if [[ "$desired_cuda" == "9.2" ]]; then - time CMAKE_ARGS=${CMAKE_ARGS[@]} \ - BUILD_VERSION="$TORCHVISION_BUILD_VERSION" \ - CU_VERSION="$cuver" \ - SOURCE_ROOT_DIR="$vision_rootdir" \ - conda build -c "$ANACONDA_USER" \ - -c defaults \ - -c conda-forge \ - -c "numba/label/dev" \ - --no-anaconda-upload \ - --python "$py_ver" \ - --output-folder "$output_folder" \ - --no-verify \ - --no-test \ - ../torchvision - else - time CMAKE_ARGS=${CMAKE_ARGS[@]} \ - BUILD_VERSION="$TORCHVISION_BUILD_VERSION" \ - CU_VERSION="$cuver" \ - SOURCE_ROOT_DIR="$vision_rootdir" \ - conda build -c "$ANACONDA_USER" \ - -c defaults \ - -c conda-forge \ - --no-anaconda-upload \ - --python "$py_ver" \ - --output-folder "$output_folder" \ - --no-verify \ - --no-test \ - ../torchvision - fi - echo "Finished conda-build at $(date)" - - # Extract the package for testing - ls -lah "$output_folder" - built_package="$(find $output_folder/ -name '*torchvision*.tar.bz2')" - - # Copy the built package to the host machine for persistence before testing - if [[ -n "$PYTORCH_FINAL_PACKAGE_DIR" ]]; then - mkdir -p "$PYTORCH_FINAL_PACKAGE_DIR" || true - cp "$built_package" "$PYTORCH_FINAL_PACKAGE_DIR/" - fi -done - - -set +e diff --git a/packaging/conda/install_conda.bat b/packaging/conda/install_conda.bat deleted file mode 100644 index 6052ad08b10..00000000000 --- a/packaging/conda/install_conda.bat +++ /dev/null @@ -1 +0,0 @@ -start /wait "" "%miniconda_exe%" /S /InstallationType=JustMe /RegisterPython=0 /AddToPath=0 /D=%tmp_conda% diff --git a/packaging/conda/switch_cuda_version.sh b/packaging/conda/switch_cuda_version.sh deleted file mode 100755 index 342def93899..00000000000 --- a/packaging/conda/switch_cuda_version.sh +++ /dev/null @@ -1,28 +0,0 @@ -if [[ "$OSTYPE" == "msys" ]]; then - CUDA_DIR="/c/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v$1" -else - CUDA_DIR="/usr/local/cuda-$1" -fi - -if ! ls "$CUDA_DIR" -then - echo "folder $CUDA_DIR not found to switch" -fi - -echo "Switching symlink to $CUDA_DIR" -mkdir -p /usr/local -rm -fr /usr/local/cuda -ln -s "$CUDA_DIR" /usr/local/cuda - -if [[ "$OSTYPE" == "msys" ]]; then - export CUDA_VERSION=`ls /usr/local/cuda/bin/cudart64*.dll | head -1 | tr '._' ' ' | cut -d ' ' -f2` - export CUDNN_VERSION=`ls /usr/local/cuda/bin/cudnn64*.dll | head -1 | tr '._' ' ' | cut -d ' ' -f2` -else - export CUDA_VERSION=$(ls /usr/local/cuda/lib64/libcudart.so.*|sort|tac | head -1 | rev | cut -d"." -f -3 | rev) - export CUDNN_VERSION=$(ls /usr/local/cuda/lib64/libcudnn.so.*|sort|tac | head -1 | rev | cut -d"." 
-f -3 | rev)
-fi
-
-ls -alh /usr/local/cuda
-
-echo "CUDA_VERSION=$CUDA_VERSION"
-echo "CUDNN_VERSION=$CUDNN_VERSION"
diff --git a/packaging/cut_release.sh b/packaging/cut_release.sh
new file mode 100755
index 00000000000..91e0e5ff15d
--- /dev/null
+++ b/packaging/cut_release.sh
@@ -0,0 +1,36 @@
+#!/usr/bin/env bash
+#
+# Usage (run from root of project):
+#   TEST_INFRA_BRANCH=release/2.1 RELEASE_BRANCH=release/2.1 RELEASE_VERSION=2.1.0 packaging/cut_release.sh
+#
+# TEST_INFRA_BRANCH: The release branch of test-infra that houses all reusable
+#   workflows
+#
+# RELEASE_BRANCH: The name of the release branch for this repo
+#
+# RELEASE_VERSION: Version of this current release
+
+set -eou pipefail
+
+# Create and check out the release branch
+git checkout -b "${RELEASE_BRANCH}"
+
+# Change all GitHub Actions to reference the test-infra release branch
+# as opposed to main.
+for i in .github/workflows/*.yml; do
+  if [[ "$OSTYPE" == "darwin"* ]]; then
+    sed -i '' -e s#@main#@"${TEST_INFRA_BRANCH}"# $i;
+    sed -i '' -e s#test-infra-ref:[[:space:]]main#"test-infra-ref: ${TEST_INFRA_BRANCH}"# $i;
+  else
+    sed -i -e s#@main#@"${TEST_INFRA_BRANCH}"# $i;
+    sed -i -e s#test-infra-ref:[[:space:]]main#"test-infra-ref: ${TEST_INFRA_BRANCH}"# $i;
+  fi
+done
+
+# Update the Release Version in version.txt
+echo "${RELEASE_VERSION}" >version.txt
+
+# Optional
+# git add .github/workflows/*.yml version.txt
+# git commit -m "[RELEASE-ONLY CHANGES] Branch Cut for Release ${RELEASE_VERSION}"
+# git push origin "${RELEASE_BRANCH}"
diff --git a/packaging/pkg_helpers.bash b/packaging/pkg_helpers.bash
deleted file mode 100644
index 5d7109efe93..00000000000
--- a/packaging/pkg_helpers.bash
+++ /dev/null
@@ -1,261 +0,0 @@
-# A set of useful bash functions for common functionality we need to do in
-# many build scripts
-
-
-# Setup CUDA environment variables, based on CU_VERSION
-#
-# Inputs:
-#   CU_VERSION (cpu, cu92, cu100)
-#   NO_CUDA_PACKAGE (bool)
-#   BUILD_TYPE (conda, wheel)
-#
-# Outputs:
-#   VERSION_SUFFIX (e.g., "")
-#   PYTORCH_VERSION_SUFFIX (e.g., +cpu)
-#   WHEEL_DIR (e.g., cu100/)
-#   CUDA_HOME (e.g., /usr/local/cuda-9.2, respected by torch.utils.cpp_extension)
-#   FORCE_CUDA (respected by torchvision setup.py)
-#   NVCC_FLAGS (respected by torchvision setup.py)
-#
-# Precondition: CUDA versions are installed in their conventional locations in
-# /usr/local/cuda-*
-#
-# NOTE: Why VERSION_SUFFIX versus PYTORCH_VERSION_SUFFIX?  If you're building
-# a package with CUDA on a platform we support CUDA on, VERSION_SUFFIX ==
-# PYTORCH_VERSION_SUFFIX and everyone is happy.  However, if you are building a
-# package with only CPU bits (e.g., torchaudio), then VERSION_SUFFIX is always
-# empty, but PYTORCH_VERSION_SUFFIX is +cpu (because that's how you get a CPU
-# version of a Python package.  But that doesn't apply if you're on OS X,
-# since the default CU_VERSION on OS X is cpu.
-setup_cuda() {
-
-  # First, compute version suffixes.
By default, assume no version suffixes - export VERSION_SUFFIX="" - export PYTORCH_VERSION_SUFFIX="" - export WHEEL_DIR="" - # Wheel builds need suffixes (but not if they're on OS X, which never has suffix) - if [[ "$BUILD_TYPE" == "wheel" ]] && [[ "$(uname)" != Darwin ]]; then - # The default CUDA has no suffix - if [[ "$CU_VERSION" != "cu101" ]]; then - export PYTORCH_VERSION_SUFFIX="+$CU_VERSION" - fi - # Match the suffix scheme of pytorch, unless this package does not have - # CUDA builds (in which case, use default) - if [[ -z "$NO_CUDA_PACKAGE" ]]; then - export VERSION_SUFFIX="$PYTORCH_VERSION_SUFFIX" - export WHEEL_DIR="$CU_VERSION/" - fi - fi - - # Now work out the CUDA settings - case "$CU_VERSION" in - cu101) - if [[ "$OSTYPE" == "msys" ]]; then - export CUDA_HOME="C:\\Program Files\\NVIDIA GPU Computing Toolkit\\CUDA\\v10.1" - else - export CUDA_HOME=/usr/local/cuda-10.1/ - fi - export FORCE_CUDA=1 - # Hard-coding gencode flags is temporary situation until - # https://github.com/pytorch/pytorch/pull/23408 lands - export NVCC_FLAGS="-gencode=arch=compute_35,code=sm_35 -gencode=arch=compute_50,code=sm_50 -gencode=arch=compute_60,code=sm_60 -gencode=arch=compute_70,code=sm_70 -gencode=arch=compute_75,code=sm_75 -gencode=arch=compute_50,code=compute_50" - ;; - cu100) - if [[ "$OSTYPE" == "msys" ]]; then - export CUDA_HOME="C:\\Program Files\\NVIDIA GPU Computing Toolkit\\CUDA\\v10.0" - else - export CUDA_HOME=/usr/local/cuda-10.0/ - fi - export FORCE_CUDA=1 - # Hard-coding gencode flags is temporary situation until - # https://github.com/pytorch/pytorch/pull/23408 lands - export NVCC_FLAGS="-gencode=arch=compute_35,code=sm_35 -gencode=arch=compute_50,code=sm_50 -gencode=arch=compute_60,code=sm_60 -gencode=arch=compute_70,code=sm_70 -gencode=arch=compute_75,code=sm_75 -gencode=arch=compute_50,code=compute_50" - ;; - cu92) - if [[ "$OSTYPE" == "msys" ]]; then - export CUDA_HOME="C:\\Program Files\\NVIDIA GPU Computing Toolkit\\CUDA\\v9.2" - else - export CUDA_HOME=/usr/local/cuda-9.2/ - fi - export FORCE_CUDA=1 - export NVCC_FLAGS="-gencode=arch=compute_35,code=sm_35 -gencode=arch=compute_50,code=sm_50 -gencode=arch=compute_60,code=sm_60 -gencode=arch=compute_70,code=sm_70 -gencode=arch=compute_50,code=compute_50" - ;; - cpu) - ;; - *) - echo "Unrecognized CU_VERSION=$CU_VERSION" - exit 1 - ;; - esac -} - -# Populate build version if necessary, and add version suffix -# -# Inputs: -# BUILD_VERSION (e.g., 0.2.0 or empty) -# VERSION_SUFFIX (e.g., +cpu) -# -# Outputs: -# BUILD_VERSION (e.g., 0.2.0.dev20190807+cpu) -# -# Fill BUILD_VERSION if it doesn't exist already with a nightly string -# Usage: setup_build_version 0.2.0 -setup_build_version() { - if [[ -z "$BUILD_VERSION" ]]; then - export BUILD_VERSION="$1.dev$(date "+%Y%m%d")$VERSION_SUFFIX" - else - export BUILD_VERSION="$BUILD_VERSION$VERSION_SUFFIX" - fi -} - -# Set some useful variables for OS X, if applicable -setup_macos() { - if [[ "$(uname)" == Darwin ]]; then - export MACOSX_DEPLOYMENT_TARGET=10.9 CC=clang CXX=clang++ - fi -} - -# Top-level entry point for things every package will need to do -# -# Usage: setup_env 0.2.0 -setup_env() { - setup_cuda - setup_build_version "$1" - setup_macos -} - -# Function to retry functions that sometimes timeout or have flaky failures -retry () { - $* || (sleep 1 && $*) || (sleep 2 && $*) || (sleep 4 && $*) || (sleep 8 && $*) -} - -# Inputs: -# PYTHON_VERSION (2.7, 3.5, 3.6, 3.7) -# UNICODE_ABI (bool) -# -# Outputs: -# PATH modified to put correct Python version in PATH -# -# 
Precondition: If Linux, you are in a soumith/manylinux-cuda* Docker image -setup_wheel_python() { - if [[ "$(uname)" == Darwin ]]; then - eval "$(conda shell.bash hook)" - conda env remove -n "env$PYTHON_VERSION" || true - conda create -yn "env$PYTHON_VERSION" python="$PYTHON_VERSION" - conda activate "env$PYTHON_VERSION" - else - case "$PYTHON_VERSION" in - 2.7) - if [[ -n "$UNICODE_ABI" ]]; then - python_abi=cp27-cp27mu - else - python_abi=cp27-cp27m - fi - ;; - 3.5) python_abi=cp35-cp35m ;; - 3.6) python_abi=cp36-cp36m ;; - 3.7) python_abi=cp37-cp37m ;; - *) - echo "Unrecognized PYTHON_VERSION=$PYTHON_VERSION" - exit 1 - ;; - esac - export PATH="/opt/python/$python_abi/bin:$PATH" - fi -} - -# Install with pip a bit more robustly than the default -pip_install() { - retry pip install --progress-bar off "$@" -} - -# Install torch with pip, respecting PYTORCH_VERSION, and record the installed -# version into PYTORCH_VERSION, if applicable -setup_pip_pytorch_version() { - if [[ -z "$PYTORCH_VERSION" ]]; then - # Install latest prerelease version of torch, per our nightlies, consistent - # with the requested cuda version - pip_install --pre torch -f "https://download.pytorch.org/whl/nightly/${WHEEL_DIR}torch_nightly.html" - if [[ "$CUDA_VERSION" == "cpu" ]]; then - # CUDA and CPU are ABI compatible on the CPU-only parts, so strip - # in this case - export PYTORCH_VERSION="$(pip show torch | grep ^Version: | sed 's/Version: *//' | sed 's/+.\+//')" - else - export PYTORCH_VERSION="$(pip show torch | grep ^Version: | sed 's/Version: *//')" - fi - else - pip_install "torch==$PYTORCH_VERSION$CUDA_SUFFIX" \ - -f https://download.pytorch.org/whl/torch_stable.html \ - -f https://download.pytorch.org/whl/nightly/torch_nightly.html - fi -} - -# Fill PYTORCH_VERSION with the latest conda nightly version, and -# CONDA_CHANNEL_FLAGS with appropriate flags to retrieve these versions -# -# You MUST have populated CUDA_SUFFIX before hand. -setup_conda_pytorch_constraint() { - if [[ -z "$PYTORCH_VERSION" ]]; then - export CONDA_CHANNEL_FLAGS="-c pytorch-nightly" - export PYTORCH_VERSION="$(conda search --json 'pytorch[channel=pytorch-nightly]' | \ - python -c "import os, sys, json, re; cuver = os.environ.get('CU_VERSION'); \ - cuver_1 = cuver.replace('cu', 'cuda') if cuver != 'cpu' else cuver; \ - cuver_2 = (cuver[:-1] + '.' 
+ cuver[-1]).replace('cu', 'cuda') if cuver != 'cpu' else cuver; \ - print(re.sub(r'\\+.*$', '', \ - [x['version'] for x in json.load(sys.stdin)['pytorch'] \ - if (x['platform'] == 'darwin' or cuver_1 in x['fn'] or cuver_2 in x['fn']) \ - and 'py' + os.environ['PYTHON_VERSION'] in x['fn']][-1]))")" - if [[ -z "$PYTORCH_VERSION" ]]; then - echo "PyTorch version auto detection failed" - echo "No package found for CU_VERSION=$CU_VERSION and PYTHON_VERSION=$PYTHON_VERSION" - exit 1 - fi - else - export CONDA_CHANNEL_FLAGS="-c pytorch -c pytorch-nightly" - fi - if [[ "$CU_VERSION" == cpu ]]; then - export CONDA_PYTORCH_BUILD_CONSTRAINT="- pytorch==$PYTORCH_VERSION${PYTORCH_VERSION_SUFFIX}" - export CONDA_PYTORCH_CONSTRAINT="- pytorch==$PYTORCH_VERSION" - else - export CONDA_PYTORCH_BUILD_CONSTRAINT="- pytorch==${PYTORCH_VERSION}${PYTORCH_VERSION_SUFFIX}" - export CONDA_PYTORCH_CONSTRAINT="- pytorch==${PYTORCH_VERSION}${PYTORCH_VERSION_SUFFIX}" - fi -} - -# Translate CUDA_VERSION into CUDA_CUDATOOLKIT_CONSTRAINT -setup_conda_cudatoolkit_constraint() { - export CONDA_CPUONLY_FEATURE="" - if [[ "$(uname)" == Darwin ]]; then - export CONDA_CUDATOOLKIT_CONSTRAINT="" - else - case "$CU_VERSION" in - cu101) - export CONDA_CUDATOOLKIT_CONSTRAINT="- cudatoolkit >=10.1,<10.2 # [not osx]" - ;; - cu100) - export CONDA_CUDATOOLKIT_CONSTRAINT="- cudatoolkit >=10.0,<10.1 # [not osx]" - ;; - cu92) - export CONDA_CUDATOOLKIT_CONSTRAINT="- cudatoolkit >=9.2,<9.3 # [not osx]" - ;; - cpu) - export CONDA_CUDATOOLKIT_CONSTRAINT="" - export CONDA_CPUONLY_FEATURE="- cpuonly" - ;; - *) - echo "Unrecognized CU_VERSION=$CU_VERSION" - exit 1 - ;; - esac - fi -} - -# Build the proper compiler package before building the final package -setup_visual_studio_constraint() { - if [[ "$OSTYPE" == "msys" ]]; then - export VSTOOLCHAIN_PACKAGE=vs2019 - export VSDEVCMD_ARGS='' - conda build $CONDA_CHANNEL_FLAGS --no-anaconda-upload packaging/$VSTOOLCHAIN_PACKAGE - cp packaging/$VSTOOLCHAIN_PACKAGE/conda_build_config.yaml packaging/torchvision/conda_build_config.yaml - fi -} diff --git a/packaging/post_build_script.sh b/packaging/post_build_script.sh new file mode 100644 index 00000000000..253980b98c3 --- /dev/null +++ b/packaging/post_build_script.sh @@ -0,0 +1,4 @@ +#!/bin/bash +LD_LIBRARY_PATH="/usr/local/lib:$CUDA_HOME/lib64:$LD_LIBRARY_PATH" python packaging/wheel/relocate.py + +pip install torchvision-extra-decoders diff --git a/packaging/pre_build_script.sh b/packaging/pre_build_script.sh new file mode 100644 index 00000000000..6bc3cdc703f --- /dev/null +++ b/packaging/pre_build_script.sh @@ -0,0 +1,42 @@ +#!/bin/bash + +if [[ "$(uname)" == Darwin ]]; then + # Uninstall Conflicting jpeg brew formulae + jpeg_packages=$(brew list | grep jpeg) + echo "Existing Jpeg-related Brew libraries" + echo $jpeg_packages + for pkg in $jpeg_packages; do + brew uninstall --ignore-dependencies --force $pkg || true + done + + conda install -yq wget +fi + +if [[ "$(uname)" == Darwin || "$OSTYPE" == "msys" ]]; then + conda install libpng libwebp -yq + # Installing webp also installs a non-turbo jpeg, so we uninstall jpeg stuff + # before re-installing them + conda uninstall libjpeg-turbo libjpeg -y + conda install -yq ffmpeg=4.2 libjpeg-turbo -c pytorch + + # Copy binaries to be included in the wheel distribution + if [[ "$OSTYPE" == "msys" ]]; then + python_exec="$(which python)" + bin_path=$(dirname $python_exec) + cp "$bin_path/Library/bin/libjpeg.dll" torchvision + fi +else + + if [[ "$ARCH" == "aarch64" ]]; then + conda install libpng -yq + 
conda install -yq ffmpeg=4.2 libjpeg-turbo -c pytorch-nightly + fi + + conda install libwebp -yq + conda install libjpeg-turbo -c pytorch + yum install -y freetype gnutls + pip install auditwheel +fi + +pip install numpy pyyaml future ninja +pip install --upgrade setuptools==72.1.0 diff --git a/packaging/torchvision/bld.bat b/packaging/torchvision/bld.bat deleted file mode 100644 index 73f217c2cf1..00000000000 --- a/packaging/torchvision/bld.bat +++ /dev/null @@ -1,26 +0,0 @@ -@echo on - -set TORCHVISION_BUILD_VERSION=%PKG_VERSION% -set TORCHVISION_BUILD_NUMBER=%PKG_BUILDNUM% - -set build_with_cuda= - -if "%CUDA_VERSION%" == "None" goto cuda_flags_end -if "%CUDA_VERSION%" == "cpu" goto cuda_flags_end -if "%CUDA_VERSION%" == "" goto cuda_flags_end - -set build_with_cuda=1 -set desired_cuda=%CUDA_VERSION:~0,-1%.%CUDA_VERSION:~-1,1% - -set CUDA_PATH=C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v%desired_cuda% -set CUDA_BIN_PATH=%CUDA_PATH%\bin -set NVCC_FLAGS=-D__CUDA_NO_HALF_OPERATORS__ --expt-relaxed-constexpr -if "%desired_cuda%" == "9.0" set NVCC_FLAGS=%NVCC_FLAGS% -gencode=arch=compute_35,code=sm_35 -gencode=arch=compute_50,code=sm_50 -gencode=arch=compute_60,code=sm_60 -gencode=arch=compute_70,code=sm_70 -gencode=arch=compute_50,code=compute_50 -if "%desired_cuda%" == "9.2" set NVCC_FLAGS=%NVCC_FLAGS% -gencode=arch=compute_35,code=sm_35 -gencode=arch=compute_50,code=sm_50 -gencode=arch=compute_60,code=sm_60 -gencode=arch=compute_61,code=sm_61 -gencode=arch=compute_70,code=sm_70 -gencode=arch=compute_50,code=compute_50 -if "%desired_cuda%" == "10.0" set NVCC_FLAGS=%NVCC_FLAGS% -gencode=arch=compute_35,code=sm_35 -gencode=arch=compute_50,code=sm_50 -gencode=arch=compute_60,code=sm_60 -gencode=arch=compute_70,code=sm_70 -gencode=arch=compute_75,code=sm_75 -gencode=arch=compute_50,code=compute_50 -if "%desired_cuda%" == "10.1" set NVCC_FLAGS=%NVCC_FLAGS% -gencode=arch=compute_35,code=sm_35 -gencode=arch=compute_50,code=sm_50 -gencode=arch=compute_60,code=sm_60 -gencode=arch=compute_70,code=sm_70 -gencode=arch=compute_75,code=sm_75 -gencode=arch=compute_50,code=compute_50 - -:cuda_flags_end - -python setup.py install --single-version-externally-managed --record=record.txt -if errorlevel 1 exit /b 1 diff --git a/packaging/torchvision/conda_build_config.yaml b/packaging/torchvision/conda_build_config.yaml index 5188bb0ebec..a7c25c6d534 100644 --- a/packaging/torchvision/conda_build_config.yaml +++ b/packaging/torchvision/conda_build_config.yaml @@ -1,3 +1,5 @@ +channel_sources: + - pytorch-nightly,pytorch,defaults blas_impl: - mkl # [x86_64] c_compiler: @@ -5,8 +7,7 @@ c_compiler: cxx_compiler: - vs2017 # [win] python: - - 3.5 - - 3.6 + - 3.8 # This differs from target_platform in that it determines what subdir the compiler # will target, not what subdir the compiler package will be itself. 
# For example, we need a win-64 vs2008_win-32 package, so that we compile win-32 diff --git a/packaging/torchvision/meta.yaml b/packaging/torchvision/meta.yaml index da075ff03cb..a847328a77e 100644 --- a/packaging/torchvision/meta.yaml +++ b/packaging/torchvision/meta.yaml @@ -1,3 +1,4 @@ +{% set build_variant = environ.get('CONDA_BUILD_VARIANT', 'cpu') %} package: name: torchvision version: "{{ environ.get('BUILD_VERSION') }}" @@ -8,31 +9,49 @@ source: requirements: build: - {{ compiler('c') }} # [win] + - libpng + - libjpeg-turbo + - libwebp + - ffmpeg >=4.2.2, <5.0.0 # [linux] host: - python - setuptools - {{ environ.get('CONDA_PYTORCH_BUILD_CONSTRAINT') }} - {{ environ.get('CONDA_CUDATOOLKIT_CONSTRAINT') }} - {{ environ.get('CONDA_CPUONLY_FEATURE') }} + - pytorch-mutex 1.0 {{ build_variant }} # [not osx ] + {{ environ.get('CONDA_PYTORCH_BUILD_CONSTRAINT', 'pytorch') }} + {{ environ.get('CONDA_CUDATOOLKIT_CONSTRAINT', '') }} run: - python - - pillow >=4.1.1 - - numpy >=1.11 - - six - {{ environ.get('CONDA_PYTORCH_CONSTRAINT') }} - {{ environ.get('CONDA_CUDATOOLKIT_CONSTRAINT') }} + - defaults::numpy >=1.11 # [py <= 310] + - numpy >=1.23.5 # [py >= 311] + - requests + - libpng + - ffmpeg >=4.2.2, <5.0.0 # [linux] + - libjpeg-turbo + - libwebp + - pillow >=5.3.0, !=8.3.* + - pytorch-mutex 1.0 {{ build_variant }} # [not osx ] + {{ environ.get('CONDA_PYTORCH_CONSTRAINT', 'pytorch') }} + {{ environ.get('CONDA_CUDATOOLKIT_CONSTRAINT', '') }} + + {% if build_variant == 'cpu' %} + run_constrained: + - cpuonly + {% elif not osx %} + run_constrained: + - cpuonly <0 + {% endif %} build: string: py{{py}}_{{ environ['CU_VERSION'] }} - script: python setup.py install --single-version-externally-managed --record=record.txt # [not win] + script: python setup.py install --single-version-externally-managed --record=record.txt script_env: - CUDA_HOME - FORCE_CUDA - - NVCC_FLAGS - features: - {{ environ.get('CONDA_CPUONLY_FEATURE') }} + - BUILD_VERSION + - TORCH_CUDA_ARCH_LIST + - MACOSX_DEPLOYMENT_TARGET test: imports: @@ -44,12 +63,8 @@ test: requires: - pytest - scipy - - mock - - av + - libjpeg-turbo - ca-certificates - - typing - commands: - pytest . about: diff --git a/packaging/vs2017/activate.bat b/packaging/vs2017/activate.bat deleted file mode 100644 index ccecfc25442..00000000000 --- a/packaging/vs2017/activate.bat +++ /dev/null @@ -1,44 +0,0 @@ -:: Set env vars that tell distutils to use the compiler that we put on path -SET DISTUTILS_USE_SDK=1 -SET MSSdk=1 - -SET "VS_VERSION=15.0" -SET "VS_MAJOR=15" -SET "VS_YEAR=2017" - -set "MSYS2_ARG_CONV_EXCL=/AI;/AL;/OUT;/out" -set "MSYS2_ENV_CONV_EXCL=CL" - -:: For Python 3.5+, ensure that we link with the dynamic runtime. See -:: http://stevedower.id.au/blog/building-for-python-3-5-part-two/ for more info -set "PY_VCRUNTIME_REDIST=%PREFIX%\\bin\\vcruntime140.dll" - -for /f "usebackq tokens=*" %%i in (`"%ProgramFiles(x86)%\Microsoft Visual Studio\Installer\vswhere.exe" -legacy -products * -version [15^,16^) -property installationPath`) do ( - if exist "%%i" if exist "%%i\VC\Auxiliary\Build\vcvarsall.bat" ( - set "VSINSTALLDIR=%%i\" - goto :vswhere - ) -) - -:vswhere - -:: Shorten PATH to avoid the `input line too long` error. -SET MyPath=%PATH% - -setlocal EnableDelayedExpansion - -SET TempPath="%MyPath:;=";"%" -SET var= -FOR %%a IN (%TempPath%) DO ( - IF EXIST %%~sa ( - SET "var=!var!;%%~sa" - ) -) - -set "TempPath=!var:~1!" -endlocal & set "PATH=%TempPath%" - -:: Shorten current directory too -FOR %%A IN (.) 
DO CD "%%~sA" - -:: other things added by install_activate.bat at package build time diff --git a/packaging/vs2017/conda_build_config.yaml b/packaging/vs2017/conda_build_config.yaml deleted file mode 100644 index 5188bb0ebec..00000000000 --- a/packaging/vs2017/conda_build_config.yaml +++ /dev/null @@ -1,24 +0,0 @@ -blas_impl: - - mkl # [x86_64] -c_compiler: - - vs2017 # [win] -cxx_compiler: - - vs2017 # [win] -python: - - 3.5 - - 3.6 -# This differs from target_platform in that it determines what subdir the compiler -# will target, not what subdir the compiler package will be itself. -# For example, we need a win-64 vs2008_win-32 package, so that we compile win-32 -# code on win-64 miniconda. -cross_compiler_target_platform: - - win-64 # [win] -target_platform: - - win-64 # [win] -vc: - - 14 -zip_keys: - - # [win] - - vc # [win] - - c_compiler # [win] - - cxx_compiler # [win] diff --git a/packaging/vs2017/install_activate.bat b/packaging/vs2017/install_activate.bat deleted file mode 100644 index de0e6ff3c52..00000000000 --- a/packaging/vs2017/install_activate.bat +++ /dev/null @@ -1,30 +0,0 @@ -set YEAR=2017 -set VER=15 - -mkdir "%PREFIX%\etc\conda\activate.d" -COPY "%RECIPE_DIR%\activate.bat" "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - -IF "%cross_compiler_target_platform%" == "win-64" ( - set "target_platform=amd64" - echo SET "CMAKE_GENERATOR=Visual Studio %VER% %YEAR% Win64" >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - echo pushd "%%VSINSTALLDIR%%" >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - IF "%VSDEVCMD_ARGS%" == "" ( - echo CALL "VC\Auxiliary\Build\vcvarsall.bat" x64 >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - echo popd >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - echo pushd "%%VSINSTALLDIR%%" >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - echo CALL "VC\Auxiliary\Build\vcvarsall.bat" x86_amd64 >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - ) ELSE ( - echo CALL "VC\Auxiliary\Build\vcvarsall.bat" x64 %VSDEVCMD_ARGS% >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - echo popd >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - echo pushd "%%VSINSTALLDIR%%" >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - echo CALL "VC\Auxiliary\Build\vcvarsall.bat" x86_amd64 %VSDEVCMD_ARGS% >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - ) - echo popd >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - ) else ( - set "target_platform=x86" - echo SET "CMAKE_GENERATOR=Visual Studio %VER% %YEAR%" >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - echo pushd "%%VSINSTALLDIR%%" >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - echo CALL "VC\Auxiliary\Build\vcvars32.bat" >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" - echo popd - ) - diff --git a/packaging/vs2017/install_runtime.bat b/packaging/vs2017/install_runtime.bat deleted file mode 100644 index 5163c16cf24..00000000000 --- a/packaging/vs2017/install_runtime.bat +++ /dev/null @@ -1,49 +0,0 @@ -set VC_PATH=x86 -if "%ARCH%"=="64" ( - set VC_PATH=x64 -) - -set MSC_VER=2017 - -rem :: This should always be present for VC installed with VS. 
Not sure about VC installed with Visual C++ Build Tools 2015 -rem FOR /F "usebackq tokens=3*" %%A IN (`REG QUERY "HKEY_LOCAL_MACHINE\Software\Microsoft\DevDiv\VC\Servicing\14.0\IDE.x64" /v UpdateVersion`) DO ( -rem set SP=%%A -rem ) - -rem if not "%SP%" == "%PKG_VERSION%" ( -rem echo "Version detected from registry: %SP%" -rem echo "does not match version of package being built (%PKG_VERSION%)" -rem echo "Do you have current updates for VS 2015 installed?" -rem exit 1 -rem ) - - -REM ========== REQUIRES Win 10 SDK be installed, or files otherwise copied to location below! -robocopy "C:\Program Files (x86)\Windows Kits\10\Redist\ucrt\DLLs\%VC_PATH%" "%LIBRARY_BIN%" *.dll /E -robocopy "C:\Program Files (x86)\Windows Kits\10\Redist\ucrt\DLLs\%VC_PATH%" "%PREFIX%" *.dll /E -if %ERRORLEVEL% GEQ 8 exit 1 - -REM ========== This one comes from visual studio 2017 -set "VC_VER=141" - -for /f "usebackq tokens=*" %%i in (`"%ProgramFiles(x86)%\Microsoft Visual Studio\Installer\vswhere.exe" -legacy -products * -version [15^,16^) -property installationPath`) do ( - if exist "%%i" if exist "%%i\VC\Auxiliary\Build\vcvarsall.bat" ( - set "VS15VCVARSALL=%%i\VC\Auxiliary\Build\vcvarsall.bat" - goto :eof - ) -) - -@setlocal -call "%VS15VARSALL%" x64 - -set "REDIST_ROOT=%VCToolsRedistDir%%VC_PATH%" - -robocopy "%REDIST_ROOT%\Microsoft.VC%VC_VER%.CRT" "%LIBRARY_BIN%" *.dll /E -if %ERRORLEVEL% LSS 8 exit 0 -robocopy "%REDIST_ROOT%\Microsoft.VC%VC_VER%.CRT" "%PREFIX%" *.dll /E -if %ERRORLEVEL% LSS 8 exit 0 -robocopy "%REDIST_ROOT%\Microsoft.VC%VC_VER%.OpenMP" "%LIBRARY_BIN%" *.dll /E -if %ERRORLEVEL% LSS 8 exit 0 -robocopy "%REDIST_ROOT%\Microsoft.VC%VC_VER%.OpenMP" "%PREFIX%" *.dll /E -if %ERRORLEVEL% LSS 8 exit 0 -@endlocal diff --git a/packaging/vs2017/meta.yaml b/packaging/vs2017/meta.yaml deleted file mode 100644 index 34f4860ba85..00000000000 --- a/packaging/vs2017/meta.yaml +++ /dev/null @@ -1,45 +0,0 @@ -{% set vcver="14.1" %} -{% set vcfeature="14" %} -{% set vsyear="2017" %} -{% set fullver="15.4.27004.2010" %} - -package: - name: vs{{ vsyear }} - version: {{ fullver }} - -build: - skip: True [not win] - script_env: - - VSDEVCMD_ARGS # [win] - -outputs: - - name: vs{{ vsyear }}_{{ cross_compiler_target_platform }} - script: install_activate.bat - track_features: - # VS 2017 is binary-compatible with VS 2015/vc14. Tools are "v141". - strong: - - vc{{ vcfeature }} - run_exports: - - vc {{ vcver }} - about: - summary: Activation and version verification of MSVC {{ vcver }} (VS {{ vsyear }}) compiler - license: BSD 3-clause - - name: vs{{ vsyear }}_runtime - script: install_runtime.bat - - name: vc - version: {{ vcver }} - track_features: - - vc{{ vcfeature }} - requirements: - run: - - {{ pin_subpackage('vs' ~ vsyear ~ '_runtime') }} - about: - home: https://github.com/conda/conda/wiki/VC-features - license: Modified BSD License (3-clause) - license_family: BSD - summary: A meta-package to track VC features. - description: | - This metapackage is used to activate vc features without - depending on Python. 
- doc_url: https://github.com/conda/conda/wiki/VC-features - dev_url: https://github.com/conda/conda/wiki/VC-features diff --git a/packaging/vs2019/conda_build_config.yaml b/packaging/vs2019/conda_build_config.yaml index 358052ec012..b4dc99341d0 100644 --- a/packaging/vs2019/conda_build_config.yaml +++ b/packaging/vs2019/conda_build_config.yaml @@ -5,8 +5,7 @@ c_compiler: cxx_compiler: - vs2019 # [win] python: - - 3.5 - - 3.6 + - 3.8 # This differs from target_platform in that it determines what subdir the compiler # will target, not what subdir the compiler package will be itself. # For example, we need a win-64 vs2008_win-32 package, so that we compile win-32 diff --git a/packaging/vs2019/install_activate.bat b/packaging/vs2019/install_activate.bat index 3c38253aa5d..9e60ccfd2dc 100644 --- a/packaging/vs2019/install_activate.bat +++ b/packaging/vs2019/install_activate.bat @@ -27,4 +27,3 @@ IF "%cross_compiler_target_platform%" == "win-64" ( echo CALL "VC\Auxiliary\Build\vcvars32.bat" >> "%PREFIX%\etc\conda\activate.d\vs%YEAR%_compiler_vars.bat" echo popd ) - diff --git a/packaging/vs2019/install_runtime.bat b/packaging/vs2019/install_runtime.bat deleted file mode 100644 index e09a5ccfb0f..00000000000 --- a/packaging/vs2019/install_runtime.bat +++ /dev/null @@ -1,49 +0,0 @@ -set VC_PATH=x86 -if "%ARCH%"=="64" ( - set VC_PATH=x64 -) - -set MSC_VER=2019 - -rem :: This should always be present for VC installed with VS. Not sure about VC installed with Visual C++ Build Tools 2015 -rem FOR /F "usebackq tokens=3*" %%A IN (`REG QUERY "HKEY_LOCAL_MACHINE\Software\Microsoft\DevDiv\VC\Servicing\14.0\IDE.x64" /v UpdateVersion`) DO ( -rem set SP=%%A -rem ) - -rem if not "%SP%" == "%PKG_VERSION%" ( -rem echo "Version detected from registry: %SP%" -rem echo "does not match version of package being built (%PKG_VERSION%)" -rem echo "Do you have current updates for VS 2015 installed?" -rem exit 1 -rem ) - - -REM ========== REQUIRES Win 10 SDK be installed, or files otherwise copied to location below! -robocopy "C:\Program Files (x86)\Windows Kits\10\Redist\ucrt\DLLs\%VC_PATH%" "%LIBRARY_BIN%" *.dll /E -robocopy "C:\Program Files (x86)\Windows Kits\10\Redist\ucrt\DLLs\%VC_PATH%" "%PREFIX%" *.dll /E -if %ERRORLEVEL% GEQ 8 exit 1 - -REM ========== This one comes from visual studio 2019 -set "VC_VER=142" - -for /f "usebackq tokens=*" %%i in (`"%ProgramFiles(x86)%\Microsoft Visual Studio\Installer\vswhere.exe" -legacy -products * -version [16^,17^) -property installationPath`) do ( - if exist "%%i" if exist "%%i\VC\Auxiliary\Build\vcvarsall.bat" ( - set "VS15VCVARSALL=%%i\VC\Auxiliary\Build\vcvarsall.bat" - goto :eof - ) -) - -@setlocal -call "%VS15VARSALL%" x64 - -set "REDIST_ROOT=%VCToolsRedistDir%%VC_PATH%" - -robocopy "%REDIST_ROOT%\Microsoft.VC%VC_VER%.CRT" "%LIBRARY_BIN%" *.dll /E -if %ERRORLEVEL% LSS 8 exit 0 -robocopy "%REDIST_ROOT%\Microsoft.VC%VC_VER%.CRT" "%PREFIX%" *.dll /E -if %ERRORLEVEL% LSS 8 exit 0 -robocopy "%REDIST_ROOT%\Microsoft.VC%VC_VER%.OpenMP" "%LIBRARY_BIN%" *.dll /E -if %ERRORLEVEL% LSS 8 exit 0 -robocopy "%REDIST_ROOT%\Microsoft.VC%VC_VER%.OpenMP" "%PREFIX%" *.dll /E -if %ERRORLEVEL% LSS 8 exit 0 -@endlocal diff --git a/packaging/vs2019/meta.yaml b/packaging/vs2019/meta.yaml index e3f8b471481..94a0ed4db3e 100644 --- a/packaging/vs2019/meta.yaml +++ b/packaging/vs2019/meta.yaml @@ -19,27 +19,6 @@ outputs: # VS 2019 is binary-compatible with VS 2017/vc 14.1 and 2015/vc14. Tools are "v142". 
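Both the vs2017 and vs2019 runtime scripts locate the toolchain with vswhere using a half-open version range (`[15,16)` selects VS 2017, `[16,17)` selects VS 2019) and then probe for vcvarsall.bat. A hedged Python sketch of the same lookup; `find_vcvarsall` and its signature are ours, only the vswhere flags come from the scripts:

```python
import subprocess
from pathlib import Path
from typing import Optional

VSWHERE = Path(r"C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere.exe")

def find_vcvarsall(lower: int, upper: int) -> Optional[Path]:
    """Locate vcvarsall.bat for a VS major version in [lower, upper).

    Mirrors the batch loops: [15,16) selects VS 2017, [16,17) selects
    VS 2019. Returns None when no matching installation has the file.
    """
    out = subprocess.run(
        [str(VSWHERE), "-legacy", "-products", "*",
         "-version", f"[{lower},{upper})",
         "-property", "installationPath"],
        capture_output=True, text=True,
    ).stdout
    for install_dir in out.splitlines():
        candidate = Path(install_dir) / "VC" / "Auxiliary" / "Build" / "vcvarsall.bat"
        if candidate.is_file():
            return candidate
    return None
```

Note that the `^,` and `^)` escapes in the batch version exist only to protect the range from cmd's parser; passed as a plain argument list, no escaping is needed.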
strong: - vc{{ vcfeature }} - run_exports: - - vc {{ vcver }} about: summary: Activation and version verification of MSVC {{ vcver }} (VS {{ vsyear }}) compiler license: BSD 3-clause - - name: vs{{ vsyear }}_runtime - script: install_runtime.bat - - name: vc - version: {{ vcver }} - track_features: - - vc{{ vcfeature }} - requirements: - run: - - {{ pin_subpackage('vs' ~ vsyear ~ '_runtime') }} - about: - home: https://github.com/conda/conda/wiki/VC-features - license: Modified BSD License (3-clause) - license_family: BSD - summary: A meta-package to track VC features. - description: | - This metapackage is used to activate vc features without - depending on Python. - doc_url: https://github.com/conda/conda/wiki/VC-features - dev_url: https://github.com/conda/conda/wiki/VC-features diff --git a/packaging/wheel/linux_manywheel.sh b/packaging/wheel/linux_manywheel.sh deleted file mode 100644 index d04e334d237..00000000000 --- a/packaging/wheel/linux_manywheel.sh +++ /dev/null @@ -1,62 +0,0 @@ -#!/bin/bash -set -ex - -if [ "$#" -ne 1 ]; then - echo "Illegal number of parameters. Pass cuda version" - echo "CUDA version should be cu92, cu100 or cpu" - exit 1 -fi -export CUVER="$1" # cu92 cu100 cpu - -if [[ "$CUVER" == "cu101" ]]; then - cu_suffix="" -else - cu_suffix="+$CUVER" -fi - -export TORCHVISION_BUILD_VERSION="0.4.0.dev$(date "+%Y%m%d")${cu_suffix}" -export TORCHVISION_BUILD_NUMBER="1" -export TORCHVISION_LOCAL_VERSION_LABEL="$CUVER" -export OUT_DIR="/remote/$CUVER" - -pushd /opt/python -DESIRED_PYTHON=(*/) -popd -for desired_py in "${DESIRED_PYTHON[@]}"; do - python_installations+=("/opt/python/$desired_py") -done - -OLD_PATH=$PATH -cd /tmp -rm -rf vision -git clone https://github.com/pytorch/vision - -cd /tmp/vision - -for PYDIR in "${python_installations[@]}"; do - export PATH=$PYDIR/bin:$OLD_PATH - pip install --upgrade pip - pip install numpy pyyaml future - - pip uninstall -y torch || true - pip uninstall -y torch_nightly || true - - export TORCHVISION_PYTORCH_DEPENDENCY_NAME=torch_nightly - pip install torch_nightly -f https://download.pytorch.org/whl/nightly/$CUVER/torch_nightly.html - # CPU/CUDA variants of PyTorch have ABI compatible PyTorch for - # the CPU only bits. Therefore, we - # strip off the local package qualifier, but ONLY if we're - # doing a CPU build. - if [[ "$CUVER" == "cpu" ]]; then - export TORCHVISION_PYTORCH_DEPENDENCY_VERSION="$(pip show torch_nightly | grep ^Version: | sed 's/Version: \+//' | sed 's/+.\+//')" - else - export TORCHVISION_PYTORCH_DEPENDENCY_VERSION="$(pip show torch_nightly | grep ^Version: | sed 's/Version: \+//')" - fi - echo "Building against ${TORCHVISION_PYTORCH_DEPENDENCY_VERSION}" - - pip install ninja - python setup.py clean - python setup.py bdist_wheel - mkdir -p $OUT_DIR - cp dist/*.whl $OUT_DIR/ -done diff --git a/packaging/wheel/osx_wheel.sh b/packaging/wheel/osx_wheel.sh deleted file mode 100644 index 900485d3199..00000000000 --- a/packaging/wheel/osx_wheel.sh +++ /dev/null @@ -1,52 +0,0 @@ -if [[ ":$PATH:" == *"conda"* ]]; then - echo "existing anaconda install in PATH, remove it and run script" - exit 1 -fi -# download and activate anaconda -rm -rf ~/minconda_wheel_env_tmp -wget -q https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh && \ - chmod +x Miniconda3-latest-MacOSX-x86_64.sh && \ - ./Miniconda3-latest-MacOSX-x86_64.sh -b -p ~/minconda_wheel_env_tmp && \ - rm Miniconda3-latest-MacOSX-x86_64.sh - -. 
~/minconda_wheel_env_tmp/bin/activate - - -export TORCHVISION_BUILD_VERSION="0.4.0.dev$(date "+%Y%m%d")" -export TORCHVISION_BUILD_NUMBER="1" -export OUT_DIR=~/torchvision_wheels - -export MACOSX_DEPLOYMENT_TARGET=10.9 CC=clang CXX=clang++ - -pushd /tmp -rm -rf vision -git clone https://github.com/pytorch/vision -pushd vision - -desired_pythons=( "2.7" "3.5" "3.6" "3.7" ) -# for each python -for desired_python in "${desired_pythons[@]}" -do - # create and activate python env - env_name="env$desired_python" - conda create -yn $env_name python="$desired_python" - conda activate $env_name - - pip uninstall -y torch || true - pip uninstall -y torch_nightly || true - - export TORCHVISION_PYTORCH_DEPENDENCY_NAME=torch_nightly - pip install torch_nightly -f https://download.pytorch.org/whl/nightly/cpu/torch_nightly.html - export TORCHVISION_PYTORCH_DEPENDENCY_VERSION="$(pip show torch_nightly | grep ^Version: | sed 's/Version: *//')" - echo "Building against ${TORCHAUDIO_PYTORCH_DEPENDENCY_VERSION}" - - # install torchvision dependencies - pip install ninja scipy pytest - - python setup.py clean - python setup.py bdist_wheel - mkdir -p $OUT_DIR - cp dist/*.whl $OUT_DIR/ -done -popd -popd diff --git a/packaging/wheel/relocate.py b/packaging/wheel/relocate.py new file mode 100644 index 00000000000..fb110abd873 --- /dev/null +++ b/packaging/wheel/relocate.py @@ -0,0 +1,380 @@ +"""Helper script to package wheels and relocate binaries.""" + +import glob +import hashlib + +# Standard library imports +import os +import os.path as osp +import platform +import shutil +import subprocess +import sys +import zipfile +from base64 import urlsafe_b64encode + +# Third party imports +if sys.platform == "linux": + from auditwheel.lddtree import lddtree + + +ALLOWLIST = { + "libgcc_s.so.1", + "libstdc++.so.6", + "libm.so.6", + "libdl.so.2", + "librt.so.1", + "libc.so.6", + "libnsl.so.1", + "libutil.so.1", + "libpthread.so.0", + "libresolv.so.2", + "libX11.so.6", + "libXext.so.6", + "libXrender.so.1", + "libICE.so.6", + "libSM.so.6", + "libGL.so.1", + "libgobject-2.0.so.0", + "libgthread-2.0.so.0", + "libglib-2.0.so.0", + "ld-linux-x86-64.so.2", + "ld-2.17.so", +} + +WINDOWS_ALLOWLIST = { + "MSVCP140.dll", + "KERNEL32.dll", + "VCRUNTIME140_1.dll", + "VCRUNTIME140.dll", + "api-ms-win-crt-heap-l1-1-0.dll", + "api-ms-win-crt-runtime-l1-1-0.dll", + "api-ms-win-crt-stdio-l1-1-0.dll", + "api-ms-win-crt-filesystem-l1-1-0.dll", + "api-ms-win-crt-string-l1-1-0.dll", + "api-ms-win-crt-environment-l1-1-0.dll", + "api-ms-win-crt-math-l1-1-0.dll", + "api-ms-win-crt-convert-l1-1-0.dll", +} + + +HERE = osp.dirname(osp.abspath(__file__)) +PACKAGE_ROOT = osp.dirname(osp.dirname(HERE)) +PLATFORM_ARCH = platform.machine() +PYTHON_VERSION = sys.version_info + + +def rehash(path, blocksize=1 << 20): + """Return (hash, length) for path using hashlib.sha256()""" + h = hashlib.sha256() + length = 0 + with open(path, "rb") as f: + while block := f.read(blocksize): + length += len(block) + h.update(block) + digest = "sha256=" + urlsafe_b64encode(h.digest()).decode("latin1").rstrip("=") + # unicode/str python2 issues + return (digest, str(length)) # type: ignore + + +def unzip_file(file, dest): + """Decompress zip `file` into directory `dest`.""" + with zipfile.ZipFile(file, "r") as zip_ref: + zip_ref.extractall(dest) + + +def is_program_installed(basename): + """ + Return program absolute path if installed in PATH. + Otherwise, return None + On macOS systems, a .app is considered installed if + it exists. 
+ """ + if sys.platform == "darwin" and basename.endswith(".app") and osp.exists(basename): + return basename + + for path in os.environ["PATH"].split(os.pathsep): + abspath = osp.join(path, basename) + if osp.isfile(abspath): + return abspath + + +def find_program(basename): + """ + Find program in PATH and return absolute path + Try adding .exe or .bat to basename on Windows platforms + (return None if not found) + """ + names = [basename] + if os.name == "nt": + # Windows platforms + extensions = (".exe", ".bat", ".cmd", ".dll") + if not basename.endswith(extensions): + names = [basename + ext for ext in extensions] + [basename] + for name in names: + path = is_program_installed(name) + if path: + return path + + +def patch_new_path(library_path, new_dir): + library = osp.basename(library_path) + name, *rest = library.split(".") + rest = ".".join(rest) + hash_id = hashlib.sha256(library_path.encode("utf-8")).hexdigest()[:8] + new_name = ".".join([name, hash_id, rest]) + return osp.join(new_dir, new_name) + + +def find_dll_dependencies(dumpbin, binary): + out = subprocess.run([dumpbin, "/dependents", binary], stdout=subprocess.PIPE) + out = out.stdout.strip().decode("utf-8") + start_index = out.find("dependencies:") + len("dependencies:") + end_index = out.find("Summary") + dlls = out[start_index:end_index].strip() + dlls = dlls.split(os.linesep) + dlls = [dll.strip() for dll in dlls] + return dlls + + +def relocate_elf_library(patchelf, output_dir, output_library, binary): + """ + Relocate an ELF shared library to be packaged on a wheel. + + Given a shared library, find the transitive closure of its dependencies, + rename and copy them into the wheel while updating their respective rpaths. + """ + + print(f"Relocating {binary}") + binary_path = osp.join(output_library, binary) + + ld_tree = lddtree(binary_path) + tree_libs = ld_tree["libs"] + + binary_queue = [(n, binary) for n in ld_tree["needed"]] + binary_paths = {binary: binary_path} + binary_dependencies = {} + + while binary_queue != []: + library, parent = binary_queue.pop(0) + library_info = tree_libs[library] + print(library) + + if library_info["path"] is None: + print(f"Omitting {library}") + continue + + if library in ALLOWLIST: + # Omit glibc/gcc/system libraries + print(f"Omitting {library}") + continue + + parent_dependencies = binary_dependencies.get(parent, []) + parent_dependencies.append(library) + binary_dependencies[parent] = parent_dependencies + + if library in binary_paths: + continue + + binary_paths[library] = library_info["path"] + binary_queue += [(n, library) for n in library_info["needed"]] + + print("Copying dependencies to wheel directory") + new_libraries_path = osp.join(output_dir, "torchvision.libs") + os.makedirs(new_libraries_path, exist_ok=True) + + new_names = {binary: binary_path} + + for library in binary_paths: + if library != binary: + library_path = binary_paths[library] + new_library_path = patch_new_path(library_path, new_libraries_path) + print(f"{library} -> {new_library_path}") + shutil.copyfile(library_path, new_library_path) + new_names[library] = new_library_path + + print("Updating dependency names by new files") + for library in binary_paths: + if library != binary: + if library not in binary_dependencies: + continue + library_dependencies = binary_dependencies[library] + new_library_name = new_names[library] + for dep in library_dependencies: + new_dep = osp.basename(new_names[dep]) + print(f"{library}: {dep} -> {new_dep}") + subprocess.check_output( + [patchelf, "--replace-needed", 
dep, new_dep, new_library_name], cwd=new_libraries_path + ) + + print("Updating library rpath") + subprocess.check_output([patchelf, "--set-rpath", "$ORIGIN", new_library_name], cwd=new_libraries_path) + + subprocess.check_output([patchelf, "--print-rpath", new_library_name], cwd=new_libraries_path) + + print("Update library dependencies") + library_dependencies = binary_dependencies[binary] + for dep in library_dependencies: + new_dep = osp.basename(new_names[dep]) + print(f"{binary}: {dep} -> {new_dep}") + subprocess.check_output([patchelf, "--replace-needed", dep, new_dep, binary], cwd=output_library) + + print("Update library rpath") + subprocess.check_output( + [patchelf, "--set-rpath", "$ORIGIN:$ORIGIN/../torchvision.libs", binary_path], cwd=output_library + ) + + +def relocate_dll_library(dumpbin, output_dir, output_library, binary): + """ + Relocate a DLL/PE shared library to be packaged on a wheel. + + Given a shared library, find the transitive closure of its dependencies, + rename and copy them into the wheel. + """ + print(f"Relocating {binary}") + binary_path = osp.join(output_library, binary) + + library_dlls = find_dll_dependencies(dumpbin, binary_path) + binary_queue = [(dll, binary) for dll in library_dlls] + binary_paths = {binary: binary_path} + binary_dependencies = {} + + while binary_queue != []: + library, parent = binary_queue.pop(0) + if library in WINDOWS_ALLOWLIST or library.startswith("api-ms-win"): + print(f"Omitting {library}") + continue + + library_path = find_program(library) + if library_path is None: + print(f"{library} not found") + continue + + if osp.basename(osp.dirname(library_path)) == "system32": + continue + + print(f"{library}: {library_path}") + parent_dependencies = binary_dependencies.get(parent, []) + parent_dependencies.append(library) + binary_dependencies[parent] = parent_dependencies + + if library in binary_paths: + continue + + binary_paths[library] = library_path + downstream_dlls = find_dll_dependencies(dumpbin, library_path) + binary_queue += [(n, library) for n in downstream_dlls] + + print("Copying dependencies to wheel directory") + package_dir = osp.join(output_dir, "torchvision") + for library in binary_paths: + if library != binary: + library_path = binary_paths[library] + new_library_path = osp.join(package_dir, library) + print(f"{library} -> {new_library_path}") + shutil.copyfile(library_path, new_library_path) + + +def compress_wheel(output_dir, wheel, wheel_dir, wheel_name): + """Create RECORD file and compress wheel distribution.""" + print("Update RECORD file in wheel") + dist_info = glob.glob(osp.join(output_dir, "*.dist-info"))[0] + record_file = osp.join(dist_info, "RECORD") + + with open(record_file, "w") as f: + for root, _, files in os.walk(output_dir): + for this_file in files: + full_file = osp.join(root, this_file) + rel_file = osp.relpath(full_file, output_dir) + if full_file == record_file: + f.write(f"{rel_file},,\n") + else: + digest, size = rehash(full_file) + f.write(f"{rel_file},{digest},{size}\n") + + print("Compressing wheel") + base_wheel_name = osp.join(wheel_dir, wheel_name) + shutil.make_archive(base_wheel_name, "zip", output_dir) + os.remove(wheel) + shutil.move(f"{base_wheel_name}.zip", wheel) + shutil.rmtree(output_dir) + + +def patch_linux(): + # Get patchelf location + patchelf = find_program("patchelf") + if patchelf is None: + raise FileNotFoundError("Patchelf was not found in the system, please make sure that is available on the PATH.") + + # Find wheel + print("Finding wheels...") + wheels 
= glob.glob(osp.join(PACKAGE_ROOT, "dist", "*.whl")) + output_dir = osp.join(PACKAGE_ROOT, "dist", ".wheel-process") + + image_binary = "image.so" + video_binary = "video_reader.so" + torchvision_binaries = [image_binary, video_binary] + for wheel in wheels: + if osp.exists(output_dir): + shutil.rmtree(output_dir) + + os.makedirs(output_dir) + + print("Unzipping wheel...") + wheel_file = osp.basename(wheel) + wheel_dir = osp.dirname(wheel) + print(f"{wheel_file}") + wheel_name, _ = osp.splitext(wheel_file) + unzip_file(wheel, output_dir) + + print("Finding ELF dependencies...") + output_library = osp.join(output_dir, "torchvision") + for binary in torchvision_binaries: + if osp.exists(osp.join(output_library, binary)): + relocate_elf_library(patchelf, output_dir, output_library, binary) + + compress_wheel(output_dir, wheel, wheel_dir, wheel_name) + + +def patch_win(): + # Get dumpbin location + dumpbin = find_program("dumpbin") + if dumpbin is None: + raise FileNotFoundError("Dumpbin was not found in the system, please make sure that is available on the PATH.") + + # Find wheel + print("Finding wheels...") + wheels = glob.glob(osp.join(PACKAGE_ROOT, "dist", "*.whl")) + output_dir = osp.join(PACKAGE_ROOT, "dist", ".wheel-process") + + image_binary = "image.pyd" + video_binary = "video_reader.pyd" + torchvision_binaries = [image_binary, video_binary] + for wheel in wheels: + if osp.exists(output_dir): + shutil.rmtree(output_dir) + + os.makedirs(output_dir) + + print("Unzipping wheel...") + wheel_file = osp.basename(wheel) + wheel_dir = osp.dirname(wheel) + print(f"{wheel_file}") + wheel_name, _ = osp.splitext(wheel_file) + unzip_file(wheel, output_dir) + + print("Finding DLL/PE dependencies...") + output_library = osp.join(output_dir, "torchvision") + for binary in torchvision_binaries: + if osp.exists(osp.join(output_library, binary)): + relocate_dll_library(dumpbin, output_dir, output_library, binary) + + compress_wheel(output_dir, wheel, wheel_dir, wheel_name) + + +if __name__ == "__main__": + if sys.platform == "linux": + patch_linux() + elif sys.platform == "win32": + patch_win() diff --git a/packaging/windows/azure-pipelines-ci.yml b/packaging/windows/azure-pipelines-ci.yml deleted file mode 100644 index 6f9f3468cfe..00000000000 --- a/packaging/windows/azure-pipelines-ci.yml +++ /dev/null @@ -1,11 +0,0 @@ - -# Turn off auto builds for commits -trigger: none -pr: none - -jobs: -- template: templates/build_task.yml - parameters: - package: 'Wheels' - spec: 'CPU' - msagent: true diff --git a/packaging/windows/azure-pipelines.yml b/packaging/windows/azure-pipelines.yml deleted file mode 100644 index d0240570012..00000000000 --- a/packaging/windows/azure-pipelines.yml +++ /dev/null @@ -1,35 +0,0 @@ - -# Turn off auto builds for commits -trigger: none -pr: none - -jobs: -- template: templates/auth_task.yml - -- template: templates/build_task.yml - parameters: - package: 'Wheels' - spec: 'CPU' - msagent: true - -- template: templates/build_task.yml - parameters: - package: 'Conda' - spec: 'CPU' - msagent: true - -- template: templates/build_task.yml - parameters: - package: 'Wheels' - spec: 'CUDA' - msagent: true - -- template: templates/build_task.yml - parameters: - package: 'Conda' - spec: 'CUDA' - msagent: true - -- template: templates/linux_build_task.yml - parameters: - msagent: $(ms.hosted.agent.cpu) diff --git a/packaging/windows/build_vision.bat b/packaging/windows/build_vision.bat deleted file mode 100644 index 995c43905cb..00000000000 --- a/packaging/windows/build_vision.bat +++ 
/dev/null @@ -1,145 +0,0 @@ -@echo off - -:: This script parses args, installs required libraries (miniconda, MKL, -:: Magma), and then delegates to cpu.bat, cuda80.bat, etc. - -IF NOT "%CUDA_VERSION%" == "" IF NOT "%TORCHVISION_BUILD_VERSION%" == "" if NOT "%TORCHVISION_BUILD_NUMBER%" == "" goto env_end -if "%~1"=="" goto arg_error -if "%~2"=="" goto arg_error -if "%~3"=="" goto arg_error -if NOT "%~4"=="" goto arg_error -goto arg_end - -:arg_error - -echo Illegal number of parameters. Pass cuda version, pytorch version, build number -echo CUDA version should be Mm with no dot, e.g. '80' -echo DESIRED_PYTHON should be M.m, e.g. '2.7' -exit /b 1 - -:arg_end - -set CUDA_VERSION=%~1 -set TORCHVISION_BUILD_VERSION=%~2 -set TORCHVISION_BUILD_NUMBER=%~3 - -set BUILD_VERSION=%TORCHVISION_BUILD_VERSION% - -:env_end - -if NOT "%CUDA_VERSION%" == "cpu" ( - set CUDA_PREFIX=cuda%CUDA_VERSION% - set CUVER=cu%CUDA_VERSION% - set FORCE_CUDA=1 -) else ( - set CUDA_PREFIX=cpu - set CUVER=cpu -) - -set BUILD_VISION=1 -REM set TORCH_WHEEL=torch -f https://download.pytorch.org/whl/%CUVER%/stable.html --no-index - -IF "%DESIRED_PYTHON%" == "" set DESIRED_PYTHON=3.5;3.6;3.7 -set DESIRED_PYTHON_PREFIX=%DESIRED_PYTHON:.=% -set DESIRED_PYTHON_PREFIX=py%DESIRED_PYTHON_PREFIX:;=;py% - -set SRC_DIR=%~dp0 -pushd %SRC_DIR% - -:: Install Miniconda3 -set "CONDA_HOME=%CD%\conda" -set "tmp_conda=%CONDA_HOME%" -set "miniconda_exe=%CD%\miniconda.exe" -rmdir /s /q conda -del miniconda.exe -curl -k https://repo.continuum.io/miniconda/Miniconda3-latest-Windows-x86_64.exe -o "%miniconda_exe%" -call ..\conda\install_conda.bat -IF ERRORLEVEL 1 exit /b 1 -set "ORIG_PATH=%PATH%" -set "PATH=%CONDA_HOME%;%CONDA_HOME%\scripts;%CONDA_HOME%\Library\bin;%PATH%" - -:: Create a new conda environment -setlocal EnableDelayedExpansion -FOR %%v IN (%DESIRED_PYTHON%) DO ( - set PYTHON_VERSION_STR=%%v - set PYTHON_VERSION_STR=!PYTHON_VERSION_STR:.=! - conda remove -n py!PYTHON_VERSION_STR! --all -y || rmdir %CONDA_HOME%\envs\py!PYTHON_VERSION_STR! /s - conda create -n py!PYTHON_VERSION_STR! -y -q -c defaults -c conda-forge numpy>=1.11 mkl>=2018 python=%%v ca-certificates scipy av -) - -:: Uncomment for stable releases -:: FOR %%v IN (%DESIRED_PYTHON%) DO ( -:: set PYTHON_VERSION_STR=%%v -:: set PYTHON_VERSION_STR=!PYTHON_VERSION_STR:.=! -:: set "PATH=%CONDA_HOME%\envs\py!PYTHON_VERSION_STR!;%CONDA_HOME%\envs\py!PYTHON_VERSION_STR!\scripts;%CONDA_HOME%\envs\py!PYTHON_VERSION_STR!\Library\bin;%ORIG_PATH%" - -:: if "%CUDA_VERSION%" == "100" ( -:: set TORCH_WHEEL=https://download.pytorch.org/whl/%CUVER%/torch-1.2.0-cp!PYTHON_VERSION_STR!-cp!PYTHON_VERSION_STR!m-win_amd64.whl -:: ) else ( -:: set TORCH_WHEEL=https://download.pytorch.org/whl/%CUVER%/torch-1.2.0%%2B%CUVER%-cp!PYTHON_VERSION_STR!-cp!PYTHON_VERSION_STR!m-win_amd64.whl -:: ) -:: echo Installing !TORCH_WHEEL!... -:: pip install "!TORCH_WHEEL!" -:: ) - -:: Uncomment for nightly releases -FOR %%v IN (%DESIRED_PYTHON%) DO ( - set PYTHON_VERSION_STR=%%v - set PYTHON_VERSION_STR=!PYTHON_VERSION_STR:.=! - set "PATH=%CONDA_HOME%\envs\py!PYTHON_VERSION_STR!;%CONDA_HOME%\envs\py!PYTHON_VERSION_STR!\scripts;%CONDA_HOME%\envs\py!PYTHON_VERSION_STR!\Library\bin;%ORIG_PATH%" - - set TORCH_WHEEL=torch --pre -f https://download.pytorch.org/whl/nightly/%CUVER%/torch_nightly.html - echo Installing !TORCH_WHEEL!... - pip install !TORCH_WHEEL! 
-) - -endlocal - -if "%DEBUG%" == "1" ( - set BUILD_TYPE=debug -) ELSE ( - set BUILD_TYPE=release -) - -:: Install sccache -if "%USE_SCCACHE%" == "1" ( - mkdir %CD%\tmp_bin - curl -k https://s3.amazonaws.com/ossci-windows/sccache.exe --output %CD%\tmp_bin\sccache.exe - if not "%CUDA_VERSION%" == "" ( - copy %CD%\tmp_bin\sccache.exe %CD%\tmp_bin\nvcc.exe - - set CUDA_NVCC_EXECUTABLE=%CD%\tmp_bin\nvcc - set "PATH=%CD%\tmp_bin;%PATH%" - ) -) - -for %%v in (%DESIRED_PYTHON_PREFIX%) do ( - :: Activate Python Environment - set PYTHON_PREFIX=%%v - set "PATH=%CONDA_HOME%\envs\%%v;%CONDA_HOME%\envs\%%v\scripts;%CONDA_HOME%\envs\%%v\Library\bin;%ORIG_PATH%" - if defined INCLUDE ( - set "INCLUDE=%INCLUDE%;%CONDA_HOME%\envs\%%v\Library\include" - ) else ( - set "INCLUDE=%CONDA_HOME%\envs\%%v\Library\include" - ) - if defined LIB ( - set "LIB=%LIB%;%CONDA_HOME%\envs\%%v\Library\lib" - ) else ( - set "LIB=%CONDA_HOME%\envs\%%v\Library\lib" - ) - @setlocal - :: Set Flags - if NOT "%CUDA_VERSION%"=="cpu" ( - set CUDNN_VERSION=7 - ) - call %CUDA_PREFIX%.bat - IF ERRORLEVEL 1 exit /b 1 - call internal\test.bat - IF ERRORLEVEL 1 exit /b 1 - @endlocal -) - -set "PATH=%ORIG_PATH%" -popd - -IF ERRORLEVEL 1 exit /b 1 diff --git a/packaging/windows/cpu.bat b/packaging/windows/cpu.bat deleted file mode 100644 index 392a687f9dc..00000000000 --- a/packaging/windows/cpu.bat +++ /dev/null @@ -1,37 +0,0 @@ -@echo off - -IF NOT "%BUILD_VISION%" == "" ( - set MODULE_NAME=vision -) ELSE ( - set MODULE_NAME=pytorch -) - -IF NOT EXIST "setup.py" IF NOT EXIST "%MODULE_NAME%" ( - call internal\clone.bat - cd .. - IF ERRORLEVEL 1 goto eof -) ELSE ( - call internal\clean.bat -) - -call internal\check_deps.bat -IF ERRORLEVEL 1 goto eof - -REM Check for optional components - -echo Disabling CUDA -set NO_CUDA=1 -set USE_CUDA=0 - -IF "%BUILD_VISION%" == "" ( - call internal\check_opts.bat - IF ERRORLEVEL 1 goto eof - - call internal\copy_cpu.bat - IF ERRORLEVEL 1 goto eof -) - -call internal\setup.bat -IF ERRORLEVEL 1 goto eof - -:eof diff --git a/packaging/windows/cuda101.bat b/packaging/windows/cuda101.bat deleted file mode 100644 index db397d593c8..00000000000 --- a/packaging/windows/cuda101.bat +++ /dev/null @@ -1,59 +0,0 @@ -@echo off - -IF NOT "%BUILD_VISION%" == "" ( - set MODULE_NAME=vision -) ELSE ( - set MODULE_NAME=pytorch -) - -IF NOT EXIST "setup.py" IF NOT EXIST "%MODULE_NAME%" ( - call internal\clone.bat - cd .. 
- IF ERRORLEVEL 1 goto eof -) ELSE ( - call internal\clean.bat -) - -call internal\check_deps.bat -IF ERRORLEVEL 1 goto eof - -REM Check for optional components - -set NO_CUDA= -set CMAKE_GENERATOR=Visual Studio 15 2017 Win64 - -IF "%NVTOOLSEXT_PATH%"=="" ( - echo NVTX ^(Visual Studio Extension ^for CUDA^) ^not installed, failing - exit /b 1 - goto optcheck -) - -IF "%CUDA_PATH_V10_1%"=="" ( - echo CUDA 10.1 not found, failing - exit /b 1 -) ELSE ( - IF "%BUILD_VISION%" == "" ( - set TORCH_CUDA_ARCH_LIST=3.5;5.0+PTX;6.0;6.1;7.0;7.5 - set TORCH_NVCC_FLAGS=-Xfatbin -compress-all - ) ELSE ( - set NVCC_FLAGS=-D__CUDA_NO_HALF_OPERATORS__ --expt-relaxed-constexpr -gencode=arch=compute_35,code=sm_35 -gencode=arch=compute_50,code=sm_50 -gencode=arch=compute_60,code=sm_60 -gencode=arch=compute_70,code=sm_70 -gencode=arch=compute_75,code=sm_75 -gencode=arch=compute_50,code=compute_50 - ) - - set "CUDA_PATH=%CUDA_PATH_V10_1%" - set "PATH=%CUDA_PATH_V10_1%\bin;%PATH%" -) - -:optcheck - -IF "%BUILD_VISION%" == "" ( - call internal\check_opts.bat - IF ERRORLEVEL 1 goto eof - - call internal\copy.bat - IF ERRORLEVEL 1 goto eof -) - -call internal\setup.bat -IF ERRORLEVEL 1 goto eof - -:eof diff --git a/packaging/windows/cuda92.bat b/packaging/windows/cuda92.bat deleted file mode 100644 index 0bfcdc8e463..00000000000 --- a/packaging/windows/cuda92.bat +++ /dev/null @@ -1,59 +0,0 @@ -@echo off - -IF NOT "%BUILD_VISION%" == "" ( - set MODULE_NAME=vision -) ELSE ( - set MODULE_NAME=pytorch -) - -IF NOT EXIST "setup.py" IF NOT EXIST "%MODULE_NAME%" ( - call internal\clone.bat - cd .. - IF ERRORLEVEL 1 goto eof -) ELSE ( - call internal\clean.bat -) - -call internal\check_deps.bat -IF ERRORLEVEL 1 goto eof - -REM Check for optional components - -set USE_CUDA= -set CMAKE_GENERATOR=Visual Studio 15 2017 Win64 - -IF "%NVTOOLSEXT_PATH%"=="" ( - echo NVTX ^(Visual Studio Extension ^for CUDA^) ^not installed, failing - exit /b 1 - goto optcheck -) - -IF "%CUDA_PATH_V9_2%"=="" ( - echo CUDA 9.2 not found, failing - exit /b 1 -) ELSE ( - IF "%BUILD_VISION%" == "" ( - set TORCH_CUDA_ARCH_LIST=3.5;5.0+PTX;6.0;6.1;7.0 - set TORCH_NVCC_FLAGS=-Xfatbin -compress-all - ) ELSE ( - set NVCC_FLAGS=-D__CUDA_NO_HALF_OPERATORS__ --expt-relaxed-constexpr -gencode=arch=compute_35,code=sm_35 -gencode=arch=compute_50,code=sm_50 -gencode=arch=compute_60,code=sm_60 -gencode=arch=compute_61,code=sm_61 -gencode=arch=compute_70,code=sm_70 -gencode=arch=compute_50,code=compute_50 - ) - - set "CUDA_PATH=%CUDA_PATH_V9_2%" - set "PATH=%CUDA_PATH_V9_2%\bin;%PATH%" -) - -:optcheck - -IF "%BUILD_VISION%" == "" ( - call internal\check_opts.bat - IF ERRORLEVEL 1 goto eof - - call internal\copy.bat - IF ERRORLEVEL 1 goto eof -) - -call internal\setup.bat -IF ERRORLEVEL 1 goto eof - -:eof diff --git a/packaging/windows/internal/auth.bat b/packaging/windows/internal/auth.bat deleted file mode 100644 index c874bce493c..00000000000 --- a/packaging/windows/internal/auth.bat +++ /dev/null @@ -1,46 +0,0 @@ -@echo off - -: From the following doc, the build won't be triggered if the users don't sign in daily. -: https://docs.microsoft.com/en-us/azure/devops/pipelines/build/triggers?tabs=yaml&view=vsts#my-build-didnt-run-what-happened -: To avoid this problem, we can just go through the sign in process using the following command. 
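The auth.bat body that follows implements sign-in retries with exponential backoff: RETRY_TIMES starts at 10 and SLEEP_TIME at 2 seconds, the sleep doubling on every failed attempt (the `waitfor` on an event that never fires is a batch idiom for sleeping). Roughly the same logic in Python — a sketch only, with `requests` as an assumed third-party dependency:

```python
import time
import requests  # assumed third-party dependency for this sketch

def login_with_backoff(url, auth, retries=10, sleep=2.0):
    """Retry a sign-in request, doubling the delay after each failure.

    Mirrors auth.bat: RETRY_TIMES starts at 10, SLEEP_TIME at 2 seconds
    and doubles per attempt; anything but HTTP 200 counts as a failure.
    """
    for _ in range(retries):
        if requests.get(url, auth=auth).status_code == 200:
            return True
        time.sleep(sleep)
        sleep *= 2
    return False

# Hypothetical usage, matching the endpoint the script polls:
# login_with_backoff("https://dev.azure.com/pytorch", ("user", "token"))
```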
- -:auth_start - -if "%RETRY_TIMES%" == "" ( - set /a RETRY_TIMES=10 - set /a SLEEP_TIME=2 -) else ( - set /a RETRY_TIMES=%RETRY_TIMES%-1 - set /a SLEEP_TIME=%SLEEP_TIME%*2 -) - -for /f "usebackq tokens=*" %%i in (`curl -so NUL -w "%%{http_code}" -u %VSTS_AUTH% https://dev.azure.com/pytorch`) do ( - set STATUS_CODE=%%i -) - -IF NOT "%STATUS_CODE%" == "200" ( - echo Auth retry times remaining: %RETRY_TIMES% - echo Sleep time: %SLEEP_TIME% seconds - IF %RETRY_TIMES% EQU 0 ( - echo Auth failed - goto err - ) - waitfor SomethingThatIsNeverHappening /t %SLEEP_TIME% 2>nul || ver >nul - goto auth_start -) ELSE ( - echo Login Attempt Succeeded - goto auth_end -) - -:err - -: Throw a warning if it fails -powershell -c "Write-Warning 'Login Attempt Failed'" - -:auth_end - -set RETRY_TIMES= -set SLEEP_TIME= -set STATUS_CODE= - -exit /b 0 diff --git a/packaging/windows/internal/build_cmake.bat b/packaging/windows/internal/build_cmake.bat new file mode 100644 index 00000000000..a29160538d2 --- /dev/null +++ b/packaging/windows/internal/build_cmake.bat @@ -0,0 +1,3 @@ +@echo on +msbuild "-p:Configuration=Release" "-p:BuildInParallel=true" "-p:MultiProcessorCompilation=true" "-p:CL_MPCount=%1" torchvision.vcxproj -maxcpucount:%1 +msbuild "-p:Configuration=Release" "-p:BuildInParallel=true" "-p:MultiProcessorCompilation=true" "-p:CL_MPCount=%1" INSTALL.vcxproj -maxcpucount:%1 diff --git a/packaging/windows/internal/build_cpp_example.bat b/packaging/windows/internal/build_cpp_example.bat new file mode 100644 index 00000000000..129c574e391 --- /dev/null +++ b/packaging/windows/internal/build_cpp_example.bat @@ -0,0 +1,3 @@ +@echo on +set CL=/I"C:\Program Files (x86)\torchvision\include" +msbuild "-p:Configuration=Release" "-p:BuildInParallel=true" "-p:MultiProcessorCompilation=true" "-p:CL_MPCount=%1" run_model.vcxproj -maxcpucount:%1 diff --git a/packaging/windows/internal/check_deps.bat b/packaging/windows/internal/check_deps.bat deleted file mode 100644 index a159d4436d6..00000000000 --- a/packaging/windows/internal/check_deps.bat +++ /dev/null @@ -1,67 +0,0 @@ -@echo off - -REM Check for necessary components - -IF NOT "%PROCESSOR_ARCHITECTURE%"=="AMD64" ( - echo You should use 64 bits Windows to build and run PyTorch - exit /b 1 -) - -IF "%BUILD_VISION%" == "" ( - where /q cmake.exe - - IF ERRORLEVEL 1 ( - echo CMake is required to compile PyTorch on Windows - exit /b 1 - ) -) - -IF NOT EXIST "%ProgramFiles(x86)%\Microsoft Visual Studio\Installer\vswhere.exe" ( - echo Visual Studio 2017 C++ BuildTools is required to compile PyTorch on Windows - exit /b 1 -) - -for /f "usebackq tokens=*" %%i in (`"%ProgramFiles(x86)%\Microsoft Visual Studio\Installer\vswhere.exe" -legacy -products * -version [15^,16^) -property installationPath`) do ( - if exist "%%i" if exist "%%i\VC\Auxiliary\Build\vcvarsall.bat" ( - set "VS15INSTALLDIR=%%i" - set "VS15VCVARSALL=%%i\VC\Auxiliary\Build\vcvarsall.bat" - goto vswhere - ) -) - -:vswhere -IF "%VS15VCVARSALL%"=="" ( - echo Visual Studio 2017 C++ BuildTools is required to compile PyTorch on Windows - exit /b 1 -) - -set MSSdk=1 -set DISTUTILS_USE_SDK=1 - -where /q python.exe - -IF ERRORLEVEL 1 ( - echo Python x64 3.5 or up is required to compile PyTorch on Windows - exit /b 1 -) - -for /F "usebackq delims=" %%i in (`python -c "import sys; print('{0[0]}{0[1]}'.format(sys.version_info))"`) do ( - set /a PYVER=%%i -) - -if %PYVER% LSS 35 ( - echo Warning: PyTorch for Python 2 under Windows is experimental. 
- echo Python x64 3.5 or up is recommended to compile PyTorch on Windows - echo Maybe you can create a virtual environment if you have conda installed: - echo ^> conda create -n test python=3.6 pyyaml mkl numpy - echo ^> activate test -) - -for /F "usebackq delims=" %%i in (`python -c "import struct;print( 8 * struct.calcsize('P'))"`) do ( - set /a PYSIZE=%%i -) - -if %PYSIZE% NEQ 64 ( - echo Python x64 3.5 or up is required to compile PyTorch on Windows - exit /b 1 -) diff --git a/packaging/windows/internal/check_opts.bat b/packaging/windows/internal/check_opts.bat deleted file mode 100644 index 003ad921328..00000000000 --- a/packaging/windows/internal/check_opts.bat +++ /dev/null @@ -1,33 +0,0 @@ -@echo off - -REM Check for optional components - -where /q ninja.exe - -IF NOT ERRORLEVEL 1 ( - echo Ninja found, using it to speed up builds - set CMAKE_GENERATOR=Ninja -) - -where /q clcache.exe - -IF NOT ERRORLEVEL 1 ( - echo clcache found, using it to speed up builds - set CC=clcache - set CXX=clcache -) - -where /q sccache.exe - -IF NOT ERRORLEVEL 1 ( - echo sccache found, using it to speed up builds - set CC=sccache cl - set CXX=sccache cl -) - -IF exist "%MKLProductDir%\mkl\lib\intel64_win" ( - echo MKL found, adding it to build - set "LIB=%MKLProductDir%\mkl\lib\intel64_win;%MKLProductDir%\compiler\lib\intel64_win;%LIB%"; -) - -exit /b 0 diff --git a/packaging/windows/internal/clean.bat b/packaging/windows/internal/clean.bat deleted file mode 100644 index 7489640f49a..00000000000 --- a/packaging/windows/internal/clean.bat +++ /dev/null @@ -1,5 +0,0 @@ -@echo off - -cd %MODULE_NAME% -python setup.py clean -cd .. diff --git a/packaging/windows/internal/clone.bat b/packaging/windows/internal/clone.bat deleted file mode 100644 index 4ba181fa804..00000000000 --- a/packaging/windows/internal/clone.bat +++ /dev/null @@ -1,56 +0,0 @@ -@echo off - -:: The conda and wheels jobs are separated on Windows, so we don't need to clone again. 
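clone.bat below pins "latest" builds to the newest commit made before midnight of NIGHTLIES_DATE rather than whatever HEAD happens to be at build time, so a rebuilt nightly is reproducible. A small Python equivalent of that date-pinning step:

```python
import subprocess

def resolve_nightly_commit(nightlies_date: str) -> str:
    """Return the last commit before 0:00 on NIGHTLIES_DATE (YYYY_mm_dd).

    Same steps as the batch code: turn 2019_09_04 into 2019-09-04, then
    ask git for the newest commit before that date.
    """
    git_date = nightlies_date.replace("_", "-")
    return subprocess.check_output(
        ["git", "log", "--before", git_date, "-n", "1", "--pretty=%H"],
        text=True,
    ).strip()
```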
-IF "%BUILD_VISION%" == "" ( - if exist "%NIGHTLIES_PYTORCH_ROOT%" ( - xcopy /E /Y /Q "%NIGHTLIES_PYTORCH_ROOT%" pytorch\ - cd pytorch - goto submodule - ) -) - -git clone https://github.com/%PYTORCH_REPO%/%MODULE_NAME% - -cd %MODULE_NAME% - -IF NOT "%BUILD_VISION%" == "" goto latest_end - -IF "%PYTORCH_BRANCH%" == "latest" ( goto latest_start ) else ( goto latest_end ) - -:latest_start - -if "%NIGHTLIES_DATE%" == "" ( goto date_start ) else ( goto date_end ) - -:date_start - -set "DATE_CMD=Get-Date ([System.TimeZoneInfo]::ConvertTimeFromUtc((Get-Date).ToUniversalTime(), [System.TimeZoneInfo]::FindSystemTimeZoneById('Pacific Standard Time'))) -f 'yyyy_MM_dd'" -set "DATE_COMPACT_CMD=Get-Date ([System.TimeZoneInfo]::ConvertTimeFromUtc((Get-Date).ToUniversalTime(), [System.TimeZoneInfo]::FindSystemTimeZoneById('Pacific Standard Time'))) -f 'yyyyMMdd'" - -FOR /F "delims=" %%i IN ('powershell -c "%DATE_CMD%"') DO set NIGHTLIES_DATE=%%i -FOR /F "delims=" %%i IN ('powershell -c "%DATE_COMPACT_CMD%"') DO set NIGHTLIES_DATE_COMPACT=%%i - -:date_end - -if "%NIGHTLIES_DATE_COMPACT%" == "" set NIGHTLIES_DATE_COMPACT=%NIGHTLIES_DATE:~0,4%%NIGHTLIES_DATE:~5,2%%NIGHTLIES_DATE:~8,2% - -:: Switch to the latest commit by 11:59 yesterday -echo PYTORCH_BRANCH is set to latest so I will find the last commit -echo before 0:00 midnight on %NIGHTLIES_DATE% -set git_date=%NIGHTLIES_DATE:_=-% -FOR /F "delims=" %%i IN ('git log --before %git_date% -n 1 "--pretty=%%H"') DO set last_commit=%%i -echo Setting PYTORCH_BRANCH to %last_commit% since that was the last -echo commit before %NIGHTLIES_DATE% -set PYTORCH_BRANCH=%last_commit% - -:latest_end - -IF "%PYTORCH_BRANCH%" == "" ( - set PYTORCH_BRANCH=v%TORCHVISION_BUILD_VERSION% -) -git checkout %PYTORCH_BRANCH% -IF ERRORLEVEL 1 git checkout tags/%PYTORCH_BRANCH% - -:submodule - -git submodule update --init --recursive -IF ERRORLEVEL 1 exit /b 1 diff --git a/packaging/windows/internal/copy.bat b/packaging/windows/internal/copy.bat deleted file mode 100644 index b4aa397c6c1..00000000000 --- a/packaging/windows/internal/copy.bat +++ /dev/null @@ -1,13 +0,0 @@ -copy "%CUDA_PATH%\bin\cusparse64_%CUDA_VERSION%.dll*" pytorch\torch\lib -copy "%CUDA_PATH%\bin\cublas64_%CUDA_VERSION%.dll*" pytorch\torch\lib -copy "%CUDA_PATH%\bin\cudart64_%CUDA_VERSION%.dll*" pytorch\torch\lib -copy "%CUDA_PATH%\bin\curand64_%CUDA_VERSION%.dll*" pytorch\torch\lib -copy "%CUDA_PATH%\bin\cufft64_%CUDA_VERSION%.dll*" pytorch\torch\lib -copy "%CUDA_PATH%\bin\cufftw64_%CUDA_VERSION%.dll*" pytorch\torch\lib - -copy "%CUDA_PATH%\bin\cudnn64_%CUDNN_VERSION%.dll*" pytorch\torch\lib -copy "%CUDA_PATH%\bin\nvrtc64_%CUDA_VERSION%*.dll*" pytorch\torch\lib -copy "%CUDA_PATH%\bin\nvrtc-builtins64_%CUDA_VERSION%.dll*" pytorch\torch\lib - -copy "C:\Program Files\NVIDIA Corporation\NvToolsExt\bin\x64\nvToolsExt64_1.dll*" pytorch\torch\lib -copy "%CONDA_LIB_PATH%\libiomp*5md.dll" pytorch\torch\lib diff --git a/packaging/windows/internal/copy_cpu.bat b/packaging/windows/internal/copy_cpu.bat deleted file mode 100644 index f5b9d11515f..00000000000 --- a/packaging/windows/internal/copy_cpu.bat +++ /dev/null @@ -1 +0,0 @@ -copy "%CONDA_LIB_PATH%\libiomp*5md.dll" pytorch\torch\lib diff --git a/packaging/windows/internal/cuda_install.bat b/packaging/windows/internal/cuda_install.bat deleted file mode 100644 index cdd5a9ac206..00000000000 --- a/packaging/windows/internal/cuda_install.bat +++ /dev/null @@ -1,117 +0,0 @@ -@echo on - -if "%CUDA_VERSION%" == "cpu" ( - echo Skipping for CPU builds - exit /b 0 -) - -set 
SRC_DIR=%~dp0\.. - -if not exist "%SRC_DIR%\temp_build" mkdir "%SRC_DIR%\temp_build" - -set /a CUDA_VER=%CUDA_VERSION% -set CUDA_VER_MAJOR=%CUDA_VERSION:~0,-1% -set CUDA_VER_MINOR=%CUDA_VERSION:~-1,1% -set CUDA_VERSION_STR=%CUDA_VER_MAJOR%.%CUDA_VER_MINOR% - -if %CUDA_VER% EQU 92 goto cuda92 -if %CUDA_VER% EQU 100 goto cuda100 -if %CUDA_VER% EQU 101 goto cuda101 - -echo CUDA %CUDA_VERSION_STR% is not supported -exit /b 1 - -:cuda92 -if not exist "%SRC_DIR%\temp_build\cuda_9.2.148_win10.exe" ( - curl -k -L https://ossci-windows.s3.amazonaws.com/win2016/cuda_9.2.148_win10.exe --output "%SRC_DIR%\temp_build\cuda_9.2.148_win10.exe" - if errorlevel 1 exit /b 1 - set "CUDA_SETUP_FILE=%SRC_DIR%\temp_build\cuda_9.2.148_win10.exe" - set "ARGS=nvcc_9.2 cuobjdump_9.2 nvprune_9.2 cupti_9.2 cublas_9.2 cublas_dev_9.2 cudart_9.2 cufft_9.2 cufft_dev_9.2 curand_9.2 curand_dev_9.2 cusolver_9.2 cusolver_dev_9.2 cusparse_9.2 cusparse_dev_9.2 nvgraph_9.2 nvgraph_dev_9.2 npp_9.2 npp_dev_9.2 nvrtc_9.2 nvrtc_dev_9.2 nvml_dev_9.2" -) - -if not exist "%SRC_DIR%\temp_build\cudnn-9.2-windows10-x64-v7.2.1.38.zip" ( - curl -k -L https://ossci-windows.s3.amazonaws.com/win2016/cudnn-9.2-windows10-x64-v7.2.1.38.zip --output "%SRC_DIR%\temp_build\cudnn-9.2-windows10-x64-v7.2.1.38.zip" - if errorlevel 1 exit /b 1 - set "CUDNN_SETUP_FILE=%SRC_DIR%\temp_build\cudnn-9.2-windows10-x64-v7.2.1.38.zip" -) - -goto cuda_common - -:cuda100 - -if not exist "%SRC_DIR%\temp_build\cuda_10.0.130_411.31_win10.exe" ( - curl -k -L https://ossci-windows.s3.amazonaws.com/win2016/cuda_10.0.130_411.31_win10.exe --output "%SRC_DIR%\temp_build\cuda_10.0.130_411.31_win10.exe" - if errorlevel 1 exit /b 1 - set "CUDA_SETUP_FILE=%SRC_DIR%\temp_build\cuda_10.0.130_411.31_win10.exe" - set "ARGS=nvcc_10.0 cuobjdump_10.0 nvprune_10.0 cupti_10.0 cublas_10.0 cublas_dev_10.0 cudart_10.0 cufft_10.0 cufft_dev_10.0 curand_10.0 curand_dev_10.0 cusolver_10.0 cusolver_dev_10.0 cusparse_10.0 cusparse_dev_10.0 nvgraph_10.0 nvgraph_dev_10.0 npp_10.0 npp_dev_10.0 nvrtc_10.0 nvrtc_dev_10.0 nvml_dev_10.0" -) - -if not exist "%SRC_DIR%\temp_build\cudnn-10.0-windows10-x64-v7.4.1.5.zip" ( - curl -k -L https://ossci-windows.s3.amazonaws.com/win2016/cudnn-10.0-windows10-x64-v7.4.1.5.zip --output "%SRC_DIR%\temp_build\cudnn-10.0-windows10-x64-v7.4.1.5.zip" - if errorlevel 1 exit /b 1 - set "CUDNN_SETUP_FILE=%SRC_DIR%\temp_build\cudnn-10.0-windows10-x64-v7.4.1.5.zip" -) - -goto cuda_common - -:cuda101 - -if not exist "%SRC_DIR%\temp_build\cuda_10.1.243_426.00_win10.exe" ( - curl -k -L https://ossci-windows.s3.amazonaws.com/cuda_10.1.243_426.00_win10.exe --output "%SRC_DIR%\temp_build\cuda_10.1.243_426.00_win10.exe" - if errorlevel 1 exit /b 1 - set "CUDA_SETUP_FILE=%SRC_DIR%\temp_build\cuda_10.1.243_426.00_win10.exe" - set "ARGS=nvcc_10.1 cuobjdump_10.1 nvprune_10.1 cupti_10.1 cublas_10.1 cublas_dev_10.1 cudart_10.1 cufft_10.1 cufft_dev_10.1 curand_10.1 curand_dev_10.1 cusolver_10.1 cusolver_dev_10.1 cusparse_10.1 cusparse_dev_10.1 nvgraph_10.1 nvgraph_dev_10.1 npp_10.1 npp_dev_10.1 nvrtc_10.1 nvrtc_dev_10.1 nvml_dev_10.1" -) - -if not exist "%SRC_DIR%\temp_build\cudnn-10.1-windows10-x64-v7.6.4.38.zip" ( - curl -k -L https://ossci-windows.s3.amazonaws.com/cudnn-10.1-windows10-x64-v7.6.4.38.zip --output "%SRC_DIR%\temp_build\cudnn-10.1-windows10-x64-v7.6.4.38.zip" - if errorlevel 1 exit /b 1 - set "CUDNN_SETUP_FILE=%SRC_DIR%\temp_build\cudnn-10.1-windows10-x64-v7.6.4.38.zip" -) - -goto cuda_common - -:cuda_common - -if not exist "%SRC_DIR%\temp_build\NvToolsExt.7z" ( - curl -k 
-L https://www.dropbox.com/s/9mcolalfdj4n979/NvToolsExt.7z?dl=1 --output "%SRC_DIR%\temp_build\NvToolsExt.7z" - if errorlevel 1 exit /b 1 -) - -echo Installing CUDA toolkit... -7z x %CUDA_SETUP_FILE% -o"%SRC_DIR%\temp_build\cuda" -pushd "%SRC_DIR%\temp_build\cuda" -start /wait setup.exe -s %ARGS% -popd - -echo Installing VS integration... -xcopy /Y "%SRC_DIR%\temp_build\cuda\CUDAVisualStudioIntegration\extras\visual_studio_integration\MSBuildExtensions\*.*" "C:\Program Files (x86)\Microsoft Visual Studio\2017\BuildTools\Common7\IDE\VC\VCTargets\BuildCustomizations" - -echo Installing NvToolsExt... -7z x %SRC_DIR%\temp_build\NvToolsExt.7z -o"%SRC_DIR%\temp_build\NvToolsExt" -mkdir "%ProgramFiles%\NVIDIA Corporation\NvToolsExt\bin\x64" -mkdir "%ProgramFiles%\NVIDIA Corporation\NvToolsExt\include" -mkdir "%ProgramFiles%\NVIDIA Corporation\NvToolsExt\lib\x64" -xcopy /Y "%SRC_DIR%\temp_build\NvToolsExt\bin\x64\*.*" "%ProgramFiles%\NVIDIA Corporation\NvToolsExt\bin\x64" -xcopy /Y "%SRC_DIR%\temp_build\NvToolsExt\include\*.*" "%ProgramFiles%\NVIDIA Corporation\NvToolsExt\include" -xcopy /Y "%SRC_DIR%\temp_build\NvToolsExt\lib\x64\*.*" "%ProgramFiles%\NVIDIA Corporation\NvToolsExt\lib\x64" - -echo Setting up environment... -set "PATH=%ProgramFiles%\NVIDIA GPU Computing Toolkit\CUDA\v%CUDA_VERSION_STR%\bin;%ProgramFiles%\NVIDIA GPU Computing Toolkit\CUDA\v%CUDA_VERSION_STR%\libnvvp;%PATH%" -set "CUDA_PATH=%ProgramFiles%\NVIDIA GPU Computing Toolkit\CUDA\v%CUDA_VERSION_STR%" -set "CUDA_PATH_V%CUDA_VER_MAJOR%_%CUDA_VER_MINOR%=%ProgramFiles%\NVIDIA GPU Computing Toolkit\CUDA\v%CUDA_VERSION_STR%" -set "NVTOOLSEXT_PATH=%ProgramFiles%\NVIDIA Corporation\NvToolsExt\bin\x64" - -if not exist "%ProgramFiles%\NVIDIA GPU Computing Toolkit\CUDA\v%CUDA_VERSION_STR%\bin\nvcc.exe" ( - echo CUDA %CUDA_VERSION_STR% installed failed. - exit /b 1 -) - -echo Installing cuDNN... -7z x %CUDNN_SETUP_FILE% -o"%SRC_DIR%\temp_build\cudnn" -xcopy /Y "%SRC_DIR%\temp_build\cudnn\cuda\bin\*.*" "%ProgramFiles%\NVIDIA GPU Computing Toolkit\CUDA\v%CUDA_VERSION_STR%\bin" -xcopy /Y "%SRC_DIR%\temp_build\cudnn\cuda\lib\x64\*.*" "%ProgramFiles%\NVIDIA GPU Computing Toolkit\CUDA\v%CUDA_VERSION_STR%\lib\x64" -xcopy /Y "%SRC_DIR%\temp_build\cudnn\cuda\include\*.*" "%ProgramFiles%\NVIDIA GPU Computing Toolkit\CUDA\v%CUDA_VERSION_STR%\include" - -echo Cleaning temp files -rd /s /q "%SRC_DIR%\temp_build" || ver > nul diff --git a/packaging/windows/internal/dep_install.bat b/packaging/windows/internal/dep_install.bat deleted file mode 100644 index db665a99f26..00000000000 --- a/packaging/windows/internal/dep_install.bat +++ /dev/null @@ -1,14 +0,0 @@ -@echo off - -REM curl -k https://www.7-zip.org/a/7z1805-x64.exe -O -REM if errorlevel 1 exit /b 1 - -REM start /wait 7z1805-x64.exe /S -REM if errorlevel 1 exit /b 1 - -REM set "PATH=%ProgramFiles%\7-Zip;%PATH%" - -choco feature disable --name showDownloadProgress -choco feature enable --name allowGlobalConfirmation - -choco install curl 7zip diff --git a/packaging/windows/internal/env_fix.bat b/packaging/windows/internal/env_fix.bat deleted file mode 100644 index dd0aaf5f2d5..00000000000 --- a/packaging/windows/internal/env_fix.bat +++ /dev/null @@ -1,31 +0,0 @@ -@echo off - -:: Caution: Please don't use this script locally -:: It may destroy your build environment. 
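cuda_install.bat above only trusts the toolkit install once nvcc.exe shows up in the versioned directory, and it then folds the cuDNN files into the same tree so no separate cuDNN path is needed at build time. A hedged sketch of an equivalent post-install check — the helper name is ours, not the script's:

```python
import os
from pathlib import Path

def verify_cuda_install(version_str: str) -> None:
    """Post-install sanity check in the spirit of cuda_install.bat.

    The batch script only trusts the toolkit once nvcc.exe exists under
    the versioned directory; cuDNN files are then copied into the same
    tree, so bin/, include/ and lib/x64 should all be present.
    """
    root = (Path(os.environ["ProgramFiles"]) / "NVIDIA GPU Computing Toolkit"
            / "CUDA" / f"v{version_str}")
    if not (root / "bin" / "nvcc.exe").is_file():
        raise RuntimeError(f"CUDA {version_str} installation failed: nvcc.exe not found")
    for sub in ("bin", "include", "lib/x64"):
        if not (root / sub).is_dir():
            raise RuntimeError(f"Incomplete CUDA tree: missing {sub}")

# verify_cuda_install("10.1")
```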
- -setlocal - -IF NOT EXIST "%ProgramFiles(x86)%\Microsoft Visual Studio\Installer\vswhere.exe" ( - echo Visual Studio 2017 C++ BuildTools is required to compile PyTorch on Windows - exit /b 1 -) - -for /f "usebackq tokens=*" %%i in (`"%ProgramFiles(x86)%\Microsoft Visual Studio\Installer\vswhere.exe" -legacy -products * -version [15^,16^) -property installationPath`) do ( - if exist "%%i" if exist "%%i\VC\Auxiliary\Build\vcvarsall.bat" ( - set "VS15INSTALLDIR=%%i" - set "VS15VCVARSALL=%%i\VC\Auxiliary\Build\vcvarsall.bat" - goto vswhere - ) -) - -:vswhere - -IF "%VS15VCVARSALL%"=="" ( - echo Visual Studio 2017 C++ BuildTools is required to compile PyTorch on Windows - exit /b 1 -) - -call "%VS15VCVARSALL%" x86_amd64 -for /f "usebackq tokens=*" %%i in (`where link.exe`) do move "%%i" "%%i.bak" - -endlocal diff --git a/packaging/windows/internal/nightly_defaults.bat b/packaging/windows/internal/nightly_defaults.bat deleted file mode 100644 index 1bba23209b1..00000000000 --- a/packaging/windows/internal/nightly_defaults.bat +++ /dev/null @@ -1,200 +0,0 @@ -@echo on - -if "%~1"=="" goto arg_error -if NOT "%~2"=="" goto arg_error -goto arg_end - -:arg_error - -echo Illegal number of parameters. Pass package type `Conda` or `Wheels`. -exit /b 1 - -:arg_end - -echo "nightly_defaults.bat at %CD% starting at %DATE%" - -set SRC_DIR=%~dp0\.. - -:: NIGHTLIES_FOLDER -:: N.B. this is also defined in cron_start.sh -:: An arbitrary root folder to store all nightlies folders, each of which is a -:: parent level date folder with separate subdirs for logs, wheels, conda -:: packages, etc. This should be kept the same across all scripts called in a -:: cron job, so it only has a default value in the top-most script -:: build_cron.sh to avoid the default values from diverging. -if "%NIGHTLIES_FOLDER%" == "" set "NIGHTLIES_FOLDER=%SRC_DIR%" - -:: NIGHTLIES_DATE -:: N.B. this is also defined in cron_start.sh -:: The date in YYYY_mm_dd format that we are building for. If this is not -:: already set, then this will first try to find the date of the nightlies -:: folder that this builder repo exists in; e.g. if this script exists in -:: some_dir/2019_09_04/builder/cron/ then this will be set to 2019_09_04 (must -:: match YYYY_mm_dd). This is for convenience when debugging/uploading past -:: dates, so that you don't have to set NIGHTLIES_DATE yourself. If a date -:: folder cannot be found in that exact location, then this will default to -:: the current date. 
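The date block that follows shells out to PowerShell to compute NIGHTLIES_DATE in Pacific time plus a compact yyyyMMdd variant. For reference, a Python equivalent, under the assumption that the Windows zone id 'Pacific Standard Time' maps to IANA's America/Los_Angeles:

```python
from datetime import datetime
from zoneinfo import ZoneInfo  # Python 3.9+; the batch file shells out to PowerShell instead

def nightly_dates():
    """Compute NIGHTLIES_DATE (yyyy_MM_dd) and NIGHTLIES_DATE_COMPACT (yyyyMMdd).

    Assumes 'Pacific Standard Time' corresponds to America/Los_Angeles.
    The compact form could also be sliced out of the long form, as the
    batch fallback does with %NIGHTLIES_DATE:~0,4% and friends.
    """
    now = datetime.now(ZoneInfo("America/Los_Angeles"))
    return now.strftime("%Y_%m_%d"), now.strftime("%Y%m%d")
```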
- - -if "%NIGHTLIES_DATE%" == "" ( goto date_start ) else ( goto date_end ) - -:date_start - -set "DATE_CMD=Get-Date ([System.TimeZoneInfo]::ConvertTimeFromUtc((Get-Date).ToUniversalTime(), [System.TimeZoneInfo]::FindSystemTimeZoneById('Pacific Standard Time'))) -f 'yyyy_MM_dd'" -set "DATE_COMPACT_CMD=Get-Date ([System.TimeZoneInfo]::ConvertTimeFromUtc((Get-Date).ToUniversalTime(), [System.TimeZoneInfo]::FindSystemTimeZoneById('Pacific Standard Time'))) -f 'yyyyMMdd'" - -FOR /F "delims=" %%i IN ('powershell -c "%DATE_CMD%"') DO set NIGHTLIES_DATE=%%i -FOR /F "delims=" %%i IN ('powershell -c "%DATE_COMPACT_CMD%"') DO set NIGHTLIES_DATE_COMPACT=%%i - -:date_end - -if "%NIGHTLIES_DATE_COMPACT%" == "" set NIGHTLIES_DATE_COMPACT=%NIGHTLIES_DATE:~0,4%%NIGHTLIES_DATE:~5,2%%NIGHTLIES_DATE:~8,2% - -:: Used in lots of places as the root dir to store all conda/wheel/manywheel -:: packages as well as logs for the day -set today=%NIGHTLIES_FOLDER%\%NIGHTLIES_DATE% -mkdir "%today%" || ver >nul - - -::############################################################################# -:: Add new configuration variables below this line. 'today' should always be -:: defined ASAP to avoid weird errors -::############################################################################# - - -:: List of people to email when things go wrong. This is passed directly to -:: `mail -t` -:: TODO: Not supported yet -if "%NIGHTLIES_EMAIL_LIST%" == "" set NIGHTLIES_EMAIL_LIST=peterghost86@gmail.com - -:: PYTORCH_CREDENTIALS_FILE -:: A bash file that exports credentials needed to upload to aws and anaconda. -:: Needed variables are PYTORCH_ANACONDA_USERNAME, PYTORCH_ANACONDA_PASSWORD, -:: AWS_ACCESS_KEY_ID, and AWS_SECRET_ACCESS_KEY. Or it can just export the AWS -:: keys and then prepend a logged-in conda installation to the path. -:: TODO: Not supported yet -if "%PYTORCH_CREDENTIALS_FILE%" == "" set PYTORCH_CREDENTIALS_FILE=/c/Users/administrator/nightlies/credentials.sh - -:: Location of the temporary miniconda that is downloaded to install conda-build -:: and aws to upload finished packages TODO this is messy to install this in -:: upload.sh and later use it in upload_logs.sh -if "%CONDA_UPLOADER_INSTALLATION%" == "" set "CONDA_UPLOADER_INSTALLATION=%today%\miniconda" - -:: N.B. BUILDER_REPO and BUILDER_BRANCH are both set in cron_start.sh, as that -:: is the script that actually clones the builder repo that /this/ script is -:: running from. -pushd "%SRC_DIR%\.." -set NIGHTLIES_BUILDER_ROOT=%CD% -popd - -:: The shared pytorch repo to be used by all builds -if "%NIGHTLIES_PYTORCH_ROOT%" == "" set "NIGHTLIES_PYTORCH_ROOT=%today%\vision" - -:: PYTORCH_REPO -:: The Github org/user whose fork of Pytorch to check out (git clone -:: https://github.com//pytorch.git). This will always be cloned -:: fresh to build with. Default is 'pytorch' -if "%PYTORCH_REPO%" == "" set PYTORCH_REPO=pytorch - -:: PYTORCH_BRANCH -:: The branch of Pytorch to checkout for building (git checkout ). -:: This can either be the name of the branch (e.g. git checkout -:: my_branch_name) or can be a git commit (git checkout 4b2674n...). 
Default -:: is 'latest', which is a special term that signals to pull the last commit -:: before 0:00 midnight on the NIGHTLIES_DATE -if "%PYTORCH_BRANCH%" == "" set PYTORCH_BRANCH=latest - -:: Clone the requested pytorch checkout -if exist "%NIGHTLIES_PYTORCH_ROOT%" ( goto clone_end ) else ( goto clone_start ) - -:clone_start - -git clone --recursive "https://github.com/%PYTORCH_REPO%/vision.git" "%NIGHTLIES_PYTORCH_ROOT%" -pushd "%NIGHTLIES_PYTORCH_ROOT%" - -if "%PYTORCH_BRANCH%" == "latest" ( goto latest_start ) else ( goto latest_end ) - -:latest_start - -:: Switch to the latest commit by 11:59 yesterday -echo PYTORCH_BRANCH is set to latest so I will find the last commit -echo before 0:00 midnight on %NIGHTLIES_DATE% -set git_date=%NIGHTLIES_DATE:_=-% -FOR /F "delims=" %%i IN ('git log --before %git_date% -n 1 "--pretty=%%H"') DO set last_commit=%%i -echo Setting PYTORCH_BRANCH to %last_commit% since that was the last -echo commit before %NIGHTLIES_DATE% -set PYTORCH_BRANCH=%last_commit% - -:latest_end - -git checkout "%PYTORCH_BRANCH%" -git submodule update -popd - -:clone_end - -if "%CUDA_VERSION%" == "cpu" ( - set _DESIRED_CUDA=cpu -) else ( - set _DESIRED_CUDA=cu%CUDA_VERSION% -) - -:: PYTORCH_BUILD_VERSION -:: The actual version string. Used in conda like -:: pytorch-nightly==1.0.0.dev20180908 -:: or in manylinux like -:: torch_nightly-1.0.0.dev20180908-cp27-cp27m-linux_x86_64.whl -if "%TORCHVISION_BUILD_VERSION%" == "" set TORCHVISION_BUILD_VERSION=0.5.0.dev%NIGHTLIES_DATE_COMPACT% - -if "%~1" == "Wheels" ( - if not "%CUDA_VERSION%" == "101" ( - set TORCHVISION_BUILD_VERSION=%TORCHVISION_BUILD_VERSION%+%_DESIRED_CUDA% - ) -) - -:: PYTORCH_BUILD_NUMBER -:: This is usually the number 1. If more than one build is uploaded for the -:: same version/date, then this can be incremented to 2,3 etc in which case -:: '.post2' will be appended to the version string of the package. This can -:: be set to '0' only if OVERRIDE_PACKAGE_VERSION is being used to bypass -:: all the version string logic in downstream scripts. Since we use the -:: override below, exporting this shouldn't actually matter. -if "%TORCHVISION_BUILD_NUMBER%" == "" set /a TORCHVISION_BUILD_NUMBER=1 -if %TORCHVISION_BUILD_NUMBER% GTR 1 set TORCHVISION_BUILD_VERSION=%TORCHVISION_BUILD_VERSION%%TORCHVISION_BUILD_NUMBER% - -:: The nightly builds use their own versioning logic, so we override whatever -:: logic is in setup.py or other scripts -:: TODO: Not supported yet -set OVERRIDE_PACKAGE_VERSION=%TORCHVISION_BUILD_VERSION% -set BUILD_VERSION=%TORCHVISION_BUILD_VERSION% - -:: Build folder for conda builds to use -if "%TORCH_CONDA_BUILD_FOLDER%" == "" set TORCH_CONDA_BUILD_FOLDER=torchvision - -:: TORCH_PACKAGE_NAME -:: The name of the package to upload. This should probably be pytorch or -:: pytorch-nightly. N.B. that pip will change all '-' to '_' but conda will -:: not. This is dealt with in downstream scripts. -:: TODO: Not supported yet -if "%TORCH_PACKAGE_NAME%" == "" set TORCH_PACKAGE_NAME=torchvision - -:: PIP_UPLOAD_FOLDER should end in a slash. This is to handle it being empty -:: (when uploading to e.g. whl/cpu/) and also to handle nightlies (when -:: uploading to e.g. /whl/nightly/cpu) -:: TODO: Not supported yet -if "%PIP_UPLOAD_FOLDER%" == "" set "PIP_UPLOAD_FOLDER=nightly\" - -:: The location of the binary_sizes dir in s3 is hardcoded into -:: upload_binary_sizes.sh - -:: DAYS_TO_KEEP -:: How many days to keep around for clean.sh. Build folders older than this -:: will be purged at the end of cron jobs. 
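To summarize the version logic above: nightlies are versioned `<base>.dev<yyyymmdd>`, wheels additionally get a `+cuXY`/`+cpu` local-version suffix except for the default CUDA variant (10.1 here), and a build number above 1 is appended verbatim. A sketch of that assembly — function name and defaults are illustrative:

```python
def nightly_version(date_compact: str, cuda_version: str,
                    package: str = "Wheels", base: str = "0.5.0",
                    build_number: int = 1) -> str:
    """Assemble TORCHVISION_BUILD_VERSION as nightly_defaults.bat does.

    Wheels get a '+cuXY'/'+cpu' local-version suffix except for the
    default CUDA variant (101); a build number above 1 is appended
    verbatim, exactly as the batch concatenation does.
    """
    version = f"{base}.dev{date_compact}"
    desired_cuda = "cpu" if cuda_version == "cpu" else f"cu{cuda_version}"
    if package == "Wheels" and cuda_version != "101":
        version += f"+{desired_cuda}"
    if build_number > 1:
        version += str(build_number)
    return version

# nightly_version("20191025", "92") -> '0.5.0.dev20191025+cu92'
```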
'1' means to keep only the current -:: day. Values less than 1 are not allowed. The default is 5. -:: TODO: Not supported yet -if "%DAYS_TO_KEEP%" == "" set /a DAYS_TO_KEEP=5 -if %DAYS_TO_KEEP% LSS 1 ( - echo DAYS_TO_KEEP cannot be less than 1. - echo A value of 1 means to only keep the build for today - exit /b 1 -) diff --git a/packaging/windows/internal/publish.bat b/packaging/windows/internal/publish.bat deleted file mode 100644 index 7f118bbb6e3..00000000000 --- a/packaging/windows/internal/publish.bat +++ /dev/null @@ -1,89 +0,0 @@ -@echo off - -set SRC_DIR=%~dp0 -pushd %SRC_DIR% - -if NOT "%CUDA_VERSION%" == "cpu" ( - set PACKAGE_SUFFIX=_cuda%CUDA_VERSION% -) else ( - set PACKAGE_SUFFIX= -) - -if "%PACKAGEFULLNAME%" == "Conda" ( - set PACKAGE=conda -) else ( - set PACKAGE=wheels -) - -if not defined PACKAGE_SUFFIX ( - set PUBLISH_BRANCH=vision_%PACKAGE%_%DESIRED_PYTHON% -) else ( - set PUBLISH_BRANCH=vision_%PACKAGE%_%DESIRED_PYTHON%%PACKAGE_SUFFIX% -) - -git clone %ARTIFACT_REPO_URL% -b %PUBLISH_BRANCH% --single-branch >nul 2>&1 - -IF ERRORLEVEL 1 ( - echo Branch %PUBLISH_BRANCH% not exist, falling back to master - set NO_BRANCH=1 - git clone %ARTIFACT_REPO_URL% -b master --single-branch >nul 2>&1 -) - -IF ERRORLEVEL 1 ( - echo Clone failed - goto err -) - -cd pytorch_builder -attrib -s -h -r . /s /d - -:: Empty repo -rd /s /q . || ver >nul - -IF NOT EXIST %PACKAGE% mkdir %PACKAGE% - -xcopy /S /E /Y ..\..\output\*.* %PACKAGE%\ - -git config --global user.name "Azure DevOps" -git config --global user.email peterghost86@gmail.com -git init -git checkout --orphan %PUBLISH_BRANCH% -git remote add origin %ARTIFACT_REPO_URL% -git add . -git commit -m "Update artifacts" - -:push - -if "%RETRY_TIMES%" == "" ( - set /a RETRY_TIMES=10 - set /a SLEEP_TIME=2 -) else ( - set /a RETRY_TIMES=%RETRY_TIMES%-1 - set /a SLEEP_TIME=%SLEEP_TIME%*2 -) - -git push origin %PUBLISH_BRANCH% -f > nul 2>&1 - -IF ERRORLEVEL 1 ( - echo Git push retry times remaining: %RETRY_TIMES% - echo Sleep time: %SLEEP_TIME% seconds - IF %RETRY_TIMES% EQU 0 ( - echo Push failed - goto err - ) - waitfor SomethingThatIsNeverHappening /t %SLEEP_TIME% 2>nul || ver >nul - goto push -) ELSE ( - set RETRY_TIMES= - set SLEEP_TIME= -) - -popd - -exit /b 0 - -:err - -popd - -exit /b 1 diff --git a/packaging/windows/internal/setup.bat b/packaging/windows/internal/setup.bat deleted file mode 100644 index d18dfb35023..00000000000 --- a/packaging/windows/internal/setup.bat +++ /dev/null @@ -1,44 +0,0 @@ -@echo off - -echo The flags after configuring: -echo NO_CUDA=%NO_CUDA% -echo CMAKE_GENERATOR=%CMAKE_GENERATOR% -if "%NO_CUDA%"=="" echo CUDA_PATH=%CUDA_PATH% -if NOT "%CC%"=="" echo CC=%CC% -if NOT "%CXX%"=="" echo CXX=%CXX% -if NOT "%DISTUTILS_USE_SDK%"=="" echo DISTUTILS_USE_SDK=%DISTUTILS_USE_SDK% - -set SRC_DIR=%~dp0\.. - -IF "%VSDEVCMD_ARGS%" == "" ( - call "%VS15VCVARSALL%" x64 -) ELSE ( - call "%VS15VCVARSALL%" x64 %VSDEVCMD_ARGS% -) - -pushd %SRC_DIR% - -IF NOT exist "setup.py" ( - cd %MODULE_NAME% -) - -if "%CXX%"=="sccache cl" ( - sccache --stop-server - sccache --start-server - sccache --zero-stats -) - -:pytorch -:: This stores in e.g. D:/_work/1/s/windows/output/cpu -pip wheel -e . --no-deps --wheel-dir ../output/%CUDA_PREFIX% - -:build_end -IF ERRORLEVEL 1 exit /b 1 -IF NOT ERRORLEVEL 0 exit /b 1 - -if "%CXX%"=="sccache cl" ( - taskkill /im sccache.exe /f /t || ver > nul - taskkill /im nvcc.exe /f /t || ver > nul -) - -cd .. 
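setup.bat above brackets the build with sccache server management so the compile-cache statistics cover exactly one wheel build, then stages the wheel into `../output/<CUDA_PREFIX>` via pip. The same flow as a minimal Python sketch:

```python
import subprocess

def build_wheel(cuda_prefix: str, use_sccache: bool = True) -> None:
    """Drive the wheel build the way setup.bat does, with sccache bracketing.

    The sccache server is restarted and its stats zeroed before the build
    so the cache-hit numbers reflect this run only; the wheel itself is
    built with pip and staged into ../output/<cuda_prefix>.
    """
    if use_sccache:
        subprocess.run(["sccache", "--stop-server"])  # may fail if no server runs
        subprocess.check_call(["sccache", "--start-server"])
        subprocess.check_call(["sccache", "--zero-stats"])
    subprocess.check_call(
        ["pip", "wheel", "-e", ".", "--no-deps", "--wheel-dir", f"../output/{cuda_prefix}"]
    )
```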
diff --git a/packaging/windows/internal/test.bat b/packaging/windows/internal/test.bat deleted file mode 100644 index a87fc1a2858..00000000000 --- a/packaging/windows/internal/test.bat +++ /dev/null @@ -1,79 +0,0 @@ -@echo off - -set SRC_DIR=%~dp0\.. -pushd %SRC_DIR% - -set PYTHON_VERSION=%PYTHON_PREFIX:py=cp% - -if "%BUILD_VISION%" == "" ( - pip install future pytest coverage hypothesis protobuf -) ELSE ( - pip install future pytest "pillow>=4.1.1" mock -) - -for /F "delims=" %%i in ('where /R %SRC_DIR%\output\%CUDA_PREFIX% *%MODULE_NAME%*%PYTHON_VERSION%*.whl') do pip install "%%i" - -if ERRORLEVEL 1 exit /b 1 - -if NOT "%BUILD_VISION%" == "" ( - echo Smoke testing imports - python -c "import torchvision" - if ERRORLEVEL 1 exit /b 1 - goto smoke_test_end -) - -echo Smoke testing imports -python -c "import torch" -if ERRORLEVEL 1 exit /b 1 - -python -c "from caffe2.python import core" -if ERRORLEVEL 1 exit /b 1 - -echo Checking that MKL is available -python -c "import torch; exit(0 if torch.backends.mkl.is_available() else 1)" -if ERRORLEVEL 1 exit /b 1 - -setlocal EnableDelayedExpansion -set NVIDIA_GPU_EXISTS=0 -for /F "delims=" %%i in ('wmic path win32_VideoController get name') do ( - set GPUS=%%i - if not "x!GPUS:NVIDIA=!" == "x!GPUS!" ( - SET NVIDIA_GPU_EXISTS=1 - goto gpu_check_end - ) -) -:gpu_check_end -endlocal & set NVIDIA_GPU_EXISTS=%NVIDIA_GPU_EXISTS% - -if NOT "%CUDA_PREFIX%" == "cpu" if "%NVIDIA_GPU_EXISTS%" == "1" ( - echo Checking that CUDA archs are setup correctly - python -c "import torch; torch.randn([3,5]).cuda()" - if ERRORLEVEL 1 exit /b 1 - - echo Checking that magma is available - python -c "import torch; torch.rand(1).cuda(); exit(0 if torch.cuda.has_magma else 1)" - if ERRORLEVEL 1 exit /b 1 - - echo Checking that CuDNN is available - python -c "import torch; exit(0 if torch.backends.cudnn.is_available() else 1)" - if ERRORLEVEL 1 exit /b 1 -) -:smoke_test_end - -echo Not running unit tests. Hopefully these problems are caught by CI -goto test_end - -if "%BUILD_VISION%" == "" ( - cd pytorch\test - python run_test.py -v -) else ( - cd vision - pytest . -) - -if ERRORLEVEL 1 exit /b 1 - -:test_end - -popd -exit /b 0 diff --git a/packaging/windows/internal/upload.bat b/packaging/windows/internal/upload.bat deleted file mode 100644 index a23391a2935..00000000000 --- a/packaging/windows/internal/upload.bat +++ /dev/null @@ -1,96 +0,0 @@ -@echo off - -IF "%CONDA_UPLOADER_INSTALLATION%" == "" goto precheck_fail -IF "%PYTORCH_FINAL_PACKAGE_DIR%" == "" goto precheck_fail -IF "%today%" == "" goto precheck_fail -IF "%PYTORCH_ANACONDA_USERNAME%" == "" goto precheck_fail -IF "%PYTORCH_ANACONDA_PASSWORD%" == "" goto precheck_fail - -goto precheck_pass - -:precheck_fail - -echo Please run nightly_defaults.bat first. 
-echo And remember to set `PYTORCH_FINAL_PACKAGE_DIR` -echo Finally, don't forget to set anaconda tokens -exit /b 1 - -:precheck_pass - -pushd %today% - -:: Install anaconda client -set "CONDA_HOME=%CONDA_UPLOADER_INSTALLATION%" -set "tmp_conda=%CONDA_HOME%" -set "miniconda_exe=%CD%\miniconda.exe" -rmdir /s /q "%CONDA_HOME%" -del miniconda.exe -curl -k https://repo.continuum.io/miniconda/Miniconda3-latest-Windows-x86_64.exe -o "%miniconda_exe%" -popd - -IF ERRORLEVEL 1 ( - echo Conda download failed - exit /b 1 -) - -call %~dp0\..\..\conda\install_conda.bat - -IF ERRORLEVEL 1 ( - echo Conda installation failed - exit /b 1 -) - -set "ORIG_PATH=%PATH%" -set "PATH=%CONDA_HOME%;%CONDA_HOME%\scripts;%CONDA_HOME%\Library\bin;%PATH%" - -REM conda install -y anaconda-client -pip install git+https://github.com/peterjc123/anaconda-client.git@log_more_meaningfull_errors -IF ERRORLEVEL 1 ( - echo Anaconda client installation failed - exit /b 1 -) - -set PYTORCH_FINAL_PACKAGE= -:: Upload all the packages under `PYTORCH_FINAL_PACKAGE_DIR` -FOR /F "delims=" %%i IN ('where /R %PYTORCH_FINAL_PACKAGE_DIR% *vision*.tar.bz2') DO ( - set "PYTORCH_FINAL_PACKAGE=%%i" -) - -IF "%PYTORCH_FINAL_PACKAGE%" == "" ( - echo No package to upload - exit /b 0 -) - -:upload - -if "%RETRY_TIMES%" == "" ( - set /a RETRY_TIMES=10 - set /a SLEEP_TIME=2 -) else ( - set /a RETRY_TIMES=%RETRY_TIMES%-1 - set /a SLEEP_TIME=%SLEEP_TIME%*2 -) - -REM bash -c "yes | anaconda login --username "%PYTORCH_ANACONDA_USERNAME%" --password "%PYTORCH_ANACONDA_PASSWORD%"" -anaconda login --username "%PYTORCH_ANACONDA_USERNAME%" --password "%PYTORCH_ANACONDA_PASSWORD%" -IF ERRORLEVEL 1 ( - echo Anaconda client login failed - exit /b 1 -) - -echo Uploading %PYTORCH_FINAL_PACKAGE% to Anaconda Cloud -anaconda upload "%PYTORCH_FINAL_PACKAGE%" -u pytorch-nightly --label main --force --no-progress - -IF ERRORLEVEL 1 ( - echo Anaconda upload retry times remaining: %RETRY_TIMES% - echo Sleep time: %SLEEP_TIME% seconds - IF %RETRY_TIMES% EQU 0 ( - echo Upload failed - exit /b 1 - ) - waitfor SomethingThatIsNeverHappening /t %SLEEP_TIME% 2>nul || ver >nul - goto upload -) ELSE ( - set RETRY_TIMES= - set SLEEP_TIME= -) diff --git a/packaging/windows/internal/vc_env_helper.bat b/packaging/windows/internal/vc_env_helper.bat new file mode 100644 index 00000000000..699876beb8a --- /dev/null +++ b/packaging/windows/internal/vc_env_helper.bat @@ -0,0 +1,49 @@ +@echo on + +set VC_VERSION_LOWER=17 +set VC_VERSION_UPPER=18 +if "%VC_YEAR%" == "2019" ( + set VC_VERSION_LOWER=16 + set VC_VERSION_UPPER=17 +) +if "%VC_YEAR%" == "2017" ( + set VC_VERSION_LOWER=15 + set VC_VERSION_UPPER=16 +) + +for /f "usebackq tokens=*" %%i in (`"%ProgramFiles(x86)%\Microsoft Visual Studio\Installer\vswhere.exe" -legacy -products * -version [%VC_VERSION_LOWER%^,%VC_VERSION_UPPER%^) -property installationPath`) do ( + if exist "%%i" if exist "%%i\VC\Auxiliary\Build\vcvarsall.bat" ( + set "VS15INSTALLDIR=%%i" + set "VS15VCVARSALL=%%i\VC\Auxiliary\Build\vcvarsall.bat" + goto vswhere + ) +) + +:vswhere +if "%VSDEVCMD_ARGS%" == "" ( + call "%VS15VCVARSALL%" x64 || exit /b 1 +) else ( + call "%VS15VCVARSALL%" x64 %VSDEVCMD_ARGS% || exit /b 1 +) + +@echo on + +if "%CU_VERSION%" == "xpu" call "C:\Program Files (x86)\Intel\oneAPI\setvars.bat" + +set DISTUTILS_USE_SDK=1 + +set args=%1 +shift +:start +if [%1] == [] goto done +set args=%args% %1 +shift +goto start + +:done +if "%args%" == "" ( + echo Usage: vc_env_helper.bat [command] [args] + echo e.g. 
vc_env_helper.bat cl /c test.cpp +) + +%args% || exit /b 1 diff --git a/packaging/windows/internal/vs_install.bat b/packaging/windows/internal/vs_install.bat deleted file mode 100644 index e6589092372..00000000000 --- a/packaging/windows/internal/vs_install.bat +++ /dev/null @@ -1,28 +0,0 @@ -@echo off - -set VS_DOWNLOAD_LINK=https://aka.ms/vs/15/release/vs_buildtools.exe -REM IF "%VS_LATEST%" == "1" ( -REM set VS_INSTALL_ARGS= --nocache --norestart --quiet --wait --add Microsoft.VisualStudio.Workload.VCTools -REM set VSDEVCMD_ARGS= -REM ) ELSE ( -set VS_INSTALL_ARGS=--nocache --quiet --wait --add Microsoft.VisualStudio.Workload.VCTools ^ - --add Microsoft.VisualStudio.Component.VC.Tools.14.11 ^ - --add Microsoft.Component.MSBuild ^ - --add Microsoft.VisualStudio.Component.Roslyn.Compiler ^ - --add Microsoft.VisualStudio.Component.TextTemplating ^ - --add Microsoft.VisualStudio.Component.VC.CoreIde ^ - --add Microsoft.VisualStudio.Component.VC.Redist.14.Latest ^ - --add Microsoft.VisualStudio.ComponentGroup.NativeDesktop.Core ^ - --add Microsoft.VisualStudio.Component.VC.Tools.x86.x64 ^ - --add Microsoft.VisualStudio.Component.VC.Tools.14.11 ^ - --add Microsoft.VisualStudio.ComponentGroup.NativeDesktop.Win81 -set VSDEVCMD_ARGS=-vcvars_ver=14.11 -REM ) - -curl -k -L %VS_DOWNLOAD_LINK% --output vs_installer.exe -if errorlevel 1 exit /b 1 - -start /wait .\vs_installer.exe %VS_INSTALL_ARGS% -if not errorlevel 0 exit /b 1 -if errorlevel 1 if not errorlevel 3010 exit /b 1 -if errorlevel 3011 exit /b 1 diff --git a/packaging/windows/old/cuda100.bat b/packaging/windows/old/cuda100.bat deleted file mode 100644 index ac9be3c6907..00000000000 --- a/packaging/windows/old/cuda100.bat +++ /dev/null @@ -1,59 +0,0 @@ -@echo off - -IF NOT "%BUILD_VISION%" == "" ( - set MODULE_NAME=vision -) ELSE ( - set MODULE_NAME=pytorch -) - -IF NOT EXIST "setup.py" IF NOT EXIST "%MODULE_NAME%" ( - call internal\clone.bat - cd .. - IF ERRORLEVEL 1 goto eof -) ELSE ( - call internal\clean.bat -) - -call internal\check_deps.bat -IF ERRORLEVEL 1 goto eof - -REM Check for optional components - -set NO_CUDA= -set CMAKE_GENERATOR=Visual Studio 15 2017 Win64 - -IF "%NVTOOLSEXT_PATH%"=="" ( - echo NVTX ^(Visual Studio Extension ^for CUDA^) ^not installed, failing - exit /b 1 - goto optcheck -) - -IF "%CUDA_PATH_V10_0%"=="" ( - echo CUDA 10.0 not found, failing - exit /b 1 -) ELSE ( - IF "%BUILD_VISION%" == "" ( - set TORCH_CUDA_ARCH_LIST=3.5;5.0+PTX;6.0;6.1;7.0;7.5 - set TORCH_NVCC_FLAGS=-Xfatbin -compress-all - ) ELSE ( - set NVCC_FLAGS=-D__CUDA_NO_HALF_OPERATORS__ --expt-relaxed-constexpr -gencode=arch=compute_35,code=sm_35 -gencode=arch=compute_50,code=sm_50 -gencode=arch=compute_60,code=sm_60 -gencode=arch=compute_70,code=sm_70 -gencode=arch=compute_75,code=sm_75 -gencode=arch=compute_50,code=compute_50 - ) - - set "CUDA_PATH=%CUDA_PATH_V10_0%" - set "PATH=%CUDA_PATH_V10_0%\bin;%PATH%" -) - -:optcheck - -IF "%BUILD_VISION%" == "" ( - call internal\check_opts.bat - IF ERRORLEVEL 1 goto eof - - call internal\copy.bat - IF ERRORLEVEL 1 goto eof -) - -call internal\setup.bat -IF ERRORLEVEL 1 goto eof - -:eof diff --git a/packaging/windows/old/cuda90.bat b/packaging/windows/old/cuda90.bat deleted file mode 100644 index fe0294812e2..00000000000 --- a/packaging/windows/old/cuda90.bat +++ /dev/null @@ -1,59 +0,0 @@ -@echo off - -IF NOT "%BUILD_VISION%" == "" ( - set MODULE_NAME=vision -) ELSE ( - set MODULE_NAME=pytorch -) - -IF NOT EXIST "setup.py" IF NOT EXIST "%MODULE_NAME%" ( - call internal\clone.bat - cd .. 
- IF ERRORLEVEL 1 goto eof -) ELSE ( - call internal\clean.bat -) - -call internal\check_deps.bat -IF ERRORLEVEL 1 goto eof - -REM Check for optional components - -set NO_CUDA= -set CMAKE_GENERATOR=Visual Studio 15 2017 Win64 - -IF "%NVTOOLSEXT_PATH%"=="" ( - echo NVTX ^(Visual Studio Extension ^for CUDA^) ^not installed, failing - exit /b 1 - goto optcheck -) - -IF "%CUDA_PATH_V9_0%"=="" ( - echo CUDA 9 not found, failing - exit /b 1 -) ELSE ( - IF "%BUILD_VISION%" == "" ( - set TORCH_CUDA_ARCH_LIST=3.5;5.0+PTX;6.0;7.0 - set TORCH_NVCC_FLAGS=-Xfatbin -compress-all - ) ELSE ( - set NVCC_FLAGS=-D__CUDA_NO_HALF_OPERATORS__ --expt-relaxed-constexpr -gencode=arch=compute_35,code=sm_35 -gencode=arch=compute_50,code=sm_50 -gencode=arch=compute_60,code=sm_60 -gencode=arch=compute_70,code=sm_70 -gencode=arch=compute_50,code=compute_50 - ) - - set "CUDA_PATH=%CUDA_PATH_V9_0%" - set "PATH=%CUDA_PATH_V9_0%\bin;%PATH%" -) - -:optcheck - -IF "%BUILD_VISION%" == "" ( - call internal\check_opts.bat - IF ERRORLEVEL 1 goto eof - - call internal\copy.bat - IF ERRORLEVEL 1 goto eof -) - -call internal\setup.bat -IF ERRORLEVEL 1 goto eof - -:eof diff --git a/packaging/windows/templates/auth_task.yml b/packaging/windows/templates/auth_task.yml deleted file mode 100644 index ece66412ff4..00000000000 --- a/packaging/windows/templates/auth_task.yml +++ /dev/null @@ -1,17 +0,0 @@ -jobs: -- job: 'VSTS_Auth_Task' - timeoutInMinutes: 5 - cancelTimeoutInMinutes: 5 - variables: - - group: 'peterjc-vsts-token' - - pool: - vmImage: 'win1803' - - steps: - - checkout: self - clean: true - - - template: vsts_auth.yml - parameters: - auth: $(vsts_auth) diff --git a/packaging/windows/templates/build_conda.yml b/packaging/windows/templates/build_conda.yml deleted file mode 100644 index 2d88271ad33..00000000000 --- a/packaging/windows/templates/build_conda.yml +++ /dev/null @@ -1,15 +0,0 @@ -parameters: - msagent: false - -steps: -- bash: 'find . 
-name "*.sh" -exec dos2unix {} +' - displayName: Replace file endings - -- script: 'if not exist %PYTORCH_FINAL_PACKAGE_DIR% mkdir %PYTORCH_FINAL_PACKAGE_DIR%' - displayName: 'Create final package directory' - -- bash: './packaging/conda/build_vision.sh $CUDA_VERSION $TORCHVISION_BUILD_VERSION $TORCHVISION_BUILD_NUMBER' - displayName: Build - env: - ${{ if eq(parameters.msagent, 'true') }}: - MAX_JOBS: 2 diff --git a/packaging/windows/templates/build_task.yml b/packaging/windows/templates/build_task.yml deleted file mode 100644 index e595662d313..00000000000 --- a/packaging/windows/templates/build_task.yml +++ /dev/null @@ -1,140 +0,0 @@ -parameters: - package: '' - spec: '' - jobDesc: '' - packageDesc: '' - msagent: true - cpuEnabled: true - cudaEnabled: true - condaEnabled: true - wheelsEnabled: true - override: false - -jobs: -- job: 'Windows_${{ parameters.spec }}_${{ parameters.package }}_Build' - timeoutInMinutes: 60 - cancelTimeoutInMinutes: 5 - condition: > - or(and(eq('${{ parameters.package }}', 'Conda'), eq('${{ parameters.spec }}', 'CPU'), - eq('${{ parameters.condaEnabled }}', 'true'), eq('${{ parameters.cpuEnabled }}', 'true')), - and(eq('${{ parameters.package }}', 'Wheels'), eq('${{ parameters.spec }}', 'CPU'), - eq('${{ parameters.wheelsEnabled }}', 'true'), eq('${{ parameters.cpuEnabled }}', 'true')), - and(eq('${{ parameters.package }}', 'Conda'), eq('${{ parameters.spec }}', 'CUDA'), - eq('${{ parameters.condaEnabled }}', 'true'), eq('${{ parameters.cudaEnabled }}', 'true')), - and(eq('${{ parameters.package }}', 'Wheels'), eq('${{ parameters.spec }}', 'CUDA'), - eq('${{ parameters.wheelsEnabled }}', 'true'), eq('${{ parameters.cudaEnabled }}', 'true'))) - variables: - - ${{ if eq(parameters.override, 'true') }}: - - name: TORCHVISION_BUILD_NUMBER - value: 1 - - name: PYTORCH_REPO - value: 'pytorch' - - name: PYTORCH_BRANCH - value: 'v0.4.0' - - ${{ if eq(parameters.msagent, 'true') }}: - - name: USE_SCCACHE - value: 0 - - ${{ if eq(parameters.msagent, 'false') }}: - - name: USE_SCCACHE - value: 1 - - ${{ if eq(parameters.package, 'Conda') }}: - - group: peterjc_anaconda_token - - name: PYTORCH_FINAL_PACKAGE_DIR - value: '$(Build.Repository.LocalPath)\packaging\windows\output' - - strategy: - maxParallel: 10 - matrix: - ${{ if eq(parameters.spec, 'CPU') }}: - PY3.5: - DESIRED_PYTHON: 3.5 - CUDA_VERSION: cpu - PY3.6: - DESIRED_PYTHON: 3.6 - CUDA_VERSION: cpu - PY3.7: - DESIRED_PYTHON: 3.7 - CUDA_VERSION: cpu - ${{ if ne(parameters.spec, 'CPU') }}: - PY3.5_92: - DESIRED_PYTHON: 3.5 - CUDA_VERSION: 92 - PY3.6_92: - DESIRED_PYTHON: 3.6 - CUDA_VERSION: 92 - PY3.7_92: - DESIRED_PYTHON: 3.7 - CUDA_VERSION: 92 - PY3.5_101: - DESIRED_PYTHON: 3.5 - CUDA_VERSION: 101 - PY3.6_101: - DESIRED_PYTHON: 3.6 - CUDA_VERSION: 101 - PY3.7_101: - DESIRED_PYTHON: 3.7 - CUDA_VERSION: 101 - - pool: - ${{ if eq(parameters.msagent, 'true') }}: - vmImage: 'win1803' - ${{ if eq(parameters.msagent, 'false') }}: - name: 'release' - - steps: - - checkout: self - clean: true - - - template: setup_env_for_msagent.yml - parameters: - msagent: ${{ parameters.msagent }} - - # - ${{ if and(eq(parameters.override, 'true'), eq(parameters.package, 'Wheels')) }}: - # - template: override_pytorch_version.yml - - - template: setup_nightly_variables.yml - parameters: - package: ${{ parameters.package }} - - - ${{ if eq(parameters.package, 'Wheels') }}: - - template: build_wheels.yml - parameters: - msagent: ${{ parameters.msagent }} - - - ${{ if eq(parameters.package, 'Conda') }}: - - template: build_conda.yml - 
parameters: - msagent: ${{ parameters.msagent }} - - - ${{ if or(eq(parameters.package, 'Wheels'), eq(parameters.package, 'Conda')) }}: - - template: publish_test_results.yml - parameters: - msagent: ${{ parameters.msagent }} - - # If you want to upload binaries to S3 & Anaconda Cloud, please uncomment this section. - - ${{ if and(eq(parameters.package, 'Wheels'), eq(parameters.spec, 'CPU')) }}: - - template: upload_to_s3.yml - parameters: - cuVer: '$(CUDA_VERSION)' - cudaVer: '$(CUDA_VERSION)' - - - ${{ if and(eq(parameters.package, 'Wheels'), ne(parameters.spec, 'CPU')) }}: - - template: upload_to_s3.yml - parameters: - cuVer: 'cu$(CUDA_VERSION)' - cudaVer: 'cuda$(CUDA_VERSION)' - - - ${{ if eq(parameters.package, 'Conda') }}: - - template: upload_to_conda.yml - parameters: - user: $(peterjc_conda_username) - pass: $(peterjc_conda_password) - - # If you want to upload binaries to Azure Git, please uncomment this section. - # - ${{ if or(eq(parameters.package, 'Wheels'), eq(parameters.package, 'Conda')) }}: - # - template: publish_test_results.yml - # parameters: - # msagent: ${{ parameters.msagent }} - # - template: publish_packages.yml - # parameters: - # package: ${{ parameters.package }} diff --git a/packaging/windows/templates/build_wheels.yml b/packaging/windows/templates/build_wheels.yml deleted file mode 100644 index 05c5712e334..00000000000 --- a/packaging/windows/templates/build_wheels.yml +++ /dev/null @@ -1,9 +0,0 @@ -parameters: - msagent: false - -steps: -- script: 'call packaging/windows/build_vision.bat %CUDA_VERSION% %TORCHVISION_BUILD_VERSION% %TORCHVISION_BUILD_NUMBER%' - displayName: Build - env: - ${{ if eq(parameters.msagent, 'true') }}: - MAX_JOBS: 2 diff --git a/packaging/windows/templates/linux_build_task.yml b/packaging/windows/templates/linux_build_task.yml deleted file mode 100644 index 0b32892791a..00000000000 --- a/packaging/windows/templates/linux_build_task.yml +++ /dev/null @@ -1,38 +0,0 @@ -parameters: - msagent: true - enabled: false - -jobs: -- job: 'Linux_CPU_Conda_Build' - timeoutInMinutes: 0 - cancelTimeoutInMinutes: 5 - condition: ${{ eq(parameters.enabled, 'true') }} - variables: - CUDA_VERSION: cpu - TORCH_CONDA_BUILD_FOLDER: pytorch-nightly - PYTORCH_FINAL_PACKAGE_DIR: '$(Build.Repository.LocalPath)/output' - - strategy: - maxParallel: 10 - matrix: - PY3.5: - DESIRED_PYTHON: 3.5 - - pool: - vmImage: 'ubuntu-16.04' - - steps: - - checkout: self - clean: true - - - script: 'sudo apt-get install p7zip-full' - displayName: 'Install 7Zip' - - - task: CondaEnvironment@1 - displayName: 'Install conda-build' - inputs: - packageSpecs: 'conda-build' - - - template: build_conda.yml - parameters: - msagent: ${{ parameters.msagent }} diff --git a/packaging/windows/templates/override_pytorch_version.yml b/packaging/windows/templates/override_pytorch_version.yml deleted file mode 100644 index 8af93ae43a4..00000000000 --- a/packaging/windows/templates/override_pytorch_version.yml +++ /dev/null @@ -1,6 +0,0 @@ -steps: -- script: 'windows/internal/override_pytorch_version.bat' - displayName: 'Override PyTorch Build Version for Wheels' - -- script: 'echo $(PYTORCH_BUILD_VERSION)' - displayName: 'Show PyTorch Build Version' diff --git a/packaging/windows/templates/publish_packages.yml b/packaging/windows/templates/publish_packages.yml deleted file mode 100644 index 51ce8247bf7..00000000000 --- a/packaging/windows/templates/publish_packages.yml +++ /dev/null @@ -1,8 +0,0 @@ -parameters: - package: '' - -steps: -- script: 'packaging/windows/internal/publish.bat' - 
displayName: 'Upload packages to Azure DevOps Repo' - env: - PACKAGEFULLNAME: ${{ parameters.package }} diff --git a/packaging/windows/templates/publish_test_results.yml b/packaging/windows/templates/publish_test_results.yml deleted file mode 100644 index 1e0dc0215d3..00000000000 --- a/packaging/windows/templates/publish_test_results.yml +++ /dev/null @@ -1,6 +0,0 @@ -steps: -- task: PublishTestResults@2 # No test results to publish - inputs: - testResultsFiles: 'windows/pytorch/test/**/*.xml' - testRunTitle: 'Publish test results' - enabled: false diff --git a/packaging/windows/templates/setup_env_for_msagent.yml b/packaging/windows/templates/setup_env_for_msagent.yml deleted file mode 100644 index 377734fa3db..00000000000 --- a/packaging/windows/templates/setup_env_for_msagent.yml +++ /dev/null @@ -1,25 +0,0 @@ -parameters: - msagent: false - -steps: -- ${{ if eq(parameters.msagent, 'true') }}: - - task: BatchScript@1 - displayName: 'Install 7Zip & cURL' - inputs: - filename: 'packaging/windows/internal/dep_install.bat' - - modifyEnvironment: true - - - task: BatchScript@1 - displayName: 'Install Visual Studio 2017' - inputs: - filename: 'packaging/windows/internal/vs_install.bat' - - modifyEnvironment: true - - - task: BatchScript@1 - displayName: 'Install CUDA' - inputs: - filename: 'packaging/windows/internal/cuda_install.bat' - - modifyEnvironment: true diff --git a/packaging/windows/templates/setup_nightly_variables.yml b/packaging/windows/templates/setup_nightly_variables.yml deleted file mode 100644 index 94b2fe934ce..00000000000 --- a/packaging/windows/templates/setup_nightly_variables.yml +++ /dev/null @@ -1,11 +0,0 @@ -parameters: - package: '' - -steps: -- task: BatchScript@1 - displayName: 'Setup nightly variables' - inputs: - filename: 'packaging/windows/internal/nightly_defaults.bat' - arguments: ${{ parameters.package }} - - modifyEnvironment: true diff --git a/packaging/windows/templates/upload_to_conda.yml b/packaging/windows/templates/upload_to_conda.yml deleted file mode 100644 index dc172bcf878..00000000000 --- a/packaging/windows/templates/upload_to_conda.yml +++ /dev/null @@ -1,10 +0,0 @@ -parameters: - user: '' - pass: '' - -steps: -- script: 'call packaging/windows/internal/upload.bat' - displayName: 'Upload packages to Anaconda Cloud' - env: - PYTORCH_ANACONDA_USERNAME: ${{ parameters.user }} - PYTORCH_ANACONDA_PASSWORD: ${{ parameters.pass }} diff --git a/packaging/windows/templates/upload_to_s3.yml b/packaging/windows/templates/upload_to_s3.yml deleted file mode 100644 index a31bcb15ae1..00000000000 --- a/packaging/windows/templates/upload_to_s3.yml +++ /dev/null @@ -1,15 +0,0 @@ -parameters: - cuVer: '' - cudaVer: '' - -steps: -- task: AmazonWebServices.aws-vsts-tools.S3Upload.S3Upload@1 - displayName: 'Upload ${{ parameters.cuVer }} wheel to S3' - inputs: - awsCredentials: 'Pytorch S3 bucket' - bucketName: 'pytorch' - sourceFolder: 'packaging/windows/output/${{ parameters.cudaVer }}' - globExpressions: '*.whl' - targetFolder: 'whl/nightly/${{ parameters.cuVer }}/' - filesAcl: 'public-read' - flattenFolders: 'true' diff --git a/packaging/windows/templates/vsts_auth.yml b/packaging/windows/templates/vsts_auth.yml deleted file mode 100644 index fde767d7f12..00000000000 --- a/packaging/windows/templates/vsts_auth.yml +++ /dev/null @@ -1,8 +0,0 @@ -parameters: - auth: '' - -steps: -- script: 'call packaging/windows/internal/auth.bat' - displayName: 'Sign in to Azure Pipelines' - env: - VSTS_AUTH: ${{ parameters.auth }} diff --git a/pyproject.toml b/pyproject.toml 
new file mode 100644
index 00000000000..61e4a957fc5
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,18 @@
+[tool.usort]
+
+first_party_detection = false
+
+[tool.black]
+
+line-length = 120
+target-version = ["py38"]
+
+[tool.ufmt]
+
+excludes = [
+    "gallery",
+]
+
+[build-system]
+
+requires = ["setuptools", "torch", "wheel"]
diff --git a/pytest.ini b/pytest.ini
new file mode 100644
index 00000000000..8d52b55d5a6
--- /dev/null
+++ b/pytest.ini
@@ -0,0 +1,13 @@
+[pytest]
+addopts =
+    # show tests that (f)ailed, (E)rror, or (X)passed in the summary
+    -rfEX
+    # Make tracebacks shorter
+    --tb=short
+    # enable all warnings
+    -Wd
+    --ignore=test/test_datasets_download.py
+    --ignore-glob=test/test_prototype_*.py
+testpaths =
+    test
+xfail_strict = True
diff --git a/references/classification/README.md b/references/classification/README.md
index acc2b0b4ed0..bc481f421ed 100644
--- a/references/classification/README.md
+++ b/references/classification/README.md
@@ -4,58 +4,320 @@
This folder contains reference training scripts for image classification.
They serve as a log of how to train specific models, and provide baseline
training and evaluation scripts to quickly bootstrap research.

-Except otherwise noted, all models have been trained on 8x V100 GPUs.
+Unless otherwise noted, all models have been trained on 8x V100 GPUs with
+the following parameters:
+
+| Parameter                | Value  |
+| ------------------------ | ------ |
+| `--batch_size`           | `32`   |
+| `--epochs`               | `90`   |
+| `--lr`                   | `0.1`  |
+| `--momentum`             | `0.9`  |
+| `--wd`, `--weight-decay` | `1e-4` |
+| `--lr-step-size`         | `30`   |
+| `--lr-gamma`             | `0.1`  |
+
+### AlexNet and VGG
+
+Since `AlexNet` and the original `VGG` architectures do not include batch
+normalization, the default initial learning rate `--lr 0.1` is too high.

-### ResNext-50 32x4d
```
-python -m torch.distributed.launch --nproc_per_node=8 --use_env train.py\
-    --model resnext50_32x4d --epochs 100
+torchrun --nproc_per_node=8 train.py\
+    --model $MODEL --lr 1e-2
```
+
+Here `$MODEL` is one of `alexnet`, `vgg11`, `vgg13`, `vgg16` or `vgg19`. Note
+that `vgg11_bn`, `vgg13_bn`, `vgg16_bn`, and `vgg19_bn` include batch
+normalization and thus are trained with the default parameters.
+
+### GoogLeNet

-### ResNext-101 32x8d
+The weights of the GoogLeNet model are ported from the original paper rather than trained from scratch.
+
+### Inception V3
+
+The weights of the Inception V3 model are ported from the original paper rather than trained from scratch.
+
+Since the model expects tensors with a size of N x 3 x 299 x 299, use the following command to validate it:

-On 8 nodes, each with 8 GPUs (for a total of 64 GPUS)
```
-python -m torch.distributed.launch --nproc_per_node=8 --use_env train.py\
-    --model resnext101_32x8d --epochs 100
+torchrun --nproc_per_node=8 train.py --model inception_v3\
+    --test-only --weights Inception_V3_Weights.IMAGENET1K_V1
```

+### ResNet
+```
+torchrun --nproc_per_node=8 train.py --model $MODEL
+```
+
+Here `$MODEL` is one of `resnet18`, `resnet34`, `resnet50`, `resnet101` or `resnet152`.
+
+### ResNext
+```
+torchrun --nproc_per_node=8 train.py\
+    --model $MODEL --epochs 100
+```
+
+Here `$MODEL` is one of `resnext50_32x4d` or `resnext101_32x8d`.
+Note that the above command corresponds to a single node with 8 GPUs. If you use
+a different number of GPUs and/or a different batch size, then the learning rate
+should be scaled accordingly.
For example, the pretrained model provided by
+`torchvision` was trained on 8 nodes, each with 8 GPUs (for a total of 64 GPUs),
+with `--batch_size 16` and `--lr 0.4`, instead of the current defaults,
+which are `--batch_size 32` and `--lr 0.1` respectively. A worked example of
+this scaling is given below, after the EfficientNet-V2 notes.

### MobileNetV2
```
-python -m torch.distributed.launch --nproc_per_node=8 --use_env train.py\
+torchrun --nproc_per_node=8 train.py\
    --model mobilenet_v2 --epochs 300 --lr 0.045 --wd 0.00004\
    --lr-step-size 1 --lr-gamma 0.98
```
+
+### MobileNetV3 Large & Small
+```
+torchrun --nproc_per_node=8 train.py\
+    --model $MODEL --epochs 600 --opt rmsprop --batch-size 128 --lr 0.064\
+    --wd 0.00001 --lr-step-size 2 --lr-gamma 0.973 --auto-augment imagenet --random-erase 0.2
+```
+
+Here `$MODEL` is one of `mobilenet_v3_large` or `mobilenet_v3_small`.
+
+We then averaged the parameters of the last 3 checkpoints that improved the Acc@1. See [#3182](https://github.com/pytorch/vision/pull/3182)
+and [#3354](https://github.com/pytorch/vision/pull/3354) for details.
+
+
+### EfficientNet-V1
+
+The weights of the B0-B4 variants are ported from Ross Wightman's [timm repo](https://github.com/rwightman/pytorch-image-models/blob/01cb46a9a50e3ba4be167965b5764e9702f09b30/timm/models/efficientnet.py#L95-L108).
+
+The weights of the B5-B7 variants are ported from Luke Melas' [EfficientNet-PyTorch repo](https://github.com/lukemelas/EfficientNet-PyTorch/blob/1039e009545d9329ea026c9f7541341439712b96/efficientnet_pytorch/utils.py#L562-L564).
+
+All models were trained using Bicubic interpolation and each has custom crop and resize sizes. To validate the models use the following commands:
+```
+torchrun --nproc_per_node=8 train.py --model efficientnet_b0 --test-only --weights EfficientNet_B0_Weights.IMAGENET1K_V1
+torchrun --nproc_per_node=8 train.py --model efficientnet_b1 --test-only --weights EfficientNet_B1_Weights.IMAGENET1K_V1
+torchrun --nproc_per_node=8 train.py --model efficientnet_b2 --test-only --weights EfficientNet_B2_Weights.IMAGENET1K_V1
+torchrun --nproc_per_node=8 train.py --model efficientnet_b3 --test-only --weights EfficientNet_B3_Weights.IMAGENET1K_V1
+torchrun --nproc_per_node=8 train.py --model efficientnet_b4 --test-only --weights EfficientNet_B4_Weights.IMAGENET1K_V1
+torchrun --nproc_per_node=8 train.py --model efficientnet_b5 --test-only --weights EfficientNet_B5_Weights.IMAGENET1K_V1
+torchrun --nproc_per_node=8 train.py --model efficientnet_b6 --test-only --weights EfficientNet_B6_Weights.IMAGENET1K_V1
+torchrun --nproc_per_node=8 train.py --model efficientnet_b7 --test-only --weights EfficientNet_B7_Weights.IMAGENET1K_V1
+```
+
+
+### EfficientNet-V2
+```
+torchrun --nproc_per_node=8 train.py \
+--model $MODEL --batch-size 128 --lr 0.5 --lr-scheduler cosineannealinglr \
+--lr-warmup-epochs 5 --lr-warmup-method linear --auto-augment ta_wide --epochs 600 --random-erase 0.1 \
+--label-smoothing 0.1 --mixup-alpha 0.2 --cutmix-alpha 1.0 --weight-decay 0.00002 --norm-weight-decay 0.0 \
+--train-crop-size $TRAIN_SIZE --model-ema --val-crop-size $EVAL_SIZE --val-resize-size $EVAL_SIZE \
+--ra-sampler --ra-reps 4
+```
+Here `$MODEL` is one of `efficientnet_v2_s` and `efficientnet_v2_m`.
+Note that the Small variant had a `$TRAIN_SIZE` of `300` and a `$EVAL_SIZE` of `384`, while the Medium variant used `384` and `480` respectively.
+
+Note that the above command corresponds to training on a single node with 8 GPUs.
+For generating the pre-trained weights, we trained with 4 nodes, each with 8 GPUs (for a total of 32 GPUs),
+and `--batch_size 32`.
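+
+A quick worked example of the linear scaling rule mentioned in the ResNext and
+EfficientNet-V2 notes above (plain arithmetic, not part of the training scripts):
+
+```
+# ResNext: released weights were trained on 8 nodes x 8 GPUs with --batch_size 16
+default_global_batch = 8 * 32    # single node: 8 GPUs x 32 images = 256
+release_global_batch = 64 * 16   # 64 GPUs x 16 images = 1024
+scale = release_global_batch / default_global_batch  # 4.0
+print(0.1 * scale)  # 0.4 -- the --lr used for the released ResNext weights
+
+# EfficientNet-V2: 32 GPUs x 32 images = 8 GPUs x 128 images = 1024, so the
+# global batch is unchanged and the default --lr 0.5 needs no rescaling
+```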
+
+The weights of the Large variant are ported from the original paper rather than trained from scratch. See the `EfficientNet_V2_L_Weights` entry for their exact preprocessing transforms.
+
+
+### RegNet
+
+#### Small models
+```
+torchrun --nproc_per_node=8 train.py\
+    --model $MODEL --epochs 100 --batch-size 128 --wd 0.00005 --lr=0.8\
+    --lr-scheduler=cosineannealinglr --lr-warmup-method=linear\
+    --lr-warmup-epochs=5 --lr-warmup-decay=0.1
+```
+Here `$MODEL` is one of `regnet_x_400mf`, `regnet_x_800mf`, `regnet_x_1_6gf`, `regnet_y_400mf`, `regnet_y_800mf` and `regnet_y_1_6gf`. Please note that we used a learning rate of 0.4 for `regnet_y_400mf` to get the same Acc@1 as [the paper](https://arxiv.org/abs/2003.13678).
+
+#### Medium models
+```
+torchrun --nproc_per_node=8 train.py\
+    --model $MODEL --epochs 100 --batch-size 64 --wd 0.00005 --lr=0.4\
+    --lr-scheduler=cosineannealinglr --lr-warmup-method=linear\
+    --lr-warmup-epochs=5 --lr-warmup-decay=0.1
+```
+Here `$MODEL` is one of `regnet_x_3_2gf`, `regnet_x_8gf`, `regnet_x_16gf`, `regnet_y_3_2gf` and `regnet_y_8gf`.
+
+#### Large models
+```
+torchrun --nproc_per_node=8 train.py\
+    --model $MODEL --epochs 100 --batch-size 32 --wd 0.00005 --lr=0.2\
+    --lr-scheduler=cosineannealinglr --lr-warmup-method=linear\
+    --lr-warmup-epochs=5 --lr-warmup-decay=0.1
+```
+Here `$MODEL` is one of `regnet_x_32gf`, `regnet_y_16gf` and `regnet_y_32gf`.
+
+### Vision Transformer
+
+#### vit_b_16
+```
+torchrun --nproc_per_node=8 train.py\
+    --model vit_b_16 --epochs 300 --batch-size 512 --opt adamw --lr 0.003 --wd 0.3\
+    --lr-scheduler cosineannealinglr --lr-warmup-method linear --lr-warmup-epochs 30\
+    --lr-warmup-decay 0.033 --amp --label-smoothing 0.11 --mixup-alpha 0.2 --auto-augment ra\
+    --clip-grad-norm 1 --ra-sampler --cutmix-alpha 1.0 --model-ema
+```
+
+Note that the above command corresponds to training on a single node with 8 GPUs.
+For generating the pre-trained weights, we trained with 8 nodes, each with 8 GPUs (for a total of 64 GPUs),
+and `--batch_size 64`.
+
+#### vit_b_32
+```
+torchrun --nproc_per_node=8 train.py\
+    --model vit_b_32 --epochs 300 --batch-size 512 --opt adamw --lr 0.003 --wd 0.3\
+    --lr-scheduler cosineannealinglr --lr-warmup-method linear --lr-warmup-epochs 30\
+    --lr-warmup-decay 0.033 --amp --label-smoothing 0.11 --mixup-alpha 0.2 --auto-augment imagenet\
+    --clip-grad-norm 1 --ra-sampler --cutmix-alpha 1.0 --model-ema
+```
+
+Note that the above command corresponds to training on a single node with 8 GPUs.
+For generating the pre-trained weights, we trained with 2 nodes, each with 8 GPUs (for a total of 16 GPUs),
+and `--batch_size 256`.
+
+#### vit_l_16
+```
+torchrun --nproc_per_node=8 train.py\
+    --model vit_l_16 --epochs 600 --batch-size 128 --lr 0.5 --lr-scheduler cosineannealinglr\
+    --lr-warmup-method linear --lr-warmup-epochs 5 --label-smoothing 0.1 --mixup-alpha 0.2\
+    --auto-augment ta_wide --random-erase 0.1 --weight-decay 0.00002 --norm-weight-decay 0.0\
+    --clip-grad-norm 1 --ra-sampler --cutmix-alpha 1.0 --model-ema --val-resize-size 232
+```
+
+Note that the above command corresponds to training on a single node with 8 GPUs.
+For generating the pre-trained weights, we trained with 2 nodes, each with 8 GPUs (for a total of 16 GPUs),
+and `--batch_size 64`.
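+
+Most recipes in this section combine a linear warmup with a cosine schedule via
+the `--lr-scheduler cosineannealinglr`, `--lr-warmup-method linear`,
+`--lr-warmup-epochs` and `--lr-warmup-decay` flags. Roughly, this corresponds to
+the following PyTorch sketch (simplified from the scheduler setup in `train.py`;
+the stand-in model and hyper-parameters are for illustration only):
+
+```
+import torch
+from torch.optim.lr_scheduler import CosineAnnealingLR, LinearLR, SequentialLR
+
+model = torch.nn.Linear(8, 8)  # stand-in model
+optimizer = torch.optim.AdamW(model.parameters(), lr=0.003, weight_decay=0.3)
+epochs, warmup_epochs = 300, 30
+# --lr-warmup-decay 0.033 means the first epoch starts at 0.033 * base lr
+warmup = LinearLR(optimizer, start_factor=0.033, total_iters=warmup_epochs)
+main = CosineAnnealingLR(optimizer, T_max=epochs - warmup_epochs)
+scheduler = SequentialLR(optimizer, schedulers=[warmup, main], milestones=[warmup_epochs])
+for epoch in range(epochs):
+    ...  # train_one_epoch(...)
+    scheduler.step()
+```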
+ +#### vit_l_32 +``` +torchrun --nproc_per_node=8 train.py\ + --model vit_l_32 --epochs 300 --batch-size 512 --opt adamw --lr 0.003 --wd 0.3\ + --lr-scheduler cosineannealinglr --lr-warmup-method linear --lr-warmup-epochs 30\ + --lr-warmup-decay 0.033 --amp --label-smoothing 0.11 --mixup-alpha 0.2 --auto-augment ra\ + --clip-grad-norm 1 --ra-sampler --cutmix-alpha 1.0 --model-ema +``` + +Note that the above command corresponds to training on a single node with 8 GPUs. +For generating the pre-trained weights, we trained with 8 nodes, each with 8 GPUs (for a total of 64 GPUs), +and `--batch_size 64`. + + +### ConvNeXt +``` +torchrun --nproc_per_node=8 train.py\ +--model $MODEL --batch-size 128 --opt adamw --lr 1e-3 --lr-scheduler cosineannealinglr \ +--lr-warmup-epochs 5 --lr-warmup-method linear --auto-augment ta_wide --epochs 600 --random-erase 0.1 \ +--label-smoothing 0.1 --mixup-alpha 0.2 --cutmix-alpha 1.0 --weight-decay 0.05 --norm-weight-decay 0.0 \ +--train-crop-size 176 --model-ema --val-resize-size 232 --ra-sampler --ra-reps 4 +``` +Here `$MODEL` is one of `convnext_tiny`, `convnext_small`, `convnext_base` and `convnext_large`. Note that each variant had its `--val-resize-size` optimized in a post-training step, see their `Weights` entry for their exact value. + +Note that the above command corresponds to training on a single node with 8 GPUs. +For generating the pre-trained weights, we trained with 2 nodes, each with 8 GPUs (for a total of 16 GPUs), +and `--batch_size 64`. + + +### SwinTransformer +``` +torchrun --nproc_per_node=8 train.py\ +--model $MODEL --epochs 300 --batch-size 128 --opt adamw --lr 0.001 --weight-decay 0.05 --norm-weight-decay 0.0 --bias-weight-decay 0.0 --transformer-embedding-decay 0.0 --lr-scheduler cosineannealinglr --lr-min 0.00001 --lr-warmup-method linear --lr-warmup-epochs 20 --lr-warmup-decay 0.01 --amp --label-smoothing 0.1 --mixup-alpha 0.8 --clip-grad-norm 5.0 --cutmix-alpha 1.0 --random-erase 0.25 --interpolation bicubic --auto-augment ta_wide --model-ema --ra-sampler --ra-reps 4 --val-resize-size 224 +``` +Here `$MODEL` is one of `swin_t`, `swin_s` or `swin_b`. +Note that `--val-resize-size` was optimized in a post-training step, see their `Weights` entry for the exact value. + + + + +### SwinTransformer V2 +``` +torchrun --nproc_per_node=8 train.py\ +--model $MODEL --epochs 300 --batch-size 128 --opt adamw --lr 0.001 --weight-decay 0.05 --norm-weight-decay 0.0 --bias-weight-decay 0.0 --transformer-embedding-decay 0.0 --lr-scheduler cosineannealinglr --lr-min 0.00001 --lr-warmup-method linear --lr-warmup-epochs 20 --lr-warmup-decay 0.01 --amp --label-smoothing 0.1 --mixup-alpha 0.8 --clip-grad-norm 5.0 --cutmix-alpha 1.0 --random-erase 0.25 --interpolation bicubic --auto-augment ta_wide --model-ema --ra-sampler --ra-reps 4 --val-resize-size 256 --val-crop-size 256 --train-crop-size 256 +``` +Here `$MODEL` is one of `swin_v2_t`, `swin_v2_s` or `swin_v2_b`. +Note that `--val-resize-size` was optimized in a post-training step, see their `Weights` entry for the exact value. 
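+
+Many of the recipes above pass `--ra-sampler --ra-reps 4`, which enables
+repeated augmentation: each sample appears several times per epoch, with each
+copy augmented independently and routed to a different process (GPU). The
+`RASampler` implementing this is added as `references/classification/sampler.py`
+later in this diff. A minimal usage sketch, assuming `dataset` and `epochs` are
+already defined and running in a single process (so the `num_replicas`/`rank`
+that normally come from the distributed setup are fixed by hand):
+
+```
+import torch
+from sampler import RASampler
+
+sampler = RASampler(dataset, num_replicas=1, rank=0, shuffle=True, repetitions=4)
+loader = torch.utils.data.DataLoader(dataset, batch_size=128, sampler=sampler)
+for epoch in range(epochs):
+    sampler.set_epoch(epoch)  # deterministic reshuffle per epoch
+    for images, targets in loader:
+        ...
+```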
+
+
+### MaxViT
+```
+torchrun --nproc_per_node=8 --nnodes=4 train.py\
+--model $MODEL --epochs 400 --batch-size 128 --opt adamw --lr 3e-3 --weight-decay 0.05 --lr-scheduler cosineannealinglr --lr-min 1e-5 --lr-warmup-method linear --lr-warmup-epochs 32 --label-smoothing 0.1 --mixup-alpha 0.8 --clip-grad-norm 1.0 --interpolation bicubic --auto-augment ta_wide --policy-magnitude 15 --model-ema --val-resize-size 224\
+--val-crop-size 224 --train-crop-size 224 --amp --model-ema-steps 32 --transformer-embedding-decay 0 --sync-bn
+```
+Here `$MODEL` is `maxvit_t`.
+Note that `--val-resize-size` was not optimized in a post-training step.
+
+
+### ShuffleNet V2
+```
+torchrun --nproc_per_node=8 train.py \
+--model=$MODEL \
+--batch-size=128 \
+--lr=0.5 --lr-scheduler=cosineannealinglr --lr-warmup-epochs=5 --lr-warmup-method=linear \
+--auto-augment=ta_wide --epochs=600 --random-erase=0.1 --weight-decay=0.00002 \
+--norm-weight-decay=0.0 --label-smoothing=0.1 --mixup-alpha=0.2 --cutmix-alpha=1.0 \
+--train-crop-size=176 --model-ema --val-resize-size=232 --ra-sampler --ra-reps=4
+```
+Here `$MODEL` is either `shufflenet_v2_x1_5` or `shufflenet_v2_x2_0`.
+
+The models `shufflenet_v2_x0_5` and `shufflenet_v2_x1_0` were contributed by the community. See [PR-849](https://github.com/pytorch/vision/pull/849#issuecomment-483391686) for details.
+
+
## Mixed precision training

-Automatic Mixed Precision (AMP) training on GPU for Pytorch can be enabled with the [NVIDIA Apex extension](https://github.com/NVIDIA/apex).
+Automatic Mixed Precision (AMP) training on GPU for PyTorch can be enabled with the [torch.cuda.amp](https://pytorch.org/docs/stable/amp.html?highlight=amp#module-torch.cuda.amp) module.

-Mixed precision training makes use of both FP32 and FP16 precisions where appropriate. FP16 operations can leverage the Tensor cores on NVIDIA GPUs (Volta, Turing or newer architectures) for improved throughput, generally without loss in model accuracy. Mixed precision training also often allows larger batch sizes. GPU automatic mixed precision training for Pytorch Vision can be enabled via the flag value `--apex=True`.
+Mixed precision training makes use of both FP32 and FP16 precisions where appropriate. FP16 operations can leverage the Tensor cores on NVIDIA GPUs (Volta, Turing or newer architectures) for improved throughput, generally without loss in model accuracy. Mixed precision training also often allows larger batch sizes. GPU automatic mixed precision training for PyTorch Vision can be enabled via the `--amp` flag.

```
-python -m torch.distributed.launch --nproc_per_node=8 --use_env train.py\
-    --model resnext50_32x4d --epochs 100 --apex
+torchrun --nproc_per_node=8 train.py\
+    --model resnext50_32x4d --epochs 100 --amp
```

## Quantized

-### Parameters used for generating quantized models:
+### Post training quantized models

-For all post training quantized models (All quantized models except mobilenet-v2), the settings are:
+For all post training quantized models, the settings are:

1. num_calibration_batches: 32
2. num_workers: 16
3. batch_size: 32
4. eval_batch_size: 128
-5. backend: 'fbgemm'
+5. qbackend: 'fbgemm'
+
+```
+python train_quantization.py --device='cpu' --post-training-quantize --qbackend='fbgemm' --model='$MODEL'
+```
+Here `$MODEL` is one of `googlenet`, `inception_v3`, `resnet18`, `resnet50`, `resnext101_32x8d`, `shufflenet_v2_x0_5` and `shufflenet_v2_x1_0`.
+
+### Quantized ShuffleNet V2
+
+Here are the commands that we use to quantize the `shufflenet_v2_x1_5` and `shufflenet_v2_x2_0` models.
+```
+# For shufflenet_v2_x1_5
+python train_quantization.py --device='cpu' --post-training-quantize --qbackend='fbgemm' \
+    --model=shufflenet_v2_x1_5 --weights="ShuffleNet_V2_X1_5_Weights.IMAGENET1K_V1" \
+    --train-crop-size 176 --val-resize-size 232 --data-path /datasets01_ontap/imagenet_full_size/061417/
+
+# For shufflenet_v2_x2_0
+python train_quantization.py --device='cpu' --post-training-quantize --qbackend='fbgemm' \
+    --model=shufflenet_v2_x2_0 --weights="ShuffleNet_V2_X2_0_Weights.IMAGENET1K_V1" \
+    --train-crop-size 176 --val-resize-size 232 --data-path /datasets01_ontap/imagenet_full_size/061417/
+```
+
+### QAT MobileNetV2

For Mobilenet-v2, the model was trained with quantization aware training; the settings used are:
1. num_workers: 16
2. batch_size: 32
3. eval_batch_size: 128
-4. backend: 'qnnpack'
+4. qbackend: 'qnnpack'
5. learning-rate: 0.0001
6. num_epochs: 90
7. num_observer_update_epochs: 4
8. num_batch_norm_update_epochs: 3
9. momentum: 0.9
10. lr_step_size: 30
11. lr_gamma: 0.1
+12. weight-decay: 0.0001
+
+```
+torchrun --nproc_per_node=8 train_quantization.py --model='mobilenet_v2'
+```

Training converges at about 10 epochs.

-For post training quant, device is set to CPU. For training, the device is set to CUDA
+### QAT MobileNetV3
+
+For Mobilenet-v3 Large, the model was trained with quantization aware training; the settings used are:
+1. num_workers: 16
+2. batch_size: 32
+3. eval_batch_size: 128
+4. qbackend: 'qnnpack'
+5. learning-rate: 0.001
+6. num_epochs: 90
+7. num_observer_update_epochs: 4
+8. num_batch_norm_update_epochs: 3
+9. momentum: 0.9
+10. lr_step_size: 30
+11. lr_gamma: 0.1
+12. weight-decay: 0.00001

-### Command to evaluate quantized models using the pre-trained weights:
-For all quantized models except inception_v3:
```
-python references/classification/train_quantization.py --data-path='imagenet_full_size/' \
-    --device='cpu' --test-only --backend='fbgemm' --model=''
+torchrun --nproc_per_node=8 train_quantization.py --model='mobilenet_v3_large' \
+    --wd 0.00001 --lr 0.001
```
-For inception_v3, since it expects tensors with a size of N x 3 x 299 x 299, before running above command,
-need to change the input size of dataset_test in train.py to:
+For post training quant, the device is set to CPU. For training, the device is set to CUDA.
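+
+At a high level, quantization aware training wraps the usual fine-tuning loop
+between a prepare and a convert step. The sketch below illustrates that flow
+with eager-mode `torch.ao.quantization` APIs; it is a simplified illustration,
+not the exact code in `train_quantization.py` (which also implements the
+observer and batch-norm freezing schedules listed above):
+
+```
+import torch
+import torchvision
+
+model = torchvision.models.quantization.mobilenet_v2(quantize=False)
+model.fuse_model(is_qat=True)
+model.qconfig = torch.ao.quantization.get_default_qat_qconfig("qnnpack")
+model.train()
+torch.ao.quantization.prepare_qat(model, inplace=True)
+# ... fine-tune as usual (typically on CUDA), then move to CPU to convert ...
+model.to("cpu").eval()
+quantized_model = torch.ao.quantization.convert(model)
+```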
+ +### Command to evaluate quantized models using the pre-trained weights: + ``` -dataset_test = torchvision.datasets.ImageFolder( - valdir, - transforms.Compose([ - transforms.Resize(342), - transforms.CenterCrop(299), - transforms.ToTensor(), - normalize, - ])) +python train_quantization.py --device='cpu' --test-only --qbackend='' --model='' ``` +For inception_v3 you need to pass the following extra parameters: +``` +--val-resize-size 342 --val-crop-size 299 --train-crop-size 299 +``` diff --git a/references/classification/presets.py b/references/classification/presets.py new file mode 100644 index 00000000000..8653957a576 --- /dev/null +++ b/references/classification/presets.py @@ -0,0 +1,119 @@ +import torch +from torchvision.transforms.functional import InterpolationMode + + +def get_module(use_v2): + # We need a protected import to avoid the V2 warning in case just V1 is used + if use_v2: + import torchvision.transforms.v2 + + return torchvision.transforms.v2 + else: + import torchvision.transforms + + return torchvision.transforms + + +class ClassificationPresetTrain: + # Note: this transform assumes that the input to forward() are always PIL + # images, regardless of the backend parameter. We may change that in the + # future though, if we change the output type from the dataset. + def __init__( + self, + *, + crop_size, + mean=(0.485, 0.456, 0.406), + std=(0.229, 0.224, 0.225), + interpolation=InterpolationMode.BILINEAR, + hflip_prob=0.5, + auto_augment_policy=None, + ra_magnitude=9, + augmix_severity=3, + random_erase_prob=0.0, + backend="pil", + use_v2=False, + ): + T = get_module(use_v2) + + transforms = [] + backend = backend.lower() + if backend == "tensor": + transforms.append(T.PILToTensor()) + elif backend != "pil": + raise ValueError(f"backend can be 'tensor' or 'pil', but got {backend}") + + transforms.append(T.RandomResizedCrop(crop_size, interpolation=interpolation, antialias=True)) + if hflip_prob > 0: + transforms.append(T.RandomHorizontalFlip(hflip_prob)) + if auto_augment_policy is not None: + if auto_augment_policy == "ra": + transforms.append(T.RandAugment(interpolation=interpolation, magnitude=ra_magnitude)) + elif auto_augment_policy == "ta_wide": + transforms.append(T.TrivialAugmentWide(interpolation=interpolation)) + elif auto_augment_policy == "augmix": + transforms.append(T.AugMix(interpolation=interpolation, severity=augmix_severity)) + else: + aa_policy = T.AutoAugmentPolicy(auto_augment_policy) + transforms.append(T.AutoAugment(policy=aa_policy, interpolation=interpolation)) + + if backend == "pil": + transforms.append(T.PILToTensor()) + + transforms.extend( + [ + T.ToDtype(torch.float, scale=True) if use_v2 else T.ConvertImageDtype(torch.float), + T.Normalize(mean=mean, std=std), + ] + ) + if random_erase_prob > 0: + transforms.append(T.RandomErasing(p=random_erase_prob)) + + if use_v2: + transforms.append(T.ToPureTensor()) + + self.transforms = T.Compose(transforms) + + def __call__(self, img): + return self.transforms(img) + + +class ClassificationPresetEval: + def __init__( + self, + *, + crop_size, + resize_size=256, + mean=(0.485, 0.456, 0.406), + std=(0.229, 0.224, 0.225), + interpolation=InterpolationMode.BILINEAR, + backend="pil", + use_v2=False, + ): + T = get_module(use_v2) + transforms = [] + backend = backend.lower() + if backend == "tensor": + transforms.append(T.PILToTensor()) + elif backend != "pil": + raise ValueError(f"backend can be 'tensor' or 'pil', but got {backend}") + + transforms += [ + T.Resize(resize_size, 
interpolation=interpolation, antialias=True),
+            T.CenterCrop(crop_size),
+        ]
+
+        if backend == "pil":
+            transforms.append(T.PILToTensor())
+
+        transforms += [
+            T.ToDtype(torch.float, scale=True) if use_v2 else T.ConvertImageDtype(torch.float),
+            T.Normalize(mean=mean, std=std),
+        ]
+
+        if use_v2:
+            transforms.append(T.ToPureTensor())
+
+        self.transforms = T.Compose(transforms)
+
+    def __call__(self, img):
+        return self.transforms(img)
diff --git a/references/classification/sampler.py b/references/classification/sampler.py
new file mode 100644
index 00000000000..e9dc1735a58
--- /dev/null
+++ b/references/classification/sampler.py
@@ -0,0 +1,62 @@
+import math
+
+import torch
+import torch.distributed as dist
+
+
+class RASampler(torch.utils.data.Sampler):
+    """Sampler that restricts data loading to a subset of the dataset for distributed training,
+    with repeated augmentation.
+    It ensures that each augmented version of a sample will be visible to a
+    different process (GPU).
+    Heavily based on 'torch.utils.data.DistributedSampler'.
+
+    This is borrowed from the DeiT Repo:
+    https://github.com/facebookresearch/deit/blob/main/samplers.py
+    """
+
+    def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True, seed=0, repetitions=3):
+        if num_replicas is None:
+            if not dist.is_available():
+                raise RuntimeError("Requires distributed package to be available!")
+            num_replicas = dist.get_world_size()
+        if rank is None:
+            if not dist.is_available():
+                raise RuntimeError("Requires distributed package to be available!")
+            rank = dist.get_rank()
+        self.dataset = dataset
+        self.num_replicas = num_replicas
+        self.rank = rank
+        self.epoch = 0
+        self.num_samples = int(math.ceil(len(self.dataset) * float(repetitions) / self.num_replicas))
+        self.total_size = self.num_samples * self.num_replicas
+        self.num_selected_samples = int(math.floor(len(self.dataset) // 256 * 256 / self.num_replicas))
+        self.shuffle = shuffle
+        self.seed = seed
+        self.repetitions = repetitions
+
+    def __iter__(self):
+        if self.shuffle:
+            # Deterministically shuffle based on epoch
+            g = torch.Generator()
+            g.manual_seed(self.seed + self.epoch)
+            indices = torch.randperm(len(self.dataset), generator=g).tolist()
+        else:
+            indices = list(range(len(self.dataset)))
+
+        # Add extra samples to make it evenly divisible
+        indices = [ele for ele in indices for i in range(self.repetitions)]
+        indices += indices[: (self.total_size - len(indices))]
+        assert len(indices) == self.total_size
+
+        # Subsample
+        indices = indices[self.rank : self.total_size : self.num_replicas]
+        assert len(indices) == self.num_samples
+
+        return iter(indices[: self.num_selected_samples])
+
+    def __len__(self):
+        return self.num_selected_samples
+
+    def set_epoch(self, epoch):
+        self.epoch = epoch
diff --git a/references/classification/train.py b/references/classification/train.py
index 480092a0331..d52124fcf33 100644
--- a/references/classification/train.py
+++ b/references/classification/train.py
@@ -1,57 +1,71 @@
-from __future__ import print_function
import datetime
import os
import time
-import sys
+import warnings

+import presets
import torch
import torch.utils.data
-from torch import nn
import torchvision
-from torchvision import transforms
-
+import torchvision.transforms
import utils
-
-try:
-    from apex import amp
-except ImportError:
-    amp = None
+from sampler import RASampler
+from torch import nn
+from torch.utils.data.dataloader import default_collate
+from torchvision.transforms.functional import InterpolationMode
+from transforms import
get_mixup_cutmix -def train_one_epoch(model, criterion, optimizer, data_loader, device, epoch, print_freq, apex=False): +def train_one_epoch(model, criterion, optimizer, data_loader, device, epoch, args, model_ema=None, scaler=None): model.train() metric_logger = utils.MetricLogger(delimiter=" ") - metric_logger.add_meter('lr', utils.SmoothedValue(window_size=1, fmt='{value}')) - metric_logger.add_meter('img/s', utils.SmoothedValue(window_size=10, fmt='{value}')) + metric_logger.add_meter("lr", utils.SmoothedValue(window_size=1, fmt="{value}")) + metric_logger.add_meter("img/s", utils.SmoothedValue(window_size=10, fmt="{value}")) - header = 'Epoch: [{}]'.format(epoch) - for image, target in metric_logger.log_every(data_loader, print_freq, header): + header = f"Epoch: [{epoch}]" + for i, (image, target) in enumerate(metric_logger.log_every(data_loader, args.print_freq, header)): start_time = time.time() image, target = image.to(device), target.to(device) - output = model(image) - loss = criterion(output, target) + with torch.cuda.amp.autocast(enabled=scaler is not None): + output = model(image) + loss = criterion(output, target) optimizer.zero_grad() - if apex: - with amp.scale_loss(loss, optimizer) as scaled_loss: - scaled_loss.backward() + if scaler is not None: + scaler.scale(loss).backward() + if args.clip_grad_norm is not None: + # we should unscale the gradients of optimizer's assigned params if do gradient clipping + scaler.unscale_(optimizer) + nn.utils.clip_grad_norm_(model.parameters(), args.clip_grad_norm) + scaler.step(optimizer) + scaler.update() else: loss.backward() - optimizer.step() + if args.clip_grad_norm is not None: + nn.utils.clip_grad_norm_(model.parameters(), args.clip_grad_norm) + optimizer.step() + + if model_ema and i % args.model_ema_steps == 0: + model_ema.update_parameters(model) + if epoch < args.lr_warmup_epochs: + # Reset ema buffer to keep copying weights during warmup period + model_ema.n_averaged.fill_(0) acc1, acc5 = utils.accuracy(output, target, topk=(1, 5)) batch_size = image.shape[0] metric_logger.update(loss=loss.item(), lr=optimizer.param_groups[0]["lr"]) - metric_logger.meters['acc1'].update(acc1.item(), n=batch_size) - metric_logger.meters['acc5'].update(acc5.item(), n=batch_size) - metric_logger.meters['img/s'].update(batch_size / (time.time() - start_time)) + metric_logger.meters["acc1"].update(acc1.item(), n=batch_size) + metric_logger.meters["acc5"].update(acc5.item(), n=batch_size) + metric_logger.meters["img/s"].update(batch_size / (time.time() - start_time)) -def evaluate(model, criterion, data_loader, device, print_freq=100): +def evaluate(model, criterion, data_loader, device, print_freq=100, log_suffix=""): model.eval() metric_logger = utils.MetricLogger(delimiter=" ") - header = 'Test:' - with torch.no_grad(): + header = f"Test: {log_suffix}" + + num_processed_samples = 0 + with torch.inference_mode(): for image, target in metric_logger.log_every(data_loader, print_freq, header): image = image.to(device, non_blocking=True) target = target.to(device, non_blocking=True) @@ -63,76 +77,123 @@ def evaluate(model, criterion, data_loader, device, print_freq=100): # could have been padded in distributed setup batch_size = image.shape[0] metric_logger.update(loss=loss.item()) - metric_logger.meters['acc1'].update(acc1.item(), n=batch_size) - metric_logger.meters['acc5'].update(acc5.item(), n=batch_size) + metric_logger.meters["acc1"].update(acc1.item(), n=batch_size) + metric_logger.meters["acc5"].update(acc5.item(), n=batch_size) + 
num_processed_samples += batch_size # gather the stats from all processes + + num_processed_samples = utils.reduce_across_processes(num_processed_samples) + if ( + hasattr(data_loader.dataset, "__len__") + and len(data_loader.dataset) != num_processed_samples + and torch.distributed.get_rank() == 0 + ): + # See FIXME above + warnings.warn( + f"It looks like the dataset has {len(data_loader.dataset)} samples, but {num_processed_samples} " + "samples were used for the validation, which might bias the results. " + "Try adjusting the batch size and / or the world size. " + "Setting the world size to 1 is always a safe bet." + ) + metric_logger.synchronize_between_processes() - print(' * Acc@1 {top1.global_avg:.3f} Acc@5 {top5.global_avg:.3f}' - .format(top1=metric_logger.acc1, top5=metric_logger.acc5)) + print(f"{header} Acc@1 {metric_logger.acc1.global_avg:.3f} Acc@5 {metric_logger.acc5.global_avg:.3f}") return metric_logger.acc1.global_avg def _get_cache_path(filepath): import hashlib + h = hashlib.sha1(filepath.encode()).hexdigest() cache_path = os.path.join("~", ".torch", "vision", "datasets", "imagefolder", h[:10] + ".pt") cache_path = os.path.expanduser(cache_path) return cache_path -def load_data(traindir, valdir, cache_dataset, distributed): +def load_data(traindir, valdir, args): # Data loading code print("Loading data") - normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], - std=[0.229, 0.224, 0.225]) + val_resize_size, val_crop_size, train_crop_size = ( + args.val_resize_size, + args.val_crop_size, + args.train_crop_size, + ) + interpolation = InterpolationMode(args.interpolation) print("Loading training data") st = time.time() cache_path = _get_cache_path(traindir) - if cache_dataset and os.path.exists(cache_path): + if args.cache_dataset and os.path.exists(cache_path): # Attention, as the transforms are also cached! - print("Loading dataset_train from {}".format(cache_path)) - dataset, _ = torch.load(cache_path) + print(f"Loading dataset_train from {cache_path}") + # TODO: this could probably be weights_only=True + dataset, _ = torch.load(cache_path, weights_only=False) else: + # We need a default value for the variables below because args may come + # from train_quantization.py which doesn't define them. + auto_augment_policy = getattr(args, "auto_augment", None) + random_erase_prob = getattr(args, "random_erase", 0.0) + ra_magnitude = getattr(args, "ra_magnitude", None) + augmix_severity = getattr(args, "augmix_severity", None) dataset = torchvision.datasets.ImageFolder( traindir, - transforms.Compose([ - transforms.RandomResizedCrop(224), - transforms.RandomHorizontalFlip(), - transforms.ToTensor(), - normalize, - ])) - if cache_dataset: - print("Saving dataset_train to {}".format(cache_path)) + presets.ClassificationPresetTrain( + crop_size=train_crop_size, + interpolation=interpolation, + auto_augment_policy=auto_augment_policy, + random_erase_prob=random_erase_prob, + ra_magnitude=ra_magnitude, + augmix_severity=augmix_severity, + backend=args.backend, + use_v2=args.use_v2, + ), + ) + if args.cache_dataset: + print(f"Saving dataset_train to {cache_path}") utils.mkdir(os.path.dirname(cache_path)) utils.save_on_master((dataset, traindir), cache_path) print("Took", time.time() - st) print("Loading validation data") cache_path = _get_cache_path(valdir) - if cache_dataset and os.path.exists(cache_path): + if args.cache_dataset and os.path.exists(cache_path): # Attention, as the transforms are also cached! 
- print("Loading dataset_test from {}".format(cache_path)) - dataset_test, _ = torch.load(cache_path) + print(f"Loading dataset_test from {cache_path}") + # TODO: this could probably be weights_only=True + dataset_test, _ = torch.load(cache_path, weights_only=False) else: + if args.weights and args.test_only: + weights = torchvision.models.get_weight(args.weights) + preprocessing = weights.transforms(antialias=True) + if args.backend == "tensor": + preprocessing = torchvision.transforms.Compose([torchvision.transforms.PILToTensor(), preprocessing]) + + else: + preprocessing = presets.ClassificationPresetEval( + crop_size=val_crop_size, + resize_size=val_resize_size, + interpolation=interpolation, + backend=args.backend, + use_v2=args.use_v2, + ) + dataset_test = torchvision.datasets.ImageFolder( valdir, - transforms.Compose([ - transforms.Resize(256), - transforms.CenterCrop(224), - transforms.ToTensor(), - normalize, - ])) - if cache_dataset: - print("Saving dataset_test to {}".format(cache_path)) + preprocessing, + ) + if args.cache_dataset: + print(f"Saving dataset_test to {cache_path}") utils.mkdir(os.path.dirname(cache_path)) utils.save_on_master((dataset_test, valdir), cache_path) print("Creating data loaders") - if distributed: - train_sampler = torch.utils.data.distributed.DistributedSampler(dataset) - test_sampler = torch.utils.data.distributed.DistributedSampler(dataset_test) + if args.distributed: + if hasattr(args, "ra_sampler") and args.ra_sampler: + train_sampler = RASampler(dataset, shuffle=True, repetitions=args.ra_reps) + else: + train_sampler = torch.utils.data.distributed.DistributedSampler(dataset) + test_sampler = torch.utils.data.distributed.DistributedSampler(dataset_test, shuffle=False) else: train_sampler = torch.utils.data.RandomSampler(dataset) test_sampler = torch.utils.data.SequentialSampler(dataset_test) @@ -141,13 +202,6 @@ def load_data(traindir, valdir, cache_dataset, distributed): def main(args): - if args.apex: - if sys.version_info < (3, 0): - raise RuntimeError("Apex currently only supports Python 3. Aborting.") - if amp is None: - raise RuntimeError("Failed to import apex. 
Please install apex from https://www.github.com/nvidia/apex " - "to enable mixed-precision training.") - if args.output_dir: utils.mkdir(args.output_dir) @@ -156,52 +210,154 @@ def main(args): device = torch.device(args.device) - torch.backends.cudnn.benchmark = True + if args.use_deterministic_algorithms: + torch.backends.cudnn.benchmark = False + torch.use_deterministic_algorithms(True) + else: + torch.backends.cudnn.benchmark = True - train_dir = os.path.join(args.data_path, 'train') - val_dir = os.path.join(args.data_path, 'val') - dataset, dataset_test, train_sampler, test_sampler = load_data(train_dir, val_dir, - args.cache_dataset, args.distributed) - data_loader = torch.utils.data.DataLoader( - dataset, batch_size=args.batch_size, - sampler=train_sampler, num_workers=args.workers, pin_memory=True) + train_dir = os.path.join(args.data_path, "train") + val_dir = os.path.join(args.data_path, "val") + dataset, dataset_test, train_sampler, test_sampler = load_data(train_dir, val_dir, args) + num_classes = len(dataset.classes) + mixup_cutmix = get_mixup_cutmix( + mixup_alpha=args.mixup_alpha, cutmix_alpha=args.cutmix_alpha, num_classes=num_classes, use_v2=args.use_v2 + ) + if mixup_cutmix is not None: + + def collate_fn(batch): + return mixup_cutmix(*default_collate(batch)) + + else: + collate_fn = default_collate + + data_loader = torch.utils.data.DataLoader( + dataset, + batch_size=args.batch_size, + sampler=train_sampler, + num_workers=args.workers, + pin_memory=True, + collate_fn=collate_fn, + ) data_loader_test = torch.utils.data.DataLoader( - dataset_test, batch_size=args.batch_size, - sampler=test_sampler, num_workers=args.workers, pin_memory=True) + dataset_test, batch_size=args.batch_size, sampler=test_sampler, num_workers=args.workers, pin_memory=True + ) print("Creating model") - model = torchvision.models.__dict__[args.model](pretrained=args.pretrained) + model = torchvision.models.get_model(args.model, weights=args.weights, num_classes=num_classes) model.to(device) + if args.distributed and args.sync_bn: model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model) - criterion = nn.CrossEntropyLoss() - - optimizer = torch.optim.SGD( - model.parameters(), lr=args.lr, momentum=args.momentum, weight_decay=args.weight_decay) - - if args.apex: - model, optimizer = amp.initialize(model, optimizer, - opt_level=args.apex_opt_level - ) + criterion = nn.CrossEntropyLoss(label_smoothing=args.label_smoothing) + + custom_keys_weight_decay = [] + if args.bias_weight_decay is not None: + custom_keys_weight_decay.append(("bias", args.bias_weight_decay)) + if args.transformer_embedding_decay is not None: + for key in ["class_token", "position_embedding", "relative_position_bias_table"]: + custom_keys_weight_decay.append((key, args.transformer_embedding_decay)) + parameters = utils.set_weight_decay( + model, + args.weight_decay, + norm_weight_decay=args.norm_weight_decay, + custom_keys_weight_decay=custom_keys_weight_decay if len(custom_keys_weight_decay) > 0 else None, + ) - lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=args.lr_step_size, gamma=args.lr_gamma) + opt_name = args.opt.lower() + if opt_name.startswith("sgd"): + optimizer = torch.optim.SGD( + parameters, + lr=args.lr, + momentum=args.momentum, + weight_decay=args.weight_decay, + nesterov="nesterov" in opt_name, + ) + elif opt_name == "rmsprop": + optimizer = torch.optim.RMSprop( + parameters, lr=args.lr, momentum=args.momentum, weight_decay=args.weight_decay, eps=0.0316, alpha=0.9 + ) + elif opt_name == 
"adamw": + optimizer = torch.optim.AdamW(parameters, lr=args.lr, weight_decay=args.weight_decay) + else: + raise RuntimeError(f"Invalid optimizer {args.opt}. Only SGD, RMSprop and AdamW are supported.") + + scaler = torch.cuda.amp.GradScaler() if args.amp else None + + args.lr_scheduler = args.lr_scheduler.lower() + if args.lr_scheduler == "steplr": + main_lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=args.lr_step_size, gamma=args.lr_gamma) + elif args.lr_scheduler == "cosineannealinglr": + main_lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingLR( + optimizer, T_max=args.epochs - args.lr_warmup_epochs, eta_min=args.lr_min + ) + elif args.lr_scheduler == "exponentiallr": + main_lr_scheduler = torch.optim.lr_scheduler.ExponentialLR(optimizer, gamma=args.lr_gamma) + else: + raise RuntimeError( + f"Invalid lr scheduler '{args.lr_scheduler}'. Only StepLR, CosineAnnealingLR and ExponentialLR " + "are supported." + ) + + if args.lr_warmup_epochs > 0: + if args.lr_warmup_method == "linear": + warmup_lr_scheduler = torch.optim.lr_scheduler.LinearLR( + optimizer, start_factor=args.lr_warmup_decay, total_iters=args.lr_warmup_epochs + ) + elif args.lr_warmup_method == "constant": + warmup_lr_scheduler = torch.optim.lr_scheduler.ConstantLR( + optimizer, factor=args.lr_warmup_decay, total_iters=args.lr_warmup_epochs + ) + else: + raise RuntimeError( + f"Invalid warmup lr method '{args.lr_warmup_method}'. Only linear and constant are supported." + ) + lr_scheduler = torch.optim.lr_scheduler.SequentialLR( + optimizer, schedulers=[warmup_lr_scheduler, main_lr_scheduler], milestones=[args.lr_warmup_epochs] + ) + else: + lr_scheduler = main_lr_scheduler model_without_ddp = model if args.distributed: model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu]) model_without_ddp = model.module + model_ema = None + if args.model_ema: + # Decay adjustment that aims to keep the decay independent of other hyper-parameters originally proposed at: + # https://github.com/facebookresearch/pycls/blob/f8cd9627/pycls/core/net.py#L123 + # + # total_ema_updates = (Dataset_size / n_GPUs) * epochs / (batch_size_per_gpu * EMA_steps) + # We consider constant = Dataset_size for a given dataset/setup and omit it. 
Thus: + # adjust = 1 / total_ema_updates ~= n_GPUs * batch_size_per_gpu * EMA_steps / epochs + adjust = args.world_size * args.batch_size * args.model_ema_steps / args.epochs + alpha = 1.0 - args.model_ema_decay + alpha = min(1.0, alpha * adjust) + model_ema = utils.ExponentialMovingAverage(model_without_ddp, device=device, decay=1.0 - alpha) + if args.resume: - checkpoint = torch.load(args.resume, map_location='cpu') - model_without_ddp.load_state_dict(checkpoint['model']) - optimizer.load_state_dict(checkpoint['optimizer']) - lr_scheduler.load_state_dict(checkpoint['lr_scheduler']) - args.start_epoch = checkpoint['epoch'] + 1 + checkpoint = torch.load(args.resume, map_location="cpu", weights_only=True) + model_without_ddp.load_state_dict(checkpoint["model"]) + if not args.test_only: + optimizer.load_state_dict(checkpoint["optimizer"]) + lr_scheduler.load_state_dict(checkpoint["lr_scheduler"]) + args.start_epoch = checkpoint["epoch"] + 1 + if model_ema: + model_ema.load_state_dict(checkpoint["model_ema"]) + if scaler: + scaler.load_state_dict(checkpoint["scaler"]) if args.test_only: - evaluate(model, criterion, data_loader_test, device=device) + # We disable the cudnn benchmarking because it can noticeably affect the accuracy + torch.backends.cudnn.benchmark = False + torch.backends.cudnn.deterministic = True + if model_ema: + evaluate(model_ema, criterion, data_loader_test, device=device, log_suffix="EMA") + else: + evaluate(model, criterion, data_loader_test, device=device) return print("Start training") @@ -209,53 +365,94 @@ def main(args): for epoch in range(args.start_epoch, args.epochs): if args.distributed: train_sampler.set_epoch(epoch) - train_one_epoch(model, criterion, optimizer, data_loader, device, epoch, args.print_freq, args.apex) + train_one_epoch(model, criterion, optimizer, data_loader, device, epoch, args, model_ema, scaler) lr_scheduler.step() evaluate(model, criterion, data_loader_test, device=device) + if model_ema: + evaluate(model_ema, criterion, data_loader_test, device=device, log_suffix="EMA") if args.output_dir: checkpoint = { - 'model': model_without_ddp.state_dict(), - 'optimizer': optimizer.state_dict(), - 'lr_scheduler': lr_scheduler.state_dict(), - 'epoch': epoch, - 'args': args} - utils.save_on_master( - checkpoint, - os.path.join(args.output_dir, 'model_{}.pth'.format(epoch))) - utils.save_on_master( - checkpoint, - os.path.join(args.output_dir, 'checkpoint.pth')) + "model": model_without_ddp.state_dict(), + "optimizer": optimizer.state_dict(), + "lr_scheduler": lr_scheduler.state_dict(), + "epoch": epoch, + "args": args, + } + if model_ema: + checkpoint["model_ema"] = model_ema.state_dict() + if scaler: + checkpoint["scaler"] = scaler.state_dict() + utils.save_on_master(checkpoint, os.path.join(args.output_dir, f"model_{epoch}.pth")) + utils.save_on_master(checkpoint, os.path.join(args.output_dir, "checkpoint.pth")) total_time = time.time() - start_time total_time_str = str(datetime.timedelta(seconds=int(total_time))) - print('Training time {}'.format(total_time_str)) + print(f"Training time {total_time_str}") -def parse_args(): +def get_args_parser(add_help=True): import argparse - parser = argparse.ArgumentParser(description='PyTorch Classification Training') - - parser.add_argument('--data-path', default='/datasets01/imagenet_full_size/061417/', help='dataset') - parser.add_argument('--model', default='resnet18', help='model') - parser.add_argument('--device', default='cuda', help='device') - parser.add_argument('-b', '--batch-size', default=32, 
type=int) - parser.add_argument('--epochs', default=90, type=int, metavar='N', - help='number of total epochs to run') - parser.add_argument('-j', '--workers', default=16, type=int, metavar='N', - help='number of data loading workers (default: 16)') - parser.add_argument('--lr', default=0.1, type=float, help='initial learning rate') - parser.add_argument('--momentum', default=0.9, type=float, metavar='M', - help='momentum') - parser.add_argument('--wd', '--weight-decay', default=1e-4, type=float, - metavar='W', help='weight decay (default: 1e-4)', - dest='weight_decay') - parser.add_argument('--lr-step-size', default=30, type=int, help='decrease lr every step-size epochs') - parser.add_argument('--lr-gamma', default=0.1, type=float, help='decrease lr by a factor of lr-gamma') - parser.add_argument('--print-freq', default=10, type=int, help='print frequency') - parser.add_argument('--output-dir', default='.', help='path where to save') - parser.add_argument('--resume', default='', help='resume from checkpoint') - parser.add_argument('--start-epoch', default=0, type=int, metavar='N', - help='start epoch') + + parser = argparse.ArgumentParser(description="PyTorch Classification Training", add_help=add_help) + + parser.add_argument("--data-path", default="/datasets01/imagenet_full_size/061417/", type=str, help="dataset path") + parser.add_argument("--model", default="resnet18", type=str, help="model name") + parser.add_argument("--device", default="cuda", type=str, help="device (Use cuda or cpu Default: cuda)") + parser.add_argument( + "-b", "--batch-size", default=32, type=int, help="images per gpu, the total batch size is $NGPU x batch_size" + ) + parser.add_argument("--epochs", default=90, type=int, metavar="N", help="number of total epochs to run") + parser.add_argument( + "-j", "--workers", default=16, type=int, metavar="N", help="number of data loading workers (default: 16)" + ) + parser.add_argument("--opt", default="sgd", type=str, help="optimizer") + parser.add_argument("--lr", default=0.1, type=float, help="initial learning rate") + parser.add_argument("--momentum", default=0.9, type=float, metavar="M", help="momentum") + parser.add_argument( + "--wd", + "--weight-decay", + default=1e-4, + type=float, + metavar="W", + help="weight decay (default: 1e-4)", + dest="weight_decay", + ) + parser.add_argument( + "--norm-weight-decay", + default=None, + type=float, + help="weight decay for Normalization layers (default: None, same value as --wd)", + ) + parser.add_argument( + "--bias-weight-decay", + default=None, + type=float, + help="weight decay for bias parameters of all layers (default: None, same value as --wd)", + ) + parser.add_argument( + "--transformer-embedding-decay", + default=None, + type=float, + help="weight decay for embedding parameters for vision transformer models (default: None, same value as --wd)", + ) + parser.add_argument( + "--label-smoothing", default=0.0, type=float, help="label smoothing (default: 0.0)", dest="label_smoothing" + ) + parser.add_argument("--mixup-alpha", default=0.0, type=float, help="mixup alpha (default: 0.0)") + parser.add_argument("--cutmix-alpha", default=0.0, type=float, help="cutmix alpha (default: 0.0)") + parser.add_argument("--lr-scheduler", default="steplr", type=str, help="the lr scheduler (default: steplr)") + parser.add_argument("--lr-warmup-epochs", default=0, type=int, help="the number of epochs to warmup (default: 0)") + parser.add_argument( + "--lr-warmup-method", default="constant", type=str, help="the warmup method (default: 
constant)" + ) + parser.add_argument("--lr-warmup-decay", default=0.01, type=float, help="the decay for lr") + parser.add_argument("--lr-step-size", default=30, type=int, help="decrease lr every step-size epochs") + parser.add_argument("--lr-gamma", default=0.1, type=float, help="decrease lr by a factor of lr-gamma") + parser.add_argument("--lr-min", default=0.0, type=float, help="minimum lr of lr schedule (default: 0.0)") + parser.add_argument("--print-freq", default=10, type=int, help="print frequency") + parser.add_argument("--output-dir", default=".", type=str, help="path to save outputs") + parser.add_argument("--resume", default="", type=str, help="path of checkpoint") + parser.add_argument("--start-epoch", default=0, type=int, metavar="N", help="start epoch") parser.add_argument( "--cache-dataset", dest="cache_dataset", @@ -274,32 +471,58 @@ def parse_args(): help="Only test the model", action="store_true", ) - parser.add_argument( - "--pretrained", - dest="pretrained", - help="Use pre-trained models from the modelzoo", - action="store_true", - ) + parser.add_argument("--auto-augment", default=None, type=str, help="auto augment policy (default: None)") + parser.add_argument("--ra-magnitude", default=9, type=int, help="magnitude of auto augment policy") + parser.add_argument("--augmix-severity", default=3, type=int, help="severity of augmix policy") + parser.add_argument("--random-erase", default=0.0, type=float, help="random erasing probability (default: 0.0)") # Mixed precision training parameters - parser.add_argument('--apex', action='store_true', - help='Use apex for mixed precision training') - parser.add_argument('--apex-opt-level', default='O1', type=str, - help='For apex mixed precision training' - 'O0 for FP32 training, O1 for mixed precision training.' - 'For further detail, see https://github.com/NVIDIA/apex/tree/master/examples/imagenet' - ) + parser.add_argument("--amp", action="store_true", help="Use torch.cuda.amp for mixed precision training") # distributed training parameters - parser.add_argument('--world-size', default=1, type=int, - help='number of distributed processes') - parser.add_argument('--dist-url', default='env://', help='url used to set up distributed training') - - args = parser.parse_args() - - return args + parser.add_argument("--world-size", default=1, type=int, help="number of distributed processes") + parser.add_argument("--dist-url", default="env://", type=str, help="url used to set up distributed training") + parser.add_argument( + "--model-ema", action="store_true", help="enable tracking Exponential Moving Average of model parameters" + ) + parser.add_argument( + "--model-ema-steps", + type=int, + default=32, + help="the number of iterations that controls how often to update the EMA model (default: 32)", + ) + parser.add_argument( + "--model-ema-decay", + type=float, + default=0.99998, + help="decay factor for Exponential Moving Average of model parameters (default: 0.99998)", + ) + parser.add_argument( + "--use-deterministic-algorithms", action="store_true", help="Forces the use of deterministic algorithms only." 
+ ) + parser.add_argument( + "--interpolation", default="bilinear", type=str, help="the interpolation method (default: bilinear)" + ) + parser.add_argument( + "--val-resize-size", default=256, type=int, help="the resize size used for validation (default: 256)" + ) + parser.add_argument( + "--val-crop-size", default=224, type=int, help="the central crop size used for validation (default: 224)" + ) + parser.add_argument( + "--train-crop-size", default=224, type=int, help="the random crop size used for training (default: 224)" + ) + parser.add_argument("--clip-grad-norm", default=None, type=float, help="the maximum gradient norm (default None)") + parser.add_argument("--ra-sampler", action="store_true", help="whether to use Repeated Augmentation in training") + parser.add_argument( + "--ra-reps", default=3, type=int, help="number of repetitions for Repeated Augmentation (default: 3)" + ) + parser.add_argument("--weights", default=None, type=str, help="the weights enum name to load") + parser.add_argument("--backend", default="PIL", type=str.lower, help="PIL or tensor - case insensitive") + parser.add_argument("--use-v2", action="store_true", help="Use V2 transforms") + return parser if __name__ == "__main__": - args = parse_args() + args = get_args_parser().parse_args() main(args) diff --git a/references/classification/train_quantization.py b/references/classification/train_quantization.py index 22621fe2404..bd324c6eef7 100644 --- a/references/classification/train_quantization.py +++ b/references/classification/train_quantization.py @@ -1,21 +1,18 @@ -from __future__ import print_function +import copy import datetime import os import time -import sys -import copy import torch +import torch.ao.quantization import torch.utils.data -from torch import nn import torchvision -import torch.quantization import utils -from train import train_one_epoch, evaluate, load_data +from torch import nn +from train import evaluate, load_data, train_one_epoch def main(args): - if args.output_dir: utils.mkdir(args.output_dir) @@ -23,49 +20,52 @@ def main(args): print(args) if args.post_training_quantize and args.distributed: - raise RuntimeError("Post training quantization example should not be performed " - "on distributed mode") + raise RuntimeError("Post training quantization example should not be performed on distributed mode") # Set backend engine to ensure that quantized model runs on the correct kernels - if args.backend not in torch.backends.quantized.supported_engines: - raise RuntimeError("Quantized backend not supported: " + str(args.backend)) - torch.backends.quantized.engine = args.backend + if args.qbackend not in torch.backends.quantized.supported_engines: + raise RuntimeError("Quantized backend not supported: " + str(args.qbackend)) + torch.backends.quantized.engine = args.qbackend device = torch.device(args.device) torch.backends.cudnn.benchmark = True # Data loading code print("Loading data") - train_dir = os.path.join(args.data_path, 'train') - val_dir = os.path.join(args.data_path, 'val') + train_dir = os.path.join(args.data_path, "train") + val_dir = os.path.join(args.data_path, "val") - dataset, dataset_test, train_sampler, test_sampler = load_data(train_dir, val_dir, - args.cache_dataset, args.distributed) + dataset, dataset_test, train_sampler, test_sampler = load_data(train_dir, val_dir, args) data_loader = torch.utils.data.DataLoader( - dataset, batch_size=args.batch_size, - sampler=train_sampler, num_workers=args.workers, pin_memory=True) + dataset, batch_size=args.batch_size, 
sampler=train_sampler, num_workers=args.workers, pin_memory=True + ) data_loader_test = torch.utils.data.DataLoader( - dataset_test, batch_size=args.eval_batch_size, - sampler=test_sampler, num_workers=args.workers, pin_memory=True) + dataset_test, batch_size=args.eval_batch_size, sampler=test_sampler, num_workers=args.workers, pin_memory=True + ) print("Creating model", args.model) # when training quantized models, we always start from a pre-trained fp32 reference model - model = torchvision.models.quantization.__dict__[args.model](pretrained=True, quantize=args.test_only) + prefix = "quantized_" + model_name = args.model + if not model_name.startswith(prefix): + model_name = prefix + model_name + model = torchvision.models.get_model(model_name, weights=args.weights, quantize=args.test_only) model.to(device) if not (args.test_only or args.post_training_quantize): - model.fuse_model() - model.qconfig = torch.quantization.get_default_qat_qconfig(args.backend) - torch.quantization.prepare_qat(model, inplace=True) + model.fuse_model(is_qat=True) + model.qconfig = torch.ao.quantization.get_default_qat_qconfig(args.qbackend) + torch.ao.quantization.prepare_qat(model, inplace=True) + + if args.distributed and args.sync_bn: + model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model) optimizer = torch.optim.SGD( - model.parameters(), lr=args.lr, momentum=args.momentum, - weight_decay=args.weight_decay) + model.parameters(), lr=args.lr, momentum=args.momentum, weight_decay=args.weight_decay + ) - lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, - step_size=args.lr_step_size, - gamma=args.lr_gamma) + lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=args.lr_step_size, gamma=args.lr_gamma) criterion = nn.CrossEntropyLoss() model_without_ddp = model @@ -74,34 +74,31 @@ def main(args): model_without_ddp = model.module if args.resume: - checkpoint = torch.load(args.resume, map_location='cpu') - model_without_ddp.load_state_dict(checkpoint['model']) - optimizer.load_state_dict(checkpoint['optimizer']) - lr_scheduler.load_state_dict(checkpoint['lr_scheduler']) - args.start_epoch = checkpoint['epoch'] + 1 + checkpoint = torch.load(args.resume, map_location="cpu", weights_only=True) + model_without_ddp.load_state_dict(checkpoint["model"]) + optimizer.load_state_dict(checkpoint["optimizer"]) + lr_scheduler.load_state_dict(checkpoint["lr_scheduler"]) + args.start_epoch = checkpoint["epoch"] + 1 if args.post_training_quantize: # perform calibration on a subset of the training dataset # for that, create a subset of the training dataset - ds = torch.utils.data.Subset( - dataset, - indices=list(range(args.batch_size * args.num_calibration_batches))) + ds = torch.utils.data.Subset(dataset, indices=list(range(args.batch_size * args.num_calibration_batches))) data_loader_calibration = torch.utils.data.DataLoader( - ds, batch_size=args.batch_size, shuffle=False, num_workers=args.workers, - pin_memory=True) + ds, batch_size=args.batch_size, shuffle=False, num_workers=args.workers, pin_memory=True + ) model.eval() - model.fuse_model() - model.qconfig = torch.quantization.get_default_qconfig(args.backend) - torch.quantization.prepare(model, inplace=True) + model.fuse_model(is_qat=False) + model.qconfig = torch.ao.quantization.get_default_qconfig(args.qbackend) + torch.ao.quantization.prepare(model, inplace=True) # Calibrate first print("Calibrating") evaluate(model, criterion, data_loader_calibration, device=device, print_freq=1) - torch.quantization.convert(model, inplace=True) + 
torch.ao.quantization.convert(model, inplace=True) if args.output_dir: - print('Saving quantized model') + print("Saving quantized model") if utils.is_main_process(): - torch.save(model.state_dict(), os.path.join(args.output_dir, - 'quantized_post_train_model.pth')) + torch.save(model.state_dict(), os.path.join(args.output_dir, "quantized_post_train_model.pth")) print("Evaluating post-training quantized model") evaluate(model, criterion, data_loader_test, device=device) return @@ -110,113 +107,111 @@ def main(args): evaluate(model, criterion, data_loader_test, device=device) return - model.apply(torch.quantization.enable_observer) - model.apply(torch.quantization.enable_fake_quant) + model.apply(torch.ao.quantization.enable_observer) + model.apply(torch.ao.quantization.enable_fake_quant) start_time = time.time() for epoch in range(args.start_epoch, args.epochs): if args.distributed: train_sampler.set_epoch(epoch) - print('Starting training for epoch', epoch) - train_one_epoch(model, criterion, optimizer, data_loader, device, epoch, - args.print_freq) + print("Starting training for epoch", epoch) + train_one_epoch(model, criterion, optimizer, data_loader, device, epoch, args) lr_scheduler.step() - with torch.no_grad(): + with torch.inference_mode(): if epoch >= args.num_observer_update_epochs: - print('Disabling observer for subseq epochs, epoch = ', epoch) - model.apply(torch.quantization.disable_observer) + print("Disabling observer for subseq epochs, epoch = ", epoch) + model.apply(torch.ao.quantization.disable_observer) if epoch >= args.num_batch_norm_update_epochs: - print('Freezing BN for subseq epochs, epoch = ', epoch) + print("Freezing BN for subseq epochs, epoch = ", epoch) model.apply(torch.nn.intrinsic.qat.freeze_bn_stats) - print('Evaluate QAT model') + print("Evaluate QAT model") - evaluate(model, criterion, data_loader_test, device=device) - quantized_eval_model = copy.deepcopy(model) + evaluate(model, criterion, data_loader_test, device=device, log_suffix="QAT") + quantized_eval_model = copy.deepcopy(model_without_ddp) quantized_eval_model.eval() - quantized_eval_model.to(torch.device('cpu')) - torch.quantization.convert(quantized_eval_model, inplace=True) + quantized_eval_model.to(torch.device("cpu")) + torch.ao.quantization.convert(quantized_eval_model, inplace=True) - print('Evaluate Quantized model') - evaluate(quantized_eval_model, criterion, data_loader_test, - device=torch.device('cpu')) + print("Evaluate Quantized model") + evaluate(quantized_eval_model, criterion, data_loader_test, device=torch.device("cpu")) model.train() if args.output_dir: checkpoint = { - 'model': model_without_ddp.state_dict(), - 'eval_model': quantized_eval_model.state_dict(), - 'optimizer': optimizer.state_dict(), - 'lr_scheduler': lr_scheduler.state_dict(), - 'epoch': epoch, - 'args': args} - utils.save_on_master( - checkpoint, - os.path.join(args.output_dir, 'model_{}.pth'.format(epoch))) - utils.save_on_master( - checkpoint, - os.path.join(args.output_dir, 'checkpoint.pth')) - print('Saving models after epoch ', epoch) + "model": model_without_ddp.state_dict(), + "eval_model": quantized_eval_model.state_dict(), + "optimizer": optimizer.state_dict(), + "lr_scheduler": lr_scheduler.state_dict(), + "epoch": epoch, + "args": args, + } + utils.save_on_master(checkpoint, os.path.join(args.output_dir, f"model_{epoch}.pth")) + utils.save_on_master(checkpoint, os.path.join(args.output_dir, "checkpoint.pth")) + print("Saving models after epoch ", epoch) total_time = time.time() - start_time 
total_time_str = str(datetime.timedelta(seconds=int(total_time))) - print('Training time {}'.format(total_time_str)) + print(f"Training time {total_time_str}") -def parse_args(): +def get_args_parser(add_help=True): import argparse - parser = argparse.ArgumentParser(description='PyTorch Classification Training') - - parser.add_argument('--data-path', - default='/datasets01/imagenet_full_size/061417/', - help='dataset') - parser.add_argument('--model', - default='mobilenet_v2', - help='model') - parser.add_argument('--backend', - default='qnnpack', - help='fbgemm or qnnpack') - parser.add_argument('--device', - default='cuda', - help='device') - - parser.add_argument('-b', '--batch-size', default=32, type=int, - help='batch size for calibration/training') - parser.add_argument('--eval-batch-size', default=128, type=int, - help='batch size for evaluation') - parser.add_argument('--epochs', default=90, type=int, metavar='N', - help='number of total epochs to run') - parser.add_argument('--num-observer-update-epochs', - default=4, type=int, metavar='N', - help='number of total epochs to update observers') - parser.add_argument('--num-batch-norm-update-epochs', default=3, - type=int, metavar='N', - help='number of total epochs to update batch norm stats') - parser.add_argument('--num-calibration-batches', - default=32, type=int, metavar='N', - help='number of batches of training set for \ - observer calibration ') - - parser.add_argument('-j', '--workers', default=16, type=int, metavar='N', - help='number of data loading workers (default: 16)') - parser.add_argument('--lr', - default=0.0001, type=float, - help='initial learning rate') - parser.add_argument('--momentum', - default=0.9, type=float, metavar='M', - help='momentum') - parser.add_argument('--wd', '--weight-decay', default=1e-4, type=float, - metavar='W', help='weight decay (default: 1e-4)', - dest='weight_decay') - parser.add_argument('--lr-step-size', default=30, type=int, - help='decrease lr every step-size epochs') - parser.add_argument('--lr-gamma', default=0.1, type=float, - help='decrease lr by a factor of lr-gamma') - parser.add_argument('--print-freq', default=10, type=int, - help='print frequency') - parser.add_argument('--output-dir', default='.', help='path where to save') - parser.add_argument('--resume', default='', help='resume from checkpoint') - parser.add_argument('--start-epoch', default=0, type=int, metavar='N', - help='start epoch') + + parser = argparse.ArgumentParser(description="PyTorch Quantized Classification Training", add_help=add_help) + + parser.add_argument("--data-path", default="/datasets01/imagenet_full_size/061417/", type=str, help="dataset path") + parser.add_argument("--model", default="mobilenet_v2", type=str, help="model name") + parser.add_argument("--qbackend", default="qnnpack", type=str, help="Quantized backend: fbgemm or qnnpack") + parser.add_argument("--device", default="cuda", type=str, help="device (Use cuda or cpu Default: cuda)") + + parser.add_argument( + "-b", "--batch-size", default=32, type=int, help="images per gpu, the total batch size is $NGPU x batch_size" + ) + parser.add_argument("--eval-batch-size", default=128, type=int, help="batch size for evaluation") + parser.add_argument("--epochs", default=90, type=int, metavar="N", help="number of total epochs to run") + parser.add_argument( + "--num-observer-update-epochs", + default=4, + type=int, + metavar="N", + help="number of total epochs to update observers", + ) + parser.add_argument( + "--num-batch-norm-update-epochs", + 
default=3, + type=int, + metavar="N", + help="number of total epochs to update batch norm stats", + ) + parser.add_argument( + "--num-calibration-batches", + default=32, + type=int, + metavar="N", + help="number of batches of training set for \ + observer calibration ", + ) + + parser.add_argument( + "-j", "--workers", default=16, type=int, metavar="N", help="number of data loading workers (default: 16)" + ) + parser.add_argument("--lr", default=0.0001, type=float, help="initial learning rate") + parser.add_argument("--momentum", default=0.9, type=float, metavar="M", help="momentum") + parser.add_argument( + "--wd", + "--weight-decay", + default=1e-4, + type=float, + metavar="W", + help="weight decay (default: 1e-4)", + dest="weight_decay", + ) + parser.add_argument("--lr-step-size", default=30, type=int, help="decrease lr every step-size epochs") + parser.add_argument("--lr-gamma", default=0.1, type=float, help="decrease lr by a factor of lr-gamma") + parser.add_argument("--print-freq", default=10, type=int, help="print frequency") + parser.add_argument("--output-dir", default=".", type=str, help="path to save outputs") + parser.add_argument("--resume", default="", type=str, help="path of checkpoint") + parser.add_argument("--start-epoch", default=0, type=int, metavar="N", help="start epoch") parser.add_argument( "--cache-dataset", dest="cache_dataset", @@ -224,6 +219,12 @@ def parse_args(): It also serializes the transforms", action="store_true", ) + parser.add_argument( + "--sync-bn", + dest="sync_bn", + help="Use sync batch norm", + action="store_true", + ) parser.add_argument( "--test-only", dest="test_only", @@ -238,17 +239,35 @@ def parse_args(): ) # distributed training parameters - parser.add_argument('--world-size', default=1, type=int, - help='number of distributed processes') - parser.add_argument('--dist-url', - default='env://', - help='url used to set up distributed training') + parser.add_argument("--world-size", default=1, type=int, help="number of distributed processes") + parser.add_argument("--dist-url", default="env://", type=str, help="url used to set up distributed training") + + parser.add_argument( + "--interpolation", default="bilinear", type=str, help="the interpolation method (default: bilinear)" + ) + parser.add_argument( + "--val-resize-size", default=256, type=int, help="the resize size used for validation (default: 256)" + ) + parser.add_argument( + "--val-crop-size", default=224, type=int, help="the central crop size used for validation (default: 224)" + ) + parser.add_argument( + "--train-crop-size", default=224, type=int, help="the random crop size used for training (default: 224)" + ) + parser.add_argument("--clip-grad-norm", default=None, type=float, help="the maximum gradient norm (default None)") + parser.add_argument("--weights", default=None, type=str, help="the weights enum name to load") - args = parser.parse_args() + parser.add_argument("--backend", default="PIL", type=str.lower, help="PIL or tensor - case insensitive") + parser.add_argument("--use-v2", action="store_true", help="Use V2 transforms") - return args + return parser if __name__ == "__main__": - args = parse_args() + args = get_args_parser().parse_args() + if args.backend in ("fbgemm", "qnnpack"): + raise ValueError( + "The --backend parameter has been re-purposed to specify the backend of the transforms (PIL or Tensor) " + "instead of the quantized backend. Please use the --qbackend parameter to specify the quantized backend." 
+ ) main(args) diff --git a/references/classification/transforms.py b/references/classification/transforms.py new file mode 100644 index 00000000000..96236608eec --- /dev/null +++ b/references/classification/transforms.py @@ -0,0 +1,206 @@ +import math +from typing import Tuple + +import torch +from presets import get_module +from torch import Tensor +from torchvision.transforms import functional as F + + +def get_mixup_cutmix(*, mixup_alpha, cutmix_alpha, num_classes, use_v2): + transforms_module = get_module(use_v2) + + mixup_cutmix = [] + if mixup_alpha > 0: + mixup_cutmix.append( + transforms_module.MixUp(alpha=mixup_alpha, num_classes=num_classes) + if use_v2 + else RandomMixUp(num_classes=num_classes, p=1.0, alpha=mixup_alpha) + ) + if cutmix_alpha > 0: + mixup_cutmix.append( + transforms_module.CutMix(alpha=cutmix_alpha, num_classes=num_classes) + if use_v2 + else RandomCutMix(num_classes=num_classes, p=1.0, alpha=cutmix_alpha) + ) + if not mixup_cutmix: + return None + + return transforms_module.RandomChoice(mixup_cutmix) + + +class RandomMixUp(torch.nn.Module): + """Randomly apply MixUp to the provided batch and targets. + The class implements the data augmentations as described in the paper + `"mixup: Beyond Empirical Risk Minimization" `_. + + Args: + num_classes (int): number of classes used for one-hot encoding. + p (float): probability of the batch being transformed. Default value is 0.5. + alpha (float): hyperparameter of the Beta distribution used for mixup. + Default value is 1.0. + inplace (bool): boolean to make this transform inplace. Default set to False. + """ + + def __init__(self, num_classes: int, p: float = 0.5, alpha: float = 1.0, inplace: bool = False) -> None: + super().__init__() + + if num_classes < 1: + raise ValueError( + f"Please provide a valid positive value for the num_classes. Got num_classes={num_classes}" + ) + + if alpha <= 0: + raise ValueError("Alpha param can't be zero.") + + self.num_classes = num_classes + self.p = p + self.alpha = alpha + self.inplace = inplace + + def forward(self, batch: Tensor, target: Tensor) -> Tuple[Tensor, Tensor]: + """ + Args: + batch (Tensor): Float tensor of size (B, C, H, W) + target (Tensor): Integer tensor of size (B, ) + + Returns: + Tensor: Randomly transformed batch. + """ + if batch.ndim != 4: + raise ValueError(f"Batch ndim should be 4. Got {batch.ndim}") + if target.ndim != 1: + raise ValueError(f"Target ndim should be 1. Got {target.ndim}") + if not batch.is_floating_point(): + raise TypeError(f"Batch dtype should be a float tensor. Got {batch.dtype}.") + if target.dtype != torch.int64: + raise TypeError(f"Target dtype should be torch.int64. Got {target.dtype}") + + if not self.inplace: + batch = batch.clone() + target = target.clone() + + if target.ndim == 1: + target = torch.nn.functional.one_hot(target, num_classes=self.num_classes).to(dtype=batch.dtype) + + if torch.rand(1).item() >= self.p: + return batch, target + + # It's faster to roll the batch by one instead of shuffling it to create image pairs + batch_rolled = batch.roll(1, 0) + target_rolled = target.roll(1, 0) + + # Implemented as on mixup paper, page 3. 
+ lambda_param = float(torch._sample_dirichlet(torch.tensor([self.alpha, self.alpha]))[0]) + batch_rolled.mul_(1.0 - lambda_param) + batch.mul_(lambda_param).add_(batch_rolled) + + target_rolled.mul_(1.0 - lambda_param) + target.mul_(lambda_param).add_(target_rolled) + + return batch, target + + def __repr__(self) -> str: + s = ( + f"{self.__class__.__name__}(" + f"num_classes={self.num_classes}" + f", p={self.p}" + f", alpha={self.alpha}" + f", inplace={self.inplace}" + f")" + ) + return s + + +class RandomCutMix(torch.nn.Module): + """Randomly apply CutMix to the provided batch and targets. + The class implements the data augmentations as described in the paper + `"CutMix: Regularization Strategy to Train Strong Classifiers with Localizable Features" + `_. + + Args: + num_classes (int): number of classes used for one-hot encoding. + p (float): probability of the batch being transformed. Default value is 0.5. + alpha (float): hyperparameter of the Beta distribution used for cutmix. + Default value is 1.0. + inplace (bool): boolean to make this transform inplace. Default set to False. + """ + + def __init__(self, num_classes: int, p: float = 0.5, alpha: float = 1.0, inplace: bool = False) -> None: + super().__init__() + if num_classes < 1: + raise ValueError("Please provide a valid positive value for the num_classes.") + if alpha <= 0: + raise ValueError("Alpha param can't be zero.") + + self.num_classes = num_classes + self.p = p + self.alpha = alpha + self.inplace = inplace + + def forward(self, batch: Tensor, target: Tensor) -> Tuple[Tensor, Tensor]: + """ + Args: + batch (Tensor): Float tensor of size (B, C, H, W) + target (Tensor): Integer tensor of size (B, ) + + Returns: + Tensor: Randomly transformed batch. + """ + if batch.ndim != 4: + raise ValueError(f"Batch ndim should be 4. Got {batch.ndim}") + if target.ndim != 1: + raise ValueError(f"Target ndim should be 1. Got {target.ndim}") + if not batch.is_floating_point(): + raise TypeError(f"Batch dtype should be a float tensor. Got {batch.dtype}.") + if target.dtype != torch.int64: + raise TypeError(f"Target dtype should be torch.int64. Got {target.dtype}") + + if not self.inplace: + batch = batch.clone() + target = target.clone() + + if target.ndim == 1: + target = torch.nn.functional.one_hot(target, num_classes=self.num_classes).to(dtype=batch.dtype) + + if torch.rand(1).item() >= self.p: + return batch, target + + # It's faster to roll the batch by one instead of shuffling it to create image pairs + batch_rolled = batch.roll(1, 0) + target_rolled = target.roll(1, 0) + + # Implemented as on cutmix paper, page 12 (with minor corrections on typos). 
+ lambda_param = float(torch._sample_dirichlet(torch.tensor([self.alpha, self.alpha]))[0]) + _, H, W = F.get_dimensions(batch) + + r_x = torch.randint(W, (1,)) + r_y = torch.randint(H, (1,)) + + r = 0.5 * math.sqrt(1.0 - lambda_param) + r_w_half = int(r * W) + r_h_half = int(r * H) + + x1 = int(torch.clamp(r_x - r_w_half, min=0)) + y1 = int(torch.clamp(r_y - r_h_half, min=0)) + x2 = int(torch.clamp(r_x + r_w_half, max=W)) + y2 = int(torch.clamp(r_y + r_h_half, max=H)) + + batch[:, :, y1:y2, x1:x2] = batch_rolled[:, :, y1:y2, x1:x2] + lambda_param = float(1.0 - (x2 - x1) * (y2 - y1) / (W * H)) + + target_rolled.mul_(1.0 - lambda_param) + target.mul_(lambda_param).add_(target_rolled) + + return batch, target + + def __repr__(self) -> str: + s = ( + f"{self.__class__.__name__}(" + f"num_classes={self.num_classes}" + f", p={self.p}" + f", alpha={self.alpha}" + f", inplace={self.inplace}" + f")" + ) + return s diff --git a/references/classification/utils.py b/references/classification/utils.py index 5ea6dfef341..7d9f0136ae8 100644 --- a/references/classification/utils.py +++ b/references/classification/utils.py @@ -1,15 +1,17 @@ -from __future__ import print_function -from collections import defaultdict, deque +import copy import datetime +import errno +import hashlib +import os import time +from collections import defaultdict, deque, OrderedDict +from typing import List, Optional, Tuple + import torch import torch.distributed as dist -import errno -import os - -class SmoothedValue(object): +class SmoothedValue: """Track a series of values and provide access to smoothed values over a window or the global series average. """ @@ -31,11 +33,7 @@ def synchronize_between_processes(self): """ Warning: does not synchronize the deque! """ - if not is_dist_avail_and_initialized(): - return - t = torch.tensor([self.count, self.total], dtype=torch.float64, device='cuda') - dist.barrier() - dist.all_reduce(t) + t = reduce_across_processes([self.count, self.total]) t = t.tolist() self.count = int(t[0]) self.total = t[1] @@ -64,14 +62,11 @@ def value(self): def __str__(self): return self.fmt.format( - median=self.median, - avg=self.avg, - global_avg=self.global_avg, - max=self.max, - value=self.value) + median=self.median, avg=self.avg, global_avg=self.global_avg, max=self.max, value=self.value + ) -class MetricLogger(object): +class MetricLogger: def __init__(self, delimiter="\t"): self.meters = defaultdict(SmoothedValue) self.delimiter = delimiter @@ -88,15 +83,12 @@ def __getattr__(self, attr): return self.meters[attr] if attr in self.__dict__: return self.__dict__[attr] - raise AttributeError("'{}' object has no attribute '{}'".format( - type(self).__name__, attr)) + raise AttributeError(f"'{type(self).__name__}' object has no attribute '{attr}'") def __str__(self): loss_str = [] for name, meter in self.meters.items(): - loss_str.append( - "{}: {}".format(name, str(meter)) - ) + loss_str.append(f"{name}: {str(meter)}") return self.delimiter.join(loss_str) def synchronize_between_processes(self): @@ -109,31 +101,28 @@ def add_meter(self, name, meter): def log_every(self, iterable, print_freq, header=None): i = 0 if not header: - header = '' + header = "" start_time = time.time() end = time.time() - iter_time = SmoothedValue(fmt='{avg:.4f}') - data_time = SmoothedValue(fmt='{avg:.4f}') - space_fmt = ':' + str(len(str(len(iterable)))) + 'd' + iter_time = SmoothedValue(fmt="{avg:.4f}") + data_time = SmoothedValue(fmt="{avg:.4f}") + space_fmt = ":" + str(len(str(len(iterable)))) + "d" if 
torch.cuda.is_available(): - log_msg = self.delimiter.join([ - header, - '[{0' + space_fmt + '}/{1}]', - 'eta: {eta}', - '{meters}', - 'time: {time}', - 'data: {data}', - 'max mem: {memory:.0f}' - ]) + log_msg = self.delimiter.join( + [ + header, + "[{0" + space_fmt + "}/{1}]", + "eta: {eta}", + "{meters}", + "time: {time}", + "data: {data}", + "max mem: {memory:.0f}", + ] + ) else: - log_msg = self.delimiter.join([ - header, - '[{0' + space_fmt + '}/{1}]', - 'eta: {eta}', - '{meters}', - 'time: {time}', - 'data: {data}' - ]) + log_msg = self.delimiter.join( + [header, "[{0" + space_fmt + "}/{1}]", "eta: {eta}", "{meters}", "time: {time}", "data: {data}"] + ) MB = 1024.0 * 1024.0 for obj in iterable: data_time.update(time.time() - end) @@ -143,28 +132,51 @@ def log_every(self, iterable, print_freq, header=None): eta_seconds = iter_time.global_avg * (len(iterable) - i) eta_string = str(datetime.timedelta(seconds=int(eta_seconds))) if torch.cuda.is_available(): - print(log_msg.format( - i, len(iterable), eta=eta_string, - meters=str(self), - time=str(iter_time), data=str(data_time), - memory=torch.cuda.max_memory_allocated() / MB)) + print( + log_msg.format( + i, + len(iterable), + eta=eta_string, + meters=str(self), + time=str(iter_time), + data=str(data_time), + memory=torch.cuda.max_memory_allocated() / MB, + ) + ) else: - print(log_msg.format( - i, len(iterable), eta=eta_string, - meters=str(self), - time=str(iter_time), data=str(data_time))) + print( + log_msg.format( + i, len(iterable), eta=eta_string, meters=str(self), time=str(iter_time), data=str(data_time) + ) + ) i += 1 end = time.time() total_time = time.time() - start_time total_time_str = str(datetime.timedelta(seconds=int(total_time))) - print('{} Total time: {}'.format(header, total_time_str)) + print(f"{header} Total time: {total_time_str}") + + +class ExponentialMovingAverage(torch.optim.swa_utils.AveragedModel): + """Maintains moving averages of model parameters using an exponential decay. + ``ema_avg = decay * avg_model_param + (1 - decay) * model_param`` + `torch.optim.swa_utils.AveragedModel `_ + is used to compute the EMA. 
+ """ + + def __init__(self, model, decay, device="cpu"): + def ema_avg(avg_model_param, model_param, num_averaged): + return decay * avg_model_param + (1 - decay) * model_param + + super().__init__(model, device, ema_avg, use_buffers=True) def accuracy(output, target, topk=(1,)): """Computes the accuracy over the k top predictions for the specified values of k""" - with torch.no_grad(): + with torch.inference_mode(): maxk = max(topk) batch_size = target.size(0) + if target.ndim == 2: + target = target.max(dim=1)[1] _, pred = output.topk(maxk, 1, True, True) pred = pred.t() @@ -190,10 +202,11 @@ def setup_for_distributed(is_master): This function disables printing when not in master process """ import builtins as __builtin__ + builtin_print = __builtin__.print def print(*args, **kwargs): - force = kwargs.pop('force', False) + force = kwargs.pop("force", False) if is_master or force: builtin_print(*args, **kwargs) @@ -230,26 +243,222 @@ def save_on_master(*args, **kwargs): def init_distributed_mode(args): - if 'RANK' in os.environ and 'WORLD_SIZE' in os.environ: + if "RANK" in os.environ and "WORLD_SIZE" in os.environ: args.rank = int(os.environ["RANK"]) - args.world_size = int(os.environ['WORLD_SIZE']) - args.gpu = int(os.environ['LOCAL_RANK']) - elif 'SLURM_PROCID' in os.environ: - args.rank = int(os.environ['SLURM_PROCID']) + args.world_size = int(os.environ["WORLD_SIZE"]) + args.gpu = int(os.environ["LOCAL_RANK"]) + elif "SLURM_PROCID" in os.environ: + args.rank = int(os.environ["SLURM_PROCID"]) args.gpu = args.rank % torch.cuda.device_count() elif hasattr(args, "rank"): pass else: - print('Not using distributed mode') + print("Not using distributed mode") args.distributed = False return args.distributed = True torch.cuda.set_device(args.gpu) - args.dist_backend = 'nccl' - print('| distributed init (rank {}): {}'.format( - args.rank, args.dist_url), flush=True) - torch.distributed.init_process_group(backend=args.dist_backend, init_method=args.dist_url, - world_size=args.world_size, rank=args.rank) + args.dist_backend = "nccl" + print(f"| distributed init (rank {args.rank}): {args.dist_url}", flush=True) + torch.distributed.init_process_group( + backend=args.dist_backend, init_method=args.dist_url, world_size=args.world_size, rank=args.rank + ) + torch.distributed.barrier() setup_for_distributed(args.rank == 0) + + +def average_checkpoints(inputs): + """Loads checkpoints from inputs and returns a model with averaged weights. Original implementation taken from: + https://github.com/pytorch/fairseq/blob/a48f235636557b8d3bc4922a6fa90f3a0fa57955/scripts/average_checkpoints.py#L16 + + Args: + inputs (List[str]): An iterable of string paths of checkpoints to load from. + Returns: + A dict of string keys mapping to various values. The 'model' key + from the returned dict should correspond to an OrderedDict mapping + string parameter names to torch Tensors. 
+ """ + params_dict = OrderedDict() + params_keys = None + new_state = None + num_models = len(inputs) + for fpath in inputs: + with open(fpath, "rb") as f: + state = torch.load( + f, map_location=(lambda s, _: torch.serialization.default_restore_location(s, "cpu")), weights_only=True + ) + # Copies over the settings from the first checkpoint + if new_state is None: + new_state = state + model_params = state["model"] + model_params_keys = list(model_params.keys()) + if params_keys is None: + params_keys = model_params_keys + elif params_keys != model_params_keys: + raise KeyError( + f"For checkpoint {f}, expected list of params: {params_keys}, but found: {model_params_keys}" + ) + for k in params_keys: + p = model_params[k] + if isinstance(p, torch.HalfTensor): + p = p.float() + if k not in params_dict: + params_dict[k] = p.clone() + # NOTE: clone() is needed in case of p is a shared parameter + else: + params_dict[k] += p + averaged_params = OrderedDict() + for k, v in params_dict.items(): + averaged_params[k] = v + if averaged_params[k].is_floating_point(): + averaged_params[k].div_(num_models) + else: + averaged_params[k] //= num_models + new_state["model"] = averaged_params + return new_state + + +def store_model_weights(model, checkpoint_path, checkpoint_key="model", strict=True): + """ + This method can be used to prepare weights files for new models. It receives as + input a model architecture and a checkpoint from the training script and produces + a file with the weights ready for release. + + Examples: + from torchvision import models as M + + # Classification + model = M.mobilenet_v3_large(weights=None) + print(store_model_weights(model, './class.pth')) + + # Quantized Classification + model = M.quantization.mobilenet_v3_large(weights=None, quantize=False) + model.fuse_model(is_qat=True) + model.qconfig = torch.ao.quantization.get_default_qat_qconfig('qnnpack') + _ = torch.ao.quantization.prepare_qat(model, inplace=True) + print(store_model_weights(model, './qat.pth')) + + # Object Detection + model = M.detection.fasterrcnn_mobilenet_v3_large_fpn(weights=None, weights_backbone=None) + print(store_model_weights(model, './obj.pth')) + + # Segmentation + model = M.segmentation.deeplabv3_mobilenet_v3_large(weights=None, weights_backbone=None, aux_loss=True) + print(store_model_weights(model, './segm.pth', strict=False)) + + Args: + model (pytorch.nn.Module): The model on which the weights will be loaded for validation purposes. + checkpoint_path (str): The path of the checkpoint we will load. + checkpoint_key (str, optional): The key of the checkpoint where the model weights are stored. + Default: "model". + strict (bool): whether to strictly enforce that the keys + in :attr:`state_dict` match the keys returned by this module's + :meth:`~torch.nn.Module.state_dict` function. Default: ``True`` + + Returns: + output_path (str): The location where the weights are saved. + """ + # Store the new model next to the checkpoint_path + checkpoint_path = os.path.abspath(checkpoint_path) + output_dir = os.path.dirname(checkpoint_path) + + # Deep copy to avoid side effects on the model object. + model = copy.deepcopy(model) + checkpoint = torch.load(checkpoint_path, map_location="cpu", weights_only=True) + + # Load the weights to the model to validate that everything works + # and remove unnecessary weights (such as auxiliaries, etc.) 
+ if checkpoint_key == "model_ema": + del checkpoint[checkpoint_key]["n_averaged"] + torch.nn.modules.utils.consume_prefix_in_state_dict_if_present(checkpoint[checkpoint_key], "module.") + model.load_state_dict(checkpoint[checkpoint_key], strict=strict) + + tmp_path = os.path.join(output_dir, str(model.__hash__())) + torch.save(model.state_dict(), tmp_path) + + sha256_hash = hashlib.sha256() + with open(tmp_path, "rb") as f: + # Read and update hash string value in blocks of 4K + for byte_block in iter(lambda: f.read(4096), b""): + sha256_hash.update(byte_block) + hh = sha256_hash.hexdigest() + + output_path = os.path.join(output_dir, "weights-" + str(hh[:8]) + ".pth") + os.replace(tmp_path, output_path) + + return output_path + + +def reduce_across_processes(val): + if not is_dist_avail_and_initialized(): + # nothing to sync, but we still convert to tensor for consistency with the distributed case. + return torch.tensor(val) + + t = torch.tensor(val, device="cuda") + dist.barrier() + dist.all_reduce(t) + return t + + +def set_weight_decay( + model: torch.nn.Module, + weight_decay: float, + norm_weight_decay: Optional[float] = None, + norm_classes: Optional[List[type]] = None, + custom_keys_weight_decay: Optional[List[Tuple[str, float]]] = None, +): + if not norm_classes: + norm_classes = [ + torch.nn.modules.batchnorm._BatchNorm, + torch.nn.LayerNorm, + torch.nn.GroupNorm, + torch.nn.modules.instancenorm._InstanceNorm, + torch.nn.LocalResponseNorm, + ] + norm_classes = tuple(norm_classes) + + params = { + "other": [], + "norm": [], + } + params_weight_decay = { + "other": weight_decay, + "norm": norm_weight_decay, + } + custom_keys = [] + if custom_keys_weight_decay is not None: + for key, weight_decay in custom_keys_weight_decay: + params[key] = [] + params_weight_decay[key] = weight_decay + custom_keys.append(key) + + def _add_params(module, prefix=""): + for name, p in module.named_parameters(recurse=False): + if not p.requires_grad: + continue + is_custom_key = False + for key in custom_keys: + target_name = f"{prefix}.{name}" if prefix != "" and "." in key else name + if key == target_name: + params[key].append(p) + is_custom_key = True + break + if not is_custom_key: + if norm_weight_decay is not None and isinstance(module, norm_classes): + params["norm"].append(p) + else: + params["other"].append(p) + + for child_name, child_module in module.named_children(): + child_prefix = f"{prefix}.{child_name}" if prefix != "" else child_name + _add_params(child_module, prefix=child_prefix) + + _add_params(model) + + param_groups = [] + for key in params: + if len(params[key]) > 0: + param_groups.append({"params": params[key], "weight_decay": params_weight_decay[key]}) + return param_groups diff --git a/references/depth/stereo/README.md b/references/depth/stereo/README.md new file mode 100644 index 00000000000..22bcae27ab0 --- /dev/null +++ b/references/depth/stereo/README.md @@ -0,0 +1,180 @@ +# Stereo Matching reference training scripts + +This folder contains reference training scripts for Stereo Matching. +They serve as a log of how to train specific models, so as to provide baseline +training and evaluation scripts to quickly bootstrap research. + + +### CREStereo + +The CREStereo model was trained on a dataset mixture between **CREStereo**, **ETH3D** and the additional split from **Middlebury2014**. +A ratio of **88-6-6** was used in order to train a baseline weight set. We provide multi-set variant as well. +Both used 8 A100 GPUs and a batch size of 2 (so effective batch size is 16). 
The
+rest of the hyper-parameters loosely follow the recipe from https://github.com/megvii-research/CREStereo.
+The original recipe trains for **300000** updates (or steps) on the dataset mixture. We modify the learning rate
+schedule to one that starts decaying much sooner. Throughout the experiments we found that this reduces overfitting
+at evaluation time, and that gradient clipping helps stabilize the loss around a premature learning rate change.
+
+```
+torchrun --nproc_per_node 8 --nnodes 1 train.py \
+    --dataset-root $dataset_root \
+    --name $name_cre \
+    --model crestereo_base \
+    --train-datasets crestereo eth3d-train middlebury2014-other \
+    --dataset-steps 264000 18000 18000 \
+    --batch-size 2 \
+    --lr 0.0004 \
+    --min-lr 0.00002 \
+    --lr-decay-method cosine \
+    --warmup-steps 6000 \
+    --decay-after-steps 30000 \
+    --clip-grad-norm 1.0
+```
+
+We employ a multi-set fine-tuning stage where we uniformly sample from multiple datasets. Given that some of these datasets have extremely large images (``2048x2048`` or more), we opt for a very aggressive scale-range ``[0.2 - 0.8]`` such that as much of the original frame composition as possible is captured inside the ``384x512`` crop.
+
+```
+torchrun --nproc_per_node 8 --nnodes 1 train.py \
+    --dataset-root $dataset_root \
+    --name $name_things \
+    --model crestereo_base \
+    --train-datasets crestereo eth3d-train middlebury2014-other instereo2k fallingthings carla-highres sintel sceneflow-monkaa sceneflow-driving \
+    --dataset-steps 12000 12000 12000 12000 12000 12000 12000 12000 12000 \
+    --batch-size 2 \
+    --scale-range 0.2 0.8 \
+    --lr 0.0004 \
+    --lr-decay-method cosine \
+    --decay-after-steps 0 \
+    --warmup-steps 0 \
+    --min-lr 0.00002 \
+    --resume-path $checkpoint_dir/$name_cre.pth
+```
+
+
+### Evaluation
+
+Evaluating the base weights:
+
+```
+torchrun --nproc_per_node 1 --nnodes 1 cascade_evaluation.py --dataset middlebury2014-train --batch-size 1 --dataset-root $dataset_root --model crestereo_base --weights CREStereo_Base_Weights.CRESTEREO_ETH_MBL_V1
+```
+
+This should give an **mae of about 1.416** on the train set of `Middlebury2014`. Results may vary slightly depending on the batch size and the number of GPUs. For the most accurate results use 1 GPU and `--batch-size 1`.
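+
+The `mae`, `rmse`, `1px`/`3px`/`5px` and `relepe` values in the logs below are standard disparity-error measures computed over the pixels that have ground truth. A minimal sketch of how such metrics are commonly defined follows; this is illustrative only: `pred`, `gt` and `valid` are hypothetical names, `fl-all` is omitted, and the authoritative definitions are the ones in `cascade_evaluation.py`:
+
+```
+import torch
+
+def disparity_metrics(pred: torch.Tensor, gt: torch.Tensor, valid: torch.Tensor) -> dict:
+    # pred, gt: (B, H, W) disparity maps; valid: boolean mask of annotated pixels.
+    err = (pred - gt).abs()[valid]
+    relepe = err / gt[valid].abs().clamp(min=1e-6)  # relative end-point error
+    return {
+        "mae": err.mean().item(),
+        "rmse": err.pow(2).mean().sqrt().item(),
+        # Share of valid pixels whose absolute error is below each threshold.
+        "1px": (err < 1).float().mean().item(),
+        "3px": (err < 3).float().mean().item(),
+        "5px": (err < 5).float().mean().item(),
+        "relepe": relepe.mean().item(),
+    }
+```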
The created log file should look like this, where the first key is the number of cascades and the nested key is the number of recursive iterations: + +``` +Dataset: middlebury2014-train @size: [384, 512]: +{ + 1: { + 2: {'mae': 2.363, 'rmse': 4.352, '1px': 0.611, '3px': 0.828, '5px': 0.891, 'relepe': 0.176, 'fl-all': 64.511} + 5: {'mae': 1.618, 'rmse': 3.71, '1px': 0.761, '3px': 0.879, '5px': 0.918, 'relepe': 0.154, 'fl-all': 77.128} + 10: {'mae': 1.416, 'rmse': 3.53, '1px': 0.777, '3px': 0.896, '5px': 0.933, 'relepe': 0.148, 'fl-all': 78.388} + 20: {'mae': 1.448, 'rmse': 3.583, '1px': 0.771, '3px': 0.893, '5px': 0.931, 'relepe': 0.145, 'fl-all': 77.7} + }, +} +{ + 2: { + 2: {'mae': 1.972, 'rmse': 4.125, '1px': 0.73, '3px': 0.865, '5px': 0.908, 'relepe': 0.169, 'fl-all': 74.396} + 5: {'mae': 1.403, 'rmse': 3.448, '1px': 0.793, '3px': 0.905, '5px': 0.937, 'relepe': 0.151, 'fl-all': 80.186} + 10: {'mae': 1.312, 'rmse': 3.368, '1px': 0.799, '3px': 0.912, '5px': 0.943, 'relepe': 0.148, 'fl-all': 80.379} + 20: {'mae': 1.376, 'rmse': 3.542, '1px': 0.796, '3px': 0.91, '5px': 0.942, 'relepe': 0.149, 'fl-all': 80.054} + }, +} +``` + +You can also evaluate the Finetuned weights: + +``` +torchrun --nproc_per_node 1 --nnodes 1 cascade_evaluation.py --dataset middlebury2014-train --batch-size 1 --dataset-root $dataset_root --model crestereo_base --weights CREStereo_Base_Weights.CRESTEREO_FINETUNE_MULTI_V1 +``` + +``` +Dataset: middlebury2014-train @size: [384, 512]: +{ + 1: { + 2: {'mae': 1.85, 'rmse': 3.797, '1px': 0.673, '3px': 0.862, '5px': 0.917, 'relepe': 0.171, 'fl-all': 69.736} + 5: {'mae': 1.111, 'rmse': 3.166, '1px': 0.838, '3px': 0.93, '5px': 0.957, 'relepe': 0.134, 'fl-all': 84.596} + 10: {'mae': 1.02, 'rmse': 3.073, '1px': 0.854, '3px': 0.938, '5px': 0.96, 'relepe': 0.129, 'fl-all': 86.042} + 20: {'mae': 0.993, 'rmse': 3.059, '1px': 0.855, '3px': 0.942, '5px': 0.967, 'relepe': 0.126, 'fl-all': 85.784} + }, +} +{ + 2: { + 2: {'mae': 1.667, 'rmse': 3.867, '1px': 0.78, '3px': 0.891, '5px': 0.922, 'relepe': 0.165, 'fl-all': 78.89} + 5: {'mae': 1.158, 'rmse': 3.278, '1px': 0.843, '3px': 0.926, '5px': 0.955, 'relepe': 0.135, 'fl-all': 84.556} + 10: {'mae': 1.046, 'rmse': 3.13, '1px': 0.85, '3px': 0.934, '5px': 0.96, 'relepe': 0.13, 'fl-all': 85.464} + 20: {'mae': 1.021, 'rmse': 3.102, '1px': 0.85, '3px': 0.935, '5px': 0.963, 'relepe': 0.129, 'fl-all': 85.417} + }, +} +``` + +Evaluating the author provided weights: + +``` +torchrun --nproc_per_node 1 --nnodes 1 cascade_evaluation.py --dataset middlebury2014-train --batch-size 1 --dataset-root $dataset_root --model crestereo_base --weights CREStereo_Base_Weights.MEGVII_V1 +``` + +``` +Dataset: middlebury2014-train @size: [384, 512]: +{ + 1: { + 2: {'mae': 1.704, 'rmse': 3.738, '1px': 0.738, '3px': 0.896, '5px': 0.933, 'relepe': 0.157, 'fl-all': 76.464} + 5: {'mae': 0.956, 'rmse': 2.963, '1px': 0.88, '3px': 0.948, '5px': 0.965, 'relepe': 0.124, 'fl-all': 88.186} + 10: {'mae': 0.792, 'rmse': 2.765, '1px': 0.905, '3px': 0.958, '5px': 0.97, 'relepe': 0.114, 'fl-all': 90.429} + 20: {'mae': 0.749, 'rmse': 2.706, '1px': 0.907, '3px': 0.961, '5px': 0.972, 'relepe': 0.113, 'fl-all': 90.807} + }, +} +{ + 2: { + 2: {'mae': 1.702, 'rmse': 3.784, '1px': 0.784, '3px': 0.894, '5px': 0.924, 'relepe': 0.172, 'fl-all': 80.313} + 5: {'mae': 0.932, 'rmse': 2.907, '1px': 0.877, '3px': 0.944, '5px': 0.963, 'relepe': 0.125, 'fl-all': 87.979} + 10: {'mae': 0.773, 'rmse': 2.768, '1px': 0.901, '3px': 0.958, '5px': 0.972, 'relepe': 0.117, 'fl-all': 90.43} + 20: {'mae': 
0.854, 'rmse': 2.971, '1px': 0.9, '3px': 0.957, '5px': 0.97, 'relepe': 0.122, 'fl-all': 90.269}
+    },
+}
+```
+
+# Concerns when training
+
+We encourage users to be aware of the **aspect-ratio** and **disparity scale** they are targeting when doing any sort of training or fine-tuning. The model is highly sensitive to these two factors; as a consequence, naive multi-set fine-tuning can reach `0.2 mae` relatively fast. We recommend that users pay close attention to how they **balance dataset sizing** when training such networks.
+
+Ideally, dataset scaling should be treated at an individual level, and a thorough **EDA** (exploratory data analysis) of the disparity distribution in random crops at the desired training / inference size should be performed prior to any large compute investments.
+
+### Disparity scaling
+
+##### Sample A
+
+The top row contains a sample from `Sintel` whereas the bottom row contains one from `Middlebury`.
+
+![Disparity1](assets/disparity-domain-drift.jpg)
+
+From left to right (`left_image`, `right_image`, `valid_mask`, `valid_mask & ground_truth`, `prediction`). **Darker is further away, lighter is closer**. In the case of `Sintel`, which is more closely aligned to the original distribution of `CREStereo`, we notice that the model accurately predicts the background scale, whereas in the case of `Middlebury2014` it cannot correctly estimate the continuous disparity. Notice that the frame composition is similar for both examples. The blue skybox in the `Sintel` scene behaves similarly to the `Middlebury` black background. However, because the `Middlebury` sample comes from an extremely large scene, the crop size of `384x512` does not correctly capture the general training distribution.
+
+
+
+
+##### Sample B
+
+The top row contains a scene from `Sceneflow` using the `Monkaa` split whilst the bottom row is a scene from `Middlebury`. This sample exhibits the same issues when it comes to **background estimation**. Given the exaggerated size of the `Middlebury` samples, the model **collapses the smooth background** of the sample to what it considers to be a mean background disparity value.
+
+![Disparity2](assets/disparity-background-mode-collapse.jpg)
+
+
+For more detail on why this behaviour occurs as a function of the training distribution proportions, you can read more about the network at: https://github.com/pytorch/vision/pull/6629#discussion_r978160493
+
+
+### Metric overfitting
+
+##### Learning is critical in the beginning
+
+We also advise users to make use of faster training schedules, as the performance gain over long periods of time is marginal. Here we exhibit the difference between a faster decay schedule and a later decay schedule.
+
+![Loss1](assets/Loss.jpg)
+
+In **grey** we set the lr decay to begin after `30000` steps whilst in **orange** we opt for a very late learning rate decay at around `180000` steps. Although it exhibits stronger variance, we can notice that starting the learning rate decay earlier whilst employing `gradient-norm` clipping outperforms the default configuration.
+
+##### Gradient norm saves time
+
+![Loss2](assets/gradient-norm-removal.jpg)
+
+In **grey** we keep ``gradient norm`` enabled whilst in **orange** we do not. We can notice that removing the gradient norm exacerbates the performance decrease in the early stages whilst also showcasing an almost complete collapse around the `60000` step mark, where we started decaying the lr for **orange**.
+
+Although both runs achieve an improvement of about ``0.1`` mae after the lr decay starts, its benefits are observable much faster when ``gradient norm`` is employed, as the recovery period no longer has to be accounted for.
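+
+For reference, ``gradient norm`` here refers to global gradient-norm clipping applied between the backward pass and the optimizer step, controlled by ``--clip-grad-norm``. A minimal sketch of the relevant part of the training step (names are illustrative):
+
+```
+import torch
+
+def training_step(model, optimizer, loss, clip_grad_norm=1.0):
+    optimizer.zero_grad()
+    loss.backward()
+    # clip the global norm of all gradients before stepping, as train.py does
+    torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm=clip_grad_norm)
+    optimizer.step()
+```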
diff --git a/references/depth/stereo/__init__.py b/references/depth/stereo/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/references/depth/stereo/assets/Loss.jpg b/references/depth/stereo/assets/Loss.jpg
new file mode 100644
index 00000000000..b6db8e204af
Binary files /dev/null and b/references/depth/stereo/assets/Loss.jpg differ
diff --git a/references/depth/stereo/assets/disparity-background-mode-collapse.jpg b/references/depth/stereo/assets/disparity-background-mode-collapse.jpg
new file mode 100644
index 00000000000..b6542e8814f
Binary files /dev/null and b/references/depth/stereo/assets/disparity-background-mode-collapse.jpg differ
diff --git a/references/depth/stereo/assets/disparity-domain-drift.jpg b/references/depth/stereo/assets/disparity-domain-drift.jpg
new file mode 100644
index 00000000000..8a98de03675
Binary files /dev/null and b/references/depth/stereo/assets/disparity-domain-drift.jpg differ
diff --git a/references/depth/stereo/assets/gradient-norm-removal.jpg b/references/depth/stereo/assets/gradient-norm-removal.jpg
new file mode 100644
index 00000000000..2c3c8459d5e
Binary files /dev/null and b/references/depth/stereo/assets/gradient-norm-removal.jpg differ
diff --git a/references/depth/stereo/cascade_evaluation.py b/references/depth/stereo/cascade_evaluation.py
new file mode 100644
index 00000000000..7cb6413f1a5
--- /dev/null
+++ b/references/depth/stereo/cascade_evaluation.py
@@ -0,0 +1,299 @@
+import os
+import warnings
+
+import torch
+import torchvision
+import torchvision.prototype.models.depth.stereo
+import utils
+from torch.nn import functional as F
+from train import make_eval_loader
+
+from utils.metrics import AVAILABLE_METRICS
+from visualization import make_prediction_image_side_to_side
+
+
+def get_args_parser(add_help=True):
+    import argparse
+
+    parser = argparse.ArgumentParser(description="PyTorch Stereo Matching Evaluation", add_help=add_help)
+    parser.add_argument("--dataset", type=str, default="middlebury2014-train", help="dataset to use")
+    parser.add_argument("--dataset-root", type=str, default="", help="root of the dataset")
+
+    parser.add_argument("--checkpoint", type=str, default="", help="path to weights")
+    parser.add_argument("--weights", type=str, default=None, help="torchvision API weight")
+    parser.add_argument(
+        "--model",
+        type=str,
+        default="crestereo_base",
+        help="which model to use if not specifying a training checkpoint",
+    )
+    parser.add_argument("--img-folder", type=str, default="images")
+
+    parser.add_argument("--batch-size", type=int, default=1, help="batch size")
+    parser.add_argument("--workers", type=int, default=0, help="number of workers")
+
+    parser.add_argument("--eval-size", type=int, nargs="+", default=[384, 512], help="resize size")
+    parser.add_argument(
+        "--norm-mean", type=float, nargs="+", default=[0.5, 0.5, 0.5], help="mean for image normalization"
+    )
+    parser.add_argument(
+        "--norm-std", type=float, nargs="+", default=[0.5, 0.5, 0.5], help="std for image normalization"
+    )
+    parser.add_argument(
+        "--use-grayscale", action="store_true", help="use grayscale images instead of RGB", default=False
+    )
+    parser.add_argument("--max-disparity", type=float, default=None, help="maximum disparity")
+    parser.add_argument(
+        "--interpolation-strategy",
+        type=str,
+        default="bilinear",
+        help="interpolation strategy",
+        choices=["bilinear", "bicubic", "mixed"],
+    )
+
+    parser.add_argument("--n_iterations", nargs="+", type=int, default=[10], help="number of recurrent iterations")
+    parser.add_argument("--n_cascades", nargs="+", type=int, default=[1], help="number of cascades")
+    parser.add_argument(
+        "--metrics",
+        type=str,
+        nargs="+",
+        default=["mae", "rmse", "1px", "3px", "5px", "relepe"],
+        help="metrics to log",
+        choices=AVAILABLE_METRICS,
+    )
+    parser.add_argument("--mixed-precision", action="store_true", help="use mixed precision training")
+
+    parser.add_argument("--world-size", type=int, default=1, help="number of distributed processes")
+    parser.add_argument("--dist-url", type=str, default="env://", help="url used to set up distributed training")
+    parser.add_argument("--device", type=str, default="cuda", help="device to use for training")
+
+    parser.add_argument("--save-images", action="store_true", help="save images of the predictions")
+    parser.add_argument("--padder-type", type=str, default="kitti", help="padder type", choices=["kitti", "sintel"])
+
+    return parser
+
+
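+# cascade_inference runs the model coarse-to-fine over a small image pyramid: each
+# extra cascade adds an input pair downsampled by a further factor of 2, inference
+# starts at the coarsest pair with flow_init=None, and every finer level is seeded
+# with the last flow estimate produced at the previous (coarser) level.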
default="bilinear", + help="interpolation strategy", + choices=["bilinear", "bicubic", "mixed"], + ) + + parser.add_argument("--n_iterations", nargs="+", type=int, default=[10], help="number of recurent iterations") + parser.add_argument("--n_cascades", nargs="+", type=int, default=[1], help="number of cascades") + parser.add_argument( + "--metrics", + type=str, + nargs="+", + default=["mae", "rmse", "1px", "3px", "5px", "relepe"], + help="metrics to log", + choices=AVAILABLE_METRICS, + ) + parser.add_argument("--mixed-precision", action="store_true", help="use mixed precision training") + + parser.add_argument("--world-size", type=int, default=1, help="number of distributed processes") + parser.add_argument("--dist-url", type=str, default="env://", help="url used to set up distributed training") + parser.add_argument("--device", type=str, default="cuda", help="device to use for training") + + parser.add_argument("--save-images", action="store_true", help="save images of the predictions") + parser.add_argument("--padder-type", type=str, default="kitti", help="padder type", choices=["kitti", "sintel"]) + + return parser + + +def cascade_inference(model, image_left, image_right, iterations, cascades): + # check that image size is divisible by 16 * (2 ** (cascades - 1)) + for image in [image_left, image_right]: + if image.shape[-2] % ((2 ** (cascades - 1))) != 0: + raise ValueError( + f"image height is not divisible by {16 * (2 ** (cascades - 1))}. Image shape: {image.shape[-2]}" + ) + + if image.shape[-1] % ((2 ** (cascades - 1))) != 0: + raise ValueError( + f"image width is not divisible by {16 * (2 ** (cascades - 1))}. Image shape: {image.shape[-2]}" + ) + + left_image_pyramid = [image_left] + right_image_pyramid = [image_right] + for idx in range(0, cascades - 1): + ds_factor = int(2 ** (idx + 1)) + ds_shape = (image_left.shape[-2] // ds_factor, image_left.shape[-1] // ds_factor) + left_image_pyramid += F.interpolate(image_left, size=ds_shape, mode="bilinear", align_corners=True).unsqueeze(0) + right_image_pyramid += F.interpolate(image_right, size=ds_shape, mode="bilinear", align_corners=True).unsqueeze( + 0 + ) + + flow_init = None + for left_image, right_image in zip(reversed(left_image_pyramid), reversed(right_image_pyramid)): + flow_pred = model(left_image, right_image, flow_init, num_iters=iterations) + # flow pred is a list + flow_init = flow_pred[-1] + + return flow_init + + +@torch.inference_mode() +def _evaluate( + model, + args, + val_loader, + *, + padder_mode, + print_freq=10, + writer=None, + step=None, + iterations=10, + cascades=1, + batch_size=None, + header=None, + save_images=False, + save_path="", +): + """Helper function to compute various metrics (epe, etc.) for a model on a given dataset. + We process as many samples as possible with ddp. 
+ """ + model.eval() + header = header or "Test:" + device = torch.device(args.device) + metric_logger = utils.MetricLogger(delimiter=" ") + + iterations = iterations or args.recurrent_updates + + logger = utils.MetricLogger() + for meter_name in args.metrics: + logger.add_meter(meter_name, fmt="{global_avg:.4f}") + if "fl-all" not in args.metrics: + logger.add_meter("fl-all", fmt="{global_avg:.4f}") + + num_processed_samples = 0 + with torch.cuda.amp.autocast(enabled=args.mixed_precision, dtype=torch.float16): + batch_idx = 0 + for blob in metric_logger.log_every(val_loader, print_freq, header): + image_left, image_right, disp_gt, valid_disp_mask = (x.to(device) for x in blob) + padder = utils.InputPadder(image_left.shape, mode=padder_mode) + image_left, image_right = padder.pad(image_left, image_right) + + disp_pred = cascade_inference(model, image_left, image_right, iterations, cascades) + disp_pred = disp_pred[:, :1, :, :] + disp_pred = padder.unpad(disp_pred) + + if save_images: + if args.distributed: + rank_prefix = args.rank + else: + rank_prefix = 0 + make_prediction_image_side_to_side( + disp_pred, disp_gt, valid_disp_mask, save_path, prefix=f"batch_{rank_prefix}_{batch_idx}" + ) + + metrics, _ = utils.compute_metrics(disp_pred, disp_gt, valid_disp_mask, metrics=logger.meters.keys()) + num_processed_samples += image_left.shape[0] + for name in metrics: + logger.meters[name].update(metrics[name], n=1) + + batch_idx += 1 + + num_processed_samples = utils.reduce_across_processes(num_processed_samples) / args.world_size + + print("Num_processed_samples: ", num_processed_samples) + if ( + hasattr(val_loader.dataset, "__len__") + and len(val_loader.dataset) != num_processed_samples + and torch.distributed.get_rank() == 0 + ): + warnings.warn( + f"Number of processed samples {num_processed_samples} is different" + f"from the dataset size {len(val_loader.dataset)}. This may happen if" + "the dataset is not divisible by the batch size. Try lowering the batch size for more accurate results." 
+ ) + + if writer is not None and args.rank == 0: + for meter_name, meter_value in logger.meters.items(): + scalar_name = f"{meter_name} {header}" + writer.add_scalar(scalar_name, meter_value.avg, step) + + logger.synchronize_between_processes() + print(header, logger) + + logger_metrics = {k: v.global_avg for k, v in logger.meters.items()} + return logger_metrics + + +def evaluate(model, loader, args, writer=None, step=None): + os.makedirs(args.img_folder, exist_ok=True) + checkpoint_name = os.path.basename(args.checkpoint) or args.weights + image_checkpoint_folder = os.path.join(args.img_folder, checkpoint_name) + + metrics = {} + base_image_folder = os.path.join(image_checkpoint_folder, args.dataset) + os.makedirs(base_image_folder, exist_ok=True) + + for n_cascades in args.n_cascades: + for n_iters in args.n_iterations: + + config = f"{n_cascades}c_{n_iters}i" + config_image_folder = os.path.join(base_image_folder, config) + os.makedirs(config_image_folder, exist_ok=True) + + metrics[config] = _evaluate( + model, + args, + loader, + padder_mode=args.padder_type, + header=f"{args.dataset} evaluation@ size:{args.eval_size} n_cascades:{n_cascades} n_iters:{n_iters}", + batch_size=args.batch_size, + writer=writer, + step=step, + iterations=n_iters, + cascades=n_cascades, + save_path=config_image_folder, + save_images=args.save_images, + ) + + metric_log = [] + metric_log_dict = {} + # print the final results + for config in metrics: + config_tokens = config.split("_") + config_iters = config_tokens[1][:-1] + config_cascades = config_tokens[0][:-1] + + metric_log_dict[config_cascades] = metric_log_dict.get(config_cascades, {}) + metric_log_dict[config_cascades][config_iters] = metrics[config] + + evaluation_str = f"{args.dataset} evaluation@ size:{args.eval_size} n_cascades:{config_cascades} recurrent_updates:{config_iters}" + metrics_str = f"Metrics: {metrics[config]}" + metric_log.extend([evaluation_str, metrics_str]) + + print(evaluation_str) + print(metrics_str) + + eval_log_name = f"{checkpoint_name.replace('.pth', '')}_eval.log" + print("Saving eval log to: ", eval_log_name) + with open(eval_log_name, "w") as f: + f.write(f"Dataset: {args.dataset} @size: {args.eval_size}:\n") + # write the dict line by line for each key, and each value in the keys + for config_cascades in metric_log_dict: + f.write("{\n") + f.write(f"\t{config_cascades}: {{\n") + for config_iters in metric_log_dict[config_cascades]: + # convert every metric to 4 decimal places + metrics = metric_log_dict[config_cascades][config_iters] + metrics = {k: float(f"{v:.3f}") for k, v in metrics.items()} + f.write(f"\t\t{config_iters}: {metrics}\n") + f.write("\t},\n") + f.write("}\n") + + +def load_checkpoint(args): + utils.setup_ddp(args) + + if not args.weights: + checkpoint = torch.load(args.checkpoint, map_location=torch.device("cpu"), weights_only=True) + if "model" in checkpoint: + experiment_args = checkpoint["args"] + model = torchvision.prototype.models.depth.stereo.__dict__[experiment_args.model](weights=None) + model.load_state_dict(checkpoint["model"]) + else: + model = torchvision.prototype.models.depth.stereo.__dict__[args.model](weights=None) + model.load_state_dict(checkpoint) + + # set the appropriate devices + if args.distributed and args.device == "cpu": + raise ValueError("The device must be cuda if we want to run in distributed mode using torchrun") + device = torch.device(args.device) + else: + model = torchvision.prototype.models.depth.stereo.__dict__[args.model](weights=args.weights) + + # convert to 
DDP if need be
+    if args.distributed:
+        model = model.to(args.device)
+        model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu])
+    else:
+        model.to(args.device)
+
+    return model
+
+
+def main(args):
+    model = load_checkpoint(args)
+    loader = make_eval_loader(args.dataset, args)
+    evaluate(model, loader, args)
+
+
+if __name__ == "__main__":
+    args = get_args_parser().parse_args()
+    main(args)
diff --git a/references/depth/stereo/parsing.py b/references/depth/stereo/parsing.py
new file mode 100644
index 00000000000..71a3ba9904e
--- /dev/null
+++ b/references/depth/stereo/parsing.py
@@ -0,0 +1,89 @@
+import argparse
+from functools import partial
+
+import torch
+
+from presets import StereoMatchingEvalPreset, StereoMatchingTrainPreset
+from torchvision.datasets import (
+    CarlaStereo,
+    CREStereo,
+    ETH3DStereo,
+    FallingThingsStereo,
+    InStereo2k,
+    Kitti2012Stereo,
+    Kitti2015Stereo,
+    Middlebury2014Stereo,
+    SceneFlowStereo,
+    SintelStereo,
+)
+
+VALID_DATASETS = {
+    "crestereo": partial(CREStereo),
+    "carla-highres": partial(CarlaStereo),
+    "instereo2k": partial(InStereo2k),
+    "sintel": partial(SintelStereo),
+    "sceneflow-monkaa": partial(SceneFlowStereo, variant="Monkaa", pass_name="both"),
+    "sceneflow-flyingthings": partial(SceneFlowStereo, variant="FlyingThings3D", pass_name="both"),
+    "sceneflow-driving": partial(SceneFlowStereo, variant="Driving", pass_name="both"),
+    "fallingthings": partial(FallingThingsStereo, variant="both"),
+    "eth3d-train": partial(ETH3DStereo, split="train"),
+    "eth3d-test": partial(ETH3DStereo, split="test"),
+    "kitti2015-train": partial(Kitti2015Stereo, split="train"),
+    "kitti2015-test": partial(Kitti2015Stereo, split="test"),
+    "kitti2012-train": partial(Kitti2012Stereo, split="train"),
+    "kitti2012-test": partial(Kitti2012Stereo, split="test"),
+    "middlebury2014-other": partial(
+        Middlebury2014Stereo, split="additional", use_ambient_views=True, calibration="both"
+    ),
+    "middlebury2014-train": partial(Middlebury2014Stereo, split="train", calibration="perfect"),
+    "middlebury2014-test": partial(Middlebury2014Stereo, split="test", calibration=None),
+    "middlebury2014-train-ambient": partial(
+        Middlebury2014Stereo, split="train", use_ambient_views=True, calibration="perfect"
+    ),
+}
+
+
+def make_train_transform(args: argparse.Namespace) -> torch.nn.Module:
+    return StereoMatchingTrainPreset(
+        resize_size=args.resize_size,
+        crop_size=args.crop_size,
+        rescale_prob=args.rescale_prob,
+        scaling_type=args.scaling_type,
+        scale_range=args.scale_range,
+        scale_interpolation_type=args.interpolation_strategy,
+        use_grayscale=args.use_grayscale,
+        mean=args.norm_mean,
+        std=args.norm_std,
+        horizontal_flip_prob=args.flip_prob,
+        gpu_transforms=args.gpu_transforms,
+        max_disparity=args.max_disparity,
+        spatial_shift_prob=args.spatial_shift_prob,
+        spatial_shift_max_angle=args.spatial_shift_max_angle,
+        spatial_shift_max_displacement=args.spatial_shift_max_displacement,
+        spatial_shift_interpolation_type=args.interpolation_strategy,
+        gamma_range=args.gamma_range,
+        brightness=args.brightness_range,
+        contrast=args.contrast_range,
+        saturation=args.saturation_range,
+        hue=args.hue_range,
+        asymmetric_jitter_prob=args.asymmetric_jitter_prob,
+    )
+
+
+def make_eval_transform(args: argparse.Namespace) -> torch.nn.Module:
+    if args.eval_size is None:
+        resize_size = args.crop_size
+    else:
+        resize_size = args.eval_size
+
+    return StereoMatchingEvalPreset(
+        mean=args.norm_mean,
+        std=args.norm_std,
+        use_grayscale=args.use_grayscale,
resize_size=resize_size, + interpolation_type=args.interpolation_strategy, + ) + + +def make_dataset(dataset_name: str, dataset_root: str, transforms: torch.nn.Module) -> torch.utils.data.Dataset: + return VALID_DATASETS[dataset_name](root=dataset_root, transforms=transforms) diff --git a/references/depth/stereo/presets.py b/references/depth/stereo/presets.py new file mode 100644 index 00000000000..cadd2405178 --- /dev/null +++ b/references/depth/stereo/presets.py @@ -0,0 +1,144 @@ +from typing import Optional, Tuple, Union + +import torch +import transforms as T + + +class StereoMatchingEvalPreset(torch.nn.Module): + def __init__( + self, + mean: float = 0.5, + std: float = 0.5, + resize_size: Optional[Tuple[int, ...]] = None, + max_disparity: Optional[float] = None, + interpolation_type: str = "bilinear", + use_grayscale: bool = False, + ) -> None: + super().__init__() + + transforms = [ + T.ToTensor(), + T.ConvertImageDtype(torch.float32), + ] + + if use_grayscale: + transforms.append(T.ConvertToGrayscale()) + + if resize_size is not None: + transforms.append(T.Resize(resize_size, interpolation_type=interpolation_type)) + + transforms.extend( + [ + T.Normalize(mean=mean, std=std), + T.MakeValidDisparityMask(max_disparity=max_disparity), + T.ValidateModelInput(), + ] + ) + + self.transforms = T.Compose(transforms) + + def forward(self, images, disparities, masks): + return self.transforms(images, disparities, masks) + + +class StereoMatchingTrainPreset(torch.nn.Module): + def __init__( + self, + *, + resize_size: Optional[Tuple[int, ...]], + resize_interpolation_type: str = "bilinear", + # RandomResizeAndCrop params + crop_size: Tuple[int, int], + rescale_prob: float = 1.0, + scaling_type: str = "exponential", + scale_range: Tuple[float, float] = (-0.2, 0.5), + scale_interpolation_type: str = "bilinear", + # convert to grayscale + use_grayscale: bool = False, + # normalization params + mean: float = 0.5, + std: float = 0.5, + # processing device + gpu_transforms: bool = False, + # masking + max_disparity: Optional[int] = 256, + # SpatialShift params + spatial_shift_prob: float = 0.5, + spatial_shift_max_angle: float = 0.5, + spatial_shift_max_displacement: float = 0.5, + spatial_shift_interpolation_type: str = "bilinear", + # AssymetricColorJitter + gamma_range: Tuple[float, float] = (0.8, 1.2), + brightness: Union[int, Tuple[int, int]] = (0.8, 1.2), + contrast: Union[int, Tuple[int, int]] = (0.8, 1.2), + saturation: Union[int, Tuple[int, int]] = 0.0, + hue: Union[int, Tuple[int, int]] = 0.0, + asymmetric_jitter_prob: float = 1.0, + # RandomHorizontalFlip + horizontal_flip_prob: float = 0.5, + # RandomOcclusion + occlusion_prob: float = 0.0, + occlusion_px_range: Tuple[int, int] = (50, 100), + # RandomErase + erase_prob: float = 0.0, + erase_px_range: Tuple[int, int] = (50, 100), + erase_num_repeats: int = 1, + ) -> None: + + if scaling_type not in ["linear", "exponential"]: + raise ValueError(f"Unknown scaling type: {scaling_type}. 
Available types: linear, exponential")
+
+        super().__init__()
+        transforms = [T.ToTensor()]
+
+        # when fixing size across multiple datasets, we ensure
+        # that the same size is used for all datasets when cropping
+        if resize_size is not None:
+            transforms.append(T.Resize(resize_size, interpolation_type=resize_interpolation_type))
+
+        if gpu_transforms:
+            transforms.append(T.ToGPU())
+
+        # color handling
+        color_transforms = [
+            T.AsymmetricColorJitter(
+                brightness=brightness, contrast=contrast, saturation=saturation, hue=hue, p=asymmetric_jitter_prob
+            ),
+            T.AsymetricGammaAdjust(p=asymmetric_jitter_prob, gamma_range=gamma_range),
+        ]
+
+        if use_grayscale:
+            color_transforms.append(T.ConvertToGrayscale())
+
+        transforms.extend(color_transforms)
+
+        transforms.extend(
+            [
+                T.RandomSpatialShift(
+                    p=spatial_shift_prob,
+                    max_angle=spatial_shift_max_angle,
+                    max_px_shift=spatial_shift_max_displacement,
+                    interpolation_type=spatial_shift_interpolation_type,
+                ),
+                T.ConvertImageDtype(torch.float32),
+                T.RandomRescaleAndCrop(
+                    crop_size=crop_size,
+                    scale_range=scale_range,
+                    rescale_prob=rescale_prob,
+                    scaling_type=scaling_type,
+                    interpolation_type=scale_interpolation_type,
+                ),
+                T.RandomHorizontalFlip(horizontal_flip_prob),
+                # occlusion after flip, otherwise we're occluding the reference image
+                T.RandomOcclusion(p=occlusion_prob, occlusion_px_range=occlusion_px_range),
+                T.RandomErase(p=erase_prob, erase_px_range=erase_px_range, max_erase=erase_num_repeats),
+                T.Normalize(mean=mean, std=std),
+                T.MakeValidDisparityMask(max_disparity),
+                T.ValidateModelInput(),
+            ]
+        )
+
+        self.transforms = T.Compose(transforms)
+
+    def forward(self, images, disparities, mask):
+        return self.transforms(images, disparities, mask)
diff --git a/references/depth/stereo/train.py b/references/depth/stereo/train.py
new file mode 100644
index 00000000000..34332b46129
--- /dev/null
+++ b/references/depth/stereo/train.py
@@ -0,0 +1,789 @@
+import argparse
+import os
+import warnings
+from pathlib import Path
+from typing import List, Union
+
+import numpy as np
+import numpy.typing as npt
+import torch
+import torch.distributed as dist
+import torchvision.models.optical_flow
+import torchvision.prototype.models.depth.stereo
+import utils
+import visualization
+
+from parsing import make_dataset, make_eval_transform, make_train_transform, VALID_DATASETS
+from torch import nn
+from torchvision.transforms.functional import get_dimensions, InterpolationMode, resize
+from utils.metrics import AVAILABLE_METRICS
+from utils.norm import freeze_batch_norm
+
+
+def make_stereo_flow(
+    flow: Union[torch.Tensor, List[torch.Tensor]], model_out_channels: int
+) -> Union[torch.Tensor, List[torch.Tensor]]:
+    """Helper function to make stereo flow from a given model output"""
+    if isinstance(flow, list):
+        return [make_stereo_flow(flow_i, model_out_channels) for flow_i in flow]
+
+    B, C, H, W = flow.shape
+    # we need to add zero flow if the model outputs 2 channels
+    if C == 1 and model_out_channels == 2:
+        zero_flow = torch.zeros_like(flow)
+        # by convention the flow is X-Y axis, so we need the Y flow last
+        flow = torch.cat([flow, zero_flow], dim=1)
+    return flow
+
+
+def make_lr_schedule(args: argparse.Namespace, optimizer: torch.optim.Optimizer) -> torch.optim.lr_scheduler.SequentialLR:
+    """Helper function to return a learning rate scheduler for CRE-stereo"""
+    if args.decay_after_steps < args.warmup_steps:
+        raise ValueError(
+            f"decay_after_steps: {args.decay_after_steps} must be greater than warmup_steps: {args.warmup_steps}"
+        )
+
+    warmup_steps = args.warmup_steps if args.warmup_steps else 0
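+    # The schedule is assembled from up to three consecutive phases chained by
+    # SequentialLR: an optional warmup phase of `warmup_steps`, a flat phase at the
+    # base lr until `decay_after_steps`, and a decay phase for the remaining steps.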
+    flat_lr_steps = args.decay_after_steps - warmup_steps if args.decay_after_steps else 0
+    decay_lr_steps = args.total_iterations - args.decay_after_steps
+
+    max_lr = args.lr
+    min_lr = args.min_lr
+
+    schedulers = []
+    milestones = []
+
+    if warmup_steps > 0:
+        if args.lr_warmup_method == "linear":
+            warmup_lr_scheduler = torch.optim.lr_scheduler.LinearLR(
+                optimizer, start_factor=args.lr_warmup_factor, total_iters=warmup_steps
+            )
+        elif args.lr_warmup_method == "constant":
+            warmup_lr_scheduler = torch.optim.lr_scheduler.ConstantLR(
+                optimizer, factor=args.lr_warmup_factor, total_iters=warmup_steps
+            )
+        else:
+            raise ValueError(f"Unknown lr warmup method {args.lr_warmup_method}")
+        schedulers.append(warmup_lr_scheduler)
+        milestones.append(warmup_steps)
+
+    if flat_lr_steps > 0:
+        # hold the base lr constant during the flat phase; ConstantLR factors are
+        # relative to the base lr, not absolute lr values
+        flat_lr_scheduler = torch.optim.lr_scheduler.ConstantLR(optimizer, factor=1.0, total_iters=flat_lr_steps)
+        schedulers.append(flat_lr_scheduler)
+        milestones.append(flat_lr_steps + warmup_steps)
+
+    if decay_lr_steps > 0:
+        if args.lr_decay_method == "cosine":
+            decay_lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(
+                optimizer, T_max=decay_lr_steps, eta_min=min_lr
+            )
+        elif args.lr_decay_method == "linear":
+            # LinearLR factors are relative to the base lr, so decay from max_lr down to min_lr
+            decay_lr_scheduler = torch.optim.lr_scheduler.LinearLR(
+                optimizer, start_factor=1.0, end_factor=min_lr / max_lr, total_iters=decay_lr_steps
+            )
+        elif args.lr_decay_method == "exponential":
+            decay_lr_scheduler = torch.optim.lr_scheduler.ExponentialLR(
+                optimizer, gamma=args.lr_decay_gamma, last_epoch=-1
+            )
+        else:
+            raise ValueError(f"Unknown lr decay method {args.lr_decay_method}")
+        schedulers.append(decay_lr_scheduler)
+
+    scheduler = torch.optim.lr_scheduler.SequentialLR(optimizer, schedulers, milestones=milestones)
+    return scheduler
+
+
+def shuffle_dataset(dataset):
+    """Shuffle the dataset"""
+    perm = torch.randperm(len(dataset))
+    return torch.utils.data.Subset(dataset, perm)
+
+
+def resize_dataset_to_n_steps(
+    dataset: torch.utils.data.Dataset, dataset_steps: int, samples_per_step: int, args: argparse.Namespace
+) -> torch.utils.data.Dataset:
+    original_size = len(dataset)
+    if args.steps_is_epochs:
+        samples_per_step = original_size
+    target_size = dataset_steps * samples_per_step
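+    # Each dataset is replicated until it accounts for exactly `dataset_steps`
+    # optimizer steps, where one step consumes `world_size * batch_size` samples
+    # (or one full pass over the dataset when --steps-is-epochs is set).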
+
+    dataset_copies = []
+    n_expands, remainder = divmod(target_size, original_size)
+    for idx in range(n_expands):
+        dataset_copies.append(dataset)
+
+    if remainder > 0:
+        dataset_copies.append(torch.utils.data.Subset(dataset, list(range(remainder))))
+
+    if args.dataset_shuffle:
+        dataset_copies = [shuffle_dataset(dataset_copy) for dataset_copy in dataset_copies]
+
+    dataset = torch.utils.data.ConcatDataset(dataset_copies)
+    return dataset
+
+
+def get_train_dataset(dataset_root: str, args: argparse.Namespace) -> torch.utils.data.Dataset:
+    datasets = []
+    for dataset_name in args.train_datasets:
+        transform = make_train_transform(args)
+        dataset = make_dataset(dataset_name, dataset_root, transform)
+        datasets.append(dataset)
+
+    if len(datasets) == 0:
+        raise ValueError("No datasets specified for training")
+
+    samples_per_step = args.world_size * args.batch_size
+
+    for idx, (dataset, steps_per_dataset) in enumerate(zip(datasets, args.dataset_steps)):
+        datasets[idx] = resize_dataset_to_n_steps(dataset, steps_per_dataset, samples_per_step, args)
+
+    dataset = torch.utils.data.ConcatDataset(datasets)
+    if args.dataset_order_shuffle:
+        dataset = shuffle_dataset(dataset)
+
+    print(f"Training dataset: {len(dataset)} samples")
+    return dataset
+
+
+@torch.inference_mode()
+def _evaluate(
+    model,
+    args,
+    val_loader,
+    *,
+    padder_mode,
+    print_freq=10,
+    writer=None,
+    step=None,
+    iterations=None,
+    batch_size=None,
+    header=None,
+):
+    """Helper function to compute various metrics (epe, etc.) for a model on a given dataset."""
+    model.eval()
+    header = header or "Test:"
+    device = torch.device(args.device)
+    metric_logger = utils.MetricLogger(delimiter="  ")
+
+    iterations = iterations or args.recurrent_updates
+
+    logger = utils.MetricLogger()
+    for meter_name in args.metrics:
+        logger.add_meter(meter_name, fmt="{global_avg:.4f}")
+    if "fl-all" not in args.metrics:
+        logger.add_meter("fl-all", fmt="{global_avg:.4f}")
+
+    num_processed_samples = 0
+    with torch.cuda.amp.autocast(enabled=args.mixed_precision, dtype=torch.float16):
+        for blob in metric_logger.log_every(val_loader, print_freq, header):
+            image_left, image_right, disp_gt, valid_disp_mask = (x.to(device) for x in blob)
+            padder = utils.InputPadder(image_left.shape, mode=padder_mode)
+            image_left, image_right = padder.pad(image_left, image_right)
+
+            disp_predictions = model(image_left, image_right, flow_init=None, num_iters=iterations)
+            disp_pred = disp_predictions[-1][:, :1, :, :]
+            disp_pred = padder.unpad(disp_pred)
+
+            metrics, _ = utils.compute_metrics(disp_pred, disp_gt, valid_disp_mask, metrics=logger.meters.keys())
+            num_processed_samples += image_left.shape[0]
+            for name in metrics:
+                logger.meters[name].update(metrics[name], n=1)
+
+    num_processed_samples = utils.reduce_across_processes(num_processed_samples)
+
+    print("Num_processed_samples: ", num_processed_samples)
+    if (
+        hasattr(val_loader.dataset, "__len__")
+        and len(val_loader.dataset) != num_processed_samples
+        and torch.distributed.get_rank() == 0
+    ):
+        warnings.warn(
+            f"Number of processed samples {num_processed_samples} is different "
+            f"from the dataset size {len(val_loader.dataset)}. This may happen if "
+            "the dataset is not divisible by the batch size. Try lowering the batch size or GPU number for more accurate results."
+        )
+
+    if writer is not None and args.rank == 0:
+        for meter_name, meter_value in logger.meters.items():
+            scalar_name = f"{meter_name} {header}"
+            writer.add_scalar(scalar_name, meter_value.avg, step)
+
+    logger.synchronize_between_processes()
+    print(header, logger)
+
+
+def make_eval_loader(dataset_name: str, args: argparse.Namespace) -> torch.utils.data.DataLoader:
+    if args.weights:
+        weights = torchvision.models.get_weight(args.weights)
+        trans = weights.transforms()
+
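+        # Note: when the builtin weight transforms resize the inputs, the ground
+        # truth disparity has to be resized as well and rescaled by the width
+        # ratio, since disparity is expressed in pixels along the horizontal axis.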
+        def preprocessing(image_left, image_right, disp, valid_disp_mask):
+            C_o, H_o, W_o = get_dimensions(image_left)
+            image_left, image_right = trans(image_left, image_right)
+
+            C_t, H_t, W_t = get_dimensions(image_left)
+            scale_factor = W_t / W_o
+
+            if disp is not None and not isinstance(disp, torch.Tensor):
+                disp = torch.from_numpy(disp)
+            if W_t != W_o:
+                disp = resize(disp, (H_t, W_t), interpolation=InterpolationMode.BILINEAR) * scale_factor
+            if valid_disp_mask is not None and not isinstance(valid_disp_mask, torch.Tensor):
+                valid_disp_mask = torch.from_numpy(valid_disp_mask)
+            if W_t != W_o:
+                valid_disp_mask = resize(valid_disp_mask, (H_t, W_t), interpolation=InterpolationMode.NEAREST)
+            return image_left, image_right, disp, valid_disp_mask
+
+    else:
+        preprocessing = make_eval_transform(args)
+
+    val_dataset = make_dataset(dataset_name, args.dataset_root, transforms=preprocessing)
+    if args.distributed:
+        sampler = torch.utils.data.distributed.DistributedSampler(val_dataset, shuffle=False, drop_last=False)
+    else:
+        sampler = torch.utils.data.SequentialSampler(val_dataset)
+
+    val_loader = torch.utils.data.DataLoader(
+        val_dataset,
+        sampler=sampler,
+        batch_size=args.batch_size,
+        pin_memory=True,
+        num_workers=args.workers,
+    )
+
+    return val_loader
+
+
+def evaluate(model, loaders, args, writer=None, step=None):
+    for loader_name, loader in loaders.items():
+        _evaluate(
+            model,
+            args,
+            loader,
+            iterations=args.recurrent_updates,
+            padder_mode=args.padder_type,
+            header=f"{loader_name} evaluation",
+            batch_size=args.batch_size,
+            writer=writer,
+            step=step,
+        )
+
+
+def run(model, optimizer, scheduler, train_loader, val_loaders, logger, writer, scaler, args):
+    device = torch.device(args.device)
+    # wrap the loader in a logger
+    loader = iter(logger.log_every(train_loader))
+    # output channels
+    model_out_channels = model.module.output_channels if args.distributed else model.output_channels
+
+    torch.set_num_threads(args.threads)
+
+    sequence_criterion = utils.SequenceLoss(
+        gamma=args.gamma,
+        max_flow=args.max_disparity,
+        exclude_large_flows=args.flow_loss_exclude_large,
+    ).to(device)
+
+    if args.consistency_weight:
+        consistency_criterion = utils.FlowSequenceConsistencyLoss(
+            args.gamma,
+            resize_factor=0.25,
+            rescale_factor=0.25,
+            rescale_mode="bilinear",
+        ).to(device)
+    else:
+        consistency_criterion = None
+
+    if args.psnr_weight:
+        psnr_criterion = utils.PSNRLoss().to(device)
+    else:
+        psnr_criterion = None
+
+    if args.smoothness_weight:
+        smoothness_criterion = utils.SmoothnessLoss().to(device)
+    else:
+        smoothness_criterion = None
+
+    if args.photometric_weight:
+        photometric_criterion = utils.FlowPhotoMetricLoss(
+            ssim_weight=args.photometric_ssim_weight,
+            max_displacement_ratio=args.photometric_max_displacement_ratio,
+            ssim_use_padding=False,
+        ).to(device)
+    else:
+        photometric_criterion = None
+
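+    # The total objective is the sequence loss plus the optional auxiliary terms
+    # configured above; each auxiliary loss is scaled by its --*-weight flag and
+    # skipped entirely when that weight is 0.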
+    for step in range(args.start_step + 1, args.total_iterations + 1):
+        data_blob = next(loader)
+        optimizer.zero_grad()
+
+        # unpack the data blob
+        image_left, image_right, disp_mask, valid_disp_mask = (x.to(device) for x in data_blob)
+        with torch.cuda.amp.autocast(enabled=args.mixed_precision, dtype=torch.float16):
+            disp_predictions = model(image_left, image_right, flow_init=None, num_iters=args.recurrent_updates)
+            # different models have different outputs, make sure we get the right ones for this task
+            disp_predictions = make_stereo_flow(disp_predictions, model_out_channels)
+            # should the architecture or training loop require it, we have to adjust the disparity mask
+            # target to possibly look like an optical flow mask
+            disp_mask = make_stereo_flow(disp_mask, model_out_channels)
+
+            # sequence loss on top of the model outputs
+            loss = sequence_criterion(disp_predictions, disp_mask, valid_disp_mask) * args.flow_loss_weight
+
+            if args.consistency_weight > 0:
+                loss_consistency = consistency_criterion(disp_predictions)
+                loss += loss_consistency * args.consistency_weight
+
+            if args.psnr_weight > 0:
+                loss_psnr = 0.0
+                for pred in disp_predictions:
+                    # predictions might have 2 channels
+                    loss_psnr += psnr_criterion(
+                        pred * valid_disp_mask.unsqueeze(1),
+                        disp_mask * valid_disp_mask.unsqueeze(1),
+                    ).mean()  # average the psnr loss over the batch
+                loss += loss_psnr / len(disp_predictions) * args.psnr_weight
+
+            if args.photometric_weight > 0:
+                loss_photometric = 0.0
+                for pred in disp_predictions:
+                    # predictions might have 1 channel, therefore we need to impute 0s for the second channel
+                    if model_out_channels == 1:
+                        pred = torch.cat([pred, torch.zeros_like(pred)], dim=1)
+
+                    loss_photometric += photometric_criterion(
+                        image_left, image_right, pred, valid_disp_mask
+                    )  # the photometric loss already comes out averaged over the batch
+                loss += loss_photometric / len(disp_predictions) * args.photometric_weight
+
+            if args.smoothness_weight > 0:
+                loss_smoothness = 0.0
+                for pred in disp_predictions:
+                    # predictions might have 2 channels
+                    loss_smoothness += smoothness_criterion(
+                        image_left, pred[:, :1, :, :]
+                    ).mean()  # average the smoothness loss over the batch
+                loss += loss_smoothness / len(disp_predictions) * args.smoothness_weight
+
+            with torch.no_grad():
+                metrics, _ = utils.compute_metrics(
+                    disp_predictions[-1][:, :1, :, :],  # predictions might have 2 channels
+                    disp_mask[:, :1, :, :],  # so does the ground truth
+                    valid_disp_mask,
+                    args.metrics,
+                )
+
+        metrics.pop("fl-all", None)
+        logger.update(loss=loss, **metrics)
+
+        if scaler is not None:
+            scaler.scale(loss).backward()
+            scaler.unscale_(optimizer)
+            if args.clip_grad_norm:
+                torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm=args.clip_grad_norm)
+            scaler.step(optimizer)
+            scaler.update()
+        else:
+            loss.backward()
+            if args.clip_grad_norm:
+                torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm=args.clip_grad_norm)
+            optimizer.step()
+
+        scheduler.step()
+
+        if not dist.is_initialized() or dist.get_rank() == 0:
+            if writer is not None and step % args.tensorboard_log_frequency == 0:
+                # log the loss and metrics to tensorboard
+                writer.add_scalar("loss", loss, step)
+                for name, value in logger.meters.items():
+                    writer.add_scalar(name, value.avg, step)
+                # log the images to tensorboard
+                pred_grid = visualization.make_training_sample_grid(
+                    image_left, image_right, disp_mask, valid_disp_mask, disp_predictions
+                )
+                writer.add_image("predictions", pred_grid, step, dataformats="HWC")
+
+                # second thing we want to see is how relevant the iterative refinement is
+                pred_sequence_grid = visualization.make_disparity_sequence_grid(disp_predictions, disp_mask)
+                writer.add_image("sequence", pred_sequence_grid,
step, dataformats="HWC") + + if step % args.save_frequency == 0: + if not args.distributed or args.rank == 0: + model_without_ddp = ( + model.module if isinstance(model, torch.nn.parallel.DistributedDataParallel) else model + ) + checkpoint = { + "model": model_without_ddp.state_dict(), + "optimizer": optimizer.state_dict(), + "scheduler": scheduler.state_dict(), + "step": step, + "args": args, + } + os.makedirs(args.checkpoint_dir, exist_ok=True) + torch.save(checkpoint, Path(args.checkpoint_dir) / f"{args.name}_{step}.pth") + torch.save(checkpoint, Path(args.checkpoint_dir) / f"{args.name}.pth") + + if step % args.valid_frequency == 0: + evaluate(model, val_loaders, args, writer, step) + model.train() + if args.freeze_batch_norm: + if isinstance(model, nn.parallel.DistributedDataParallel): + freeze_batch_norm(model.module) + else: + freeze_batch_norm(model) + + # one final save at the end + if not args.distributed or args.rank == 0: + model_without_ddp = model.module if isinstance(model, torch.nn.parallel.DistributedDataParallel) else model + checkpoint = { + "model": model_without_ddp.state_dict(), + "optimizer": optimizer.state_dict(), + "scheduler": scheduler.state_dict(), + "step": step, + "args": args, + } + os.makedirs(args.checkpoint_dir, exist_ok=True) + torch.save(checkpoint, Path(args.checkpoint_dir) / f"{args.name}_{step}.pth") + torch.save(checkpoint, Path(args.checkpoint_dir) / f"{args.name}.pth") + + +def main(args): + args.total_iterations = sum(args.dataset_steps) + + # initialize DDP setting + utils.setup_ddp(args) + print(args) + + args.test_only = args.train_datasets is None + + # set the appropriate devices + if args.distributed and args.device == "cpu": + raise ValueError("The device must be cuda if we want to run in distributed mode using torchrun") + device = torch.device(args.device) + + # select model architecture + model = torchvision.prototype.models.depth.stereo.__dict__[args.model](weights=args.weights) + + # convert to DDP if need be + if args.distributed: + model = model.to(args.gpu) + model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu]) + model_without_ddp = model.module + else: + model.to(device) + model_without_ddp = model + + os.makedirs(args.checkpoint_dir, exist_ok=True) + + val_loaders = {name: make_eval_loader(name, args) for name in args.test_datasets} + + # EVAL ONLY configurations + if args.test_only: + evaluate(model, val_loaders, args) + return + + # Sanity check for the parameter count + print(f"Parameter Count: {sum(p.numel() for p in model.parameters() if p.requires_grad)}") + + # Compose the training dataset + train_dataset = get_train_dataset(args.dataset_root, args) + + # initialize the optimizer + if args.optimizer == "adam": + optimizer = torch.optim.Adam(model.parameters(), lr=args.lr, weight_decay=args.weight_decay) + elif args.optimizer == "sgd": + optimizer = torch.optim.SGD(model.parameters(), lr=args.lr, weight_decay=args.weight_decay, momentum=0.9) + else: + raise ValueError(f"Unknown optimizer {args.optimizer}. 
Please choose between adam and sgd")
+
+    # initialize the learning rate schedule
+    scheduler = make_lr_schedule(args, optimizer)
+
+    # load them from checkpoint if needed
+    args.start_step = 0
+    if args.resume_path is not None:
+        checkpoint = torch.load(args.resume_path, map_location="cpu", weights_only=True)
+        if "model" in checkpoint:
+            # this means the user requested to resume from a training checkpoint
+            model_without_ddp.load_state_dict(checkpoint["model"])
+            # this means the user wants to continue training from where it was left off
+            if args.resume_schedule:
+                optimizer.load_state_dict(checkpoint["optimizer"])
+                scheduler.load_state_dict(checkpoint["scheduler"])
+                args.start_step = checkpoint["step"] + 1
+                # modify the starting point of the data consumption; ConcatDataset
+                # does not support slicing, so take the remaining samples via a Subset
+                sample_start_step = args.start_step * args.batch_size * args.world_size
+                train_dataset = torch.utils.data.Subset(train_dataset, range(sample_start_step, len(train_dataset)))
+
+        else:
+            # this means the user wants to finetune on top of a model state dict
+            # and that no other changes are required
+            model_without_ddp.load_state_dict(checkpoint)
+
+    torch.backends.cudnn.benchmark = True
+
+    # enable training mode
+    model.train()
+    if args.freeze_batch_norm:
+        freeze_batch_norm(model_without_ddp)
+
+    # put the dataloader on top of the dataset
+    # make sure to disable shuffling since the dataset is already shuffled
+    # in order to guarantee quasi randomness whilst retaining a deterministic
+    # dataset consumption order
+    if args.distributed:
+        # the train dataset is preshuffled in order to respect the iteration order
+        sampler = torch.utils.data.distributed.DistributedSampler(train_dataset, shuffle=False, drop_last=True)
+    else:
+        # the train dataset is already shuffled, so we can use a simple SequentialSampler
+        sampler = torch.utils.data.SequentialSampler(train_dataset)
+
+    train_loader = torch.utils.data.DataLoader(
+        train_dataset,
+        sampler=sampler,
+        batch_size=args.batch_size,
+        pin_memory=True,
+        num_workers=args.workers,
+    )
+
+    # initialize the logger
+    if args.tensorboard_summaries:
+        from torch.utils.tensorboard import SummaryWriter
+
+        tensorboard_path = Path(args.checkpoint_dir) / "tensorboard"
+        os.makedirs(tensorboard_path, exist_ok=True)
+
+        tensorboard_run = tensorboard_path / f"{args.name}"
+        writer = SummaryWriter(tensorboard_run)
+    else:
+        writer = None
+
+    logger = utils.MetricLogger(delimiter="  ")
+
+    scaler = torch.cuda.amp.GradScaler() if args.mixed_precision else None
+    # run the training loop
+    # this will perform optimization, respectively logging and saving checkpoints
+    # when need be
+    run(
+        model=model,
+        optimizer=optimizer,
+        scheduler=scheduler,
+        train_loader=train_loader,
+        val_loaders=val_loaders,
+        logger=logger,
+        writer=writer,
+        scaler=scaler,
+        args=args,
+    )
+
+
+def get_args_parser(add_help=True):
+    import argparse
+
+    parser = argparse.ArgumentParser(description="PyTorch Stereo Matching Training", add_help=add_help)
+    # checkpointing
+    parser.add_argument("--name", default="crestereo", help="name of the experiment")
+    parser.add_argument("--resume", type=str, default=None, help="from which checkpoint to resume")
+    parser.add_argument("--checkpoint-dir", type=str, default="checkpoints", help="path to the checkpoint directory")
+
+    # dataset
+    parser.add_argument("--dataset-root", type=str, default="", help="path to the dataset root directory")
+    parser.add_argument(
+        "--train-datasets",
+        type=str,
+        nargs="+",
+        default=["crestereo"],
+        help="dataset(s) to train on",
+        choices=list(VALID_DATASETS.keys()),
+    )
+    parser.add_argument(
+        "--dataset-steps",
type=int, nargs="+", default=[300_000], help="number of steps for each dataset" + ) + parser.add_argument( + "--steps-is-epochs", action="store_true", help="if set, dataset-steps are interpreted as epochs" + ) + parser.add_argument( + "--test-datasets", + type=str, + nargs="+", + default=["middlebury2014-train"], + help="dataset(s) to test on", + choices=["middlebury2014-train"], + ) + parser.add_argument("--dataset-shuffle", type=bool, help="shuffle the dataset", default=True) + parser.add_argument("--dataset-order-shuffle", type=bool, help="shuffle the dataset order", default=True) + parser.add_argument("--batch-size", type=int, default=2, help="batch size per GPU") + parser.add_argument("--workers", type=int, default=4, help="number of workers per GPU") + parser.add_argument( + "--threads", + type=int, + default=16, + help="number of CPU threads per GPU. This can be changed around to speed-up transforms if needed. This can lead to worker thread contention so use with care.", + ) + + # model architecture + parser.add_argument( + "--model", + type=str, + default="crestereo_base", + help="model architecture", + choices=["crestereo_base", "raft_stereo"], + ) + parser.add_argument("--recurrent-updates", type=int, default=10, help="number of recurrent updates") + parser.add_argument("--freeze-batch-norm", action="store_true", help="freeze batch norm parameters") + + # loss parameters + parser.add_argument("--gamma", type=float, default=0.8, help="gamma parameter for the flow sequence loss") + parser.add_argument("--flow-loss-weight", type=float, default=1.0, help="weight for the flow loss") + parser.add_argument( + "--flow-loss-exclude-large", + action="store_true", + help="exclude large flow values from the loss. A large value is defined as a value greater than the ground truth flow norm", + default=False, + ) + parser.add_argument("--consistency-weight", type=float, default=0.0, help="consistency loss weight") + parser.add_argument( + "--consistency-resize-factor", + type=float, + default=0.25, + help="consistency loss resize factor to account for the fact that the flow is computed on a downsampled image", + ) + parser.add_argument("--psnr-weight", type=float, default=0.0, help="psnr loss weight") + parser.add_argument("--smoothness-weight", type=float, default=0.0, help="smoothness loss weight") + parser.add_argument("--photometric-weight", type=float, default=0.0, help="photometric loss weight") + parser.add_argument( + "--photometric-max-displacement-ratio", + type=float, + default=0.15, + help="Only pixels with a displacement smaller than this ratio of the image width will be considered for the photometric loss", + ) + parser.add_argument("--photometric-ssim-weight", type=float, default=0.85, help="photometric ssim loss weight") + + # transforms parameters + parser.add_argument("--gpu-transforms", action="store_true", help="use GPU transforms") + parser.add_argument( + "--eval-size", type=int, nargs="+", default=[384, 512], help="size of the images for evaluation" + ) + parser.add_argument("--resize-size", type=int, nargs=2, default=None, help="resize size") + parser.add_argument("--crop-size", type=int, nargs=2, default=[384, 512], help="crop size") + parser.add_argument("--scale-range", type=float, nargs=2, default=[0.6, 1.0], help="random scale range") + parser.add_argument("--rescale-prob", type=float, default=1.0, help="probability of resizing the image") + parser.add_argument( + "--scaling-type", type=str, default="linear", help="scaling type", choices=["exponential", "linear"] + 
)
+    parser.add_argument("--flip-prob", type=float, default=0.5, help="probability of flipping the image")
+    parser.add_argument(
+        "--norm-mean", type=float, nargs="+", default=[0.5, 0.5, 0.5], help="mean for image normalization"
+    )
+    parser.add_argument(
+        "--norm-std", type=float, nargs="+", default=[0.5, 0.5, 0.5], help="std for image normalization"
+    )
+    parser.add_argument(
+        "--use-grayscale", action="store_true", help="use grayscale images instead of RGB", default=False
+    )
+    parser.add_argument("--max-disparity", type=float, default=None, help="maximum disparity")
+    parser.add_argument(
+        "--interpolation-strategy",
+        type=str,
+        default="bilinear",
+        help="interpolation strategy",
+        choices=["bilinear", "bicubic", "mixed"],
+    )
+    parser.add_argument("--spatial-shift-prob", type=float, default=1.0, help="probability of shifting the image")
+    parser.add_argument(
+        "--spatial-shift-max-angle", type=float, default=0.1, help="maximum angle for the spatial shift"
+    )
+    parser.add_argument(
+        "--spatial-shift-max-displacement", type=float, default=2.0, help="maximum displacement for the spatial shift"
+    )
+    parser.add_argument("--gamma-range", type=float, nargs="+", default=[0.8, 1.2], help="range for gamma correction")
+    parser.add_argument(
+        "--brightness-range", type=float, nargs="+", default=[0.8, 1.2], help="range for brightness correction"
+    )
+    parser.add_argument(
+        "--contrast-range", type=float, nargs="+", default=[0.8, 1.2], help="range for contrast correction"
+    )
+    parser.add_argument(
+        "--saturation-range", type=float, nargs="+", default=0.0, help="range for saturation correction"
+    )
+    parser.add_argument("--hue-range", type=float, nargs="+", default=0.0, help="range for hue correction")
+    parser.add_argument(
+        "--asymmetric-jitter-prob",
+        type=float,
+        default=1.0,
+        help="probability of using asymmetric jitter instead of symmetric jitter",
+    )
+    parser.add_argument("--occlusion-prob", type=float, default=0.5, help="probability of occluding the right image")
+    parser.add_argument(
+        "--occlusion-px-range", type=int, nargs="+", default=[50, 100], help="range for the number of occluded pixels"
+    )
+    parser.add_argument("--erase-prob", type=float, default=0.0, help="probability of erasing in both images")
+    parser.add_argument(
+        "--erase-px-range", type=int, nargs="+", default=[50, 100], help="range for the number of erased pixels"
+    )
+    parser.add_argument(
+        "--erase-num-repeats", type=int, default=1, help="number of times to repeat the erase operation"
+    )
+
+    # optimizer parameters
+    parser.add_argument("--optimizer", type=str, default="adam", help="optimizer", choices=["adam", "sgd"])
+    parser.add_argument("--lr", type=float, default=4e-4, help="learning rate")
+    parser.add_argument("--weight-decay", type=float, default=0.0, help="weight decay")
+    parser.add_argument("--clip-grad-norm", type=float, default=0.0, help="clip grad norm")
+
+    # lr_scheduler parameters
+    parser.add_argument("--min-lr", type=float, default=2e-5, help="minimum learning rate")
+    parser.add_argument("--warmup-steps", type=int, default=6_000, help="number of warmup steps")
+    parser.add_argument(
+        "--decay-after-steps", type=int, default=180_000, help="number of steps after which to start decaying the lr"
+    )
+    parser.add_argument(
+        "--lr-warmup-method", type=str, default="linear", help="warmup method", choices=["linear", "constant"]
+    )
+    parser.add_argument("--lr-warmup-factor", type=float, default=0.02, help="warmup factor for the learning rate")
+    parser.add_argument(
+        "--lr-decay-method",
+ type=str, + default="linear", + help="decay method", + choices=["linear", "cosine", "exponential"], + ) + parser.add_argument("--lr-decay-gamma", type=float, default=0.8, help="decay factor for the learning rate") + + # deterministic behaviour + parser.add_argument("--seed", type=int, default=42, help="seed for random number generators") + + # mixed precision training + parser.add_argument("--mixed-precision", action="store_true", help="use mixed precision training") + + # logging + parser.add_argument("--tensorboard-summaries", action="store_true", help="log to tensorboard") + parser.add_argument("--tensorboard-log-frequency", type=int, default=100, help="log frequency") + parser.add_argument("--save-frequency", type=int, default=1_000, help="save frequency") + parser.add_argument("--valid-frequency", type=int, default=1_000, help="validation frequency") + parser.add_argument( + "--metrics", + type=str, + nargs="+", + default=["mae", "rmse", "1px", "3px", "5px", "relepe"], + help="metrics to log", + choices=AVAILABLE_METRICS, + ) + + # distributed parameters + parser.add_argument("--world-size", type=int, default=8, help="number of distributed processes") + parser.add_argument("--dist-url", type=str, default="env://", help="url used to set up distributed training") + parser.add_argument("--device", type=str, default="cuda", help="device to use for training") + + # weights API + parser.add_argument("--weights", type=str, default=None, help="weights API url") + parser.add_argument( + "--resume-path", type=str, default=None, help="a path from which to resume or start fine-tuning" + ) + parser.add_argument("--resume-schedule", action="store_true", help="resume optimizer state") + + # padder parameters + parser.add_argument("--padder-type", type=str, default="kitti", help="padder type", choices=["kitti", "sintel"]) + return parser + + +if __name__ == "__main__": + args = get_args_parser().parse_args() + main(args) diff --git a/references/depth/stereo/transforms.py b/references/depth/stereo/transforms.py new file mode 100644 index 00000000000..9c4a6bab6d3 --- /dev/null +++ b/references/depth/stereo/transforms.py @@ -0,0 +1,650 @@ +import random +from typing import Callable, List, Optional, Sequence, Tuple, Union + +import numpy as np +import PIL.Image +import torch +import torchvision.transforms as T +import torchvision.transforms.functional as F +from torch import Tensor + +T_FLOW = Union[Tensor, np.ndarray, None] +T_MASK = Union[Tensor, np.ndarray, None] +T_STEREO_TENSOR = Tuple[Tensor, Tensor] +T_COLOR_AUG_PARAM = Union[float, Tuple[float, float]] + + +def rand_float_range(size: Sequence[int], low: float, high: float) -> Tensor: + return (low - high) * torch.rand(size) + high + + +class InterpolationStrategy: + + _valid_modes: List[str] = ["mixed", "bicubic", "bilinear"] + + def __init__(self, mode: str = "mixed") -> None: + if mode not in self._valid_modes: + raise ValueError(f"Invalid interpolation mode: {mode}. 
Valid modes are: {self._valid_modes}")
+
+        if mode == "mixed":
+            self.strategies = [F.InterpolationMode.BILINEAR, F.InterpolationMode.BICUBIC]
+        elif mode == "bicubic":
+            self.strategies = [F.InterpolationMode.BICUBIC]
+        elif mode == "bilinear":
+            self.strategies = [F.InterpolationMode.BILINEAR]
+
+    def __call__(self) -> F.InterpolationMode:
+        return random.choice(self.strategies)
+
+    @classmethod
+    def is_valid(cls, mode: str) -> bool:
+        return mode in cls._valid_modes
+
+    @property
+    def valid_modes(self) -> List[str]:
+        return self._valid_modes
+
+
+class ValidateModelInput(torch.nn.Module):
+    # Pass-through transform that checks the shape and dtypes to make sure the model gets what it expects
+    def forward(self, images: T_STEREO_TENSOR, disparities: T_FLOW, masks: T_MASK):
+        if images[0].shape != images[1].shape:
+            raise ValueError("img1 and img2 should have the same shape.")
+        h, w = images[0].shape[-2:]
+        if disparities[0] is not None and disparities[0].shape != (1, h, w):
+            raise ValueError(f"disparities[0].shape should be (1, {h}, {w}) instead of {disparities[0].shape}")
+        if masks[0] is not None:
+            if masks[0].shape != (h, w):
+                raise ValueError(f"masks[0].shape should be ({h}, {w}) instead of {masks[0].shape}")
+            if masks[0].dtype != torch.bool:
+                raise TypeError(f"masks[0] should be of dtype torch.bool instead of {masks[0].dtype}")
+
+        return images, disparities, masks
+
+
+class ConvertToGrayscale(torch.nn.Module):
+    def __init__(self) -> None:
+        super().__init__()
+
+    def forward(
+        self,
+        images: Tuple[PIL.Image.Image, PIL.Image.Image],
+        disparities: Tuple[T_FLOW, T_FLOW],
+        masks: Tuple[T_MASK, T_MASK],
+    ) -> Tuple[T_STEREO_TENSOR, Tuple[T_FLOW, T_FLOW], Tuple[T_MASK, T_MASK]]:
+        img_left = F.rgb_to_grayscale(images[0], num_output_channels=3)
+        img_right = F.rgb_to_grayscale(images[1], num_output_channels=3)
+
+        return (img_left, img_right), disparities, masks
+
+
+class MakeValidDisparityMask(torch.nn.Module):
+    def __init__(self, max_disparity: Optional[int] = 256) -> None:
+        super().__init__()
+        self.max_disparity = max_disparity
+
+    def forward(
+        self,
+        images: T_STEREO_TENSOR,
+        disparities: Tuple[T_FLOW, T_FLOW],
+        masks: Tuple[T_MASK, T_MASK],
+    ) -> Tuple[T_STEREO_TENSOR, Tuple[T_FLOW, T_FLOW], Tuple[T_MASK, T_MASK]]:
+        valid_masks = tuple(
+            torch.ones(images[idx].shape[-2:], dtype=torch.bool, device=images[idx].device) if mask is None else mask
+            for idx, mask in enumerate(masks)
+        )
+
+        valid_masks = tuple(
+            torch.logical_and(mask, disparity > 0).squeeze(0) if disparity is not None else mask
+            for mask, disparity in zip(valid_masks, disparities)
+        )
+
+        if self.max_disparity is not None:
+            valid_masks = tuple(
+                torch.logical_and(mask, disparity < self.max_disparity).squeeze(0) if disparity is not None else mask
+                for mask, disparity in zip(valid_masks, disparities)
+            )
+
+        return images, disparities, valid_masks
+
+
+class ToGPU(torch.nn.Module):
+    def __init__(self) -> None:
+        super().__init__()
+
+    def forward(
+        self,
+        images: T_STEREO_TENSOR,
+        disparities: Tuple[T_FLOW, T_FLOW],
+        masks: Tuple[T_MASK, T_MASK],
+    ) -> Tuple[T_STEREO_TENSOR, Tuple[T_FLOW, T_FLOW], Tuple[T_MASK, T_MASK]]:
+        dev_images = tuple(image.cuda() for image in images)
+        dev_disparities = tuple(map(lambda x: x.cuda() if x is not None else None, disparities))
+        dev_masks = tuple(map(lambda x: x.cuda() if x is not None else None, masks))
+        return dev_images, dev_disparities, dev_masks
+
+
+class ConvertImageDtype(torch.nn.Module):
+    def __init__(self,
dtype: torch.dtype): + super().__init__() + self.dtype = dtype + + def forward( + self, + images: T_STEREO_TENSOR, + disparities: Tuple[T_FLOW, T_FLOW], + masks: Tuple[T_MASK, T_MASK], + ) -> Tuple[T_STEREO_TENSOR, Tuple[T_FLOW, T_FLOW], Tuple[T_MASK, T_MASK]]: + img_left = F.convert_image_dtype(images[0], dtype=self.dtype) + img_right = F.convert_image_dtype(images[1], dtype=self.dtype) + + img_left = img_left.contiguous() + img_right = img_right.contiguous() + + return (img_left, img_right), disparities, masks + + +class Normalize(torch.nn.Module): + def __init__(self, mean: List[float], std: List[float]) -> None: + super().__init__() + self.mean = mean + self.std = std + + def forward( + self, + images: T_STEREO_TENSOR, + disparities: Tuple[T_FLOW, T_FLOW], + masks: Tuple[T_MASK, T_MASK], + ) -> Tuple[T_STEREO_TENSOR, Tuple[T_FLOW, T_FLOW], Tuple[T_MASK, T_MASK]]: + + img_left = F.normalize(images[0], mean=self.mean, std=self.std) + img_right = F.normalize(images[1], mean=self.mean, std=self.std) + + img_left = img_left.contiguous() + img_right = img_right.contiguous() + + return (img_left, img_right), disparities, masks + + +class ToTensor(torch.nn.Module): + def forward( + self, + images: Tuple[PIL.Image.Image, PIL.Image.Image], + disparities: Tuple[T_FLOW, T_FLOW], + masks: Tuple[T_MASK, T_MASK], + ) -> Tuple[T_STEREO_TENSOR, Tuple[T_FLOW, T_FLOW], Tuple[T_MASK, T_MASK]]: + if images[0] is None: + raise ValueError("img_left is None") + if images[1] is None: + raise ValueError("img_right is None") + + img_left = F.pil_to_tensor(images[0]) + img_right = F.pil_to_tensor(images[1]) + disparity_tensors = () + mask_tensors = () + + for idx in range(2): + disparity_tensors += (torch.from_numpy(disparities[idx]),) if disparities[idx] is not None else (None,) + mask_tensors += (torch.from_numpy(masks[idx]),) if masks[idx] is not None else (None,) + + return (img_left, img_right), disparity_tensors, mask_tensors + + +class AsymmetricColorJitter(T.ColorJitter): + # p determines the probability of doing asymmetric vs symmetric color jittering + def __init__( + self, + brightness: T_COLOR_AUG_PARAM = 0, + contrast: T_COLOR_AUG_PARAM = 0, + saturation: T_COLOR_AUG_PARAM = 0, + hue: T_COLOR_AUG_PARAM = 0, + p: float = 0.2, + ): + super().__init__(brightness=brightness, contrast=contrast, saturation=saturation, hue=hue) + self.p = p + + def forward( + self, + images: T_STEREO_TENSOR, + disparities: Tuple[T_FLOW, T_FLOW], + masks: Tuple[T_MASK, T_MASK], + ) -> Tuple[T_STEREO_TENSOR, Tuple[T_FLOW, T_FLOW], Tuple[T_MASK, T_MASK]]: + + if torch.rand(1) < self.p: + # asymmetric: different transform for img1 and img2 + img_left = super().forward(images[0]) + img_right = super().forward(images[1]) + else: + # symmetric: same transform for img1 and img2 + batch = torch.stack(images) + batch = super().forward(batch) + img_left, img_right = batch[0], batch[1] + + return (img_left, img_right), disparities, masks + + +class AsymetricGammaAdjust(torch.nn.Module): + def __init__(self, p: float, gamma_range: Tuple[float, float], gain: float = 1) -> None: + super().__init__() + self.gamma_range = gamma_range + self.gain = gain + self.p = p + + def forward( + self, + images: T_STEREO_TENSOR, + disparities: Tuple[T_FLOW, T_FLOW], + masks: Tuple[T_MASK, T_MASK], + ) -> Tuple[T_STEREO_TENSOR, Tuple[T_FLOW, T_FLOW], Tuple[T_MASK, T_MASK]]: + + gamma = rand_float_range((1,), low=self.gamma_range[0], high=self.gamma_range[1]).item() + + if torch.rand(1) < self.p: + # asymmetric: different transform for img1 and img2 + 
img_left = F.adjust_gamma(images[0], gamma, gain=self.gain)
+            img_right = F.adjust_gamma(images[1], gamma, gain=self.gain)
+        else:
+            # symmetric: same transform for img1 and img2
+            batch = torch.stack(images)
+            batch = F.adjust_gamma(batch, gamma, gain=self.gain)
+            img_left, img_right = batch[0], batch[1]
+
+        return (img_left, img_right), disparities, masks
+
+
+class RandomErase(torch.nn.Module):
+    # Produces multiple symmetric random erasures;
+    # these can be viewed as occlusions present in both camera views.
+    # Similarly to Optical Flow occlusion prediction tasks, we mask these pixels in the disparity map
+    def __init__(
+        self,
+        p: float = 0.5,
+        erase_px_range: Tuple[int, int] = (50, 100),
+        value: Union[Tensor, float] = 0,
+        inplace: bool = False,
+        max_erase: int = 2,
+    ):
+        super().__init__()
+        self.min_px_erase = erase_px_range[0]
+        self.max_px_erase = erase_px_range[1]
+        if self.max_px_erase < 0:
+            raise ValueError("erase_px_range[1] should be greater than or equal to 0")
+        if self.min_px_erase < 0:
+            raise ValueError("erase_px_range[0] should be greater than or equal to 0")
+        if self.min_px_erase > self.max_px_erase:
+            raise ValueError("erase_px_range[0] should be less than or equal to erase_px_range[1]")
+
+        self.p = p
+        self.value = value
+        self.inplace = inplace
+        self.max_erase = max_erase
+
+    def forward(
+        self,
+        images: T_STEREO_TENSOR,
+        disparities: T_STEREO_TENSOR,
+        masks: T_STEREO_TENSOR,
+    ) -> Tuple[T_STEREO_TENSOR, Tuple[T_FLOW, T_FLOW], Tuple[T_MASK, T_MASK]]:
+
+        if torch.rand(1) < self.p:
+            return images, disparities, masks
+
+        image_left, image_right = images
+        mask_left, mask_right = masks
+        for _ in range(torch.randint(self.max_erase, size=(1,)).item()):
+            y, x, h, w, v = self._get_params(image_left)
+            image_right = F.erase(image_right, y, x, h, w, v, self.inplace)
+            image_left = F.erase(image_left, y, x, h, w, v, self.inplace)
+            # similarly to optical flow occlusion prediction, we consider
+            # any erasure pixels that are in both images to be occluded, therefore
+            # we mark them as invalid
+            if mask_left is not None:
+                mask_left = F.erase(mask_left, y, x, h, w, False, self.inplace)
+            if mask_right is not None:
+                mask_right = F.erase(mask_right, y, x, h, w, False, self.inplace)
+
+        return (image_left, image_right), disparities, (mask_left, mask_right)
+
+    def _get_params(self, img: torch.Tensor) -> Tuple[int, int, int, int, float]:
+        img_h, img_w = img.shape[-2:]
+        crop_h, crop_w = (
+            random.randint(self.min_px_erase, self.max_px_erase),
+            random.randint(self.min_px_erase, self.max_px_erase),
+        )
+        crop_x, crop_y = (random.randint(0, img_w - crop_w), random.randint(0, img_h - crop_h))
+
+        return crop_y, crop_x, crop_h, crop_w, self.value
+
+
+class RandomOcclusion(torch.nn.Module):
+    # This adds an occlusion in the right image;
+    # the occluded patch works as a patch erase where the erase value is the mean
+    # of the pixels from the selected zone
+    def __init__(self, p: float = 0.5, occlusion_px_range: Tuple[int, int] = (50, 100), inplace: bool = False):
+        super().__init__()
+
+        self.min_px_occlusion = occlusion_px_range[0]
+        self.max_px_occlusion = occlusion_px_range[1]
+
+        if self.max_px_occlusion < 0:
+            raise ValueError("occlusion_px_range[1] should be greater than or equal to 0")
+        if self.min_px_occlusion < 0:
+            raise ValueError("occlusion_px_range[0] should be greater than or equal to 0")
+        if self.min_px_occlusion > self.max_px_occlusion:
+            raise ValueError("occlusion_px_range[0] should be less than or equal to occlusion_px_range[1]")
+
+        self.p = p
+        self.inplace = inplace
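+
+    # NOTE: as in RandomErase above, the `torch.rand(1) < self.p` check in
+    # forward() below returns the inputs unchanged, i.e. `p` is effectively
+    # the probability of *skipping* the occlusion rather than of applying it.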
+
+    def forward(
+        self,
+        images: T_STEREO_TENSOR,
+        disparities: T_STEREO_TENSOR,
+        masks: T_STEREO_TENSOR,
+    ) -> Tuple[T_STEREO_TENSOR, Tuple[T_FLOW, T_FLOW], Tuple[T_MASK, T_MASK]]:
+
+        left_image, right_image = images
+
+        if torch.rand(1) < self.p:
+            return images, disparities, masks
+
+        y, x, h, w, v = self._get_params(right_image)
+        right_image = F.erase(right_image, y, x, h, w, v, self.inplace)
+
+        return ((left_image, right_image), disparities, masks)
+
+    def _get_params(self, img: torch.Tensor) -> Tuple[int, int, int, int, float]:
+        img_h, img_w = img.shape[-2:]
+        crop_h, crop_w = (
+            random.randint(self.min_px_occlusion, self.max_px_occlusion),
+            random.randint(self.min_px_occlusion, self.max_px_occlusion),
+        )
+
+        crop_x, crop_y = (random.randint(0, img_w - crop_w), random.randint(0, img_h - crop_h))
+        occlusion_value = img[..., crop_y : crop_y + crop_h, crop_x : crop_x + crop_w].mean(dim=(-2, -1), keepdim=True)
+
+        return (crop_y, crop_x, crop_h, crop_w, occlusion_value)
+
+
+class RandomSpatialShift(torch.nn.Module):
+    # This transform applies a vertical shift and a slight angle rotation at the same time
+    def __init__(
+        self, p: float = 0.5, max_angle: float = 0.1, max_px_shift: int = 2, interpolation_type: str = "bilinear"
+    ) -> None:
+        super().__init__()
+        self.p = p
+        self.max_angle = max_angle
+        self.max_px_shift = max_px_shift
+        self._interpolation_mode_strategy = InterpolationStrategy(interpolation_type)
+
+    def forward(
+        self,
+        images: T_STEREO_TENSOR,
+        disparities: T_STEREO_TENSOR,
+        masks: T_STEREO_TENSOR,
+    ) -> Tuple[T_STEREO_TENSOR, Tuple[T_FLOW, T_FLOW], Tuple[T_MASK, T_MASK]]:
+        # the transform is applied only on the right image
+        # in order to mimic slight calibration issues
+        img_left, img_right = images
+
+        INTERP_MODE = self._interpolation_mode_strategy()
+
+        if torch.rand(1) < self.p:
+            # [0, 1] -> [-a, a]
+            shift = rand_float_range((1,), low=-self.max_px_shift, high=self.max_px_shift).item()
+            angle = rand_float_range((1,), low=-self.max_angle, high=self.max_angle).item()
+            # sample center point for the rotation matrix
+            y = torch.randint(size=(1,), low=0, high=img_right.shape[-2]).item()
+            x = torch.randint(size=(1,), low=0, high=img_right.shape[-1]).item()
+            # apply affine transformations
+            img_right = F.affine(
+                img_right,
+                angle=angle,
+                translate=[0, shift],  # translation only on the y-axis
+                center=[x, y],
+                scale=1.0,
+                shear=0.0,
+                interpolation=INTERP_MODE,
+            )
+
+        return ((img_left, img_right), disparities, masks)
+
+
+class RandomHorizontalFlip(torch.nn.Module):
+    def __init__(self, p: float = 0.5) -> None:
+        super().__init__()
+        self.p = p
+
+    def forward(
+        self,
+        images: T_STEREO_TENSOR,
+        disparities: Tuple[T_FLOW, T_FLOW],
+        masks: Tuple[T_MASK, T_MASK],
+    ) -> Tuple[T_STEREO_TENSOR, Tuple[T_FLOW, T_FLOW], Tuple[T_MASK, T_MASK]]:
+
+        img_left, img_right = images
+        dsp_left, dsp_right = disparities
+        mask_left, mask_right = masks
+
+        if dsp_right is not None and torch.rand(1) < self.p:
+            img_left, img_right = F.hflip(img_left), F.hflip(img_right)
+            dsp_left, dsp_right = F.hflip(dsp_left), F.hflip(dsp_right)
+            if mask_left is not None and mask_right is not None:
+                mask_left, mask_right = F.hflip(mask_left), F.hflip(mask_right)
+            return ((img_right, img_left), (dsp_right, dsp_left), (mask_right, mask_left))
+
+        return images, disparities, masks
+
+
+class Resize(torch.nn.Module):
+    def __init__(self, resize_size: Tuple[int, ...], interpolation_type: str = "bilinear") -> None:
+        super().__init__()
+        self.resize_size =
list(resize_size) # doing this to keep mypy happy + self._interpolation_mode_strategy = InterpolationStrategy(interpolation_type) + + def forward( + self, + images: T_STEREO_TENSOR, + disparities: Tuple[T_FLOW, T_FLOW], + masks: Tuple[T_MASK, T_MASK], + ) -> Tuple[T_STEREO_TENSOR, Tuple[T_FLOW, T_FLOW], Tuple[T_MASK, T_MASK]]: + resized_images = () + resized_disparities = () + resized_masks = () + + INTERP_MODE = self._interpolation_mode_strategy() + + for img in images: + # We hard-code antialias=False to preserve results after we changed + # its default from None to True (see + # https://github.com/pytorch/vision/pull/7160) + # TODO: we could re-train the stereo models with antialias=True? + resized_images += (F.resize(img, self.resize_size, interpolation=INTERP_MODE, antialias=False),) + + for dsp in disparities: + if dsp is not None: + # rescale disparity to match the new image size + scale_x = self.resize_size[1] / dsp.shape[-1] + resized_disparities += (F.resize(dsp, self.resize_size, interpolation=INTERP_MODE) * scale_x,) + else: + resized_disparities += (None,) + + for mask in masks: + if mask is not None: + resized_masks += ( + # we squeeze and unsqueeze because the API requires > 3D tensors + F.resize( + mask.unsqueeze(0), + self.resize_size, + interpolation=F.InterpolationMode.NEAREST, + ).squeeze(0), + ) + else: + resized_masks += (None,) + + return resized_images, resized_disparities, resized_masks + + +class RandomRescaleAndCrop(torch.nn.Module): + # This transform will resize the input with a given proba, and then crop it. + # These are the reversed operations of the built-in RandomResizedCrop, + # although the order of the operations doesn't matter too much: resizing a + # crop would give the same result as cropping a resized image, up to + # interpolation artifact at the borders of the output. + # + # The reason we don't rely on RandomResizedCrop is because of a significant + # difference in the parametrization of both transforms, in particular, + # because of the way the random parameters are sampled in both transforms, + # which leads to fairly different results (and different epe). For more details see + # https://github.com/pytorch/vision/pull/5026/files#r762932579 + def __init__( + self, + crop_size: Tuple[int, int], + scale_range: Tuple[float, float] = (-0.2, 0.5), + rescale_prob: float = 0.8, + scaling_type: str = "exponential", + interpolation_type: str = "bilinear", + ) -> None: + super().__init__() + self.crop_size = crop_size + self.min_scale = scale_range[0] + self.max_scale = scale_range[1] + self.rescale_prob = rescale_prob + self.scaling_type = scaling_type + self._interpolation_mode_strategy = InterpolationStrategy(interpolation_type) + + if self.scaling_type == "linear" and self.min_scale < 0: + raise ValueError("min_scale must be >= 0 for linear scaling") + + def forward( + self, + images: T_STEREO_TENSOR, + disparities: Tuple[T_FLOW, T_FLOW], + masks: Tuple[T_MASK, T_MASK], + ) -> Tuple[T_STEREO_TENSOR, Tuple[T_FLOW, T_FLOW], Tuple[T_MASK, T_MASK]]: + + img_left, img_right = images + dsp_left, dsp_right = disparities + mask_left, mask_right = masks + INTERP_MODE = self._interpolation_mode_strategy() + + # randomly sample scale + h, w = img_left.shape[-2:] + # Note: in original code, they use + 1 instead of + 8 for sparse datasets (e.g. 
Kitti) + # It shouldn't matter much + min_scale = max((self.crop_size[0] + 8) / h, (self.crop_size[1] + 8) / w) + + # exponential scaling will draw a random scale in (min_scale, max_scale) and then raise + # 2 to the power of that random value. This final scale distribution will have a different + # mean and variance than a uniform distribution. Note that a scale of 1 will result in + # a rescaling of 2X the original size, whereas a scale of -1 will result in a rescaling + # of 0.5X the original size. + if self.scaling_type == "exponential": + scale = 2 ** torch.empty(1, dtype=torch.float32).uniform_(self.min_scale, self.max_scale).item() + # linear scaling will draw a random scale in (min_scale, max_scale) + elif self.scaling_type == "linear": + scale = torch.empty(1, dtype=torch.float32).uniform_(self.min_scale, self.max_scale).item() + + scale = max(scale, min_scale) + + new_h, new_w = round(h * scale), round(w * scale) + + if torch.rand(1).item() < self.rescale_prob: + # rescale the images + img_left = F.resize(img_left, size=(new_h, new_w), interpolation=INTERP_MODE) + img_right = F.resize(img_right, size=(new_h, new_w), interpolation=INTERP_MODE) + + resized_masks, resized_disparities = (), () + + for disparity, mask in zip(disparities, masks): + if disparity is not None: + if mask is None: + resized_disparity = F.resize(disparity, size=(new_h, new_w), interpolation=INTERP_MODE) + # rescale the disparity + resized_disparity = ( + resized_disparity * torch.tensor([scale], device=resized_disparity.device)[:, None, None] + ) + resized_mask = None + else: + resized_disparity, resized_mask = _resize_sparse_flow( + disparity, mask, scale_x=scale, scale_y=scale + ) + resized_masks += (resized_mask,) + resized_disparities += (resized_disparity,) + + else: + resized_disparities = disparities + resized_masks = masks + + disparities = resized_disparities + masks = resized_masks + + # Note: For sparse datasets (Kitti), the original code uses a "margin" + # See e.g. 
https://github.com/princeton-vl/RAFT/blob/master/core/utils/augmentor.py#L220:L220
+        # We don't use one; not sure if it matters much
+        y0 = torch.randint(0, img_left.shape[1] - self.crop_size[0], size=(1,)).item()
+        x0 = torch.randint(0, img_right.shape[2] - self.crop_size[1], size=(1,)).item()
+
+        img_left = F.crop(img_left, y0, x0, self.crop_size[0], self.crop_size[1])
+        img_right = F.crop(img_right, y0, x0, self.crop_size[0], self.crop_size[1])
+        if dsp_left is not None:
+            dsp_left = F.crop(disparities[0], y0, x0, self.crop_size[0], self.crop_size[1])
+        if dsp_right is not None:
+            dsp_right = F.crop(disparities[1], y0, x0, self.crop_size[0], self.crop_size[1])
+
+        cropped_masks = ()
+        for mask in masks:
+            if mask is not None:
+                mask = F.crop(mask, y0, x0, self.crop_size[0], self.crop_size[1])
+            cropped_masks += (mask,)
+
+        return ((img_left, img_right), (dsp_left, dsp_right), cropped_masks)
+
+
+def _resize_sparse_flow(
+    flow: Tensor, valid_flow_mask: Tensor, scale_x: float = 1.0, scale_y: float = 0.0
+) -> Tuple[Tensor, Tensor]:
+    # This resizes both the flow and the valid_flow_mask (which is assumed to be reasonably sparse)
+    # There are as many non-zero values in the original flow as in the resized flow (up to out-of-bounds points)
+    # So for example if scale_x = scale_y = 2, the sparsity of the output flow is multiplied by 4
+
+    h, w = flow.shape[-2:]
+
+    h_new = int(round(h * scale_y))
+    w_new = int(round(w * scale_x))
+    flow_new = torch.zeros(size=[1, h_new, w_new], dtype=flow.dtype)
+    valid_new = torch.zeros(size=[h_new, w_new], dtype=valid_flow_mask.dtype)
+
+    jj, ii = torch.meshgrid(torch.arange(w), torch.arange(h), indexing="xy")
+
+    ii_valid, jj_valid = ii[valid_flow_mask], jj[valid_flow_mask]
+
+    ii_valid_new = torch.round(ii_valid.to(float) * scale_y).to(torch.long)
+    jj_valid_new = torch.round(jj_valid.to(float) * scale_x).to(torch.long)
+
+    within_bounds_mask = (0 <= ii_valid_new) & (ii_valid_new < h_new) & (0 <= jj_valid_new) & (jj_valid_new < w_new)
+
+    ii_valid = ii_valid[within_bounds_mask]
+    jj_valid = jj_valid[within_bounds_mask]
+    ii_valid_new = ii_valid_new[within_bounds_mask]
+    jj_valid_new = jj_valid_new[within_bounds_mask]
+
+    valid_flow_new = flow[:, ii_valid, jj_valid]
+    valid_flow_new *= scale_x
+
+    flow_new[:, ii_valid_new, jj_valid_new] = valid_flow_new
+    valid_new[ii_valid_new, jj_valid_new] = valid_flow_mask[ii_valid, jj_valid]
+
+    return flow_new, valid_new.bool()
+
+
+class Compose(torch.nn.Module):
+    def __init__(self, transforms: List[Callable]):
+        super().__init__()
+        self.transforms = transforms
+
+    @torch.inference_mode()
+    def forward(self, images, disparities, masks):
+        for t in self.transforms:
+            images, disparities, masks = t(images, disparities, masks)
+        return images, disparities, masks
diff --git a/references/depth/stereo/utils/__init__.py b/references/depth/stereo/utils/__init__.py
new file mode 100644
index 00000000000..4dacbe61ba0
--- /dev/null
+++ b/references/depth/stereo/utils/__init__.py
@@ -0,0 +1,6 @@
+from .losses import *
+from .metrics import *
+from .distributed import *
+from .logger import *
+from .padder import *
+from .norm import *
diff --git a/references/depth/stereo/utils/distributed.py b/references/depth/stereo/utils/distributed.py
new file mode 100644
index 00000000000..228aa2a0f9a
--- /dev/null
+++ b/references/depth/stereo/utils/distributed.py
@@ -0,0 +1,60 @@
+import os
+
+import torch
+import torch.distributed as dist
+
+
+def _redefine_print(is_main):
+    """disables printing when not in main process"""
+    import builtins as
__builtin__ + + builtin_print = __builtin__.print + + def print(*args, **kwargs): + force = kwargs.pop("force", False) + if is_main or force: + builtin_print(*args, **kwargs) + + __builtin__.print = print + + +def setup_ddp(args): + # Set the local_rank, rank, and world_size values as args fields + # This is done differently depending on how we're running the script. We + # currently support either torchrun or the custom run_with_submitit.py + # If you're confused (like I was), this might help a bit + # https://discuss.pytorch.org/t/what-is-the-difference-between-rank-and-local-rank/61940/2 + + if "RANK" in os.environ and "WORLD_SIZE" in os.environ: + args.rank = int(os.environ["RANK"]) + args.world_size = int(os.environ["WORLD_SIZE"]) + args.gpu = int(os.environ["LOCAL_RANK"]) + elif "SLURM_PROCID" in os.environ: + args.rank = int(os.environ["SLURM_PROCID"]) + args.gpu = args.rank % torch.cuda.device_count() + elif hasattr(args, "rank"): + pass + else: + print("Not using distributed mode") + args.distributed = False + args.world_size = 1 + return + + args.distributed = True + + torch.cuda.set_device(args.gpu) + dist.init_process_group( + backend="nccl", + rank=args.rank, + world_size=args.world_size, + init_method=args.dist_url, + ) + torch.distributed.barrier() + _redefine_print(is_main=(args.rank == 0)) + + +def reduce_across_processes(val): + t = torch.tensor(val, device="cuda") + dist.barrier() + dist.all_reduce(t) + return t diff --git a/references/depth/stereo/utils/logger.py b/references/depth/stereo/utils/logger.py new file mode 100644 index 00000000000..803e9aebd7b --- /dev/null +++ b/references/depth/stereo/utils/logger.py @@ -0,0 +1,153 @@ +import datetime +import time +from collections import defaultdict, deque + +import torch + +from .distributed import reduce_across_processes + + +class SmoothedValue: + """Track a series of values and provide access to smoothed values over a + window or the global series average. + """ + + def __init__(self, window_size=20, fmt="{median:.4f} ({global_avg:.4f})"): + self.deque = deque(maxlen=window_size) + self.total = 0.0 + self.count = 0 + self.fmt = fmt + + def update(self, value, n=1): + self.deque.append(value) + self.count += n + self.total += value * n + + def synchronize_between_processes(self): + """ + Warning: does not synchronize the deque! 
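+        Only `count` and `total` are summed across processes via all_reduce;
+        the windowed values stored in the deque remain local to each process.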
+ """ + t = reduce_across_processes([self.count, self.total]) + t = t.tolist() + self.count = int(t[0]) + self.total = t[1] + + @property + def median(self): + d = torch.tensor(list(self.deque)) + return d.median().item() + + @property + def avg(self): + d = torch.tensor(list(self.deque), dtype=torch.float32) + return d.mean().item() + + @property + def global_avg(self): + return self.total / self.count + + @property + def max(self): + return max(self.deque) + + @property + def value(self): + return self.deque[-1] + + def __str__(self): + return self.fmt.format( + median=self.median, avg=self.avg, global_avg=self.global_avg, max=self.max, value=self.value + ) + + +class MetricLogger: + def __init__(self, delimiter="\t"): + self.meters = defaultdict(SmoothedValue) + self.delimiter = delimiter + + def update(self, **kwargs): + for k, v in kwargs.items(): + if isinstance(v, torch.Tensor): + v = v.item() + if not isinstance(v, (float, int)): + raise TypeError( + f"This method expects the value of the input arguments to be of type float or int, instead got {type(v)}" + ) + self.meters[k].update(v) + + def __getattr__(self, attr): + if attr in self.meters: + return self.meters[attr] + if attr in self.__dict__: + return self.__dict__[attr] + raise AttributeError(f"'{type(self).__name__}' object has no attribute '{attr}'") + + def __str__(self): + loss_str = [] + for name, meter in self.meters.items(): + loss_str.append(f"{name}: {str(meter)}") + return self.delimiter.join(loss_str) + + def synchronize_between_processes(self): + for meter in self.meters.values(): + meter.synchronize_between_processes() + + def add_meter(self, name, **kwargs): + self.meters[name] = SmoothedValue(**kwargs) + + def log_every(self, iterable, print_freq=5, header=None): + i = 0 + if not header: + header = "" + start_time = time.time() + end = time.time() + iter_time = SmoothedValue(fmt="{avg:.4f}") + data_time = SmoothedValue(fmt="{avg:.4f}") + space_fmt = ":" + str(len(str(len(iterable)))) + "d" + if torch.cuda.is_available(): + log_msg = self.delimiter.join( + [ + header, + "[{0" + space_fmt + "}/{1}]", + "eta: {eta}", + "{meters}", + "time: {time}", + "data: {data}", + "max mem: {memory:.0f}", + ] + ) + else: + log_msg = self.delimiter.join( + [header, "[{0" + space_fmt + "}/{1}]", "eta: {eta}", "{meters}", "time: {time}", "data: {data}"] + ) + MB = 1024.0 * 1024.0 + for obj in iterable: + data_time.update(time.time() - end) + yield obj + iter_time.update(time.time() - end) + if print_freq is not None and i % print_freq == 0: + eta_seconds = iter_time.global_avg * (len(iterable) - i) + eta_string = str(datetime.timedelta(seconds=int(eta_seconds))) + if torch.cuda.is_available(): + print( + log_msg.format( + i, + len(iterable), + eta=eta_string, + meters=str(self), + time=str(iter_time), + data=str(data_time), + memory=torch.cuda.max_memory_allocated() / MB, + ) + ) + else: + print( + log_msg.format( + i, len(iterable), eta=eta_string, meters=str(self), time=str(iter_time), data=str(data_time) + ) + ) + i += 1 + end = time.time() + total_time = time.time() - start_time + total_time_str = str(datetime.timedelta(seconds=int(total_time))) + print(f"{header} Total time: {total_time_str}") diff --git a/references/depth/stereo/utils/losses.py b/references/depth/stereo/utils/losses.py new file mode 100644 index 00000000000..1c21353a056 --- /dev/null +++ b/references/depth/stereo/utils/losses.py @@ -0,0 +1,503 @@ +from typing import List, Optional + +import torch +from torch import nn, Tensor +from torch.nn import functional 
as F
+from torchvision.prototype.models.depth.stereo.raft_stereo import grid_sample, make_coords_grid
+
+
+def make_gaussian_kernel(kernel_size: int, sigma: float) -> torch.Tensor:
+    """Function to create a 2D Gaussian kernel."""
+
+    x = torch.arange(kernel_size, dtype=torch.float32)
+    y = torch.arange(kernel_size, dtype=torch.float32)
+    x = x - (kernel_size - 1) / 2
+    y = y - (kernel_size - 1) / 2
+    x, y = torch.meshgrid(x, y, indexing="ij")
+    grid = (x**2 + y**2) / (2 * sigma**2)
+    kernel = torch.exp(-grid)
+    kernel = kernel / kernel.sum()
+    return kernel
+
+
+def _sequence_loss_fn(
+    flow_preds: List[Tensor],
+    flow_gt: Tensor,
+    valid_flow_mask: Optional[Tensor],
+    gamma: Tensor,
+    max_flow: int = 256,
+    exclude_large: bool = False,
+    weights: Optional[Tensor] = None,
+):
+    """Loss function defined over sequence of flow predictions"""
+    torch._assert(
+        gamma < 1,
+        "sequence_loss: `gamma` must be lower than 1, but got {}".format(gamma),
+    )
+
+    if exclude_large:
+        # exclude invalid pixels and extremely large displacements
+        flow_norm = torch.sum(flow_gt**2, dim=1).sqrt()
+        if valid_flow_mask is not None:
+            valid_flow_mask = valid_flow_mask & (flow_norm < max_flow)
+        else:
+            valid_flow_mask = flow_norm < max_flow
+
+    if valid_flow_mask is not None:
+        valid_flow_mask = valid_flow_mask.unsqueeze(1)
+    flow_preds = torch.stack(flow_preds)  # shape = (num_flow_updates, batch_size, 2, H, W)
+
+    abs_diff = (flow_preds - flow_gt).abs()
+    if valid_flow_mask is not None:
+        abs_diff = abs_diff * valid_flow_mask.unsqueeze(0)
+
+    abs_diff = abs_diff.mean(axis=(1, 2, 3, 4))
+    num_predictions = flow_preds.shape[0]
+
+    # allocating on CPU and moving to device during run-time can force
+    # an unwanted GPU synchronization that produces a large overhead
+    if weights is None or len(weights) != num_predictions:
+        weights = gamma ** torch.arange(num_predictions - 1, -1, -1, device=flow_preds.device, dtype=flow_preds.dtype)
+
+    flow_loss = (abs_diff * weights).sum()
+    return flow_loss, weights
+
+
+class SequenceLoss(nn.Module):
+    def __init__(self, gamma: float = 0.8, max_flow: int = 256, exclude_large_flows: bool = False) -> None:
+        """
+        Args:
+            gamma: value for the exponential weighting of the loss across frames
+            max_flow: maximum flow value to exclude
+            exclude_large_flows: whether to exclude large flows
+        """
+
+        super().__init__()
+        self.max_flow = max_flow
+        self.excluding_large = exclude_large_flows
+        self.register_buffer("gamma", torch.tensor([gamma]))
+        # cache the scale factor for the loss
+        self._weights = None
+
+    def forward(self, flow_preds: List[Tensor], flow_gt: Tensor, valid_flow_mask: Optional[Tensor]) -> Tensor:
+        """
+        Args:
+            flow_preds: list of flow predictions of shape (batch_size, C, H, W)
+            flow_gt: ground truth flow of shape (batch_size, C, H, W)
+            valid_flow_mask: mask of valid flow pixels of shape (batch_size, H, W)
+        """
+        loss, weights = _sequence_loss_fn(
+            flow_preds, flow_gt, valid_flow_mask, self.gamma, self.max_flow, self.excluding_large, self._weights
+        )
+        self._weights = weights
+        return loss
+
+    def set_gamma(self, gamma: float) -> None:
+        self.gamma.fill_(gamma)
+        # reset the cached scale factor
+        self._weights = None
+
+
+def _ssim_loss_fn(
+    source: Tensor,
+    reference: Tensor,
+    kernel: Tensor,
+    eps: float = 1e-8,
+    c1: float = 0.01**2,
+    c2: float = 0.03**2,
+    use_padding: bool = False,
+) -> Tensor:
+    # ref: Algorithm section: https://en.wikipedia.org/wiki/Structural_similarity
+    # ref: Alternative implementation:
https://kornia.readthedocs.io/en/latest/_modules/kornia/metrics/ssim.html#ssim
+
+    torch._assert(
+        source.ndim == reference.ndim == 4,
+        "SSIM: `source` and `reference` must be 4-dimensional tensors",
+    )
+
+    torch._assert(
+        source.shape == reference.shape,
+        "SSIM: `source` and `reference` must have the same shape, but got {} and {}".format(
+            source.shape, reference.shape
+        ),
+    )
+
+    B, C, H, W = source.shape
+    kernel = kernel.unsqueeze(0).unsqueeze(0).repeat(C, 1, 1, 1)
+    if use_padding:
+        pad_size = kernel.shape[2] // 2
+        source = F.pad(source, (pad_size, pad_size, pad_size, pad_size), "reflect")
+        reference = F.pad(reference, (pad_size, pad_size, pad_size, pad_size), "reflect")
+
+    mu1 = F.conv2d(source, kernel, groups=C)
+    mu2 = F.conv2d(reference, kernel, groups=C)
+
+    mu1_sq = mu1.pow(2)
+    mu2_sq = mu2.pow(2)
+
+    mu1_mu2 = mu1 * mu2
+    mu_img1_sq = F.conv2d(source.pow(2), kernel, groups=C)
+    mu_img2_sq = F.conv2d(reference.pow(2), kernel, groups=C)
+    mu_img1_mu2 = F.conv2d(source * reference, kernel, groups=C)
+
+    sigma1_sq = mu_img1_sq - mu1_sq
+    sigma2_sq = mu_img2_sq - mu2_sq
+    sigma12 = mu_img1_mu2 - mu1_mu2
+
+    numerator = (2 * mu1_mu2 + c1) * (2 * sigma12 + c2)
+    denominator = (mu1_sq + mu2_sq + c1) * (sigma1_sq + sigma2_sq + c2)
+    ssim = numerator / (denominator + eps)
+
+    # doing 1 - ssim because we want to maximize the ssim
+    return 1 - ssim.mean(dim=(1, 2, 3))
+
+
+class SSIM(nn.Module):
+    def __init__(
+        self,
+        kernel_size: int = 11,
+        max_val: float = 1.0,
+        sigma: float = 1.5,
+        eps: float = 1e-12,
+        use_padding: bool = True,
+    ) -> None:
+        """SSIM loss function.
+
+        Args:
+            kernel_size: size of the Gaussian kernel
+            max_val: constant scaling factor
+            sigma: sigma of the Gaussian kernel
+            eps: small constant used to avoid division by zero
+            use_padding: whether to pad the input tensor such that we have a score for each pixel
+        """
+        super().__init__()
+
+        self.kernel_size = kernel_size
+        self.max_val = max_val
+        self.sigma = sigma
+
+        gaussian_kernel = make_gaussian_kernel(kernel_size, sigma)
+        self.register_buffer("gaussian_kernel", gaussian_kernel)
+
+        self.c1 = (0.01 * self.max_val) ** 2
+        self.c2 = (0.03 * self.max_val) ** 2
+
+        self.use_padding = use_padding
+        self.eps = eps
+
+    def forward(self, source: torch.Tensor, reference: torch.Tensor) -> torch.Tensor:
+        """
+        Args:
+            source: source image of shape (batch_size, C, H, W)
+            reference: reference image of shape (batch_size, C, H, W)
+
+        Returns:
+            SSIM loss of shape (batch_size,)
+        """
+        return _ssim_loss_fn(
+            source,
+            reference,
+            kernel=self.gaussian_kernel,
+            c1=self.c1,
+            c2=self.c2,
+            use_padding=self.use_padding,
+            eps=self.eps,
+        )
+
+
+def _smoothness_loss_fn(img_gx: Tensor, img_gy: Tensor, val_gx: Tensor, val_gy: Tensor):
+    # ref: https://github.com/nianticlabs/monodepth2/blob/b676244e5a1ca55564eb5d16ab521a48f823af31/layers.py#L202
+
+    torch._assert(
+        img_gx.ndim >= 3,
+        "smoothness_loss: `img_gx` must be at least 3-dimensional tensor of shape (..., C, H, W)",
+    )
+
+    torch._assert(
+        img_gx.ndim == val_gx.ndim,
+        "smoothness_loss: `img_gx` and `val_gx` must have the same dimensionality, but got {} and {}".format(
+            img_gx.ndim, val_gx.ndim
+        ),
+    )
+
+    for idx in range(img_gx.ndim):
+        torch._assert(
+            (img_gx.shape[idx] == val_gx.shape[idx] or (img_gx.shape[idx] == 1 or val_gx.shape[idx] == 1)),
+            "smoothness_loss: `img_gx` and `val_gx` must have either the same shape or broadcastable shape, but got {} and {}".format(
+                img_gx.shape, val_gx.shape
+            ),
+        )
+
+    # -3 is channel dimension
+    weights_x =
torch.exp(-torch.mean(torch.abs(val_gx), axis=-3, keepdim=True))
+    weights_y = torch.exp(-torch.mean(torch.abs(val_gy), axis=-3, keepdim=True))
+
+    smoothness_x = img_gx * weights_x
+    smoothness_y = img_gy * weights_y
+
+    smoothness = (torch.abs(smoothness_x) + torch.abs(smoothness_y)).mean(axis=(-3, -2, -1))
+    return smoothness
+
+
+class SmoothnessLoss(nn.Module):
+    def __init__(self) -> None:
+        super().__init__()
+
+    def _x_gradient(self, img: Tensor) -> Tensor:
+        if img.ndim > 4:
+            original_shape = img.shape
+            is_reshaped = True
+            img = img.reshape(-1, *original_shape[-3:])
+        else:
+            is_reshaped = False
+
+        padded = F.pad(img, (0, 1, 0, 0), mode="replicate")
+        grad = padded[..., :, :-1] - padded[..., :, 1:]
+        if is_reshaped:
+            grad = grad.reshape(original_shape)
+        return grad
+
+    def _y_gradient(self, x: torch.Tensor) -> torch.Tensor:
+        if x.ndim > 4:
+            original_shape = x.shape
+            is_reshaped = True
+            x = x.reshape(-1, *original_shape[-3:])
+        else:
+            is_reshaped = False
+
+        padded = F.pad(x, (0, 0, 0, 1), mode="replicate")
+        grad = padded[..., :-1, :] - padded[..., 1:, :]
+        if is_reshaped:
+            grad = grad.reshape(original_shape)
+        return grad
+
+    def forward(self, images: Tensor, vals: Tensor) -> Tensor:
+        """
+        Args:
+            images: tensor of shape (D1, D2, ..., DN, C, H, W)
+            vals: tensor of shape (D1, D2, ..., DN, 1, H, W)
+
+        Returns:
+            smoothness loss of shape (D1, D2, ..., DN)
+        """
+        img_gx = self._x_gradient(images)
+        img_gy = self._y_gradient(images)
+
+        val_gx = self._x_gradient(vals)
+        val_gy = self._y_gradient(vals)
+
+        return _smoothness_loss_fn(img_gx, img_gy, val_gx, val_gy)
+
+
+def _flow_sequence_consistency_loss_fn(
+    flow_preds: List[Tensor],
+    gamma: float = 0.8,
+    resize_factor: float = 0.25,
+    rescale_factor: float = 0.25,
+    rescale_mode: str = "bilinear",
+    weights: Optional[Tensor] = None,
+):
+    """Loss function defined over sequence of flow predictions"""
+
+    # Simplified version of ref: https://arxiv.org/pdf/2006.11242.pdf
+    # In the original paper, an additional refinement network is used to refine a flow prediction.
+    # Each step performed by the recurrent module in Raft or CREStereo is a refinement step using a delta_flow update,
+    # which should be consistent with the previous step. In this implementation, we simplify the overall loss
+    # term and ignore left-right consistency loss or photometric loss which can be treated separately.
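+    #
+    # Concretely, with N stacked predictions the weighting below computes
+    #     sum_i gamma**(N-2-i) * mean((flow_preds[i+1] - flow_preds[i])**2)
+    # over the N-1 consecutive pairs, so with gamma < 1 the later refinement
+    # steps receive the larger weights.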
+
+    torch._assert(
+        rescale_factor <= 1.0,
+        "sequence_consistency_loss: `rescale_factor` must be less than or equal to 1, but got {}".format(
+            rescale_factor
+        ),
+    )
+
+    flow_preds = torch.stack(flow_preds)  # shape = (num_flow_updates, batch_size, 2, H, W)
+    N, B, C, H, W = flow_preds.shape
+
+    # rescale flow predictions to account for bilinear upsampling artifacts
+    if rescale_factor:
+        flow_preds = (
+            F.interpolate(
+                flow_preds.view(N * B, C, H, W), scale_factor=resize_factor, mode=rescale_mode, align_corners=True
+            )
+        ) * rescale_factor
+        flow_preds = torch.stack(torch.chunk(flow_preds, N, dim=0), dim=0)
+
+    # force the next prediction to be similar to the previous prediction
+    abs_diff = (flow_preds[1:] - flow_preds[:-1]).square()
+    abs_diff = abs_diff.mean(axis=(1, 2, 3, 4))
+
+    num_predictions = flow_preds.shape[0] - 1  # because we are comparing differences
+    if weights is None or len(weights) != num_predictions:
+        weights = gamma ** torch.arange(num_predictions - 1, -1, -1, device=flow_preds.device, dtype=flow_preds.dtype)
+
+    flow_loss = (abs_diff * weights).sum()
+    return flow_loss, weights
+
+
+class FlowSequenceConsistencyLoss(nn.Module):
+    def __init__(
+        self,
+        gamma: float = 0.8,
+        resize_factor: float = 0.25,
+        rescale_factor: float = 0.25,
+        rescale_mode: str = "bilinear",
+    ) -> None:
+        super().__init__()
+        self.gamma = gamma
+        self.resize_factor = resize_factor
+        self.rescale_factor = rescale_factor
+        self.rescale_mode = rescale_mode
+        self._weights = None
+
+    def forward(self, flow_preds: List[Tensor]) -> Tensor:
+        """
+        Args:
+            flow_preds: list of tensors of shape (batch_size, C, H, W)
+
+        Returns:
+            scalar sequence consistency loss
+        """
+        loss, weights = _flow_sequence_consistency_loss_fn(
+            flow_preds,
+            gamma=self.gamma,
+            resize_factor=self.resize_factor,
+            rescale_factor=self.rescale_factor,
+            rescale_mode=self.rescale_mode,
+            weights=self._weights,
+        )
+        self._weights = weights
+        return loss
+
+    def set_gamma(self, gamma: float) -> None:
+        # `gamma` is a plain float here (unlike in SequenceLoss, where it is a
+        # registered buffer), so we rebind the attribute instead of calling `fill_`
+        self.gamma = gamma
+        # reset the cached scale factor
+        self._weights = None
+
+
+def _psnr_loss_fn(source: torch.Tensor, target: torch.Tensor, max_val: float) -> torch.Tensor:
+    torch._assert(
+        source.shape == target.shape,
+        "psnr_loss: source and target must have the same shape, but got {} and {}".format(source.shape, target.shape),
+    )
+
+    # ref https://en.wikipedia.org/wiki/Peak_signal-to-noise_ratio
+    return 10 * torch.log10(max_val**2 / ((source - target).pow(2).mean(axis=(-3, -2, -1))))
+
+
+class PSNRLoss(nn.Module):
+    def __init__(self, max_val: float = 256) -> None:
+        """
+        Args:
+            max_val: maximum value of the input tensor. This refers to the maximum domain value of the input tensor.
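+                e.g. 255 (or 256 here) for 8-bit images, or 1.0 for float images scaled to [0, 1].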
+
+        """
+        super().__init__()
+        self.max_val = max_val
+
+    def forward(self, source: Tensor, target: Tensor) -> Tensor:
+        """
+        Args:
+            source: tensor of shape (D1, D2, ..., DN, C, H, W)
+            target: tensor of shape (D1, D2, ..., DN, C, H, W)
+
+        Returns:
+            psnr loss of shape (D1, D2, ..., DN)
+        """
+
+        # multiply by -1 as we want to maximize the psnr
+        return -1 * _psnr_loss_fn(source, target, self.max_val)
+
+
+class FlowPhotoMetricLoss(nn.Module):
+    def __init__(
+        self,
+        ssim_weight: float = 0.85,
+        ssim_window_size: int = 11,
+        ssim_max_val: float = 1.0,
+        ssim_sigma: float = 1.5,
+        ssim_eps: float = 1e-12,
+        ssim_use_padding: bool = True,
+        max_displacement_ratio: float = 0.15,
+    ) -> None:
+        super().__init__()
+
+        self._ssim_loss = SSIM(
+            kernel_size=ssim_window_size,
+            max_val=ssim_max_val,
+            sigma=ssim_sigma,
+            eps=ssim_eps,
+            use_padding=ssim_use_padding,
+        )
+
+        self._L1_weight = 1 - ssim_weight
+        self._SSIM_weight = ssim_weight
+        self._max_displacement_ratio = max_displacement_ratio
+
+    def forward(
+        self,
+        source: Tensor,
+        reference: Tensor,
+        flow_pred: Tensor,
+        valid_mask: Optional[Tensor] = None,
+    ):
+        """
+        Args:
+            source: tensor of shape (B, C, H, W)
+            reference: tensor of shape (B, C, H, W)
+            flow_pred: tensor of shape (B, 2, H, W)
+            valid_mask: tensor of shape (B, H, W) or None
+
+        Returns:
+            scalar photometric loss, averaged over the batch
+        """
+        torch._assert(
+            source.ndim == 4,
+            "FlowPhotoMetricLoss: source must have 4 dimensions, but got {}".format(source.ndim),
+        )
+        torch._assert(
+            reference.ndim == source.ndim,
+            "FlowPhotoMetricLoss: source and other must have the same number of dimensions, but got {} and {}".format(
+                source.ndim, reference.ndim
+            ),
+        )
+        torch._assert(
+            flow_pred.shape[1] == 2,
+            "FlowPhotoMetricLoss: flow_pred must have 2 channels, but got {}".format(flow_pred.shape[1]),
+        )
+        torch._assert(
+            flow_pred.ndim == 4,
+            "FlowPhotoMetricLoss: flow_pred must have 4 dimensions, but got {}".format(flow_pred.ndim),
+        )
+
+        B, C, H, W = source.shape
+        flow_channels = flow_pred.shape[1]
+
+        max_displacements = []
+        for dim in range(flow_channels):
+            shape_index = -1 - dim
+            max_displacements.append(int(self._max_displacement_ratio * source.shape[shape_index]))
+
+        # mask out all pixels that have larger flow than the max flow allowed
+        max_flow_mask = torch.logical_and(
+            *[flow_pred[:, dim, :, :] < max_displacements[dim] for dim in range(flow_channels)]
+        )
+
+        if valid_mask is not None:
+            valid_mask = torch.logical_and(valid_mask, max_flow_mask).unsqueeze(1)
+        else:
+            valid_mask = max_flow_mask.unsqueeze(1)
+
+        grid = make_coords_grid(B, H, W, device=str(source.device))
+        resampled_grids = grid - flow_pred
+        resampled_grids = resampled_grids.permute(0, 2, 3, 1)
+        resampled_source = grid_sample(reference, resampled_grids, mode="bilinear")
+
+        # compute SSIM loss
+        ssim_loss = self._ssim_loss(resampled_source * valid_mask, source * valid_mask)
+        l1_loss = (resampled_source * valid_mask - source * valid_mask).abs().mean(axis=(-3, -2, -1))
+        loss = self._L1_weight * l1_loss + self._SSIM_weight * ssim_loss
+
+        return loss.mean()
diff --git a/references/depth/stereo/utils/metrics.py b/references/depth/stereo/utils/metrics.py
new file mode 100644
index 00000000000..05b149fb048
--- /dev/null
+++ b/references/depth/stereo/utils/metrics.py
@@ -0,0 +1,49 @@
+from typing import Dict, List, Optional, Tuple
+
+from torch import Tensor
+
+AVAILABLE_METRICS = ["mae", "rmse", "epe", "bad1", "bad2", "1px", "3px", "5px", "fl-all", "relepe"]
+
+
+def compute_metrics(
flow_pred: Tensor, flow_gt: Tensor, valid_flow_mask: Optional[Tensor], metrics: List[str] +) -> Tuple[Dict[str, float], int]: + for m in metrics: + if m not in AVAILABLE_METRICS: + raise ValueError(f"Invalid metric: {m}. Valid metrics are: {AVAILABLE_METRICS}") + + metrics_dict = {} + + pixels_diffs = (flow_pred - flow_gt).abs() + # there is no Y flow in Stereo Matching, therefore flow.abs() = flow.pow(2).sum(dim=1).sqrt() + flow_norm = flow_gt.abs() + + if valid_flow_mask is not None: + valid_flow_mask = valid_flow_mask.unsqueeze(1) + pixels_diffs = pixels_diffs[valid_flow_mask] + flow_norm = flow_norm[valid_flow_mask] + + num_pixels = pixels_diffs.numel() + if "bad1" in metrics: + metrics_dict["bad1"] = (pixels_diffs > 1).float().mean().item() + if "bad2" in metrics: + metrics_dict["bad2"] = (pixels_diffs > 2).float().mean().item() + + if "mae" in metrics: + metrics_dict["mae"] = pixels_diffs.mean().item() + if "rmse" in metrics: + metrics_dict["rmse"] = pixels_diffs.pow(2).mean().sqrt().item() + if "epe" in metrics: + metrics_dict["epe"] = pixels_diffs.mean().item() + if "1px" in metrics: + metrics_dict["1px"] = (pixels_diffs < 1).float().mean().item() + if "3px" in metrics: + metrics_dict["3px"] = (pixels_diffs < 3).float().mean().item() + if "5px" in metrics: + metrics_dict["5px"] = (pixels_diffs < 5).float().mean().item() + if "fl-all" in metrics: + metrics_dict["fl-all"] = ((pixels_diffs < 3) & ((pixels_diffs / flow_norm) < 0.05)).float().mean().item() * 100 + if "relepe" in metrics: + metrics_dict["relepe"] = (pixels_diffs / flow_norm).mean().item() + + return metrics_dict, num_pixels diff --git a/references/depth/stereo/utils/norm.py b/references/depth/stereo/utils/norm.py new file mode 100644 index 00000000000..7f6e0011160 --- /dev/null +++ b/references/depth/stereo/utils/norm.py @@ -0,0 +1,13 @@ +import torch + + +def freeze_batch_norm(model): + for m in model.modules(): + if isinstance(m, torch.nn.BatchNorm2d): + m.eval() + + +def unfreeze_batch_norm(model): + for m in model.modules(): + if isinstance(m, torch.nn.BatchNorm2d): + m.train() diff --git a/references/depth/stereo/utils/padder.py b/references/depth/stereo/utils/padder.py new file mode 100644 index 00000000000..7d2c63afba6 --- /dev/null +++ b/references/depth/stereo/utils/padder.py @@ -0,0 +1,28 @@ +import torch.nn.functional as F + + +class InputPadder: + """Pads images such that dimensions are divisible by 8""" + + # TODO: Ideally, this should be part of the eval transforms preset, instead + # of being part of the validation code. It's not obvious what a good + # solution would be, because we need to unpad the predicted flows according + # to the input images' size, and in some datasets (Kitti) images can have + # variable sizes. 
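+    #
+    # The pad amounts computed in __init__ are the smallest non-negative values
+    # that make each spatial dimension divisible by 8 (equivalently, (-size) % 8).
+    # Width padding is split evenly between left and right in both modes; the
+    # "sintel" mode also splits the height padding evenly, while other modes
+    # (e.g. "kitti") put all of it at the bottom of the image.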
+ + def __init__(self, dims, mode="sintel"): + self.ht, self.wd = dims[-2:] + pad_ht = (((self.ht // 8) + 1) * 8 - self.ht) % 8 + pad_wd = (((self.wd // 8) + 1) * 8 - self.wd) % 8 + if mode == "sintel": + self._pad = [pad_wd // 2, pad_wd - pad_wd // 2, pad_ht // 2, pad_ht - pad_ht // 2] + else: + self._pad = [pad_wd // 2, pad_wd - pad_wd // 2, 0, pad_ht] + + def pad(self, *inputs): + return [F.pad(x, self._pad, mode="replicate") for x in inputs] + + def unpad(self, x): + ht, wd = x.shape[-2:] + c = [self._pad[2], ht - self._pad[3], self._pad[0], wd - self._pad[1]] + return x[..., c[0] : c[1], c[2] : c[3]] diff --git a/references/depth/stereo/visualization.py b/references/depth/stereo/visualization.py new file mode 100644 index 00000000000..07a7e7167d3 --- /dev/null +++ b/references/depth/stereo/visualization.py @@ -0,0 +1,127 @@ +import os +from typing import List + +import numpy as np +import numpy.typing as npt +import torch +from torch import Tensor +from torchvision.utils import make_grid + + +@torch.no_grad() +def make_disparity_image(disparity: Tensor): + # normalize image to [0, 1] + disparity = disparity.detach().cpu() + disparity = (disparity - disparity.min()) / (disparity.max() - disparity.min()) + return disparity + + +@torch.no_grad() +def make_disparity_image_pairs(disparity: Tensor, image: Tensor): + disparity = make_disparity_image(disparity) + # image is in [-1, 1], bring it to [0, 1] + image = image.detach().cpu() + image = image * 0.5 + 0.5 + return disparity, image + + +@torch.no_grad() +def make_disparity_sequence(disparities: List[Tensor]): + # convert each disparity to [0, 1] + for idx, disparity_batch in enumerate(disparities): + disparities[idx] = torch.stack(list(map(make_disparity_image, disparity_batch))) + # make the list into a batch + disparity_sequences = torch.stack(disparities) + return disparity_sequences + + +@torch.no_grad() +def make_pair_grid(*inputs, orientation="horizontal"): + # make a grid of images with the outputs and references side by side + if orientation == "horizontal": + # interleave the outputs and references + canvas = torch.zeros_like(inputs[0]) + canvas = torch.cat([canvas] * len(inputs), dim=0) + size = len(inputs) + for idx, inp in enumerate(inputs): + canvas[idx::size, ...] = inp + grid = make_grid(canvas, nrow=len(inputs), padding=16, normalize=True, scale_each=True) + elif orientation == "vertical": + # interleave the outputs and references + canvas = torch.cat(inputs, dim=0) + size = len(inputs) + for idx, inp in enumerate(inputs): + canvas[idx::size, ...] 
= inp
+        grid = make_grid(canvas, nrow=len(inputs[0]), padding=16, normalize=True, scale_each=True)
+    else:
+        raise ValueError("Unknown orientation: {}".format(orientation))
+    return grid
+
+
+@torch.no_grad()
+def make_training_sample_grid(
+    left_images: Tensor,
+    right_images: Tensor,
+    disparities: Tensor,
+    masks: Tensor,
+    predictions: List[Tensor],
+) -> npt.NDArray:
+    # detach images and renormalize to [0, 1]
+    images_left = left_images.detach().cpu() * 0.5 + 0.5
+    images_right = right_images.detach().cpu() * 0.5 + 0.5
+    # detach the disparities and predictions
+    disparities = disparities.detach().cpu()
+    predictions = predictions[-1].detach().cpu()
+    # keep only the first channel of pixels, and repeat it 3 times
+    disparities = disparities[:, :1, ...].repeat(1, 3, 1, 1)
+    predictions = predictions[:, :1, ...].repeat(1, 3, 1, 1)
+    # unsqueeze and repeat the masks
+    masks = masks.detach().cpu().unsqueeze(1).repeat(1, 3, 1, 1)
+    # make a grid that will self normalize across the batch
+    pred_grid = make_pair_grid(images_left, images_right, masks, disparities, predictions, orientation="horizontal")
+    pred_grid = pred_grid.permute(1, 2, 0).numpy()
+    pred_grid = (pred_grid * 255).astype(np.uint8)
+    return pred_grid
+
+
+@torch.no_grad()
+def make_disparity_sequence_grid(predictions: List[Tensor], disparities: Tensor) -> npt.NDArray:
+    # the right-most column will be the ground truth
+    seq_len = len(predictions) + 1
+    predictions = list(map(lambda x: x[:, :1, :, :].detach().cpu(), predictions + [disparities]))
+    sequence = make_disparity_sequence(predictions)
+    # swap axes to have them in the correct order for each batch sample
+    sequence = torch.swapaxes(sequence, 0, 1).contiguous().reshape(-1, 1, disparities.shape[-2], disparities.shape[-1])
+    sequence = make_grid(sequence, nrow=seq_len, padding=16, normalize=True, scale_each=True)
+    sequence = sequence.permute(1, 2, 0).numpy()
+    sequence = (sequence * 255).astype(np.uint8)
+    return sequence
+
+
+@torch.no_grad()
+def make_prediction_image_side_to_side(
+    predictions: Tensor, disparities: Tensor, valid_mask: Tensor, save_path: str, prefix: str
+) -> None:
+    import matplotlib.pyplot as plt
+
+    # normalize the predictions and disparities in [0, 1]
+    predictions = (predictions - predictions.min()) / (predictions.max() - predictions.min())
+    disparities = (disparities - disparities.min()) / (disparities.max() - disparities.min())
+    predictions = predictions * valid_mask
+    disparities = disparities * valid_mask
+
+    predictions = predictions.detach().cpu()
+    disparities = disparities.detach().cpu()
+
+    for idx, (pred, gt) in enumerate(zip(predictions, disparities)):
+        pred = pred.permute(1, 2, 0).numpy()
+        gt = gt.permute(1, 2, 0).numpy()
+        # plot pred and gt side by side
+        fig, ax = plt.subplots(1, 2, figsize=(10, 5))
+        ax[0].imshow(pred)
+        ax[0].set_title("Prediction")
+        ax[1].imshow(gt)
+        ax[1].set_title("Ground Truth")
+        save_name = os.path.join(save_path, "{}_{}.png".format(prefix, idx))
+        plt.savefig(save_name)
+        plt.close()
diff --git a/references/detection/README.md b/references/detection/README.md
new file mode 100644
index 00000000000..d9af26523a5
--- /dev/null
+++ b/references/detection/README.md
@@ -0,0 +1,88 @@
+# Object detection reference training scripts
+
+This folder contains reference training scripts for object detection.
+They serve as a log of how to train specific models, and provide baseline
+training and evaluation scripts to quickly bootstrap research.
+
+To execute the example commands below you must install the following:
+
+```
+cython
+pycocotools
+matplotlib
+```
+
+You must modify the following flags:
+
+`--data-path=/path/to/coco/dataset`
+
+`--nproc_per_node=`
+
+Unless otherwise noted, all models have been trained on 8x V100 GPUs.
+
+### Faster R-CNN ResNet-50 FPN
+```
+torchrun --nproc_per_node=8 train.py\
+    --dataset coco --model fasterrcnn_resnet50_fpn --epochs 26\
+    --lr-steps 16 22 --aspect-ratio-group-factor 3 --weights-backbone ResNet50_Weights.IMAGENET1K_V1
+```
+
+### Faster R-CNN MobileNetV3-Large FPN
+```
+torchrun --nproc_per_node=8 train.py\
+    --dataset coco --model fasterrcnn_mobilenet_v3_large_fpn --epochs 26\
+    --lr-steps 16 22 --aspect-ratio-group-factor 3 --weights-backbone MobileNet_V3_Large_Weights.IMAGENET1K_V1
+```
+
+### Faster R-CNN MobileNetV3-Large 320 FPN
+```
+torchrun --nproc_per_node=8 train.py\
+    --dataset coco --model fasterrcnn_mobilenet_v3_large_320_fpn --epochs 26\
+    --lr-steps 16 22 --aspect-ratio-group-factor 3 --weights-backbone MobileNet_V3_Large_Weights.IMAGENET1K_V1
+```
+
+### FCOS ResNet-50 FPN
+```
+torchrun --nproc_per_node=8 train.py\
+    --dataset coco --model fcos_resnet50_fpn --epochs 26\
+    --lr-steps 16 22 --aspect-ratio-group-factor 3 --lr 0.01 --amp --weights-backbone ResNet50_Weights.IMAGENET1K_V1
+```
+
+### RetinaNet
+```
+torchrun --nproc_per_node=8 train.py\
+    --dataset coco --model retinanet_resnet50_fpn --epochs 26\
+    --lr-steps 16 22 --aspect-ratio-group-factor 3 --lr 0.01 --weights-backbone ResNet50_Weights.IMAGENET1K_V1
+```
+
+### SSD300 VGG16
+```
+torchrun --nproc_per_node=8 train.py\
+    --dataset coco --model ssd300_vgg16 --epochs 120\
+    --lr-steps 80 110 --aspect-ratio-group-factor 3 --lr 0.002 --batch-size 4\
+    --weight-decay 0.0005 --data-augmentation ssd --weights-backbone VGG16_Weights.IMAGENET1K_FEATURES
+```
+
+### SSDlite320 MobileNetV3-Large
+```
+torchrun --nproc_per_node=8 train.py\
+    --dataset coco --model ssdlite320_mobilenet_v3_large --epochs 660\
+    --aspect-ratio-group-factor 3 --lr-scheduler cosineannealinglr --lr 0.15 --batch-size 24\
+    --weight-decay 0.00004 --data-augmentation ssdlite
+```
+
+
+### Mask R-CNN
+```
+torchrun --nproc_per_node=8 train.py\
+    --dataset coco --model maskrcnn_resnet50_fpn --epochs 26\
+    --lr-steps 16 22 --aspect-ratio-group-factor 3 --weights-backbone ResNet50_Weights.IMAGENET1K_V1
+```
+
+
+### Keypoint R-CNN
+```
+torchrun --nproc_per_node=8 train.py\
+    --dataset coco_kp --model keypointrcnn_resnet50_fpn --epochs 46\
+    --lr-steps 36 43 --aspect-ratio-group-factor 3 --weights-backbone ResNet50_Weights.IMAGENET1K_V1
+```
diff --git a/references/detection/coco_eval.py b/references/detection/coco_eval.py
index d758a64a909..ba1359f8c65 100644
--- a/references/detection/coco_eval.py
+++ b/references/detection/coco_eval.py
@@ -1,24 +1,19 @@
-import json
-import tempfile
-
-import numpy as np
 import copy
-import time
-import torch
-import torch._six
+import io
+from contextlib import redirect_stdout
 
-from pycocotools.cocoeval import COCOeval
-from pycocotools.coco import COCO
+import numpy as np
 import pycocotools.mask as mask_util
-
-from collections import defaultdict
-
+import torch
 import utils
+from pycocotools.coco import COCO
+from pycocotools.cocoeval import COCOeval
 
-class CocoEvaluator(object):
+class CocoEvaluator:
     def __init__(self, coco_gt, iou_types):
-        assert isinstance(iou_types, (list, tuple))
+        if not isinstance(iou_types, (list, tuple)):
+            raise TypeError(f"This constructor expects iou_types of type list or
tuple, instead got {type(iou_types)}") coco_gt = copy.deepcopy(coco_gt) self.coco_gt = coco_gt @@ -36,7 +31,8 @@ def update(self, predictions): for iou_type in self.iou_types: results = self.prepare(predictions, iou_type) - coco_dt = loadRes(self.coco_gt, results) if results else COCO() + with redirect_stdout(io.StringIO()): + coco_dt = COCO.loadRes(self.coco_gt, results) if results else COCO() coco_eval = self.coco_eval[iou_type] coco_eval.cocoDt = coco_dt @@ -56,18 +52,17 @@ def accumulate(self): def summarize(self): for iou_type, coco_eval in self.coco_eval.items(): - print("IoU metric: {}".format(iou_type)) + print(f"IoU metric: {iou_type}") coco_eval.summarize() def prepare(self, predictions, iou_type): if iou_type == "bbox": return self.prepare_for_coco_detection(predictions) - elif iou_type == "segm": + if iou_type == "segm": return self.prepare_for_coco_segmentation(predictions) - elif iou_type == "keypoints": + if iou_type == "keypoints": return self.prepare_for_coco_keypoint(predictions) - else: - raise ValueError("Unknown iou type {}".format(iou_type)) + raise ValueError(f"Unknown iou type {iou_type}") def prepare_for_coco_detection(self, predictions): coco_results = [] @@ -109,8 +104,7 @@ def prepare_for_coco_segmentation(self, predictions): labels = prediction["labels"].tolist() rles = [ - mask_util.encode(np.array(mask[0, :, :, np.newaxis], dtype=np.uint8, order="F"))[0] - for mask in masks + mask_util.encode(np.array(mask[0, :, :, np.newaxis], dtype=np.uint8, order="F"))[0] for mask in masks ] for rle in rles: rle["counts"] = rle["counts"].decode("utf-8") @@ -146,7 +140,7 @@ def prepare_for_coco_keypoint(self, predictions): { "image_id": original_id, "category_id": labels[k], - 'keypoints': keypoint, + "keypoints": keypoint, "score": scores[k], } for k, keypoint in enumerate(keypoints) @@ -192,158 +186,7 @@ def create_common_coco_eval(coco_eval, img_ids, eval_imgs): coco_eval._paramsEval = copy.deepcopy(coco_eval.params) -################################################################# -# From pycocotools, just removed the prints and fixed -# a Python3 bug about unicode not defined -################################################################# - -# Ideally, pycocotools wouldn't have hard-coded prints -# so that we could avoid copy-pasting those two functions - -def createIndex(self): - # create index - # print('creating index...') - anns, cats, imgs = {}, {}, {} - imgToAnns, catToImgs = defaultdict(list), defaultdict(list) - if 'annotations' in self.dataset: - for ann in self.dataset['annotations']: - imgToAnns[ann['image_id']].append(ann) - anns[ann['id']] = ann - - if 'images' in self.dataset: - for img in self.dataset['images']: - imgs[img['id']] = img - - if 'categories' in self.dataset: - for cat in self.dataset['categories']: - cats[cat['id']] = cat - - if 'annotations' in self.dataset and 'categories' in self.dataset: - for ann in self.dataset['annotations']: - catToImgs[ann['category_id']].append(ann['image_id']) - - # print('index created!') - - # create class members - self.anns = anns - self.imgToAnns = imgToAnns - self.catToImgs = catToImgs - self.imgs = imgs - self.cats = cats - - -maskUtils = mask_util - - -def loadRes(self, resFile): - """ - Load result file and return a result api object. 
- :param resFile (str) : file name of result file - :return: res (obj) : result api object - """ - res = COCO() - res.dataset['images'] = [img for img in self.dataset['images']] - - # print('Loading and preparing results...') - # tic = time.time() - if isinstance(resFile, torch._six.string_classes): - anns = json.load(open(resFile)) - elif type(resFile) == np.ndarray: - anns = self.loadNumpyAnnotations(resFile) - else: - anns = resFile - assert type(anns) == list, 'results in not an array of objects' - annsImgIds = [ann['image_id'] for ann in anns] - assert set(annsImgIds) == (set(annsImgIds) & set(self.getImgIds())), \ - 'Results do not correspond to current coco set' - if 'caption' in anns[0]: - imgIds = set([img['id'] for img in res.dataset['images']]) & set([ann['image_id'] for ann in anns]) - res.dataset['images'] = [img for img in res.dataset['images'] if img['id'] in imgIds] - for id, ann in enumerate(anns): - ann['id'] = id + 1 - elif 'bbox' in anns[0] and not anns[0]['bbox'] == []: - res.dataset['categories'] = copy.deepcopy(self.dataset['categories']) - for id, ann in enumerate(anns): - bb = ann['bbox'] - x1, x2, y1, y2 = [bb[0], bb[0] + bb[2], bb[1], bb[1] + bb[3]] - if 'segmentation' not in ann: - ann['segmentation'] = [[x1, y1, x1, y2, x2, y2, x2, y1]] - ann['area'] = bb[2] * bb[3] - ann['id'] = id + 1 - ann['iscrowd'] = 0 - elif 'segmentation' in anns[0]: - res.dataset['categories'] = copy.deepcopy(self.dataset['categories']) - for id, ann in enumerate(anns): - # now only support compressed RLE format as segmentation results - ann['area'] = maskUtils.area(ann['segmentation']) - if 'bbox' not in ann: - ann['bbox'] = maskUtils.toBbox(ann['segmentation']) - ann['id'] = id + 1 - ann['iscrowd'] = 0 - elif 'keypoints' in anns[0]: - res.dataset['categories'] = copy.deepcopy(self.dataset['categories']) - for id, ann in enumerate(anns): - s = ann['keypoints'] - x = s[0::3] - y = s[1::3] - x1, x2, y1, y2 = np.min(x), np.max(x), np.min(y), np.max(y) - ann['area'] = (x2 - x1) * (y2 - y1) - ann['id'] = id + 1 - ann['bbox'] = [x1, y1, x2 - x1, y2 - y1] - # print('DONE (t={:0.2f}s)'.format(time.time()- tic)) - - res.dataset['annotations'] = anns - createIndex(res) - return res - - -def evaluate(self): - ''' - Run per image evaluation on given images and store results (a list of dict) in self.evalImgs - :return: None - ''' - # tic = time.time() - # print('Running per image evaluation...') - p = self.params - # add backward compatibility if useSegm is specified in params - if p.useSegm is not None: - p.iouType = 'segm' if p.useSegm == 1 else 'bbox' - print('useSegm (deprecated) is not None. 
Running {} evaluation'.format(p.iouType)) - # print('Evaluate annotation type *{}*'.format(p.iouType)) - p.imgIds = list(np.unique(p.imgIds)) - if p.useCats: - p.catIds = list(np.unique(p.catIds)) - p.maxDets = sorted(p.maxDets) - self.params = p - - self._prepare() - # loop through images, area range, max detection number - catIds = p.catIds if p.useCats else [-1] - - if p.iouType == 'segm' or p.iouType == 'bbox': - computeIoU = self.computeIoU - elif p.iouType == 'keypoints': - computeIoU = self.computeOks - self.ious = { - (imgId, catId): computeIoU(imgId, catId) - for imgId in p.imgIds - for catId in catIds} - - evaluateImg = self.evaluateImg - maxDet = p.maxDets[-1] - evalImgs = [ - evaluateImg(imgId, catId, areaRng, maxDet) - for catId in catIds - for areaRng in p.areaRng - for imgId in p.imgIds - ] - # this is NOT in the pycocotools code, but could be done outside - evalImgs = np.asarray(evalImgs).reshape(len(catIds), len(p.areaRng), len(p.imgIds)) - self._paramsEval = copy.deepcopy(self.params) - # toc = time.time() - # print('DONE (t={:0.2f}s).'.format(toc-tic)) - return p.imgIds, evalImgs - -################################################################# -# end of straight copy from pycocotools, just removing the prints -################################################################# +def evaluate(imgs): + with redirect_stdout(io.StringIO()): + imgs.evaluate() + return imgs.params.imgIds, np.asarray(imgs.evalImgs).reshape(-1, len(imgs.params.areaRng), len(imgs.params.imgIds)) diff --git a/references/detection/coco_utils.py b/references/detection/coco_utils.py index 26701a2cbee..f40dcdff783 100644 --- a/references/detection/coco_utils.py +++ b/references/detection/coco_utils.py @@ -1,34 +1,12 @@ -import copy import os -from PIL import Image import torch import torch.utils.data import torchvision - +import transforms as T from pycocotools import mask as coco_mask from pycocotools.coco import COCO -import transforms as T - - -class FilterAndRemapCocoCategories(object): - def __init__(self, categories, remap=True): - self.categories = categories - self.remap = remap - - def __call__(self, image, target): - anno = target["annotations"] - anno = [obj for obj in anno if obj["category_id"] in self.categories] - if not self.remap: - target["annotations"] = anno - return image, target - anno = copy.deepcopy(anno) - for obj in anno: - obj["category_id"] = self.categories.index(obj["category_id"]) - target["annotations"] = anno - return image, target - def convert_coco_poly_to_mask(segmentations, height, width): masks = [] @@ -47,16 +25,15 @@ def convert_coco_poly_to_mask(segmentations, height, width): return masks -class ConvertCocoPolysToMask(object): +class ConvertCocoPolysToMask: def __call__(self, image, target): w, h = image.size image_id = target["image_id"] - image_id = torch.tensor([image_id]) anno = target["annotations"] - anno = [obj for obj in anno if obj['iscrowd'] == 0] + anno = [obj for obj in anno if obj["iscrowd"] == 0] boxes = [obj["bbox"] for obj in anno] # guard against no boxes via resizing @@ -119,7 +96,7 @@ def _has_valid_annotation(anno): # if all boxes have close to zero area, there is no annotation if _has_only_empty_bbox(anno): return False - # keypoints task have a slight different critera for considering + # keypoints task have a slight different criteria for considering # if an annotation is valid if "keypoints" not in anno[0]: return True @@ -129,7 +106,6 @@ def _has_valid_annotation(anno): return True return False - assert isinstance(dataset, 
torchvision.datasets.CocoDetection) ids = [] for ds_idx, img_id in enumerate(dataset.ids): ann_ids = dataset.coco.getAnnIds(imgIds=img_id, iscrowd=None) @@ -147,55 +123,56 @@ def convert_to_coco_api(ds): coco_ds = COCO() # annotation IDs need to start at 1, not 0, see torchvision issue #1530 ann_id = 1 - dataset = {'images': [], 'categories': [], 'annotations': []} + dataset = {"images": [], "categories": [], "annotations": []} categories = set() for img_idx in range(len(ds)): # find better way to get target # targets = ds.get_annotations(img_idx) img, targets = ds[img_idx] - image_id = targets["image_id"].item() + image_id = targets["image_id"] img_dict = {} - img_dict['id'] = image_id - img_dict['height'] = img.shape[-2] - img_dict['width'] = img.shape[-1] - dataset['images'].append(img_dict) - bboxes = targets["boxes"] + img_dict["id"] = image_id + img_dict["height"] = img.shape[-2] + img_dict["width"] = img.shape[-1] + dataset["images"].append(img_dict) + bboxes = targets["boxes"].clone() bboxes[:, 2:] -= bboxes[:, :2] bboxes = bboxes.tolist() - labels = targets['labels'].tolist() - areas = targets['area'].tolist() - iscrowd = targets['iscrowd'].tolist() - if 'masks' in targets: - masks = targets['masks'] + labels = targets["labels"].tolist() + areas = targets["area"].tolist() + iscrowd = targets["iscrowd"].tolist() + if "masks" in targets: + masks = targets["masks"] # make masks Fortran contiguous for coco_mask masks = masks.permute(0, 2, 1).contiguous().permute(0, 2, 1) - if 'keypoints' in targets: - keypoints = targets['keypoints'] + if "keypoints" in targets: + keypoints = targets["keypoints"] keypoints = keypoints.reshape(keypoints.shape[0], -1).tolist() num_objs = len(bboxes) for i in range(num_objs): ann = {} - ann['image_id'] = image_id - ann['bbox'] = bboxes[i] - ann['category_id'] = labels[i] + ann["image_id"] = image_id + ann["bbox"] = bboxes[i] + ann["category_id"] = labels[i] categories.add(labels[i]) - ann['area'] = areas[i] - ann['iscrowd'] = iscrowd[i] - ann['id'] = ann_id - if 'masks' in targets: + ann["area"] = areas[i] + ann["iscrowd"] = iscrowd[i] + ann["id"] = ann_id + if "masks" in targets: ann["segmentation"] = coco_mask.encode(masks[i].numpy()) - if 'keypoints' in targets: - ann['keypoints'] = keypoints[i] - ann['num_keypoints'] = sum(k != 0 for k in keypoints[i][2::3]) - dataset['annotations'].append(ann) + if "keypoints" in targets: + ann["keypoints"] = keypoints[i] + ann["num_keypoints"] = sum(k != 0 for k in keypoints[i][2::3]) + dataset["annotations"].append(ann) ann_id += 1 - dataset['categories'] = [{'id': i} for i in sorted(categories)] + dataset["categories"] = [{"id": i} for i in sorted(categories)] coco_ds.dataset = dataset coco_ds.createIndex() return coco_ds def get_coco_api_from_dataset(dataset): + # FIXME: This is... awful? 
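+    # The loop below unwraps up to 10 levels of dataset wrappers (e.g.
+    # torch.utils.data.Subset) looking for an underlying CocoDetection;
+    # if none is found, the dataset is converted on the fly with
+    # convert_to_coco_api above.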
for _ in range(10): if isinstance(dataset, torchvision.datasets.CocoDetection): break @@ -208,11 +185,11 @@ def get_coco_api_from_dataset(dataset): class CocoDetection(torchvision.datasets.CocoDetection): def __init__(self, img_folder, ann_file, transforms): - super(CocoDetection, self).__init__(img_folder, ann_file) + super().__init__(img_folder, ann_file) self._transforms = transforms def __getitem__(self, idx): - img, target = super(CocoDetection, self).__getitem__(idx) + img, target = super().__getitem__(idx) image_id = self.ids[idx] target = dict(image_id=image_id, annotations=target) if self._transforms is not None: @@ -220,7 +197,7 @@ def __getitem__(self, idx): return img, target -def get_coco(root, image_set, transforms, mode='instances'): +def get_coco(root, image_set, transforms, mode="instances", use_v2=False, with_masks=False): anno_file_template = "{}_{}2017.json" PATHS = { "train": ("train2017", os.path.join("annotations", anno_file_template.format(mode, "train"))), @@ -228,17 +205,26 @@ def get_coco(root, image_set, transforms, mode='instances'): # "train": ("val2017", os.path.join("annotations", anno_file_template.format(mode, "val"))) } - t = [ConvertCocoPolysToMask()] - - if transforms is not None: - t.append(transforms) - transforms = T.Compose(t) - img_folder, ann_file = PATHS[image_set] img_folder = os.path.join(root, img_folder) ann_file = os.path.join(root, ann_file) - dataset = CocoDetection(img_folder, ann_file, transforms=transforms) + if use_v2: + from torchvision.datasets import wrap_dataset_for_transforms_v2 + + dataset = torchvision.datasets.CocoDetection(img_folder, ann_file, transforms=transforms) + target_keys = ["boxes", "labels", "image_id"] + if with_masks: + target_keys += ["masks"] + dataset = wrap_dataset_for_transforms_v2(dataset, target_keys=target_keys) + else: + # TODO: handle with_masks for V1? + t = [ConvertCocoPolysToMask()] + if transforms is not None: + t.append(transforms) + transforms = T.Compose(t) + + dataset = CocoDetection(img_folder, ann_file, transforms=transforms) if image_set == "train": dataset = _coco_remove_images_without_annotations(dataset) @@ -246,7 +232,3 @@ def get_coco(root, image_set, transforms, mode='instances'): # dataset = torch.utils.data.Subset(dataset, [i for i in range(500)]) return dataset - - -def get_coco_kp(root, image_set, transforms): - return get_coco(root, image_set, transforms, mode="person_keypoints") diff --git a/references/detection/engine.py b/references/detection/engine.py index 68c39a4fc1b..0e9bfffdf8a 100644 --- a/references/detection/engine.py +++ b/references/detection/engine.py @@ -1,35 +1,35 @@ import math import sys import time -import torch +import torch import torchvision.models.detection.mask_rcnn - -from coco_utils import get_coco_api_from_dataset -from coco_eval import CocoEvaluator import utils +from coco_eval import CocoEvaluator +from coco_utils import get_coco_api_from_dataset -def train_one_epoch(model, optimizer, data_loader, device, epoch, print_freq): +def train_one_epoch(model, optimizer, data_loader, device, epoch, print_freq, scaler=None): model.train() metric_logger = utils.MetricLogger(delimiter=" ") - metric_logger.add_meter('lr', utils.SmoothedValue(window_size=1, fmt='{value:.6f}')) - header = 'Epoch: [{}]'.format(epoch) + metric_logger.add_meter("lr", utils.SmoothedValue(window_size=1, fmt="{value:.6f}")) + header = f"Epoch: [{epoch}]" lr_scheduler = None if epoch == 0: - warmup_factor = 1. 
/ 1000 + warmup_factor = 1.0 / 1000 warmup_iters = min(1000, len(data_loader) - 1) - lr_scheduler = utils.warmup_lr_scheduler(optimizer, warmup_iters, warmup_factor) + lr_scheduler = torch.optim.lr_scheduler.LinearLR( + optimizer, start_factor=warmup_factor, total_iters=warmup_iters + ) for images, targets in metric_logger.log_every(data_loader, print_freq, header): images = list(image.to(device) for image in images) - targets = [{k: v.to(device) for k, v in t.items()} for t in targets] - - loss_dict = model(images, targets) - - losses = sum(loss for loss in loss_dict.values()) + targets = [{k: v.to(device) if isinstance(v, torch.Tensor) else v for k, v in t.items()} for t in targets] + with torch.cuda.amp.autocast(enabled=scaler is not None): + loss_dict = model(images, targets) + losses = sum(loss for loss in loss_dict.values()) # reduce losses over all GPUs for logging purposes loss_dict_reduced = utils.reduce_dict(loss_dict) @@ -38,13 +38,18 @@ def train_one_epoch(model, optimizer, data_loader, device, epoch, print_freq): loss_value = losses_reduced.item() if not math.isfinite(loss_value): - print("Loss is {}, stopping training".format(loss_value)) + print(f"Loss is {loss_value}, stopping training") print(loss_dict_reduced) sys.exit(1) optimizer.zero_grad() - losses.backward() - optimizer.step() + if scaler is not None: + scaler.scale(losses).backward() + scaler.step(optimizer) + scaler.update() + else: + losses.backward() + optimizer.step() if lr_scheduler is not None: lr_scheduler.step() @@ -52,6 +57,8 @@ def train_one_epoch(model, optimizer, data_loader, device, epoch, print_freq): metric_logger.update(loss=losses_reduced, **loss_dict_reduced) metric_logger.update(lr=optimizer.param_groups[0]["lr"]) + return metric_logger + def _get_iou_types(model): model_without_ddp = model @@ -65,7 +72,7 @@ def _get_iou_types(model): return iou_types -@torch.no_grad() +@torch.inference_mode() def evaluate(model, data_loader, device): n_threads = torch.get_num_threads() # FIXME remove this and make paste_masks_in_image run on the GPU @@ -73,24 +80,24 @@ def evaluate(model, data_loader, device): cpu_device = torch.device("cpu") model.eval() metric_logger = utils.MetricLogger(delimiter=" ") - header = 'Test:' + header = "Test:" coco = get_coco_api_from_dataset(data_loader.dataset) iou_types = _get_iou_types(model) coco_evaluator = CocoEvaluator(coco, iou_types) - for image, targets in metric_logger.log_every(data_loader, 100, header): - image = list(img.to(device) for img in image) - targets = [{k: v.to(device) for k, v in t.items()} for t in targets] + for images, targets in metric_logger.log_every(data_loader, 100, header): + images = list(img.to(device) for img in images) - torch.cuda.synchronize() + if torch.cuda.is_available(): + torch.cuda.synchronize() model_time = time.time() - outputs = model(image) + outputs = model(images) outputs = [{k: v.to(cpu_device) for k, v in t.items()} for t in outputs] model_time = time.time() - model_time - res = {target["image_id"].item(): output for target, output in zip(targets, outputs)} + res = {target["image_id"]: output for target, output in zip(targets, outputs)} evaluator_time = time.time() coco_evaluator.update(res) evaluator_time = time.time() - evaluator_time diff --git a/references/detection/group_by_aspect_ratio.py b/references/detection/group_by_aspect_ratio.py index 61694cd63a4..d12e14b540c 100644 --- a/references/detection/group_by_aspect_ratio.py +++ b/references/detection/group_by_aspect_ratio.py @@ -1,15 +1,22 @@ import bisect -from 
collections import defaultdict import copy -import numpy as np +import math +from collections import defaultdict +from itertools import chain, repeat +import numpy as np import torch import torch.utils.data +import torchvision +from PIL import Image from torch.utils.data.sampler import BatchSampler, Sampler from torch.utils.model_zoo import tqdm -import torchvision -from PIL import Image + +def _repeat_to_at_least(iterable, n): + repeat_times = math.ceil(n / len(iterable)) + repeated = chain.from_iterable(repeat(iterable, repeat_times)) + return list(repeated) class GroupedBatchSampler(BatchSampler): @@ -18,7 +25,7 @@ class GroupedBatchSampler(BatchSampler): It enforces that the batch only contain elements from the same group. It also tries to provide mini-batches which follows an ordering which is as close as possible to the ordering from the original sampler. - Arguments: + Args: sampler (Sampler): Base sampler. group_ids (list[int]): If the sampler produces indices in range [0, N), `group_ids` must be a list of `N` ints which contains the group id of each sample. @@ -26,12 +33,10 @@ class GroupedBatchSampler(BatchSampler): 0, i.e. they must be in the range [0, num_groups). batch_size (int): Size of mini-batch. """ + def __init__(self, sampler, group_ids, batch_size): if not isinstance(sampler, Sampler): - raise ValueError( - "sampler should be an instance of " - "torch.utils.data.Sampler, but got sampler={}".format(sampler) - ) + raise ValueError(f"sampler should be an instance of torch.utils.data.Sampler, but got sampler={sampler}") self.sampler = sampler self.group_ids = group_ids self.batch_size = batch_size @@ -58,13 +63,12 @@ def __iter__(self): expected_num_batches = len(self) num_remaining = expected_num_batches - num_batches if num_remaining > 0: - # for the remaining batches, take first the buffers with largest number + # for the remaining batches, take first the buffers with the largest number # of elements - for group_id, _ in sorted(buffer_per_group.items(), - key=lambda x: len(x[1]), reverse=True): + for group_id, _ in sorted(buffer_per_group.items(), key=lambda x: len(x[1]), reverse=True): remaining = self.batch_size - len(buffer_per_group[group_id]) - buffer_per_group[group_id].extend( - samples_per_group[group_id][:remaining]) + samples_from_group_id = _repeat_to_at_least(samples_per_group[group_id], remaining) + buffer_per_group[group_id].extend(samples_from_group_id[:remaining]) assert len(buffer_per_group[group_id]) == self.batch_size yield buffer_per_group[group_id] num_remaining -= 1 @@ -77,10 +81,12 @@ def __len__(self): def _compute_aspect_ratios_slow(dataset, indices=None): - print("Your dataset doesn't support the fast path for " - "computing the aspect ratios, so will iterate over " - "the full dataset and load every image instead. " - "This might take some time...") + print( + "Your dataset doesn't support the fast path for " + "computing the aspect ratios, so will iterate over " + "the full dataset and load every image instead. " + "This might take some time..." 
+ ) if indices is None: indices = range(len(dataset)) @@ -96,9 +102,12 @@ def __len__(self): sampler = SubsetSampler(indices) data_loader = torch.utils.data.DataLoader( - dataset, batch_size=1, sampler=sampler, + dataset, + batch_size=1, + sampler=sampler, num_workers=14, # you might want to increase it for faster processing - collate_fn=lambda x: x[0]) + collate_fn=lambda x: x[0], + ) aspect_ratios = [] with tqdm(total=len(dataset)) as pbar: for _i, (img, _) in enumerate(data_loader): @@ -182,6 +191,6 @@ def create_aspect_ratio_groups(dataset, k=0): # count number of elements per group counts = np.unique(groups, return_counts=True)[1] fbins = [0] + bins + [np.inf] - print("Using {} as bins for aspect ratio quantization".format(fbins)) - print("Count of instances per bin: {}".format(counts)) + print(f"Using {fbins} as bins for aspect ratio quantization") + print(f"Count of instances per bin: {counts}") return groups diff --git a/references/detection/presets.py b/references/detection/presets.py new file mode 100644 index 00000000000..e9b6d56c886 --- /dev/null +++ b/references/detection/presets.py @@ -0,0 +1,114 @@ +from collections import defaultdict + +import torch +import transforms as reference_transforms + + +def get_modules(use_v2): + # We need a protected import to avoid the V2 warning in case just V1 is used + if use_v2: + import torchvision.transforms.v2 + import torchvision.tv_tensors + + return torchvision.transforms.v2, torchvision.tv_tensors + else: + return reference_transforms, None + + +class DetectionPresetTrain: + # Note: this transform assumes that the input to forward() are always PIL + # images, regardless of the backend parameter. + def __init__( + self, + *, + data_augmentation, + hflip_prob=0.5, + mean=(123.0, 117.0, 104.0), + backend="pil", + use_v2=False, + ): + + T, tv_tensors = get_modules(use_v2) + + transforms = [] + backend = backend.lower() + if backend == "tv_tensor": + transforms.append(T.ToImage()) + elif backend == "tensor": + transforms.append(T.PILToTensor()) + elif backend != "pil": + raise ValueError(f"backend can be 'tv_tensor', 'tensor' or 'pil', but got {backend}") + + if data_augmentation == "hflip": + transforms += [T.RandomHorizontalFlip(p=hflip_prob)] + elif data_augmentation == "lsj": + transforms += [ + T.ScaleJitter(target_size=(1024, 1024), antialias=True), + # TODO: FixedSizeCrop below doesn't work on tensors! + reference_transforms.FixedSizeCrop(size=(1024, 1024), fill=mean), + T.RandomHorizontalFlip(p=hflip_prob), + ] + elif data_augmentation == "multiscale": + transforms += [ + T.RandomShortestSize(min_size=(480, 512, 544, 576, 608, 640, 672, 704, 736, 768, 800), max_size=1333), + T.RandomHorizontalFlip(p=hflip_prob), + ] + elif data_augmentation == "ssd": + fill = defaultdict(lambda: mean, {tv_tensors.Mask: 0}) if use_v2 else list(mean) + transforms += [ + T.RandomPhotometricDistort(), + T.RandomZoomOut(fill=fill), + T.RandomIoUCrop(), + T.RandomHorizontalFlip(p=hflip_prob), + ] + elif data_augmentation == "ssdlite": + transforms += [ + T.RandomIoUCrop(), + T.RandomHorizontalFlip(p=hflip_prob), + ] + else: + raise ValueError(f'Unknown data augmentation policy "{data_augmentation}"') + + if backend == "pil": + # Note: we could just convert to pure tensors even in v2. 
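+            # (In v2, ToImage wraps the input as a tv_tensors.Image, while the v1
+            # PILToTensor path below produces a plain uint8 tensor.)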
+ transforms += [T.ToImage() if use_v2 else T.PILToTensor()] + + transforms += [T.ToDtype(torch.float, scale=True)] + + if use_v2: + transforms += [ + T.ConvertBoundingBoxFormat(tv_tensors.BoundingBoxFormat.XYXY), + T.SanitizeBoundingBoxes(), + T.ToPureTensor(), + ] + + self.transforms = T.Compose(transforms) + + def __call__(self, img, target): + return self.transforms(img, target) + + +class DetectionPresetEval: + def __init__(self, backend="pil", use_v2=False): + T, _ = get_modules(use_v2) + transforms = [] + backend = backend.lower() + if backend == "pil": + # Note: we could just convert to pure tensors even in v2? + transforms += [T.ToImage() if use_v2 else T.PILToTensor()] + elif backend == "tensor": + transforms += [T.PILToTensor()] + elif backend == "tv_tensor": + transforms += [T.ToImage()] + else: + raise ValueError(f"backend can be 'tv_tensor', 'tensor' or 'pil', but got {backend}") + + transforms += [T.ToDtype(torch.float, scale=True)] + + if use_v2: + transforms += [T.ToPureTensor()] + + self.transforms = T.Compose(transforms) + + def __call__(self, img, target): + return self.transforms(img, target) diff --git a/references/detection/train.py b/references/detection/train.py index 3b928611b4f..6a9ffb0af4d 100644 --- a/references/detection/train.py +++ b/references/detection/train.py @@ -8,62 +8,208 @@ The default hyperparameters are tuned for training on 8 gpus and 2 images per gpu. --lr 0.02 --batch-size 2 --world-size 8 If you use different number of gpus, the learning rate should be changed to 0.02/8*$NGPU. + +On top of that, for training Faster/Mask R-CNN, the default hyperparameters are + --epochs 26 --lr-steps 16 22 --aspect-ratio-group-factor 3 + +Also, if you train Keypoint R-CNN, the default hyperparameters are + --epochs 46 --lr-steps 36 43 --aspect-ratio-group-factor 3 +Because the number of images is smaller in the person keypoint subset of COCO, +the number of epochs should be adapted so that we have the same number of iterations. 
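+For example, COCO's person-keypoint training subset has roughly 64k images versus
+roughly 118k images for the full detection set, so 46 epochs amount to about the
+same number of iterations as 26 epochs on the full set.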
""" import datetime import os import time +import presets import torch import torch.utils.data -from torch import nn import torchvision import torchvision.models.detection import torchvision.models.detection.mask_rcnn +import utils +from coco_utils import get_coco +from engine import evaluate, train_one_epoch +from group_by_aspect_ratio import create_aspect_ratio_groups, GroupedBatchSampler +from torchvision.transforms import InterpolationMode +from transforms import SimpleCopyPaste + + +def copypaste_collate_fn(batch): + copypaste = SimpleCopyPaste(blending=True, resize_interpolation=InterpolationMode.BILINEAR) + return copypaste(*utils.collate_fn(batch)) + + +def get_dataset(is_train, args): + image_set = "train" if is_train else "val" + num_classes, mode = {"coco": (91, "instances"), "coco_kp": (2, "person_keypoints")}[args.dataset] + with_masks = "mask" in args.model + ds = get_coco( + root=args.data_path, + image_set=image_set, + transforms=get_transform(is_train, args), + mode=mode, + use_v2=args.use_v2, + with_masks=with_masks, + ) + return ds, num_classes -from coco_utils import get_coco, get_coco_kp -from group_by_aspect_ratio import GroupedBatchSampler, create_aspect_ratio_groups -from engine import train_one_epoch, evaluate +def get_transform(is_train, args): + if is_train: + return presets.DetectionPresetTrain( + data_augmentation=args.data_augmentation, backend=args.backend, use_v2=args.use_v2 + ) + elif args.weights and args.test_only: + weights = torchvision.models.get_weight(args.weights) + trans = weights.transforms() + return lambda img, target: (trans(img), target) + else: + return presets.DetectionPresetEval(backend=args.backend, use_v2=args.use_v2) -import utils -import transforms as T +def get_args_parser(add_help=True): + import argparse -def get_dataset(name, image_set, transform, data_path): - paths = { - "coco": (data_path, get_coco, 91), - "coco_kp": (data_path, get_coco_kp, 2) - } - p, ds_fn, num_classes = paths[name] + parser = argparse.ArgumentParser(description="PyTorch Detection Training", add_help=add_help) - ds = ds_fn(p, image_set=image_set, transforms=transform) - return ds, num_classes + parser.add_argument("--data-path", default="/datasets01/COCO/022719/", type=str, help="dataset path") + parser.add_argument( + "--dataset", + default="coco", + type=str, + help="dataset name. 
Use coco for object detection and instance segmentation and coco_kp for Keypoint detection",
+    )
+    parser.add_argument("--model", default="maskrcnn_resnet50_fpn", type=str, help="model name")
+    parser.add_argument("--device", default="cuda", type=str, help="device (use cuda or cpu; default: cuda)")
+    parser.add_argument(
+        "-b", "--batch-size", default=2, type=int, help="images per gpu, the total batch size is $NGPU x batch_size"
+    )
+    parser.add_argument("--epochs", default=26, type=int, metavar="N", help="number of total epochs to run")
+    parser.add_argument(
+        "-j", "--workers", default=4, type=int, metavar="N", help="number of data loading workers (default: 4)"
+    )
+    parser.add_argument("--opt", default="sgd", type=str, help="optimizer")
+    parser.add_argument(
+        "--lr",
+        default=0.02,
+        type=float,
+        help="initial learning rate, 0.02 is the default value for training on 8 gpus and 2 images_per_gpu",
+    )
+    parser.add_argument("--momentum", default=0.9, type=float, metavar="M", help="momentum")
+    parser.add_argument(
+        "--wd",
+        "--weight-decay",
+        default=1e-4,
+        type=float,
+        metavar="W",
+        help="weight decay (default: 1e-4)",
+        dest="weight_decay",
+    )
+    parser.add_argument(
+        "--norm-weight-decay",
+        default=None,
+        type=float,
+        help="weight decay for Normalization layers (default: None, same value as --wd)",
+    )
+    parser.add_argument(
+        "--lr-scheduler", default="multisteplr", type=str, help="name of lr scheduler (default: multisteplr)"
+    )
+    parser.add_argument(
+        "--lr-step-size", default=8, type=int, help="decrease lr every step-size epochs (multisteplr scheduler only)"
+    )
+    parser.add_argument(
+        "--lr-steps",
+        default=[16, 22],
+        nargs="+",
+        type=int,
+        help="epochs at which to decrease the lr (multisteplr scheduler only)",
+    )
+    parser.add_argument(
+        "--lr-gamma", default=0.1, type=float, help="decrease lr by a factor of lr-gamma (multisteplr scheduler only)"
+    )
+    parser.add_argument("--print-freq", default=20, type=int, help="print frequency")
+    parser.add_argument("--output-dir", default=".", type=str, help="path to save outputs")
+    parser.add_argument("--resume", default="", type=str, help="path of checkpoint")
+    parser.add_argument("--start_epoch", default=0, type=int, help="start epoch")
+    parser.add_argument("--aspect-ratio-group-factor", default=3, type=int)
+    parser.add_argument("--rpn-score-thresh", default=None, type=float, help="rpn score threshold for faster-rcnn")
+    parser.add_argument(
+        "--trainable-backbone-layers", default=None, type=int, help="number of trainable layers of backbone"
+    )
+    parser.add_argument(
+        "--data-augmentation", default="hflip", type=str, help="data augmentation policy (default: hflip)"
+    )
+    parser.add_argument(
+        "--sync-bn",
+        dest="sync_bn",
+        help="Use sync batch norm",
+        action="store_true",
+    )
+    parser.add_argument(
+        "--test-only",
+        dest="test_only",
+        help="Only test the model",
+        action="store_true",
+    )
+    parser.add_argument(
+        "--use-deterministic-algorithms", action="store_true", help="Forces the use of deterministic algorithms only."
+ ) -def get_transform(train): - transforms = [] - transforms.append(T.ToTensor()) - if train: - transforms.append(T.RandomHorizontalFlip(0.5)) - return T.Compose(transforms) + # distributed training parameters + parser.add_argument("--world-size", default=1, type=int, help="number of distributed processes") + parser.add_argument("--dist-url", default="env://", type=str, help="url used to set up distributed training") + parser.add_argument("--weights", default=None, type=str, help="the weights enum name to load") + parser.add_argument("--weights-backbone", default=None, type=str, help="the backbone weights enum name to load") + + # Mixed precision training parameters + parser.add_argument("--amp", action="store_true", help="Use torch.cuda.amp for mixed precision training") + + # Use CopyPaste augmentation training parameter + parser.add_argument( + "--use-copypaste", + action="store_true", + help="Use CopyPaste data augmentation. Works only with data-augmentation='lsj'.", + ) + + parser.add_argument("--backend", default="PIL", type=str.lower, help="PIL or tensor - case insensitive") + parser.add_argument("--use-v2", action="store_true", help="Use V2 transforms") + + return parser def main(args): + if args.backend.lower() == "tv_tensor" and not args.use_v2: + raise ValueError("Use --use-v2 if you want to use the tv_tensor backend.") + if args.dataset not in ("coco", "coco_kp"): + raise ValueError(f"Dataset should be coco or coco_kp, got {args.dataset}") + if "keypoint" in args.model and args.dataset != "coco_kp": + raise ValueError("Oops, if you want Keypoint detection, set --dataset coco_kp") + if args.dataset == "coco_kp" and args.use_v2: + raise ValueError("KeyPoint detection doesn't support V2 transforms yet") + + if args.output_dir: + utils.mkdir(args.output_dir) + utils.init_distributed_mode(args) print(args) device = torch.device(args.device) + if args.use_deterministic_algorithms: + torch.use_deterministic_algorithms(True) + # Data loading code print("Loading data") - dataset, num_classes = get_dataset(args.dataset, "train", get_transform(train=True), args.data_path) - dataset_test, _ = get_dataset(args.dataset, "val", get_transform(train=False), args.data_path) + dataset, num_classes = get_dataset(is_train=True, args=args) + dataset_test, _ = get_dataset(is_train=False, args=args) print("Creating data loaders") if args.distributed: train_sampler = torch.utils.data.distributed.DistributedSampler(dataset) - test_sampler = torch.utils.data.distributed.DistributedSampler(dataset_test) + test_sampler = torch.utils.data.distributed.DistributedSampler(dataset_test, shuffle=False) else: train_sampler = torch.utils.data.RandomSampler(dataset) test_sampler = torch.utils.data.SequentialSampler(dataset_test) @@ -72,119 +218,117 @@ def main(args): group_ids = create_aspect_ratio_groups(dataset, k=args.aspect_ratio_group_factor) train_batch_sampler = GroupedBatchSampler(train_sampler, group_ids, args.batch_size) else: - train_batch_sampler = torch.utils.data.BatchSampler( - train_sampler, args.batch_size, drop_last=True) + train_batch_sampler = torch.utils.data.BatchSampler(train_sampler, args.batch_size, drop_last=True) + + train_collate_fn = utils.collate_fn + if args.use_copypaste: + if args.data_augmentation != "lsj": + raise RuntimeError("SimpleCopyPaste algorithm currently only supports the 'lsj' data augmentation policies") + + train_collate_fn = copypaste_collate_fn data_loader = torch.utils.data.DataLoader( - dataset, batch_sampler=train_batch_sampler, num_workers=args.workers, - 
collate_fn=utils.collate_fn) + dataset, batch_sampler=train_batch_sampler, num_workers=args.workers, collate_fn=train_collate_fn + ) data_loader_test = torch.utils.data.DataLoader( - dataset_test, batch_size=1, - sampler=test_sampler, num_workers=args.workers, - collate_fn=utils.collate_fn) + dataset_test, batch_size=1, sampler=test_sampler, num_workers=args.workers, collate_fn=utils.collate_fn + ) print("Creating model") - model = torchvision.models.detection.__dict__[args.model](num_classes=num_classes, - pretrained=args.pretrained) + kwargs = {"trainable_backbone_layers": args.trainable_backbone_layers} + if args.data_augmentation in ["multiscale", "lsj"]: + kwargs["_skip_resize"] = True + if "rcnn" in args.model: + if args.rpn_score_thresh is not None: + kwargs["rpn_score_thresh"] = args.rpn_score_thresh + model = torchvision.models.get_model( + args.model, weights=args.weights, weights_backbone=args.weights_backbone, num_classes=num_classes, **kwargs + ) model.to(device) + if args.distributed and args.sync_bn: + model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model) model_without_ddp = model if args.distributed: model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu]) model_without_ddp = model.module - params = [p for p in model.parameters() if p.requires_grad] - optimizer = torch.optim.SGD( - params, lr=args.lr, momentum=args.momentum, weight_decay=args.weight_decay) + if args.norm_weight_decay is None: + parameters = [p for p in model.parameters() if p.requires_grad] + else: + param_groups = torchvision.ops._utils.split_normalization_params(model) + wd_groups = [args.norm_weight_decay, args.weight_decay] + parameters = [{"params": p, "weight_decay": w} for p, w in zip(param_groups, wd_groups) if p] + + opt_name = args.opt.lower() + if opt_name.startswith("sgd"): + optimizer = torch.optim.SGD( + parameters, + lr=args.lr, + momentum=args.momentum, + weight_decay=args.weight_decay, + nesterov="nesterov" in opt_name, + ) + elif opt_name == "adamw": + optimizer = torch.optim.AdamW(parameters, lr=args.lr, weight_decay=args.weight_decay) + else: + raise RuntimeError(f"Invalid optimizer {args.opt}. Only SGD and AdamW are supported.") + + scaler = torch.cuda.amp.GradScaler() if args.amp else None - # lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=args.lr_step_size, gamma=args.lr_gamma) - lr_scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=args.lr_steps, gamma=args.lr_gamma) + args.lr_scheduler = args.lr_scheduler.lower() + if args.lr_scheduler == "multisteplr": + lr_scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=args.lr_steps, gamma=args.lr_gamma) + elif args.lr_scheduler == "cosineannealinglr": + lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=args.epochs) + else: + raise RuntimeError( + f"Invalid lr scheduler '{args.lr_scheduler}'. Only MultiStepLR and CosineAnnealingLR are supported." 
+ ) if args.resume: - checkpoint = torch.load(args.resume, map_location='cpu') - model_without_ddp.load_state_dict(checkpoint['model']) - optimizer.load_state_dict(checkpoint['optimizer']) - lr_scheduler.load_state_dict(checkpoint['lr_scheduler']) + checkpoint = torch.load(args.resume, map_location="cpu", weights_only=True) + model_without_ddp.load_state_dict(checkpoint["model"]) + optimizer.load_state_dict(checkpoint["optimizer"]) + lr_scheduler.load_state_dict(checkpoint["lr_scheduler"]) + args.start_epoch = checkpoint["epoch"] + 1 + if args.amp: + scaler.load_state_dict(checkpoint["scaler"]) if args.test_only: + torch.backends.cudnn.deterministic = True evaluate(model, data_loader_test, device=device) return print("Start training") start_time = time.time() - for epoch in range(args.epochs): + for epoch in range(args.start_epoch, args.epochs): if args.distributed: train_sampler.set_epoch(epoch) - train_one_epoch(model, optimizer, data_loader, device, epoch, args.print_freq) + train_one_epoch(model, optimizer, data_loader, device, epoch, args.print_freq, scaler) lr_scheduler.step() if args.output_dir: - utils.save_on_master({ - 'model': model_without_ddp.state_dict(), - 'optimizer': optimizer.state_dict(), - 'lr_scheduler': lr_scheduler.state_dict(), - 'args': args}, - os.path.join(args.output_dir, 'model_{}.pth'.format(epoch))) + checkpoint = { + "model": model_without_ddp.state_dict(), + "optimizer": optimizer.state_dict(), + "lr_scheduler": lr_scheduler.state_dict(), + "args": args, + "epoch": epoch, + } + if args.amp: + checkpoint["scaler"] = scaler.state_dict() + utils.save_on_master(checkpoint, os.path.join(args.output_dir, f"model_{epoch}.pth")) + utils.save_on_master(checkpoint, os.path.join(args.output_dir, "checkpoint.pth")) # evaluate after every epoch evaluate(model, data_loader_test, device=device) total_time = time.time() - start_time total_time_str = str(datetime.timedelta(seconds=int(total_time))) - print('Training time {}'.format(total_time_str)) + print(f"Training time {total_time_str}") if __name__ == "__main__": - import argparse - parser = argparse.ArgumentParser( - description=__doc__) - - parser.add_argument('--data-path', default='/datasets01/COCO/022719/', help='dataset') - parser.add_argument('--dataset', default='coco', help='dataset') - parser.add_argument('--model', default='maskrcnn_resnet50_fpn', help='model') - parser.add_argument('--device', default='cuda', help='device') - parser.add_argument('-b', '--batch-size', default=2, type=int, - help='images per gpu, the total batch size is $NGPU x batch_size') - parser.add_argument('--epochs', default=13, type=int, metavar='N', - help='number of total epochs to run') - parser.add_argument('-j', '--workers', default=4, type=int, metavar='N', - help='number of data loading workers (default: 4)') - parser.add_argument('--lr', default=0.02, type=float, - help='initial learning rate, 0.02 is the default value for training ' - 'on 8 gpus and 2 images_per_gpu') - parser.add_argument('--momentum', default=0.9, type=float, metavar='M', - help='momentum') - parser.add_argument('--wd', '--weight-decay', default=1e-4, type=float, - metavar='W', help='weight decay (default: 1e-4)', - dest='weight_decay') - parser.add_argument('--lr-step-size', default=8, type=int, help='decrease lr every step-size epochs') - parser.add_argument('--lr-steps', default=[8, 11], nargs='+', type=int, help='decrease lr every step-size epochs') - parser.add_argument('--lr-gamma', default=0.1, type=float, help='decrease lr by a factor of lr-gamma') - 
parser.add_argument('--print-freq', default=20, type=int, help='print frequency') - parser.add_argument('--output-dir', default='.', help='path where to save') - parser.add_argument('--resume', default='', help='resume from checkpoint') - parser.add_argument('--aspect-ratio-group-factor', default=0, type=int) - parser.add_argument( - "--test-only", - dest="test_only", - help="Only test the model", - action="store_true", - ) - parser.add_argument( - "--pretrained", - dest="pretrained", - help="Use pre-trained models from the modelzoo", - action="store_true", - ) - - # distributed training parameters - parser.add_argument('--world-size', default=1, type=int, - help='number of distributed processes') - parser.add_argument('--dist-url', default='env://', help='url used to set up distributed training') - - args = parser.parse_args() - - if args.output_dir: - utils.mkdir(args.output_dir) - + args = get_args_parser().parse_args() main(args) diff --git a/references/detection/transforms.py b/references/detection/transforms.py index 73efc92bdef..e07ccfc9921 100644 --- a/references/detection/transforms.py +++ b/references/detection/transforms.py @@ -1,7 +1,10 @@ -import random -import torch +from typing import Dict, List, Optional, Tuple, Union -from torchvision.transforms import functional as F +import torch +import torchvision +from torch import nn, Tensor +from torchvision import ops +from torchvision.transforms import functional as F, InterpolationMode, transforms as T def _flip_coco_person_keypoints(kps, width): @@ -14,7 +17,7 @@ def _flip_coco_person_keypoints(kps, width): return flipped_data -class Compose(object): +class Compose: def __init__(self, transforms): self.transforms = transforms @@ -24,27 +27,575 @@ def __call__(self, image, target): return image, target -class RandomHorizontalFlip(object): - def __init__(self, prob): - self.prob = prob +class RandomHorizontalFlip(T.RandomHorizontalFlip): + def forward( + self, image: Tensor, target: Optional[Dict[str, Tensor]] = None + ) -> Tuple[Tensor, Optional[Dict[str, Tensor]]]: + if torch.rand(1) < self.p: + image = F.hflip(image) + if target is not None: + _, _, width = F.get_dimensions(image) + target["boxes"][:, [0, 2]] = width - target["boxes"][:, [2, 0]] + if "masks" in target: + target["masks"] = target["masks"].flip(-1) + if "keypoints" in target: + keypoints = target["keypoints"] + keypoints = _flip_coco_person_keypoints(keypoints, width) + target["keypoints"] = keypoints + return image, target - def __call__(self, image, target): - if random.random() < self.prob: - height, width = image.shape[-2:] - image = image.flip(-1) - bbox = target["boxes"] - bbox[:, [0, 2]] = width - bbox[:, [2, 0]] - target["boxes"] = bbox + +class PILToTensor(nn.Module): + def forward( + self, image: Tensor, target: Optional[Dict[str, Tensor]] = None + ) -> Tuple[Tensor, Optional[Dict[str, Tensor]]]: + image = F.pil_to_tensor(image) + return image, target + + +class ToDtype(nn.Module): + def __init__(self, dtype: torch.dtype, scale: bool = False) -> None: + super().__init__() + self.dtype = dtype + self.scale = scale + + def forward( + self, image: Tensor, target: Optional[Dict[str, Tensor]] = None + ) -> Tuple[Tensor, Optional[Dict[str, Tensor]]]: + if not self.scale: + return image.to(dtype=self.dtype), target + image = F.convert_image_dtype(image, self.dtype) + return image, target + + +class RandomIoUCrop(nn.Module): + def __init__( + self, + min_scale: float = 0.3, + max_scale: float = 1.0, + min_aspect_ratio: float = 0.5, + max_aspect_ratio: float = 
2.0, + sampler_options: Optional[List[float]] = None, + trials: int = 40, + ): + super().__init__() + # Configuration similar to https://github.com/weiliu89/caffe/blob/ssd/examples/ssd/ssd_coco.py#L89-L174 + self.min_scale = min_scale + self.max_scale = max_scale + self.min_aspect_ratio = min_aspect_ratio + self.max_aspect_ratio = max_aspect_ratio + if sampler_options is None: + sampler_options = [0.0, 0.1, 0.3, 0.5, 0.7, 0.9, 1.0] + self.options = sampler_options + self.trials = trials + + def forward( + self, image: Tensor, target: Optional[Dict[str, Tensor]] = None + ) -> Tuple[Tensor, Optional[Dict[str, Tensor]]]: + if target is None: + raise ValueError("The targets can't be None for this transform.") + + if isinstance(image, torch.Tensor): + if image.ndimension() not in {2, 3}: + raise ValueError(f"image should be 2/3 dimensional. Got {image.ndimension()} dimensions.") + elif image.ndimension() == 2: + image = image.unsqueeze(0) + + _, orig_h, orig_w = F.get_dimensions(image) + + while True: + # sample an option + idx = int(torch.randint(low=0, high=len(self.options), size=(1,))) + min_jaccard_overlap = self.options[idx] + if min_jaccard_overlap >= 1.0: # a value larger than 1 encodes the leave as-is option + return image, target + + for _ in range(self.trials): + # check the aspect ratio limitations + r = self.min_scale + (self.max_scale - self.min_scale) * torch.rand(2) + new_w = int(orig_w * r[0]) + new_h = int(orig_h * r[1]) + aspect_ratio = new_w / new_h + if not (self.min_aspect_ratio <= aspect_ratio <= self.max_aspect_ratio): + continue + + # check for 0 area crops + r = torch.rand(2) + left = int((orig_w - new_w) * r[0]) + top = int((orig_h - new_h) * r[1]) + right = left + new_w + bottom = top + new_h + if left == right or top == bottom: + continue + + # check for any valid boxes with centers within the crop area + cx = 0.5 * (target["boxes"][:, 0] + target["boxes"][:, 2]) + cy = 0.5 * (target["boxes"][:, 1] + target["boxes"][:, 3]) + is_within_crop_area = (left < cx) & (cx < right) & (top < cy) & (cy < bottom) + if not is_within_crop_area.any(): + continue + + # check at least 1 box with jaccard limitations + boxes = target["boxes"][is_within_crop_area] + ious = torchvision.ops.boxes.box_iou( + boxes, torch.tensor([[left, top, right, bottom]], dtype=boxes.dtype, device=boxes.device) + ) + if ious.max() < min_jaccard_overlap: + continue + + # keep only valid boxes and perform cropping + target["boxes"] = boxes + target["labels"] = target["labels"][is_within_crop_area] + target["boxes"][:, 0::2] -= left + target["boxes"][:, 1::2] -= top + target["boxes"][:, 0::2].clamp_(min=0, max=new_w) + target["boxes"][:, 1::2].clamp_(min=0, max=new_h) + image = F.crop(image, top, left, new_h, new_w) + + return image, target + + +class RandomZoomOut(nn.Module): + def __init__( + self, fill: Optional[List[float]] = None, side_range: Tuple[float, float] = (1.0, 4.0), p: float = 0.5 + ): + super().__init__() + if fill is None: + fill = [0.0, 0.0, 0.0] + self.fill = fill + self.side_range = side_range + if side_range[0] < 1.0 or side_range[0] > side_range[1]: + raise ValueError(f"Invalid canvas side range provided {side_range}.") + self.p = p + + @torch.jit.unused + def _get_fill_value(self, is_pil): + # type: (bool) -> int + # We fake the type to make it work on JIT + return tuple(int(x) for x in self.fill) if is_pil else 0 + + def forward( + self, image: Tensor, target: Optional[Dict[str, Tensor]] = None + ) -> Tuple[Tensor, Optional[Dict[str, Tensor]]]: + if isinstance(image, torch.Tensor): 
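+            # only 2D (H, W) or 3D (C, H, W) tensors are supported;
+            # 2D inputs get a channel dimension added below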
+            if image.ndimension() not in {2, 3}:
+                raise ValueError(f"image should be 2/3 dimensional. Got {image.ndimension()} dimensions.")
+            elif image.ndimension() == 2:
+                image = image.unsqueeze(0)
+
+        if torch.rand(1) >= self.p:
+            return image, target
+
+        _, orig_h, orig_w = F.get_dimensions(image)
+
+        r = self.side_range[0] + torch.rand(1) * (self.side_range[1] - self.side_range[0])
+        canvas_width = int(orig_w * r)
+        canvas_height = int(orig_h * r)
+
+        r = torch.rand(2)
+        left = int((canvas_width - orig_w) * r[0])
+        top = int((canvas_height - orig_h) * r[1])
+        right = canvas_width - (left + orig_w)
+        bottom = canvas_height - (top + orig_h)
+
+        if torch.jit.is_scripting():
+            fill = 0
+        else:
+            fill = self._get_fill_value(F._is_pil_image(image))
+
+        image = F.pad(image, [left, top, right, bottom], fill=fill)
+        if isinstance(image, torch.Tensor):
+            # PyTorch's pad supports only integers on fill. So we need to overwrite the colour
+            v = torch.tensor(self.fill, device=image.device, dtype=image.dtype).view(-1, 1, 1)
+            image[..., :top, :] = image[..., :, :left] = image[..., (top + orig_h) :, :] = image[
+                ..., :, (left + orig_w) :
+            ] = v
+
+        if target is not None:
+            target["boxes"][:, 0::2] += left
+            target["boxes"][:, 1::2] += top
+
+        return image, target
+
+
+class RandomPhotometricDistort(nn.Module):
+    def __init__(
+        self,
+        contrast: Tuple[float, float] = (0.5, 1.5),
+        saturation: Tuple[float, float] = (0.5, 1.5),
+        hue: Tuple[float, float] = (-0.05, 0.05),
+        brightness: Tuple[float, float] = (0.875, 1.125),
+        p: float = 0.5,
+    ):
+        super().__init__()
+        self._brightness = T.ColorJitter(brightness=brightness)
+        self._contrast = T.ColorJitter(contrast=contrast)
+        self._hue = T.ColorJitter(hue=hue)
+        self._saturation = T.ColorJitter(saturation=saturation)
+        self.p = p
+
+    def forward(
+        self, image: Tensor, target: Optional[Dict[str, Tensor]] = None
+    ) -> Tuple[Tensor, Optional[Dict[str, Tensor]]]:
+        if isinstance(image, torch.Tensor):
+            if image.ndimension() not in {2, 3}:
+                raise ValueError(f"image should be 2/3 dimensional. Got {image.ndimension()} dimensions.")
+            elif image.ndimension() == 2:
+                image = image.unsqueeze(0)
+
+        r = torch.rand(7)
+
+        if r[0] < self.p:
+            image = self._brightness(image)
+
+        contrast_before = r[1] < 0.5
+        if contrast_before:
+            if r[2] < self.p:
+                image = self._contrast(image)
+
+        if r[3] < self.p:
+            image = self._saturation(image)
+
+        if r[4] < self.p:
+            image = self._hue(image)
+
+        if not contrast_before:
+            if r[5] < self.p:
+                image = self._contrast(image)
+
+        if r[6] < self.p:
+            channels, _, _ = F.get_dimensions(image)
+            permutation = torch.randperm(channels)
+
+            is_pil = F._is_pil_image(image)
+            if is_pil:
+                image = F.pil_to_tensor(image)
+                image = F.convert_image_dtype(image)
+            image = image[..., permutation, :, :]
+            if is_pil:
+                image = F.to_pil_image(image)
+
+        return image, target
+
+
+class ScaleJitter(nn.Module):
+    """Randomly resizes the image and its bounding boxes within the specified scale range.
+    The class implements the Scale Jitter augmentation as described in the paper
+    `"Simple Copy-Paste is a Strong Data Augmentation Method for Instance Segmentation" <https://arxiv.org/abs/2012.07177>`_.
+
+    Args:
+        target_size (tuple of ints): The target size for the transform provided in (height, width) format.
+        scale_range (tuple of floats): scaling factor interval, e.g. (a, b), then scale is randomly sampled from the
+            range a <= scale <= b.
+        interpolation (InterpolationMode): Desired interpolation enum defined by
+            :class:`torchvision.transforms.InterpolationMode`. 
Default is ``InterpolationMode.BILINEAR``. + """ + + def __init__( + self, + target_size: Tuple[int, int], + scale_range: Tuple[float, float] = (0.1, 2.0), + interpolation: InterpolationMode = InterpolationMode.BILINEAR, + antialias=True, + ): + super().__init__() + self.target_size = target_size + self.scale_range = scale_range + self.interpolation = interpolation + self.antialias = antialias + + def forward( + self, image: Tensor, target: Optional[Dict[str, Tensor]] = None + ) -> Tuple[Tensor, Optional[Dict[str, Tensor]]]: + if isinstance(image, torch.Tensor): + if image.ndimension() not in {2, 3}: + raise ValueError(f"image should be 2/3 dimensional. Got {image.ndimension()} dimensions.") + elif image.ndimension() == 2: + image = image.unsqueeze(0) + + _, orig_height, orig_width = F.get_dimensions(image) + + scale = self.scale_range[0] + torch.rand(1) * (self.scale_range[1] - self.scale_range[0]) + r = min(self.target_size[1] / orig_height, self.target_size[0] / orig_width) * scale + new_width = int(orig_width * r) + new_height = int(orig_height * r) + + image = F.resize(image, [new_height, new_width], interpolation=self.interpolation, antialias=self.antialias) + + if target is not None: + target["boxes"][:, 0::2] *= new_width / orig_width + target["boxes"][:, 1::2] *= new_height / orig_height if "masks" in target: - target["masks"] = target["masks"].flip(-1) - if "keypoints" in target: - keypoints = target["keypoints"] - keypoints = _flip_coco_person_keypoints(keypoints, width) - target["keypoints"] = keypoints + target["masks"] = F.resize( + target["masks"], + [new_height, new_width], + interpolation=InterpolationMode.NEAREST, + antialias=self.antialias, + ) + return image, target -class ToTensor(object): - def __call__(self, image, target): - image = F.to_tensor(image) +class FixedSizeCrop(nn.Module): + def __init__(self, size, fill=0, padding_mode="constant"): + super().__init__() + size = tuple(T._setup_size(size, error_msg="Please provide only two dimensions (h, w) for size.")) + self.crop_height = size[0] + self.crop_width = size[1] + self.fill = fill # TODO: Fill is currently respected only on PIL. Apply tensor patch. 
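+        # padding_mode is forwarded to F.pad; torchvision's pad supports
+        # "constant", "edge", "reflect" and "symmetric"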
+ self.padding_mode = padding_mode + + def _pad(self, img, target, padding): + # Taken from the functional_tensor.py pad + if isinstance(padding, int): + pad_left = pad_right = pad_top = pad_bottom = padding + elif len(padding) == 1: + pad_left = pad_right = pad_top = pad_bottom = padding[0] + elif len(padding) == 2: + pad_left = pad_right = padding[0] + pad_top = pad_bottom = padding[1] + else: + pad_left = padding[0] + pad_top = padding[1] + pad_right = padding[2] + pad_bottom = padding[3] + + padding = [pad_left, pad_top, pad_right, pad_bottom] + img = F.pad(img, padding, self.fill, self.padding_mode) + if target is not None: + target["boxes"][:, 0::2] += pad_left + target["boxes"][:, 1::2] += pad_top + if "masks" in target: + target["masks"] = F.pad(target["masks"], padding, 0, "constant") + + return img, target + + def _crop(self, img, target, top, left, height, width): + img = F.crop(img, top, left, height, width) + if target is not None: + boxes = target["boxes"] + boxes[:, 0::2] -= left + boxes[:, 1::2] -= top + boxes[:, 0::2].clamp_(min=0, max=width) + boxes[:, 1::2].clamp_(min=0, max=height) + + is_valid = (boxes[:, 0] < boxes[:, 2]) & (boxes[:, 1] < boxes[:, 3]) + + target["boxes"] = boxes[is_valid] + target["labels"] = target["labels"][is_valid] + if "masks" in target: + target["masks"] = F.crop(target["masks"][is_valid], top, left, height, width) + + return img, target + + def forward(self, img, target=None): + _, height, width = F.get_dimensions(img) + new_height = min(height, self.crop_height) + new_width = min(width, self.crop_width) + + if new_height != height or new_width != width: + offset_height = max(height - self.crop_height, 0) + offset_width = max(width - self.crop_width, 0) + + r = torch.rand(1) + top = int(offset_height * r) + left = int(offset_width * r) + + img, target = self._crop(img, target, top, left, new_height, new_width) + + pad_bottom = max(self.crop_height - new_height, 0) + pad_right = max(self.crop_width - new_width, 0) + if pad_bottom != 0 or pad_right != 0: + img, target = self._pad(img, target, [0, 0, pad_right, pad_bottom]) + + return img, target + + +class RandomShortestSize(nn.Module): + def __init__( + self, + min_size: Union[List[int], Tuple[int], int], + max_size: int, + interpolation: InterpolationMode = InterpolationMode.BILINEAR, + ): + super().__init__() + self.min_size = [min_size] if isinstance(min_size, int) else list(min_size) + self.max_size = max_size + self.interpolation = interpolation + + def forward( + self, image: Tensor, target: Optional[Dict[str, Tensor]] = None + ) -> Tuple[Tensor, Optional[Dict[str, Tensor]]]: + _, orig_height, orig_width = F.get_dimensions(image) + + min_size = self.min_size[torch.randint(len(self.min_size), (1,)).item()] + r = min(min_size / min(orig_height, orig_width), self.max_size / max(orig_height, orig_width)) + + new_width = int(orig_width * r) + new_height = int(orig_height * r) + + image = F.resize(image, [new_height, new_width], interpolation=self.interpolation) + + if target is not None: + target["boxes"][:, 0::2] *= new_width / orig_width + target["boxes"][:, 1::2] *= new_height / orig_height + if "masks" in target: + target["masks"] = F.resize( + target["masks"], [new_height, new_width], interpolation=InterpolationMode.NEAREST + ) + return image, target + + +def _copy_paste( + image: torch.Tensor, + target: Dict[str, Tensor], + paste_image: torch.Tensor, + paste_target: Dict[str, Tensor], + blending: bool = True, + resize_interpolation: F.InterpolationMode = F.InterpolationMode.BILINEAR, +) 
-> Tuple[torch.Tensor, Dict[str, Tensor]]:
+
+    # Random selection of paste targets:
+    num_masks = len(paste_target["masks"])
+
+    if num_masks < 1:
+        # Such a degenerate case with num_masks=0 can happen with LSJ
+        # Let's just return (image, target)
+        return image, target
+
+    # We have to please TorchScript by explicitly specifying the dtype as torch.long
+    random_selection = torch.randint(0, num_masks, (num_masks,), device=paste_image.device)
+    random_selection = torch.unique(random_selection).to(torch.long)
+
+    paste_masks = paste_target["masks"][random_selection]
+    paste_boxes = paste_target["boxes"][random_selection]
+    paste_labels = paste_target["labels"][random_selection]
+
+    masks = target["masks"]
+
+    # We resize source and paste data if they have different sizes
+    # This is something we introduced here as originally the algorithm works
+    # on equal-sized data (for example, coming from LSJ data augmentations)
+    size1 = image.shape[-2:]
+    size2 = paste_image.shape[-2:]
+    if size1 != size2:
+        paste_image = F.resize(paste_image, size1, interpolation=resize_interpolation)
+        paste_masks = F.resize(paste_masks, size1, interpolation=F.InterpolationMode.NEAREST)
+        # resize bboxes:
+        ratios = torch.tensor((size1[1] / size2[1], size1[0] / size2[0]), device=paste_boxes.device)
+        paste_boxes = paste_boxes.view(-1, 2, 2).mul(ratios).view(paste_boxes.shape)
+
+    paste_alpha_mask = paste_masks.sum(dim=0) > 0
+
+    if blending:
+        paste_alpha_mask = F.gaussian_blur(
+            paste_alpha_mask.unsqueeze(0),
+            kernel_size=(5, 5),
+            sigma=[
+                2.0,
+            ],
+        )
+
+    # Copy-paste images:
+    image = (image * (~paste_alpha_mask)) + (paste_image * paste_alpha_mask)
+
+    # Copy-paste masks:
+    masks = masks * (~paste_alpha_mask)
+    non_all_zero_masks = masks.sum((-1, -2)) > 0
+    masks = masks[non_all_zero_masks]
+
+    # Do a shallow copy of the target dict
+    out_target = {k: v for k, v in target.items()}
+
+    out_target["masks"] = torch.cat([masks, paste_masks])
+
+    # Copy-paste boxes and labels
+    boxes = ops.masks_to_boxes(masks)
+    out_target["boxes"] = torch.cat([boxes, paste_boxes])
+
+    labels = target["labels"][non_all_zero_masks]
+    out_target["labels"] = torch.cat([labels, paste_labels])
+
+    # Update additional optional keys: area and iscrowd, if they exist
+    if "area" in target:
+        out_target["area"] = out_target["masks"].sum((-1, -2)).to(torch.float32)
+
+    if "iscrowd" in target and "iscrowd" in paste_target:
+        # target['iscrowd'] size can differ from mask size (non_all_zero_masks),
+        # for example if a previous transform geometrically modified masks/boxes/labels but
+        # did not update "iscrowd"
+        if len(target["iscrowd"]) == len(non_all_zero_masks):
+            iscrowd = target["iscrowd"][non_all_zero_masks]
+            paste_iscrowd = paste_target["iscrowd"][random_selection]
+            out_target["iscrowd"] = torch.cat([iscrowd, paste_iscrowd])
+
+    # Check for degenerate boxes and remove them
+    boxes = out_target["boxes"]
+    degenerate_boxes = boxes[:, 2:] <= boxes[:, :2]
+    if degenerate_boxes.any():
+        valid_targets = ~degenerate_boxes.any(dim=1)
+
+        out_target["boxes"] = boxes[valid_targets]
+        out_target["masks"] = out_target["masks"][valid_targets]
+        out_target["labels"] = out_target["labels"][valid_targets]
+
+        if "area" in out_target:
+            out_target["area"] = out_target["area"][valid_targets]
+        if "iscrowd" in out_target and len(out_target["iscrowd"]) == len(valid_targets):
+            out_target["iscrowd"] = out_target["iscrowd"][valid_targets]
+
+    return image, out_target
+
+
+class SimpleCopyPaste(torch.nn.Module):
+    def __init__(self, blending=True, resize_interpolation=F.InterpolationMode.BILINEAR):
+        super().__init__()
+        self.resize_interpolation = resize_interpolation
+        self.blending = blending
+
+    def forward(
+        self, images: List[torch.Tensor], targets: List[Dict[str, Tensor]]
+    ) -> Tuple[List[torch.Tensor], List[Dict[str, Tensor]]]:
+        torch._assert(
+            isinstance(images, (list, tuple)) and all([isinstance(v, torch.Tensor) for v in images]),
+            "images should be a list of tensors",
+        )
+        torch._assert(
+            isinstance(targets, (list, tuple)) and len(images) == len(targets),
+            "targets should be a list of the same size as images",
+        )
+        for target in targets:
+            # Cannot check for instance of type dict inside torch.jit.script
+            # torch._assert(isinstance(target, dict), "targets item should be a dict")
+            for k in ["masks", "boxes", "labels"]:
+                torch._assert(k in target, f"Key {k} should be present in targets")
+                torch._assert(isinstance(target[k], torch.Tensor), f"Value for the key {k} should be a tensor")
+
+        # images = [t1, t2, ..., tN]
+        # Let's define paste_images as a shifted list of input images
+        # paste_images = [t2, t3, ..., tN, t1]
+        # FYI: in TF they mix data on the dataset level
+        images_rolled = images[-1:] + images[:-1]
+        targets_rolled = targets[-1:] + targets[:-1]
+
+        output_images: List[torch.Tensor] = []
+        output_targets: List[Dict[str, Tensor]] = []
+
+        for image, target, paste_image, paste_target in zip(images, targets, images_rolled, targets_rolled):
+            output_image, output_data = _copy_paste(
+                image,
+                target,
+                paste_image,
+                paste_target,
+                blending=self.blending,
+                resize_interpolation=self.resize_interpolation,
+            )
+            output_images.append(output_image)
+            output_targets.append(output_data)
+
+        return output_images, output_targets
+
+    def __repr__(self) -> str:
+        s = f"{self.__class__.__name__}(blending={self.blending}, resize_interpolation={self.resize_interpolation})"
+        return s
diff --git a/references/detection/utils.py b/references/detection/utils.py
index 0e8e8560118..f73915580f7 100644
--- a/references/detection/utils.py
+++ b/references/detection/utils.py
@@ -1,18 +1,14 @@
-from __future__ import print_function
-
-from collections import defaultdict, deque
 import datetime
-import pickle
+import errno
+import os
 import time
+from collections import defaultdict, deque
 
 import torch
 import torch.distributed as dist
-import errno
-import os
-
 
-class SmoothedValue(object):
+class SmoothedValue:
     """Track a series of values and provide access to smoothed values over a
     window or the global series average.
""" @@ -36,7 +32,7 @@ def synchronize_between_processes(self): """ if not is_dist_avail_and_initialized(): return - t = torch.tensor([self.count, self.total], dtype=torch.float64, device='cuda') + t = torch.tensor([self.count, self.total], dtype=torch.float64, device="cuda") dist.barrier() dist.all_reduce(t) t = t.tolist() @@ -67,11 +63,8 @@ def value(self): def __str__(self): return self.fmt.format( - median=self.median, - avg=self.avg, - global_avg=self.global_avg, - max=self.max, - value=self.value) + median=self.median, avg=self.avg, global_avg=self.global_avg, max=self.max, value=self.value + ) def all_gather(data): @@ -85,35 +78,8 @@ def all_gather(data): world_size = get_world_size() if world_size == 1: return [data] - - # serialized to a Tensor - buffer = pickle.dumps(data) - storage = torch.ByteStorage.from_buffer(buffer) - tensor = torch.ByteTensor(storage).to("cuda") - - # obtain Tensor size of each rank - local_size = torch.tensor([tensor.numel()], device="cuda") - size_list = [torch.tensor([0], device="cuda") for _ in range(world_size)] - dist.all_gather(size_list, local_size) - size_list = [int(size.item()) for size in size_list] - max_size = max(size_list) - - # receiving Tensor from all ranks - # we pad the tensor because torch all_gather does not support - # gathering tensors of different shapes - tensor_list = [] - for _ in size_list: - tensor_list.append(torch.empty((max_size,), dtype=torch.uint8, device="cuda")) - if local_size != max_size: - padding = torch.empty(size=(max_size - local_size,), dtype=torch.uint8, device="cuda") - tensor = torch.cat((tensor, padding), dim=0) - dist.all_gather(tensor_list, tensor) - - data_list = [] - for size, tensor in zip(size_list, tensor_list): - buffer = tensor.cpu().numpy().tobytes()[:size] - data_list.append(pickle.loads(buffer)) - + data_list = [None] * world_size + dist.all_gather_object(data_list, data) return data_list @@ -129,7 +95,7 @@ def reduce_dict(input_dict, average=True): world_size = get_world_size() if world_size < 2: return input_dict - with torch.no_grad(): + with torch.inference_mode(): names = [] values = [] # sort the keys so that they are consistent across processes @@ -144,7 +110,7 @@ def reduce_dict(input_dict, average=True): return reduced_dict -class MetricLogger(object): +class MetricLogger: def __init__(self, delimiter="\t"): self.meters = defaultdict(SmoothedValue) self.delimiter = delimiter @@ -161,15 +127,12 @@ def __getattr__(self, attr): return self.meters[attr] if attr in self.__dict__: return self.__dict__[attr] - raise AttributeError("'{}' object has no attribute '{}'".format( - type(self).__name__, attr)) + raise AttributeError(f"'{type(self).__name__}' object has no attribute '{attr}'") def __str__(self): loss_str = [] for name, meter in self.meters.items(): - loss_str.append( - "{}: {}".format(name, str(meter)) - ) + loss_str.append(f"{name}: {str(meter)}") return self.delimiter.join(loss_str) def synchronize_between_processes(self): @@ -182,31 +145,28 @@ def add_meter(self, name, meter): def log_every(self, iterable, print_freq, header=None): i = 0 if not header: - header = '' + header = "" start_time = time.time() end = time.time() - iter_time = SmoothedValue(fmt='{avg:.4f}') - data_time = SmoothedValue(fmt='{avg:.4f}') - space_fmt = ':' + str(len(str(len(iterable)))) + 'd' + iter_time = SmoothedValue(fmt="{avg:.4f}") + data_time = SmoothedValue(fmt="{avg:.4f}") + space_fmt = ":" + str(len(str(len(iterable)))) + "d" if torch.cuda.is_available(): - log_msg = self.delimiter.join([ - header, - 
'[{0' + space_fmt + '}/{1}]', - 'eta: {eta}', - '{meters}', - 'time: {time}', - 'data: {data}', - 'max mem: {memory:.0f}' - ]) + log_msg = self.delimiter.join( + [ + header, + "[{0" + space_fmt + "}/{1}]", + "eta: {eta}", + "{meters}", + "time: {time}", + "data: {data}", + "max mem: {memory:.0f}", + ] + ) else: - log_msg = self.delimiter.join([ - header, - '[{0' + space_fmt + '}/{1}]', - 'eta: {eta}', - '{meters}', - 'time: {time}', - 'data: {data}' - ]) + log_msg = self.delimiter.join( + [header, "[{0" + space_fmt + "}/{1}]", "eta: {eta}", "{meters}", "time: {time}", "data: {data}"] + ) MB = 1024.0 * 1024.0 for obj in iterable: data_time.update(time.time() - end) @@ -216,39 +176,34 @@ def log_every(self, iterable, print_freq, header=None): eta_seconds = iter_time.global_avg * (len(iterable) - i) eta_string = str(datetime.timedelta(seconds=int(eta_seconds))) if torch.cuda.is_available(): - print(log_msg.format( - i, len(iterable), eta=eta_string, - meters=str(self), - time=str(iter_time), data=str(data_time), - memory=torch.cuda.max_memory_allocated() / MB)) + print( + log_msg.format( + i, + len(iterable), + eta=eta_string, + meters=str(self), + time=str(iter_time), + data=str(data_time), + memory=torch.cuda.max_memory_allocated() / MB, + ) + ) else: - print(log_msg.format( - i, len(iterable), eta=eta_string, - meters=str(self), - time=str(iter_time), data=str(data_time))) + print( + log_msg.format( + i, len(iterable), eta=eta_string, meters=str(self), time=str(iter_time), data=str(data_time) + ) + ) i += 1 end = time.time() total_time = time.time() - start_time total_time_str = str(datetime.timedelta(seconds=int(total_time))) - print('{} Total time: {} ({:.4f} s / it)'.format( - header, total_time_str, total_time / len(iterable))) + print(f"{header} Total time: {total_time_str} ({total_time / len(iterable):.4f} s / it)") def collate_fn(batch): return tuple(zip(*batch)) -def warmup_lr_scheduler(optimizer, warmup_iters, warmup_factor): - - def f(x): - if x >= warmup_iters: - return 1 - alpha = float(x) / warmup_iters - return warmup_factor * (1 - alpha) + alpha - - return torch.optim.lr_scheduler.LambdaLR(optimizer, f) - - def mkdir(path): try: os.makedirs(path) @@ -262,10 +217,11 @@ def setup_for_distributed(is_master): This function disables printing when not in master process """ import builtins as __builtin__ + builtin_print = __builtin__.print def print(*args, **kwargs): - force = kwargs.pop('force', False) + force = kwargs.pop("force", False) if is_master or force: builtin_print(*args, **kwargs) @@ -302,25 +258,25 @@ def save_on_master(*args, **kwargs): def init_distributed_mode(args): - if 'RANK' in os.environ and 'WORLD_SIZE' in os.environ: + if "RANK" in os.environ and "WORLD_SIZE" in os.environ: args.rank = int(os.environ["RANK"]) - args.world_size = int(os.environ['WORLD_SIZE']) - args.gpu = int(os.environ['LOCAL_RANK']) - elif 'SLURM_PROCID' in os.environ: - args.rank = int(os.environ['SLURM_PROCID']) + args.world_size = int(os.environ["WORLD_SIZE"]) + args.gpu = int(os.environ["LOCAL_RANK"]) + elif "SLURM_PROCID" in os.environ: + args.rank = int(os.environ["SLURM_PROCID"]) args.gpu = args.rank % torch.cuda.device_count() else: - print('Not using distributed mode') + print("Not using distributed mode") args.distributed = False return args.distributed = True torch.cuda.set_device(args.gpu) - args.dist_backend = 'nccl' - print('| distributed init (rank {}): {}'.format( - args.rank, args.dist_url), flush=True) - torch.distributed.init_process_group(backend=args.dist_backend, 
init_method=args.dist_url, - world_size=args.world_size, rank=args.rank) + args.dist_backend = "nccl" + print(f"| distributed init (rank {args.rank}): {args.dist_url}", flush=True) + torch.distributed.init_process_group( + backend=args.dist_backend, init_method=args.dist_url, world_size=args.world_size, rank=args.rank + ) torch.distributed.barrier() setup_for_distributed(args.rank == 0) diff --git a/references/optical_flow/README.md b/references/optical_flow/README.md new file mode 100644 index 00000000000..6ad1d4079f7 --- /dev/null +++ b/references/optical_flow/README.md @@ -0,0 +1,72 @@ +# Optical flow reference training scripts + +This folder contains reference training scripts for optical flow. +They serve as a log of how to train specific models, so as to provide baseline +training and evaluation scripts to quickly bootstrap research. + + +### RAFT Large + +The RAFT large model was trained on Flying Chairs and then on Flying Things. +Both used 8 A100 GPUs and a batch size of 2 (so effective batch size is 16). The +rest of the hyper-parameters are exactly the same as the original RAFT training +recipe from https://github.com/princeton-vl/RAFT. The original recipe trains for +100000 updates (or steps) on each dataset - this corresponds to about 72 and 20 +epochs on Chairs and Things respectively: + +``` +num_epochs = ceil(num_steps / number_of_steps_per_epoch) + = ceil(num_steps / (num_samples / effective_batch_size)) +``` + +``` +torchrun --nproc_per_node 8 --nnodes 1 train.py \ + --dataset-root $dataset_root \ + --name $name_chairs \ + --model raft_large \ + --train-dataset chairs \ + --batch-size 2 \ + --lr 0.0004 \ + --weight-decay 0.0001 \ + --epochs 72 \ + --output-dir $chairs_dir +``` + +``` +torchrun --nproc_per_node 8 --nnodes 1 train.py \ + --dataset-root $dataset_root \ + --name $name_things \ + --model raft_large \ + --train-dataset things \ + --batch-size 2 \ + --lr 0.000125 \ + --weight-decay 0.0001 \ + --epochs 20 \ + --freeze-batch-norm \ + --output-dir $things_dir\ + --resume $chairs_dir/$name_chairs.pth +``` + + +### Evaluation + +``` +torchrun --nproc_per_node 1 --nnodes 1 train.py --val-dataset sintel --batch-size 1 --dataset-root $dataset_root --model raft_large --weights Raft_Large_Weights.C_T_SKHT_V2 +``` + +This should give an epe of about 1.3822 on the clean pass and 2.7161 on the +final pass of Sintel-train. Results may vary slightly depending on the batch +size and the number of GPUs. 
For the most accurate results use 1 GPU and +`--batch-size 1`: + +``` +Sintel val clean epe: 1.3822 1px: 0.9028 3px: 0.9573 5px: 0.9697 per_image_epe: 1.3822 f1: 4.0248 +Sintel val final epe: 2.7161 1px: 0.8528 3px: 0.9204 5px: 0.9392 per_image_epe: 2.7161 f1: 7.5964 +``` + +You can also evaluate on Kitti train: + +``` +torchrun --nproc_per_node 1 --nnodes 1 train.py --val-dataset kitti --batch-size 1 --dataset-root $dataset_root --model raft_large --weights Raft_Large_Weights.C_T_SKHT_V2 +Kitti val epe: 4.7968 1px: 0.6388 3px: 0.8197 5px: 0.8661 per_image_epe: 4.5118 f1: 16.0679 +``` diff --git a/references/optical_flow/presets.py b/references/optical_flow/presets.py new file mode 100644 index 00000000000..32d9542e692 --- /dev/null +++ b/references/optical_flow/presets.py @@ -0,0 +1,65 @@ +import torch +import transforms as T + + +class OpticalFlowPresetEval(torch.nn.Module): + def __init__(self): + super().__init__() + + self.transforms = T.Compose( + [ + T.PILToTensor(), + T.ConvertImageDtype(torch.float32), + T.Normalize(mean=0.5, std=0.5), # map [0, 1] into [-1, 1] + T.ValidateModelInput(), + ] + ) + + def forward(self, img1, img2, flow, valid): + return self.transforms(img1, img2, flow, valid) + + +class OpticalFlowPresetTrain(torch.nn.Module): + def __init__( + self, + *, + # RandomResizeAndCrop params + crop_size, + min_scale=-0.2, + max_scale=0.5, + stretch_prob=0.8, + # AsymmetricColorJitter params + brightness=0.4, + contrast=0.4, + saturation=0.4, + hue=0.5 / 3.14, + # Random[H,V]Flip params + asymmetric_jitter_prob=0.2, + do_flip=True, + ): + super().__init__() + + transforms = [ + T.PILToTensor(), + T.AsymmetricColorJitter( + brightness=brightness, contrast=contrast, saturation=saturation, hue=hue, p=asymmetric_jitter_prob + ), + T.RandomResizeAndCrop( + crop_size=crop_size, min_scale=min_scale, max_scale=max_scale, stretch_prob=stretch_prob + ), + ] + + if do_flip: + transforms += [T.RandomHorizontalFlip(p=0.5), T.RandomVerticalFlip(p=0.1)] + + transforms += [ + T.ConvertImageDtype(torch.float32), + T.Normalize(mean=0.5, std=0.5), # map [0, 1] into [-1, 1] + T.RandomErasing(max_erase=2), + T.MakeValidFlowMask(), + T.ValidateModelInput(), + ] + self.transforms = T.Compose(transforms) + + def forward(self, img1, img2, flow, valid): + return self.transforms(img1, img2, flow, valid) diff --git a/references/optical_flow/train.py b/references/optical_flow/train.py new file mode 100644 index 00000000000..7012ea6f810 --- /dev/null +++ b/references/optical_flow/train.py @@ -0,0 +1,389 @@ +import argparse +import warnings +from math import ceil +from pathlib import Path + +import torch +import torchvision.models.optical_flow +import utils +from presets import OpticalFlowPresetEval, OpticalFlowPresetTrain +from torchvision.datasets import FlyingChairs, FlyingThings3D, HD1K, KittiFlow, Sintel + + +def get_train_dataset(stage, dataset_root): + if stage == "chairs": + transforms = OpticalFlowPresetTrain(crop_size=(368, 496), min_scale=0.1, max_scale=1.0, do_flip=True) + return FlyingChairs(root=dataset_root, split="train", transforms=transforms) + elif stage == "things": + transforms = OpticalFlowPresetTrain(crop_size=(400, 720), min_scale=-0.4, max_scale=0.8, do_flip=True) + return FlyingThings3D(root=dataset_root, split="train", pass_name="both", transforms=transforms) + elif stage == "sintel_SKH": # S + K + H as from paper + crop_size = (368, 768) + transforms = OpticalFlowPresetTrain(crop_size=crop_size, min_scale=-0.2, max_scale=0.6, do_flip=True) + + things_clean = 
FlyingThings3D(root=dataset_root, split="train", pass_name="clean", transforms=transforms)
+        sintel = Sintel(root=dataset_root, split="train", pass_name="both", transforms=transforms)
+
+        kitti_transforms = OpticalFlowPresetTrain(crop_size=crop_size, min_scale=-0.3, max_scale=0.5, do_flip=True)
+        kitti = KittiFlow(root=dataset_root, split="train", transforms=kitti_transforms)
+
+        hd1k_transforms = OpticalFlowPresetTrain(crop_size=crop_size, min_scale=-0.5, max_scale=0.2, do_flip=True)
+        hd1k = HD1K(root=dataset_root, split="train", transforms=hd1k_transforms)
+
+        # As a future improvement, we could probably use a distributed sampler here
+        # The distribution is S(.71), T(.135), K(.135), H(.02)
+        return 100 * sintel + 200 * kitti + 5 * hd1k + things_clean
+    elif stage == "kitti":
+        transforms = OpticalFlowPresetTrain(
+            # resize and crop params
+            crop_size=(288, 960),
+            min_scale=-0.2,
+            max_scale=0.4,
+            stretch_prob=0,
+            # flip params
+            do_flip=False,
+            # jitter params
+            brightness=0.3,
+            contrast=0.3,
+            saturation=0.3,
+            hue=0.3 / 3.14,
+            asymmetric_jitter_prob=0,
+        )
+        return KittiFlow(root=dataset_root, split="train", transforms=transforms)
+    else:
+        raise ValueError(f"Unknown stage {stage}")
+
+
+@torch.no_grad()
+def _evaluate(model, args, val_dataset, *, padder_mode, num_flow_updates=None, batch_size=None, header=None):
+    """Helper function to compute various metrics (epe, etc.) for a model on a given dataset.
+
+    We process as many samples as possible with ddp, and process the rest on a single worker.
+    """
+    batch_size = batch_size or args.batch_size
+    device = torch.device(args.device)
+
+    model.eval()
+
+    if args.distributed:
+        sampler = torch.utils.data.distributed.DistributedSampler(val_dataset, shuffle=False, drop_last=True)
+    else:
+        sampler = torch.utils.data.SequentialSampler(val_dataset)
+
+    val_loader = torch.utils.data.DataLoader(
+        val_dataset,
+        sampler=sampler,
+        batch_size=batch_size,
+        pin_memory=True,
+        num_workers=args.workers,
+    )
+
+    num_flow_updates = num_flow_updates or args.num_flow_updates
+
+    def inner_loop(blob):
+        if blob[0].dim() == 3:
+            # input is not batched, so we add an extra dim for consistency
+            blob = [x[None, :, :, :] if x is not None else None for x in blob]
+
+        image1, image2, flow_gt = blob[:3]
+        valid_flow_mask = None if len(blob) == 3 else blob[-1]
+
+        image1, image2 = image1.to(device), image2.to(device)
+
+        padder = utils.InputPadder(image1.shape, mode=padder_mode)
+        image1, image2 = padder.pad(image1, image2)
+
+        flow_predictions = model(image1, image2, num_flow_updates=num_flow_updates)
+        flow_pred = flow_predictions[-1]
+        flow_pred = padder.unpad(flow_pred).cpu()
+
+        metrics, num_pixels_tot = utils.compute_metrics(flow_pred, flow_gt, valid_flow_mask)
+
+        # We compute per-pixel epe (epe) and per-image epe (called f1-epe in RAFT paper).
+ # per-pixel epe: average epe of all pixels of all images + # per-image epe: average epe on each image independently, then average over images + for name in ("epe", "1px", "3px", "5px", "f1"): # f1 is called f1-all in paper + logger.meters[name].update(metrics[name], n=num_pixels_tot) + logger.meters["per_image_epe"].update(metrics["epe"], n=batch_size) + + logger = utils.MetricLogger() + for meter_name in ("epe", "1px", "3px", "5px", "per_image_epe", "f1"): + logger.add_meter(meter_name, fmt="{global_avg:.4f}") + + num_processed_samples = 0 + for blob in logger.log_every(val_loader, header=header, print_freq=None): + inner_loop(blob) + num_processed_samples += blob[0].shape[0] # batch size + + if args.distributed: + num_processed_samples = utils.reduce_across_processes(num_processed_samples) + print( + f"Batch-processed {num_processed_samples} / {len(val_dataset)} samples. " + "Going to process the remaining samples individually, if any." + ) + if args.rank == 0: # we only need to process the rest on a single worker + for i in range(num_processed_samples, len(val_dataset)): + inner_loop(val_dataset[i]) + + logger.synchronize_between_processes() + + print(header, logger) + + +def evaluate(model, args): + val_datasets = args.val_dataset or [] + + if args.weights and args.test_only: + weights = torchvision.models.get_weight(args.weights) + trans = weights.transforms() + + def preprocessing(img1, img2, flow, valid_flow_mask): + img1, img2 = trans(img1, img2) + if flow is not None and not isinstance(flow, torch.Tensor): + flow = torch.from_numpy(flow) + if valid_flow_mask is not None and not isinstance(valid_flow_mask, torch.Tensor): + valid_flow_mask = torch.from_numpy(valid_flow_mask) + return img1, img2, flow, valid_flow_mask + + else: + preprocessing = OpticalFlowPresetEval() + + for name in val_datasets: + if name == "kitti": + # Kitti has different image sizes, so we need to individually pad them, we can't batch. + # see comment in InputPadder + if args.batch_size != 1 and (not args.distributed or args.rank == 0): + warnings.warn( + f"Batch-size={args.batch_size} was passed. For technical reasons, evaluating on Kitti can only be done with a batch-size of 1." 
+                )
+
+            val_dataset = KittiFlow(root=args.dataset_root, split="train", transforms=preprocessing)
+            _evaluate(
+                model, args, val_dataset, num_flow_updates=24, padder_mode="kitti", header="Kitti val", batch_size=1
+            )
+        elif name == "sintel":
+            for pass_name in ("clean", "final"):
+                val_dataset = Sintel(
+                    root=args.dataset_root, split="train", pass_name=pass_name, transforms=preprocessing
+                )
+                _evaluate(
+                    model,
+                    args,
+                    val_dataset,
+                    num_flow_updates=32,
+                    padder_mode="sintel",
+                    header=f"Sintel val {pass_name}",
+                )
+        else:
+            warnings.warn(f"Can't validate on {name}, skipping.")
+
+
+def train_one_epoch(model, optimizer, scheduler, train_loader, logger, args):
+    device = torch.device(args.device)
+    for data_blob in logger.log_every(train_loader):
+
+        optimizer.zero_grad()
+
+        image1, image2, flow_gt, valid_flow_mask = (x.to(device) for x in data_blob)
+        flow_predictions = model(image1, image2, num_flow_updates=args.num_flow_updates)
+
+        loss = utils.sequence_loss(flow_predictions, flow_gt, valid_flow_mask, args.gamma)
+        metrics, _ = utils.compute_metrics(flow_predictions[-1], flow_gt, valid_flow_mask)
+
+        metrics.pop("f1")
+        logger.update(loss=loss, **metrics)
+
+        loss.backward()
+
+        torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm=1)
+
+        optimizer.step()
+        scheduler.step()
+
+
+def main(args):
+    utils.setup_ddp(args)
+    args.test_only = args.train_dataset is None
+
+    if args.distributed and args.device == "cpu":
+        raise ValueError("The device must be cuda if we want to run in distributed mode using torchrun")
+    device = torch.device(args.device)
+
+    if args.use_deterministic_algorithms:
+        torch.backends.cudnn.benchmark = False
+        torch.use_deterministic_algorithms(True)
+    else:
+        torch.backends.cudnn.benchmark = True
+
+    model = torchvision.models.get_model(args.model, weights=args.weights)
+
+    if args.distributed:
+        model = model.to(args.local_rank)
+        model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.local_rank])
+        model_without_ddp = model.module
+    else:
+        model.to(device)
+        model_without_ddp = model
+
+    if args.resume is not None:
+        checkpoint = torch.load(args.resume, map_location="cpu", weights_only=True)
+        model_without_ddp.load_state_dict(checkpoint["model"])
+
+    if args.test_only:
+        # Set deterministic CUDNN algorithms, since they can affect epe a fair bit.
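+        # benchmark=False stops cuDNN from auto-tuning kernels (the selection is
+        # input-dependent), and deterministic=True forces reproducible implementations.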
+ torch.backends.cudnn.benchmark = False + torch.backends.cudnn.deterministic = True + evaluate(model, args) + return + + print(f"Parameter Count: {sum(p.numel() for p in model.parameters() if p.requires_grad)}") + + train_dataset = get_train_dataset(args.train_dataset, args.dataset_root) + + optimizer = torch.optim.AdamW(model.parameters(), lr=args.lr, weight_decay=args.weight_decay, eps=args.adamw_eps) + + scheduler = torch.optim.lr_scheduler.OneCycleLR( + optimizer=optimizer, + max_lr=args.lr, + epochs=args.epochs, + steps_per_epoch=ceil(len(train_dataset) / (args.world_size * args.batch_size)), + pct_start=0.05, + cycle_momentum=False, + anneal_strategy="linear", + ) + + if args.resume is not None: + optimizer.load_state_dict(checkpoint["optimizer"]) + scheduler.load_state_dict(checkpoint["scheduler"]) + args.start_epoch = checkpoint["epoch"] + 1 + else: + args.start_epoch = 0 + + torch.backends.cudnn.benchmark = True + + model.train() + if args.freeze_batch_norm: + utils.freeze_batch_norm(model.module) + + if args.distributed: + sampler = torch.utils.data.distributed.DistributedSampler(train_dataset, shuffle=True, drop_last=True) + else: + sampler = torch.utils.data.RandomSampler(train_dataset) + + train_loader = torch.utils.data.DataLoader( + train_dataset, + sampler=sampler, + batch_size=args.batch_size, + pin_memory=True, + num_workers=args.workers, + ) + + logger = utils.MetricLogger() + + done = False + for epoch in range(args.start_epoch, args.epochs): + print(f"EPOCH {epoch}") + if args.distributed: + # needed on distributed mode, otherwise the data loading order would be the same for all epochs + sampler.set_epoch(epoch) + + train_one_epoch( + model=model, + optimizer=optimizer, + scheduler=scheduler, + train_loader=train_loader, + logger=logger, + args=args, + ) + + # Note: we don't sync the SmoothedValues across processes, so the printed metrics are just those of rank 0 + print(f"Epoch {epoch} done. ", logger) + + if not args.distributed or args.rank == 0: + checkpoint = { + "model": model_without_ddp.state_dict(), + "optimizer": optimizer.state_dict(), + "scheduler": scheduler.state_dict(), + "epoch": epoch, + "args": args, + } + torch.save(checkpoint, Path(args.output_dir) / f"{args.name}_{epoch}.pth") + torch.save(checkpoint, Path(args.output_dir) / f"{args.name}.pth") + + if epoch % args.val_freq == 0 or done: + evaluate(model, args) + model.train() + if args.freeze_batch_norm: + utils.freeze_batch_norm(model.module) + + +def get_args_parser(add_help=True): + parser = argparse.ArgumentParser(add_help=add_help, description="Train or evaluate an optical-flow model.") + parser.add_argument( + "--name", + default="raft", + type=str, + help="The name of the experiment - determines the name of the files where weights are saved.", + ) + parser.add_argument("--output-dir", default=".", type=str, help="Output dir where checkpoints will be stored.") + parser.add_argument( + "--resume", + type=str, + help="A path to previously saved weights. Used to re-start training from, or evaluate a pre-saved model.", + ) + + parser.add_argument("--workers", type=int, default=12, help="Number of workers for the data loading part.") + + parser.add_argument( + "--train-dataset", + type=str, + help="The dataset to use for training. 
If not passed, only validation is performed (and you probably want to pass --resume).", + ) + parser.add_argument("--val-dataset", type=str, nargs="+", help="The dataset(s) to use for validation.") + parser.add_argument("--val-freq", type=int, default=2, help="Validate every X epochs") + parser.add_argument("--epochs", type=int, default=20, help="The total number of epochs to train.") + parser.add_argument("--batch-size", type=int, default=2) + + parser.add_argument("--lr", type=float, default=0.00002, help="Learning rate for AdamW optimizer") + parser.add_argument("--weight-decay", type=float, default=0.00005, help="Weight decay for AdamW optimizer") + parser.add_argument("--adamw-eps", type=float, default=1e-8, help="eps value for AdamW optimizer") + + parser.add_argument( + "--freeze-batch-norm", action="store_true", help="Set BatchNorm modules of the model in eval mode." + ) + + parser.add_argument( + "--model", type=str, default="raft_large", help="The name of the model to use - either raft_large or raft_small" + ) + # TODO: resume and weights should be in an exclusive arg group + + parser.add_argument( + "--num_flow_updates", + type=int, + default=12, + help="number of updates (or 'iters') in the update operator of the model.", + ) + + parser.add_argument("--gamma", type=float, default=0.8, help="exponential weighting for loss. Must be < 1.") + + parser.add_argument("--dist-url", default="env://", help="URL used to set up distributed training") + + parser.add_argument( + "--dataset-root", + help="Root folder where the datasets are stored. Will be passed as the 'root' parameter of the datasets.", + required=True, + ) + + parser.add_argument("--weights", default=None, type=str, help="the weights enum name to load.") + parser.add_argument("--device", default="cuda", type=str, help="device (Use cuda or cpu, Default: cuda)") + parser.add_argument( + "--use-deterministic-algorithms", action="store_true", help="Forces the use of deterministic algorithms only." 
+    )
+
+    return parser
+
+
+if __name__ == "__main__":
+    args = get_args_parser().parse_args()
+    Path(args.output_dir).mkdir(exist_ok=True)
+    main(args)
diff --git a/references/optical_flow/transforms.py b/references/optical_flow/transforms.py
new file mode 100644
index 00000000000..bc831a2ee52
--- /dev/null
+++ b/references/optical_flow/transforms.py
@@ -0,0 +1,271 @@
+import torch
+import torchvision.transforms as T
+import torchvision.transforms.functional as F
+
+
+class ValidateModelInput(torch.nn.Module):
+    # Pass-through transform that checks the shape and dtypes to make sure the model gets what it expects
+    def forward(self, img1, img2, flow, valid_flow_mask):
+
+        if not all(isinstance(arg, torch.Tensor) for arg in (img1, img2, flow, valid_flow_mask) if arg is not None):
+            raise TypeError("This method expects all input arguments to be of type torch.Tensor.")
+        if not all(arg.dtype == torch.float32 for arg in (img1, img2, flow) if arg is not None):
+            raise TypeError("This method expects the tensors img1, img2 and flow to be of dtype torch.float32.")
+
+        if img1.shape != img2.shape:
+            raise ValueError("img1 and img2 should have the same shape.")
+        h, w = img1.shape[-2:]
+        if flow is not None and flow.shape != (2, h, w):
+            raise ValueError(f"flow.shape should be (2, {h}, {w}) instead of {flow.shape}")
+        if valid_flow_mask is not None:
+            if valid_flow_mask.shape != (h, w):
+                raise ValueError(f"valid_flow_mask.shape should be ({h}, {w}) instead of {valid_flow_mask.shape}")
+            if valid_flow_mask.dtype != torch.bool:
+                raise TypeError(f"valid_flow_mask should be of dtype torch.bool instead of {valid_flow_mask.dtype}")
+
+        return img1, img2, flow, valid_flow_mask
+
+
+class MakeValidFlowMask(torch.nn.Module):
+    # This transform generates a valid_flow_mask if it doesn't exist.
+    # The flow is considered valid if ||flow||_inf < threshold
+    # This is a noop for Kitti and HD1K which already come with a built-in flow mask.
+    def __init__(self, threshold=1000):
+        super().__init__()
+        self.threshold = threshold
+
+    def forward(self, img1, img2, flow, valid_flow_mask):
+        if flow is not None and valid_flow_mask is None:
+            valid_flow_mask = (flow.abs() < self.threshold).all(axis=0)
+        return img1, img2, flow, valid_flow_mask
+
+
+class ConvertImageDtype(torch.nn.Module):
+    def __init__(self, dtype):
+        super().__init__()
+        self.dtype = dtype
+
+    def forward(self, img1, img2, flow, valid_flow_mask):
+        img1 = F.convert_image_dtype(img1, dtype=self.dtype)
+        img2 = F.convert_image_dtype(img2, dtype=self.dtype)
+
+        img1 = img1.contiguous()
+        img2 = img2.contiguous()
+
+        return img1, img2, flow, valid_flow_mask
+
+
+class Normalize(torch.nn.Module):
+    def __init__(self, mean, std):
+        super().__init__()
+        self.mean = mean
+        self.std = std
+
+    def forward(self, img1, img2, flow, valid_flow_mask):
+        img1 = F.normalize(img1, mean=self.mean, std=self.std)
+        img2 = F.normalize(img2, mean=self.mean, std=self.std)
+
+        return img1, img2, flow, valid_flow_mask
+
+
+class PILToTensor(torch.nn.Module):
+    # Converts all inputs to tensors
+    # Technically the flow and the valid mask are numpy arrays, not PIL images, but we keep that naming
+    # for consistency with the rest, e.g. the segmentation reference.
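+    # Note that pil_to_tensor keeps images as uint8 in [0, 255]; the conversion to
+    # float32 happens later, in ConvertImageDtype.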
+    def forward(self, img1, img2, flow, valid_flow_mask):
+        img1 = F.pil_to_tensor(img1)
+        img2 = F.pil_to_tensor(img2)
+        if flow is not None:
+            flow = torch.from_numpy(flow)
+        if valid_flow_mask is not None:
+            valid_flow_mask = torch.from_numpy(valid_flow_mask)
+
+        return img1, img2, flow, valid_flow_mask
+
+
+class AsymmetricColorJitter(T.ColorJitter):
+    # p determines the probability of doing asymmetric vs symmetric color jittering
+    def __init__(self, brightness=0, contrast=0, saturation=0, hue=0, p=0.2):
+        super().__init__(brightness=brightness, contrast=contrast, saturation=saturation, hue=hue)
+        self.p = p
+
+    def forward(self, img1, img2, flow, valid_flow_mask):
+
+        if torch.rand(1) < self.p:
+            # asymmetric: different transform for img1 and img2
+            img1 = super().forward(img1)
+            img2 = super().forward(img2)
+        else:
+            # symmetric: same transform for img1 and img2
+            batch = torch.stack([img1, img2])
+            batch = super().forward(batch)
+            img1, img2 = batch[0], batch[1]
+
+        return img1, img2, flow, valid_flow_mask
+
+
+class RandomErasing(T.RandomErasing):
+    # This only erases img2, and with an extra max_erase param
+    # This max_erase is needed because the RAFT training ref does:
+    # 0 erasings with .5 probability
+    # 1 erasing with .25 probability
+    # 2 erasings with .25 probability
+    # and there's no accurate way to achieve this otherwise.
+    def __init__(self, p=0.5, scale=(0.02, 0.33), ratio=(0.3, 3.3), value=0, inplace=False, max_erase=1):
+        super().__init__(p=p, scale=scale, ratio=ratio, value=value, inplace=inplace)
+        self.max_erase = max_erase
+        if self.max_erase <= 0:
+            raise ValueError("max_erase should be greater than 0")
+
+    def forward(self, img1, img2, flow, valid_flow_mask):
+        if torch.rand(1) > self.p:
+            return img1, img2, flow, valid_flow_mask
+
+        for _ in range(torch.randint(self.max_erase, size=(1,)).item()):
+            x, y, h, w, v = self.get_params(img2, scale=self.scale, ratio=self.ratio, value=[self.value])
+            img2 = F.erase(img2, x, y, h, w, v, self.inplace)
+
+        return img1, img2, flow, valid_flow_mask
+
+
+class RandomHorizontalFlip(T.RandomHorizontalFlip):
+    def forward(self, img1, img2, flow, valid_flow_mask):
+        if torch.rand(1) > self.p:
+            return img1, img2, flow, valid_flow_mask
+
+        img1 = F.hflip(img1)
+        img2 = F.hflip(img2)
+        flow = F.hflip(flow) * torch.tensor([-1, 1])[:, None, None]
+        if valid_flow_mask is not None:
+            valid_flow_mask = F.hflip(valid_flow_mask)
+        return img1, img2, flow, valid_flow_mask
+
+
+class RandomVerticalFlip(T.RandomVerticalFlip):
+    def forward(self, img1, img2, flow, valid_flow_mask):
+        if torch.rand(1) > self.p:
+            return img1, img2, flow, valid_flow_mask
+
+        img1 = F.vflip(img1)
+        img2 = F.vflip(img2)
+        flow = F.vflip(flow) * torch.tensor([1, -1])[:, None, None]
+        if valid_flow_mask is not None:
+            valid_flow_mask = F.vflip(valid_flow_mask)
+        return img1, img2, flow, valid_flow_mask
+
+
+class RandomResizeAndCrop(torch.nn.Module):
+    # This transform will resize the input with a given probability, and then crop it.
+    # These are the reversed operations of the built-in RandomResizedCrop,
+    # although the order of the operations doesn't matter too much: resizing a
+    # crop would give the same result as cropping a resized image, up to
+    # interpolation artifacts at the borders of the output.
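+    #
+    # The scale factor is sampled as 2 ** uniform(min_scale, max_scale), i.e.
+    # log-uniformly: with the defaults min_scale=-0.2 and max_scale=0.5 this spans
+    # roughly [0.87x, 1.41x] of the original size, before the optional stretching.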
+ # + # The reason we don't rely on RandomResizedCrop is because of a significant + # difference in the parametrization of both transforms, in particular, + # because of the way the random parameters are sampled in both transforms, + # which leads to fairly different results (and different epe). For more details see + # https://github.com/pytorch/vision/pull/5026/files#r762932579 + def __init__(self, crop_size, min_scale=-0.2, max_scale=0.5, stretch_prob=0.8): + super().__init__() + self.crop_size = crop_size + self.min_scale = min_scale + self.max_scale = max_scale + self.stretch_prob = stretch_prob + self.resize_prob = 0.8 + self.max_stretch = 0.2 + + def forward(self, img1, img2, flow, valid_flow_mask): + # randomly sample scale + h, w = img1.shape[-2:] + # Note: in original code, they use + 1 instead of + 8 for sparse datasets (e.g. Kitti) + # It shouldn't matter much + min_scale = max((self.crop_size[0] + 8) / h, (self.crop_size[1] + 8) / w) + + scale = 2 ** torch.empty(1, dtype=torch.float32).uniform_(self.min_scale, self.max_scale).item() + scale_x = scale + scale_y = scale + if torch.rand(1) < self.stretch_prob: + scale_x *= 2 ** torch.empty(1, dtype=torch.float32).uniform_(-self.max_stretch, self.max_stretch).item() + scale_y *= 2 ** torch.empty(1, dtype=torch.float32).uniform_(-self.max_stretch, self.max_stretch).item() + + scale_x = max(scale_x, min_scale) + scale_y = max(scale_y, min_scale) + + new_h, new_w = round(h * scale_y), round(w * scale_x) + + if torch.rand(1).item() < self.resize_prob: + # rescale the images + # We hard-code antialias=False to preserve results after we changed + # its default from None to True (see + # https://github.com/pytorch/vision/pull/7160) + # TODO: we could re-train the OF models with antialias=True? + img1 = F.resize(img1, size=(new_h, new_w), antialias=False) + img2 = F.resize(img2, size=(new_h, new_w), antialias=False) + if valid_flow_mask is None: + flow = F.resize(flow, size=(new_h, new_w)) + flow = flow * torch.tensor([scale_x, scale_y])[:, None, None] + else: + flow, valid_flow_mask = self._resize_sparse_flow( + flow, valid_flow_mask, scale_x=scale_x, scale_y=scale_y + ) + + # Note: For sparse datasets (Kitti), the original code uses a "margin" + # See e.g. 
https://github.com/princeton-vl/RAFT/blob/master/core/utils/augmentor.py#L220:L220
+        # We don't; not sure if it matters much
+        y0 = torch.randint(0, img1.shape[1] - self.crop_size[0], size=(1,)).item()
+        x0 = torch.randint(0, img1.shape[2] - self.crop_size[1], size=(1,)).item()
+
+        img1 = F.crop(img1, y0, x0, self.crop_size[0], self.crop_size[1])
+        img2 = F.crop(img2, y0, x0, self.crop_size[0], self.crop_size[1])
+        flow = F.crop(flow, y0, x0, self.crop_size[0], self.crop_size[1])
+        if valid_flow_mask is not None:
+            valid_flow_mask = F.crop(valid_flow_mask, y0, x0, self.crop_size[0], self.crop_size[1])
+
+        return img1, img2, flow, valid_flow_mask
+
+    def _resize_sparse_flow(self, flow, valid_flow_mask, scale_x=1.0, scale_y=1.0):
+        # This resizes both the flow and the valid_flow_mask (which is assumed to be reasonably sparse)
+        # There are as many non-zero values in the original flow as in the resized flow (up to OOB)
+        # So for example if scale_x = scale_y = 2, the sparsity of the output flow is multiplied by 4
+
+        h, w = flow.shape[-2:]
+
+        h_new = int(round(h * scale_y))
+        w_new = int(round(w * scale_x))
+        flow_new = torch.zeros(size=[2, h_new, w_new], dtype=flow.dtype)
+        valid_new = torch.zeros(size=[h_new, w_new], dtype=valid_flow_mask.dtype)
+
+        jj, ii = torch.meshgrid(torch.arange(w), torch.arange(h), indexing="xy")
+
+        ii_valid, jj_valid = ii[valid_flow_mask], jj[valid_flow_mask]
+
+        ii_valid_new = torch.round(ii_valid.to(float) * scale_y).to(torch.long)
+        jj_valid_new = torch.round(jj_valid.to(float) * scale_x).to(torch.long)
+
+        within_bounds_mask = (0 <= ii_valid_new) & (ii_valid_new < h_new) & (0 <= jj_valid_new) & (jj_valid_new < w_new)
+
+        ii_valid = ii_valid[within_bounds_mask]
+        jj_valid = jj_valid[within_bounds_mask]
+        ii_valid_new = ii_valid_new[within_bounds_mask]
+        jj_valid_new = jj_valid_new[within_bounds_mask]
+
+        valid_flow_new = flow[:, ii_valid, jj_valid]
+        valid_flow_new[0] *= scale_x
+        valid_flow_new[1] *= scale_y
+
+        flow_new[:, ii_valid_new, jj_valid_new] = valid_flow_new
+        valid_new[ii_valid_new, jj_valid_new] = 1
+
+        return flow_new, valid_new
+
+
+class Compose(torch.nn.Module):
+    def __init__(self, transforms):
+        super().__init__()
+        self.transforms = transforms
+
+    def forward(self, img1, img2, flow, valid_flow_mask):
+        for t in self.transforms:
+            img1, img2, flow, valid_flow_mask = t(img1, img2, flow, valid_flow_mask)
+        return img1, img2, flow, valid_flow_mask
diff --git a/references/optical_flow/utils.py b/references/optical_flow/utils.py
new file mode 100644
index 00000000000..cd4b16eb0d8
--- /dev/null
+++ b/references/optical_flow/utils.py
@@ -0,0 +1,290 @@
+import datetime
+import os
+import time
+from collections import defaultdict, deque
+
+import torch
+import torch.distributed as dist
+import torch.nn.functional as F
+
+
+class SmoothedValue:
+    """Track a series of values and provide access to smoothed values over a
+    window or the global series average.
+    """
+
+    def __init__(self, window_size=20, fmt="{median:.4f} ({global_avg:.4f})"):
+        self.deque = deque(maxlen=window_size)
+        self.total = 0.0
+        self.count = 0
+        self.fmt = fmt
+
+    def update(self, value, n=1):
+        self.deque.append(value)
+        self.count += n
+        self.total += value * n
+
+    def synchronize_between_processes(self):
+        """
+        Warning: does not synchronize the deque!
+ """ + t = reduce_across_processes([self.count, self.total]) + t = t.tolist() + self.count = int(t[0]) + self.total = t[1] + + @property + def median(self): + d = torch.tensor(list(self.deque)) + return d.median().item() + + @property + def avg(self): + d = torch.tensor(list(self.deque), dtype=torch.float32) + return d.mean().item() + + @property + def global_avg(self): + return self.total / self.count + + @property + def max(self): + return max(self.deque) + + @property + def value(self): + return self.deque[-1] + + def __str__(self): + return self.fmt.format( + median=self.median, avg=self.avg, global_avg=self.global_avg, max=self.max, value=self.value + ) + + +class MetricLogger: + def __init__(self, delimiter="\t"): + self.meters = defaultdict(SmoothedValue) + self.delimiter = delimiter + + def update(self, **kwargs): + for k, v in kwargs.items(): + if isinstance(v, torch.Tensor): + v = v.item() + if not isinstance(v, (float, int)): + raise TypeError( + f"This method expects the value of the input arguments to be of type float or int, instead got {type(v)}" + ) + self.meters[k].update(v) + + def __getattr__(self, attr): + if attr in self.meters: + return self.meters[attr] + if attr in self.__dict__: + return self.__dict__[attr] + raise AttributeError(f"'{type(self).__name__}' object has no attribute '{attr}'") + + def __str__(self): + loss_str = [] + for name, meter in self.meters.items(): + loss_str.append(f"{name}: {str(meter)}") + return self.delimiter.join(loss_str) + + def synchronize_between_processes(self): + for meter in self.meters.values(): + meter.synchronize_between_processes() + + def add_meter(self, name, **kwargs): + self.meters[name] = SmoothedValue(**kwargs) + + def log_every(self, iterable, print_freq=5, header=None): + i = 0 + if not header: + header = "" + start_time = time.time() + end = time.time() + iter_time = SmoothedValue(fmt="{avg:.4f}") + data_time = SmoothedValue(fmt="{avg:.4f}") + space_fmt = ":" + str(len(str(len(iterable)))) + "d" + if torch.cuda.is_available(): + log_msg = self.delimiter.join( + [ + header, + "[{0" + space_fmt + "}/{1}]", + "eta: {eta}", + "{meters}", + "time: {time}", + "data: {data}", + "max mem: {memory:.0f}", + ] + ) + else: + log_msg = self.delimiter.join( + [header, "[{0" + space_fmt + "}/{1}]", "eta: {eta}", "{meters}", "time: {time}", "data: {data}"] + ) + MB = 1024.0 * 1024.0 + for obj in iterable: + data_time.update(time.time() - end) + yield obj + iter_time.update(time.time() - end) + if print_freq is not None and i % print_freq == 0: + eta_seconds = iter_time.global_avg * (len(iterable) - i) + eta_string = str(datetime.timedelta(seconds=int(eta_seconds))) + if torch.cuda.is_available(): + print( + log_msg.format( + i, + len(iterable), + eta=eta_string, + meters=str(self), + time=str(iter_time), + data=str(data_time), + memory=torch.cuda.max_memory_allocated() / MB, + ) + ) + else: + print( + log_msg.format( + i, len(iterable), eta=eta_string, meters=str(self), time=str(iter_time), data=str(data_time) + ) + ) + i += 1 + end = time.time() + total_time = time.time() - start_time + total_time_str = str(datetime.timedelta(seconds=int(total_time))) + print(f"{header} Total time: {total_time_str}") + + +def compute_metrics(flow_pred, flow_gt, valid_flow_mask=None): + + epe = ((flow_pred - flow_gt) ** 2).sum(dim=1).sqrt() + flow_norm = (flow_gt**2).sum(dim=1).sqrt() + + if valid_flow_mask is not None: + epe = epe[valid_flow_mask] + flow_norm = flow_norm[valid_flow_mask] + + relative_epe = epe / flow_norm + + metrics = { + "epe": 
epe.mean().item(),
+        "1px": (epe < 1).float().mean().item(),
+        "3px": (epe < 3).float().mean().item(),
+        "5px": (epe < 5).float().mean().item(),
+        "f1": ((epe > 3) & (relative_epe > 0.05)).float().mean().item() * 100,
+    }
+    return metrics, epe.numel()
+
+
+def sequence_loss(flow_preds, flow_gt, valid_flow_mask, gamma=0.8, max_flow=400):
+    """Loss function defined over sequence of flow predictions"""
+
+    if gamma > 1:
+        raise ValueError(f"Gamma should be < 1, got {gamma}.")
+
+    # exclude invalid pixels and extremely large displacements
+    flow_norm = torch.sum(flow_gt**2, dim=1).sqrt()
+    valid_flow_mask = valid_flow_mask & (flow_norm < max_flow)
+
+    valid_flow_mask = valid_flow_mask[:, None, :, :]
+
+    flow_preds = torch.stack(flow_preds)  # shape = (num_flow_updates, batch_size, 2, H, W)
+
+    abs_diff = (flow_preds - flow_gt).abs()
+    abs_diff = (abs_diff * valid_flow_mask).mean(axis=(1, 2, 3, 4))
+
+    num_predictions = flow_preds.shape[0]
+    weights = gamma ** torch.arange(num_predictions - 1, -1, -1).to(flow_gt.device)
+    flow_loss = (abs_diff * weights).sum()
+
+    return flow_loss
+
+
+class InputPadder:
+    """Pads images such that their dimensions are divisible by 8"""
+
+    # TODO: Ideally, this should be part of the eval transforms preset, instead
+    # of being part of the validation code. It's not obvious what a good
+    # solution would be, because we need to unpad the predicted flows according
+    # to the input images' size, and in some datasets (Kitti) images can have
+    # variable sizes.
+
+    def __init__(self, dims, mode="sintel"):
+        self.ht, self.wd = dims[-2:]
+        pad_ht = (((self.ht // 8) + 1) * 8 - self.ht) % 8
+        pad_wd = (((self.wd // 8) + 1) * 8 - self.wd) % 8
+        if mode == "sintel":
+            self._pad = [pad_wd // 2, pad_wd - pad_wd // 2, pad_ht // 2, pad_ht - pad_ht // 2]
+        else:
+            self._pad = [pad_wd // 2, pad_wd - pad_wd // 2, 0, pad_ht]
+
+    def pad(self, *inputs):
+        return [F.pad(x, self._pad, mode="replicate") for x in inputs]
+
+    def unpad(self, x):
+        ht, wd = x.shape[-2:]
+        c = [self._pad[2], ht - self._pad[3], self._pad[0], wd - self._pad[1]]
+        return x[..., c[0] : c[1], c[2] : c[3]]
+
+
+def _redefine_print(is_main):
+    """Disables printing when not in the main process"""
+    import builtins as __builtin__
+
+    builtin_print = __builtin__.print
+
+    def print(*args, **kwargs):
+        force = kwargs.pop("force", False)
+        if is_main or force:
+            builtin_print(*args, **kwargs)
+
+    __builtin__.print = print
+
+
+def setup_ddp(args):
+    # Set the local_rank, rank, and world_size values as args fields
+    # This is done differently depending on how we're running the script. We
+    # currently support either torchrun or the custom run_with_submitit.py
+    # If you're confused (like I was), this might help a bit
+    # https://discuss.pytorch.org/t/what-is-the-difference-between-rank-and-local-rank/61940/2
+
+    if all(key in os.environ for key in ("LOCAL_RANK", "RANK", "WORLD_SIZE")):
+        # if we're here, the script was called with torchrun.
Otherwise, + # these args will be set already by the run_with_submitit script + args.local_rank = int(os.environ["LOCAL_RANK"]) + args.rank = int(os.environ["RANK"]) + args.world_size = int(os.environ["WORLD_SIZE"]) + + elif "gpu" in args: + # if we're here, the script was called by run_with_submitit.py + args.local_rank = args.gpu + else: + print("Not using distributed mode!") + args.distributed = False + args.world_size = 1 + return + + args.distributed = True + + _redefine_print(is_main=(args.rank == 0)) + + torch.cuda.set_device(args.local_rank) + dist.init_process_group( + backend="nccl", + rank=args.rank, + world_size=args.world_size, + init_method=args.dist_url, + ) + torch.distributed.barrier() + + +def reduce_across_processes(val): + t = torch.tensor(val, device="cuda") + dist.barrier() + dist.all_reduce(t) + return t + + +def freeze_batch_norm(model): + for m in model.modules(): + if isinstance(m, torch.nn.BatchNorm2d): + m.eval() diff --git a/references/segmentation/README.md b/references/segmentation/README.md new file mode 100644 index 00000000000..2c8e581dac1 --- /dev/null +++ b/references/segmentation/README.md @@ -0,0 +1,43 @@ +# Semantic segmentation reference training scripts + +This folder contains reference training scripts for semantic segmentation. +They serve as a log of how to train specific models and provide baseline +training and evaluation scripts to quickly bootstrap research. + +All models have been trained on 8x V100 GPUs. + +You must modify the following flags: + +`--data-path=/path/to/dataset` + +`--nproc_per_node=` + +## fcn_resnet50 +``` +torchrun --nproc_per_node=8 train.py --lr 0.02 --dataset coco -b 4 --model fcn_resnet50 --aux-loss --weights-backbone ResNet50_Weights.IMAGENET1K_V1 +``` + +## fcn_resnet101 +``` +torchrun --nproc_per_node=8 train.py --lr 0.02 --dataset coco -b 4 --model fcn_resnet101 --aux-loss --weights-backbone ResNet101_Weights.IMAGENET1K_V1 +``` + +## deeplabv3_resnet50 +``` +torchrun --nproc_per_node=8 train.py --lr 0.02 --dataset coco -b 4 --model deeplabv3_resnet50 --aux-loss --weights-backbone ResNet50_Weights.IMAGENET1K_V1 +``` + +## deeplabv3_resnet101 +``` +torchrun --nproc_per_node=8 train.py --lr 0.02 --dataset coco -b 4 --model deeplabv3_resnet101 --aux-loss --weights-backbone ResNet101_Weights.IMAGENET1K_V1 +``` + +## deeplabv3_mobilenet_v3_large +``` +torchrun --nproc_per_node=8 train.py --dataset coco -b 4 --model deeplabv3_mobilenet_v3_large --aux-loss --wd 0.000001 --weights-backbone MobileNet_V3_Large_Weights.IMAGENET1K_V1 +``` + +## lraspp_mobilenet_v3_large +``` +torchrun --nproc_per_node=8 train.py --dataset coco -b 4 --model lraspp_mobilenet_v3_large --wd 0.000001 --weights-backbone MobileNet_V3_Large_Weights.IMAGENET1K_V1 +``` diff --git a/references/segmentation/coco_utils.py b/references/segmentation/coco_utils.py index c86d5495247..6a15dbefb52 100644 --- a/references/segmentation/coco_utils.py +++ b/references/segmentation/coco_utils.py @@ -1,17 +1,15 @@ import copy +import os + import torch import torch.utils.data import torchvision from PIL import Image - -import os - from pycocotools import mask as coco_mask - from transforms import Compose -class FilterAndRemapCocoCategories(object): +class FilterAndRemapCocoCategories: def __init__(self, categories, remap=True): self.categories = categories self.remap = remap @@ -43,7 +41,7 @@ def convert_coco_poly_to_mask(segmentations, height, width): return masks -class ConvertCocoPolysToMask(object): +class ConvertCocoPolysToMask: def __call__(self, image, anno): w, 
h = image.size
         segmentations = [obj["segmentation"] for obj in anno]
@@ -70,7 +68,6 @@ def _has_valid_annotation(anno):
         # if more than 1k pixels occupied in the image
         return sum(obj["area"] for obj in anno) > 1000
 
-    assert isinstance(dataset, torchvision.datasets.CocoDetection)
     ids = []
     for ds_idx, img_id in enumerate(dataset.ids):
         ann_ids = dataset.coco.getAnnIds(imgIds=img_id, iscrowd=None)
@@ -84,26 +81,32 @@ def _has_valid_annotation(anno):
     return dataset
 
 
-def get_coco(root, image_set, transforms):
+def get_coco(root, image_set, transforms, use_v2=False):
     PATHS = {
         "train": ("train2017", os.path.join("annotations", "instances_train2017.json")),
         "val": ("val2017", os.path.join("annotations", "instances_val2017.json")),
         # "train": ("val2017", os.path.join("annotations", "instances_val2017.json"))
     }
-    CAT_LIST = [0, 5, 2, 16, 9, 44, 6, 3, 17, 62, 21, 67, 18, 19, 4,
-                1, 64, 20, 63, 7, 72]
-
-    transforms = Compose([
-        FilterAndRemapCocoCategories(CAT_LIST, remap=True),
-        ConvertCocoPolysToMask(),
-        transforms
-    ])
+    CAT_LIST = [0, 5, 2, 16, 9, 44, 6, 3, 17, 62, 21, 67, 18, 19, 4, 1, 64, 20, 63, 7, 72]
 
     img_folder, ann_file = PATHS[image_set]
     img_folder = os.path.join(root, img_folder)
     ann_file = os.path.join(root, ann_file)
 
-    dataset = torchvision.datasets.CocoDetection(img_folder, ann_file, transforms=transforms)
+    # The two "Compose" blocks below achieve the same thing: converting coco detection
+    # samples into segmentation-compatible samples. They just do it with
+    # slightly different implementations. We could refactor and unify, but
+    # keeping them separate helps keep the v2 version clean
+    if use_v2:
+        import v2_extras
+        from torchvision.datasets import wrap_dataset_for_transforms_v2
+
+        transforms = Compose([v2_extras.CocoDetectionToVOCSegmentation(), transforms])
+        dataset = torchvision.datasets.CocoDetection(img_folder, ann_file, transforms=transforms)
+        dataset = wrap_dataset_for_transforms_v2(dataset, target_keys={"masks", "labels"})
+    else:
+        transforms = Compose([FilterAndRemapCocoCategories(CAT_LIST, remap=True), ConvertCocoPolysToMask(), transforms])
+        dataset = torchvision.datasets.CocoDetection(img_folder, ann_file, transforms=transforms)
 
     if image_set == "train":
         dataset = _coco_remove_images_without_annotations(dataset, CAT_LIST)
diff --git a/references/segmentation/presets.py b/references/segmentation/presets.py
new file mode 100644
index 00000000000..803769fcafc
--- /dev/null
+++ b/references/segmentation/presets.py
@@ -0,0 +1,109 @@
+import torch
+
+
+def get_modules(use_v2):
+    # We need a protected import to avoid the V2 warning in case only V1 is used
+    if use_v2:
+        import torchvision.transforms.v2
+        import torchvision.tv_tensors
+        import v2_extras
+
+        return torchvision.transforms.v2, torchvision.tv_tensors, v2_extras
+    else:
+        import transforms
+
+        return transforms, None, None
+
+
+class SegmentationPresetTrain:
+    def __init__(
+        self,
+        *,
+        base_size,
+        crop_size,
+        hflip_prob=0.5,
+        mean=(0.485, 0.456, 0.406),
+        std=(0.229, 0.224, 0.225),
+        backend="pil",
+        use_v2=False,
+    ):
+        T, tv_tensors, v2_extras = get_modules(use_v2)
+
+        transforms = []
+        backend = backend.lower()
+        if backend == "tv_tensor":
+            transforms.append(T.ToImage())
+        elif backend == "tensor":
+            transforms.append(T.PILToTensor())
+        elif backend != "pil":
+            raise ValueError(f"backend can be 'tv_tensor', 'tensor' or 'pil', but got {backend}")
+
+        transforms += [T.RandomResize(min_size=int(0.5 * base_size), max_size=int(2.0 * base_size))]
+
+        if hflip_prob > 0:
+            transforms += 
[T.RandomHorizontalFlip(hflip_prob)] + + if use_v2: + # We need a custom pad transform here, since the padding we want to perform here is fundamentally + # different from the padding in `RandomCrop` if `pad_if_needed=True`. + transforms += [v2_extras.PadIfSmaller(crop_size, fill={tv_tensors.Mask: 255, "others": 0})] + + transforms += [T.RandomCrop(crop_size)] + + if backend == "pil": + transforms += [T.PILToTensor()] + + if use_v2: + img_type = tv_tensors.Image if backend == "tv_tensor" else torch.Tensor + transforms += [ + T.ToDtype(dtype={img_type: torch.float32, tv_tensors.Mask: torch.int64, "others": None}, scale=True) + ] + else: + # No need to explicitly convert masks as they're magically int64 already + transforms += [T.ToDtype(torch.float, scale=True)] + + transforms += [T.Normalize(mean=mean, std=std)] + if use_v2: + transforms += [T.ToPureTensor()] + + self.transforms = T.Compose(transforms) + + def __call__(self, img, target): + return self.transforms(img, target) + + +class SegmentationPresetEval: + def __init__( + self, *, base_size, mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225), backend="pil", use_v2=False + ): + T, _, _ = get_modules(use_v2) + + transforms = [] + backend = backend.lower() + if backend == "tensor": + transforms += [T.PILToTensor()] + elif backend == "tv_tensor": + transforms += [T.ToImage()] + elif backend != "pil": + raise ValueError(f"backend can be 'tv_tensor', 'tensor' or 'pil', but got {backend}") + + if use_v2: + transforms += [T.Resize(size=(base_size, base_size))] + else: + transforms += [T.RandomResize(min_size=base_size, max_size=base_size)] + + if backend == "pil": + # Note: we could just convert to pure tensors even in v2? + transforms += [T.ToImage() if use_v2 else T.PILToTensor()] + + transforms += [ + T.ToDtype(torch.float, scale=True), + T.Normalize(mean=mean, std=std), + ] + if use_v2: + transforms += [T.ToPureTensor()] + + self.transforms = T.Compose(transforms) + + def __call__(self, img, target): + return self.transforms(img, target) diff --git a/references/segmentation/train.py b/references/segmentation/train.py index b1173d5323a..abdc3c6aacb 100644 --- a/references/segmentation/train.py +++ b/references/segmentation/train.py @@ -1,47 +1,56 @@ import datetime import os import time +import warnings +import presets import torch import torch.utils.data -from torch import nn import torchvision - -from coco_utils import get_coco -import transforms as T import utils +from coco_utils import get_coco +from torch import nn +from torch.optim.lr_scheduler import PolynomialLR +from torchvision.transforms import functional as F, InterpolationMode -def get_dataset(name, image_set, transform): +def get_dataset(args, is_train): def sbd(*args, **kwargs): - return torchvision.datasets.SBDataset(*args, mode='segmentation', **kwargs) + kwargs.pop("use_v2") + return torchvision.datasets.SBDataset(*args, mode="segmentation", **kwargs) + + def voc(*args, **kwargs): + kwargs.pop("use_v2") + return torchvision.datasets.VOCSegmentation(*args, **kwargs) + paths = { - "voc": ('/datasets01/VOC/060817/', torchvision.datasets.VOCSegmentation, 21), - "voc_aug": ('/datasets01/SBDD/072318/', sbd, 21), - "coco": ('/datasets01/COCO/022719/', get_coco, 21) + "voc": (args.data_path, voc, 21), + "voc_aug": (args.data_path, sbd, 21), + "coco": (args.data_path, get_coco, 21), } - p, ds_fn, num_classes = paths[name] + p, ds_fn, num_classes = paths[args.dataset] - ds = ds_fn(p, image_set=image_set, transforms=transform) + image_set = "train" if is_train else "val" + ds = 
ds_fn(p, image_set=image_set, transforms=get_transform(is_train, args), use_v2=args.use_v2) return ds, num_classes -def get_transform(train): - base_size = 520 - crop_size = 480 +def get_transform(is_train, args): + if is_train: + return presets.SegmentationPresetTrain(base_size=520, crop_size=480, backend=args.backend, use_v2=args.use_v2) + elif args.weights and args.test_only: + weights = torchvision.models.get_weight(args.weights) + trans = weights.transforms() - min_size = int((0.5 if train else 1.0) * base_size) - max_size = int((2.0 if train else 1.0) * base_size) - transforms = [] - transforms.append(T.RandomResize(min_size, max_size)) - if train: - transforms.append(T.RandomHorizontalFlip(0.5)) - transforms.append(T.RandomCrop(crop_size)) - transforms.append(T.ToTensor()) - transforms.append(T.Normalize(mean=[0.485, 0.456, 0.406], - std=[0.229, 0.224, 0.225])) + def preprocessing(img, target): + img = trans(img) + size = F.get_dimensions(img)[1:] + target = F.resize(target, size, interpolation=InterpolationMode.NEAREST) + return img, F.pil_to_tensor(target) - return T.Compose(transforms) + return preprocessing + else: + return presets.SegmentationPresetEval(base_size=520, backend=args.backend, use_v2=args.use_v2) def criterion(inputs, target): @@ -50,42 +59,66 @@ def criterion(inputs, target): losses[name] = nn.functional.cross_entropy(x, target, ignore_index=255) if len(losses) == 1: - return losses['out'] + return losses["out"] - return losses['out'] + 0.5 * losses['aux'] + return losses["out"] + 0.5 * losses["aux"] def evaluate(model, data_loader, device, num_classes): model.eval() confmat = utils.ConfusionMatrix(num_classes) metric_logger = utils.MetricLogger(delimiter=" ") - header = 'Test:' - with torch.no_grad(): + header = "Test:" + num_processed_samples = 0 + with torch.inference_mode(): for image, target in metric_logger.log_every(data_loader, 100, header): image, target = image.to(device), target.to(device) output = model(image) - output = output['out'] + output = output["out"] confmat.update(target.flatten(), output.argmax(1).flatten()) + # FIXME need to take into account that the datasets + # could have been padded in distributed setup + num_processed_samples += image.shape[0] confmat.reduce_from_all_processes() + num_processed_samples = utils.reduce_across_processes(num_processed_samples) + if ( + hasattr(data_loader.dataset, "__len__") + and len(data_loader.dataset) != num_processed_samples + and torch.distributed.get_rank() == 0 + ): + # See FIXME above + warnings.warn( + f"It looks like the dataset has {len(data_loader.dataset)} samples, but {num_processed_samples} " + "samples were used for the validation, which might bias the results. " + "Try adjusting the batch size and / or the world size. " + "Setting the world size to 1 is always a safe bet." 
+ ) + + return confmat -def train_one_epoch(model, criterion, optimizer, data_loader, lr_scheduler, device, epoch, print_freq): +def train_one_epoch(model, criterion, optimizer, data_loader, lr_scheduler, device, epoch, print_freq, scaler=None): model.train() metric_logger = utils.MetricLogger(delimiter=" ") - metric_logger.add_meter('lr', utils.SmoothedValue(window_size=1, fmt='{value}')) - header = 'Epoch: [{}]'.format(epoch) + metric_logger.add_meter("lr", utils.SmoothedValue(window_size=1, fmt="{value}")) + header = f"Epoch: [{epoch}]" for image, target in metric_logger.log_every(data_loader, print_freq, header): image, target = image.to(device), target.to(device) - output = model(image) - loss = criterion(output, target) + with torch.cuda.amp.autocast(enabled=scaler is not None): + output = model(image) + loss = criterion(output, target) optimizer.zero_grad() - loss.backward() - optimizer.step() + if scaler is not None: + scaler.scale(loss).backward() + scaler.step(optimizer) + scaler.update() + else: + loss.backward() + optimizer.step() lr_scheduler.step() @@ -93,6 +126,12 @@ def train_one_epoch(model, criterion, optimizer, data_loader, lr_scheduler, devi def main(args): + if args.backend.lower() != "pil" and not args.use_v2: + # TODO: Support tensor backend in V1? + raise ValueError("Use --use-v2 if you want to use the tv_tensor or tensor backend.") + if args.use_v2 and args.dataset != "coco": + raise ValueError("v2 is only supported for the coco dataset for now.") + if args.output_dir: utils.mkdir(args.output_dir) @@ -101,47 +140,51 @@ def main(args): device = torch.device(args.device) - dataset, num_classes = get_dataset(args.dataset, "train", get_transform(train=True)) - dataset_test, _ = get_dataset(args.dataset, "val", get_transform(train=False)) + if args.use_deterministic_algorithms: + torch.backends.cudnn.benchmark = False + torch.use_deterministic_algorithms(True) + else: + torch.backends.cudnn.benchmark = True + + dataset, num_classes = get_dataset(args, is_train=True) + dataset_test, _ = get_dataset(args, is_train=False) if args.distributed: train_sampler = torch.utils.data.distributed.DistributedSampler(dataset) - test_sampler = torch.utils.data.distributed.DistributedSampler(dataset_test) + test_sampler = torch.utils.data.distributed.DistributedSampler(dataset_test, shuffle=False) else: train_sampler = torch.utils.data.RandomSampler(dataset) test_sampler = torch.utils.data.SequentialSampler(dataset_test) data_loader = torch.utils.data.DataLoader( - dataset, batch_size=args.batch_size, - sampler=train_sampler, num_workers=args.workers, - collate_fn=utils.collate_fn, drop_last=True) + dataset, + batch_size=args.batch_size, + sampler=train_sampler, + num_workers=args.workers, + collate_fn=utils.collate_fn, + drop_last=True, + ) data_loader_test = torch.utils.data.DataLoader( - dataset_test, batch_size=1, - sampler=test_sampler, num_workers=args.workers, - collate_fn=utils.collate_fn) + dataset_test, batch_size=1, sampler=test_sampler, num_workers=args.workers, collate_fn=utils.collate_fn + ) - model = torchvision.models.segmentation.__dict__[args.model](num_classes=num_classes, - aux_loss=args.aux_loss, - pretrained=args.pretrained) + model = torchvision.models.get_model( + args.model, + weights=args.weights, + weights_backbone=args.weights_backbone, + num_classes=num_classes, + aux_loss=args.aux_loss, + ) model.to(device) if args.distributed: model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model) - if args.resume: - checkpoint = torch.load(args.resume,
map_location='cpu') - model.load_state_dict(checkpoint['model']) - model_without_ddp = model if args.distributed: model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu]) model_without_ddp = model.module - if args.test_only: - confmat = evaluate(model, data_loader_test, device=device, num_classes=num_classes) - print(confmat) - return - params_to_optimize = [ {"params": [p for p in model_without_ddp.backbone.parameters() if p.requires_grad]}, {"params": [p for p in model_without_ddp.classifier.parameters() if p.requires_grad]}, @@ -149,58 +192,114 @@ def main(args): if args.aux_loss: params = [p for p in model_without_ddp.aux_classifier.parameters() if p.requires_grad] params_to_optimize.append({"params": params, "lr": args.lr * 10}) - optimizer = torch.optim.SGD( - params_to_optimize, - lr=args.lr, momentum=args.momentum, weight_decay=args.weight_decay) + optimizer = torch.optim.SGD(params_to_optimize, lr=args.lr, momentum=args.momentum, weight_decay=args.weight_decay) - lr_scheduler = torch.optim.lr_scheduler.LambdaLR( - optimizer, - lambda x: (1 - x / (len(data_loader) * args.epochs)) ** 0.9) + scaler = torch.cuda.amp.GradScaler() if args.amp else None + + iters_per_epoch = len(data_loader) + main_lr_scheduler = PolynomialLR( + optimizer, total_iters=iters_per_epoch * (args.epochs - args.lr_warmup_epochs), power=0.9 + ) + + if args.lr_warmup_epochs > 0: + warmup_iters = iters_per_epoch * args.lr_warmup_epochs + args.lr_warmup_method = args.lr_warmup_method.lower() + if args.lr_warmup_method == "linear": + warmup_lr_scheduler = torch.optim.lr_scheduler.LinearLR( + optimizer, start_factor=args.lr_warmup_decay, total_iters=warmup_iters + ) + elif args.lr_warmup_method == "constant": + warmup_lr_scheduler = torch.optim.lr_scheduler.ConstantLR( + optimizer, factor=args.lr_warmup_decay, total_iters=warmup_iters + ) + else: + raise RuntimeError( + f"Invalid warmup lr method '{args.lr_warmup_method}'. Only linear and constant are supported." 
+ ) + lr_scheduler = torch.optim.lr_scheduler.SequentialLR( + optimizer, schedulers=[warmup_lr_scheduler, main_lr_scheduler], milestones=[warmup_iters] + ) + else: + lr_scheduler = main_lr_scheduler + + if args.resume: + checkpoint = torch.load(args.resume, map_location="cpu", weights_only=True) + model_without_ddp.load_state_dict(checkpoint["model"], strict=not args.test_only) + if not args.test_only: + optimizer.load_state_dict(checkpoint["optimizer"]) + lr_scheduler.load_state_dict(checkpoint["lr_scheduler"]) + args.start_epoch = checkpoint["epoch"] + 1 + if args.amp: + scaler.load_state_dict(checkpoint["scaler"]) + + if args.test_only: + # We disable the cudnn benchmarking because it can noticeably affect the accuracy + torch.backends.cudnn.benchmark = False + torch.backends.cudnn.deterministic = True + confmat = evaluate(model, data_loader_test, device=device, num_classes=num_classes) + print(confmat) + return start_time = time.time() - for epoch in range(args.epochs): + for epoch in range(args.start_epoch, args.epochs): if args.distributed: train_sampler.set_epoch(epoch) - train_one_epoch(model, criterion, optimizer, data_loader, lr_scheduler, device, epoch, args.print_freq) + train_one_epoch(model, criterion, optimizer, data_loader, lr_scheduler, device, epoch, args.print_freq, scaler) confmat = evaluate(model, data_loader_test, device=device, num_classes=num_classes) print(confmat) - utils.save_on_master( - { - 'model': model_without_ddp.state_dict(), - 'optimizer': optimizer.state_dict(), - 'epoch': epoch, - 'args': args - }, - os.path.join(args.output_dir, 'model_{}.pth'.format(epoch))) + checkpoint = { + "model": model_without_ddp.state_dict(), + "optimizer": optimizer.state_dict(), + "lr_scheduler": lr_scheduler.state_dict(), + "epoch": epoch, + "args": args, + } + if args.amp: + checkpoint["scaler"] = scaler.state_dict() + utils.save_on_master(checkpoint, os.path.join(args.output_dir, f"model_{epoch}.pth")) + utils.save_on_master(checkpoint, os.path.join(args.output_dir, "checkpoint.pth")) total_time = time.time() - start_time total_time_str = str(datetime.timedelta(seconds=int(total_time))) - print('Training time {}'.format(total_time_str)) + print(f"Training time {total_time_str}") -def parse_args(): +def get_args_parser(add_help=True): import argparse - parser = argparse.ArgumentParser(description='PyTorch Segmentation Training') - - parser.add_argument('--dataset', default='voc', help='dataset') - parser.add_argument('--model', default='fcn_resnet101', help='model') - parser.add_argument('--aux-loss', action='store_true', help='auxiliar loss') - parser.add_argument('--device', default='cuda', help='device') - parser.add_argument('-b', '--batch-size', default=8, type=int) - parser.add_argument('--epochs', default=30, type=int, metavar='N', - help='number of total epochs to run') - - parser.add_argument('-j', '--workers', default=16, type=int, metavar='N', - help='number of data loading workers (default: 16)') - parser.add_argument('--lr', default=0.01, type=float, help='initial learning rate') - parser.add_argument('--momentum', default=0.9, type=float, metavar='M', - help='momentum') - parser.add_argument('--wd', '--weight-decay', default=1e-4, type=float, - metavar='W', help='weight decay (default: 1e-4)', - dest='weight_decay') - parser.add_argument('--print-freq', default=10, type=int, help='print frequency') - parser.add_argument('--output-dir', default='.', help='path where to save') - parser.add_argument('--resume', default='', help='resume from checkpoint') + + 
parser = argparse.ArgumentParser(description="PyTorch Segmentation Training", add_help=add_help) + + parser.add_argument("--data-path", default="/datasets01/COCO/022719/", type=str, help="dataset path") + parser.add_argument("--dataset", default="coco", type=str, help="dataset name") + parser.add_argument("--model", default="fcn_resnet101", type=str, help="model name") + parser.add_argument("--aux-loss", action="store_true", help="auxiliary loss") + parser.add_argument("--device", default="cuda", type=str, help="device (Use cuda or cpu Default: cuda)") + parser.add_argument( + "-b", "--batch-size", default=8, type=int, help="images per gpu, the total batch size is $NGPU x batch_size" + ) + parser.add_argument("--epochs", default=30, type=int, metavar="N", help="number of total epochs to run") + + parser.add_argument( + "-j", "--workers", default=16, type=int, metavar="N", help="number of data loading workers (default: 16)" + ) + parser.add_argument("--lr", default=0.01, type=float, help="initial learning rate") + parser.add_argument("--momentum", default=0.9, type=float, metavar="M", help="momentum") + parser.add_argument( + "--wd", + "--weight-decay", + default=1e-4, + type=float, + metavar="W", + help="weight decay (default: 1e-4)", + dest="weight_decay", + ) + parser.add_argument("--lr-warmup-epochs", default=0, type=int, help="the number of epochs to warmup (default: 0)") + parser.add_argument("--lr-warmup-method", default="linear", type=str, help="the warmup method (default: linear)") + parser.add_argument("--lr-warmup-decay", default=0.01, type=float, help="the decay for lr") + parser.add_argument("--print-freq", default=10, type=int, help="print frequency") + parser.add_argument("--output-dir", default=".", type=str, help="path to save outputs") + parser.add_argument("--resume", default="", type=str, help="path of checkpoint") + parser.add_argument("--start-epoch", default=0, type=int, metavar="N", help="start epoch") parser.add_argument( "--test-only", dest="test_only", @@ -208,20 +307,23 @@ def parse_args(): action="store_true", ) parser.add_argument( - "--pretrained", - dest="pretrained", - help="Use pre-trained models from the modelzoo", - action="store_true", + "--use-deterministic-algorithms", action="store_true", help="Forces the use of deterministic algorithms only." 
) # distributed training parameters - parser.add_argument('--world-size', default=1, type=int, - help='number of distributed processes') - parser.add_argument('--dist-url', default='env://', help='url used to set up distributed training') + parser.add_argument("--world-size", default=1, type=int, help="number of distributed processes") + parser.add_argument("--dist-url", default="env://", type=str, help="url used to set up distributed training") + + parser.add_argument("--weights", default=None, type=str, help="the weights enum name to load") + parser.add_argument("--weights-backbone", default=None, type=str, help="the backbone weights enum name to load") + + # Mixed precision training parameters + parser.add_argument("--amp", action="store_true", help="Use torch.cuda.amp for mixed precision training") - args = parser.parse_args() - return args + parser.add_argument("--backend", default="PIL", type=str.lower, help="PIL, tensor or tv_tensor - case insensitive") + parser.add_argument("--use-v2", action="store_true", help="Use V2 transforms") + return parser if __name__ == "__main__": - args = parse_args() + args = get_args_parser().parse_args() main(args) diff --git a/references/segmentation/transforms.py b/references/segmentation/transforms.py index bce4bfbe639..6934b9f862e 100644 --- a/references/segmentation/transforms.py +++ b/references/segmentation/transforms.py @@ -1,7 +1,6 @@ -import numpy as np -from PIL import Image import random +import numpy as np import torch from torchvision import transforms as T from torchvision.transforms import functional as F @@ -17,7 +16,7 @@ def pad_if_smaller(img, size, fill=0): return img -class Compose(object): +class Compose: def __init__(self, transforms): self.transforms = transforms @@ -27,7 +26,7 @@ def __call__(self, image, target): return image, target -class RandomResize(object): +class RandomResize: def __init__(self, min_size, max_size=None): self.min_size = min_size if max_size is None: @@ -36,12 +35,12 @@ def __init__(self, min_size, max_size=None): def __call__(self, image, target): size = random.randint(self.min_size, self.max_size) - image = F.resize(image, size) - target = F.resize(target, size, interpolation=Image.NEAREST) + image = F.resize(image, size, antialias=True) + target = F.resize(target, size, interpolation=T.InterpolationMode.NEAREST) return image, target -class RandomHorizontalFlip(object): +class RandomHorizontalFlip: def __init__(self, flip_prob): self.flip_prob = flip_prob @@ -52,7 +51,7 @@ def __call__(self, image, target): return image, target -class RandomCrop(object): +class RandomCrop: def __init__(self, size): self.size = size @@ -65,7 +64,7 @@ def __call__(self, image, target): return image, target -class CenterCrop(object): +class CenterCrop: def __init__(self, size): self.size = size @@ -75,14 +74,26 @@ def __call__(self, image, target): return image, target -class ToTensor(object): +class PILToTensor: + def __call__(self, image, target): + image = F.pil_to_tensor(image) + target = torch.as_tensor(np.array(target), dtype=torch.int64) + return image, target + + +class ToDtype: + def __init__(self, dtype, scale=False): + self.dtype = dtype + self.scale = scale + def __call__(self, image, target): - image = F.to_tensor(image) - target = torch.as_tensor(np.asarray(target), dtype=torch.int64) + if not self.scale: + return image.to(dtype=self.dtype), target + image = F.convert_image_dtype(image, self.dtype) return image, target -class Normalize(object): +class Normalize: def __init__(self, mean, std): self.mean = mean self.std
= std diff --git a/references/segmentation/utils.py b/references/segmentation/utils.py index 2719996c808..92db1899851 100644 --- a/references/segmentation/utils.py +++ b/references/segmentation/utils.py @@ -1,16 +1,14 @@ -from __future__ import print_function -from collections import defaultdict, deque import datetime -import math +import errno +import os import time +from collections import defaultdict, deque + import torch import torch.distributed as dist -import errno -import os - -class SmoothedValue(object): +class SmoothedValue: """Track a series of values and provide access to smoothed values over a window or the global series average. """ @@ -32,11 +30,7 @@ def synchronize_between_processes(self): """ Warning: does not synchronize the deque! """ - if not is_dist_avail_and_initialized(): - return - t = torch.tensor([self.count, self.total], dtype=torch.float64, device='cuda') - dist.barrier() - dist.all_reduce(t) + t = reduce_across_processes([self.count, self.total]) t = t.tolist() self.count = int(t[0]) self.total = t[1] @@ -65,14 +59,11 @@ def value(self): def __str__(self): return self.fmt.format( - median=self.median, - avg=self.avg, - global_avg=self.global_avg, - max=self.max, - value=self.value) + median=self.median, avg=self.avg, global_avg=self.global_avg, max=self.max, value=self.value + ) -class ConfusionMatrix(object): +class ConfusionMatrix: def __init__(self, num_classes): self.num_classes = num_classes self.mat = None @@ -81,7 +72,7 @@ def update(self, a, b): n = self.num_classes if self.mat is None: self.mat = torch.zeros((n, n), dtype=torch.int64, device=a.device) - with torch.no_grad(): + with torch.inference_mode(): k = (a >= 0) & (a < n) inds = n * a[k].to(torch.int64) + b[k] self.mat += torch.bincount(inds, minlength=n**2).reshape(n, n) @@ -97,27 +88,19 @@ def compute(self): return acc_global, acc, iu def reduce_from_all_processes(self): - if not torch.distributed.is_available(): - return - if not torch.distributed.is_initialized(): - return - torch.distributed.barrier() - torch.distributed.all_reduce(self.mat) + self.mat = reduce_across_processes(self.mat).to(torch.int64) def __str__(self): acc_global, acc, iu = self.compute() - return ( - 'global correct: {:.1f}\n' - 'average row correct: {}\n' - 'IoU: {}\n' - 'mean IoU: {:.1f}').format( - acc_global.item() * 100, - ['{:.1f}'.format(i) for i in (acc * 100).tolist()], - ['{:.1f}'.format(i) for i in (iu * 100).tolist()], - iu.mean().item() * 100) - - -class MetricLogger(object): + return ("global correct: {:.1f}\naverage row correct: {}\nIoU: {}\nmean IoU: {:.1f}").format( + acc_global.item() * 100, + [f"{i:.1f}" for i in (acc * 100).tolist()], + [f"{i:.1f}" for i in (iu * 100).tolist()], + iu.mean().item() * 100, + ) + + +class MetricLogger: def __init__(self, delimiter="\t"): self.meters = defaultdict(SmoothedValue) self.delimiter = delimiter @@ -126,7 +109,10 @@ def update(self, **kwargs): for k, v in kwargs.items(): if isinstance(v, torch.Tensor): v = v.item() - assert isinstance(v, (float, int)) + if not isinstance(v, (float, int)): + raise TypeError( + f"This method expects the value of the input arguments to be of type float or int, instead got {type(v)}" + ) self.meters[k].update(v) def __getattr__(self, attr): @@ -134,15 +120,12 @@ def __getattr__(self, attr): return self.meters[attr] if attr in self.__dict__: return self.__dict__[attr] - raise AttributeError("'{}' object has no attribute '{}'".format( - type(self).__name__, attr)) + raise AttributeError(f"'{type(self).__name__}' object has no 
attribute '{attr}'") def __str__(self): loss_str = [] for name, meter in self.meters.items(): - loss_str.append( - "{}: {}".format(name, str(meter)) - ) + loss_str.append(f"{name}: {str(meter)}") return self.delimiter.join(loss_str) def synchronize_between_processes(self): @@ -155,31 +138,28 @@ def add_meter(self, name, meter): def log_every(self, iterable, print_freq, header=None): i = 0 if not header: - header = '' + header = "" start_time = time.time() end = time.time() - iter_time = SmoothedValue(fmt='{avg:.4f}') - data_time = SmoothedValue(fmt='{avg:.4f}') - space_fmt = ':' + str(len(str(len(iterable)))) + 'd' + iter_time = SmoothedValue(fmt="{avg:.4f}") + data_time = SmoothedValue(fmt="{avg:.4f}") + space_fmt = ":" + str(len(str(len(iterable)))) + "d" if torch.cuda.is_available(): - log_msg = self.delimiter.join([ - header, - '[{0' + space_fmt + '}/{1}]', - 'eta: {eta}', - '{meters}', - 'time: {time}', - 'data: {data}', - 'max mem: {memory:.0f}' - ]) + log_msg = self.delimiter.join( + [ + header, + "[{0" + space_fmt + "}/{1}]", + "eta: {eta}", + "{meters}", + "time: {time}", + "data: {data}", + "max mem: {memory:.0f}", + ] + ) else: - log_msg = self.delimiter.join([ - header, - '[{0' + space_fmt + '}/{1}]', - 'eta: {eta}', - '{meters}', - 'time: {time}', - 'data: {data}' - ]) + log_msg = self.delimiter.join( + [header, "[{0" + space_fmt + "}/{1}]", "eta: {eta}", "{meters}", "time: {time}", "data: {data}"] + ) MB = 1024.0 * 1024.0 for obj in iterable: data_time.update(time.time() - end) @@ -189,21 +169,28 @@ def log_every(self, iterable, print_freq, header=None): eta_seconds = iter_time.global_avg * (len(iterable) - i) eta_string = str(datetime.timedelta(seconds=int(eta_seconds))) if torch.cuda.is_available(): - print(log_msg.format( - i, len(iterable), eta=eta_string, - meters=str(self), - time=str(iter_time), data=str(data_time), - memory=torch.cuda.max_memory_allocated() / MB)) + print( + log_msg.format( + i, + len(iterable), + eta=eta_string, + meters=str(self), + time=str(iter_time), + data=str(data_time), + memory=torch.cuda.max_memory_allocated() / MB, + ) + ) else: - print(log_msg.format( - i, len(iterable), eta=eta_string, - meters=str(self), - time=str(iter_time), data=str(data_time))) + print( + log_msg.format( + i, len(iterable), eta=eta_string, meters=str(self), time=str(iter_time), data=str(data_time) + ) + ) i += 1 end = time.time() total_time = time.time() - start_time total_time_str = str(datetime.timedelta(seconds=int(total_time))) - print('{} Total time: {}'.format(header, total_time_str)) + print(f"{header} Total time: {total_time_str}") def cat_list(images, fill_value=0): @@ -211,7 +198,7 @@ def cat_list(images, fill_value=0): batch_shape = (len(images),) + max_size batched_imgs = images[0].new(*batch_shape).fill_(fill_value) for img, pad_img in zip(images, batched_imgs): - pad_img[..., :img.shape[-2], :img.shape[-1]].copy_(img) + pad_img[..., : img.shape[-2], : img.shape[-1]].copy_(img) return batched_imgs @@ -235,10 +222,11 @@ def setup_for_distributed(is_master): This function disables printing when not in master process """ import builtins as __builtin__ + builtin_print = __builtin__.print def print(*args, **kwargs): - force = kwargs.pop('force', False) + force = kwargs.pop("force", False) if is_master or force: builtin_print(*args, **kwargs) @@ -275,26 +263,38 @@ def save_on_master(*args, **kwargs): def init_distributed_mode(args): - if 'RANK' in os.environ and 'WORLD_SIZE' in os.environ: + if "RANK" in os.environ and "WORLD_SIZE" in os.environ: args.rank = 
int(os.environ["RANK"]) - args.world_size = int(os.environ['WORLD_SIZE']) - args.gpu = int(os.environ['LOCAL_RANK']) - elif 'SLURM_PROCID' in os.environ: - args.rank = int(os.environ['SLURM_PROCID']) - args.gpu = args.rank % torch.cuda.device_count() + args.world_size = int(os.environ["WORLD_SIZE"]) + args.gpu = int(os.environ["LOCAL_RANK"]) + # elif "SLURM_PROCID" in os.environ: + # args.rank = int(os.environ["SLURM_PROCID"]) + # args.gpu = args.rank % torch.cuda.device_count() elif hasattr(args, "rank"): pass else: - print('Not using distributed mode') + print("Not using distributed mode") args.distributed = False return args.distributed = True torch.cuda.set_device(args.gpu) - args.dist_backend = 'nccl' - print('| distributed init (rank {}): {}'.format( - args.rank, args.dist_url), flush=True) - torch.distributed.init_process_group(backend=args.dist_backend, init_method=args.dist_url, - world_size=args.world_size, rank=args.rank) + args.dist_backend = "nccl" + print(f"| distributed init (rank {args.rank}): {args.dist_url}", flush=True) + torch.distributed.init_process_group( + backend=args.dist_backend, init_method=args.dist_url, world_size=args.world_size, rank=args.rank + ) + torch.distributed.barrier() setup_for_distributed(args.rank == 0) + + +def reduce_across_processes(val): + if not is_dist_avail_and_initialized(): + # nothing to sync, but we still convert to tensor for consistency with the distributed case. + return torch.tensor(val) + + t = torch.tensor(val, device="cuda") + dist.barrier() + dist.all_reduce(t) + return t diff --git a/references/segmentation/v2_extras.py b/references/segmentation/v2_extras.py new file mode 100644 index 00000000000..2d9eb3e661a --- /dev/null +++ b/references/segmentation/v2_extras.py @@ -0,0 +1,83 @@ +"""This file only exists to be lazy-imported and avoid V2-related import warnings when just using V1.""" +import torch +from torchvision import tv_tensors +from torchvision.transforms import v2 + + +class PadIfSmaller(v2.Transform): + def __init__(self, size, fill=0): + super().__init__() + self.size = size + self.fill = v2._utils._setup_fill_arg(fill) + + def make_params(self, sample): + _, height, width = v2._utils.query_chw(sample) + padding = [0, 0, max(self.size - width, 0), max(self.size - height, 0)] + needs_padding = any(padding) + return dict(padding=padding, needs_padding=needs_padding) + + def transform(self, inpt, params): + if not params["needs_padding"]: + return inpt + + fill = v2._utils._get_fill(self.fill, type(inpt)) + fill = v2._utils._convert_fill_arg(fill) + + return v2.functional.pad(inpt, padding=params["padding"], fill=fill) + + +class CocoDetectionToVOCSegmentation(v2.Transform): + """Turn samples from datasets.CocoDetection into the same format as VOCSegmentation. + + This is achieved in two steps: + + 1. COCO differentiates between 91 categories while VOC only supports 21, including background for both. Fortunately, + the COCO categories are a superset of the VOC ones and thus can be mapped. Instances of the 70 categories not + present in VOC are dropped and replaced by background. + 2. COCO only offers detection masks, i.e. a (N, H, W) bool-ish tensor, where the truthy values in each individual + mask denote the instance. However, a segmentation mask is a (H, W) integer tensor (typically torch.uint8), where + the value of each pixel denotes the category it belongs to. The detection masks are merged into one segmentation + mask while pixels that belong to multiple detection masks are marked as invalid. 
+ """ + + COCO_TO_VOC_LABEL_MAP = dict( + zip( + [0, 5, 2, 16, 9, 44, 6, 3, 17, 62, 21, 67, 18, 19, 4, 1, 64, 20, 63, 7, 72], + range(21), + ) + ) + INVALID_VALUE = 255 + + def _coco_detection_masks_to_voc_segmentation_mask(self, target): + if "masks" not in target: + return None + + instance_masks, instance_labels_coco = target["masks"], target["labels"] + + valid_labels_voc = [ + (idx, label_voc) + for idx, label_coco in enumerate(instance_labels_coco.tolist()) + if (label_voc := self.COCO_TO_VOC_LABEL_MAP.get(label_coco)) is not None + ] + + if not valid_labels_voc: + return None + + valid_voc_category_idcs, instance_labels_voc = zip(*valid_labels_voc) + + instance_masks = instance_masks[list(valid_voc_category_idcs)].to(torch.uint8) + instance_labels_voc = torch.tensor(instance_labels_voc, dtype=torch.uint8) + + # Calling `.max()` on the stacked detection masks works fine to separate background from foreground as long as + # there is at most a single instance per pixel. Overlapping instances will be filtered out in the next step. + segmentation_mask, _ = (instance_masks * instance_labels_voc.reshape(-1, 1, 1)).max(dim=0) + segmentation_mask[instance_masks.sum(dim=0) > 1] = self.INVALID_VALUE + + return segmentation_mask + + def forward(self, image, target): + segmentation_mask = self._coco_detection_masks_to_voc_segmentation_mask(target) + if segmentation_mask is None: + segmentation_mask = torch.zeros(v2.functional.get_size(image), dtype=torch.uint8) + + return image, tv_tensors.Mask(segmentation_mask) diff --git a/references/similarity/loss.py b/references/similarity/loss.py index 3e467b74c52..971810a0663 100644 --- a/references/similarity/loss.py +++ b/references/similarity/loss.py @@ -1,21 +1,21 @@ -''' +""" Pytorch adaptation of https://omoindrot.github.io/triplet-loss https://github.com/omoindrot/tensorflow-triplet-loss -''' +""" import torch import torch.nn as nn class TripletMarginLoss(nn.Module): - def __init__(self, margin=1.0, p=2., mining='batch_all'): - super(TripletMarginLoss, self).__init__() + def __init__(self, margin=1.0, p=2.0, mining="batch_all"): + super().__init__() self.margin = margin self.p = p self.mining = mining - if mining == 'batch_all': + if mining == "batch_all": self.loss_fn = batch_all_triplet_loss - if mining == 'batch_hard': + if mining == "batch_hard": self.loss_fn = batch_hard_triplet_loss def forward(self, embeddings, labels): @@ -77,7 +77,7 @@ def batch_all_triplet_loss(labels, embeddings, margin, p): def _get_triplet_mask(labels): # Check that i, j and k are distinct - indices_equal = torch.eye(labels.size(0), dtype=torch.uint8, device=labels.device) + indices_equal = torch.eye(labels.size(0), dtype=torch.bool, device=labels.device) indices_not_equal = ~indices_equal i_not_equal_j = indices_not_equal.unsqueeze(2) i_not_equal_k = indices_not_equal.unsqueeze(1) @@ -96,7 +96,7 @@ def _get_triplet_mask(labels): def _get_anchor_positive_triplet_mask(labels): # Check that i and j are distinct - indices_equal = torch.eye(labels.size(0), dtype=torch.uint8, device=labels.device) + indices_equal = torch.eye(labels.size(0), dtype=torch.bool, device=labels.device) indices_not_equal = ~indices_equal # Check if labels[i] == labels[j] diff --git a/references/similarity/model.py b/references/similarity/model.py index 797ad41a48b..f235ae11116 100644 --- a/references/similarity/model.py +++ b/references/similarity/model.py @@ -1,11 +1,10 @@ -import torch import torch.nn as nn import torchvision.models as models class EmbeddingNet(nn.Module): def __init__(self, 
backbone=None): - super(EmbeddingNet, self).__init__() + super().__init__() if backbone is None: backbone = models.resnet50(num_classes=128) diff --git a/references/similarity/sampler.py b/references/similarity/sampler.py index 0ae6d07a77c..fe6517418ab 100644 --- a/references/similarity/sampler.py +++ b/references/similarity/sampler.py @@ -1,7 +1,8 @@ +import random +from collections import defaultdict + import torch from torch.utils.data.sampler import Sampler -from collections import defaultdict -import random def create_groups(groups, k): @@ -46,7 +47,8 @@ def __init__(self, groups, p, k): self.groups = create_groups(groups, self.k) # Ensures there are enough classes to sample from - assert len(self.groups) >= p + if len(self.groups) < p: + raise ValueError("There are not enough classes to sample from") def __iter__(self): # Shuffle samples within groups diff --git a/references/similarity/test.py b/references/similarity/test.py index a1e646111c8..3b9848594b6 100644 --- a/references/similarity/test.py +++ b/references/similarity/test.py @@ -1,15 +1,14 @@ import unittest from collections import defaultdict -from torch.utils.data import DataLoader -from torchvision.datasets import FakeData +import torch import torchvision.transforms as transforms - from sampler import PKSampler +from torch.utils.data import DataLoader +from torchvision.datasets import FakeData class Tester(unittest.TestCase): - def test_pksampler(self): p, k = 16, 4 @@ -19,24 +18,29 @@ def test_pksampler(self): self.assertRaises(AssertionError, PKSampler, targets, p, k) # Ensure p, k constraints on batch - dataset = FakeData(size=1000, num_classes=100, image_size=(3, 1, 1), - transform=transforms.ToTensor()) + trans = transforms.Compose( + [ + transforms.PILToTensor(), + transforms.ConvertImageDtype(torch.float), + ] + ) + dataset = FakeData(size=1000, num_classes=100, image_size=(3, 1, 1), transform=trans) targets = [target.item() for _, target in dataset] sampler = PKSampler(targets, p, k) loader = DataLoader(dataset, batch_size=p * k, sampler=sampler) for _, labels in loader: bins = defaultdict(int) - for l in labels.tolist(): - bins[l] += 1 + for label in labels.tolist(): + bins[label] += 1 # Ensure that each batch has samples from exactly p classes self.assertEqual(len(bins), p) # Ensure that there are k samples from each class - for l in bins: - self.assertEqual(bins[l], k) + for b in bins: + self.assertEqual(bins[b], k) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/references/similarity/train.py b/references/similarity/train.py index 9a166a14b38..7686729927e 100644 --- a/references/similarity/train.py +++ b/references/similarity/train.py @@ -1,15 +1,13 @@ import os import torch -from torch.optim import Adam -from torch.utils.data import DataLoader - import torchvision.transforms as transforms -from torchvision.datasets import FashionMNIST - from loss import TripletMarginLoss -from sampler import PKSampler from model import EmbeddingNet +from sampler import PKSampler +from torch.optim import Adam +from torch.utils.data import DataLoader +from torchvision.datasets import FashionMNIST def train_epoch(model, optimizer, criterion, data_loader, device, epoch, print_freq): @@ -33,7 +31,7 @@ def train_epoch(model, optimizer, criterion, data_loader, device, epoch, print_f i += 1 avg_loss = running_loss / print_freq avg_trip = 100.0 * running_frac_pos_triplets / print_freq - print('[{:d}, {:d}] | loss: {:.4f} | % avg hard triplets: {:.2f}%'.format(epoch, i, avg_loss, avg_trip)) + 
print(f"[{epoch:d}, {i:d}] | loss: {avg_loss:.4f} | % avg hard triplets: {avg_trip:.2f}%") running_loss = 0 running_frac_pos_triplets = 0 @@ -53,7 +51,7 @@ def find_best_threshold(dists, targets, device): return best_thresh, accuracy -@torch.no_grad() +@torch.inference_mode() def evaluate(model, loader, device): model.eval() embeds, labels = [], [] @@ -79,33 +77,45 @@ def evaluate(model, loader, device): threshold, accuracy = find_best_threshold(dists, targets, device) - print('accuracy: {:.3f}%, threshold: {:.2f}'.format(accuracy, threshold)) + print(f"accuracy: {accuracy:.3f}%, threshold: {threshold:.2f}") def save(model, epoch, save_dir, file_name): - file_name = 'epoch_' + str(epoch) + '__' + file_name + file_name = "epoch_" + str(epoch) + "__" + file_name save_path = os.path.join(save_dir, file_name) torch.save(model.state_dict(), save_path) def main(args): - device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu') + device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") + + if args.use_deterministic_algorithms: + torch.backends.cudnn.benchmark = False + torch.use_deterministic_algorithms(True) + else: + torch.backends.cudnn.benchmark = True + p = args.labels_per_batch k = args.samples_per_label batch_size = p * k model = EmbeddingNet() if args.resume: - model.load_state_dict(torch.load(args.resume)) + model.load_state_dict(torch.load(args.resume, weights_only=True)) model.to(device) criterion = TripletMarginLoss(margin=args.margin) optimizer = Adam(model.parameters(), lr=args.lr) - transform = transforms.Compose([transforms.Lambda(lambda image: image.convert('RGB')), - transforms.Resize((224, 224)), - transforms.ToTensor()]) + transform = transforms.Compose( + [ + transforms.Lambda(lambda image: image.convert("RGB")), + transforms.Resize((224, 224)), + transforms.PILToTensor(), + transforms.ConvertImageDtype(torch.float), + ] + ) # Using FMNIST to demonstrate embedding learning using triplet loss. This dataset can # be replaced with any classification dataset. @@ -118,48 +128,60 @@ def main(args): # targets attribute with the same format. 
targets = train_dataset.targets.tolist() - train_loader = DataLoader(train_dataset, batch_size=batch_size, - sampler=PKSampler(targets, p, k), - num_workers=args.workers) - test_loader = DataLoader(test_dataset, batch_size=args.eval_batch_size, - shuffle=False, - num_workers=args.workers) + train_loader = DataLoader( + train_dataset, batch_size=batch_size, sampler=PKSampler(targets, p, k), num_workers=args.workers + ) + test_loader = DataLoader(test_dataset, batch_size=args.eval_batch_size, shuffle=False, num_workers=args.workers) + + if args.test_only: + # We disable the cudnn benchmarking because it can noticeably affect the accuracy + torch.backends.cudnn.benchmark = False + torch.backends.cudnn.deterministic = True + evaluate(model, test_loader, device) + return for epoch in range(1, args.epochs + 1): - print('Training...') + print("Training...") train_epoch(model, optimizer, criterion, train_loader, device, epoch, args.print_freq) - print('Evaluating...') + print("Evaluating...") evaluate(model, test_loader, device) - print('Saving...') - save(model, epoch, args.save_dir, 'ckpt.pth') + print("Saving...") + save(model, epoch, args.save_dir, "ckpt.pth") def parse_args(): import argparse - parser = argparse.ArgumentParser(description='PyTorch Embedding Learning') - - parser.add_argument('--dataset-dir', default='/tmp/fmnist/', - help='FashionMNIST dataset directory path') - parser.add_argument('-p', '--labels-per-batch', default=8, type=int, - help='Number of unique labels/classes per batch') - parser.add_argument('-k', '--samples-per-label', default=8, type=int, - help='Number of samples per label in a batch') - parser.add_argument('--eval-batch-size', default=512, type=int) - parser.add_argument('--epochs', default=10, type=int, metavar='N', - help='Number of training epochs to run') - parser.add_argument('-j', '--workers', default=4, type=int, metavar='N', - help='Number of data loading workers') - parser.add_argument('--lr', default=0.0001, type=float, help='Learning rate') - parser.add_argument('--margin', default=0.2, type=float, help='Triplet loss margin') - parser.add_argument('--print-freq', default=20, type=int, help='Print frequency') - parser.add_argument('--save-dir', default='.', help='Model save directory') - parser.add_argument('--resume', default='', help='Resume from checkpoint') + + parser = argparse.ArgumentParser(description="PyTorch Embedding Learning") + + parser.add_argument("--dataset-dir", default="/tmp/fmnist/", type=str, help="FashionMNIST dataset directory path") + parser.add_argument( + "-p", "--labels-per-batch", default=8, type=int, help="Number of unique labels/classes per batch" + ) + parser.add_argument("-k", "--samples-per-label", default=8, type=int, help="Number of samples per label in a batch") + parser.add_argument("--eval-batch-size", default=512, type=int, help="batch size for evaluation") + parser.add_argument("--epochs", default=10, type=int, metavar="N", help="number of total epochs to run") + parser.add_argument("-j", "--workers", default=4, type=int, metavar="N", help="number of data loading workers") + parser.add_argument("--lr", default=0.0001, type=float, help="initial learning rate") + parser.add_argument("--margin", default=0.2, type=float, help="Triplet loss margin") + parser.add_argument("--print-freq", default=20, type=int, help="print frequency") + parser.add_argument("--save-dir", default=".", type=str, help="Model save directory") + parser.add_argument("--resume", default="", type=str, help="path of checkpoint") + 
parser.add_argument( + "--test-only", + dest="test_only", + help="Only test the model", + action="store_true", + ) + parser.add_argument( + "--use-deterministic-algorithms", action="store_true", help="Forces the use of deterministic algorithms only." + ) return parser.parse_args() -if __name__ == '__main__': +if __name__ == "__main__": args = parse_args() main(args) diff --git a/references/video_classification/README.md b/references/video_classification/README.md new file mode 100644 index 00000000000..39c5d8f1bba --- /dev/null +++ b/references/video_classification/README.md @@ -0,0 +1,125 @@ +# Video Classification + +We present a simple training script that can be used for replicating the results of [ResNet-based video models](https://research.fb.com/wp-content/uploads/2018/04/a-closer-look-at-spatiotemporal-convolutions-for-action-recognition.pdf). All models are trained on the [Kinetics400 dataset](https://deepmind.com/research/open-source/kinetics), a benchmark dataset for human-action recognition. The accuracy is reported on the traditional validation split. + +## Data preparation + +If you have already downloaded the [Kinetics400 dataset](https://deepmind.com/research/open-source/kinetics), +please proceed directly to the next section. + +To download videos, one can use https://github.com/Showmax/kinetics-downloader. Please note that the dataset can take upwards of 400GB, depending on the quality setting during download. + +## Training + +We assume the training and validation AVI videos are stored at `/data/kinetics400/train` and +`/data/kinetics400/val`. For training, we suggest starting with the hyperparameters reported in the [paper](https://research.fb.com/wp-content/uploads/2018/04/a-closer-look-at-spatiotemporal-convolutions-for-action-recognition.pdf), in order to match the performance of said models. The clip sampling strategy is a particularly important training parameter, and we suggest using random temporal jittering - in other words, sampling multiple training clips from each video with random start times at every epoch. This functionality is built into our training script, and optimal hyperparameters are set by default. + +### Multiple GPUs + +Run the training on a single node with 8 GPUs: +```bash +torchrun --nproc_per_node=8 train.py --data-path=/data/kinetics400 --kinetics-version="400" --lr 0.08 --cache-dataset --sync-bn --amp +``` + +**Note:** all our models were trained on 8 nodes with 8 V100 GPUs each for a total of 64 GPUs. Expected training time for 64 GPUs is 24 hours, depending on the storage solution. +**Note 2:** hyperparameters for exact replication of our training can be found in the section below. Some hyperparameters, such as the learning rate, must be scaled linearly in proportion to the number of GPUs. The default values assume 64 GPUs. + +### Single GPU + +**Note:** training on a single GPU can be extremely slow. + + +```bash +python train.py --data-path=/data/kinetics400 --kinetics-version="400" --batch-size=8 --cache-dataset +``` + + +### Additional Kinetics versions + +Since the original release, additional versions of the Kinetics dataset have become available (Kinetics 600). +Our training scripts support these versions of the dataset as well by setting the `--kinetics-version` parameter to `"600"`. + +**Note:** training on Kinetics 600 requires a different set of hyperparameters for optimal performance. We do not provide Kinetics 600 pretrained models.
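For orientation, here is a minimal sketch (not the actual training script; the path and hyperparameter values are illustrative placeholders) of how the flags above map onto torchvision APIs: `--kinetics-version` corresponds to the `num_classes` argument of `torchvision.datasets.Kinetics`, and the random temporal jittering described above is provided by `RandomClipSampler`.

```python
# Minimal sketch of the data pipeline described above, assuming the
# /data/kinetics400/{train,val} layout from the Training section.
import torch
import torchvision
from torch.utils.data.dataloader import default_collate
from torchvision.datasets.samplers import RandomClipSampler


def collate_fn(batch):
    # Kinetics samples are (video, audio, label) tuples; drop the audio
    # stream, as the reference training script does.
    return default_collate([(video, label) for video, _, label in batch])


dataset = torchvision.datasets.Kinetics(
    "/data/kinetics400",
    frames_per_clip=16,  # --clip_len
    num_classes="400",   # --kinetics-version
    split="train",
    frame_rate=15,       # --frame-rate
)

# Random temporal jittering: every epoch draws up to 5 clips per video
# (--clips_per_video) with random start times.
sampler = RandomClipSampler(dataset.video_clips, max_clips_per_video=5)
loader = torch.utils.data.DataLoader(dataset, batch_size=24, sampler=sampler, collate_fn=collate_fn)
```

At evaluation time the reference script swaps `RandomClipSampler` for `UniformClipSampler`, so that every video contributes the same number of deterministically placed clips.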
+ + +## Video classification models + +Starting with version `0.4.0`, we have introduced support for basic video tasks and video classification modelling. +For more information about the available models, check [here](https://pytorch.org/docs/stable/torchvision/models.html#video-classification). + +### Video ResNet models + +See reference training script [here](https://github.com/pytorch/vision/blob/main/references/video_classification/train.py): + +- input space: RGB +- resize size: [128, 171] +- crop size: [112, 112] +- mean: [0.43216, 0.394666, 0.37645] +- std: [0.22803, 0.22145, 0.216989] +- number of classes: 400 + +Input data augmentations at training time (with optional parameters): + +1. ConvertImageDtype +2. Resize (resize size value above) +3. Random horizontal flip (0.5) +4. Normalization (mean, std, see values above) +5. Random Crop (crop size value above) +6. Convert BCHW to CBHW + +Input data augmentations at validation time (with optional parameters): + +1. ConvertImageDtype +2. Resize (resize size value above) +3. Normalization (mean, std, see values above) +4. Center Crop (crop size value above) +5. Convert BCHW to CBHW + +This translates into the following set of command-line arguments. Please note that the `--batch-size` parameter controls the +batch size per GPU. Moreover, note that our default `--lr` is configured for 64 GPUs, which is how many we used for the +Video ResNet models: +``` +# number of frames per clip +--clip_len 16 \ +--frame-rate 15 \ +# allow for temporal jittering +--clips_per_video 5 \ +--batch-size 24 \ +--epochs 45 \ +--lr 0.64 \ +# we use 10 epochs for linear warmup +--lr-warmup-epochs 10 \ +# learning rate is decayed at epochs 20, 30, and 40 by a factor of 10 +--lr-milestones 20 30 40 \ +--lr-gamma 0.1 \ +--train-resize-size 128 171 \ +--train-crop-size 112 112 \ +--val-resize-size 128 171 \ +--val-crop-size 112 112 +``` + +### S3D + +The S3D model was trained similarly to the above but with the following changes to the default configuration: +``` +--batch-size=12 --lr 0.2 --clip-len 64 --clips-per-video 5 --sync-bn \ +--train-resize-size 256 256 --train-crop-size 224 224 --val-resize-size 256 256 --val-crop-size 224 224 +``` + +We used 64 GPUs to train the architecture. + +To estimate the validation statistics of the model, we run the reference script with the following configuration: +``` +--batch-size=16 --test-only --clip-len 128 --clips-per-video 1 +``` + +### Additional video modelling resources + +- [Video Model Zoo](https://github.com/facebookresearch/VMZ) +- [PySlowFast](https://github.com/facebookresearch/SlowFast) + +### References + +[0] _D. Tran, H. Wang, L. Torresani, J. Ray, Y. LeCun and M. Paluri_: A Closer Look at Spatiotemporal Convolutions for Action Recognition. _CVPR 2018_ ([paper](https://research.fb.com/wp-content/uploads/2018/04/a-closer-look-at-spatiotemporal-convolutions-for-action-recognition.pdf)) + +[1] _W. Kay, J. Carreira, K. Simonyan, B. Zhang, C. Hillier, S. Vijayanarasimhan, F. Viola, T. Green, T. Back, P. Natsev, M. Suleyman, A.
Zisserman_: The Kinetics Human Action Video Dataset ([paper](https://arxiv.org/abs/1705.06950)) diff --git a/references/video_classification/datasets.py b/references/video_classification/datasets.py new file mode 100644 index 00000000000..dec1e16b856 --- /dev/null +++ b/references/video_classification/datasets.py @@ -0,0 +1,15 @@ +from typing import Tuple + +import torchvision +from torch import Tensor + + +class KineticsWithVideoId(torchvision.datasets.Kinetics): + def __getitem__(self, idx: int) -> Tuple[Tensor, Tensor, int]: + video, audio, info, video_idx = self.video_clips.get_clip(idx) + label = self.samples[video_idx][1] + + if self.transform is not None: + video = self.transform(video) + + return video, audio, label, video_idx diff --git a/references/video_classification/presets.py b/references/video_classification/presets.py new file mode 100644 index 00000000000..f73802c9666 --- /dev/null +++ b/references/video_classification/presets.py @@ -0,0 +1,50 @@ +import torch +from torchvision.transforms import transforms +from transforms import ConvertBCHWtoCBHW + + +class VideoClassificationPresetTrain: + def __init__( + self, + *, + crop_size, + resize_size, + mean=(0.43216, 0.394666, 0.37645), + std=(0.22803, 0.22145, 0.216989), + hflip_prob=0.5, + ): + trans = [ + transforms.ConvertImageDtype(torch.float32), + # We hard-code antialias=False to preserve results after we changed + # its default from None to True (see + # https://github.com/pytorch/vision/pull/7160) + # TODO: we could re-train the video models with antialias=True? + transforms.Resize(resize_size, antialias=False), + ] + if hflip_prob > 0: + trans.append(transforms.RandomHorizontalFlip(hflip_prob)) + trans.extend([transforms.Normalize(mean=mean, std=std), transforms.RandomCrop(crop_size), ConvertBCHWtoCBHW()]) + self.transforms = transforms.Compose(trans) + + def __call__(self, x): + return self.transforms(x) + + +class VideoClassificationPresetEval: + def __init__(self, *, crop_size, resize_size, mean=(0.43216, 0.394666, 0.37645), std=(0.22803, 0.22145, 0.216989)): + self.transforms = transforms.Compose( + [ + transforms.ConvertImageDtype(torch.float32), + # We hard-code antialias=False to preserve results after we changed + # its default from None to True (see + # https://github.com/pytorch/vision/pull/7160) + # TODO: we could re-train the video models with antialias=True? + transforms.Resize(resize_size, antialias=False), + transforms.Normalize(mean=mean, std=std), + transforms.CenterCrop(crop_size), + ConvertBCHWtoCBHW(), + ] + ) + + def __call__(self, x): + return self.transforms(x) diff --git a/references/video_classification/scheduler.py b/references/video_classification/scheduler.py deleted file mode 100644 index f0f862d41ad..00000000000 --- a/references/video_classification/scheduler.py +++ /dev/null @@ -1,47 +0,0 @@ -import torch -from bisect import bisect_right - - -class WarmupMultiStepLR(torch.optim.lr_scheduler._LRScheduler): - def __init__( - self, - optimizer, - milestones, - gamma=0.1, - warmup_factor=1.0 / 3, - warmup_iters=5, - warmup_method="linear", - last_epoch=-1, - ): - if not milestones == sorted(milestones): - raise ValueError( - "Milestones should be a list of" " increasing integers. 
Got {}", - milestones, - ) - - if warmup_method not in ("constant", "linear"): - raise ValueError( - "Only 'constant' or 'linear' warmup_method accepted" - "got {}".format(warmup_method) - ) - self.milestones = milestones - self.gamma = gamma - self.warmup_factor = warmup_factor - self.warmup_iters = warmup_iters - self.warmup_method = warmup_method - super(WarmupMultiStepLR, self).__init__(optimizer, last_epoch) - - def get_lr(self): - warmup_factor = 1 - if self.last_epoch < self.warmup_iters: - if self.warmup_method == "constant": - warmup_factor = self.warmup_factor - elif self.warmup_method == "linear": - alpha = float(self.last_epoch) / self.warmup_iters - warmup_factor = self.warmup_factor * (1 - alpha) + alpha - return [ - base_lr * - warmup_factor * - self.gamma ** bisect_right(self.milestones, self.last_epoch) - for base_lr in self.base_lrs - ] diff --git a/references/video_classification/train.py b/references/video_classification/train.py index 8e41f9ec474..a03a9722003 100644 --- a/references/video_classification/train.py +++ b/references/video_classification/train.py @@ -1,88 +1,126 @@ -from __future__ import print_function import datetime import os import time -import sys +import warnings +import datasets +import presets import torch import torch.utils.data -from torch.utils.data.dataloader import default_collate -from torch import nn import torchvision import torchvision.datasets.video_utils -from torchvision import transforms -from torchvision.datasets.samplers import DistributedSampler, UniformClipSampler, RandomClipSampler - import utils - -from scheduler import WarmupMultiStepLR -import transforms as T - -try: - from apex import amp -except ImportError: - amp = None +from torch import nn +from torch.utils.data.dataloader import default_collate +from torchvision.datasets.samplers import DistributedSampler, RandomClipSampler, UniformClipSampler -def train_one_epoch(model, criterion, optimizer, lr_scheduler, data_loader, device, epoch, print_freq, apex=False): +def train_one_epoch(model, criterion, optimizer, lr_scheduler, data_loader, device, epoch, print_freq, scaler=None): model.train() metric_logger = utils.MetricLogger(delimiter=" ") - metric_logger.add_meter('lr', utils.SmoothedValue(window_size=1, fmt='{value}')) - metric_logger.add_meter('clips/s', utils.SmoothedValue(window_size=10, fmt='{value:.3f}')) + metric_logger.add_meter("lr", utils.SmoothedValue(window_size=1, fmt="{value}")) + metric_logger.add_meter("clips/s", utils.SmoothedValue(window_size=10, fmt="{value:.3f}")) - header = 'Epoch: [{}]'.format(epoch) - for video, target in metric_logger.log_every(data_loader, print_freq, header): + header = f"Epoch: [{epoch}]" + for video, target, _ in metric_logger.log_every(data_loader, print_freq, header): start_time = time.time() video, target = video.to(device), target.to(device) - output = model(video) - loss = criterion(output, target) + with torch.cuda.amp.autocast(enabled=scaler is not None): + output = model(video) + loss = criterion(output, target) optimizer.zero_grad() - if apex: - with amp.scale_loss(loss, optimizer) as scaled_loss: - scaled_loss.backward() + + if scaler is not None: + scaler.scale(loss).backward() + scaler.step(optimizer) + scaler.update() else: loss.backward() - optimizer.step() + optimizer.step() acc1, acc5 = utils.accuracy(output, target, topk=(1, 5)) batch_size = video.shape[0] metric_logger.update(loss=loss.item(), lr=optimizer.param_groups[0]["lr"]) - metric_logger.meters['acc1'].update(acc1.item(), n=batch_size) - 
metric_logger.meters['acc5'].update(acc5.item(), n=batch_size) - metric_logger.meters['clips/s'].update(batch_size / (time.time() - start_time)) + metric_logger.meters["acc1"].update(acc1.item(), n=batch_size) + metric_logger.meters["acc5"].update(acc5.item(), n=batch_size) + metric_logger.meters["clips/s"].update(batch_size / (time.time() - start_time)) lr_scheduler.step() def evaluate(model, criterion, data_loader, device): model.eval() metric_logger = utils.MetricLogger(delimiter=" ") - header = 'Test:' - with torch.no_grad(): - for video, target in metric_logger.log_every(data_loader, 100, header): + header = "Test:" + num_processed_samples = 0 + # Group and aggregate output of a video + num_videos = len(data_loader.dataset.samples) + num_classes = len(data_loader.dataset.classes) + agg_preds = torch.zeros((num_videos, num_classes), dtype=torch.float32, device=device) + agg_targets = torch.zeros((num_videos), dtype=torch.int32, device=device) + with torch.inference_mode(): + for video, target, video_idx in metric_logger.log_every(data_loader, 100, header): video = video.to(device, non_blocking=True) target = target.to(device, non_blocking=True) output = model(video) loss = criterion(output, target) + # Use softmax to convert output into prediction probability + preds = torch.softmax(output, dim=1) + for b in range(video.size(0)): + idx = video_idx[b].item() + agg_preds[idx] += preds[b].detach() + agg_targets[idx] = target[b].detach().item() + acc1, acc5 = utils.accuracy(output, target, topk=(1, 5)) # FIXME need to take into account that the datasets # could have been padded in distributed setup batch_size = video.shape[0] metric_logger.update(loss=loss.item()) - metric_logger.meters['acc1'].update(acc1.item(), n=batch_size) - metric_logger.meters['acc5'].update(acc5.item(), n=batch_size) + metric_logger.meters["acc1"].update(acc1.item(), n=batch_size) + metric_logger.meters["acc5"].update(acc5.item(), n=batch_size) + num_processed_samples += batch_size # gather the stats from all processes + num_processed_samples = utils.reduce_across_processes(num_processed_samples) + if isinstance(data_loader.sampler, DistributedSampler): + # Get the len of UniformClipSampler inside DistributedSampler + num_data_from_sampler = len(data_loader.sampler.dataset) + else: + num_data_from_sampler = len(data_loader.sampler) + + if ( + hasattr(data_loader.dataset, "__len__") + and num_data_from_sampler != num_processed_samples + and torch.distributed.get_rank() == 0 + ): + # See FIXME above + warnings.warn( + f"It looks like the sampler has {num_data_from_sampler} samples, but {num_processed_samples} " + "samples were used for the validation, which might bias the results. " + "Try adjusting the batch size and / or the world size. " + "Setting the world size to 1 is always a safe bet." 
+ ) + metric_logger.synchronize_between_processes() - print(' * Clip Acc@1 {top1.global_avg:.3f} Clip Acc@5 {top5.global_avg:.3f}' - .format(top1=metric_logger.acc1, top5=metric_logger.acc5)) + print( + " * Clip Acc@1 {top1.global_avg:.3f} Clip Acc@5 {top5.global_avg:.3f}".format( + top1=metric_logger.acc1, top5=metric_logger.acc5 + ) + ) + # Reduce the agg_preds and agg_targets from all gpu and show result + agg_preds = utils.reduce_across_processes(agg_preds) + agg_targets = utils.reduce_across_processes(agg_targets, op=torch.distributed.ReduceOp.MAX) + agg_acc1, agg_acc5 = utils.accuracy(agg_preds, agg_targets, topk=(1, 5)) + print(" * Video Acc@1 {acc1:.3f} Video Acc@5 {acc5:.3f}".format(acc1=agg_acc1, acc5=agg_acc5)) return metric_logger.acc1.global_avg -def _get_cache_path(filepath): +def _get_cache_path(filepath, args): import hashlib - h = hashlib.sha1(filepath.encode()).hexdigest() + + value = f"{filepath}-{args.clip_len}-{args.kinetics_version}-{args.frame_rate}" + h = hashlib.sha1(value.encode()).hexdigest() cache_path = os.path.join("~", ".torch", "vision", "datasets", "kinetics", h[:10] + ".pt") cache_path = os.path.expanduser(cache_path) return cache_path @@ -90,97 +128,100 @@ def _get_cache_path(filepath): def collate_fn(batch): # remove audio from the batch - batch = [(d[0], d[2]) for d in batch] + batch = [(d[0], d[2], d[3]) for d in batch] return default_collate(batch) def main(args): - if args.apex: - if sys.version_info < (3, 0): - raise RuntimeError("Apex currently only supports Python 3. Aborting.") - if amp is None: - raise RuntimeError("Failed to import apex. Please install apex from https://www.github.com/nvidia/apex " - "to enable mixed-precision training.") - if args.output_dir: utils.mkdir(args.output_dir) utils.init_distributed_mode(args) print(args) - print("torch version: ", torch.__version__) - print("torchvision version: ", torchvision.__version__) device = torch.device(args.device) - torch.backends.cudnn.benchmark = True + if args.use_deterministic_algorithms: + torch.backends.cudnn.benchmark = False + torch.use_deterministic_algorithms(True) + else: + torch.backends.cudnn.benchmark = True # Data loading code print("Loading data") - traindir = os.path.join(args.data_path, 'train_avi-480p') - valdir = os.path.join(args.data_path, 'val_avi-480p') - normalize = T.Normalize(mean=[0.43216, 0.394666, 0.37645], - std=[0.22803, 0.22145, 0.216989]) + val_resize_size = tuple(args.val_resize_size) + val_crop_size = tuple(args.val_crop_size) + train_resize_size = tuple(args.train_resize_size) + train_crop_size = tuple(args.train_crop_size) + + traindir = os.path.join(args.data_path, "train") + valdir = os.path.join(args.data_path, "val") print("Loading training data") st = time.time() - cache_path = _get_cache_path(traindir) - transform_train = torchvision.transforms.Compose([ - T.ToFloatTensorInZeroOne(), - T.Resize((128, 171)), - T.RandomHorizontalFlip(), - normalize, - T.RandomCrop((112, 112)) - ]) + cache_path = _get_cache_path(traindir, args) + transform_train = presets.VideoClassificationPresetTrain(crop_size=train_crop_size, resize_size=train_resize_size) if args.cache_dataset and os.path.exists(cache_path): - print("Loading dataset_train from {}".format(cache_path)) - dataset, _ = torch.load(cache_path) + print(f"Loading dataset_train from {cache_path}") + dataset, _ = torch.load(cache_path, weights_only=False) dataset.transform = transform_train else: if args.distributed: - print("It is recommended to pre-compute the dataset cache " - "on a single-gpu first, 
as it will be faster") - dataset = torchvision.datasets.Kinetics400( - traindir, + print("It is recommended to pre-compute the dataset cache on a single-gpu first, as it will be faster") + dataset = datasets.KineticsWithVideoId( + args.data_path, frames_per_clip=args.clip_len, + num_classes=args.kinetics_version, + split="train", step_between_clips=1, transform=transform_train, - frame_rate=15 + frame_rate=args.frame_rate, + extensions=( + "avi", + "mp4", + ), + output_format="TCHW", ) if args.cache_dataset: - print("Saving dataset_train to {}".format(cache_path)) + print(f"Saving dataset_train to {cache_path}") utils.mkdir(os.path.dirname(cache_path)) utils.save_on_master((dataset, traindir), cache_path) print("Took", time.time() - st) print("Loading validation data") - cache_path = _get_cache_path(valdir) + cache_path = _get_cache_path(valdir, args) - transform_test = torchvision.transforms.Compose([ - T.ToFloatTensorInZeroOne(), - T.Resize((128, 171)), - normalize, - T.CenterCrop((112, 112)) - ]) + if args.weights and args.test_only: + weights = torchvision.models.get_weight(args.weights) + transform_test = weights.transforms() + else: + transform_test = presets.VideoClassificationPresetEval(crop_size=val_crop_size, resize_size=val_resize_size) if args.cache_dataset and os.path.exists(cache_path): - print("Loading dataset_test from {}".format(cache_path)) - dataset_test, _ = torch.load(cache_path) + print(f"Loading dataset_test from {cache_path}") + dataset_test, _ = torch.load(cache_path, weights_only=False) dataset_test.transform = transform_test else: if args.distributed: - print("It is recommended to pre-compute the dataset cache " - "on a single-gpu first, as it will be faster") - dataset_test = torchvision.datasets.Kinetics400( - valdir, + print("It is recommended to pre-compute the dataset cache on a single-gpu first, as it will be faster") + dataset_test = datasets.KineticsWithVideoId( + args.data_path, frames_per_clip=args.clip_len, + num_classes=args.kinetics_version, + split="val", step_between_clips=1, transform=transform_test, - frame_rate=15 + frame_rate=args.frame_rate, + extensions=( + "avi", + "mp4", + ), + output_format="TCHW", ) if args.cache_dataset: - print("Saving dataset_test to {}".format(cache_path)) + print(f"Saving dataset_test to {cache_path}") utils.mkdir(os.path.dirname(cache_path)) utils.save_on_master((dataset_test, valdir), cache_path) @@ -189,42 +230,64 @@ def main(args): test_sampler = UniformClipSampler(dataset_test.video_clips, args.clips_per_video) if args.distributed: train_sampler = DistributedSampler(train_sampler) - test_sampler = DistributedSampler(test_sampler) + test_sampler = DistributedSampler(test_sampler, shuffle=False) data_loader = torch.utils.data.DataLoader( - dataset, batch_size=args.batch_size, - sampler=train_sampler, num_workers=args.workers, - pin_memory=True, collate_fn=collate_fn) + dataset, + batch_size=args.batch_size, + sampler=train_sampler, + num_workers=args.workers, + pin_memory=True, + collate_fn=collate_fn, + ) data_loader_test = torch.utils.data.DataLoader( - dataset_test, batch_size=args.batch_size, - sampler=test_sampler, num_workers=args.workers, - pin_memory=True, collate_fn=collate_fn) + dataset_test, + batch_size=args.batch_size, + sampler=test_sampler, + num_workers=args.workers, + pin_memory=True, + collate_fn=collate_fn, + ) print("Creating model") - model = torchvision.models.video.__dict__[args.model](pretrained=args.pretrained) + model = torchvision.models.get_model(args.model, weights=args.weights) 
model.to(device) if args.distributed and args.sync_bn: model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model) criterion = nn.CrossEntropyLoss() - lr = args.lr * args.world_size - optimizer = torch.optim.SGD( - model.parameters(), lr=lr, momentum=args.momentum, weight_decay=args.weight_decay) - - if args.apex: - model, optimizer = amp.initialize(model, optimizer, - opt_level=args.apex_opt_level - ) + optimizer = torch.optim.SGD(model.parameters(), lr=args.lr, momentum=args.momentum, weight_decay=args.weight_decay) + scaler = torch.cuda.amp.GradScaler() if args.amp else None # convert scheduler to be per iteration, not per epoch, for warmup that lasts # between different epochs - warmup_iters = args.lr_warmup_epochs * len(data_loader) - lr_milestones = [len(data_loader) * m for m in args.lr_milestones] - lr_scheduler = WarmupMultiStepLR( - optimizer, milestones=lr_milestones, gamma=args.lr_gamma, - warmup_iters=warmup_iters, warmup_factor=1e-5) + iters_per_epoch = len(data_loader) + lr_milestones = [iters_per_epoch * (m - args.lr_warmup_epochs) for m in args.lr_milestones] + main_lr_scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=lr_milestones, gamma=args.lr_gamma) + + if args.lr_warmup_epochs > 0: + warmup_iters = iters_per_epoch * args.lr_warmup_epochs + args.lr_warmup_method = args.lr_warmup_method.lower() + if args.lr_warmup_method == "linear": + warmup_lr_scheduler = torch.optim.lr_scheduler.LinearLR( + optimizer, start_factor=args.lr_warmup_decay, total_iters=warmup_iters + ) + elif args.lr_warmup_method == "constant": + warmup_lr_scheduler = torch.optim.lr_scheduler.ConstantLR( + optimizer, factor=args.lr_warmup_decay, total_iters=warmup_iters + ) + else: + raise RuntimeError( + f"Invalid warmup lr method '{args.lr_warmup_method}'. Only linear and constant are supported." 
+ ) + + lr_scheduler = torch.optim.lr_scheduler.SequentialLR( + optimizer, schedulers=[warmup_lr_scheduler, main_lr_scheduler], milestones=[warmup_iters] + ) + else: + lr_scheduler = main_lr_scheduler model_without_ddp = model if args.distributed: @@ -232,13 +295,18 @@ def main(args): model_without_ddp = model.module if args.resume: - checkpoint = torch.load(args.resume, map_location='cpu') - model_without_ddp.load_state_dict(checkpoint['model']) - optimizer.load_state_dict(checkpoint['optimizer']) - lr_scheduler.load_state_dict(checkpoint['lr_scheduler']) - args.start_epoch = checkpoint['epoch'] + 1 + checkpoint = torch.load(args.resume, map_location="cpu", weights_only=True) + model_without_ddp.load_state_dict(checkpoint["model"]) + optimizer.load_state_dict(checkpoint["optimizer"]) + lr_scheduler.load_state_dict(checkpoint["lr_scheduler"]) + args.start_epoch = checkpoint["epoch"] + 1 + if args.amp: + scaler.load_state_dict(checkpoint["scaler"]) if args.test_only: + # We disable the cudnn benchmarking because it can noticeably affect the accuracy + torch.backends.cudnn.benchmark = False + torch.backends.cudnn.deterministic = True evaluate(model, criterion, data_loader_test, device=device) return @@ -247,58 +315,69 @@ def main(args): for epoch in range(args.start_epoch, args.epochs): if args.distributed: train_sampler.set_epoch(epoch) - train_one_epoch(model, criterion, optimizer, lr_scheduler, data_loader, - device, epoch, args.print_freq, args.apex) + train_one_epoch(model, criterion, optimizer, lr_scheduler, data_loader, device, epoch, args.print_freq, scaler) evaluate(model, criterion, data_loader_test, device=device) if args.output_dir: checkpoint = { - 'model': model_without_ddp.state_dict(), - 'optimizer': optimizer.state_dict(), - 'lr_scheduler': lr_scheduler.state_dict(), - 'epoch': epoch, - 'args': args} - utils.save_on_master( - checkpoint, - os.path.join(args.output_dir, 'model_{}.pth'.format(epoch))) - utils.save_on_master( - checkpoint, - os.path.join(args.output_dir, 'checkpoint.pth')) + "model": model_without_ddp.state_dict(), + "optimizer": optimizer.state_dict(), + "lr_scheduler": lr_scheduler.state_dict(), + "epoch": epoch, + "args": args, + } + if args.amp: + checkpoint["scaler"] = scaler.state_dict() + utils.save_on_master(checkpoint, os.path.join(args.output_dir, f"model_{epoch}.pth")) + utils.save_on_master(checkpoint, os.path.join(args.output_dir, "checkpoint.pth")) total_time = time.time() - start_time total_time_str = str(datetime.timedelta(seconds=int(total_time))) - print('Training time {}'.format(total_time_str)) + print(f"Training time {total_time_str}") -def parse_args(): +def get_args_parser(add_help=True): import argparse - parser = argparse.ArgumentParser(description='PyTorch Classification Training') - - parser.add_argument('--data-path', default='/datasets01_101/kinetics/070618/', help='dataset') - parser.add_argument('--model', default='r2plus1d_18', help='model') - parser.add_argument('--device', default='cuda', help='device') - parser.add_argument('--clip-len', default=16, type=int, metavar='N', - help='number of frames per clip') - parser.add_argument('--clips-per-video', default=5, type=int, metavar='N', - help='maximum number of clips per video to consider') - parser.add_argument('-b', '--batch-size', default=24, type=int) - parser.add_argument('--epochs', default=45, type=int, metavar='N', - help='number of total epochs to run') - parser.add_argument('-j', '--workers', default=10, type=int, metavar='N', - help='number of data loading workers 
(default: 16)') - parser.add_argument('--lr', default=0.01, type=float, help='initial learning rate') - parser.add_argument('--momentum', default=0.9, type=float, metavar='M', - help='momentum') - parser.add_argument('--wd', '--weight-decay', default=1e-4, type=float, - metavar='W', help='weight decay (default: 1e-4)', - dest='weight_decay') - parser.add_argument('--lr-milestones', nargs='+', default=[20, 30, 40], type=int, help='decrease lr on milestones') - parser.add_argument('--lr-gamma', default=0.1, type=float, help='decrease lr by a factor of lr-gamma') - parser.add_argument('--lr-warmup-epochs', default=10, type=int, help='number of warmup epochs') - parser.add_argument('--print-freq', default=10, type=int, help='print frequency') - parser.add_argument('--output-dir', default='.', help='path where to save') - parser.add_argument('--resume', default='', help='resume from checkpoint') - parser.add_argument('--start-epoch', default=0, type=int, metavar='N', - help='start epoch') + + parser = argparse.ArgumentParser(description="PyTorch Video Classification Training", add_help=add_help) + + parser.add_argument("--data-path", default="/datasets01_101/kinetics/070618/", type=str, help="dataset path") + parser.add_argument( + "--kinetics-version", default="400", type=str, choices=["400", "600"], help="Select kinetics version" + ) + parser.add_argument("--model", default="r2plus1d_18", type=str, help="model name") + parser.add_argument("--device", default="cuda", type=str, help="device (Use cuda or cpu Default: cuda)") + parser.add_argument("--clip-len", default=16, type=int, metavar="N", help="number of frames per clip") + parser.add_argument("--frame-rate", default=15, type=int, metavar="N", help="the frame rate") + parser.add_argument( + "--clips-per-video", default=5, type=int, metavar="N", help="maximum number of clips per video to consider" + ) + parser.add_argument( + "-b", "--batch-size", default=24, type=int, help="images per gpu, the total batch size is $NGPU x batch_size" + ) + parser.add_argument("--epochs", default=45, type=int, metavar="N", help="number of total epochs to run") + parser.add_argument( + "-j", "--workers", default=10, type=int, metavar="N", help="number of data loading workers (default: 10)" + ) + parser.add_argument("--lr", default=0.64, type=float, help="initial learning rate") + parser.add_argument("--momentum", default=0.9, type=float, metavar="M", help="momentum") + parser.add_argument( + "--wd", + "--weight-decay", + default=1e-4, + type=float, + metavar="W", + help="weight decay (default: 1e-4)", + dest="weight_decay", + ) + parser.add_argument("--lr-milestones", nargs="+", default=[20, 30, 40], type=int, help="decrease lr on milestones") + parser.add_argument("--lr-gamma", default=0.1, type=float, help="decrease lr by a factor of lr-gamma") + parser.add_argument("--lr-warmup-epochs", default=10, type=int, help="the number of epochs to warmup (default: 10)") + parser.add_argument("--lr-warmup-method", default="linear", type=str, help="the warmup method (default: linear)") + parser.add_argument("--lr-warmup-decay", default=0.001, type=float, help="the decay for lr") + parser.add_argument("--print-freq", default=10, type=int, help="print frequency") + parser.add_argument("--output-dir", default=".", type=str, help="path to save outputs") + parser.add_argument("--resume", default="", type=str, help="path of checkpoint") + parser.add_argument("--start-epoch", default=0, type=int, metavar="N", help="start epoch") parser.add_argument( "--cache-dataset", 
dest="cache_dataset", @@ -318,31 +397,50 @@ def parse_args(): action="store_true", ) parser.add_argument( - "--pretrained", - dest="pretrained", - help="Use pre-trained models from the modelzoo", - action="store_true", + "--use-deterministic-algorithms", action="store_true", help="Forces the use of deterministic algorithms only." ) - # Mixed precision training parameters - parser.add_argument('--apex', action='store_true', - help='Use apex for mixed precision training') - parser.add_argument('--apex-opt-level', default='O1', type=str, - help='For apex mixed precision training' - 'O0 for FP32 training, O1 for mixed precision training.' - 'For further detail, see https://github.com/NVIDIA/apex/tree/master/examples/imagenet' - ) - # distributed training parameters - parser.add_argument('--world-size', default=1, type=int, - help='number of distributed processes') - parser.add_argument('--dist-url', default='env://', help='url used to set up distributed training') + parser.add_argument("--world-size", default=1, type=int, help="number of distributed processes") + parser.add_argument("--dist-url", default="env://", type=str, help="url used to set up distributed training") - args = parser.parse_args() + parser.add_argument( + "--val-resize-size", + default=(128, 171), + nargs="+", + type=int, + help="the resize size used for validation (default: (128, 171))", + ) + parser.add_argument( + "--val-crop-size", + default=(112, 112), + nargs="+", + type=int, + help="the central crop size used for validation (default: (112, 112))", + ) + parser.add_argument( + "--train-resize-size", + default=(128, 171), + nargs="+", + type=int, + help="the resize size used for training (default: (128, 171))", + ) + parser.add_argument( + "--train-crop-size", + default=(112, 112), + nargs="+", + type=int, + help="the random crop size used for training (default: (112, 112))", + ) + + parser.add_argument("--weights", default=None, type=str, help="the weights enum name to load") + + # Mixed precision training parameters + parser.add_argument("--amp", action="store_true", help="Use torch.cuda.amp for mixed precision training") - return args + return parser if __name__ == "__main__": - args = parse_args() + args = get_args_parser().parse_args() main(args) diff --git a/references/video_classification/transforms.py b/references/video_classification/transforms.py index 9435450c4b3..2a7cc2a4a66 100644 --- a/references/video_classification/transforms.py +++ b/references/video_classification/transforms.py @@ -1,122 +1,9 @@ import torch -import random +import torch.nn as nn -def crop(vid, i, j, h, w): - return vid[..., i:(i + h), j:(j + w)] +class ConvertBCHWtoCBHW(nn.Module): + """Convert tensor from (B, C, H, W) to (C, B, H, W)""" - -def center_crop(vid, output_size): - h, w = vid.shape[-2:] - th, tw = output_size - - i = int(round((h - th) / 2.)) - j = int(round((w - tw) / 2.)) - return crop(vid, i, j, th, tw) - - -def hflip(vid): - return vid.flip(dims=(-1,)) - - -# NOTE: for those functions, which generally expect mini-batches, we keep them -# as non-minibatch so that they are applied as if they were 4d (thus image). 
-# this way, we only apply the transformation in the spatial domain -def resize(vid, size, interpolation='bilinear'): - # NOTE: using bilinear interpolation because we don't work on minibatches - # at this level - scale = None - if isinstance(size, int): - scale = float(size) / min(vid.shape[-2:]) - size = None - return torch.nn.functional.interpolate( - vid, size=size, scale_factor=scale, mode=interpolation, align_corners=False) - - -def pad(vid, padding, fill=0, padding_mode="constant"): - # NOTE: don't want to pad on temporal dimension, so let as non-batch - # (4d) before padding. This works as expected - return torch.nn.functional.pad(vid, padding, value=fill, mode=padding_mode) - - -def to_normalized_float_tensor(vid): - return vid.permute(3, 0, 1, 2).to(torch.float32) / 255 - - -def normalize(vid, mean, std): - shape = (-1,) + (1,) * (vid.dim() - 1) - mean = torch.as_tensor(mean).reshape(shape) - std = torch.as_tensor(std).reshape(shape) - return (vid - mean) / std - - -# Class interface - -class RandomCrop(object): - def __init__(self, size): - self.size = size - - @staticmethod - def get_params(vid, output_size): - """Get parameters for ``crop`` for a random crop. - """ - h, w = vid.shape[-2:] - th, tw = output_size - if w == tw and h == th: - return 0, 0, h, w - i = random.randint(0, h - th) - j = random.randint(0, w - tw) - return i, j, th, tw - - def __call__(self, vid): - i, j, h, w = self.get_params(vid, self.size) - return crop(vid, i, j, h, w) - - -class CenterCrop(object): - def __init__(self, size): - self.size = size - - def __call__(self, vid): - return center_crop(vid, self.size) - - -class Resize(object): - def __init__(self, size): - self.size = size - - def __call__(self, vid): - return resize(vid, self.size) - - -class ToFloatTensorInZeroOne(object): - def __call__(self, vid): - return to_normalized_float_tensor(vid) - - -class Normalize(object): - def __init__(self, mean, std): - self.mean = mean - self.std = std - - def __call__(self, vid): - return normalize(vid, self.mean, self.std) - - -class RandomHorizontalFlip(object): - def __init__(self, p=0.5): - self.p = p - - def __call__(self, vid): - if random.random() < self.p: - return hflip(vid) - return vid - - -class Pad(object): - def __init__(self, padding, fill=0): - self.padding = padding - self.fill = fill - - def __call__(self, vid): - return pad(vid, self.padding, self.fill) + def forward(self, vid: torch.Tensor) -> torch.Tensor: + return vid.permute(1, 0, 2, 3) diff --git a/references/video_classification/utils.py b/references/video_classification/utils.py index 5ea6dfef341..934f62f66ae 100644 --- a/references/video_classification/utils.py +++ b/references/video_classification/utils.py @@ -1,15 +1,14 @@ -from __future__ import print_function -from collections import defaultdict, deque import datetime +import errno +import os import time +from collections import defaultdict, deque + import torch import torch.distributed as dist -import errno -import os - -class SmoothedValue(object): +class SmoothedValue: """Track a series of values and provide access to smoothed values over a window or the global series average. """ @@ -31,11 +30,7 @@ def synchronize_between_processes(self): """ Warning: does not synchronize the deque! 
""" - if not is_dist_avail_and_initialized(): - return - t = torch.tensor([self.count, self.total], dtype=torch.float64, device='cuda') - dist.barrier() - dist.all_reduce(t) + t = reduce_across_processes([self.count, self.total]) t = t.tolist() self.count = int(t[0]) self.total = t[1] @@ -64,14 +59,11 @@ def value(self): def __str__(self): return self.fmt.format( - median=self.median, - avg=self.avg, - global_avg=self.global_avg, - max=self.max, - value=self.value) + median=self.median, avg=self.avg, global_avg=self.global_avg, max=self.max, value=self.value + ) -class MetricLogger(object): +class MetricLogger: def __init__(self, delimiter="\t"): self.meters = defaultdict(SmoothedValue) self.delimiter = delimiter @@ -80,7 +72,10 @@ def update(self, **kwargs): for k, v in kwargs.items(): if isinstance(v, torch.Tensor): v = v.item() - assert isinstance(v, (float, int)) + if not isinstance(v, (float, int)): + raise TypeError( + f"This method expects the value of the input arguments to be of type float or int, instead got {type(v)}" + ) self.meters[k].update(v) def __getattr__(self, attr): @@ -88,15 +83,12 @@ def __getattr__(self, attr): return self.meters[attr] if attr in self.__dict__: return self.__dict__[attr] - raise AttributeError("'{}' object has no attribute '{}'".format( - type(self).__name__, attr)) + raise AttributeError(f"'{type(self).__name__}' object has no attribute '{attr}'") def __str__(self): loss_str = [] for name, meter in self.meters.items(): - loss_str.append( - "{}: {}".format(name, str(meter)) - ) + loss_str.append(f"{name}: {str(meter)}") return self.delimiter.join(loss_str) def synchronize_between_processes(self): @@ -109,31 +101,28 @@ def add_meter(self, name, meter): def log_every(self, iterable, print_freq, header=None): i = 0 if not header: - header = '' + header = "" start_time = time.time() end = time.time() - iter_time = SmoothedValue(fmt='{avg:.4f}') - data_time = SmoothedValue(fmt='{avg:.4f}') - space_fmt = ':' + str(len(str(len(iterable)))) + 'd' + iter_time = SmoothedValue(fmt="{avg:.4f}") + data_time = SmoothedValue(fmt="{avg:.4f}") + space_fmt = ":" + str(len(str(len(iterable)))) + "d" if torch.cuda.is_available(): - log_msg = self.delimiter.join([ - header, - '[{0' + space_fmt + '}/{1}]', - 'eta: {eta}', - '{meters}', - 'time: {time}', - 'data: {data}', - 'max mem: {memory:.0f}' - ]) + log_msg = self.delimiter.join( + [ + header, + "[{0" + space_fmt + "}/{1}]", + "eta: {eta}", + "{meters}", + "time: {time}", + "data: {data}", + "max mem: {memory:.0f}", + ] + ) else: - log_msg = self.delimiter.join([ - header, - '[{0' + space_fmt + '}/{1}]', - 'eta: {eta}', - '{meters}', - 'time: {time}', - 'data: {data}' - ]) + log_msg = self.delimiter.join( + [header, "[{0" + space_fmt + "}/{1}]", "eta: {eta}", "{meters}", "time: {time}", "data: {data}"] + ) MB = 1024.0 * 1024.0 for obj in iterable: data_time.update(time.time() - end) @@ -143,26 +132,33 @@ def log_every(self, iterable, print_freq, header=None): eta_seconds = iter_time.global_avg * (len(iterable) - i) eta_string = str(datetime.timedelta(seconds=int(eta_seconds))) if torch.cuda.is_available(): - print(log_msg.format( - i, len(iterable), eta=eta_string, - meters=str(self), - time=str(iter_time), data=str(data_time), - memory=torch.cuda.max_memory_allocated() / MB)) + print( + log_msg.format( + i, + len(iterable), + eta=eta_string, + meters=str(self), + time=str(iter_time), + data=str(data_time), + memory=torch.cuda.max_memory_allocated() / MB, + ) + ) else: - print(log_msg.format( - i, len(iterable), 
eta=eta_string, - meters=str(self), - time=str(iter_time), data=str(data_time))) + print( + log_msg.format( + i, len(iterable), eta=eta_string, meters=str(self), time=str(iter_time), data=str(data_time) + ) + ) i += 1 end = time.time() total_time = time.time() - start_time total_time_str = str(datetime.timedelta(seconds=int(total_time))) - print('{} Total time: {}'.format(header, total_time_str)) + print(f"{header} Total time: {total_time_str}") def accuracy(output, target, topk=(1,)): """Computes the accuracy over the k top predictions for the specified values of k""" - with torch.no_grad(): + with torch.inference_mode(): maxk = max(topk) batch_size = target.size(0) @@ -190,10 +186,11 @@ def setup_for_distributed(is_master): This function disables printing when not in master process """ import builtins as __builtin__ + builtin_print = __builtin__.print def print(*args, **kwargs): - force = kwargs.pop('force', False) + force = kwargs.pop("force", False) if is_master or force: builtin_print(*args, **kwargs) @@ -230,26 +227,38 @@ def save_on_master(*args, **kwargs): def init_distributed_mode(args): - if 'RANK' in os.environ and 'WORLD_SIZE' in os.environ: + if "RANK" in os.environ and "WORLD_SIZE" in os.environ: args.rank = int(os.environ["RANK"]) - args.world_size = int(os.environ['WORLD_SIZE']) - args.gpu = int(os.environ['LOCAL_RANK']) - elif 'SLURM_PROCID' in os.environ: - args.rank = int(os.environ['SLURM_PROCID']) + args.world_size = int(os.environ["WORLD_SIZE"]) + args.gpu = int(os.environ["LOCAL_RANK"]) + elif "SLURM_PROCID" in os.environ: + args.rank = int(os.environ["SLURM_PROCID"]) args.gpu = args.rank % torch.cuda.device_count() elif hasattr(args, "rank"): pass else: - print('Not using distributed mode') + print("Not using distributed mode") args.distributed = False return args.distributed = True torch.cuda.set_device(args.gpu) - args.dist_backend = 'nccl' - print('| distributed init (rank {}): {}'.format( - args.rank, args.dist_url), flush=True) - torch.distributed.init_process_group(backend=args.dist_backend, init_method=args.dist_url, - world_size=args.world_size, rank=args.rank) + args.dist_backend = "nccl" + print(f"| distributed init (rank {args.rank}): {args.dist_url}", flush=True) + torch.distributed.init_process_group( + backend=args.dist_backend, init_method=args.dist_url, world_size=args.world_size, rank=args.rank + ) + torch.distributed.barrier() setup_for_distributed(args.rank == 0) + + +def reduce_across_processes(val, op=dist.ReduceOp.SUM): + if not is_dist_avail_and_initialized(): + # nothing to sync, but we still convert to tensor for consistency with the distributed case. + return torch.tensor(val) + + t = torch.tensor(val, device="cuda") + dist.barrier() + dist.all_reduce(t, op=op) + return t diff --git a/scripts/README.rst b/scripts/README.rst new file mode 100644 index 00000000000..23247e34178 --- /dev/null +++ b/scripts/README.rst @@ -0,0 +1,23 @@ +Utility scripts +=============== + +* `fbcode_to_main_sync.sh` + +This shell script is used to synchronise internal changes with the main repository. + +To run this script: + +.. code:: bash + + chmod +x fbcode_to_main_sync.sh + ./fbcode_to_main_sync.sh <commit_hash> <fork_name> [<fork_main_branch>] + +where + +``commit_hash`` represents the commit hash in the fbsync branch from which the sync should start. + +``fork_name`` is the name of the remote corresponding to your fork; you can check it by running ``git remote -v``. + +``fork_main_branch`` (optional) is the name of the main branch on your fork (default: "main").
+ +This script will create PRs corresponding to the commits in fbsync. Please review these, add the [FBcode->GH] prefix to the title and publish them. Most importantly, add the [FBcode->GH] prefix at the beginning of the merge message as well. diff --git a/scripts/collect_model_urls.py b/scripts/collect_model_urls.py new file mode 100644 index 00000000000..2acba6cbbda --- /dev/null +++ b/scripts/collect_model_urls.py @@ -0,0 +1,20 @@ +import pathlib +import re +import sys + +MODEL_URL_PATTERN = re.compile(r"https://download[.]pytorch[.]org/models/.+?[.]pth") + + +def main(*roots): + model_urls = set() + for root in roots: + for path in pathlib.Path(root).rglob("*.py"): + with open(path, "r") as file: + for line in file: + model_urls.update(MODEL_URL_PATTERN.findall(line)) + + print("\n".join(sorted(model_urls))) + + +if __name__ == "__main__": + main(*sys.argv[1:])
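To make the pattern's behavior concrete: the bracketed [.] atoms match literal dots, and the non-greedy .+? stops at the first .pth, so several URLs on one source line are all captured separately. A small illustrative check (the line and its URLs are hypothetical examples):

    import re

    MODEL_URL_PATTERN = re.compile(r"https://download[.]pytorch[.]org/models/.+?[.]pth")

    line = 'a = "https://download.pytorch.org/models/resnet18-f37072fd.pth"; b = "https://download.pytorch.org/models/vgg16-397923af.pth"'
    print(MODEL_URL_PATTERN.findall(line))
    # ['https://download.pytorch.org/models/resnet18-f37072fd.pth',
    #  'https://download.pytorch.org/models/vgg16-397923af.pth']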
+ echo "./fbcode_to_main_sync.sh " + exit 1 +fi +fork_name=$2 + +if [ -z $3 ] +then + fork_main_branch="main" +else + fork_main_branch=$3 +fi + +from_branch="fbsync" +git stash +git checkout $from_branch +git pull +# Add random prefix in the new branch name to keep it unique per run +prefix=$RANDOM +IFS=' +' +for line in $(git log --pretty=oneline "$commit_hash"..HEAD) +do + if [[ $line != *\[fbsync\]* ]] + then + echo "Parsing $line" + hash=$(echo $line | cut -f1 -d' ') + git checkout $fork_main_branch + git checkout -B cherrypick_${prefix}_${hash} + git cherry-pick -x "$hash" + git push $fork_name cherrypick_${prefix}_${hash} + git checkout $from_branch + fi +done +echo "Please review the PRs, add [FBCode->GH] prefix in the title and publish them." diff --git a/scripts/release_notes/classify_prs.py b/scripts/release_notes/classify_prs.py new file mode 100644 index 00000000000..5847c9f03f5 --- /dev/null +++ b/scripts/release_notes/classify_prs.py @@ -0,0 +1,120 @@ +# In[1]: +import pandas as pd + +# In[2]: +data_filename = "data.json" +df = pd.read_json(data_filename).T +df.tail() + +# In[3]: +all_labels = {lbl for labels in df["labels"] for lbl in labels} +all_labels + +# In[4]: +# Add one column per label +for label in all_labels: + df[label] = df["labels"].apply(lambda labels_list: label in labels_list) +df.head() + +# In[5]: +# Add a clean "module" column. It contains tuples since PRs can have more than one module. +# Maybe we should include "topics" in that column as well? + +all_modules = { # mapping: full name -> clean name + label: "".join(label.split(" ")[1:]) for label in all_labels if label.startswith("module") +} + +# We use an ugly loop, but whatever ¯\_(ツ)_/¯ +df["module"] = [[] for _ in range(len(df))] +for i, row in df.iterrows(): + for full_name, clean_name in all_modules.items(): + if full_name in row["labels"]: + row["module"].append(clean_name) +df["module"] = df.module.apply(tuple) +df.head() + +# In[6]: +mod_df = df.set_index("module").sort_index() +mod_df.tail() + +# In[7]: +# All improvement PRs +mod_df[mod_df["enhancement"]].head() + +# In[8]: +# improvement f module +# note: don't filter module name on the index as the index contain tuples with non-exclusive values +# Use the boolean column instead +mod_df[mod_df["enhancement"] & mod_df["module: transforms"]] + + +# In[9]: +def format_prs(mod_df, exclude_prototype=True): + out = [] + for idx, row in mod_df.iterrows(): + if exclude_prototype and "prototype" in row and row["prototype"]: + continue + modules = idx + # Put "documentation" and "tests" first for sorting to be dece + for last_module in ("documentation", "tests"): + if last_module in modules: + modules = [m for m in modules if m != last_module] + [last_module] + + module = f"[{', '.join(modules)}]" + module = module.replace("referencescripts", "reference scripts") + module = module.replace("code", "reference scripts") + out.append(f"{module} {row['title']}") + + return "\n".join(out) + + +# In[10]: +included_prs = pd.DataFrame() + +# If labels are accurate, this shouhld generate most of the release notes already +# We keep track of the included PRs to figure out which ones are missing +for section_title, module_idx in ( + ("Backward-incompatible changes", "bc-breaking"), + ("Deprecations", "deprecation"), + ("New Features", "new feature"), + ("Improvements", "enhancement"), + ("Bug Fixes", "bug"), + ("Code Quality", "code quality"), +): + if module_idx in mod_df: + print(f"## {section_title}") + print() + tmp_df = mod_df[mod_df[module_idx]] + 
+# In[9]: +def format_prs(mod_df, exclude_prototype=True): + out = [] + for idx, row in mod_df.iterrows(): + if exclude_prototype and "prototype" in row and row["prototype"]: + continue + modules = idx + # Put "documentation" and "tests" last so the sorted output looks decent + for last_module in ("documentation", "tests"): + if last_module in modules: + modules = [m for m in modules if m != last_module] + [last_module] + + module = f"[{', '.join(modules)}]" + module = module.replace("referencescripts", "reference scripts") + module = module.replace("code", "reference scripts") + out.append(f"{module} {row['title']}") + + return "\n".join(out) + + +# In[10]: +included_prs = pd.DataFrame() + +# If the labels are accurate, this should generate most of the release notes already. +# We keep track of the included PRs to figure out which ones are missing. +for section_title, module_idx in ( + ("Backward-incompatible changes", "bc-breaking"), + ("Deprecations", "deprecation"), + ("New Features", "new feature"), + ("Improvements", "enhancement"), + ("Bug Fixes", "bug"), + ("Code Quality", "code quality"), +): + if module_idx in mod_df: + print(f"## {section_title}") + print() + tmp_df = mod_df[mod_df[module_idx]] + included_prs = pd.concat([included_prs, tmp_df]) + print(format_prs(tmp_df)) + print() + + +# In[11]: +# The PRs below were not caught by any section above; classify them manually. +missing_prs = pd.concat([mod_df, included_prs]).drop_duplicates(subset="pr_number", keep=False) +print(format_prs(missing_prs)) + +# In[12]: +# Generate the list of contributors +print() +print("## Contributors") + +previous_release = "c35d3855ccbfa6a36e6ae6337a1f2c721c1f1e78" +current_release = "5181a854d8b127cf465cd22a67c1b5aaf6ccae05" +print( + f"{{ git shortlog -s {previous_release}..{current_release} | cut -f2- & git log -s {previous_release}..{current_release} | grep Co-authored | cut -f2- -d: | cut -f1 -d\\< | sed 's/^ *//;s/ *//' ; }} | sort --ignore-case | uniq | tr '\\n' ';' | sed 's/;/, /g;s/,//' | fold -s" +) + +# In[13]: +# Utility to extract PR numbers only from multiple lines, useful to bundle all +# the docs changes for example: +import re + +s = """ + +[] Remove unnecessary dependency from macOS/Conda binaries (#8077) +[rocm] [ROCm] remove HCC references (#8070) +""" + +print(", ".join(re.findall("(#\\d+)", s))) diff --git a/scripts/release_notes/retrieve_prs_data.py b/scripts/release_notes/retrieve_prs_data.py new file mode 100644 index 00000000000..fb64902a6af --- /dev/null +++ b/scripts/release_notes/retrieve_prs_data.py @@ -0,0 +1,212 @@ +import json +import locale +import os +import re +import subprocess +from collections import namedtuple +from os.path import expanduser + +import requests + + +Features = namedtuple( + "Features", + [ + "title", + "body", + "pr_number", + "files_changed", + "labels", + ], +) + + +def dict_to_features(dct): + return Features( + title=dct["title"], + body=dct["body"], + pr_number=dct["pr_number"], + files_changed=dct["files_changed"], + labels=dct["labels"], + ) + + +def features_to_dict(features): + return dict(features._asdict()) + + +def run(command): + """Returns (return-code, stdout, stderr)""" + p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True) + output, err = p.communicate() + rc = p.returncode + enc = locale.getpreferredencoding() + output = output.decode(enc) + err = err.decode(enc) + return rc, output.strip(), err.strip() + + +def commit_body(commit_hash): + cmd = f"git log -n 1 --pretty=format:%b {commit_hash}" + ret, out, err = run(cmd) + return out if ret == 0 else None + + +def commit_title(commit_hash): + cmd = f"git log -n 1 --pretty=format:%s {commit_hash}" + ret, out, err = run(cmd) + return out if ret == 0 else None + + +def commit_files_changed(commit_hash): + cmd = f"git diff-tree --no-commit-id --name-only -r {commit_hash}" + ret, out, err = run(cmd) + return out.split("\n") if ret == 0 else None + + +def parse_pr_number(body, commit_hash, title): + regex = r"(#[0-9]+)" + matches = re.findall(regex, title) + if len(matches) == 0: + if "revert" not in title.lower() and "updating submodules" not in title.lower(): + print(f"[{commit_hash}: {title}] Could not parse PR number, ignoring PR") + return None + if len(matches) > 1: + print(f"[{commit_hash}: {title}] Got multiple PR numbers, using the last one") + return matches[-1][1:] + return matches[0][1:] + + +def get_ghstack_token(): + pattern = "github_oauth = (.*)" + with open(expanduser("~/.ghstackrc"), "r+") as f: + config = f.read() + matches = re.findall(pattern, config) + if len(matches) == 0: + raise RuntimeError("Can't find a github oauth token") + return matches[0] + + +token = get_ghstack_token() +headers = {"Authorization": f"token {token}"} + + +def run_query(query):
request = requests.post("https://api.github.com/graphql", json={"query": query}, headers=headers) + if request.status_code == 200: + return request.json() + else: + raise Exception(f"Query failed to run by returning code of {request.status_code}. {query}") + + +def gh_labels(pr_number): + query = f""" + {{ + repository(owner: "pytorch", name: "vision") {{ + pullRequest(number: {pr_number}) {{ + labels(first: 10) {{ + edges {{ + node {{ + name + }} + }} + }} + }} + }} + }} + """ + query = run_query(query) + edges = query["data"]["repository"]["pullRequest"]["labels"]["edges"] + return [edge["node"]["name"] for edge in edges] + + +def get_features(commit_hash, return_dict=False): + title, body, files_changed = ( + commit_title(commit_hash), + commit_body(commit_hash), + commit_files_changed(commit_hash), + ) + pr_number = parse_pr_number(body, commit_hash, title) + labels = [] + if pr_number is not None: + labels = gh_labels(pr_number) + result = Features(title, body, pr_number, files_changed, labels) + if return_dict: + return features_to_dict(result) + return result + + +class CommitDataCache: + def __init__(self, path="results/data.json"): + self.path = path + self.data = {} + if os.path.exists(path): + self.data = self.read_from_disk() + + def get(self, commit): + if commit not in self.data.keys(): + # Fetch and cache the data + self.data[commit] = get_features(commit) + self.write_to_disk() + return self.data[commit] + + def read_from_disk(self): + with open(self.path) as f: + data = json.load(f) + data = {commit: dict_to_features(dct) for commit, dct in data.items()} + return data + + def write_to_disk(self): + data = {commit: features._asdict() for commit, features in self.data.items()} + with open(self.path, "w") as f: + json.dump(data, f) + + +def get_commits_between(base_version, new_version): + cmd = f"git merge-base {base_version} {new_version}" + rc, merge_base, _ = run(cmd) + assert rc == 0 + + # Returns a list of something like + # b33e38ec47 Allow a higher-precision step type for Vec256::arange (#34555) + cmd = f"git log --reverse --oneline {merge_base}..{new_version}" + rc, commits, _ = run(cmd) + assert rc == 0 + + log_lines = commits.split("\n") + hashes, titles = zip(*[log_line.split(" ", 1) for log_line in log_lines]) + return hashes, titles + + +def convert_to_dataframes(feature_list): + import pandas as pd + + df = pd.DataFrame.from_records(feature_list, columns=Features._fields) + return df + + +def main(base_version, new_version): + hashes, titles = get_commits_between(base_version, new_version) + + cdc = CommitDataCache("data.json") + for idx, commit in enumerate(hashes): + if idx % 10 == 0: + print(f"{idx} / {len(hashes)}") + cdc.get(commit) + + return cdc + + +if __name__ == "__main__": + # d = get_features('2ab93592529243862ce8ad5b6acf2628ef8d0dc8') + # print(d) + # hashes, titles = get_commits_between("tags/v0.9.0", "fc852f3b39fe25dd8bf1dedee8f19ea04aa84c15") + + # Usage: change the tags below accordingly to the current release, then save the json with + # cdc.write_to_disk(). + # Then you can use classify_prs.py (as a notebook) + # to open the json and generate the release notes semi-automatically. 
+ cdc = main("tags/v0.9.0", "fc852f3b39fe25dd8bf1dedee8f19ea04aa84c15") + from IPython import embed + + embed() diff --git a/setup.cfg b/setup.cfg index 5b77b5fbce3..0f4ddbfab10 100644 --- a/setup.cfg +++ b/setup.cfg @@ -2,12 +2,22 @@ universal=1 [metadata] -license_file = LICENSE +license_files = LICENSE [pep8] max-line-length = 120 [flake8] +# note: we ignore all 501s (line too long) anyway as they're taken care of by black max-line-length = 120 -ignore = F401,E402,F403,W503,W504 +ignore = E203, E402, W503, W504, F821, E501, B, C4, EXE +per-file-ignores = + __init__.py: F401, F403, F405 + ./hubconf.py: F401 + torchvision/models/mobilenet.py: F401, F403 + torchvision/models/quantization/mobilenet.py: F401, F403 + test/smoke_test.py: F401 exclude = venv + +[pydocstyle] +select = D417 # Missing argument descriptions in the docstring diff --git a/setup.py b/setup.py index 8ece63ce739..956682e7ead 100644 --- a/setup.py +++ b/setup.py @@ -1,192 +1,518 @@ -from __future__ import print_function -import os -import io -import re -import sys -from setuptools import setup, find_packages -from pkg_resources import get_distribution, DistributionNotFound -import subprocess import distutils.command.clean import distutils.spawn import glob +import os import shutil +import subprocess +import sys +import warnings +from pathlib import Path import torch -from torch.utils.cpp_extension import BuildExtension, CppExtension, CUDAExtension, CUDA_HOME +from pkg_resources import DistributionNotFound, get_distribution, parse_version +from setuptools import find_packages, setup +from torch.utils.cpp_extension import BuildExtension, CppExtension, CUDA_HOME, CUDAExtension, ROCM_HOME + +FORCE_CUDA = os.getenv("FORCE_CUDA", "0") == "1" +FORCE_MPS = os.getenv("FORCE_MPS", "0") == "1" +DEBUG = os.getenv("DEBUG", "0") == "1" +USE_PNG = os.getenv("TORCHVISION_USE_PNG", "1") == "1" +USE_JPEG = os.getenv("TORCHVISION_USE_JPEG", "1") == "1" +USE_WEBP = os.getenv("TORCHVISION_USE_WEBP", "1") == "1" +USE_NVJPEG = os.getenv("TORCHVISION_USE_NVJPEG", "1") == "1" +NVCC_FLAGS = os.getenv("NVCC_FLAGS", None) +# Note: the GPU video decoding stuff used to be called "video codec", which +# isn't an accurate or descriptive name considering there are at least 2 other +# video deocding backends in torchvision. I'm renaming this to "gpu video +# decoder" where possible, keeping user facing names (like the env var below) to +# the old scheme for BC. +USE_GPU_VIDEO_DECODER = os.getenv("TORCHVISION_USE_VIDEO_CODEC", "1") == "1" +# Same here: "use ffmpeg" was used to denote "use cpu video decoder". 
+USE_CPU_VIDEO_DECODER = os.getenv("TORCHVISION_USE_FFMPEG", "1") == "1" + +TORCHVISION_INCLUDE = os.environ.get("TORCHVISION_INCLUDE", "") +TORCHVISION_LIBRARY = os.environ.get("TORCHVISION_LIBRARY", "") +TORCHVISION_INCLUDE = TORCHVISION_INCLUDE.split(os.pathsep) if TORCHVISION_INCLUDE else [] +TORCHVISION_LIBRARY = TORCHVISION_LIBRARY.split(os.pathsep) if TORCHVISION_LIBRARY else [] + +ROOT_DIR = Path(__file__).absolute().parent +CSRS_DIR = ROOT_DIR / "torchvision/csrc" +IS_ROCM = (torch.version.hip is not None) and (ROCM_HOME is not None) +BUILD_CUDA_SOURCES = (torch.cuda.is_available() and ((CUDA_HOME is not None) or IS_ROCM)) or FORCE_CUDA + +package_name = os.getenv("TORCHVISION_PACKAGE_NAME", "torchvision") + +print("Torchvision build configuration:") +print(f"{FORCE_CUDA = }") +print(f"{FORCE_MPS = }") +print(f"{DEBUG = }") +print(f"{USE_PNG = }") +print(f"{USE_JPEG = }") +print(f"{USE_WEBP = }") +print(f"{USE_NVJPEG = }") +print(f"{NVCC_FLAGS = }") +print(f"{USE_CPU_VIDEO_DECODER = }") +print(f"{USE_GPU_VIDEO_DECODER = }") +print(f"{TORCHVISION_INCLUDE = }") +print(f"{TORCHVISION_LIBRARY = }") +print(f"{IS_ROCM = }") +print(f"{BUILD_CUDA_SOURCES = }") + + +def get_version(): + with open(ROOT_DIR / "version.txt") as f: + version = f.readline().strip() + sha = "Unknown" - -def read(*names, **kwargs): - with io.open( - os.path.join(os.path.dirname(__file__), *names), - encoding=kwargs.get("encoding", "utf8") - ) as fp: - return fp.read() - - -def get_dist(pkgname): try: - return get_distribution(pkgname) - except DistributionNotFound: - return None - - -version = '0.5.0a0' -sha = 'Unknown' -package_name = 'torchvision' - -cwd = os.path.dirname(os.path.abspath(__file__)) + sha = subprocess.check_output(["git", "rev-parse", "HEAD"], cwd=str(ROOT_DIR)).decode("ascii").strip() + except Exception: + pass -try: - sha = subprocess.check_output(['git', 'rev-parse', 'HEAD'], cwd=cwd).decode('ascii').strip() -except Exception: - pass + if os.getenv("BUILD_VERSION"): + version = os.getenv("BUILD_VERSION") + elif sha != "Unknown": + version += "+" + sha[:7] -if os.getenv('BUILD_VERSION'): - version = os.getenv('BUILD_VERSION') -elif sha != 'Unknown': - version += '+' + sha[:7] -print("Building wheel {}-{}".format(package_name, version)) + return version, sha -def write_version_file(): - version_path = os.path.join(cwd, 'torchvision', 'version.py') - with open(version_path, 'w') as f: - f.write("__version__ = '{}'\n".format(version)) - f.write("git_version = {}\n".format(repr(sha))) +def write_version_file(version, sha): + # Exists for BC, probably completely useless. 
+ with open(ROOT_DIR / "torchvision/version.py", "w") as f: + f.write(f"__version__ = '{version}'\n") + f.write(f"git_version = {repr(sha)}\n") f.write("from torchvision.extension import _check_cuda_version\n") f.write("if _check_cuda_version() > 0:\n") f.write(" cuda = _check_cuda_version()\n") -write_version_file() +def get_requirements(): + def get_dist(pkgname): + try: + return get_distribution(pkgname) + except DistributionNotFound: + return None -readme = open('README.rst').read() + pytorch_dep = os.getenv("TORCH_PACKAGE_NAME", "torch") + if os.getenv("PYTORCH_VERSION"): + pytorch_dep += "==" + os.getenv("PYTORCH_VERSION") -pytorch_dep = 'torch' -if os.getenv('PYTORCH_VERSION'): - pytorch_dep += "==" + os.getenv('PYTORCH_VERSION') - -requirements = [ - 'numpy', - 'six', - pytorch_dep, -] + requirements = [ + "numpy", + pytorch_dep, + ] -pillow_ver = ' >= 4.1.1' -pillow_req = 'pillow-simd' if get_dist('pillow-simd') is not None else 'pillow' -requirements.append(pillow_req + pillow_ver) + # Excluding 8.3.* because of https://github.com/pytorch/vision/issues/4934 + pillow_ver = " >= 5.3.0, !=8.3.*" + pillow_req = "pillow-simd" if get_dist("pillow-simd") is not None else "pillow" + requirements.append(pillow_req + pillow_ver) + return requirements -def get_extensions(): - this_dir = os.path.dirname(os.path.abspath(__file__)) - extensions_dir = os.path.join(this_dir, 'torchvision', 'csrc') - main_file = glob.glob(os.path.join(extensions_dir, '*.cpp')) - source_cpu = glob.glob(os.path.join(extensions_dir, 'cpu', '*.cpp')) - source_cuda = glob.glob(os.path.join(extensions_dir, 'cuda', '*.cu')) +def get_macros_and_flags(): + define_macros = [] + extra_compile_args = {"cxx": []} + if BUILD_CUDA_SOURCES: + if IS_ROCM: + define_macros += [("WITH_HIP", None)] + nvcc_flags = [] + else: + define_macros += [("WITH_CUDA", None)] + if NVCC_FLAGS is None: + nvcc_flags = [] + else: + nvcc_flags = NVCC_FLAGS.split(" ") + extra_compile_args["nvcc"] = nvcc_flags + + if sys.platform == "win32": + define_macros += [("torchvision_EXPORTS", None)] + extra_compile_args["cxx"].append("/MP") + + if DEBUG: + extra_compile_args["cxx"].append("-g") + extra_compile_args["cxx"].append("-O0") + if "nvcc" in extra_compile_args: + # we have to remove "-OX" and "-g" flag if exists and append + nvcc_flags = extra_compile_args["nvcc"] + extra_compile_args["nvcc"] = [f for f in nvcc_flags if not ("-O" in f or "-g" in f)] + extra_compile_args["nvcc"].append("-O0") + extra_compile_args["nvcc"].append("-g") + else: + extra_compile_args["cxx"].append("-g0") + + return define_macros, extra_compile_args + + +def make_C_extension(): + print("Building _C extension") + + sources = ( + list(CSRS_DIR.glob("*.cpp")) + + list(CSRS_DIR.glob("ops/*.cpp")) + + list(CSRS_DIR.glob("ops/autocast/*.cpp")) + + list(CSRS_DIR.glob("ops/autograd/*.cpp")) + + list(CSRS_DIR.glob("ops/cpu/*.cpp")) + + list(CSRS_DIR.glob("ops/quantized/cpu/*.cpp")) + ) + mps_sources = list(CSRS_DIR.glob("ops/mps/*.mm")) + + if IS_ROCM: + from torch.utils.hipify import hipify_python + + hipify_python.hipify( + project_directory=str(ROOT_DIR), + output_directory=str(ROOT_DIR), + includes="torchvision/csrc/ops/cuda/*", + show_detailed=True, + is_pytorch_extension=True, + ) + cuda_sources = list(CSRS_DIR.glob("ops/hip/*.hip")) + for header in CSRS_DIR.glob("ops/cuda/*.h"): + shutil.copy(str(header), str(CSRS_DIR / "ops/hip")) + else: + cuda_sources = list(CSRS_DIR.glob("ops/cuda/*.cu")) + + if BUILD_CUDA_SOURCES: + Extension = CUDAExtension + sources += cuda_sources + 
else: + Extension = CppExtension + if torch.backends.mps.is_available() or FORCE_MPS: + sources += mps_sources + + define_macros, extra_compile_args = get_macros_and_flags() + return Extension( + name="torchvision._C", + sources=sorted(str(s) for s in sources), + include_dirs=[CSRS_DIR], + define_macros=define_macros, + extra_compile_args=extra_compile_args, + ) + + +def find_libpng(): + # Returns (found, include dir, library dir, library name) + if sys.platform in ("linux", "darwin"): + libpng_config = shutil.which("libpng-config") + if libpng_config is None: + warnings.warn("libpng-config not found") + return False, None, None, None + min_version = parse_version("1.6.0") + png_version = parse_version( + subprocess.run([libpng_config, "--version"], stdout=subprocess.PIPE).stdout.strip().decode("utf-8") + ) + if png_version < min_version: + warnings.warn(f"libpng version {png_version} is less than minimum required version {min_version}") + return False, None, None, None + + include_dir = ( + subprocess.run([libpng_config, "--I_opts"], stdout=subprocess.PIPE) + .stdout.strip() + .decode("utf-8") + .split("-I")[1] + ) + library_dir = subprocess.run([libpng_config, "--libdir"], stdout=subprocess.PIPE).stdout.strip().decode("utf-8") + library = "png" + else: # Windows + pngfix = shutil.which("pngfix") + if pngfix is None: + warnings.warn("pngfix not found") + return False, None, None, None + pngfix_dir = Path(pngfix).absolute().parent.parent + + library_dir = str(pngfix_dir / "lib") + include_dir = str(pngfix_dir / "include/libpng16") + library = "libpng" + + return True, include_dir, library_dir, library
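The version gate in find_libpng can be exercised on its own, which is handy when debugging a build that silently drops PNG support. Below is a standalone sketch of the same probe, assuming a Unix-like system with libpng installed (parse_version comes from pkg_resources, as in this setup.py):

    import shutil
    import subprocess

    from pkg_resources import parse_version

    libpng_config = shutil.which("libpng-config")
    if libpng_config is None:
        print("libpng-config not found on PATH")
    else:
        # Same probe as find_libpng(): ask the helper binary for its version
        # and compare it against the minimum torchvision accepts.
        raw = subprocess.run([libpng_config, "--version"], stdout=subprocess.PIPE).stdout.strip().decode("utf-8")
        print("libpng", raw, "new enough:", parse_version(raw) >= parse_version("1.6.0"))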
Didn't find in {prefix}") + + return False, None, None + + +def make_image_extension(): + print("Building image extension") + + include_dirs = TORCHVISION_INCLUDE.copy() + library_dirs = TORCHVISION_LIBRARY.copy() + + libraries = [] + define_macros, extra_compile_args = get_macros_and_flags() + + image_dir = CSRS_DIR / "io/image" + sources = list(image_dir.glob("*.cpp")) + list(image_dir.glob("cpu/*.cpp")) + list(image_dir.glob("cpu/giflib/*.c")) + + if IS_ROCM: + sources += list(image_dir.glob("hip/*.cpp")) + # we need to exclude this in favor of the hipified source + sources.remove(image_dir / "image.cpp") + else: + sources += list(image_dir.glob("cuda/*.cpp")) + + Extension = CppExtension + + if USE_PNG: + png_found, png_include_dir, png_library_dir, png_library = find_libpng() + if png_found: + print("Building torchvision with PNG support") + print(f"{png_include_dir = }") + print(f"{png_library_dir = }") + include_dirs.append(png_include_dir) + library_dirs.append(png_library_dir) + libraries.append(png_library) + define_macros += [("PNG_FOUND", 1)] + else: + warnings.warn("Building torchvision without PNG support") + + if USE_JPEG: + jpeg_found, jpeg_include_dir, jpeg_library_dir = find_library(header="jpeglib.h") + if jpeg_found: + print("Building torchvision with JPEG support") + print(f"{jpeg_include_dir = }") + print(f"{jpeg_library_dir = }") + if jpeg_include_dir is not None and jpeg_library_dir is not None: + # if those are None it means they come from standard paths that are already in the search paths, which we don't need to re-add. + include_dirs.append(jpeg_include_dir) + library_dirs.append(jpeg_library_dir) + libraries.append("jpeg") + define_macros += [("JPEG_FOUND", 1)] + else: + warnings.warn("Building torchvision without JPEG support") + + if USE_WEBP: + webp_found, webp_include_dir, webp_library_dir = find_library(header="webp/decode.h") + if webp_found: + print("Building torchvision with WEBP support") + print(f"{webp_include_dir = }") + print(f"{webp_library_dir = }") + if webp_include_dir is not None and webp_library_dir is not None: + # if those are None it means they come from standard paths that are already in the search paths, which we don't need to re-add. 
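Because find_library() consults TORCHVISION_INCLUDE and TORCHVISION_LIBRARY before the conda and system prefixes, a codec installed in a non-standard location only needs two environment variables. A hypothetical invocation (the libjpeg-turbo prefix is invented for illustration):

# TORCHVISION_INCLUDE=/opt/libjpeg-turbo/include \
# TORCHVISION_LIBRARY=/opt/libjpeg-turbo/lib \
# python setup.py develop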
+ include_dirs.append(webp_include_dir) + library_dirs.append(webp_library_dir) + webp_library = "libwebp" if sys.platform == "win32" else "webp" + libraries.append(webp_library) + define_macros += [("WEBP_FOUND", 1)] + else: + warnings.warn("Building torchvision without WEBP support") - sources = main_file + source_cpu - extension = CppExtension + if USE_NVJPEG and (torch.cuda.is_available() or FORCE_CUDA): + nvjpeg_found = CUDA_HOME is not None and (Path(CUDA_HOME) / "include/nvjpeg.h").exists() - compile_cpp_tests = os.getenv('WITH_CPP_MODELS_TEST', '0') == '1' - if compile_cpp_tests: - test_dir = os.path.join(this_dir, 'test') - models_dir = os.path.join(this_dir, 'torchvision', 'csrc', 'models') - test_file = glob.glob(os.path.join(test_dir, '*.cpp')) - source_models = glob.glob(os.path.join(models_dir, '*.cpp')) + if nvjpeg_found: + print("Building torchvision with NVJPEG image support") + libraries.append("nvjpeg") + define_macros += [("NVJPEG_FOUND", 1)] + Extension = CUDAExtension + else: + warnings.warn("Building torchvision without NVJPEG support") + elif USE_NVJPEG: + warnings.warn("Building torchvision without NVJPEG support") + + return Extension( + name="torchvision.image", + sources=sorted(str(s) for s in sources), + include_dirs=include_dirs, + library_dirs=library_dirs, + define_macros=define_macros, + libraries=libraries, + extra_compile_args=extra_compile_args, + ) + + +def make_video_decoders_extensions(): + print("Building video decoder extensions") + + build_without_extensions_msg = "Building without video decoders extensions." + if sys.platform != "linux" or (sys.version_info.major == 3 and sys.version_info.minor == 9): + # FIXME: Building torchvision with ffmpeg on MacOS or with Python 3.9 + # FIXME: causes crash. See the following GitHub issues for more details. 
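The NVJPEG branch above reduces to a single header probe under the CUDA toolkit root. A standalone sketch; the fallback path is an assumption (setup.py itself takes CUDA_HOME from torch.utils.cpp_extension):

import os
from pathlib import Path

cuda_home = os.environ.get("CUDA_HOME", "/usr/local/cuda")  # assumed default toolkit prefix
nvjpeg_found = (Path(cuda_home) / "include/nvjpeg.h").exists()
print(f"nvjpeg available: {nvjpeg_found}")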
+ # FIXME: https://github.com/pytorch/pytorch/issues/65000 + # FIXME: https://github.com/pytorch/vision/issues/3367 + print("Can only build video decoder extensions on linux and Python != 3.9") + return [] + + ffmpeg_exe = shutil.which("ffmpeg") + if ffmpeg_exe is None: + print(f"{build_without_extensions_msg} Couldn't find ffmpeg binary.") + return [] + + def find_ffmpeg_libraries(): + ffmpeg_libraries = {"libavcodec", "libavformat", "libavutil", "libswresample", "libswscale"} - test_file = [os.path.join(test_dir, s) for s in test_file] - source_models = [os.path.join(models_dir, s) for s in source_models] - tests = test_file + source_models - tests_include_dirs = [test_dir, models_dir] + ffmpeg_bin = os.path.dirname(ffmpeg_exe) + ffmpeg_root = os.path.dirname(ffmpeg_bin) + ffmpeg_include_dir = os.path.join(ffmpeg_root, "include") + ffmpeg_library_dir = os.path.join(ffmpeg_root, "lib") - define_macros = [] + gcc = os.environ.get("CC", shutil.which("gcc")) + platform_tag = subprocess.run([gcc, "-print-multiarch"], stdout=subprocess.PIPE) + platform_tag = platform_tag.stdout.strip().decode("utf-8") - extra_compile_args = {} - if (torch.cuda.is_available() and CUDA_HOME is not None) or os.getenv('FORCE_CUDA', '0') == '1': - extension = CUDAExtension - sources += source_cuda - define_macros += [('WITH_CUDA', None)] - nvcc_flags = os.getenv('NVCC_FLAGS', '') - if nvcc_flags == '': - nvcc_flags = [] + if platform_tag: + # Most probably a Debian-based distribution + ffmpeg_include_dir = [ffmpeg_include_dir, os.path.join(ffmpeg_include_dir, platform_tag)] + ffmpeg_library_dir = [ffmpeg_library_dir, os.path.join(ffmpeg_library_dir, platform_tag)] else: - nvcc_flags = nvcc_flags.split(' ') - extra_compile_args = { - 'cxx': ['-O0'], - 'nvcc': nvcc_flags, - } + ffmpeg_include_dir = [ffmpeg_include_dir] + ffmpeg_library_dir = [ffmpeg_library_dir] - if sys.platform == 'win32': - define_macros += [('torchvision_EXPORTS', None)] + for library in ffmpeg_libraries: + library_found = False + for search_path in ffmpeg_include_dir + TORCHVISION_INCLUDE: + full_path = os.path.join(search_path, library, "*.h") + library_found |= len(glob.glob(full_path)) > 0 - extra_compile_args.setdefault('cxx', []) - extra_compile_args['cxx'].append('/MP') + if not library_found: + print(f"{build_without_extensions_msg}") + print(f"{library} header files were not found.") + return None, None - sources = [os.path.join(extensions_dir, s) for s in sources] + return ffmpeg_include_dir, ffmpeg_library_dir - include_dirs = [extensions_dir] + ffmpeg_include_dir, ffmpeg_library_dir = find_ffmpeg_libraries() + if ffmpeg_include_dir is None or ffmpeg_library_dir is None: + return [] - ffmpeg_exe = distutils.spawn.find_executable('ffmpeg') - has_ffmpeg = ffmpeg_exe is not None - if has_ffmpeg: - ffmpeg_bin = os.path.dirname(ffmpeg_exe) - ffmpeg_root = os.path.dirname(ffmpeg_bin) - ffmpeg_include_dir = os.path.join(ffmpeg_root, 'include') + print("Found ffmpeg:") + print(f" ffmpeg include path: {ffmpeg_include_dir}") + print(f" ffmpeg library_dir: {ffmpeg_library_dir}") - # TorchVision video reader - video_reader_src_dir = os.path.join(this_dir, 'torchvision', 'csrc', 'cpu', 'video_reader') + extensions = [] + if USE_CPU_VIDEO_DECODER: + print("Building with CPU video decoder support") + + # TorchVision base decoder + video reader + video_reader_src_dir = os.path.join(ROOT_DIR, "torchvision", "csrc", "io", "video_reader") video_reader_src = glob.glob(os.path.join(video_reader_src_dir, "*.cpp")) + base_decoder_src_dir = 
os.path.join(ROOT_DIR, "torchvision", "csrc", "io", "decoder") + base_decoder_src = glob.glob(os.path.join(base_decoder_src_dir, "*.cpp")) + # Torchvision video API + videoapi_src_dir = os.path.join(ROOT_DIR, "torchvision", "csrc", "io", "video") + videoapi_src = glob.glob(os.path.join(videoapi_src_dir, "*.cpp")) + # exclude tests + base_decoder_src = [x for x in base_decoder_src if "_test.cpp" not in x] - ext_modules = [ - extension( - 'torchvision._C', - sources, - include_dirs=include_dirs, - define_macros=define_macros, - extra_compile_args=extra_compile_args, - ) - ] - if compile_cpp_tests: - ext_modules.append( - extension( - 'torchvision._C_tests', - tests, - include_dirs=tests_include_dirs, - define_macros=define_macros, - extra_compile_args=extra_compile_args, - ) - ) - if has_ffmpeg: - ext_modules.append( + combined_src = video_reader_src + base_decoder_src + videoapi_src + + extensions.append( CppExtension( - 'torchvision.video_reader', - video_reader_src, + # This is an awful name. It should be "cpu_video_decoder". Keeping for BC. + "torchvision.video_reader", + combined_src, include_dirs=[ + base_decoder_src_dir, video_reader_src_dir, - ffmpeg_include_dir, - extensions_dir, + videoapi_src_dir, + str(CSRS_DIR), + *ffmpeg_include_dir, + *TORCHVISION_INCLUDE, + ], + library_dirs=ffmpeg_library_dir + TORCHVISION_LIBRARY, + libraries=[ + "avcodec", + "avformat", + "avutil", + "swresample", + "swscale", ], + extra_compile_args=["-std=c++17"] if os.name != "nt" else ["/std:c++17", "/MP"], + extra_link_args=["-std=c++17" if os.name != "nt" else "/std:c++17"], + ) + ) + 
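find_ffmpeg_libraries() above relies on gcc's multiarch triplet to handle Debian-style header layouts. A standalone sketch of that probe (the triplet shown is what a typical x86_64 Debian/Ubuntu host reports):

import shutil
import subprocess

gcc = shutil.which("gcc")
tag = subprocess.run([gcc, "-print-multiarch"], stdout=subprocess.PIPE).stdout.strip().decode("utf-8")
print(tag)  # e.g. x86_64-linux-gnu; empty on non-multiarch distributions
# On multiarch systems the ffmpeg headers can additionally live in include/x86_64-linux-gnu/libav*/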
+ if USE_GPU_VIDEO_DECODER: + # Locating GPU video decoder headers and libraries + # CUDA_HOME should be set to the cuda root directory. + # TORCHVISION_INCLUDE and TORCHVISION_LIBRARY should include the locations + # of the headers and libraries below + if not ( + BUILD_CUDA_SOURCES + and CUDA_HOME is not None + and any([os.path.exists(os.path.join(folder, "cuviddec.h")) for folder in TORCHVISION_INCLUDE]) + and any([os.path.exists(os.path.join(folder, "nvcuvid.h")) for folder in TORCHVISION_INCLUDE]) + and any([os.path.exists(os.path.join(folder, "libnvcuvid.so")) for folder in TORCHVISION_LIBRARY]) + and any([os.path.exists(os.path.join(folder, "libavcodec", "bsf.h")) for folder in ffmpeg_include_dir]) + ): + print("Could not find necessary dependencies. Refer to setup.py to check which ones are needed.") + print("Building without GPU video decoder support") + return extensions + print("Building torchvision with GPU video decoder support") + + gpu_decoder_path = os.path.join(CSRS_DIR, "io", "decoder", "gpu") + gpu_decoder_src = glob.glob(os.path.join(gpu_decoder_path, "*.cpp")) + cuda_libs = os.path.join(CUDA_HOME, "lib64") + cuda_inc = os.path.join(CUDA_HOME, "include") + + _, extra_compile_args = get_macros_and_flags() + extensions.append( + CUDAExtension( + "torchvision.gpu_decoder", + gpu_decoder_src, + include_dirs=[CSRS_DIR] + TORCHVISION_INCLUDE + [gpu_decoder_path] + [cuda_inc] + ffmpeg_include_dir, + library_dirs=ffmpeg_library_dir + TORCHVISION_LIBRARY + [cuda_libs], libraries=[ - 'avcodec', - 'avformat', - 'avutil', - 'swresample', - 'swscale', + "avcodec", + "avformat", + "avutil", + "swresample", + "swscale", + "nvcuvid", + "cuda", + "cudart", + "z", + "pthread", + "dl", + "nppicc", ], - extra_compile_args=["-std=c++14"], - extra_link_args=["-std=c++14"], + extra_compile_args=extra_compile_args, ) ) - return ext_modules + return extensions class clean(distutils.command.clean.clean): def run(self): - with open('.gitignore', 'r') as f: + with open(".gitignore") as f: ignores = f.read() - for wildcard in filter(None, ignores.split('\n')): + for wildcard in filter(None, ignores.split("\n")): for filename in glob.glob(wildcard): try: os.remove(filename) @@ -197,28 +523,43 @@ def run(self): distutils.command.clean.clean.run(self) -setup( - # Metadata - name=package_name, - version=version, - author='PyTorch Core Team', - author_email='soumith@pytorch.org', - url='https://github.com/pytorch/vision', - description='image and video datasets and models for torch deep learning', - long_description=readme, - license='BSD', - - # Package info - packages=find_packages(exclude=('test',)), - - zip_safe=False, - install_requires=requirements, - extras_require={ - "scipy": ["scipy"], - }, - ext_modules=get_extensions(), - cmdclass={ - 'build_ext': BuildExtension.with_options(no_python_abi_suffix=True), - 'clean': clean, - } -) +if __name__ == "__main__": + version, sha = get_version() + write_version_file(version, sha) + + print(f"Building wheel {package_name}-{version}") + + with open("README.md") as f: + readme = f.read() + + extensions = [ + make_C_extension(), + make_image_extension(), + *make_video_decoders_extensions(), + ] + + setup( + name=package_name, + version=version, + author="PyTorch Core Team", + author_email="soumith@pytorch.org", + url="https://github.com/pytorch/vision", + description="image and video datasets and models for torch deep learning", + long_description=readme, + long_description_content_type="text/markdown", + license="BSD", + packages=find_packages(exclude=("test",)), + package_data={package_name: ["*.dll", "*.dylib", "*.so", "prototype/datasets/_builtin/*.categories"]}, + zip_safe=False, + install_requires=get_requirements(), + extras_require={ + "gdown": ["gdown>=4.7.3"], + "scipy": ["scipy"], + }, + ext_modules=extensions, + python_requires=">=3.8", + cmdclass={ + "build_ext": BuildExtension.with_options(no_python_abi_suffix=True), + "clean": clean, + }, + ) diff --git a/test/_utils_internal.py b/test/_utils_internal.py new file mode 100644 index 00000000000..1a32e6f2b25 --- /dev/null +++ b/test/_utils_internal.py @@ -0,0 +1,7 @@ +import os + + +# Get relative file path +# This returns a path relative to the directory of the current file. 
+def get_relative_path(curr_file, *path_components): + return os.path.join(os.path.dirname(curr_file), *path_components) diff --git a/test/assets/damaged_jpeg/TensorFlow-LICENSE b/test/assets/damaged_jpeg/TensorFlow-LICENSE new file mode 100644 index 00000000000..c7563fe4e5b --- /dev/null +++ b/test/assets/damaged_jpeg/TensorFlow-LICENSE @@ -0,0 +1,13 @@ + Copyright 2019 The TensorFlow Authors. All rights reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/test/assets/damaged_jpeg/bad_huffman.jpg b/test/assets/damaged_jpeg/bad_huffman.jpg new file mode 100644 index 00000000000..ef5b6f12c55 Binary files /dev/null and b/test/assets/damaged_jpeg/bad_huffman.jpg differ diff --git a/test/assets/damaged_jpeg/corrupt.jpg b/test/assets/damaged_jpeg/corrupt.jpg new file mode 100644 index 00000000000..5e2fe6c56f5 Binary files /dev/null and b/test/assets/damaged_jpeg/corrupt.jpg differ diff --git a/test/assets/damaged_jpeg/corrupt34_2.jpg b/test/assets/damaged_jpeg/corrupt34_2.jpg new file mode 100644 index 00000000000..4211155c455 Binary files /dev/null and b/test/assets/damaged_jpeg/corrupt34_2.jpg differ diff --git a/test/assets/damaged_jpeg/corrupt34_3.jpg b/test/assets/damaged_jpeg/corrupt34_3.jpg new file mode 100644 index 00000000000..c1c2a9d1e1e Binary files /dev/null and b/test/assets/damaged_jpeg/corrupt34_3.jpg differ diff --git a/test/assets/damaged_jpeg/corrupt34_4.jpg b/test/assets/damaged_jpeg/corrupt34_4.jpg new file mode 100644 index 00000000000..b8e7308ba00 Binary files /dev/null and b/test/assets/damaged_jpeg/corrupt34_4.jpg differ diff --git a/test/assets/damaged_png/sigsegv.png b/test/assets/damaged_png/sigsegv.png new file mode 100644 index 00000000000..3ecff65ec60 Binary files /dev/null and b/test/assets/damaged_png/sigsegv.png differ diff --git a/test/assets/grace_hopper_517x606.jpg b/test/assets/encode_jpeg/grace_hopper_517x606.jpg similarity index 100% rename from test/assets/grace_hopper_517x606.jpg rename to test/assets/encode_jpeg/grace_hopper_517x606.jpg diff --git a/test/assets/encode_jpeg/jpeg_write/grace_hopper_517x606_pil.jpg b/test/assets/encode_jpeg/jpeg_write/grace_hopper_517x606_pil.jpg new file mode 100644 index 00000000000..0f37ea0d9e1 Binary files /dev/null and b/test/assets/encode_jpeg/jpeg_write/grace_hopper_517x606_pil.jpg differ diff --git a/test/assets/expected_flow.pt b/test/assets/expected_flow.pt new file mode 100644 index 00000000000..403784b1db1 Binary files /dev/null and b/test/assets/expected_flow.pt differ diff --git a/test/assets/fakedata/draw_boxes_different_label_colors.png b/test/assets/fakedata/draw_boxes_different_label_colors.png new file mode 100644 index 00000000000..72178930602 Binary files /dev/null and b/test/assets/fakedata/draw_boxes_different_label_colors.png differ diff --git a/test/assets/fakedata/draw_boxes_util.png b/test/assets/fakedata/draw_boxes_util.png new file mode 100644 index 00000000000..ee5dac329e0 Binary files /dev/null and b/test/assets/fakedata/draw_boxes_util.png differ diff --git 
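get_relative_path simply anchors path components at the directory of the calling file. A usage sketch (the asset folder names are illustrative):

from _utils_internal import get_relative_path

# Resolves to "<directory containing this test file>/assets/encode_jpeg".
assets_dir = get_relative_path(__file__, "assets", "encode_jpeg")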
a/test/assets/fakedata/draw_boxes_vanilla.png b/test/assets/fakedata/draw_boxes_vanilla.png new file mode 100644 index 00000000000..bbc7112deb0 Binary files /dev/null and b/test/assets/fakedata/draw_boxes_vanilla.png differ diff --git a/test/assets/fakedata/draw_keypoint_vanilla.png b/test/assets/fakedata/draw_keypoint_vanilla.png new file mode 100644 index 00000000000..6cd6d943b6c Binary files /dev/null and b/test/assets/fakedata/draw_keypoint_vanilla.png differ diff --git a/test/assets/fakedata/draw_keypoints_visibility.png b/test/assets/fakedata/draw_keypoints_visibility.png new file mode 100644 index 00000000000..8cd34f84539 Binary files /dev/null and b/test/assets/fakedata/draw_keypoints_visibility.png differ diff --git a/test/assets/fakedata/logos/cmyk_pytorch.jpg b/test/assets/fakedata/logos/cmyk_pytorch.jpg new file mode 100644 index 00000000000..16ee8b2b4bc Binary files /dev/null and b/test/assets/fakedata/logos/cmyk_pytorch.jpg differ diff --git a/test/assets/fakedata/logos/gray_pytorch.jpg b/test/assets/fakedata/logos/gray_pytorch.jpg new file mode 100644 index 00000000000..60c9c7cf705 Binary files /dev/null and b/test/assets/fakedata/logos/gray_pytorch.jpg differ diff --git a/test/assets/fakedata/logos/gray_pytorch.png b/test/assets/fakedata/logos/gray_pytorch.png new file mode 100644 index 00000000000..412b931299e Binary files /dev/null and b/test/assets/fakedata/logos/gray_pytorch.png differ diff --git a/test/assets/fakedata/logos/grayalpha_pytorch.png b/test/assets/fakedata/logos/grayalpha_pytorch.png new file mode 100644 index 00000000000..3e77d72b904 Binary files /dev/null and b/test/assets/fakedata/logos/grayalpha_pytorch.png differ diff --git a/test/assets/fakedata/logos/palette_pytorch.png b/test/assets/fakedata/logos/palette_pytorch.png new file mode 100644 index 00000000000..2108d1b315a Binary files /dev/null and b/test/assets/fakedata/logos/palette_pytorch.png differ diff --git a/test/assets/fakedata/logos/rgb_pytorch.avif b/test/assets/fakedata/logos/rgb_pytorch.avif new file mode 100644 index 00000000000..ea1bb586957 Binary files /dev/null and b/test/assets/fakedata/logos/rgb_pytorch.avif differ diff --git a/test/assets/fakedata/logos/rgb_pytorch.jpg b/test/assets/fakedata/logos/rgb_pytorch.jpg new file mode 100644 index 00000000000..d49e658b94f Binary files /dev/null and b/test/assets/fakedata/logos/rgb_pytorch.jpg differ diff --git a/test/assets/fakedata/logos/rgb_pytorch.png b/test/assets/fakedata/logos/rgb_pytorch.png new file mode 100644 index 00000000000..c9d08e6c7da Binary files /dev/null and b/test/assets/fakedata/logos/rgb_pytorch.png differ diff --git a/test/assets/fakedata/logos/rgb_pytorch.webp b/test/assets/fakedata/logos/rgb_pytorch.webp new file mode 100644 index 00000000000..e594584d76d Binary files /dev/null and b/test/assets/fakedata/logos/rgb_pytorch.webp differ diff --git a/test/assets/fakedata/logos/rgb_pytorch16.png b/test/assets/fakedata/logos/rgb_pytorch16.png new file mode 100644 index 00000000000..b5e9e35d989 Binary files /dev/null and b/test/assets/fakedata/logos/rgb_pytorch16.png differ diff --git a/test/assets/fakedata/logos/rgb_pytorch_incorrectly_encoded_but_who_cares.heic b/test/assets/fakedata/logos/rgb_pytorch_incorrectly_encoded_but_who_cares.heic new file mode 100644 index 00000000000..4c29ac3c71c Binary files /dev/null and b/test/assets/fakedata/logos/rgb_pytorch_incorrectly_encoded_but_who_cares.heic differ diff --git a/test/assets/fakedata/logos/rgbalpha_pytorch.png b/test/assets/fakedata/logos/rgbalpha_pytorch.png new file 
mode 100644 index 00000000000..5a9ff14ba5e Binary files /dev/null and b/test/assets/fakedata/logos/rgbalpha_pytorch.png differ diff --git a/test/assets/fakedata/logos/rgbalpha_pytorch16.png b/test/assets/fakedata/logos/rgbalpha_pytorch16.png new file mode 100644 index 00000000000..df1db4d6354 Binary files /dev/null and b/test/assets/fakedata/logos/rgbalpha_pytorch16.png differ diff --git a/test/assets/gaussian_blur_opencv_results.pt b/test/assets/gaussian_blur_opencv_results.pt new file mode 100644 index 00000000000..d68f477fb44 Binary files /dev/null and b/test/assets/gaussian_blur_opencv_results.pt differ diff --git a/test/assets/interlaced_png/wizard_low-interlaced.png b/test/assets/interlaced_png/wizard_low-interlaced.png new file mode 100644 index 00000000000..3badd9264dc Binary files /dev/null and b/test/assets/interlaced_png/wizard_low-interlaced.png differ diff --git a/test/assets/interlaced_png/wizard_low.png b/test/assets/interlaced_png/wizard_low.png new file mode 100644 index 00000000000..7b1c264f030 Binary files /dev/null and b/test/assets/interlaced_png/wizard_low.png differ diff --git a/test/assets/labeled_image.png b/test/assets/labeled_image.png new file mode 100644 index 00000000000..9d163243773 Binary files /dev/null and b/test/assets/labeled_image.png differ diff --git a/test/assets/masks.tiff b/test/assets/masks.tiff new file mode 100644 index 00000000000..7a8efc6dd0e Binary files /dev/null and b/test/assets/masks.tiff differ diff --git a/test/assets/toosmall_png/heapbof.png b/test/assets/toosmall_png/heapbof.png new file mode 100644 index 00000000000..e720d183342 Binary files /dev/null and b/test/assets/toosmall_png/heapbof.png differ diff --git a/test/assets/videos/hmdb51_Turnk_r_Pippi_Michel_cartwheel_f_cm_np2_le_med_6.avi b/test/assets/videos/hmdb51_Turnk_r_Pippi_Michel_cartwheel_f_cm_np2_le_med_6.avi new file mode 100644 index 00000000000..979cd3901af Binary files /dev/null and b/test/assets/videos/hmdb51_Turnk_r_Pippi_Michel_cartwheel_f_cm_np2_le_med_6.avi differ diff --git a/test/builtin_dataset_mocks.py b/test/builtin_dataset_mocks.py new file mode 100644 index 00000000000..ef5d5e1ec96 --- /dev/null +++ b/test/builtin_dataset_mocks.py @@ -0,0 +1,1582 @@ +import bz2 +import collections.abc +import csv +import functools +import gzip +import io +import itertools +import json +import lzma +import pathlib +import pickle +import random +import shutil +import unittest.mock +import xml.etree.ElementTree as ET +from collections import Counter, defaultdict + +import numpy as np +import pytest +import torch +from common_utils import combinations_grid +from datasets_utils import create_image_file, create_image_folder, make_tar, make_zip +from torch.nn.functional import one_hot +from torch.testing import make_tensor as _make_tensor +from torchvision.prototype import datasets + +make_tensor = functools.partial(_make_tensor, device="cpu") +make_scalar = functools.partial(make_tensor, ()) + + +__all__ = ["DATASET_MOCKS", "parametrize_dataset_mocks"] + + +class DatasetMock: + def __init__(self, name, *, mock_data_fn, configs): + # FIXME: error handling for unknown names + self.name = name + self.mock_data_fn = mock_data_fn + self.configs = configs + + def _parse_mock_info(self, mock_info): + if mock_info is None: + raise pytest.UsageError( + f"The mock data function for dataset '{self.name}' returned nothing. It needs to at least return an " + f"integer indicating the number of samples for the current `config`." 
+ ) + elif isinstance(mock_info, int): + mock_info = dict(num_samples=mock_info) + elif not isinstance(mock_info, dict): + raise pytest.UsageError( + f"The mock data function for dataset '{self.name}' returned a {type(mock_info)}. The returned object " + f"should be a dictionary containing at least the number of samples for the key `'num_samples'`. If no " + f"additional information is required for specific tests, the number of samples can also be returned as " + f"an integer." + ) + elif "num_samples" not in mock_info: + raise pytest.UsageError( + f"The dictionary returned by the mock data function for dataset '{self.name}' has to contain a " + f"`'num_samples'` entry indicating the number of samples." + ) + + return mock_info + + def load(self, config): + # `datasets.home()` is patched to a temporary directory through the autouse fixture `test_home` in + # test/test_prototype_builtin_datasets.py + root = pathlib.Path(datasets.home()) / self.name + # We cannot place the mock data upfront in `root`. Loading a dataset calls `OnlineResource.load`. In turn, + # this will only download **and** preprocess if the file is not present. In other words, if we already place + # the file in `root` before the resource is loaded, we are effectively skipping the preprocessing. + # To avoid that we first place the mock data in a temporary directory and patch the download logic to move it to + # `root` only when it is requested. + tmp_mock_data_folder = root / "__mock__" + tmp_mock_data_folder.mkdir(parents=True) + + mock_info = self._parse_mock_info(self.mock_data_fn(tmp_mock_data_folder, config)) + + def patched_download(resource, root, **kwargs): + src = tmp_mock_data_folder / resource.file_name + if not src.exists(): + raise pytest.UsageError( + f"Dataset '{self.name}' requires the file {resource.file_name} for {config} " + f"but it was not created by the mock data function." + ) + + dst = root / resource.file_name + shutil.move(str(src), str(root)) + + return dst + + with unittest.mock.patch( + "torchvision.prototype.datasets.utils._resource.OnlineResource.download", new=patched_download + ): + dataset = datasets.load(self.name, **config) + + extra_files = list(tmp_mock_data_folder.glob("**/*")) + if extra_files: + raise pytest.UsageError( + ( + f"Dataset '{self.name}' created the following files for {config} in the mock data function, " + f"but they were not loaded:\n\n" + ) + + "\n".join(str(file.relative_to(tmp_mock_data_folder)) for file in extra_files) + ) + + tmp_mock_data_folder.rmdir() + + return dataset, mock_info + + +def config_id(name, config): + parts = [name] + for name, value in config.items(): + if isinstance(value, bool): + part = ("" if value else "no_") + name + else: + part = str(value) + parts.append(part) + return "-".join(parts) + + +def parametrize_dataset_mocks(*dataset_mocks, marks=None): + mocks = {} + for mock in dataset_mocks: + if isinstance(mock, DatasetMock): + mocks[mock.name] = mock + elif isinstance(mock, collections.abc.Mapping): + mocks.update(mock) + else: + raise pytest.UsageError( + f"The positional arguments passed to `parametrize_dataset_mocks` can be a `DatasetMock`, " + f"a sequence of `DatasetMock`'s, or a mapping of names to `DatasetMock`'s, " + f"but got {mock} instead." 
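Given the contract enforced by _parse_mock_info() above, the smallest useful mock data function just creates its files and reports a sample count. A hypothetical registration using the helpers from this module (the dataset name is invented):

@register_mock(name="fakeset", configs=combinations_grid(split=("train", "test")))
def fakeset(root, config):
    # Write whatever archives the real "fakeset" loader expects under `root`
    # (omitted here), then report the size of the mocked split.
    num_samples = 3
    return num_samples  # equivalent to returning dict(num_samples=3)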
+ ) + dataset_mocks = mocks + + if marks is None: + marks = {} + elif not isinstance(marks, collections.abc.Mapping): + raise pytest.UsageError() + + return pytest.mark.parametrize( + ("dataset_mock", "config"), + [ + pytest.param(dataset_mock, config, id=config_id(name, config), marks=marks.get(name, ())) + for name, dataset_mock in dataset_mocks.items() + for config in dataset_mock.configs + ], + ) + + +DATASET_MOCKS = {} + + +def register_mock(name=None, *, configs): + def wrapper(mock_data_fn): + nonlocal name + if name is None: + name = mock_data_fn.__name__ + DATASET_MOCKS[name] = DatasetMock(name, mock_data_fn=mock_data_fn, configs=configs) + + return mock_data_fn + + return wrapper + + +class MNISTMockData: + _DTYPES_ID = { + torch.uint8: 8, + torch.int8: 9, + torch.int16: 11, + torch.int32: 12, + torch.float32: 13, + torch.float64: 14, + } + + @classmethod + def _magic(cls, dtype, ndim): + return cls._DTYPES_ID[dtype] * 256 + ndim + 1 + + @staticmethod + def _encode(t): + return torch.tensor(t, dtype=torch.int32).numpy().tobytes()[::-1] + + @staticmethod + def _big_endian_dtype(dtype): + np_dtype = getattr(np, str(dtype).replace("torch.", ""))().dtype + return np.dtype(f">{np_dtype.kind}{np_dtype.itemsize}") + + @classmethod + def _create_binary_file(cls, root, filename, *, num_samples, shape, dtype, compressor, low=0, high): + with compressor(root / filename, "wb") as fh: + for meta in (cls._magic(dtype, len(shape)), num_samples, *shape): + fh.write(cls._encode(meta)) + + data = make_tensor((num_samples, *shape), dtype=dtype, low=low, high=high) + + fh.write(data.numpy().astype(cls._big_endian_dtype(dtype)).tobytes()) + + @classmethod + def generate( + cls, + root, + *, + num_categories, + num_samples=None, + images_file, + labels_file, + image_size=(28, 28), + image_dtype=torch.uint8, + label_size=(), + label_dtype=torch.uint8, + compressor=None, + ): + if num_samples is None: + num_samples = num_categories + if compressor is None: + compressor = gzip.open + + cls._create_binary_file( + root, + images_file, + num_samples=num_samples, + shape=image_size, + dtype=image_dtype, + compressor=compressor, + high=float("inf"), + ) + cls._create_binary_file( + root, + labels_file, + num_samples=num_samples, + shape=label_size, + dtype=label_dtype, + compressor=compressor, + high=num_categories, + ) + + return num_samples + + +def mnist(root, config): + prefix = "train" if config["split"] == "train" else "t10k" + return MNISTMockData.generate( + root, + num_categories=10, + images_file=f"{prefix}-images-idx3-ubyte.gz", + labels_file=f"{prefix}-labels-idx1-ubyte.gz", + ) + + +DATASET_MOCKS.update( + { + name: DatasetMock(name, mock_data_fn=mnist, configs=combinations_grid(split=("train", "test"))) + for name in ["mnist", "fashionmnist", "kmnist"] + } +) + + +@register_mock( + configs=combinations_grid( + split=("train", "test"), + image_set=("Balanced", "By_Merge", "By_Class", "Letters", "Digits", "MNIST"), + ) +) +def emnist(root, config): + num_samples_map = {} + file_names = set() + for split, image_set in itertools.product( + ("train", "test"), + ("Balanced", "By_Merge", "By_Class", "Letters", "Digits", "MNIST"), + ): + prefix = f"emnist-{image_set.replace('_', '').lower()}-{split}" + images_file = f"{prefix}-images-idx3-ubyte.gz" + labels_file = f"{prefix}-labels-idx1-ubyte.gz" + file_names.update({images_file, labels_file}) + num_samples_map[(split, image_set)] = MNISTMockData.generate( + root, + # The image sets that merge some lower case letters in their respective upper case 
variant, still use dense + # labels in the data files. Thus, num_categories != len(categories) there. + num_categories=47 if config["image_set"] in ("Balanced", "By_Merge") else 62, + images_file=images_file, + labels_file=labels_file, + ) + + make_zip(root, "emnist-gzip.zip", *file_names) + + return num_samples_map[(config["split"], config["image_set"])] + + +@register_mock(configs=combinations_grid(split=("train", "test", "test10k", "test50k", "nist"))) +def qmnist(root, config): + num_categories = 10 + if config["split"] == "train": + num_samples = num_samples_gen = num_categories + 2 + prefix = "qmnist-train" + suffix = ".gz" + compressor = gzip.open + elif config["split"].startswith("test"): + # The split 'test50k' is defined as the last 50k images beginning at index 10000. Thus, we need to create + # more than 10000 images for the dataset to not be empty. + num_samples_gen = 10001 + num_samples = { + "test": num_samples_gen, + "test10k": min(num_samples_gen, 10_000), + "test50k": num_samples_gen - 10_000, + }[config["split"]] + prefix = "qmnist-test" + suffix = ".gz" + compressor = gzip.open + else: # config["split"] == "nist" + num_samples = num_samples_gen = num_categories + 3 + prefix = "xnist" + suffix = ".xz" + compressor = lzma.open + + MNISTMockData.generate( + root, + num_categories=num_categories, + num_samples=num_samples_gen, + images_file=f"{prefix}-images-idx3-ubyte{suffix}", + labels_file=f"{prefix}-labels-idx2-int{suffix}", + label_size=(8,), + label_dtype=torch.int32, + compressor=compressor, + ) + return num_samples + + +class CIFARMockData: + NUM_PIXELS = 32 * 32 * 3 + + @classmethod + def _create_batch_file(cls, root, name, *, num_categories, labels_key, num_samples=1): + content = { + "data": make_tensor((num_samples, cls.NUM_PIXELS), dtype=torch.uint8).numpy(), + labels_key: torch.randint(0, num_categories, size=(num_samples,)).tolist(), + } + with open(pathlib.Path(root) / name, "wb") as fh: + pickle.dump(content, fh) + + @classmethod + def generate( + cls, + root, + name, + *, + folder, + train_files, + test_files, + num_categories, + labels_key, + ): + folder = root / folder + folder.mkdir() + files = (*train_files, *test_files) + for file in files: + cls._create_batch_file( + folder, + file, + num_categories=num_categories, + labels_key=labels_key, + ) + + make_tar(root, name, folder, compression="gz") + + +@register_mock(configs=combinations_grid(split=("train", "test"))) +def cifar10(root, config): + train_files = [f"data_batch_{idx}" for idx in range(1, 6)] + test_files = ["test_batch"] + + CIFARMockData.generate( + root=root, + name="cifar-10-python.tar.gz", + folder=pathlib.Path("cifar-10-batches-py"), + train_files=train_files, + test_files=test_files, + num_categories=10, + labels_key="labels", + ) + + return len(train_files if config["split"] == "train" else test_files) + + +@register_mock(configs=combinations_grid(split=("train", "test"))) +def cifar100(root, config): + train_files = ["train"] + test_files = ["test"] + + CIFARMockData.generate( + root=root, + name="cifar-100-python.tar.gz", + folder=pathlib.Path("cifar-100-python"), + train_files=train_files, + test_files=test_files, + num_categories=100, + labels_key="fine_labels", + ) + + return len(train_files if config["split"] == "train" else test_files) + + +@register_mock(configs=[dict()]) +def caltech101(root, config): + def create_ann_file(root, name): + import scipy.io + + box_coord = make_tensor((1, 4), dtype=torch.int32, low=0).numpy().astype(np.uint16) + obj_contour = make_tensor((2, 
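For reference, the idx headers written by MNISTMockData._create_binary_file are big-endian int32 words: the magic number (dtype id * 256 + ndim + 1), the sample count, then the shape. A sketch decoding a mocked images file (the file name assumes MNISTMockData.generate() ran in the current directory):

import gzip
import struct

with gzip.open("t10k-images-idx3-ubyte.gz", "rb") as fh:
    magic, num_samples, rows, cols = struct.unpack(">4i", fh.read(16))
print(magic)  # 8 * 256 + 2 + 1 == 2051 for 2-dimensional uint8 images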
int(torch.randint(3, 6, size=()))), dtype=torch.float64, low=0).numpy() + + scipy.io.savemat(str(pathlib.Path(root) / name), dict(box_coord=box_coord, obj_contour=obj_contour)) + + def create_ann_folder(root, name, file_name_fn, num_examples): + root = pathlib.Path(root) / name + root.mkdir(parents=True) + + for idx in range(num_examples): + create_ann_file(root, file_name_fn(idx)) + + images_root = root / "101_ObjectCategories" + anns_root = root / "Annotations" + + image_category_map = { + "Faces": "Faces_2", + "Faces_easy": "Faces_3", + "Motorbikes": "Motorbikes_16", + "airplanes": "Airplanes_Side_2", + } + + categories = ["Faces", "Faces_easy", "Motorbikes", "airplanes", "yin_yang"] + + num_images_per_category = 2 + for category in categories: + create_image_folder( + root=images_root, + name=category, + file_name_fn=lambda idx: f"image_{idx + 1:04d}.jpg", + num_examples=num_images_per_category, + ) + create_ann_folder( + root=anns_root, + name=image_category_map.get(category, category), + file_name_fn=lambda idx: f"annotation_{idx + 1:04d}.mat", + num_examples=num_images_per_category, + ) + + (images_root / "BACKGROUND_Google").mkdir() + make_tar(root, f"{images_root.name}.tar.gz", images_root, compression="gz") + + make_tar(root, f"{anns_root.name}.tar", anns_root) + + return num_images_per_category * len(categories) + + +@register_mock(configs=[dict()]) +def caltech256(root, config): + dir = root / "256_ObjectCategories" + num_images_per_category = 2 + + categories = [ + (1, "ak47"), + (127, "laptop-101"), + (198, "spider"), + (257, "clutter"), + ] + + for category_idx, category in categories: + files = create_image_folder( + dir, + name=f"{category_idx:03d}.{category}", + file_name_fn=lambda image_idx: f"{category_idx:03d}_{image_idx + 1:04d}.jpg", + num_examples=num_images_per_category, + ) + if category == "spider": + open(files[0].parent / "RENAME2", "w").close() + + make_tar(root, f"{dir.name}.tar", dir) + + return num_images_per_category * len(categories) + + +@register_mock(configs=combinations_grid(split=("train", "val", "test"))) +def imagenet(root, config): + from scipy.io import savemat + + info = datasets.info("imagenet") + + if config["split"] == "train": + num_samples = len(info["wnids"]) + archive_name = "ILSVRC2012_img_train.tar" + + files = [] + for wnid in info["wnids"]: + create_image_folder( + root=root, + name=wnid, + file_name_fn=lambda image_idx: f"{wnid}_{image_idx:04d}.JPEG", + num_examples=1, + ) + files.append(make_tar(root, f"{wnid}.tar")) + elif config["split"] == "val": + num_samples = 3 + archive_name = "ILSVRC2012_img_val.tar" + files = [create_image_file(root, f"ILSVRC2012_val_{idx + 1:08d}.JPEG") for idx in range(num_samples)] + + devkit_root = root / "ILSVRC2012_devkit_t12" + data_root = devkit_root / "data" + data_root.mkdir(parents=True) + + with open(data_root / "ILSVRC2012_validation_ground_truth.txt", "w") as file: + for label in torch.randint(0, len(info["wnids"]), (num_samples,)).tolist(): + file.write(f"{label}\n") + + num_children = 0 + synsets = [ + (idx, wnid, category, "", num_children, [], 0, 0) + for idx, (category, wnid) in enumerate(zip(info["categories"], info["wnids"]), 1) + ] + num_children = 1 + synsets.extend((0, "", "", "", num_children, [], 0, 0) for _ in range(5)) + synsets = np.array( + synsets, + dtype=np.dtype( + [ + ("ILSVRC2012_ID", "O"), + ("WNID", "O"), + ("words", "O"), + ("gloss", "O"), + ("num_children", "O"), + ("children", "O"), + ("wordnet_height", "O"), + ("num_train_images", "O"), + ] + ), + ) + 
savemat(data_root / "meta.mat", dict(synsets=synsets)) + + make_tar(root, devkit_root.with_suffix(".tar.gz").name, compression="gz") + else: # config["split"] == "test" + num_samples = 5 + archive_name = "ILSVRC2012_img_test_v10102019.tar" + files = [create_image_file(root, f"ILSVRC2012_test_{idx + 1:08d}.JPEG") for idx in range(num_samples)] + + make_tar(root, archive_name, *files) + + return num_samples + + +class CocoMockData: + @classmethod + def _make_annotations_json( + cls, + root, + name, + *, + images_meta, + fn, + ): + num_anns_per_image = torch.randint(1, 5, (len(images_meta),)) + num_anns_total = int(num_anns_per_image.sum()) + ann_ids_iter = iter(torch.arange(num_anns_total)[torch.randperm(num_anns_total)]) + + anns_meta = [] + for image_meta, num_anns in zip(images_meta, num_anns_per_image): + for _ in range(num_anns): + ann_id = int(next(ann_ids_iter)) + anns_meta.append(dict(fn(ann_id, image_meta), id=ann_id, image_id=image_meta["id"])) + anns_meta.sort(key=lambda ann: ann["id"]) + + with open(root / name, "w") as file: + json.dump(dict(images=images_meta, annotations=anns_meta), file) + + return num_anns_per_image + + @staticmethod + def _make_instances_data(ann_id, image_meta): + def make_rle_segmentation(): + height, width = image_meta["height"], image_meta["width"] + numel = height * width + counts = [] + while sum(counts) <= numel: + counts.append(int(torch.randint(5, 8, ()))) + if sum(counts) > numel: + counts[-1] -= sum(counts) - numel + return dict(counts=counts, size=[height, width]) + + return dict( + segmentation=make_rle_segmentation(), + bbox=make_tensor((4,), dtype=torch.float32, low=0).tolist(), + iscrowd=True, + area=float(make_scalar(dtype=torch.float32)), + category_id=int(make_scalar(dtype=torch.int64)), + ) + + @staticmethod + def _make_captions_data(ann_id, image_meta): + return dict(caption=f"Caption {ann_id} describing image {image_meta['id']}.") + + @classmethod + def _make_annotations(cls, root, name, *, images_meta): + num_anns_per_image = torch.zeros((len(images_meta),), dtype=torch.int64) + for annotations, fn in ( + ("instances", cls._make_instances_data), + ("captions", cls._make_captions_data), + ): + num_anns_per_image += cls._make_annotations_json( + root, f"{annotations}_{name}.json", images_meta=images_meta, fn=fn + ) + + return int(num_anns_per_image.sum()) + + @classmethod + def generate( + cls, + root, + *, + split, + year, + num_samples, + ): + annotations_dir = root / "annotations" + annotations_dir.mkdir() + + for split_ in ("train", "val"): + config_name = f"{split_}{year}" + + images_meta = [ + dict( + file_name=f"{idx:012d}.jpg", + id=idx, + width=width, + height=height, + ) + for idx, (height, width) in enumerate( + torch.randint(3, 11, size=(num_samples, 2), dtype=torch.int).tolist() + ) + ] + + if split_ == split: + create_image_folder( + root, + config_name, + file_name_fn=lambda idx: images_meta[idx]["file_name"], + num_examples=num_samples, + size=lambda idx: (3, images_meta[idx]["height"], images_meta[idx]["width"]), + ) + make_zip(root, f"{config_name}.zip") + + cls._make_annotations( + annotations_dir, + config_name, + images_meta=images_meta, + ) + + make_zip(root, f"annotations_trainval{year}.zip", annotations_dir) + + return num_samples + + +@register_mock( + configs=combinations_grid( + split=("train", "val"), + year=("2017", "2014"), + annotations=("instances", "captions", None), + ) +) +def coco(root, config): + return CocoMockData.generate(root, split=config["split"], year=config["year"], num_samples=5) + + +class 
SBDMockData: + _NUM_CATEGORIES = 20 + + @classmethod + def _make_split_files(cls, root_map, *, split): + splits_and_idcs = [ + ("train", [0, 1, 2]), + ("val", [3]), + ] + if split == "train_noval": + splits_and_idcs.append(("train_noval", [0, 2])) + + ids_map = {split: [f"2008_{idx:06d}" for idx in idcs] for split, idcs in splits_and_idcs} + + for split, ids in ids_map.items(): + with open(root_map[split] / f"{split}.txt", "w") as fh: + fh.writelines(f"{id}\n" for id in ids) + + return sorted(set(itertools.chain(*ids_map.values()))), {split: len(ids) for split, ids in ids_map.items()} + + @classmethod + def _make_anns_folder(cls, root, name, ids): + from scipy.io import savemat + + anns_folder = root / name + anns_folder.mkdir() + + sizes = torch.randint(1, 9, size=(len(ids), 2)).tolist() + for id, size in zip(ids, sizes): + savemat( + anns_folder / f"{id}.mat", + { + "GTcls": { + "Boundaries": cls._make_boundaries(size), + "Segmentation": cls._make_segmentation(size), + } + }, + ) + return sizes + + @classmethod + def _make_boundaries(cls, size): + from scipy.sparse import csc_matrix + + return [ + [csc_matrix(torch.randint(0, 2, size=size, dtype=torch.uint8).numpy())] for _ in range(cls._NUM_CATEGORIES) + ] + + @classmethod + def _make_segmentation(cls, size): + return torch.randint(0, cls._NUM_CATEGORIES + 1, size=size, dtype=torch.uint8).numpy() + + @classmethod + def generate(cls, root, *, split): + archive_folder = root / "benchmark_RELEASE" + dataset_folder = archive_folder / "dataset" + dataset_folder.mkdir(parents=True, exist_ok=True) + + ids, num_samples_map = cls._make_split_files( + defaultdict(lambda: dataset_folder, {"train_noval": root}), split=split + ) + sizes = cls._make_anns_folder(dataset_folder, "cls", ids) + create_image_folder( + dataset_folder, "img", lambda idx: f"{ids[idx]}.jpg", num_examples=len(ids), size=lambda idx: sizes[idx] + ) + + make_tar(root, "benchmark.tgz", archive_folder, compression="gz") + + return num_samples_map[split] + + +@register_mock(configs=combinations_grid(split=("train", "val", "train_noval"))) +def sbd(root, config): + return SBDMockData.generate(root, split=config["split"]) + + +@register_mock(configs=[dict()]) +def semeion(root, config): + num_samples = 3 + num_categories = 10 + + images = torch.rand(num_samples, 256) + labels = one_hot(torch.randint(num_categories, size=(num_samples,)), num_classes=num_categories) + with open(root / "semeion.data", "w") as fh: + for image, one_hot_label in zip(images, labels): + image_columns = " ".join([f"{pixel.item():.4f}" for pixel in image]) + labels_columns = " ".join([str(label.item()) for label in one_hot_label]) + fh.write(f"{image_columns} {labels_columns} \n") + + return num_samples + + +class VOCMockData: + _TRAIN_VAL_FILE_NAMES = { + "2007": "VOCtrainval_06-Nov-2007.tar", + "2008": "VOCtrainval_14-Jul-2008.tar", + "2009": "VOCtrainval_11-May-2009.tar", + "2010": "VOCtrainval_03-May-2010.tar", + "2011": "VOCtrainval_25-May-2011.tar", + "2012": "VOCtrainval_11-May-2012.tar", + } + _TEST_FILE_NAMES = { + "2007": "VOCtest_06-Nov-2007.tar", + } + + @classmethod + def _make_split_files(cls, root, *, year, trainval): + split_folder = root / "ImageSets" + + if trainval: + idcs_map = { + "train": [0, 1, 2], + "val": [3, 4], + } + idcs_map["trainval"] = [*idcs_map["train"], *idcs_map["val"]] + else: + idcs_map = { + "test": [5], + } + ids_map = {split: [f"{year}_{idx:06d}" for idx in idcs] for split, idcs in idcs_map.items()} + + for task_sub_folder in ("Main", "Segmentation"): + task_folder = 
split_folder / task_sub_folder + task_folder.mkdir(parents=True, exist_ok=True) + for split, ids in ids_map.items(): + with open(task_folder / f"{split}.txt", "w") as fh: + fh.writelines(f"{id}\n" for id in ids) + + return sorted(set(itertools.chain(*ids_map.values()))), {split: len(ids) for split, ids in ids_map.items()} + + @classmethod + def _make_detection_anns_folder(cls, root, name, *, file_name_fn, num_examples): + folder = root / name + folder.mkdir(parents=True, exist_ok=True) + + for idx in range(num_examples): + cls._make_detection_ann_file(folder, file_name_fn(idx)) + + @classmethod + def _make_detection_ann_file(cls, root, name): + def add_child(parent, name, text=None): + child = ET.SubElement(parent, name) + child.text = str(text) + return child + + def add_name(obj, name="dog"): + add_child(obj, "name", name) + + def add_size(obj): + obj = add_child(obj, "size") + size = {"width": 0, "height": 0, "depth": 3} + for name, text in size.items(): + add_child(obj, name, text) + + def add_bndbox(obj): + obj = add_child(obj, "bndbox") + bndbox = {"xmin": 1, "xmax": 2, "ymin": 3, "ymax": 4} + for name, text in bndbox.items(): + add_child(obj, name, text) + + annotation = ET.Element("annotation") + add_size(annotation) + obj = add_child(annotation, "object") + add_name(obj) + add_bndbox(obj) + + with open(root / name, "wb") as fh: + fh.write(ET.tostring(annotation)) + + @classmethod + def generate(cls, root, *, year, trainval): + archive_folder = root + if year == "2011": + archive_folder = root / "TrainVal" + data_folder = archive_folder / "VOCdevkit" + else: + archive_folder = data_folder = root / "VOCdevkit" + data_folder = data_folder / f"VOC{year}" + data_folder.mkdir(parents=True, exist_ok=True) + + ids, num_samples_map = cls._make_split_files(data_folder, year=year, trainval=trainval) + for make_folder_fn, name, suffix in [ + (create_image_folder, "JPEGImages", ".jpg"), + (create_image_folder, "SegmentationClass", ".png"), + (cls._make_detection_anns_folder, "Annotations", ".xml"), + ]: + make_folder_fn(data_folder, name, file_name_fn=lambda idx: ids[idx] + suffix, num_examples=len(ids)) + make_tar(root, (cls._TRAIN_VAL_FILE_NAMES if trainval else cls._TEST_FILE_NAMES)[year], archive_folder) + + return num_samples_map + + +@register_mock( + configs=[ + *combinations_grid( + split=("train", "val", "trainval"), + year=("2007", "2008", "2009", "2010", "2011", "2012"), + task=("detection", "segmentation"), + ), + *combinations_grid( + split=("test",), + year=("2007",), + task=("detection", "segmentation"), + ), + ], +) +def voc(root, config): + trainval = config["split"] != "test" + return VOCMockData.generate(root, year=config["year"], trainval=trainval)[config["split"]] + + +class CelebAMockData: + @classmethod + def _make_ann_file(cls, root, name, data, *, field_names=None): + with open(root / name, "w") as file: + if field_names: + file.write(f"{len(data)}\r\n") + file.write(" ".join(field_names) + "\r\n") + file.writelines(" ".join(str(item) for item in row) + "\r\n" for row in data) + + _SPLIT_TO_IDX = { + "train": 0, + "val": 1, + "test": 2, + } + + @classmethod + def _make_split_file(cls, root): + num_samples_map = {"train": 4, "val": 3, "test": 2} + + data = [ + (f"{idx:06d}.jpg", cls._SPLIT_TO_IDX[split]) + for split, num_samples in num_samples_map.items() + for idx in range(num_samples) + ] + cls._make_ann_file(root, "list_eval_partition.txt", data) + + image_file_names, _ = zip(*data) + return image_file_names, num_samples_map + + @classmethod + def 
_make_identity_file(cls, root, image_file_names): + cls._make_ann_file( + root, "identity_CelebA.txt", [(name, int(make_scalar(low=1, dtype=torch.int))) for name in image_file_names] + ) + + @classmethod + def _make_attributes_file(cls, root, image_file_names): + field_names = ("5_o_Clock_Shadow", "Young") + data = [ + [name, *[" 1" if attr else "-1" for attr in make_tensor((len(field_names),), dtype=torch.bool)]] + for name in image_file_names + ] + cls._make_ann_file(root, "list_attr_celeba.txt", data, field_names=(*field_names, "")) + + @classmethod + def _make_bounding_boxes_file(cls, root, image_file_names): + field_names = ("image_id", "x_1", "y_1", "width", "height") + data = [ + [f"{name} ", *[f"{coord:3d}" for coord in make_tensor((4,), low=0, dtype=torch.int).tolist()]] + for name in image_file_names + ] + cls._make_ann_file(root, "list_bbox_celeba.txt", data, field_names=field_names) + + @classmethod + def _make_landmarks_file(cls, root, image_file_names): + field_names = ("lefteye_x", "lefteye_y", "rightmouth_x", "rightmouth_y") + data = [ + [ + name, + *[ + f"{coord:4d}" if idx else coord + for idx, coord in enumerate(make_tensor((len(field_names),), low=0, dtype=torch.int).tolist()) + ], + ] + for name in image_file_names + ] + cls._make_ann_file(root, "list_landmarks_align_celeba.txt", data, field_names=field_names) + + @classmethod + def generate(cls, root): + image_file_names, num_samples_map = cls._make_split_file(root) + + image_files = create_image_folder( + root, "img_align_celeba", file_name_fn=lambda idx: image_file_names[idx], num_examples=len(image_file_names) + ) + make_zip(root, image_files[0].parent.with_suffix(".zip").name) + + for make_ann_file_fn in ( + cls._make_identity_file, + cls._make_attributes_file, + cls._make_bounding_boxes_file, + cls._make_landmarks_file, + ): + make_ann_file_fn(root, image_file_names) + + return num_samples_map + + +@register_mock(configs=combinations_grid(split=("train", "val", "test"))) +def celeba(root, config): + return CelebAMockData.generate(root)[config["split"]] + + +@register_mock(configs=combinations_grid(split=("train", "val", "test"))) +def country211(root, config): + split_folder = pathlib.Path(root, "country211", "valid" if config["split"] == "val" else config["split"]) + split_folder.mkdir(parents=True, exist_ok=True) + + num_examples = { + "train": 3, + "val": 4, + "test": 5, + }[config["split"]] + + classes = ("AD", "BS", "GR") + for cls in classes: + create_image_folder( + split_folder, + name=cls, + file_name_fn=lambda idx: f"{idx}.jpg", + num_examples=num_examples, + ) + make_tar(root, f"{split_folder.parent.name}.tgz", split_folder.parent, compression="gz") + return num_examples * len(classes) + + +@register_mock(configs=combinations_grid(split=("train", "test"))) +def food101(root, config): + data_folder = root / "food-101" + + num_images_per_class = 3 + image_folder = data_folder / "images" + categories = ["apple_pie", "baby_back_ribs", "waffles"] + image_ids = [] + for category in categories: + image_files = create_image_folder( + image_folder, + category, + file_name_fn=lambda idx: f"{idx:04d}.jpg", + num_examples=num_images_per_class, + ) + image_ids.extend(path.relative_to(path.parents[1]).with_suffix("").as_posix() for path in image_files) + + meta_folder = data_folder / "meta" + meta_folder.mkdir() + + with open(meta_folder / "classes.txt", "w") as file: + for category in categories: + file.write(f"{category}\n") + + splits = ["train", "test"] + num_samples_map = {} + for offset, split in 
enumerate(splits): + image_ids_in_split = image_ids[offset :: len(splits)] + num_samples_map[split] = len(image_ids_in_split) + with open(meta_folder / f"{split}.txt", "w") as file: + for image_id in image_ids_in_split: + file.write(f"{image_id}\n") + + make_tar(root, f"{data_folder.name}.tar.gz", compression="gz") + + return num_samples_map[config["split"]] + + +@register_mock(configs=combinations_grid(split=("train", "val", "test"), fold=(1, 4, 10))) +def dtd(root, config): + data_folder = root / "dtd" + + num_images_per_class = 3 + image_folder = data_folder / "images" + categories = {"banded", "marbled", "zigzagged"} + image_ids_per_category = { + category: [ + str(path.relative_to(path.parents[1]).as_posix()) + for path in create_image_folder( + image_folder, + category, + file_name_fn=lambda idx: f"{category}_{idx:04d}.jpg", + num_examples=num_images_per_class, + ) + ] + for category in categories + } + + meta_folder = data_folder / "labels" + meta_folder.mkdir() + + with open(meta_folder / "labels_joint_anno.txt", "w") as file: + for cls, image_ids in image_ids_per_category.items(): + for image_id in image_ids: + joint_categories = random.choices( + list(categories - {cls}), k=int(torch.randint(len(categories) - 1, ())) + ) + file.write(" ".join([image_id, *sorted([cls, *joint_categories])]) + "\n") + + image_ids = list(itertools.chain(*image_ids_per_category.values())) + splits = ("train", "val", "test") + num_samples_map = {} + for fold in range(1, 11): + random.shuffle(image_ids) + for offset, split in enumerate(splits): + image_ids_in_config = image_ids[offset :: len(splits)] + with open(meta_folder / f"{split}{fold}.txt", "w") as file: + file.write("\n".join(image_ids_in_config) + "\n") + + num_samples_map[(split, fold)] = len(image_ids_in_config) + + make_tar(root, "dtd-r1.0.1.tar.gz", data_folder, compression="gz") + + return num_samples_map[config["split"], config["fold"]] + + +@register_mock(configs=combinations_grid(split=("train", "test"))) +def fer2013(root, config): + split = config["split"] + num_samples = 5 if split == "train" else 3 + + path = root / f"{split}.csv" + with open(path, "w", newline="") as file: + field_names = ["emotion"] if split == "train" else [] + field_names.append("pixels") + + file.write(",".join(field_names) + "\n") + + writer = csv.DictWriter(file, fieldnames=field_names, quotechar='"', quoting=csv.QUOTE_NONNUMERIC) + for _ in range(num_samples): + rowdict = { + "pixels": " ".join([str(int(pixel)) for pixel in torch.randint(256, (48 * 48,), dtype=torch.uint8)]) + } + if split == "train": + rowdict["emotion"] = int(torch.randint(7, ())) + writer.writerow(rowdict) + + make_zip(root, f"{path.name}.zip", path) + + return num_samples + + +@register_mock(configs=combinations_grid(split=("train", "test"))) +def gtsrb(root, config): + num_examples_per_class = 5 if config["split"] == "train" else 3 + classes = ("00000", "00042", "00012") + num_examples = num_examples_per_class * len(classes) + + csv_columns = ["Filename", "Width", "Height", "Roi.X1", "Roi.Y1", "Roi.X2", "Roi.Y2", "ClassId"] + + def _make_ann_file(path, num_examples, class_idx): + if class_idx == "random": + class_idx = torch.randint(1, len(classes) + 1, size=(1,)).item() + + with open(path, "w") as csv_file: + writer = csv.DictWriter(csv_file, fieldnames=csv_columns, delimiter=";") + writer.writeheader() + for image_idx in range(num_examples): + writer.writerow( + { + "Filename": f"{image_idx:05d}.ppm", + "Width": torch.randint(1, 100, size=()).item(), + "Height": torch.randint(1, 100, 
size=()).item(), + "Roi.X1": torch.randint(1, 100, size=()).item(), + "Roi.Y1": torch.randint(1, 100, size=()).item(), + "Roi.X2": torch.randint(1, 100, size=()).item(), + "Roi.Y2": torch.randint(1, 100, size=()).item(), + "ClassId": class_idx, + } + ) + + archive_folder = root / "GTSRB" + + if config["split"] == "train": + train_folder = archive_folder / "Training" + train_folder.mkdir(parents=True) + + for class_idx in classes: + create_image_folder( + train_folder, + name=class_idx, + file_name_fn=lambda image_idx: f"{class_idx}_{image_idx:05d}.ppm", + num_examples=num_examples_per_class, + ) + _make_ann_file( + path=train_folder / class_idx / f"GT-{class_idx}.csv", + num_examples=num_examples_per_class, + class_idx=int(class_idx), + ) + make_zip(root, "GTSRB-Training_fixed.zip", archive_folder) + else: + test_folder = archive_folder / "Final_Test" + test_folder.mkdir(parents=True) + + create_image_folder( + test_folder, + name="Images", + file_name_fn=lambda image_idx: f"{image_idx:05d}.ppm", + num_examples=num_examples, + ) + + make_zip(root, "GTSRB_Final_Test_Images.zip", archive_folder) + + _make_ann_file( + path=root / "GT-final_test.csv", + num_examples=num_examples, + class_idx="random", + ) + + make_zip(root, "GTSRB_Final_Test_GT.zip", "GT-final_test.csv") + + return num_examples + + +@register_mock(configs=combinations_grid(split=("train", "val", "test"))) +def clevr(root, config): + data_folder = root / "CLEVR_v1.0" + + num_samples_map = { + "train": 3, + "val": 2, + "test": 1, + } + + images_folder = data_folder / "images" + image_files = { + split: create_image_folder( + images_folder, + split, + file_name_fn=lambda idx: f"CLEVR_{split}_{idx:06d}.jpg", + num_examples=num_samples, + ) + for split, num_samples in num_samples_map.items() + } + + scenes_folder = data_folder / "scenes" + scenes_folder.mkdir() + for split in ["train", "val"]: + with open(scenes_folder / f"CLEVR_{split}_scenes.json", "w") as file: + json.dump( + { + "scenes": [ + { + "image_filename": image_file.name, + # We currently only return the number of objects in a scene. + # Thus, it is sufficient for now to only mock the number of elements. 
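The scenes entries being assembled here only need enough structure for the loader to count objects per image. A sketch of a single scene as json.dump serializes it (file name and object count are illustrative):

import json

scene = {"image_filename": "CLEVR_train_000000.jpg", "objects": [None] * 3}
print(json.dumps({"scenes": [scene]}))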
+ "objects": [None] * int(torch.randint(1, 5, ())), + } + for image_file in image_files[split] + ] + }, + file, + ) + + make_zip(root, f"{data_folder.name}.zip", data_folder) + + return num_samples_map[config["split"]] + + +class OxfordIIITPetMockData: + @classmethod + def _meta_to_split_and_classification_ann(cls, meta, idx): + image_id = "_".join( + [ + *[(str.title if meta["species"] == "cat" else str.lower)(part) for part in meta["cls"].split()], + str(idx), + ] + ) + class_id = str(meta["label"] + 1) + species = "1" if meta["species"] == "cat" else "2" + breed_id = "-1" + return (image_id, class_id, species, breed_id) + + @classmethod + def generate(self, root): + classification_anns_meta = ( + dict(cls="Abyssinian", label=0, species="cat"), + dict(cls="Keeshond", label=18, species="dog"), + dict(cls="Yorkshire Terrier", label=36, species="dog"), + ) + split_and_classification_anns = [ + self._meta_to_split_and_classification_ann(meta, idx) + for meta, idx in itertools.product(classification_anns_meta, (1, 2, 10)) + ] + image_ids, *_ = zip(*split_and_classification_anns) + + image_files = create_image_folder( + root, "images", file_name_fn=lambda idx: f"{image_ids[idx]}.jpg", num_examples=len(image_ids) + ) + + anns_folder = root / "annotations" + anns_folder.mkdir() + random.shuffle(split_and_classification_anns) + splits = ("trainval", "test") + num_samples_map = {} + for offset, split in enumerate(splits): + split_and_classification_anns_in_split = split_and_classification_anns[offset :: len(splits)] + with open(anns_folder / f"{split}.txt", "w") as file: + writer = csv.writer(file, delimiter=" ") + for split_and_classification_ann in split_and_classification_anns_in_split: + writer.writerow(split_and_classification_ann) + + num_samples_map[split] = len(split_and_classification_anns_in_split) + + segmentation_files = create_image_folder( + anns_folder, "trimaps", file_name_fn=lambda idx: f"{image_ids[idx]}.png", num_examples=len(image_ids) + ) + + # The dataset has some rogue files + for path in image_files[:3]: + path.with_suffix(".mat").touch() + for path in segmentation_files: + path.with_name(f".{path.name}").touch() + + make_tar(root, "images.tar.gz", compression="gz") + make_tar(root, anns_folder.with_suffix(".tar.gz").name, compression="gz") + + return num_samples_map + + +@register_mock(name="oxford-iiit-pet", configs=combinations_grid(split=("trainval", "test"))) +def oxford_iiit_pet(root, config): + return OxfordIIITPetMockData.generate(root)[config["split"]] + + +class _CUB200MockData: + @classmethod + def _category_folder(cls, category, idx): + return f"{idx:03d}.{category}" + + @classmethod + def _file_stem(cls, category, idx): + return f"{category}_{idx:04d}" + + @classmethod + def _make_images(cls, images_folder): + image_files = [] + for category_idx, category in [ + (1, "Black_footed_Albatross"), + (100, "Brown_Pelican"), + (200, "Common_Yellowthroat"), + ]: + image_files.extend( + create_image_folder( + images_folder, + cls._category_folder(category, category_idx), + lambda image_idx: f"{cls._file_stem(category, image_idx)}.jpg", + num_examples=5, + ) + ) + + return image_files + + +class CUB2002011MockData(_CUB200MockData): + @classmethod + def _make_archive(cls, root): + archive_folder = root / "CUB_200_2011" + + images_folder = archive_folder / "images" + image_files = cls._make_images(images_folder) + image_ids = list(range(1, len(image_files) + 1)) + + with open(archive_folder / "images.txt", "w") as file: + file.write( + "\n".join( + f"{id} 
{path.relative_to(images_folder).as_posix()}" for id, path in zip(image_ids, image_files) + ) + ) + + split_ids = torch.randint(2, (len(image_ids),)).tolist() + counts = Counter(split_ids) + num_samples_map = {"train": counts[1], "test": counts[0]} + with open(archive_folder / "train_test_split.txt", "w") as file: + file.write("\n".join(f"{image_id} {split_id}" for image_id, split_id in zip(image_ids, split_ids))) + + with open(archive_folder / "bounding_boxes.txt", "w") as file: + file.write( + "\n".join( + " ".join( + str(item) + for item in [image_id, *make_tensor((4,), dtype=torch.int, low=0).to(torch.float).tolist()] + ) + for image_id in image_ids + ) + ) + + make_tar(root, archive_folder.with_suffix(".tgz").name, compression="gz") + + return image_files, num_samples_map + + @classmethod + def _make_segmentations(cls, root, image_files): + segmentations_folder = root / "segmentations" + for image_file in image_files: + folder = segmentations_folder.joinpath(image_file.relative_to(image_file.parents[1])) + folder.mkdir(exist_ok=True, parents=True) + create_image_file( + folder, + image_file.with_suffix(".png").name, + size=[1, *make_tensor((2,), low=3, dtype=torch.int).tolist()], + ) + + make_tar(root, segmentations_folder.with_suffix(".tgz").name, compression="gz") + + @classmethod + def generate(cls, root): + image_files, num_samples_map = cls._make_archive(root) + cls._make_segmentations(root, image_files) + return num_samples_map + + +class CUB2002010MockData(_CUB200MockData): + @classmethod + def _make_hidden_rogue_file(cls, *files): + for file in files: + (file.parent / f"._{file.name}").touch() + + @classmethod + def _make_splits(cls, root, image_files): + split_folder = root / "lists" + split_folder.mkdir() + random.shuffle(image_files) + splits = ("train", "test") + num_samples_map = {} + for offset, split in enumerate(splits): + image_files_in_split = image_files[offset :: len(splits)] + + split_file = split_folder / f"{split}.txt" + with open(split_file, "w") as file: + file.write( + "\n".join( + sorted( + str(image_file.relative_to(image_file.parents[1]).as_posix()) + for image_file in image_files_in_split + ) + ) + ) + + cls._make_hidden_rogue_file(split_file) + num_samples_map[split] = len(image_files_in_split) + + make_tar(root, split_folder.with_suffix(".tgz").name, compression="gz") + + return num_samples_map + + @classmethod + def _make_anns(cls, root, image_files): + from scipy.io import savemat + + anns_folder = root / "annotations-mat" + for image_file in image_files: + ann_file = anns_folder / image_file.with_suffix(".mat").relative_to(image_file.parents[1]) + ann_file.parent.mkdir(parents=True, exist_ok=True) + + savemat( + ann_file, + { + "seg": torch.randint( + 256, make_tensor((2,), low=3, dtype=torch.int).tolist(), dtype=torch.uint8 + ).numpy(), + "bbox": dict( + zip(("left", "top", "right", "bottom"), make_tensor((4,), dtype=torch.uint8).tolist()) + ), + }, + ) + + readme_file = anns_folder / "README.txt" + readme_file.touch() + cls._make_hidden_rogue_file(readme_file) + + make_tar(root, "annotations.tgz", anns_folder, compression="gz") + + @classmethod + def generate(cls, root): + images_folder = root / "images" + image_files = cls._make_images(images_folder) + cls._make_hidden_rogue_file(*image_files) + make_tar(root, images_folder.with_suffix(".tgz").name, compression="gz") + + num_samples_map = cls._make_splits(root, image_files) + cls._make_anns(root, image_files) + + return num_samples_map + + +@register_mock(configs=combinations_grid(split=("train",
"test"), year=("2010", "2011"))) +def cub200(root, config): + num_samples_map = (CUB2002011MockData if config["year"] == "2011" else CUB2002010MockData).generate(root) + return num_samples_map[config["split"]] + + +@register_mock(configs=[dict()]) +def eurosat(root, config): + data_folder = root / "2750" + data_folder.mkdir(parents=True) + + num_examples_per_class = 3 + categories = ["AnnualCrop", "Forest"] + for category in categories: + create_image_folder( + root=data_folder, + name=category, + file_name_fn=lambda idx: f"{category}_{idx + 1}.jpg", + num_examples=num_examples_per_class, + ) + make_zip(root, "EuroSAT.zip", data_folder) + return len(categories) * num_examples_per_class + + +@register_mock(configs=combinations_grid(split=("train", "test", "extra"))) +def svhn(root, config): + import scipy.io as sio + + num_samples = { + "train": 2, + "test": 3, + "extra": 4, + }[config["split"]] + + sio.savemat( + root / f"{config['split']}_32x32.mat", + { + "X": np.random.randint(256, size=(32, 32, 3, num_samples), dtype=np.uint8), + "y": np.random.randint(10, size=(num_samples,), dtype=np.uint8), + }, + ) + return num_samples + + +@register_mock(configs=combinations_grid(split=("train", "val", "test"))) +def pcam(root, config): + import h5py + + num_images = {"train": 2, "test": 3, "val": 4}[config["split"]] + + split = "valid" if config["split"] == "val" else config["split"] + + images_io = io.BytesIO() + with h5py.File(images_io, "w") as f: + f["x"] = np.random.randint(0, 256, size=(num_images, 10, 10, 3), dtype=np.uint8) + + targets_io = io.BytesIO() + with h5py.File(targets_io, "w") as f: + f["y"] = np.random.randint(0, 2, size=(num_images, 1, 1, 1), dtype=np.uint8) + + # Create .gz compressed files + images_file = root / f"camelyonpatch_level_2_split_{split}_x.h5.gz" + targets_file = root / f"camelyonpatch_level_2_split_{split}_y.h5.gz" + for compressed_file_name, uncompressed_file_io in ((images_file, images_io), (targets_file, targets_io)): + compressed_data = gzip.compress(uncompressed_file_io.getbuffer()) + with open(compressed_file_name, "wb") as compressed_file: + compressed_file.write(compressed_data) + + return num_images + + +@register_mock(name="stanford-cars", configs=combinations_grid(split=("train", "test"))) +def stanford_cars(root, config): + import scipy.io as io + from numpy.core.records import fromarrays + + split = config["split"] + num_samples = {"train": 5, "test": 7}[split] + num_categories = 3 + + if split == "train": + images_folder_name = "cars_train" + devkit = root / "devkit" + devkit.mkdir() + annotations_mat_path = devkit / "cars_train_annos.mat" + else: + images_folder_name = "cars_test" + annotations_mat_path = root / "cars_test_annos_withlabels.mat" + + create_image_folder( + root=root, + name=images_folder_name, + file_name_fn=lambda image_index: f"{image_index:5d}.jpg", + num_examples=num_samples, + ) + + make_tar(root, f"cars_{split}.tgz", images_folder_name) + bbox = np.random.randint(1, 200, num_samples, dtype=np.uint8) + classes = np.random.randint(1, num_categories + 1, num_samples, dtype=np.uint8) + fnames = [f"{i:5d}.jpg" for i in range(num_samples)] + rec_array = fromarrays( + [bbox, bbox, bbox, bbox, classes, fnames], + names=["bbox_x1", "bbox_y1", "bbox_x2", "bbox_y2", "class", "fname"], + ) + + io.savemat(annotations_mat_path, {"annotations": rec_array}) + if split == "train": + make_tar(root, "car_devkit.tgz", devkit, compression="gz") + + return num_samples + + +@register_mock(configs=combinations_grid(split=("train", "test"))) +def 
usps(root, config): + num_samples = {"train": 15, "test": 7}[config["split"]] + + with bz2.open(root / f"usps{'.t' if config['split'] != 'train' else ''}.bz2", "wb") as fh: + lines = [] + for _ in range(num_samples): + label = make_tensor(1, low=1, high=11, dtype=torch.int) + values = make_tensor(256, low=-1, high=1, dtype=torch.float) + lines.append( + " ".join([f"{int(label)}", *(f"{idx}:{float(value):.6f}" for idx, value in enumerate(values, 1))]) + ) + + fh.write("\n".join(lines).encode()) + + return num_samples diff --git a/test/common_extended_utils.py b/test/common_extended_utils.py new file mode 100644 index 00000000000..a34e15629bb --- /dev/null +++ b/test/common_extended_utils.py @@ -0,0 +1,310 @@ +import os +from collections import defaultdict +from numbers import Number +from typing import Any, List + +import torch +from torch.utils._python_dispatch import TorchDispatchMode + +from torch.utils._pytree import tree_map + +from torchvision.models._api import Weights + +aten = torch.ops.aten +quantized = torch.ops.quantized + + +def get_shape(i): + if isinstance(i, torch.Tensor): + return i.shape + elif hasattr(i, "weight"): + return i.weight().shape + else: + raise ValueError(f"Unknown type {type(i)}") + + +def prod(x): + res = 1 + for i in x: + res *= i + return res + + +def matmul_flop(inputs: List[Any], outputs: List[Any]) -> Number: + """ + Count flops for matmul. + """ + # Inputs should be a list of length 2. + # Inputs contains the shapes of two matrices. + input_shapes = [get_shape(v) for v in inputs] + assert len(input_shapes) == 2, input_shapes + assert input_shapes[0][-1] == input_shapes[1][-2], input_shapes + flop = prod(input_shapes[0]) * input_shapes[-1][-1] + return flop + + +def addmm_flop(inputs: List[Any], outputs: List[Any]) -> Number: + """ + Count flops for fully connected layers. + """ + # Count flop for nn.Linear + # inputs is a list of length 3. + input_shapes = [get_shape(v) for v in inputs[1:3]] + # input_shapes[0]: [batch size, input feature dimension] + # input_shapes[1]: [input feature dimension, output feature dimension] + assert len(input_shapes[0]) == 2, input_shapes[0] + assert len(input_shapes[1]) == 2, input_shapes[1] + batch_size, input_dim = input_shapes[0] + output_dim = input_shapes[1][1] + flops = batch_size * input_dim * output_dim + return flops + + +def bmm_flop(inputs: List[Any], outputs: List[Any]) -> Number: + """ + Count flops for the bmm operation. + """ + # Inputs should be a list of length 2. + # Inputs contains the shapes of two tensors. + assert len(inputs) == 2, len(inputs) + input_shapes = [get_shape(v) for v in inputs] + n, c, t = input_shapes[0] + d = input_shapes[-1][-1] + flop = n * c * t * d + return flop + + +def conv_flop_count( + x_shape: List[int], + w_shape: List[int], + out_shape: List[int], + transposed: bool = False, +) -> Number: + """ + Count flops for convolution. Note only multiplication is + counted. Computation for addition and bias is ignored. + Flops for a transposed convolution are calculated as + flops = batch_size * prod(w_shape) * prod(x_shape[2:]). + Args: + x_shape (list(int)): The input shape before convolution. + w_shape (list(int)): The filter shape. + out_shape (list(int)): The output shape after convolution.
+ transposed (bool): is the convolution transposed + Returns: + int: the number of flops + """ + batch_size = x_shape[0] + conv_shape = (x_shape if transposed else out_shape)[2:] + flop = batch_size * prod(w_shape) * prod(conv_shape) + return flop + + +def conv_flop(inputs: List[Any], outputs: List[Any]): + """ + Count flops for convolution. + """ + x, w = inputs[:2] + x_shape, w_shape, out_shape = (get_shape(x), get_shape(w), get_shape(outputs[0])) + transposed = inputs[6] + + return conv_flop_count(x_shape, w_shape, out_shape, transposed=transposed) + + +def quant_conv_flop(inputs: List[Any], outputs: List[Any]): + """ + Count flops for quantized convolution. + """ + x, w = inputs[:2] + x_shape, w_shape, out_shape = (get_shape(x), get_shape(w), get_shape(outputs[0])) + + return conv_flop_count(x_shape, w_shape, out_shape, transposed=False) + + +def transpose_shape(shape): + return [shape[1], shape[0]] + list(shape[2:]) + + +def conv_backward_flop(inputs: List[Any], outputs: List[Any]): + grad_out_shape, x_shape, w_shape = [get_shape(i) for i in inputs[:3]] + output_mask = inputs[-1] + fwd_transposed = inputs[7] + flop_count = 0 + + if output_mask[0]: + grad_input_shape = get_shape(outputs[0]) + flop_count += conv_flop_count(grad_out_shape, w_shape, grad_input_shape, not fwd_transposed) + if output_mask[1]: + grad_weight_shape = get_shape(outputs[1]) + flop_count += conv_flop_count(transpose_shape(x_shape), grad_out_shape, grad_weight_shape, fwd_transposed) + + return flop_count + + +def scaled_dot_product_flash_attention_flop(inputs: List[Any], outputs: List[Any]): + # FIXME: this needs to count the flops of this kernel + # https://github.com/pytorch/pytorch/blob/207b06d099def9d9476176a1842e88636c1f714f/aten/src/ATen/native/cpu/FlashAttentionKernel.cpp#L52-L267 + return 0 + + +flop_mapping = { + aten.mm: matmul_flop, + aten.matmul: matmul_flop, + aten.addmm: addmm_flop, + aten.bmm: bmm_flop, + aten.convolution: conv_flop, + aten._convolution: conv_flop, + aten.convolution_backward: conv_backward_flop, + quantized.conv2d: quant_conv_flop, + quantized.conv2d_relu: quant_conv_flop, + aten._scaled_dot_product_flash_attention: scaled_dot_product_flash_attention_flop, +} + +unmapped_ops = set() + + +def normalize_tuple(x): + if not isinstance(x, tuple): + return (x,) + return x + + +class FlopCounterMode(TorchDispatchMode): + def __init__(self, model=None): + self.flop_counts = defaultdict(lambda: defaultdict(int)) + self.parents = ["Global"] + # global mod + if model is not None: + for name, module in dict(model.named_children()).items(): + module.register_forward_pre_hook(self.enter_module(name)) + module.register_forward_hook(self.exit_module(name)) + + def enter_module(self, name): + def f(module, inputs): + self.parents.append(name) + inputs = normalize_tuple(inputs) + out = self.create_backwards_pop(name)(*inputs) + return out + + return f + + def exit_module(self, name): + def f(module, inputs, outputs): + assert self.parents[-1] == name + self.parents.pop() + outputs = normalize_tuple(outputs) + return self.create_backwards_push(name)(*outputs) + + return f + + def create_backwards_push(self, name): + class PushState(torch.autograd.Function): + @staticmethod + def forward(ctx, *args): + args = tree_map(lambda x: x.clone() if isinstance(x, torch.Tensor) else x, args) + if len(args) == 1: + return args[0] + return args + + @staticmethod + def backward(ctx, *grad_outs): + self.parents.append(name) + return grad_outs + + return PushState.apply + + def create_backwards_pop(self, name): + 
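+ # Counterpart to create_backwards_push: in the backward pass, PushState (attached to a module's outputs) re-appends the module name and this PopState (attached to its inputs) pops it again, so self.parents mirrors the module scope while gradients flow in reverse order.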
class PopState(torch.autograd.Function): + @staticmethod + def forward(ctx, *args): + args = tree_map(lambda x: x.clone() if isinstance(x, torch.Tensor) else x, args) + if len(args) == 1: + return args[0] + return args + + @staticmethod + def backward(ctx, *grad_outs): + assert self.parents[-1] == name + self.parents.pop() + return grad_outs + + return PopState.apply + + def __enter__(self): + self.flop_counts.clear() + super().__enter__() + + def __exit__(self, *args): + # print(f"Total: {sum(self.flop_counts['Global'].values()) / 1e9} GFLOPS") + # for mod in self.flop_counts.keys(): + # print(f"Module: ", mod) + # for k, v in self.flop_counts[mod].items(): + # print(f"{k}: {v / 1e9} GFLOPS") + # print() + super().__exit__(*args) + + def __torch_dispatch__(self, func, types, args=(), kwargs=None): + kwargs = kwargs if kwargs else {} + + out = func(*args, **kwargs) + func_packet = func._overloadpacket + if func_packet in flop_mapping: + flop_count = flop_mapping[func_packet](args, normalize_tuple(out)) + for par in self.parents: + self.flop_counts[par][func_packet] += flop_count + else: + unmapped_ops.add(func_packet) + + return out + + def get_flops(self): + return sum(self.flop_counts["Global"].values()) / 1e9 + + +def get_dims(module_name, height, width): + # detection models have curated input sizes + if module_name == "detection": + # we can feed a batch of 1 for detection model instead of a list of 1 image + dims = (3, height, width) + elif module_name == "video": + # hard-coding the time dimension to size 16 + dims = (1, 16, 3, height, width) + else: + dims = (1, 3, height, width) + + return dims + + +def get_ops(model: torch.nn.Module, weight: Weights, height=512, width=512): + module_name = model.__module__.split(".")[-2] + dims = get_dims(module_name=module_name, height=height, width=width) + + input_tensor = torch.randn(dims) + + # try: + preprocess = weight.transforms() + if module_name == "optical_flow": + inp = preprocess(input_tensor, input_tensor) + else: + # hack to enable mod(*inp) for optical_flow models + inp = [preprocess(input_tensor)] + + model.eval() + + flop_counter = FlopCounterMode(model) + with flop_counter: + # detection models expect a list of 3d tensors as inputs + if module_name == "detection": + model(inp) + else: + model(*inp) + + flops = flop_counter.get_flops() + + return round(flops, 3) + + +def get_file_size_mb(weight): + weights_path = os.path.join(os.getenv("HOME"), ".cache/torch/hub/checkpoints", weight.url.split("/")[-1]) + weights_size_mb = os.path.getsize(weights_path) / 1024 / 1024 + + return round(weights_size_mb, 3) diff --git a/test/common_utils.py b/test/common_utils.py index 9c0c3175ef1..99c7931587d 100644 --- a/test/common_utils.py +++ b/test/common_utils.py @@ -1,13 +1,35 @@ +import contextlib +import functools +import itertools import os +import pathlib +import random +import re import shutil -import tempfile -import contextlib -import unittest -import argparse import sys +import tempfile +import warnings +from subprocess import CalledProcessError, check_output, STDOUT + +import numpy as np +import PIL.Image +import pytest import torch -import errno -import __main__ +import torch.testing +from PIL import Image + +from torch.testing._comparison import BooleanPair, NonePair, not_close_error_metas, NumberPair, TensorLikePair +from torchvision import io, tv_tensors +from torchvision.transforms._functional_tensor import _max_value as get_max_value +from torchvision.transforms.v2.functional import to_image, to_pil_image + + +IN_OSS_CI = 
any(os.getenv(var) == "true" for var in ["CIRCLECI", "GITHUB_ACTIONS"]) +IN_RE_WORKER = os.environ.get("INSIDE_RE_WORKER") is not None +IN_FBCODE = os.environ.get("IN_FBCODE_TORCHVISION") == "1" +CUDA_NOT_AVAILABLE_MSG = "CUDA device not available" +MPS_NOT_AVAILABLE_MSG = "MPS device not available" +OSS_CI_GPU_NO_CUDA_MSG = "We're in an OSS GPU machine, and this test doesn't need cuda." @contextlib.contextmanager @@ -22,20 +44,12 @@ def get_tmp_dir(src=None, **kwargs): shutil.rmtree(tmp_dir) -ACCEPT = os.getenv('EXPECTTEST_ACCEPT') - -parser = argparse.ArgumentParser(add_help=False) -parser.add_argument('--accept', action='store_true') -args, remaining = parser.parse_known_args() -if not ACCEPT: - ACCEPT = args.accept -for i, arg in enumerate(sys.argv): - if arg == '--accept': - del sys.argv[i] - break +def set_rng_seed(seed): + torch.manual_seed(seed) + random.seed(seed) -class MapNestedTensorObjectImpl(object): +class MapNestedTensorObjectImpl: def __init__(self, tensor_map_fn): self.tensor_map_fn = tensor_map_fn @@ -64,90 +78,442 @@ def map_nested_tensor_object(object, tensor_map_fn): return impl(object) -# adapted from TestCase in torch/test/common_utils to accept non-string -# inputs and set maximum binary size -class TestCase(unittest.TestCase): - def assertExpected(self, output, subname=None, rtol=None, atol=None): - r""" - Test that a python value matches the recorded contents of a file - derived from the name of this test and subname. The value must be - pickable with `torch.save`. This file - is placed in the 'expect' directory in the same directory - as the test script. You can automatically update the recorded test - output using --accept. - - If you call this multiple times in a single function, you must - give a unique subname each time. - """ - def remove_prefix(text, prefix): - if text.startswith(prefix): - return text[len(prefix):] - return text - # NB: we take __file__ from the module that defined the test - # class, so we place the expect directory where the test script - # lives, NOT where test/common_utils.py lives. 
- module_id = self.__class__.__module__ - munged_id = remove_prefix(self.id(), module_id + ".") - test_file = os.path.realpath(sys.modules[module_id].__file__) - expected_file = os.path.join(os.path.dirname(test_file), - "expect", - munged_id) - - subname_output = "" - if subname: - expected_file += "_" + subname - subname_output = " ({})".format(subname) - expected_file += "_expect.pkl" - expected = None - - def accept_output(update_type): - print("Accepting {} for {}{}:\n\n{}".format(update_type, munged_id, subname_output, output)) - torch.save(output, expected_file) - MAX_PICKLE_SIZE = 50 * 1000 # 50 KB - binary_size = os.path.getsize(expected_file) - self.assertTrue(binary_size <= MAX_PICKLE_SIZE) +def is_iterable(obj): + try: + iter(obj) + return True + except TypeError: + return False - try: - expected = torch.load(expected_file) - except IOError as e: - if e.errno != errno.ENOENT: - raise - elif ACCEPT: - return accept_output("output") - else: - raise RuntimeError( - ("I got this output for {}{}:\n\n{}\n\n" - "No expect file exists; to accept the current output, run:\n" - "python {} {} --accept").format(munged_id, subname_output, output, __main__.__file__, munged_id)) - - if ACCEPT: - equal = False - try: - equal = self.assertNestedTensorObjectsEqual(output, expected, rtol=rtol, atol=atol) - except Exception: - equal = False - if not equal: - return accept_output("updated output") + +@contextlib.contextmanager +def freeze_rng_state(): + rng_state = torch.get_rng_state() + if torch.cuda.is_available(): + cuda_rng_state = torch.cuda.get_rng_state() + yield + if torch.cuda.is_available(): + torch.cuda.set_rng_state(cuda_rng_state) + torch.set_rng_state(rng_state) + + +def cycle_over(objs): + for idx, obj1 in enumerate(objs): + for obj2 in objs[:idx] + objs[idx + 1 :]: + yield obj1, obj2 + + +def int_dtypes(): + return (torch.uint8, torch.int8, torch.int16, torch.int32, torch.int64) + + +def float_dtypes(): + return (torch.float32, torch.float64) + + +@contextlib.contextmanager +def disable_console_output(): + with contextlib.ExitStack() as stack, open(os.devnull, "w") as devnull: + stack.enter_context(contextlib.redirect_stdout(devnull)) + stack.enter_context(contextlib.redirect_stderr(devnull)) + yield + + +def cpu_and_cuda(): + import pytest # noqa + + return ("cpu", pytest.param("cuda", marks=pytest.mark.needs_cuda)) + + +def cpu_and_cuda_and_mps(): + return cpu_and_cuda() + (pytest.param("mps", marks=pytest.mark.needs_mps),) + + +def needs_cuda(test_func): + import pytest # noqa + + return pytest.mark.needs_cuda(test_func) + + +def needs_mps(test_func): + import pytest # noqa + + return pytest.mark.needs_mps(test_func) + + +def _create_data(height=3, width=3, channels=3, device="cpu"): + # TODO: When all relevant tests are ported to pytest, turn this into a module-level fixture + tensor = torch.randint(0, 256, (channels, height, width), dtype=torch.uint8, device=device) + data = tensor.permute(1, 2, 0).contiguous().cpu().numpy() + mode = "RGB" + if channels == 1: + mode = "L" + data = data[..., 0] + pil_img = Image.fromarray(data, mode=mode) + return tensor, pil_img + + +def _create_data_batch(height=3, width=3, channels=3, num_samples=4, device="cpu"): + # TODO: When all relevant tests are ported to pytest, turn this into a module-level fixture + batch_tensor = torch.randint(0, 256, (num_samples, channels, height, width), dtype=torch.uint8, device=device) + return batch_tensor + + +def get_list_of_videos(tmpdir, num_videos=5, sizes=None, fps=None): + names = [] + for i in 
range(num_videos): + if sizes is None: + size = 5 * (i + 1) + else: + size = sizes[i] + if fps is None: + f = 5 else: - self.assertNestedTensorObjectsEqual(output, expected, rtol=rtol, atol=atol) + f = fps[i] + data = torch.randint(0, 256, (size, 300, 400, 3), dtype=torch.uint8) + name = os.path.join(tmpdir, f"{i}.mp4") + names.append(name) + io.write_video(name, data, fps=f) + + return names - def assertNestedTensorObjectsEqual(self, a, b, rtol=None, atol=None): - self.assertEqual(type(a), type(b)) - if isinstance(a, torch.Tensor): - torch.testing.assert_allclose(a, b, rtol=rtol, atol=atol) +def _assert_equal_tensor_to_pil(tensor, pil_image, msg=None): + # FIXME: this is handled automatically by `assert_equal` below. Let's remove this in favor of it + np_pil_image = np.array(pil_image) + if np_pil_image.ndim == 2: + np_pil_image = np_pil_image[:, :, None] + pil_tensor = torch.as_tensor(np_pil_image.transpose((2, 0, 1))) + if msg is None: + msg = f"tensor:\n{tensor} \ndid not equal PIL tensor:\n{pil_tensor}" + assert_equal(tensor.cpu(), pil_tensor, msg=msg) - elif isinstance(a, dict): - self.assertEqual(len(a), len(b)) - for key, value in a.items(): - self.assertTrue(key in b, "key: " + str(key)) - self.assertNestedTensorObjectsEqual(value, b[key], rtol=rtol, atol=atol) - elif isinstance(a, (list, tuple)): - self.assertEqual(len(a), len(b)) +def _assert_approx_equal_tensor_to_pil( + tensor, pil_image, tol=1e-5, msg=None, agg_method="mean", allowed_percentage_diff=None +): + # FIXME: this is handled automatically by `assert_close` below. Let's remove this in favor of it + # TODO: we could just merge this into _assert_equal_tensor_to_pil + np_pil_image = np.array(pil_image) + if np_pil_image.ndim == 2: + np_pil_image = np_pil_image[:, :, None] + pil_tensor = torch.as_tensor(np_pil_image.transpose((2, 0, 1))).to(tensor) + + if allowed_percentage_diff is not None: + # Assert that less than a given %age of pixels are different + assert (tensor != pil_tensor).to(torch.float).mean() <= allowed_percentage_diff + + # error value can be mean absolute error, max abs error + # Convert to float to avoid underflow when computing absolute difference + tensor = tensor.to(torch.float) + pil_tensor = pil_tensor.to(torch.float) + err = getattr(torch, agg_method)(torch.abs(tensor - pil_tensor)).item() + assert err < tol, f"{err} vs {tol}" + + +def _test_fn_on_batch(batch_tensors, fn, scripted_fn_atol=1e-8, **fn_kwargs): + transformed_batch = fn(batch_tensors, **fn_kwargs) + for i in range(len(batch_tensors)): + img_tensor = batch_tensors[i, ...] + transformed_img = fn(img_tensor, **fn_kwargs) + torch.testing.assert_close(transformed_img, transformed_batch[i, ...], rtol=0, atol=1e-6) + + if scripted_fn_atol >= 0: + scripted_fn = torch.jit.script(fn) + # scriptable function test + s_transformed_batch = scripted_fn(batch_tensors, **fn_kwargs) + torch.testing.assert_close(transformed_batch, s_transformed_batch, rtol=1e-5, atol=scripted_fn_atol) + + +def cache(fn): + """Similar to :func:`functools.cache` (Python >= 3.8) or :func:`functools.lru_cache` with infinite cache size, + but this also caches exceptions. 
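+ The cache key is built from the positional arguments plus the keyword argument *values* (keyword names are ignored), so callers are assumed to invoke the wrapped function consistently.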
+ """ + sentinel = object() + out_cache = {} + exc_tb_cache = {} + + @functools.wraps(fn) + def wrapper(*args, **kwargs): + key = args + tuple(kwargs.values()) + + out = out_cache.get(key, sentinel) + if out is not sentinel: + return out + + exc_tb = exc_tb_cache.get(key, sentinel) + if exc_tb is not sentinel: + raise exc_tb[0].with_traceback(exc_tb[1]) + + try: + out = fn(*args, **kwargs) + except Exception as exc: + # We need to cache the traceback here as well. Otherwise, each re-raise will add the internal pytest + # traceback frames anew, but they will only be removed once. Thus, the traceback will be ginormous hiding + # the actual information in the noise. See https://github.com/pytest-dev/pytest/issues/10363 for details. + exc_tb_cache[key] = exc, exc.__traceback__ + raise exc + + out_cache[key] = out + return out - for val1, val2 in zip(a, b): - self.assertNestedTensorObjectsEqual(val1, val2, rtol=rtol, atol=atol) + return wrapper + +def combinations_grid(**kwargs): + """Creates a grid of input combinations. + + Each element in the returned sequence is a dictionary containing one possible combination as values. + + Example: + >>> combinations_grid(foo=("bar", "baz"), spam=("eggs", "ham")) + [ + {'foo': 'bar', 'spam': 'eggs'}, + {'foo': 'bar', 'spam': 'ham'}, + {'foo': 'baz', 'spam': 'eggs'}, + {'foo': 'baz', 'spam': 'ham'} + ] + """ + return [dict(zip(kwargs.keys(), values)) for values in itertools.product(*kwargs.values())] + + +class ImagePair(TensorLikePair): + def __init__( + self, + actual, + expected, + *, + mae=False, + **other_parameters, + ): + if all(isinstance(input, PIL.Image.Image) for input in [actual, expected]): + actual, expected = [to_image(input) for input in [actual, expected]] + + super().__init__(actual, expected, **other_parameters) + self.mae = mae + + def compare(self) -> None: + actual, expected = self.actual, self.expected + + self._compare_attributes(actual, expected) + actual, expected = self._equalize_attributes(actual, expected) + + if self.mae: + if actual.dtype is torch.uint8: + actual, expected = actual.to(torch.int), expected.to(torch.int) + mae = float(torch.abs(actual - expected).float().mean()) + if mae > self.atol: + self._fail( + AssertionError, + f"The MAE of the images is {mae}, but only {self.atol} is allowed.", + ) else: - self.assertEqual(a, b) + super()._compare_values(actual, expected) + + +def assert_close( + actual, + expected, + *, + allow_subclasses=True, + rtol=None, + atol=None, + equal_nan=False, + check_device=True, + check_dtype=True, + check_layout=True, + check_stride=False, + msg=None, + **kwargs, +): + """Superset of :func:`torch.testing.assert_close` with support for PIL vs. 
tensor image comparison""" + __tracebackhide__ = True + + error_metas = not_close_error_metas( + actual, + expected, + pair_types=( + NonePair, + BooleanPair, + NumberPair, + ImagePair, + TensorLikePair, + ), + allow_subclasses=allow_subclasses, + rtol=rtol, + atol=atol, + equal_nan=equal_nan, + check_device=check_device, + check_dtype=check_dtype, + check_layout=check_layout, + check_stride=check_stride, + **kwargs, + ) + + if error_metas: + raise error_metas[0].to_error(msg) + + +assert_equal = functools.partial(assert_close, rtol=0, atol=0) + + +DEFAULT_SIZE = (17, 11) + + +NUM_CHANNELS_MAP = { + "GRAY": 1, + "GRAY_ALPHA": 2, + "RGB": 3, + "RGBA": 4, +} + + +def make_image( + size=DEFAULT_SIZE, + *, + color_space="RGB", + batch_dims=(), + dtype=None, + device="cpu", + memory_format=torch.contiguous_format, +): + num_channels = NUM_CHANNELS_MAP[color_space] + dtype = dtype or torch.uint8 + max_value = get_max_value(dtype) + data = torch.testing.make_tensor( + (*batch_dims, num_channels, *size), + low=0, + high=max_value, + dtype=dtype, + device=device, + memory_format=memory_format, + ) + if color_space in {"GRAY_ALPHA", "RGBA"}: + data[..., -1, :, :] = max_value + + return tv_tensors.Image(data) + + +def make_image_tensor(*args, **kwargs): + return make_image(*args, **kwargs).as_subclass(torch.Tensor) + + +def make_image_pil(*args, **kwargs): + return to_pil_image(make_image(*args, **kwargs)) + + +def make_bounding_boxes( + canvas_size=DEFAULT_SIZE, + *, + format=tv_tensors.BoundingBoxFormat.XYXY, + num_boxes=1, + dtype=None, + device="cpu", +): + def sample_position(values, max_value): + # We cannot use torch.randint directly here, because it only allows integer scalars as values for low and high. + # However, if we have batch_dims, we need tensors as limits. + return torch.stack([torch.randint(max_value - v, ()) for v in values.tolist()]) + + if isinstance(format, str): + format = tv_tensors.BoundingBoxFormat[format] + + dtype = dtype or torch.float32 + + h, w = [torch.randint(1, s, (num_boxes,)) for s in canvas_size] + y = sample_position(h, canvas_size[0]) + x = sample_position(w, canvas_size[1]) + + if format is tv_tensors.BoundingBoxFormat.XYWH: + parts = (x, y, w, h) + elif format is tv_tensors.BoundingBoxFormat.XYXY: + x1, y1 = x, y + x2 = x1 + w + y2 = y1 + h + parts = (x1, y1, x2, y2) + elif format is tv_tensors.BoundingBoxFormat.CXCYWH: + cx = x + w / 2 + cy = y + h / 2 + parts = (cx, cy, w, h) + else: + raise ValueError(f"Format {format} is not supported") + + return tv_tensors.BoundingBoxes( + torch.stack(parts, dim=-1).to(dtype=dtype, device=device), format=format, canvas_size=canvas_size + ) + + +def make_detection_masks(size=DEFAULT_SIZE, *, num_masks=1, dtype=None, device="cpu"): + """Make a "detection" mask, i.e. (*, N, H, W), where each object is encoded as one of N boolean masks""" + return tv_tensors.Mask( + torch.testing.make_tensor( + (num_masks, *size), + low=0, + high=2, + dtype=dtype or torch.bool, + device=device, + ) + ) + + +def make_segmentation_mask(size=DEFAULT_SIZE, *, num_categories=10, batch_dims=(), dtype=None, device="cpu"): + """Make a "segmentation" mask, i.e. 
(*, H, W), where the category is encoded as pixel value""" + return tv_tensors.Mask( + torch.testing.make_tensor( + (*batch_dims, *size), + low=0, + high=num_categories, + dtype=dtype or torch.uint8, + device=device, + ) + ) + + +def make_video(size=DEFAULT_SIZE, *, num_frames=3, batch_dims=(), **kwargs): + return tv_tensors.Video(make_image(size, batch_dims=(*batch_dims, num_frames), **kwargs)) + + +def make_video_tensor(*args, **kwargs): + return make_video(*args, **kwargs).as_subclass(torch.Tensor) + + +def assert_run_python_script(source_code): + """Utility to check assertions in an independent Python subprocess. + + The script provided in the source code should return 0 and not print + anything on stderr or stdout. Modified from scikit-learn test utils. + + Args: + source_code (str): The Python source code to execute. + """ + with get_tmp_dir() as root: + path = pathlib.Path(root) / "main.py" + with open(path, "w") as file: + file.write(source_code) + + try: + out = check_output([sys.executable, str(path)], stderr=STDOUT) + except CalledProcessError as e: + raise RuntimeError(f"script errored with output:\n{e.output.decode()}") + if out != b"": + raise AssertionError(out.decode()) + + +@contextlib.contextmanager +def assert_no_warnings(): + # The name `catch_warnings` is a misnomer as the context manager does **not** catch any warnings, but rather scopes + # the warning filters. All changes that are made to the filters while in this context, will be reset upon exit. + with warnings.catch_warnings(): + warnings.simplefilter("error") + yield + + +@contextlib.contextmanager +def ignore_jit_no_profile_information_warning(): + # Calling a scripted object often triggers a warning like + # `UserWarning: operator() profile_node %$INT1 : int[] = prim::profile_ivalue($INT2) does not have profile information` + # with varying `INT1` and `INT2`. Since these are uninteresting for us and only clutter the test summary, we ignore + # them. + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", message=re.escape("operator() profile_node %"), category=UserWarning) + yield diff --git a/test/conftest.py b/test/conftest.py new file mode 100644 index 00000000000..a9768598ded --- /dev/null +++ b/test/conftest.py @@ -0,0 +1,121 @@ +import random + +import numpy as np +import pytest +import torch + +from common_utils import ( + CUDA_NOT_AVAILABLE_MSG, + IN_FBCODE, + IN_OSS_CI, + IN_RE_WORKER, + MPS_NOT_AVAILABLE_MSG, + OSS_CI_GPU_NO_CUDA_MSG, +) + + +def pytest_configure(config): + # register an additional marker (see pytest_collection_modifyitems) + config.addinivalue_line("markers", "needs_cuda: mark for tests that rely on a CUDA device") + config.addinivalue_line("markers", "needs_mps: mark for tests that rely on a MPS device") + config.addinivalue_line("markers", "dont_collect: mark for tests that should not be collected") + config.addinivalue_line("markers", "opcheck_only_one: only opcheck one parametrization") + + +def pytest_collection_modifyitems(items): + # This hook is called by pytest after it has collected the tests (google its name to check out its doc!) + # We can ignore some tests as we see fit here, or add marks, such as a skip mark. + # + # Typically, here, we try to optimize CI time. In particular, the GPU CI instances don't need to run the + # tests that don't need CUDA, because those tests are extensively tested in the CPU CI instances already. + # This is true for both OSS CI and the fbcode internal CI. 
+ # In the fbcode CI, we have an additional constraint: we try to avoid skipping tests. So instead of relying on + # pytest.mark.skip, in fbcode we literally just remove those tests from the `items` list, and it's as if + # these tests never existed. + + out_items = [] + for item in items: + # The needs_cuda mark will exist if the test was explicitly decorated with + # the @needs_cuda decorator. It will also exist if it was parametrized with a + # parameter that has the mark: for example if a test is parametrized with + # @pytest.mark.parametrize('device', cpu_and_cuda()) + # the "instances" of the tests where device == 'cuda' will have the 'needs_cuda' mark, + # and the ones with device == 'cpu' won't have the mark. + needs_cuda = item.get_closest_marker("needs_cuda") is not None + needs_mps = item.get_closest_marker("needs_mps") is not None + + if needs_cuda and not torch.cuda.is_available(): + # In general, we skip cuda tests on machines without a GPU + # There are special cases though, see below + item.add_marker(pytest.mark.skip(reason=CUDA_NOT_AVAILABLE_MSG)) + + if needs_mps and not torch.backends.mps.is_available(): + item.add_marker(pytest.mark.skip(reason=MPS_NOT_AVAILABLE_MSG)) + + if IN_FBCODE: + # fbcode doesn't like skipping tests, so instead we just don't collect the test + # so that they don't even "exist", hence the continue statements. + if not needs_cuda and IN_RE_WORKER: + # The RE workers are the machines with GPU, we don't want them to run CPU-only tests. + continue + if needs_cuda and not torch.cuda.is_available(): + # On the test machines without a GPU, we want to ignore the tests that need cuda. + # TODO: something more robust would be to do that only in a sandcastle instance, + # so that we can still see the test being skipped when testing locally from a devvm + continue + if needs_mps and not torch.backends.mps.is_available(): + # Same as above, but for MPS + continue + elif IN_OSS_CI: + # Here we're not in fbcode, so we can safely collect and skip tests. + if not needs_cuda and torch.cuda.is_available(): + # Similar to what happens in RE workers: we don't need the OSS CI GPU machines + # to run the CPU-only tests. + item.add_marker(pytest.mark.skip(reason=OSS_CI_GPU_NO_CUDA_MSG)) + + if item.get_closest_marker("dont_collect") is not None: + # currently, this is only used for some tests we're sure we don't want to run on fbcode + continue + + out_items.append(item) + + items[:] = out_items + + +def pytest_sessionfinish(session, exitstatus): + # This hook is called after all tests have run, and just before returning an exit status. + # We here change exit code 5 into 0. + # + # 5 is issued when no tests were actually run, e.g. if you use `pytest -k some_regex_that_is_never_matched`. + # + # Having no test being run for a given test rule is a common scenario in fbcode, and typically happens on + # the GPU test machines which don't run the CPU-only tests (see pytest_collection_modifyitems above). For + # example `test_transforms.py` doesn't contain any CUDA test at the time of + # writing, so on a GPU test machine, testpilot would invoke pytest on this file and no test would be run. + # This would result in pytest returning 5, causing testpilot to raise an error. + # To avoid this, we transform this 5 into a 0 to make testpilot happy. 
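+ # (Exit code 5 is pytest.ExitCode.NO_TESTS_COLLECTED, see https://docs.pytest.org/en/stable/reference/exit-codes.html.)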
+ if exitstatus == 5: + session.exitstatus = 0 + + +@pytest.fixture(autouse=True) +def prevent_leaking_rng(): + # Prevent each test from leaking the rng to all other tests when they call + # torch.manual_seed() or random.seed() or np.random.seed(). + # Note: the numpy rngs should never leak anyway, as we never use + # np.random.seed() and instead rely on np.random.RandomState instances (see + # issue #4247). We still do it as an extra precaution. + + torch_rng_state = torch.get_rng_state() + builtin_rng_state = random.getstate() + numpy_rng_state = np.random.get_state() + if torch.cuda.is_available(): + cuda_rng_state = torch.cuda.get_rng_state() + + yield + + torch.set_rng_state(torch_rng_state) + random.setstate(builtin_rng_state) + np.random.set_state(numpy_rng_state) + if torch.cuda.is_available(): + torch.cuda.set_rng_state(cuda_rng_state) diff --git a/test/cpp/test_custom_operators.cpp b/test/cpp/test_custom_operators.cpp new file mode 100644 index 00000000000..5178575d21b --- /dev/null +++ b/test/cpp/test_custom_operators.cpp @@ -0,0 +1,65 @@ +#include <gtest/gtest.h> +#include <torch/script.h> +#include <torch/torch.h> + +// FIXME: the include path differs from OSS due to the extra csrc +#include <torchvision/ops/nms.h> + +TEST(test_custom_operators, nms) { + // make sure that the torchvision ops are visible to the jit interpreter + auto& ops = torch::jit::getAllOperatorsFor( + torch::jit::Symbol::fromQualString("torchvision::nms")); + ASSERT_EQ(ops.size(), 1); + + auto& op = ops.front(); + ASSERT_EQ(op->schema().name(), "torchvision::nms"); + + torch::jit::Stack stack; + at::Tensor boxes = at::rand({50, 4}), scores = at::rand({50}); + double thresh = 0.7; + + torch::jit::push(stack, boxes, scores, thresh); + op->getOperation()(stack); + at::Tensor output_jit; + torch::jit::pop(stack, output_jit); + + at::Tensor output = vision::ops::nms(boxes, scores, thresh); + ASSERT_TRUE(output_jit.allclose(output)); +} + +TEST(test_custom_operators, roi_align_visible) { + // make sure that the torchvision ops are visible to the jit interpreter even + // if not explicitly included + auto& ops = torch::jit::getAllOperatorsFor( + torch::jit::Symbol::fromQualString("torchvision::roi_align")); + ASSERT_EQ(ops.size(), 1); + + auto& op = ops.front(); + ASSERT_EQ(op->schema().name(), "torchvision::roi_align"); + + torch::jit::Stack stack; + float roi_data[] = {0., 0., 0., 5., 5., 0., 5., 5., 10., 10.}; + at::Tensor input = at::rand({1, 2, 10, 10}), + rois = at::from_blob(roi_data, {2, 5}); + double spatial_scale = 1.0; + int64_t pooled_height = 3, pooled_width = 3, sampling_ratio = -1; + bool aligned = true; + + torch::jit::push( + stack, + input, + rois, + spatial_scale, + pooled_height, + pooled_width, + sampling_ratio, + aligned); + op->getOperation()(stack); + at::Tensor output_jit; + torch::jit::pop(stack, output_jit); + + ASSERT_EQ(output_jit.sizes()[0], 2); + ASSERT_EQ(output_jit.sizes()[1], 2); + ASSERT_EQ(output_jit.sizes()[2], 3); + ASSERT_EQ(output_jit.sizes()[3], 3); +} diff --git a/test/datasets_utils.py b/test/datasets_utils.py new file mode 100644 index 00000000000..8ea5e12610f --- /dev/null +++ b/test/datasets_utils.py @@ -0,0 +1,1056 @@ +import contextlib +import functools +import importlib +import inspect +import itertools +import os +import pathlib +import platform +import random +import shutil +import string +import struct +import tarfile +import unittest +import unittest.mock +import zipfile +from collections import defaultdict +from typing import Any, Callable, Dict, Iterator, List, Optional, Sequence, Tuple, Union + +import numpy as np +import
numpy.typing as npt + +import PIL +import PIL.Image +import pytest +import torch +import torchvision.datasets +import torchvision.io +from common_utils import disable_console_output, get_tmp_dir +from torch.utils._pytree import tree_any +from torch.utils.data import DataLoader +from torchvision import tv_tensors +from torchvision.datasets import wrap_dataset_for_transforms_v2 +from torchvision.transforms.functional import get_dimensions +from torchvision.transforms.v2.functional import get_size + + +__all__ = [ + "UsageError", + "lazy_importer", + "test_all_configs", + "DatasetTestCase", + "ImageDatasetTestCase", + "VideoDatasetTestCase", + "create_image_or_video_tensor", + "create_image_file", + "create_image_folder", + "create_video_file", + "create_video_folder", + "make_tar", + "make_zip", + "create_random_string", +] + + +class UsageError(Exception): + """Should be raised in case an error happens in the setup rather than the test.""" + + +class LazyImporter: + r"""Lazy importer for additional dependencies. + + Some datasets require additional packages that are not direct dependencies of torchvision. Instances of this class + provide modules listed in MODULES as attributes. They are only imported when accessed. + + """ + MODULES = ( + "av", + "lmdb", + "pycocotools", + "requests", + "scipy.io", + "scipy.sparse", + "h5py", + ) + + def __init__(self): + modules = defaultdict(list) + for module in self.MODULES: + module, *submodules = module.split(".", 1) + if submodules: + modules[module].append(submodules[0]) + else: + # This introduces the module so that it is known when we later iterate over the dictionary. + modules.__missing__(module) + + for module, submodules in modules.items(): + # We need the quirky 'module=module' and submodules=submodules arguments to the lambda since otherwise the + # lookup for these would happen at runtime rather than at definition. Thus, without it, every property + # would try to import the last item in 'modules' + setattr( + type(self), + module, + property(lambda self, module=module, submodules=submodules: LazyImporter._import(module, submodules)), + ) + + @staticmethod + def _import(package, subpackages): + try: + module = importlib.import_module(package) + except ImportError as error: + raise UsageError( + f"Failed to import module '{package}'. " + f"This probably means that the current test case needs '{package}' installed, " + f"but it is not a dependency of torchvision. " + f"You need to install it manually, for example 'pip install {package}'." + ) from error + + for name in subpackages: + importlib.import_module(f".{name}", package=package) + + return module + + +lazy_importer = LazyImporter() + + +def requires_lazy_imports(*modules): + def outer_wrapper(fn): + @functools.wraps(fn) + def inner_wrapper(*args, **kwargs): + for module in modules: + getattr(lazy_importer, module.replace(".", "_")) + return fn(*args, **kwargs) + + return inner_wrapper + + return outer_wrapper + + +def test_all_configs(test): + """Decorator to run a test against all configurations. + + Add this as a decorator to an arbitrary test to run it against all configurations. This includes + :attr:`DatasetTestCase.DEFAULT_CONFIG` and :attr:`DatasetTestCase.ADDITIONAL_CONFIGS`. + + The current configuration is provided as the first parameter for the test: + + .. code-block:: + + @test_all_configs() + def test_foo(self, config): + pass + + .. note:: + + This will try to remove duplicate configurations.
During this process it will not preserve a potential + ordering of the configurations or an inner ordering of a configuration. + """ + + def maybe_remove_duplicates(configs): + try: + return [dict(config_) for config_ in {tuple(sorted(config.items())) for config in configs}] + except TypeError: + # A TypeError will be raised if a value of any config is not hashable, e.g. a list. In that case duplicate + # removal would be a lot more elaborate, and we simply bail out. + return configs + + @functools.wraps(test) + def wrapper(self): + configs = [] + if self.DEFAULT_CONFIG is not None: + configs.append(self.DEFAULT_CONFIG) + if self.ADDITIONAL_CONFIGS is not None: + configs.extend(self.ADDITIONAL_CONFIGS) + + if not configs: + configs = [self._KWARG_DEFAULTS.copy()] + else: + configs = maybe_remove_duplicates(configs) + + for config in configs: + with self.subTest(**config): + test(self, config) + + return wrapper + + +class DatasetTestCase(unittest.TestCase): + """Abstract base class for all dataset testcases. + + You have to overwrite the following class attributes: + + - DATASET_CLASS (torchvision.datasets.VisionDataset): Class of dataset to be tested. + - FEATURE_TYPES (Sequence[Any]): Types of the elements returned by index access of the dataset. Instead of + providing these manually, you can subclass ``ImageDatasetTestCase`` or ``VideoDatasetTestCase`` to + get a reasonable default that should work for most cases. Each entry of the sequence may be a tuple + to indicate multiple possible values. + + Optionally, you can overwrite the following class attributes: + + - DEFAULT_CONFIG (Dict[str, Any]): Config that will be used by default. If omitted, this defaults to all + keyword arguments of the dataset minus ``transform``, ``target_transform``, ``transforms``, and + ``download``. Overwrite this if you want to use a default value for a parameter for which the dataset does + not provide one. + - ADDITIONAL_CONFIGS (Sequence[Dict[str, Any]]): Additional configs that should be tested. Each dictionary can + contain an arbitrary combination of dataset parameters that are **not** ``transform``, ``target_transform``, + ``transforms``, or ``download``. + - REQUIRED_PACKAGES (Iterable[str]): Additional dependencies to use the dataset. If these packages are not + available, the tests are skipped. + + Additionally, you need to overwrite the ``inject_fake_data()`` method that provides the data that the tests rely on. + The fake data should resemble the original data as closely as necessary, while containing only a few examples. During + the creation of the dataset, the check-, download-, and extract-functions from ``torchvision.datasets.utils`` are + disabled. + + Without further configuration, the testcase will test if + + 1. the dataset raises a :class:`FileNotFoundError` or a :class:`RuntimeError` if the data files are not found or + corrupted, + 2. the dataset inherits from `torchvision.datasets.VisionDataset`, + 3. the dataset can be turned into a string, + 4. the feature types of a returned example match ``FEATURE_TYPES``, + 5. the number of examples matches the injected fake data, and + 6. the dataset calls ``transform``, ``target_transform``, or ``transforms`` if available when accessing data. + + Cases 3. to 6. are tested against all configurations in ``DEFAULT_CONFIG`` and ``ADDITIONAL_CONFIGS``. + + To add dataset-specific tests, create a new method that takes no arguments with ``test_`` as a name prefix: + + ..
code-block:: + + def test_foo(self): + pass + + If you want to run the test against all configs, add the ``@test_all_configs`` decorator to the definition and + accept a single argument: + + .. code-block:: + + @test_all_configs + def test_bar(self, config): + pass + + Within the test you can use the ``create_dataset()`` method that yields the dataset as well as additional + information provided by the ``inject_fake_data()`` method: + + .. code-block:: + + def test_baz(self): + with self.create_dataset() as (dataset, info): + pass + """ + + DATASET_CLASS = None + FEATURE_TYPES = None + + DEFAULT_CONFIG = None + ADDITIONAL_CONFIGS = None + REQUIRED_PACKAGES = None + + # These keyword arguments are checked by test_transforms in case they are available in DATASET_CLASS. + _TRANSFORM_KWARGS = { + "transform", + "target_transform", + "transforms", + } + # These keyword arguments get a 'special' treatment and should not be set in DEFAULT_CONFIG or ADDITIONAL_CONFIGS. + _SPECIAL_KWARGS = { + *_TRANSFORM_KWARGS, + "download", + } + + # These fields are populated during setUpClass() within _populate_private_class_attributes() + + # This will be a dictionary containing all keyword arguments with their respective default values extracted from + # the dataset constructor. + _KWARG_DEFAULTS = None + # This will be a set of all _SPECIAL_KWARGS that the dataset constructor takes. + _HAS_SPECIAL_KWARG = None + + # These functions are disabled during dataset creation in create_dataset(). + _CHECK_FUNCTIONS = { + "check_md5", + "check_integrity", + } + _DOWNLOAD_EXTRACT_FUNCTIONS = { + "download_url", + "download_file_from_google_drive", + "extract_archive", + "download_and_extract_archive", + } + + def dataset_args(self, tmpdir: str, config: Dict[str, Any]) -> Sequence[Any]: + """Define positional arguments passed to the dataset. + + .. note:: + + The default behavior is only valid if the dataset to be tested has ``root`` as the only required parameter. + Otherwise, you need to overwrite this method. + + Args: + tmpdir (str): Path to a temporary directory. For most cases this acts as root directory for the dataset + to be created and in turn also for the fake data injected here. + config (Dict[str, Any]): Configuration that will be passed to the dataset constructor. It provides at least + fields for all dataset parameters with default values. + + Returns: + (Tuple[str]): ``tmpdir`` which corresponds to ``root`` for most datasets. + """ + return (tmpdir,) + + def inject_fake_data(self, tmpdir: str, config: Dict[str, Any]) -> Union[int, Dict[str, Any]]: + """Inject fake data for the dataset into a temporary directory. + + During the creation of the dataset the download and extract logic is disabled. Thus, the fake data injected + here needs to resemble the raw data, i.e. the state of the dataset directly after the files are downloaded and + potentially extracted. + + Args: + tmpdir (str): Path to a temporary directory. For most cases this acts as root directory for the dataset + to be created and in turn also for the fake data injected here. + config (Dict[str, Any]): Configuration that will be passed to the dataset constructor. It provides at least + fields for all dataset parameters with default values. + + Needs to return one of the following: + + 1. (int): Number of examples in the dataset to be created, or + 2. (Dict[str, Any]): Additional information about the injected fake data. Must contain the field + ``"num_examples"`` that corresponds to the number of examples in the dataset to be created.
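+ + For example, an implementation that writes five fake images into ``tmpdir`` can simply return ``5``, or ``dict(num_examples=5)`` if the tests need no further information.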
+ """ + raise NotImplementedError("You need to provide fake data in order for the tests to run.") + + @contextlib.contextmanager + def create_dataset( + self, + config: Optional[Dict[str, Any]] = None, + inject_fake_data: bool = True, + patch_checks: Optional[bool] = None, + **kwargs: Any, + ) -> Iterator[Tuple[torchvision.datasets.VisionDataset, Dict[str, Any]]]: + r"""Create the dataset in a temporary directory. + + The configuration passed to the dataset is populated to contain at least all parameters with default values. + For this the following order of precedence is used: + + 1. Parameters in :attr:`kwargs`. + 2. Configuration in :attr:`config`. + 3. Configuration in :attr:`~DatasetTestCase.DEFAULT_CONFIG`. + 4. Default parameters of the dataset. + + Args: + config (Optional[Dict[str, Any]]): Configuration that will be used to create the dataset. + inject_fake_data (bool): If ``True`` (default) inject the fake data with :meth:`.inject_fake_data` before + creating the dataset. + patch_checks (Optional[bool]): If ``True`` disable integrity check logic while creating the dataset. If + omitted defaults to the same value as ``inject_fake_data``. + **kwargs (Any): Additional parameters passed to the dataset. These parameters take precedence in case they + overlap with ``config``. + + Yields: + dataset (torchvision.dataset.VisionDataset): Dataset. + info (Dict[str, Any]): Additional information about the injected fake data. See :meth:`.inject_fake_data` + for details. + """ + if patch_checks is None: + patch_checks = inject_fake_data + + special_kwargs, other_kwargs = self._split_kwargs(kwargs) + + complete_config = self._KWARG_DEFAULTS.copy() + if self.DEFAULT_CONFIG: + complete_config.update(self.DEFAULT_CONFIG) + if config: + complete_config.update(config) + if other_kwargs: + complete_config.update(other_kwargs) + + if "download" in self._HAS_SPECIAL_KWARG and special_kwargs.get("download", False): + # override download param to False param if its default is truthy + special_kwargs["download"] = False + + patchers = self._patch_download_extract() + if patch_checks: + patchers.update(self._patch_checks()) + + with get_tmp_dir() as tmpdir: + args = self.dataset_args(tmpdir, complete_config) + info = self._inject_fake_data(tmpdir, complete_config) if inject_fake_data else None + + with self._maybe_apply_patches(patchers), disable_console_output(): + dataset = self.DATASET_CLASS(*args, **complete_config, **special_kwargs) + + yield dataset, info + + @classmethod + def setUpClass(cls): + cls._verify_required_public_class_attributes() + cls._populate_private_class_attributes() + cls._process_optional_public_class_attributes() + super().setUpClass() + + @classmethod + def _verify_required_public_class_attributes(cls): + if cls.DATASET_CLASS is None: + raise UsageError( + "The class attribute 'DATASET_CLASS' needs to be overwritten. " + "It should contain the class of the dataset to be tested." + ) + if cls.FEATURE_TYPES is None: + raise UsageError( + "The class attribute 'FEATURE_TYPES' needs to be overwritten. " + "It should contain a sequence of types that the dataset returns when accessed by index." 
+ ) + + @classmethod + def _populate_private_class_attributes(cls): + defaults = [] + for cls_ in cls.DATASET_CLASS.__mro__: + if cls_ is torchvision.datasets.VisionDataset: + break + + argspec = inspect.getfullargspec(cls_.__init__) + + if not argspec.defaults: + continue + + defaults.append( + { + kwarg: default + for kwarg, default in zip(argspec.args[-len(argspec.defaults) :], argspec.defaults) + if not kwarg.startswith("_") + } + ) + + if not argspec.varkw: + break + + kwarg_defaults = dict() + for config in reversed(defaults): + kwarg_defaults.update(config) + + has_special_kwargs = set() + for name in cls._SPECIAL_KWARGS: + if name not in kwarg_defaults: + continue + + del kwarg_defaults[name] + has_special_kwargs.add(name) + + cls._KWARG_DEFAULTS = kwarg_defaults + cls._HAS_SPECIAL_KWARG = has_special_kwargs + + @classmethod + def _process_optional_public_class_attributes(cls): + def check_config(config, name): + special_kwargs = tuple(f"'{name}'" for name in cls._SPECIAL_KWARGS if name in config) + if special_kwargs: + raise UsageError( + f"{name} contains a value for the parameter(s) {', '.join(special_kwargs)}. " + f"These are handled separately by the test case and should not be set here. " + f"If you need to test some custom behavior regarding these parameters, " + f"you need to write a custom test (*not* test case), e.g. test_custom_transform()." + ) + + if cls.DEFAULT_CONFIG is not None: + check_config(cls.DEFAULT_CONFIG, "DEFAULT_CONFIG") + + if cls.ADDITIONAL_CONFIGS is not None: + for idx, config in enumerate(cls.ADDITIONAL_CONFIGS): + check_config(config, f"CONFIGS[{idx}]") + + if cls.REQUIRED_PACKAGES: + missing_pkgs = [] + for pkg in cls.REQUIRED_PACKAGES: + try: + importlib.import_module(pkg) + except ImportError: + missing_pkgs.append(f"'{pkg}'") + + if missing_pkgs: + raise unittest.SkipTest( + f"The package(s) {', '.join(missing_pkgs)} are required to load the dataset " + f"'{cls.DATASET_CLASS.__name__}', but are not installed." + ) + + def _split_kwargs(self, kwargs): + special_kwargs = kwargs.copy() + other_kwargs = {key: special_kwargs.pop(key) for key in set(special_kwargs.keys()) - self._SPECIAL_KWARGS} + return special_kwargs, other_kwargs + + def _inject_fake_data(self, tmpdir, config): + info = self.inject_fake_data(tmpdir, config) + if info is None: + raise UsageError( + "The method 'inject_fake_data' needs to return at least an integer indicating the number of " + "examples for the current configuration." + ) + elif isinstance(info, int): + info = dict(num_examples=info) + elif not isinstance(info, dict): + raise UsageError( + f"The additional information returned by the method 'inject_fake_data' must be either an " + f"integer indicating the number of examples for the current configuration or a dictionary with " + f"the same content. Got {type(info)} instead." + ) + elif "num_examples" not in info: + raise UsageError( + "The information dictionary returned by the method 'inject_fake_data' must contain a " + "'num_examples' field that holds the number of examples for the current configuration." 
+ ) + return info + + def _patch_download_extract(self): + module = inspect.getmodule(self.DATASET_CLASS).__name__ + return {unittest.mock.patch(f"{module}.{function}") for function in self._DOWNLOAD_EXTRACT_FUNCTIONS} + + def _patch_checks(self): + module = inspect.getmodule(self.DATASET_CLASS).__name__ + return {unittest.mock.patch(f"{module}.{function}", return_value=True) for function in self._CHECK_FUNCTIONS} + + @contextlib.contextmanager + def _maybe_apply_patches(self, patchers): + with contextlib.ExitStack() as stack: + mocks = {} + for patcher in patchers: + with contextlib.suppress(AttributeError): + mocks[patcher.target] = stack.enter_context(patcher) + yield mocks + + def test_not_found_or_corrupted(self): + with pytest.raises((FileNotFoundError, RuntimeError)): + with self.create_dataset(inject_fake_data=False): + pass + + def test_smoke(self): + with self.create_dataset() as (dataset, _): + assert isinstance(dataset, torchvision.datasets.VisionDataset) + + @test_all_configs + def test_str_smoke(self, config): + with self.create_dataset(config) as (dataset, _): + assert isinstance(str(dataset), str) + + @test_all_configs + def test_feature_types(self, config): + with self.create_dataset(config) as (dataset, _): + example = dataset[0] + + if len(self.FEATURE_TYPES) > 1: + actual = len(example) + expected = len(self.FEATURE_TYPES) + assert ( + actual == expected + ), "The number of the returned features does not match the the number of elements in FEATURE_TYPES: " + f"{actual} != {expected}" + else: + example = (example,) + + for idx, (feature, expected_feature_type) in enumerate(zip(example, self.FEATURE_TYPES)): + with self.subTest(idx=idx): + assert isinstance(feature, expected_feature_type) + + @test_all_configs + def test_num_examples(self, config): + with self.create_dataset(config) as (dataset, info): + assert len(list(dataset)) == len(dataset) == info["num_examples"] + + @test_all_configs + def test_transforms(self, config): + mock = unittest.mock.Mock(wraps=lambda *args: args[0] if len(args) == 1 else args) + for kwarg in self._TRANSFORM_KWARGS: + if kwarg not in self._HAS_SPECIAL_KWARG: + continue + + mock.reset_mock() + + with self.subTest(kwarg=kwarg): + with self.create_dataset(config, **{kwarg: mock}) as (dataset, _): + dataset[0] + + mock.assert_called() + + @test_all_configs + def test_transforms_v2_wrapper(self, config): + try: + with self.create_dataset(config) as (dataset, info): + for target_keys in [None, "all"]: + if target_keys is not None and self.DATASET_CLASS not in { + torchvision.datasets.CocoDetection, + torchvision.datasets.VOCDetection, + torchvision.datasets.Kitti, + torchvision.datasets.WIDERFace, + }: + with self.assertRaisesRegex(ValueError, "`target_keys` is currently only supported for"): + wrap_dataset_for_transforms_v2(dataset, target_keys=target_keys) + continue + + wrapped_dataset = wrap_dataset_for_transforms_v2(dataset, target_keys=target_keys) + assert isinstance(wrapped_dataset, self.DATASET_CLASS) + assert len(wrapped_dataset) == info["num_examples"] + + wrapped_sample = wrapped_dataset[0] + assert tree_any( + lambda item: isinstance(item, (tv_tensors.TVTensor, PIL.Image.Image)), wrapped_sample + ) + except TypeError as error: + msg = f"No wrapper exists for dataset class {type(dataset).__name__}" + if str(error).startswith(msg): + pytest.skip(msg) + raise error + except RuntimeError as error: + if "currently not supported by this wrapper" in str(error): + pytest.skip("Config is currently not supported by this wrapper") + raise 
error + + +class ImageDatasetTestCase(DatasetTestCase): + """Abstract base class for image dataset testcases. + + - Overwrites the FEATURE_TYPES class attribute to expect a :class:`PIL.Image.Image` and an integer label. + """ + + FEATURE_TYPES = (PIL.Image.Image, int) + + @contextlib.contextmanager + def create_dataset( + self, + config: Optional[Dict[str, Any]] = None, + inject_fake_data: bool = True, + patch_checks: Optional[bool] = None, + **kwargs: Any, + ) -> Iterator[Tuple[torchvision.datasets.VisionDataset, Dict[str, Any]]]: + with super().create_dataset( + config=config, + inject_fake_data=inject_fake_data, + patch_checks=patch_checks, + **kwargs, + ) as (dataset, info): + # PIL.Image.open() only loads the image metadata upfront and keeps the file open until the first access + # to the pixel data occurs. Trying to delete such a file results in an PermissionError on Windows. Thus, we + # force-load opened images. + # This problem only occurs during testing since some tests, e.g. DatasetTestCase.test_feature_types open an + # image, but never use the underlying data. During normal operation it is reasonable to assume that the + # user wants to work with the image he just opened rather than deleting the underlying file. + with self._force_load_images(): + yield dataset, info + + @contextlib.contextmanager + def _force_load_images(self): + open = PIL.Image.open + + def new(fp, *args, **kwargs): + image = open(fp, *args, **kwargs) + if isinstance(fp, (str, pathlib.Path)): + image.load() + return image + + with unittest.mock.patch("PIL.Image.open", new=new): + yield + + +class VideoDatasetTestCase(DatasetTestCase): + """Abstract base class for video dataset testcases. + + - Overwrites the 'FEATURE_TYPES' class attribute to expect two :class:`torch.Tensor` s for the video and audio as + well as an integer label. + - Overwrites the 'REQUIRED_PACKAGES' class attribute to require PyAV (``av``). + - Adds the 'DEFAULT_FRAMES_PER_CLIP' class attribute. If no 'frames_per_clip' is provided by 'inject_fake_data()' + and it is the last parameter without a default value in the dataset constructor, the value of the + 'DEFAULT_FRAMES_PER_CLIP' class attribute is appended to the output. 
+ """ + + FEATURE_TYPES = (torch.Tensor, torch.Tensor, int) + REQUIRED_PACKAGES = ("av",) + + FRAMES_PER_CLIP = 1 + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.dataset_args = self._set_default_frames_per_clip(self.dataset_args) + + def _set_default_frames_per_clip(self, dataset_args): + argspec = inspect.getfullargspec(self.DATASET_CLASS.__init__) + args_without_default = argspec.args[1 : (-len(argspec.defaults) if argspec.defaults else None)] + frames_per_clip_last = args_without_default[-1] == "frames_per_clip" + + @functools.wraps(dataset_args) + def wrapper(tmpdir, config): + args = dataset_args(tmpdir, config) + if frames_per_clip_last and len(args) == len(args_without_default) - 1: + args = (*args, self.FRAMES_PER_CLIP) + + return args + + return wrapper + + def test_output_format(self): + for output_format in ["TCHW", "THWC"]: + with self.create_dataset(output_format=output_format) as (dataset, _): + for video, *_ in dataset: + if output_format == "TCHW": + num_frames, num_channels, *_ = video.shape + else: # output_format == "THWC": + num_frames, *_, num_channels = video.shape + + assert num_frames == self.FRAMES_PER_CLIP + assert num_channels == 3 + + @test_all_configs + def test_transforms_v2_wrapper(self, config): + # `output_format == "THWC"` is not supported by the wrapper. Thus, we skip the `config` if it is set explicitly + # or use the supported `"TCHW"` + if config.setdefault("output_format", "TCHW") == "THWC": + return + + super().test_transforms_v2_wrapper.__wrapped__(self, config) + + +def _no_collate(batch): + return batch + + +def check_transforms_v2_wrapper_spawn(dataset, expected_size): + # This check ensures that the wrapped datasets can be used with multiprocessing_context="spawn" in the DataLoader. + # We also check that transforms are applied correctly as a non-regression test for + # https://github.com/pytorch/vision/issues/8066 + # Implicitly, this also checks that the wrapped datasets are pickleable. + + # To save CI/test time, we only check on Windows where "spawn" is the default + if platform.system() != "Windows": + pytest.skip("Multiprocessing spawning is only checked on macOS.") + + wrapped_dataset = wrap_dataset_for_transforms_v2(dataset) + + dataloader = DataLoader(wrapped_dataset, num_workers=2, multiprocessing_context="spawn", collate_fn=_no_collate) + + def resize_was_applied(item): + # Checking the size of the output ensures that the Resize transform was correctly applied + return isinstance(item, (tv_tensors.Image, tv_tensors.Video, PIL.Image.Image)) and get_size(item) == list( + expected_size + ) + + for wrapped_sample in dataloader: + assert tree_any(resize_was_applied, wrapped_sample) + + +def create_image_or_video_tensor(size: Sequence[int]) -> torch.Tensor: + r"""Create a random uint8 tensor. + + Args: + size (Sequence[int]): Size of the tensor. + """ + return torch.randint(0, 256, size, dtype=torch.uint8) + + +def create_image_file( + root: Union[pathlib.Path, str], name: Union[pathlib.Path, str], size: Union[Sequence[int], int] = 10, **kwargs: Any +) -> pathlib.Path: + """Create an image file from random data. + + Args: + root (Union[str, pathlib.Path]): Root directory the image file will be placed in. + name (Union[str, pathlib.Path]): Name of the image file. + size (Union[Sequence[int], int]): Size of the image that represents the ``(num_channels, height, width)``. If + scalar, the value is used for the height and width. If not provided, three channels are assumed. 
+ kwargs (Any): Additional parameters passed to :meth:`PIL.Image.Image.save`. + + Returns: + pathlib.Path: Path to the created image file. + """ + if isinstance(size, int): + size = (size, size) + if len(size) == 2: + size = (3, *size) + if len(size) != 3: + raise UsageError( + f"The 'size' argument should either be an int or a sequence of length 2 or 3. Got {len(size)} instead" + ) + + image = create_image_or_video_tensor(size) + file = pathlib.Path(root) / name + + # torch (num_channels x height x width) -> PIL (width x height x num_channels) + image = image.permute(2, 1, 0) + # For grayscale images PIL doesn't use a channel dimension + if image.shape[2] == 1: + image = torch.squeeze(image, 2) + PIL.Image.fromarray(image.numpy()).save(file, **kwargs) + return file + + +def create_image_folder( + root: Union[pathlib.Path, str], + name: Union[pathlib.Path, str], + file_name_fn: Callable[[int], str], + num_examples: int, + size: Optional[Union[Sequence[int], int, Callable[[int], Union[Sequence[int], int]]]] = None, + **kwargs: Any, +) -> List[pathlib.Path]: + """Create a folder of random images. + + Args: + root (Union[str, pathlib.Path]): Root directory the image folder will be placed in. + name (Union[str, pathlib.Path]): Name of the image folder. + file_name_fn (Callable[[int], str]): Should return a file name if called with the file index. + num_examples (int): Number of images to create. + size (Optional[Union[Sequence[int], int, Callable[[int], Union[Sequence[int], int]]]]): Size of the images. If + callable, will be called with the index of the corresponding file. If omitted, a random height and width + between 3 and 10 pixels is selected on a per-image basis. + kwargs (Any): Additional parameters passed to :func:`create_image_file`. + + Returns: + List[pathlib.Path]: Paths to all created image files. + + .. seealso:: + + - :func:`create_image_file` + """ + if size is None: + + def size(idx: int) -> Tuple[int, int, int]: + num_channels = 3 + height, width = torch.randint(3, 11, size=(2,), dtype=torch.int).tolist() + return (num_channels, height, width) + + root = pathlib.Path(root) / name + os.makedirs(root, exist_ok=True) + + return [ + create_image_file(root, file_name_fn(idx), size=size(idx) if callable(size) else size, **kwargs) + for idx in range(num_examples) + ] + + +def shape_test_for_stereo( + left: PIL.Image.Image, + right: PIL.Image.Image, + disparity: Optional[npt.NDArray] = None, + valid_mask: Optional[npt.NDArray] = None, +): + left_dims = get_dimensions(left) + right_dims = get_dimensions(right) + c, h, w = left_dims + # check that left and right are the same size + assert left_dims == right_dims + assert c == 3 + + # check that the disparity has the same spatial dimensions + # as the input + if disparity is not None: + assert disparity.ndim == 3 + assert disparity.shape == (1, h, w) + + if valid_mask is not None: + # check that valid mask is the same size as the disparity + _, dh, dw = disparity.shape + mh, mw = valid_mask.shape + assert dh == mh + assert dw == mw + + +@requires_lazy_imports("av") +def create_video_file( + root: Union[pathlib.Path, str], + name: Union[pathlib.Path, str], + size: Union[Sequence[int], int] = (1, 3, 10, 10), + fps: float = 25, + **kwargs: Any, +) -> pathlib.Path: + """Create a video file from random data. + + Args: + root (Union[str, pathlib.Path]): Root directory the video file will be placed in. + name (Union[str, pathlib.Path]): Name of the video file. 
+ size (Union[Sequence[int], int]): Size of the video that represents the + ``(num_frames, num_channels, height, width)``. If scalar, the value is used for the height and width. + If not provided, ``num_frames=1`` and ``num_channels=3`` are assumed. + fps (float): Frame rate in frames per second. + kwargs (Any): Additional parameters passed to :func:`torchvision.io.write_video`. + + Returns: + pathlib.Path: Path to the created image file. + + Raises: + UsageError: If PyAV is not available. + """ + if isinstance(size, int): + size = (size, size) + if len(size) == 2: + size = (3, *size) + if len(size) == 3: + size = (1, *size) + if len(size) != 4: + raise UsageError( + f"The 'size' argument should either be an int or a sequence of length 2, 3, or 4. Got {len(size)} instead" + ) + + video = create_image_or_video_tensor(size) + file = pathlib.Path(root) / name + torchvision.io.write_video(str(file), video.permute(0, 2, 3, 1), fps, **kwargs) + return file + + +@requires_lazy_imports("av") +def create_video_folder( + root: Union[str, pathlib.Path], + name: Union[str, pathlib.Path], + file_name_fn: Callable[[int], str], + num_examples: int, + size: Optional[Union[Sequence[int], int, Callable[[int], Union[Sequence[int], int]]]] = None, + fps=25, + **kwargs, +) -> List[pathlib.Path]: + """Create a folder of random videos. + + Args: + root (Union[str, pathlib.Path]): Root directory the video folder will be placed in. + name (Union[str, pathlib.Path]): Name of the video folder. + file_name_fn (Callable[[int], str]): Should return a file name if called with the file index. + num_examples (int): Number of videos to create. + size (Optional[Union[Sequence[int], int, Callable[[int], Union[Sequence[int], int]]]]): Size of the videos. If + callable, will be called with the index of the corresponding file. If omitted, a random even height and + width between 4 and 10 pixels is selected on a per-video basis. + fps (float): Frame rate in frames per second. + kwargs (Any): Additional parameters passed to :func:`create_video_file`. + + Returns: + List[pathlib.Path]: Paths to all created video files. + + Raises: + UsageError: If PyAV is not available. + + .. seealso:: + + - :func:`create_video_file` + """ + if size is None: + + def size(idx): + num_frames = 1 + num_channels = 3 + # The 'libx264' video codec, which is the default of torchvision.io.write_video, requires the height and + # width of the video to be divisible by 2. + height, width = (torch.randint(2, 6, size=(2,), dtype=torch.int) * 2).tolist() + return (num_frames, num_channels, height, width) + + root = pathlib.Path(root) / name + os.makedirs(root, exist_ok=True) + + return [ + create_video_file(root, file_name_fn(idx), size=size(idx) if callable(size) else size, **kwargs) + for idx in range(num_examples) + ] + + +def _split_files_or_dirs(root, *files_or_dirs): + files = set() + dirs = set() + for file_or_dir in files_or_dirs: + path = pathlib.Path(file_or_dir) + if not path.is_absolute(): + path = root / path + if path.is_file(): + files.add(path) + else: + dirs.add(path) + for sub_file_or_dir in path.glob("**/*"): + if sub_file_or_dir.is_file(): + files.add(sub_file_or_dir) + else: + dirs.add(sub_file_or_dir) + + if root in dirs: + dirs.remove(root) + + return files, dirs + + +def _make_archive(root, name, *files_or_dirs, opener, adder, remove=True): + archive = pathlib.Path(root) / name + if not files_or_dirs: + # We need to invoke `Path.with_suffix("")`, since call only applies to the last suffix if multiple suffixes are + # present. 
For example, `pathlib.Path("foo.tar.gz").with_suffix("")` results in `foo.tar`. + file_or_dir = archive + for _ in range(len(archive.suffixes)): + file_or_dir = file_or_dir.with_suffix("") + if file_or_dir.exists(): + files_or_dirs = (file_or_dir,) + else: + raise ValueError("No file or dir provided.") + + files, dirs = _split_files_or_dirs(root, *files_or_dirs) + + with opener(archive) as fh: + for file in sorted(files): + adder(fh, file, file.relative_to(root)) + + if remove: + for file in files: + os.remove(file) + for dir in dirs: + shutil.rmtree(dir, ignore_errors=True) + + return archive + + +def make_tar(root, name, *files_or_dirs, remove=True, compression=None): + # TODO: detect compression from name + return _make_archive( + root, + name, + *files_or_dirs, + opener=lambda archive: tarfile.open(archive, f"w:{compression}" if compression else "w"), + adder=lambda fh, file, relative_file: fh.add(file, arcname=relative_file), + remove=remove, + ) + + +def make_zip(root, name, *files_or_dirs, remove=True): + return _make_archive( + root, + name, + *files_or_dirs, + opener=lambda archive: zipfile.ZipFile(archive, "w"), + adder=lambda fh, file, relative_file: fh.write(file, arcname=relative_file), + remove=remove, + ) + + +def create_random_string(length: int, *digits: str) -> str: + """Create a random string. + + Args: + length (int): Number of characters in the generated string. + *digits (str): Characters to sample from. If omitted defaults to :attr:`string.ascii_lowercase`. + """ + if not digits: + digits = string.ascii_lowercase + else: + digits = "".join(itertools.chain(*digits)) + + return "".join(random.choice(digits) for _ in range(length)) + + +def make_fake_pfm_file(h, w, file_name): + values = list(range(3 * h * w)) + # Note: we pack everything in little endian: -1.0, and "<" + content = f"PF \n{w} {h} \n-1.0\n".encode() + struct.pack("<" + "f" * len(values), *values) + with open(file_name, "wb") as f: + f.write(content) + + +def make_fake_flo_file(h, w, file_name): + """Creates a fake flow file in .flo format.""" + # Everything needs to be in little Endian according to + # https://vision.middlebury.edu/flow/code/flow-code/README.txt + values = list(range(2 * h * w)) + content = ( + struct.pack("<4c", *(c.encode() for c in "PIEH")) + + struct.pack("" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "ascent = scipy.misc.ascent()\n", - "plt.gray()\n", - "plt.imshow(ascent, interpolation='nearest')\n", - "cropped_ascent = ascent[:100, 300:]\n", - "plt.imshow(cropped_ascent, interpolation='nearest')\n", - "print(cropped_ascent.shape)\n", - "print(cropped_ascent[90,90])\n", - "print(cropped_ascent.dtype)" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "torch.Size([100, 212])\n", - "117.0\n", - "\n", - " 117\n", - "[torch.DoubleTensor of size 1]\n", - "\n", - "torch.Size([1, 100, 212])\n", - "\n", - " 117\n", - " 117\n", - " 117\n", - "[torch.FloatTensor of size 3]\n", - "\n", - "torch.Size([3, 100, 212])\n" - ] - }, - { - "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXQAAADACAYAAAAUT5iEAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvNQv5yAAAIABJREFUeJztvWmMXVt2Hvbte+5YRRZZJB/H4vTGfnwREgmCIkC2EFiOI7cdtZIIQltG0o4baASIHSlOZLUtINYPB5Ay2FGAQMKLpKQTyG7JtgwJiZNYUeQE+eGOulstS916E/n4HossjsUiWeOddn7cWru+s+7a59xbw731qvcHEHV5hj2dPaz1rbXXdt57JCQkJCR88lGZdgESEhISEvYHaUJPSEhIOCJIE3pCQkLCEUGa0BMSEhKOCNKEnpCQkHBEkCb0hISEhCOCPU3ozrkfdM6965z7wDn3xf0qVEJCQkLC+HC79UN3zmUA3gPwrwNYBPB7AP6C9/5b+1e8hISEhIRRsRcJ/XsAfOC9v+W9bwP4MoDP7E+xEhISEhLGRXUP714CcIf+vwjgXy164eTJk/78+fMAgFqtBgBwzoG1hH6/n/tbqVTCfX6uUhmsRVmWwTk39H6v1zPLIGlkWTZ0zXsf3peyyV/OQ1/j3977oby5Dpw+l1mu63Slnjp/udbtdgvruxtkWYZGo5FLt91um9+ByyW/ub6TRKVSyZVHl9dqQwG3ufyVbzKt3dS9Xg9bW1vh9yQxSpvp50d9dj/e+3bD4uLiY+/9S2XP7WVCHwnOuS8A+AIAnDt3Dr/0S7+ESqWCc+fODQpQrYZJqd/vhw784sULAMDc3Bw2NzcBDCaVanVQZJlw5ufnw+ScZVl479mzZ7kJBhgMCsnrxIkT4Xqn0wEAbG1tYWNjI5RFFp1Go5FbAKTcrVYLwGBxkvtbW1t4/vw5gJ1BcezYsVCvzc3N3OQNAGtra6GO9Xod9Xo9lPvYsWO5OjAajQYeP348VN/dQso7Pz+Pq1evAtj5Dh9//HGog7WwSnkAoNlshnZmWJNl7JmyMspz/X4/lKHRaKDZbIb70qYyGeoJn/sHpwEMvi/3u1En9UqlEvLb6/d48eIFbt26BQB4+vRpuD7uZFsE/g7SBizYFAkUXBb+DvpZ/a21gJMm9HL85E/+5EejPLeXCf0ugMv0/4Xtazl4798G8DYA3Lhxw7daLTSbzTBpAQi/vffhQ8/OzoZremIGkJtsLalMS+5yTybeRqMxNEir1Wq4v7a2Zkrg8rdareYmXrkuiwM/u76+Hq71er2wEHD5JC2e7GJSOZf3pZcGi3a73Q75WJN/DNbksLy8HBbOhYUFAMD58+dx9+7d8I6WXp1zaLfbIX+9ABbB+n6x+6wJSV5ybXNzM1yfm5sLdeB3rIWEyyqTeJZloR15kh63PruBvH/ixAm8+uqrAID33nsPwGDhlnId5GRopbtfmgqP8zSZ7y/2MqH/HoDXnHPXMZjIPwvgx4peyLIM8/PzaDabOalZ4L0P13kwygTY6/XC83KfBzSn1Ww2h2iOfr8fJLhGoxGkSBnQPOE/ffoUMzMzIT0tfWRZFhYVRr/fH6J1eEJoNpuhDHxNJMNOp4OVlRUAwMmTJ4cmRudcqHuWZSGty5cv46OPBou4SNL6PQs82fGi9fDhw1y9L168GMq7tLQUrksbctvxoiZllbaR9HlAc9ksuqqo3JK3XJO6r66uhn7D5WLpmfuNlFPqw+lblJlzLqTLlFlM8xgHPHHOzc0BAN58800AwM2bN4NWtp/Ulp5kiyb0cfNkyR9IUvlBYtcTuve+65z7KwD+DwAZgF/x3n9z30qWkJCQkDAW9sShe+//CYB/MurzlUoFs7OzUcmi2+0OqfKVSiXHhTJlIvdZ0hKpjNOQ++12O8eR6uf6/X5O5WZqRNM+1Wo1p0WIZMgaBUu8IvnVarWchM1/pQ3k/szMzFA79fv98Hyz2Qx5zM7O4uTJkwAQJDh5XupYZhTk9pJ6iqTeaDQgBu2trS0sLy/nys6aSb/fD/SL9z5nANfps2SotQQBl0t/05hkKd9R2gYYlmjlN/c75ua1lM7tye8fJCQP6VOvv/56aPMHDx4MUVDjImbYL3tHP6vfsdJINMvB48CNogzpCKxG648r95jOkA7MBkPmr9m4qXlT/q1pEvnNnjEyEczOzoZBJIZSYKdT1uv1HA0jeczMzJgDQ37zQiKTfKVSCTSF9x7z8/PhWW2IzLIsx7cz9XH8+PHcs0+fPh1LTbboF3nv3r174TssLCyE6zKx82Tb6/XMyZvrLmn1er1CwxovFNqoKeWzJnxgmHqamZkJC3q3283RRdoI2Ov1ctfYdiLvWPTMfoLrJmg0GnjttddCuZaWlsKzuylH7B2LOrLGFJfV+n/yYpks0tb/hISEhCOCiUrowEAy04Y+lrBE+hTph2kSdg8UY2CWZVG3NI1qtZpT9S3fY35W0uUyM+XDUiS/V+Rdw8+ztMd5yfVerzdkFGQtQ7v/SZsw7SSUieSj62nBojY6nQ4WFxcBANevXw/eL6JZPH/+3KRBer1eTvuQelteI9aeA5bwLQ+gmAGP68CuqKLFcP6WR46WLHV7sHGatYhxEPPnL0K/3w/5vv7662G8fPTRR0NS9W4lY02Jxcqh3ynbp5Bw8Jg45SLuhNbkwhOn9jcH8vwxc9qcRpGXAXfUfr9v+gvLZMiud7yQsHcNTwTs4mYNAmsh4claFjDm5rk+8g67fJYNoosXLwYue2VlZazBZVFhMjHfvXsXly8PPFavXLkCAPjwww/DxOm9H5rEdV3Ys4hhTejW/SKuXf+WZzY2NnJ7AzgvXoC4rLF2iXHOMZ6/COPSEpz+tWvXAAwowA8//BAAwjffC3R5kmfKJwOJcklISEg4IpiahM40CRs1hU6w/I+990E6tbbu6+uxMnBZ+Fq/3w8eEUzPsJ8x52+p31yemESj/dQlP0lf2sB7n9MIgPyu1UqlkjOsajU5y7IgQXc6nSBBs1eQbpdYWfn+2tpaoF9EQrxy5Qpu374NYGCMZDrJCudgfb+Yz3mRCs90R6wOrBGJ9LqxsRF24XrvTd99zkP3K5bguS90Op0h7YGlW0uC363ky+2ysLAQyii7S3W4Bq0xxHZ3WsbYsjLGQnQkTBZJQk9ISEg4Ipi4UbRareakNi2xcEgAYJgX1VI1g6VURpkLFkspIh1rCVy73mVZZvo/F3Grcs3ijSX92dnZIbsCv8/ukjp9q+6y2/XKlSuBY9WS+rhwzoUwAyKpX716NfDqi4uL4b7FcXe73SAR1+v1qFHTel8/W6RZ6O9QqVRCX+BwDMePHzf9zAXsWskusGxjEfDu3dXV1ZCv4KC4aO89Ll68mMvvgw8+CG64RW0s7+81/ySZ25ikkXjiE7oMGDaK8RZ8PQhjqqDlMx1rLIuGYRpFEItBwpM0ezbwfaZcrHIwvaHVXO3Nwvct9dzqIGV0xfHjx8OAlxAB7C8+Dvg9CVNQrVYDvXPp0qWg9luUiK
Y4ZAK0AkUxYpMSUxvWpBLrK7y/QMpghaRg7xqrXBzq4MyZMzh9+jSAnQ1ey8vLpZEbmQYp8+CJ1U3KLPF9nHNhEV9fXzf70qgbz8qQJvM4WIC1YkztJxLlkpCQkHBEMFEJnaWIIuOkPCvXLOOSYBQpRvKMSSFaa+C09XuWdMN0h6aTuC46Xf6/JeHzczFJ3JLsWdrjvEVyE4nyzh0OZ29rRRZVZLX5kydPgovphQsXcOnSJQCDHaaWC6O83263c9pPkf+z9e0srS32fqwOW1tb5g5lltK1hK01Lknr+fPngeaSUAz9fh/Pnj0rLK8V/38UxOoJAGfPng31eeeddwIFxAZrQXJJ3D9YxuFxIqDuBRP3cpFNJTyJFm32YcQapSzCnRX9z+Kphd+Xsgh4ISpSvQW8AMlfa5JkPpZh+VOzZ0sZYguR1OHChQsABvFOdFTFGHgiim3EuXfvHoBBO0q8+263i/v37wOwD2nQcV+0DYXrI8/oOvI1izKx6qDTXFtbA7AT96XVauVoIV4krfSlTTY3N0M7SBucOHEi5Le8vJwLOaDBkTnZtjLOJM/2gBMnTgAA3nrrLXzwwQcAdmgyTjdN6PsH3VcOmmZhJMolISEh4YhgKkZRa/fgXlAmvVg+0THvC+u6VV4tdVv1sGKFx8ofO+1F1y3m02y9y+o7U1uS19WrV4P2UraTVNMsRcbapaWl0Oa8W5XjeOs4+FI3KY/lTTSKlFpEZ/B1ltZZkxFJvdfrBeqE3+c9Aiytc7ry3UX7mZ+fD5Kyczux5nl3cMxoWtRfGWX1PXHiBN544w0AwPvvvw9gQJNZmqOVr+5rCXHENO9JIEnoCQkJCUcEUzeKTmLVtzhpqwxFZbHKG/stGIWXLno/VsZxJFaLTxfU63Vcv34dAPDuu++avuM6nVi5+P7W1lYwuOrdqgCCgRDIaxF8IhVLObvtK2UujBbk/ubmZigLBztjA77UhwOQsSbDcXRE4zh+/Hi4/+TJEwCDdilzZyyT0PUzGr1eL7hmvv766wAGfuqsNVlaQpLQx8c07RETp1zGHZwxSsFSnYvete7Jfb3xQ9IfZRAVYZz3Y5O0LqM2mu21DEIrXL9+PZxb2W63o6EXdLraACxlFIPi4uJiOHBa8lpbW8tN4kxd6GPstAeQVb9RvYli92ITv0zItVotTOoc+MpaZLMsG6Irtra2As1y5syZEHJA8PTp09yBHGV9u0xQKKsbT+xihJa46pK+7o9pMv9kIFEuCQkJCUcEE6dcxjnAWO5Zrmgxqa1MoimjVazTlHYrnezVn9i6XySFFvkkxyCGubm5uUCN3L59u1D95uuxbyftuL6+HoJ2sRTLp/3E/MuBvI+45aOty1RUrlEkeMt3uNPpDMXDZ1dYTot3IHNfEsn+0aNHYSepSOqVSiVQH1tbW4X0Xll7xa6zUVyuNZtNvPrqq+GaSOlWqIOjgHGoqrJxE2uXIieFUe7vFROd0Hu9Hl68eDG2T20ZpxuLvKg348QGg85P/lr+8TEwNVL2rB7wo2wm0b6twHB9tKeN9p6xQh1wurLxaGNjI/iOc1rWpFI2kfT7/cDNx0Ia8IEeui58jc9QLZtwYgs7T7xFHjF8rdvthk05QlE0m83cszH/dLnGZ68+ffo0l0er1cKZM2cADCZ8oV8sQUT3RSuuvEUFMUfOz0ker776aqjbnTt3woJ7lKA97GKeZfpZ/r7yTmzvCIfxiKUrOAgvmES5JCQkJBwRTMUPvUiqsowxRSudc86UJmLeLGWSHT/LftujStAchY89NiyJVOc5Srl0lMdYOgIrqqUus9yX9xcWFoZ8x7U/eJH3RcyYF9MyWFOxtAj5vltbWzmPE0lzlPYbF1wu73d8zjl6IZelqF9pDUDSePToEYBBSAYxGJ85cyZ4v1gnD+m2LQr6FaPjrDJmWYZr164BGNCZEsBtP04/mhb0XMJ9vGz/i9ZCdZtZvv5AuYPFQVNYE5/Qy7bM60Efo0ksmiXGp8Y4dGtbtaTb7XaHDmYYBbw5hrf+ch5FXirj8vUWjRG7r9uRVW6mI7z3gQKQyWdjY6NwYSyifvQzuqzWd4gNQikPhzke1RPEirBZBKuMgs3NzXC/1WoV2jK8z4eO0JPKkydPcOrUKQAIZ54CA+8XDosADPqldT4tg795UV+LCQSXL18OeUi0Rh0G4ZPArevFjkN7lEF7+miqT88ZVt/mc4I53YNEaerOucvOud91zn3LOfdN59yPb18/5Zz7befc+9t/5w+0pAkJCQkJhRhFQu8C+E+89193zh0H8DXn3G8D+EsAfsd7/7POuS8C+CKAnypLTKuMZUdXxdT33aj6RZSDTlf7E1tpWKtyTKPgFd4ynI0q8egys4Rm1ceifaznYyrm/Pxgne50Ojn1m418VlpFZdLqqqXJWOn2+/2cd4y8M6o3Qew7Wc/GVGq+JlKrc24onjqnoT1ftNdWr9cL1Fa/3895vwgtw3mNSv+xBjYuJHa+fOebN2/mvr91wPphQ1FM/TKDJBtFeTzEAtMxG2CNUWtsxbTIMnqsCKVf23u/5L3/+vbvFwD+GMAlAJ8B8KXtx74E4IfHyjkhISEhYV8xFofunLsG4DsBfAXAOe+9bC+7D+Bc5J0vAPgCMAjbKgfXagkPGA4mJYhJW9a9ImlNG8+sZ2MGVotDs8pgaQllEtUoUlcZRpXaJD/+q59hg6AY/s6ePRvCwuoTluTdIiNSDMzzZlkWXOfkO7BPNB9qzHYKicNulUnnxb+t72c9G2tbyW9jYyPHp5fZXixjndTnyZMnId2ZmZngsy6G0qLDrK3y7laClvfOnz8PYGCzuHnzJoDB0Xq7Ces7aRTVPXYvZvgvMmrGvnOMRSjDXtp05AndOXcMwD8C8BPe++eqsN45Z5bCe/82gLcB4LXXXvO3bt2Ccy6o8seOHcsNAB0/o16v59QcMYpZDdTr9XJGJOsjyIQhkwCQNyJZfsgxlZtpAY7oOOrHt4xbsY9pdQ6eeK1OpSfWItqIKSamCOTZmZmZYLh79OiReVCJZdS08tWLKU/S0o78TThqpTY48STP37RsUdFqsG4PDeub8nvS75xzoRzsI86/taGSaSOmX06fPh2MpHL/yZMnuTFg9VNu7zLnghg9o7/Z/Px8LlqjxFTn/rebBX2/YQlvAk11MvVRRB2OsreE89D0zG4X2N3EUR+JYHPO1TCYzH/Ve/8b25cfOOcubN+/AODhWDknJCQkJOwrSiV0N1gifhnAH3vv/w7d+i0AnwPws9t/f7MsLe99kMLE7axarYYY1CwZWsfG9Xq9wtPknz17FiSHXq8X8mD1XNzx5ubmgjQoUjtLmY1GIyeVSRrWlvVKpRKCL7F7n7XrdGZmxjwUmfOy6haLi82GNX2UX8x/PibVWQZWvi9HqnU6ndyRala6Op1RrmtDIzDoH/K7VqsNSbfO7QTRYsqGqZpY/kWaVBlYO5L8AOQOnrb6FecXU8Olj3G8ctFYT58+jeXlZQAD6qPIwDZO3WLPWacffepTnwqB3HjXa5F0fJjBml+MZ
hmVBolRseNogzq9cTAK5fJ9AP5dAH/onPvG9rW/icFE/uvOuc8D+AjAj46aKU+QzD+y2seDQSquD6DQ4Pv8nuTVaDRyByfoDUC1Wi2nellqsqDX64W0Njc3g5rMp6uz5VsWl4sXL4b3tP1AyiCoVqtBvbYWkizL8ODBAwD5GCACrmO9Xg+UScyHn/lrPXH2+/3w3qlTp0KbygIraeh0dTtK21nquUUhVavVUHemV7h88izHfeH8LGjKRy+c+pmyOnJamhZsNBo524wWWrhvOedCH+j3+4E75/gvvOFNQhJw2WKTRpHPegzcP6Scs7OzgX4RXl36v7wzrUl91HyZYrS8zmI0i/Wszl/3pdi+gTJPGx4no6J0Qvfe/78AYqn+wFi5JSQkJCQcGCa+UxTIq8a8tZslNFmZ2NiiJTv5yxIar44sPUpeTPUILC8KhqWS8e+1tbUgsVrSXpZlwVCmDXfyl+kSrqOW4pi+abfbQUJjH202tsmzx44dG1rt2RjEga9YQmeJQuq4trYWqC15p16vR33iR92dadEgrVYrGNAfPHgQvql8R6ZWut1uoO+azeaQZqDpLOs4wZjxWV/jPqF32Qo4xrlQJhyl0QotwMZ2hkjq7KcuFBiQp/oY46jsXAer7jwm9WEZrC3GyjBtKkbnX0ZXFXmmlL3D6XN7xt7RWt5uMZUJvV6vh4mNPUS2trZylAMw7H1h0RnCuzLlYjVstVrNqerym6P98ULD3heWSi7XeBu45qKlDhKro8yzRVMG1iQsaTx//jxHTWh3OW672dnZobR4ImOqgA+d4LT4/FGLLrCoJGuC0pMEtx0v3nJ/YWEBwGCSlpPrLdWXv2mn0xkSDnhCjy0+Ma+hosFtPcdpbW5u5hY+/SwLKtxeTHPJs0+ePAn35+bmQh7s/RIrj/7+o96Tulh1l3H8+uuvh+9///79HMU07YlcoCfKMhuKNeb53TL6je+Vcec8zvbSXinaYkJCQsIRwUQl9EqlgmazmYtSxyt5lmVBleNrInVZRlHvdw7N0Kuj/F+k/lqtFtKv1WpBopAysFTOarQlbegogHyfvTLkGvsmW9QIaxZ8iIJ4cLB6zlSPoNPpmOe1Sr6tVmtI4mDtiDWL2AEHUpb19fUh7WJjYyNoIfoU+zKJw5J0pL7Pnz/Hxx9/DGCwuUm2pN+9ezc8ZxkyvfehvGwIj9EoluRu3bcom5hKzffF+4VpP9335Vn+jrpcWZYFz5JKpYK5ubnwrFx/8eKFWcdxoDcO6S3p+n61Wg2HZVQqFSwuLoY67oZC4HxHpUa0L7xF38boH31d5zVq7PQYiqRy3f/29N3GejohISEh4dBiohK6uGTp7dEcuEj7UgMwd38Ker1ejjtkP2Z5VqRU9j3f2toy3YY4fXaX08iyLEhCHLTIMopWq9WcBMZagPy1JD/WSFhCkDryyfTa9Q0YSO2ikbBfPXO3s7OzIU/Ol+sjf6W+LEVy21gBpGLhHGLQknC/38edO3dCvrIVXdp8aWnJlHh51zHXRbsr6rpbZYkZugT6vlVfSX99fT0YNS2bQ0wi43aReomhFBgYvcVIKultbGyMbJAuq2dZuViyfPnll0Pdbt++Hd2hOWqZRn0+9k34Wll9yvpqzPC8G8OvtbeEr+8GEz9TVFR7blD2Ay9SXSUNYEdFbbfbZhRAawsuD3I2GAp44nXOmcH9OS2ZwPRWea2eMdXDdAbXxTpMgzcLCSqVSvD04M5uxVzudrth8rDalb1gmN6xOmW73Q4LZ5ZlQ3G6nXO5+7KIFm1WKoLlJXP37t1gVLx06VKoo0xsbPTmfOR9Di0wSmzssr7Ik15RnbSQIJ5Jspg2m81gcNY0C9OBOi3vfS5Ko9Av4hXknAuLMC9mXIdYaAD9rbTxWoMpCuccrl+/DmDQ98WQzXWM9QVdRqbvOBa85X3D31Qbl/WzZeB3uAwxP3SL6rEEBqvcsbR2g0S5JCQkJBwRTJxyYSOcRq1WG3LTiklR7HtuGad4pWTpl09v17G1tSGEjZcW1SOGLu1OKeURKmBmZqbQ6MbSkeXmx+3gnMuFSihazZvNZpDQ2QVOwC50fEKTLhswoApEI2EjrrWHYH5+PpT3xYsXuzoM15J+2+12oF+kbRcWFnLulFZ7iFTVbreDpqQN0WVSkdUPx5GkWJOSdpbv6NzOLt1Op2NKlCylcj+V3ysrK6GMQr2wVL6xsVG4y3EcamMcXL58OYx5CRegd/TqcgC2JMuSfRksemW3VI9F5YyrZU4KE53QsyzDiRMnUK1Wc+qXRTdIJ2AfcP6g0sHZx7fb7eY2bkjsCfG+aDabOb5V0pDBxHFDynisdrttHh7BnjJSL+25w2o/MJicuPPwJKk7ULvdDnXUHhE6lMHc3Fxu0hZIWrwZKMZzSv4c0mBzc3OIjhL7iOTLW/SZigGKvQ2KUK/Xw6Jy69YtAMBrr72GK1euABh8U6YYuC/INRYEYmF3NcYJBxB7hutrtS2XUdMXgB1dUvdR2ewlEOpF8hKqp0xI2k/0+/2hwzLef//9XMwbC9aEXrZdv+z9cWDZP2JpscBiUaq7LcNukCiXhISEhCOCiVMu2mDAnhzWgb9syGQ1iHcEWisoGzgFMZ9lKUu1Wg2/YwcJsKRspWV5UfDuV1aDrVU75nEhdVlbWzO9byxfatYMWAVlyVQkJe3ZImlJO2jfc112733u6DSp+0svvRS2hFuHh4wDKwzB3bt3cfXqVQAD7wpW6/U+A9Z+mJLTu5MF+ylVFaXFhtK5ublQHjkMRsMy/LK2JtEY+/1+oF/m5+dDHxItpkzKLarDqJosl+vs2bMABuPs3XffBTDYZ7AXSs7qh5PGQWg2u0WS0BMSEhKOCCbuttjtdnPSs465oflBawcksCN1McfOzzQajSFtgH9bRlM2HLLxaXZ2NnDGIhnqsKWWWxL7G7OLouaPtXQdi1MCDKSrmPHSMsYyWBMRWK5dnJ/wu6wJWa6I1Wo1nK7DbdtoNEIMepHUy4y5ul6WCyPvJJVdiVevXsW1a9cADDh2+VYsqbNLqHzTfr8/ZOeYBCzb0NraWthJze5yliQO5Hcb6v787Nmz8Jv3YMg7HE8dGJ2XtrRQro92S9Za9fz8PD71qU8BsE8/Ggf6Hf6+k+Ktd+MWeVCYOOVSr9eHjFXsW6q3FFsRFgHkBiO/w/SNGARlgFQqlVwYAfZ4kffZICnvnzx5Mvj7yn2mXHS+MqHKBCd5y19+Vv5qrxFuM67v6urqkGEYyA9+WUgajUZuy7hFd1nBtZzbOfZN1PNYuST9VqsVJkXe3s5UjHx3OcleMM5mDj05VCqVQDHUarVgIL169Spu376dKzsb1Xhi9N7nwjQIrAmG23wvUfF0XoJ2ux3Ke/z4cfMYM75mTej8HBtKpT9KbPUsy/D8+fOh95meY7Cxj50A+K/+bdWTfebffPNNvPPOOwAGh2Xos4ZjfSI2ce/G2L5XTHsSZyTKJSEhIeGIYCrhc9k42ev1cpKdlj5ZUmAJm7f78zMiabEkzBIDGxTFOCjv6MBZku7jx4/D
daEgNO3AtJFlqLFUU4a17Z7LzvW1pEQOYiaGsCzLcmq21kjW19dzUiobCcVHWoymWtXXZTl+/HjQhGJtI2507XY7HGGn3fjGoWIkL2lPDgNw/vz5oBFIcC/eScp18d6H7ypamXb31H2J+8deJTSuN1NnL168CFI1h3iO7btgqVUgv58+fRrui3R84sSJcP/FixelfbRs56zAkphj77VaLbz55psABn7qogmP0ycOE+UxbUycQ2+327kJvV6vm2ol+1ozrAHJHDtvXNITmOZumUuWd5gnZg8QmfyZU2b/eQFPnHyCPeerrfrOuahvuaQhEyzXm6karptw53rA601dzItzGfjsVi6XvMeeSbIY8+Sg66fV59OnT+cOy+DFUH9vyxef6wDkt1hLFMZqtYpz584ByMd9EfDkzm1nCRIxn+b98qgoSl/6oIQJqNVqwfOI+5Xuu/qE7Hc6AAAgAElEQVR+t9sN9Ipgbm4uHEtYrVZDtMbdQI8t7ncWBcNjUvrrG2+8EfqonNGrJ+uiDV7aPqXbYxKTvT6/YVL5ChLlkpCQkHBEMHEJXVZyUY3r9bppXLJikPd6vSDZWZIjSwOzs7M5NZXzB4YjIAKDFZWDTrHEqWOfs5ahpQaRpizKh69zu/A9fo/9wOU+H3fHkuVLL70EADmDJOfBu2+tcrMBVoxpTI2xxCHvSl3ZAMt+3ZbUlmVZ8EleWloyI2TyO0Xqt6Y+JK3FxcXcwdxSLz7MmMuojdLs+WIdqn2QYPpFe1Wxn3qv1yvdzs9pcogEYNB/xGB94sSJ8P20JK/T2g9IHev1eqhDs9nMHWkHDPpHLIBYURljht2DxjTyZEwlfG6v18ttDRfwVnQdZU7AXiryf+tYuUqlkqM8gHz0Op7Q2VXR4uOyLAsDijccaR4WGHRK67g5du+zAu8zlSTXmcu2Ikpq3ltCHVhty+EFmFph8ITO3LmAF0PNx2pvA247a9IRCuull14KVAg/p72dJE3Lw4cXCvnmGxsbIe7L9e3If1evXs1xytahErxI88Kn6apx+P7dguso5VtdXc0dlmJ5RzE1xn1Q14GPsztx4kT4lvz9y2At2LyxzIo0yO/oOEhiu5CJvVar4d69e6E+WkiyXDiLynjQmDafnyiXhISEhCOCqWz973Q6QdLl0+adc6akJOj3+0Nb8rWXBEt2lm8xGyy1UZMD62jEjrnjcki6ImVoFVDD8phgo6r3dkAlllIkjRMnTgTNgI2q0o6NRmNIu4lhdXXVPM6OtSWR9lgrsGJVs7GMJWnB7Oxs8It+9OhR6eYWluYkL8s7o1qtBh968XJ55ZVXcoG8pG11H5JrIqFzHabt38xB4ebm5gItxIedWPs2gGGNrN/vB/qFA9v1er0hhwHelGVBj1MrwBg/a2k6/I70kZdffjmM4w8//PBQbbM/jBhZQnfOZc6533fO/S/b/7/unPuKc+4D59yvOeeGw/olJCQkJEwM40joPw7gjwHMbf//5wD8Xe/9l51zvwjg8wB+oSgBcVvUBgtewbXrG3PkHIo1tsKLdMzhU9moKpIH8/VcPgFLGd1uN3CKzFmy1CbpHj9+fMgFThs6db1Z+mEeuNvtmodEcxkF7P5pSXaVSqVQMmeJVCRbTovdQPv9fpDMBb1eL6cRWd+H/3I9JK2NjY0h1zltQ7F2JjI4XymvGPlu374dAnldu3YtnKSzsbFReBxcv98PbSP9S9dhHL/p3cD67qurq8GoyTuUtRaj02A7k6S1trZmGiqtUM6xcvG1Ih/9WP+wkGVZ+GZZloWwyWxTis0lnyQUtdc4GGlCd84tAPhzAP5zAH/NDXL9UwB+bPuRLwH4GYwwobMBCshvSffe57wM5BpPkDpsgDaEWhH0LHWZJwReMHgQ84TOG3uk3IJ+vx8mBPH64Pz0wRr8nqRvTUQvXrwYonrY+MheLpwv14FVZ940pcvIhl/ecMTtxd9E/JeZ4oid0crhB6Rcln/8yspKoAAk/gvnUea5oL+pnvxXVlaCQXFhYSHQL7dv3x5aOHliZ+8nbi+L6on5ze8WbGjU1zqdTu44O+3FxBvxrLbTRnv+PpIGOyeUhQawym1N2LHFoSgdYHDsoKQli3G73TadD8aJF3QYoEMe7BajUi7/DYC/DkByOw1gxXsvIt8igEvWi865Lzjnvuqc+6oOwJ+QkJCQsH8oldCdc38ewEPv/decc//auBl4798G8DYA3Lhxw4vLGxsk2Z2OJQogLynrgFhy33Jla7VaphQgEhrTILyzKyYpa3dKvfqLRmBFyGPDHdM+lmrM0gsbJ2NGYpHMW63WEA3R7XZzJzRpqUvXQbbjs3RjxaJvNBpD2/y1EU1QppJXKjuHXn/00UdBEhYD7+zsrPl9y+C9H/oO3nvcv38fwOB7XbhwAcDgmDQJ5GWdQsXllfLVarUo/XKQYI2HabKNjY1cPHogfzyjPA/k+ztrHKzBMTUJDMZmGf3CKKJBdDTGUeosacrh4FKud999N9Ch+pDwww4eAzxX7GWH6SiUy/cB+CHn3KcBNDHg0H8ewEnnXHVbSl8AcHeUDOVjcvyWIp9V9mNut9tDEw13ylarlfMp58lM8mKOvWjSYl/rzc3NoQ6ovTfYN9gK92upzFwHnhhl0uBj3zgdbi+Z0LMsG9ooVa1WQxtsbm4WTogbGxu5kMAyoXIIAEmXjzazyqW9dnRYV+byq9Vq2Oa9vr4enpVrFy5cyMVVsTp5jELS4MGytLSU84XXcV94EWbqQtOFVl57UfU1RaE9VmL3Nzc3Q3k5bLMlrHCaHCaA+6X8ljZim5NFoxRN0NbCatmUrHT0GJL2l010lUoF77//PoB8SOtPArg9rENLdoNSysV7/ze89wve+2sAPgvg//Le/0UAvwvgR7Yf+xyA39x1KRISEhIS9oy9+KH/FIAvO+f+NoDfB/DLo77IRkIGe7SwtMfHzVk7RfngBqEC2MAqqFarud2jct/y/tCRHbWxVhskebu/plS0lGFpBnyfd4dqIx1LNM1mM0jLXF5thNR1tCS858+f58IayG8OhcDx4SWv2PFtDO2hwxJar9cLUjFfF83gwYMHYes+S5GWr3WMutLtCwz6kuQLIATykna6d++e2U4CNpTWarUc5VRkFI15dVjvjPKs/M6yLJRHvJRarVaOMonRdlJvy0tK+gHXUUuUwPDhyGWa1Diwvp+U+8yZM6EPvvvuu7kont+OGGtC997/MwD/bPv3LQDfs/9FSkhISEjYDaZyBJ0+NJkDQGnplXlPjrViGa+cczlfaO2uKHyglEVgxcGoVCqBk9vY2DClC5EWWq3W0HFvnG8sHo1lnPLeBwmduWhLIm02mya/zPynJZWxj79c5zp674d8+CuVSojNXavVwrOW77l2VbSkJSnDkydP8OTJk5CHbufnz5+H73bu3LkhAymnrY18Atb6uD4ifd67dy/YdDiQl5SLQ6KyncLaSWppnjHNoUyKjEm6VloMqVev1wvfjCV4y31VG0+1RshHMloOBVqzGBW6DqNK1tyHZR/DW2+9FU4/Wl5
eNsfdfrqUHkZM/IALMZKxoUx8vK1zAJ1zoSNa0eW839niL3SLQKv6ltqswc+K9TzmKcDGRyvmOv/lvPWAci4fH5q3++sy8qTHni0ceIzbVupgtW21Wg3UxtraWs6P2TJWycApm2i0sU1gtffHH38cDcQmdZCJtVarDRlky8qiwROQ5Le+vh7ol2vXrgEArly5kgvkpcvG37/X6+XqoBfWGI1SNrnoxUpfj9FN7JFj+alro7zOg+mXmEAgY44puZgAo797bBG2UGYk5sVndnYWN27cAAC88847od9Y+R5VpOBcCQkJCUcEE5fQxXWQVTmWWPVqzsYnXq35OVHJWXplSUmkDaZ6vPe57cNcPimXDgTG+bIRjCkIa2s/ayR8ne9L2Z49e5ZrjyLj0smTJ3Ptoaklll74PYtW0uXSklCtVgsaUKfTGYr5rtO3JChuO9Ec7t+/XyqhSVoPHz4M7cQU16gqPrcFUyPOuWBMW1xcBDCQ1C9fvgxgQGGIJmNpEXonqQ4KF6MUiiRPfd/KV0v7WuJn7bbRaARayfo2gO0KrP8Cg+8v/d/a4ahddEd1NbUoJG0QLXJr7PV6oe+/9tproWwcA9+q41HCVCZ09nJhqsDiuNhbpd1uB95SBg13pGazmTtTVBCLAqgnOO93IgpubW0FKsjaCFGr1ULn0ZNL0WDQ8dv5OjDwULC8GJg+El60Xq/nNnvoBYx9x63BEot7zRQQR/ZjeqeIzuL32fuG70uM62fPnpV6ykja3W43+KfLBpNms2l6s1jfTJfRWoBETa9WqyE0wMsvv4ybN28C2DnblSkbfp/pKqY4rIUvdq1okrb61yhptdvtsCBLuarVahBadIREa3OcNflbGwRZMPPe5+wOuoxcxzL6rGwS5gl/dnYWn/rUpwAgfLulpSWz7Y4SEuWSkJCQcEQwFaOoliYsq7tIC5ubmzkpQsBeLiIpN5tN08uAYUkRLE3wiTcWXcFhCE6ePAkgvu3dkqAszaBSqQTVmKPeWfDeh3y11CTvSXkksqXc03XY2NjIRa9kyVLvdj158mT4Ts1ms9TgzB4RGr1eL5xSNI6kVKlUQnkfPXoEADh79mzO08eS1i06g/9vUQwPHz4M7Xjp0qUQ8e/DDz8MdeD25Fjiur/yaVExKdSSyvl67P9lkOd7vR6Wl5dDeYDBN5Xxsrq6GjXCSrksWoVpNEmXdzjHaJKYQXdUeiZWV25H6RevvfZaKKO15+EoYSpH0PG2+l6vl+Mf9bFxfI4oQzpSo9EIW531wNRbzhmcpjXpbW1tmbEtJP1arRaoFh2LQU9mmkfWdgCeqHgzkbXNvFKphKPCeGu39z7nlilpWQNL/nJURX1ffkuatVot5MUTaExNZ1sEewMBA5cymVzKFuAYJCQun0/KA5rrw+Wy1HvNp8v9Bw8ehPsS92VhYQEAQuwXna/3w5tumFdnlHm57BYW/cILEHvkiC2k2WzmFiVNk+jj7ATszinf99ixY7mxJ3YatlnFFtmi+ujvZ9WXoTcpvvzyy6G8i4uLpi1hVHvMYUWiXBISEhKOCKZiFGVoWkAgK+b6+nrOy0VLP7VazTxqzjISstrIm4mYarACY1mSSbPZzHmTMCyjaFGQJOdczvec72uDbqvVChoJS11ZlgWjl0j72igqafBBFpy+pbWINsASWEyitdrLotHu3r0bvEY46NM4kqqku7y8HL7DqVOnxkqrSL13buc4xPv374c8xA/+4sWLuHv3bnjWivUuZWFvKabnygyd+w2tGbJBvNFo5GgUS1KNGcDlr9Tz2LFjIV5+o9EIGp0YtCflYaK1gCzLwuHTrVYrHJZhbSz8pNIxSUJPSEhIOCKYqIQuPugsLeqj3ASyanLoWnaBYj6XOWteYTn8qfy1QtZy/rwrtYgXbzQaOV7d4p/5HebopG6iWfDRelw3bie5xr7nHKyMeW3WYqwySGgB9ifWkrRcZ5dMzkN/B22/sAxoIhHeu3fPdGUbBVZfET/jLMuCRmGlvdu82u12MKZJuc+dOxfamQN5cfhk/uas/Wh7zH5Lg9YYYO2UJXHRlLzfCWltGe5jdiBLA+RQzK1WK3wTGVtPnz6Nxi3XWp7OV4/povjn1m5nKaPsMQAGhm75Ppam9UmS1qdiFOVg+tZWfmBnQt/a2srFotZqf7PZNNVYPuOSP2zRMWy87d4qE6fFsdf7/f7QINX1jrUHMJhgpVxZlpkeAPLssWPHhg6qAAaThI4eyR243+/nokfKOzww2fdcJnKhGngwldWH8+VvJkbGFy9e5Caa3QwYbhdpjwcPHoRJSf4Co50/qidXfU3aTDYeNRqNYChdX18PG5M4DStiKC90+gxQebdo4RmHSuJJPNYvJS+mX5rN5lDMJO31Yo1bptrkbNherxdilwsNU6lUglGcBbqYn7rOR8NyaiiDcy7sM6hWqyGmOhuMY2U4zEiUS0JCQsIRwcSNorKKilTFpxABO6shB+xiyH1R/2M7BZk2YEmpyOe90+nk/LJZctAukEX5arWR6R0AQ+VaXV2NGmCl/uwiyfSM5TvOsdtZQmetB4i78Xnvc66gwDC1pcFtpKVuKZfQFqNI+2Xg9/j7yQ7US5cuRY3WXGYpj+XCaBmnxV3yzp07IVgZsKPJbG5umi5/3Ef4EGcgfzrWfkqDse+rn5H7vAtW6sNlZQpJl1eXm49vlDzEoHz8+PGQ1srKimmULKNkrHayfOcZ2ideynXx4sUwJiVaI7tY7laLnAamMqEDyPmeW1uo2TOAB7+Oy6InGh4kesMP0w5WaAD2y2ZOkJ+ViU5sAVynWF17vZ65AUjy2tjYiE5wsnCdPn063OdOyZOWPiuUPVN6vV6gBfg5i69nH3v2IdebiRjsiw/k/ZNFvZbNQLv1PS9DpbJzPunDhw9DKFxrC75ehIsmOysGzMrKinlknxURVKetD1Qpao/d8LgWr122UHBf4KMAeWKPxZMB8kKLnjhlERScOnUqbI6rVCoh3ALbrSZBc3Ae58+fD+UBgPfffz8cFPJJmcyBRLkkJCQkHBlMjXJhCZ1XQFHx+IAFtsrLs2L0YimU07HUWFajLKMH0z8stfFvkcR0uS1VkA1LlteHSC462JVl1LI8avg6UypMk8h9jhhoHXDBEhp7tvAOVYYlQVnSVbVaDcZQDqx2UBKYlHNlZSXUQ7Qbq6z6d8zoxpqKXBNJnKNHWv2Kd9nqo9qAgYYmkrAOu6Clw1HUf+s7jGNMlTLF6sB1j/V7SYvH4crKSnhGJPS5ublw/+nTp0Na5qQMkpLvmTNnAAzGzre+9S0AA0pU7yQ9rFL7xCd0ID/Bcedgno47DNMROsIhT3C8Fd4KV8vqMNM3MtFsbm6aXh+SNjAcglff5w5sgRcoi6/n8na73bA1W6gXvTBIvszT8kLEz+uDJHiiYpdOyZOf1dADmb1z+JzXdrsdeO1JDtJKpRIWEsn31KlTJqesuVcLRQO43+/n3N50pErOS0clBAb9j+m7/ZosrMWAr2tenMslfYUXKkuwEujvz2XgPgYMBBm5durUqdzBKe
wdo9PlOliLlbYZFAkhuvy6L5w4cQLf8R3fASB/WIY1dg7T5J4ol4SEhIQjgon7ocuxaNaZoByIy7qfZVmQVK0Ie9bGA05LR0+UlZXzlHT5DEXvdzZdWIdpsFTFZbB87Z3b2bzE/uCxSHZihLVoEg7G1W63hyRN3szy7Nkz0wuADcdWMDNuZwss7Ul+rN3cuXMnF0hL7u9VqrGkIy2BSX3FGFur1UIseb1xrChdK1+WYll6ZTop5tNu5SV9ZGtrayjIGr+3n9LgKJqSPLO2thY0t1qtZgbM4/QsDZv7NRscxfvl5MmT4RmR1NlPfVxY30EwiiODjL0bN27g3XffBbDTlw6TVM5IEnpCQkLCEcFIErpz7iSAXwLwLwHwAP4ygHcB/BqAawBuA/hR7/3TsrTEIMpubQy9BZevsUQpXLY2OFmBuiwu05Im6vV61OVLAl/xlmPWAqz3tHuaPCe8JNfV4gF527SO3S7vWacfcf5SBtnuz/fZ6NrpdHJx1nVQsBiXHgvUJeneuXMnZzzk53YD1sakXJxvrB2AgXQlGge7YbK2xZohf2udL5clZhAU8Hfi/mG5tbI212g0hvrrXqRV/a4VB17/5npLH2q1WjljPJCXpHX4A71Nn/u4SOrAgE8XDcoylAIYkvZjfD2nYWkisbpzPlK3ZrMZTj+SZ+/fv1/oxjstjEq5/DyA/917/yPOuTqAGQB/E8DveO9/1jn3RQBfBPBTRYmIAZSNQMBOI3F0QOvQCO+9SbnEDE76o8c+vlAgWZaZ/uIAwoTOsPJlVd8y0FQqO+d4xjoUe5toA5tzO7G1q9VqbrHTqFarOcOrHoT6G4ihud/v57yILMQMbsBgkRWPhkePHu3J73wcT42y++vr6+FgjQsXLpTSSbsZqDxxWiErLGMa9xkApoBipT9KWQTWO+MsDlzGra2tnBeLlJnzs8IeCLThmL1fePORQBtKJT9dz1h9d/sdGTIeZGLPsiwY3WPRKaeBUsrFOXcCwPcD+GUA8N63vfcrAD4D4Evbj30JwA8fVCETEhISEsoxioR+HcAjAP+Dc+5fBvA1AD8O4Jz3fmn7mfsAzlkvO+e+AOALwGCLraiXWooB7JWU6YhKpRIMRuzqKKt9lmWmGh07nYi30Mv7/JvLo6MLskrGEodzbii8ABsJmQaxXCsrlUq4PzMzMyTFc1qcl35G/uotzLE6cF4cbGxU6O3z4qq4ubkZJOG9SkqjSOgxukDui3b0+PHjsJPUKps2II+qHXBf429nSayWGyDnE4ujHmtHq4zsMrgXKZK1zH6/n9NqgYFWFqMYtYTe7/fN/Q3Pnz8PdRBJ/cSJE+Has2fPcm0i4H5ruWTuB3hvBzA41k5+37lzxwyLMA3D6SijtgrguwD8Ve/9V5xzP48BvRLgvffOObO3eO/fBvA2ALz11lteT6IMK34DDwYOWavpEyDfOXhi5E1K/I6+rnli5tatrfnWB2OfZH5Ont3Y2Aj3LU6Z1fPZ2VmzU0pHYr96S+3rdrs5P2IBdzhJv9VqhUHKXhbWJMDfRNNLwIAXlQldL4xlGJX35PqUDVzrOy0vL4d2lM0kZe8UldmisbjPyDdnOw2/wxM2Uy7yTVhQiU3c1kRi2Yx2C/422huNN6Pp76HHFXv1aJuQ0C+S18mTJ4MdyTmXi9Ioz1nhPCxqSi/2RbRM0bvAYIzIWaVZluGjjz4CkPeeKop0eVAYxctlEcCi9/4r2///hxhM8A+ccxcAYPvvw4MpYkJCQkLCKCiV0L33951zd5xzb3jv3wXwAwC+tf3vcwB+dvvvb46QFjqdTqGkxWq7vMMGGB0PnVUrlnRYKrKkE5ZouAy881JgGc+Y+tB10LtVOa+NjY2h93gF73Q6OWlHl5E9UPShu1oi4Fjylsrtvc95C+mDNzjfMgqi3++Hdrp7924IBBbzjolBS+gxKYrLxc+OQ9HIwRjVajV4+HBelnQbk37lGabtpD159/DW1taQkbhIGuSDXoA8NTaOlDmOMdVCzINE+gX7qfN5B7oc+n1NazK9IhD6ZX5+fshP3fKHt8o+yjUuT1lbcXu+8sorYcy8//77OQcG/exBY1Si9K8C+NVtD5dbAP59DKT7X3fOfR7ARwB+9GCKmJCQkJAwCkaa0L333wDw3catHxgnM+G7eLXikLaxlVSkZj6diCV4lo44DZFgWWLm2NnynnVyDBtba7XaEE+n/a/ZyGtJXBwbQ/t4a7dGy0VSoI+aKzqCS8em0VKkdovj3Y4aLHVZUjdrAPfv3z8QNy7m4y33Uv4OZfkzj/vw4cOc3ULSL4vbYV2z3Ol4z0K32x3ag6EDwfF30lw1B/KK8emx+u4F3Ee5boJerxf81I8fPx7c/La2tqIaMjDsp677KIfePXXqVC5MMZDfAc1aBKc7CiyjdBm4XnL6kXMON2/eBGBrD+NokLvBRLf+ez/YLs3+3jxYWDWxzpzkLfL8ATgwlkCoHWDHYCNhB4D4yeNMbVgqJsPaxKTrK2myMbhIBa3X67nDLDQajUYupIDVQdgYXDTBee9zgbhkEFoRAy2KhZFlWRh8Dx8+HHkw6fKP2sn3w5OAaRDxKV5YWACQ70u8kFh91Uozdo2PChTMzs4G7w3eoMPB12QC44igZcbr/cQoFAUvOtKH6/V6EKxi1FiRIRzYmdS99+EYO/FTd84FeobbZr/bQ9ffov+AwVml8s0++OADAIP2mJTnS9r6n5CQkHBEMJXwubHdmHxoseWKxqor71Bjwx6HprXCxXL6RSfPt9ttk/rglZa1BIsK4GsioddqtRw9A+TpjFarFaQb1jL4ZCJR5Vit5K3sovpyHS0qiMvS7XYDncOGV6mDDpLE14HBSU7iqri2thbKG5OUuG0sysTqB7F+o9McBVoblO8jwZcuXLhQ6A4Zk7SKjJQCLXXzodx8ZFuv1xva/8DvtdvtqMG+qIwxV9Sy+7EdxvIO05nSB2dmZsL4tI5GZG2dQ0DLNQ7t8fz589Ae4so4NzcX0lpeXs4dtq5damPhdLkvlEnQVjvpfin7G+TZ9957L3e830G6M0482qJzLtewPNFoLhHIezEwrx3jTXmiYQ5T0uKYKNakw3SGFYnQmtCFSpJ0defodrs5f2I94NgL4tixY2ansWwGXG62RXAUR257fhYYLB7sE1+2ecXy/ZZybWxs4O7du+HaqJPrODTLfiLmASLqe61WC6fVW9x8EeWivz8PYiC/GQcY9DXxhT927Figf3SMH2AwWVrxe3ivRFkZyxbD3X4P7tcyHtbX10PUQulfbL/S+ekxqxd5+T7SnqdOnQqTO5D3fin7Zvy7LC5L2eLA5Zdn5Vi7arUazipdX18/0P6eKJeEhISEI4KJUy7iaaINRUBebYv5oWvDn3MuqJ0c5Y0PtWUvF6Y5LAOrpnTkfb0VXq/QLPlrLSN29J2lrjYajVy5dHt0Op2h0AJAflehtbXf8lCoVqvBGKfDDFi+tCydaA+hjz76KOziY42jjJqYhnSuYRnonjx5Euo2Pz8/RAGNUm7uw0X7AYAd7yUJjwEMv
IWk33G/1J4vkleZIVpLl1pzYGiJlfu1JV0z7cj9udvthnALfPoWG0qL+oL2rpFnOHooH2cnWF5ezo27WLnld5kxNUbXWNBpnT59Gm+++SaA/OHTZZrUbjAVDp3VEq6M3roP5CkXq+I6joqljlr58uLA78hv8fiQdPUEFjtj0fowW1tbuUGkn+10OoGv997nQhIwrw3k3d64I/b7/TA5W5Mp/2Ze1jqYARjuwMzRs5eGvPPw4cNofhZi/GWZx8NBg/uEUB/1ej3nDSQoW7SssAi678tzDx8ONlo3Go3gadPpdMIiaW0sYwpxc3NzqI/qsL0WJcP1Lmtzi27khcqiTph+kUl4bm4ulFHHvtG8tHZl1JPls2fPwm8Ov+ucC/RLUbgRXV5BLLSEVcdRwlAIpZZlWaBfxHtHe5XtZXJPlEtCQkLCEcHEJXRZhZkK4O32enXSGw+0MY6t/BwFzlKvNB3C1AOXjdO3yi9/5bfePKFpIaZ/WIVkacPyVohF4bOoGpbQy4xeon202+0gebIEbtWFKSr+Jux7vpu45zEpcVJ+uzE4t+NNdO/evSA1c8x4/g5lWoZFXQlYG3DOhU0qCwsL4ZtKO1er1dBXmH7r9XpD319vELPG1jjtG6PfLMToF2AgqYtGWq/XzQiV/D7nZUnAIvk7t3OcnRhiAYQDnjUFY5VXoPMp0hxHieooz5w8eRI3btwAMPB+AYCVlZV900iThABm61UAAB3tSURBVJ6QkJBwRDAVDp2Nk3rV1dIrS8L6mCtgIIUwp1i2CluSrnWN+UeGJYXGAleJxMTultbz1WrV3PWXZVnO1Uv+svQqv9vt9pDUpOtrSVXsummly+/q49EA4NatWwCA1dVVM2SABe3Cp3lTKcM0wZzz1tZW4LhFUteumTFpHBg2hOr7nNejR49CO166dAnXr18HsLPrcH19PZcuc9HMp0v6HNbC6qcxQ+FeY4lb/U7AGm2r1crtlSgy4rMrIp+BIJK/Ps5OtE955/Hjx+aYnqS9xnuPEydOANg5/ei9994LgeL2Grd+4hO6nmR4suPfTK1YE5SO6QIMJvyiOOndbjd0cDbiSAMWbc+3DLeSV7fbNVVPfZYm58W/m82muVAwHcVUkj6WTu5bnhhcR5mEJa16vZ5rxyJLv94PIIvVnTt3htrlqIDbQzw17t+/D2Cw8ajMd3lU6MWSY8lfunQJwE6skJs3b+bOouUyaIcAjuxoeVfp37pMjNg741Bj/CxvjhNDZqVSKQwTYI0dnhP6/X4uSqN4v3B8Hom3zo4Kk4YOSXLjxo2wYMteDmB3Qs3RG4UJCQkJ36aY+tZ/dh8E8m5jQD5ConbD0u/oFdxSfa046oIiKZUlewFLRFweNlRJHS2p2ToRidNin3QOY8BtEDO8AsMun1IH0USOHz+eo7gsoxf7v7NbnEiq4ho2jkGUpUW9U1hjr6r/foDLJRJgo9HA2bNnAQzHZBdYfalM0uV+ubS0FAzYYuy7cuVK0IqYguA2ZeO1aFKNRsPUKCyfdGvsjFKHMrAkzVrz+vo6gAH9UhQmoFqt5rRiuW8dQclRGvngabm/vLxc6ERwkND5zczM4I033gj35CDz3fT9qXm5hALQJhS+bnFd1qYdpk74vEG+zryc5ZlgbcvWeVmDk/2xOQ8dNtOK1sd5aR9wqwNbPDOAXIwI9gbS4HJJvo1GI8qbWm3D1/jILUl/N+CFBJie/3kZ9Pd79OhRoKtkwpD7e5kg2Ctka2srTN7yzV566aXQ5nIPsD2ivN85U9Y5l4sHxHXRv8sm7LI6xmw3MZsV2734cBdgOL5LkQcXCyJMk0l7njx5Mni/9Pv94ONfdq7xOOCF0bIDWHn0+zuHw7z++uvhvY8//njsvpQol4SEhIQjgqlI6BwYSxtItW94v9/PScJ82IW8I5I5e3owfWIdrWaplZ1OxwxDYPloa6nekta5DpYEJOXRYQW4nKw+A/kIfGwI43eYkpHfHIhLfIBjEoRFC/HzL168CJTLbnzPJQ/+q8thURh6R90kocurd3eK4c2S9kaRsvgZDk8hPtaiEVWrVZw7dw7AoL+Kes5p8L4OywtKxg5LkzGUhQmw+nVZfXW/k+fZsM+RTjnMh86LwTtnWfMXmqzX64V46nNzc6HvPn36NKfd7wZldR9Vs6hWq7nDp1kLGwVTibbonBuKUQHkP4h+B8h7eFiTnuXhwohZ+mWi4/vM3fPzfJ4nq1Y8sWnvFl4cuLx88gzXW9Qvbg+dv+RjqXcW9VKr1UKn5QFdZn/ga9LmDx8+DFSPlHW/J9jYgD0sYKpgaWkpeKPwBjGL3hsHXF+hD+7cuYNr164BGLhQyjd+8ODBUB5sc/LeD1GBrVYryvnrOEiWGyu/w1RqbEMc18t6Dxjeps8eYLyoWPYenYeOg8RujfPz82HRYPqFw27spk9z25UtdrH0ZZwx/TIqEuWSkJCQcEQwFcqFN8zwCmv5nGpVVMdDB2Bu99eGVyC+dZi9NzhfVmFl1Yx5ArDkHqML5H3WAnQbsKWe/eq1xAQMjGa88lvSi0iMfH6oFSc9ZhTja9LOi4uLUW+g/YLVhns1OO435Jusrq6G7eWvvPJKOOawjHKJ3bcke3n2yZMnoa9du3Yt+KdvbW0FH2vuV+zVJdQF/+XxVERpcFks6dXq6/o9XW8rXwG3IZ8jauUb09y0c0ClUjHDBIgfPLATJoA9iEZB0TiIOR7EwOP4lVdeGbkMQJLQExISEo4MprJTVLvxsaFJB9/y3udWLOadgYGUwT7a1qoa25KuJWXJQ+7J9Xq9PmSc1GD/WB1oi6Vy3lVq8Wk6rK/FkfPRZQyLO2cD2TiHWut61mq1YIB79uzZkCF3XJQZN2NawmFEtVoNkt2xY8dw+vRpAHk3WkvqLqtPTBKWvGq1WpDQr1y5EvhfkUL1NnLdPzY3N4M9pdFoFBp0yzhl7eY7qr2jjG9vt9u5QHzcn6Wcsfe03YnHpfZT1+GR+Tg7KafUTTCqdrpbLXY3GulEJ3ShFoomFIuOEGRZZsYHZ6Mmp1XUGM7tbD+2GjxmuI1t9+frsuhYMWq896Y/MBt7rQmbB1jsvq4vU0XtdjscABBrFyvONucrW9K1N1AZitTzowL5vouLi+GaxOyI0YoCfa2M+uADMDjuixhLJb6OXlDY60rS4mfKjiDke2U0ShFiNJqVRr/fz52Rq8/51YbeonR5851zO8fZee+D94vQL1mWhU1zGxsbU+uz4y4GI1Euzrn/2Dn3TefcHznn/r5zrumcu+6c+4pz7gPn3K8554bjvyYkJCQkTAylErpz7hKA/wjADe/9hnPu1wF8FsCnAfxd7/2XnXO/CODzAH6hJC0456KuOCwZ8krKRkut6vMKrd20NMXANIqlFsYMNFweNlJap7XwdatclpbB5bJOK2d473MUk+XLyqqmpFWv14foKu0fb0Gk9tXV1eB7Pq5Ll6ZXjnIgr36/HwIsST2PHz8eNfLq9/l6rC8yJSc0WLPZDKfiSETIDz/80Owr1v4IfapWUXm1G65cG9X4q/ttUZRN
7tebm5shXz5RjMMACNi9l90peS6R51dXV0O6YihttVq5w+U5mNhewNr4Qbjhjkq5VAG0nHMdADMAlgD8KQA/tn3/SwB+BiUTOrDjxVHk2cK/2Ypdq9VyAwfIe5X0er3cJoQiHlY/y2kC+dCkWZYFDw/m1Th93rzEeej71lFuzuWPmitqG20zsDb+cDRFUam1qqrTZ2ivG2AQBU5U31H483FU8KME53bi68gCWK/Xc/76sTbnNORazANGfsv3/fjjj8O3kkmp3W6HDUnAcGhqFmq898GzhEPacv+z/MFjdYiVOVbXGFgAdM4FP3Xpg7yBqGyCZL6exzGws/lI0pifny88zm63fXpcYWhclIpK3vu7AP4rAB9jMJE/A/A1ACvee2mVRQCXrPedc19wzn3VOfdVaZSEhISEhP3HKJTLPIDPALgOYAXAPwDwg6Nm4L1/G8DbAPDWW295raqxfyxHTqP3w2pYr9fDb1GBeMemll6KVBtW9ZiCYIu4ZdHWkSJjaXF5+B0diEunxaEOLEmGD/lgA6mVFr+nd9xJXkwFWVSPSCRLS0u7kkp4x944HgJltMNhBfc7iSJ47949XL58GcAw/QYMe3KV1ZnbRp5dW1vD7du3AQCvvvoqAODcuXOhL927dy86ToB8X9ja2goanWWwZC3D0qr1t7X+z1oCQ4/VWJ+Rtm02m7mxY40HrqtVH35HdpM68lOfmZkJ741z8HQMU5XQAfxpAB967x957zsAfgPA9wE46ZyTmWMBwN1YAgkJCQkJB49ROPSPAXyvc24GwAaAHwDwVQC/C+BHAHwZwOcA/GZZQs451Gq1qJ+qFY+cdzhyuFdtDJX0BcxlWzsyOQ63JbFIOeW9Ij9U3sFqgY1I7Gcu77N/bbvdHgqZy79Z4uXycpvysWSSLreHVS4t7Usacnjx48ePdxWIyzKAxnhk672DlGYOCtr+sLq6Gtrx/PnzQzYIy8hYBItjr1QquXgvwGDX6sWLFwEM+oIEE7O0H86ffb/ZxdaSfq0xNA5GMQxafUA0j/X19bCTVAeoi2kXGlaobX2cnQ7ru7y8vGc+/SBQOqF777/inPuHAL4OoAvg9zGgUP5XAF92zv3t7Wu/XJaWTOjcObiTWOdusu8p0wLse84fw4r1zaom+4vrzqi9Tdi4yUF7LHBoAD1gWA3nOrLvuZSRI+RxGuxRw361Fv3CvudyDJfeZKLrqyHPiscGb5gaZZIt6uSWR44ujzUYPymw6i6bgVqtVvB5tigQYHS6SRvQ+fAGYCAoCNWzsLAQ+rDc1xM6UyZ8LqmUu8j7atxJLUbPWOkVeQD1er1grD927FjooywYFeUfy7ff74fNR865MI54Yt8P+mW/MZKXi/f+bwH4W+ryLQDfs+8lSkhISEjYFSa+9Z+NQsBw+FxL1WaXPpEyrJC37HuuQ+EKiugMDoMbO46On7X8crWBS67F3BIBOxSwvMe0jkDei/ndCyqVSs5f16qPJVlxECPxc2b3sHFhSWMxl9Iyye2TAG1AZw3t0aNHoS/IdnOua2y3c0xqt+gXefbBgweB3uGdpFKulZUVc3coh+YQt8hKpZLT0OQ99tVmbXFU18TYOIrBag/5rekXraHH6B02pjI1yu0keYh21Wq1wrVpHmenMZUzRTnWeLfbzcUz143O29ezLAsNF7PYS0ezGtZ7b25CYEinZSqn0+kUxlTRscitWNIWj2/x1zE1mH3PrfrygBIVsNVqhfs8SPWiIXXhAf3o0SMAO3FB6vX6vk3o0+70+42yyYvvb21thUVS+O2ZmZmo10hZetYkyt5dkle9Xsf58+cBIEzsH3zwQfA9r1Qquclde1qtr6/n6Jci33KG5XetPXmsfmEtVDoNjX6/H7xfZmZmwrwhY4vPxdX56/bXZRY/dQHHf8myLFBqHAF1GvH7j96WvYSEhIRvU0wl2iJLntoXmlU4YKA68RZ+vcOLV3CWWC2KgLUBS72vVqs5q76lObCkLWXlE9V5VebIkZYkzUfBac1DoCV3LWlb6qq8w3QLPyt56EOoWfqXIFOWZjGKhD0qzaJV7k8imGZjqo79zFnDkp284vly6dKl3Gn3MR9tQUy6tegIyXdxcTFonxIN8sqVK7h58yaAvGRpHd/Y7/dDua0jGRnaE8uS0Iv+P+o9SZvrbUWSZOcDPX/oNATsJcNnFHA8dTGUtlqt4LO+vLwcDRg2CUyFctGdz3KHk/u8cYC36zO1Ih2mVquZm4msD27x9XrxYGjvi36/n6OCLE5P/rLHjXM72/ylw+lDMSzOntuIBzzbEnToYX2GquXlwr+lTVdWVvD48WMA+c1Tu0FMZbbK8EmFNZkyuA24X8rk8PjxY5w9exZAnru1EJscy/j2zc3N4M4o33R+fj6E3/3www/N6KCWbYi56ti5ueOgjFYqei62OHS73UC/iODE4XOtwzJiZWAhTOYPpmBOnTqVi2QqY0cWQF3Og+zziXJJSEhIOCKYCuUC5NVJKzIa0xIiBXA8Y6FGgPyhzLGj7fQ1ywum0WjkNinx0XbWoRQsobN0I1oEUzIC9hdm67vlM2tFn4x579RqtbAZgqUnpmEs7xtLKuMofVYYgXEwisr8SQd7sViGfW3s0/1yZWUl9KUzZ86U0lxMwxUZF/X7euNRvV4P9Eun08HHH3+cSx/YGZusDfb7/Zz3izwX8yYZddOUpc3FvGdGkdalHOIkkGVZbsxyfXS6Rd9Q6sSbj0RC73Q6YezwXGIF9SsyJO8WSUJPSEhIOCKYih86r1jdbjcnVfNqDOT9SZ1zQfpkntpa6azwAtYJQPIsMMzXs3HDkjIs6dWSIrSUwa6RUhZOn4/ZslwjdT0FUl8OrFQk0bBNoVqtBgnu3r17R861cL8R63fA6BIWu6QK79poNHKHFmtprshXO2Z01veF/11cXAwc+rlz54JGeu/evaE8tDYg/UbcHnXwNy6vHtMxHFSf4zABzKdbrsJlu3d5PHGYANbKZXyzU8VuMa4GO9EJXToFR1jUfttsDAXy53lmWRboi5i12gpyzz61vC1e0zccCZEHDvvdst82G0qsCZ19wAWNRiOoZ9aHZpW93++b2+25o0i+GxsboR6xLfpWHbiMEr97bW1tz2eGCsqMop9kWMbLcepqeaMsLS2Ftmf/9Fg0xnEoCE3VPXnyJOR1+fJlXLo0iIDd6XRCeACuq46RzuXa3NwMwpaOsz7OAreffUW3DRtK+bwDAEOhPXRZ9LfWThVC61j7ULSDxjiT9LjtkSiXhISEhCOCiUvocgiy5bfLftHsD84GJwEba6xQAparkZbe5f+SV7PZzD1z7NixoXwtlYxdzSwjI/vXs2+4rOp6J6rlo88HVnMdpewbGxvhUGLLddNyK2NpodPphEOgd4tRfc8n5cI1DrQkrIO+We6jAitan1yXtItikTPa7XaIiriwsGBGZozlMar7H38n0cqq1WoI5HX16tVAvwg9o11z9ZjSuzRj+Vlg7bZsd2VRHbkNrPj+rAmtrq4GaouD41nfKeZrz9eZvtGUapZl5hkHMZSNnyJ
MnEPXHZ89QIAdukAH2Jffems/h67VnV1zWJrzljTkw/JkauXFaekPag1ojivDap/wjvI+b6u3LO7ynuTPdZdOU6vVQluW+UJb5V5eXg4TyTihXItU/k8irMlhlEE4CuUxSr6VSiVE+bt//z4uXLgQrgPx/jFOuSweeGlpKfSfs2fPBm791q1bAHZ85qUsemFjmxNz1ZyfVa5YO3MZiybX2L1YNFVBu90OdZqdnR0SGscROvR40mnosL7W94vlMa7AkyiXhISEhCOCqUVbFElcb4sXSYTVliI1iGkDpiN4i7Vck2A68r6syiJN6NWQ1TAdXS62bZ5VLo6RzpK0gNVDSy3ksrOExhSQSEWNRsPc1WlJOlb6i4uLuYOuy2CpxjEPAY3DQrMwmPYbF+wtxLAiZJblzxLr06dPg9R85swZAHltT0uGo0ruFu3TbreDH3q1Wg1b2a9evQpgIKlbcb9ZUufxYHlnxTSeIqpOx/HXz2ptsoiu0J5t4kvvvQ/0KjtdjHqcnQ4ToOuuz0Cw9oPs15hIEnpCQkLCEcFUYrkA+dWLpVCORwwMGzosv1w2AloGI5GYq9VqbrW0dn9a7ocsJVinKmneTEv8XIdYrA4r9owlzXMdq9VqkKpbrdaQMdRytdIQQ9b9+/dLpdMiKeIwSt37hd3aB8bh4GOQMMbSh+fm5swAc1Z+o5Sb+7hIrHfu3Al9QQztly9fDodQW5Irj81+v5/zT9eao+bFdfmLMI6mM2o6zKeLPY01YYsZ0O3OdddlZJfPcbS23WAqW//5XE0Gb83l5y0jXSzWOL+nJ+Esy3KTnvYT14uBpdZZfuhsXOLwAfyOXGOqhi3flvcEG3mtjUd6sSuDnuhrtVqIqri6ujoy3WB1WiBugPukG0zHMRJb7zEFMW5bCKUm3ij1ej1n/C6bJMtgCTMvXrwI9MvLL78MYED5SFnu3LlTOGHyQsMBqtjJIBbewGqnmMAlGHXBjB0eAuy0M8f/Z5rVool4fmGnhqKAdpazxX5GZ0yUS0JCQsIRwVTcFvUKHTOAyPMCXhWZLrFUTD6BxdqSXGRc4jTkmrX7y6oDn7DEbpO8amspRKtsLClzgDD5K+l3Op3g1152RJxFt3jvg4R+ULSC9U0/CdgP6dei0caF9DExSD548CDs6ORvvhsXSX5PS6xyALIcEn7lyhW89NJLAAb9Tk5C4vzZfViud7vdoUBeRX3V0oYs6nK338OiVDktPhxb/Om1o4KG3h9TRNUwO2HRw1wupmpGxcQ3FhUdEce8lXW2p8U/dbvd3CTNapK2MmtrctHioTuowIpLzrSP5SESC2+g45VbYA8gSYtpH+H8Yp4WAvZfljSfPHkSjs4qUkctFPn46uc07/hJoGCY7hp3UAn2Y0IXSBlevHgR9guIjzpgf7Oib1L0DAswwuHzxqMLFy6ECU4O6dBpspCmox6yvUeXu6id9oO+s7zjeMFmqohDf/CY0+/r9PUcxmMrZn+ITe7j1jdRLgkJCQlHBFOR0GMrj6XWsPGRV2g++NU62Nla9WP+rwLtM8uWaesoOAGfXsQSieWNwFqKbhtdJosmybIseKbwCUuxeOfWlmO5v7S0FAwyluZRBMvab2E/pdRJgr//Xumo/QC3s9AhjUYjnEIPDO8DKMufx5beJa3x4MGDYIy9cOFCoH2EEnz69GlubBSN2c3NzZAWj4Xd+GNbkq3UoayPWpoKl5v97qW8vFO8TNOxggJa4Ut4PmQD6W40w4lO6KKO6Am9SOXij8y0gWUZ1zFdYi5GnLY8q9PnDsETOn+EmGeKYNQJgb1kut1u+PgcEoAXO7k2OzubqwNz61Z5NB9bFLuljI46LIh923HeY1gc627zOChIX3n06FGYEOfm5swJ2So7T/yxWDDW2JCDMWq1WjgYQ2iYra2tEH6ZYW0M1Ic9xMLuFiFGFVnPxL5fEV3IZdnY2AjPysTO8ZfKyqU94vg6kBdEOQ1+b1QkyiUhISHhiMBN0gPBOfcIwBqAxxPLdDycweEtG3C4y3eYywYc7vId5rIBh7t83y5lu+q9f6nsoYlO6ADgnPuq9/67J5rpiDjMZQMOd/kOc9mAw12+w1w24HCXL5Utj0S5JCQkJBwRpAk9ISEh4YhgGhP621PIc1Qc5rIBh7t8h7lswOEu32EuG3C4y5fKRpg4h56QkJCQcDBIlEtCQkLCEUGa0BMSEhKOCCY2oTvnftA5965z7gPn3BcnlW+kLJedc7/rnPuWc+6bzrkf377+M865u865b2z/+/QUy3jbOfeH2+X46va1U86533bOvb/9d34K5XqD2ucbzrnnzrmfmGbbOed+xTn30Dn3R3TNbCs3wH+73Q//hXPuu6ZUvv/SOffOdhn+sXPu5Pb1a865DWrHX5xC2aLf0jn3N7bb7l3n3L9xkGUrKN+vUdluO+e+sX190m0Xm0em1/dka/1B/gOQAbgJ4GUAdQB/AODGJPKOlOcCgO/a/n0cwHsAbgD4GQD/6bTKpcp4G8AZde2/APDF7d9fBPBzUy5jBuA+gKvTbDsA3w/guwD8UVlbAfg0gP8NgAPwvQC+MqXy/RkA1e3fP0flu8bPTals5rfcHiN/AKAB4Pr2mM4mXT51/78G8J9Nqe1i88jU+t6kJPTvAfCB9/6W974N4MsAPjOhvIfgvV/y3n99+/cLAH8M4NK0yjMGPgPgS9u/vwTgh6dYFgD4AQA3vfcfTbMQ3vv/B8Cyuhxrq88A+J/8AP8cwEnn3AUcIKzyee//qfdegpr8cwALB1mGGCJtF8NnAHzZe7/lvf8QwAcYjO0DQ1H53CDQyY8C+PsHWYYYCuaRqfW9SU3olwDcof8v4pBMoM65awC+E8BXti/9lW116FemQWkQPIB/6pz7mnPuC9vXznnvl7Z/3wdwbjpFC/gs8oPpsLQdEG+rw9gX/zIGkpvgunPu951z/7dz7k9OqUzWtzxsbfcnATzw3r9P16bSdmoemVrf+7Y2ijrnjgH4RwB+wnv/HMAvAHgFwL8CYAkDdW5a+BPe++8C8GcB/IfOue/nm36gw03N59Q5VwfwQwD+wfalw9R2OUy7rYrgnPtpAF0Av7p9aQnAFe/9dwL4awD+nnNubsLFOrTfUuEvIC9QTKXtjHkkYNJ9b1IT+l0Al+n/C9vXpgbnXA2Dj/Cr3vvfAADv/QPvfc973wfw3+OA1ckieO/vbv99COAfb5flgaho238fTqt8GCw0X/fePwAOV9ttI9ZWh6YvOuf+EoA/D+Avbg98bNMZT7Z/fw0Dnvr1SZar4FseprarAvi3AfyaXJtG21nzCKbY9yY1of8egNecc9e3JbvPAvitCeU9hG3u7ZcB/LH3/u/Qdeaz/i0Af6TfnQScc7POuePyGwMD2h9h0Gaf237scwB+cxrl20ZOOjosbUeItdVvAfj3tj0OvhfAM1KPJwbn3A8C+OsAfsh7v07XX3LOZdu/XwbwGoBbEy5b7Fv+FoDPOucazrnr22X7/yZZNsKfBvCO935RLky67WLzCKbZ9yZoEf40BlbgmwB+elL5Rs
-      [... remainder of base64-encoded PNG output data omitted ...]",
-      "text/plain": [
-       ""
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "img = torch.from_numpy(cropped_ascent.astype(float))\n",
-    "print(img.size())\n",
-    "print(img[90,90])\n",
-    "img = img.clone().view(1,100,212)\n",
-    "print(img[:,90,90])\n",
-    "print(img.size())\n",
-    "img = torch.cat((img, img, img), 0).float()\n",
-    "show(img)\n",
-    "print(img[:,90,90])\n",
-    "img.div_(255);\n",
-    "print(img.size())"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 6,
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "image/png": "[... base64-encoded PNG output data omitted ...]",
-      "text/plain": [
-       ""
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "show(transforms.Compose([\n",
-    "    transforms.ToPILImage(),\n",
-    "    transforms.ToTensor(),\n",
-    "])(img))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 7,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "torch.Size([3, 256, 542])\n"
-     ]
-    },
-    {
-     "data": {
-      "image/png": "[... base64-encoded PNG output data omitted ...]",
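The deleted cells above exercise a small torchvision round trip: a grayscale NumPy array is promoted to a fake-RGB float tensor, scaled into [0, 1], and then passed through transforms.Compose([ToPILImage(), ToTensor()]), which converts the tensor to a PIL image and back without changing its shape. Below is a minimal standalone sketch of the same flow; the notebook's cropped_ascent (a 100x212 crop of SciPy's ascent test image) and its show() helper are not reproduced in this hunk, so the random input array and the closing shape assertion are stand-ins, not the deleted code.

    import numpy as np
    import torch
    from torchvision import transforms

    # Stand-in for the notebook's `cropped_ascent`: any 100x212 grayscale
    # array with values in the 0-255 range works for this sketch.
    cropped_ascent = np.random.rand(100, 212) * 255

    img = torch.from_numpy(cropped_ascent)       # (100, 212) float64 tensor
    img = img.clone().view(1, 100, 212)          # add a channel dimension
    img = torch.cat((img, img, img), 0).float()  # replicate to (3, 100, 212)
    img.div_(255)                                # scale pixel values into [0, 1]

    # Round trip through PIL and back, as in the deleted cell; values are
    # re-quantized to 8 bits, but the shape is preserved.
    roundtrip = transforms.Compose([
        transforms.ToPILImage(),
        transforms.ToTensor(),
    ])(img)
    assert roundtrip.shape == (3, 100, 212)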
r3loMBdVqc+T5YLECUxnAct1Wma+zGnEwev3OcOAAR4Z\nT5uQCB2RXBAzOIqbCZ2qHFC+INDM43PZDxmXl2Vc5sTLNm3ew7Jmp6XzvRmXN1v0sxQE3h7OcCEU\nRAfsaDTqKQjxvScXojBh0N7eXl3cNjY20sMEQpyVEu/KHH58RsYouXTpUp0v169flzRfMHlUVwTl\n7O3tnRocQz4zg4aoLBEKkebwSLz//v37vQN4fS5I6sGSmVOU850bnm+obK+tra2UX86EZqexwTY2\nNnr96PnL3dGZMbc432NDY1xEJg0eadKkSZNzJGcGj3RdHg3ocIW0CPC704ifsyOaaBKR40mhIzI0\nJXIt6XyklstdnhpMvNfNSKmf3pNRZrQ8HJ6gNZJRhFyrd6iEVEZqaGwLOoGzJFAZvZHmeAZdUfvO\nzGp3dGbaEjXuzPnGurDtSQtz5x8/sz0zDY2OSmpqtIxCw6Ip61py1CuL9Aw5Pj7uhbdHn9DJF+LO\n0igrLYxsjjACkNS2+Bxa7s7OTq+/yFX3OcSxTNpi3H98fNyzNqK9wuF469at6oxlP3Mss78yi8qd\n3nGNlrFr0rSsmU4j3k+rgXAS3x/jjOvU6upqDYnnddYrg05ZlgyazOTMFm0KsTsyETjZM8ghm8D0\n8GbwCgdqxg7ggklucwYzkKxPXNQDM6S5OefskVL6hyBwUXR+rh9v5eYyB3fXLQamEL5xZobDEM6m\n8PZyHjfbxnH7DDZivWie+mbD6/78DAtn8IIHJvgBEYRXPG+L1J80GauF/g4ulJkJ7qkCHIphvcfj\nccWUB4NBXSTI+WV8QwaPsJ2zAB8uHISYIijs8uXLkuYwSWwgBwcHtVwc64Ry2DY8CCHaIp41mUwq\nfh0L23g8ru1JVhZD4glpZIE+UT8eqOD15VGAUh+bHg6HvfUn6kp4hX3nUBfLwngOloNtlM3XLJ82\n2zmTBo80adKkyTmSM9G0pUVHjmfMcrMmdunM+UIhKyCeSUeTazlulvAaNZZl2gu169gNqbXTgsi0\n50xDyt7FXZzaHJNq8Xvnn1NLde+7s1KoXWSajB/JltWB97C/HEJy/u4y+CKeRW3LnZ6EObw+Do+w\nXqxDiEcTugZGDrNzswkJxPcZM4htwPfHdTolo/zMvZ21t8MhGW98WXtGHSO0/emnn67JmkajUQ8y\nC7jGQ7W9viGERPb29qrW/rWvfa0+P0slwRPSsyyXPHiY7yKER+sti18IGY/H9XSf4KrToqJTdHNz\ns3dweDyfFl1meVM7J0zl12j9zWazU7P8NU27SZMmTc6RnBnlT8rxNmplWfRQpjWRfpNpRZ4Xgc9y\nLZP4t/Oso1ykqDHhTVaHLL8zubfcZTM6Wvxl8qplmmnGNV9mFVAjdU4trSA6ZllvWg1sI6fGMTrt\nwoULVWNg/YhDZ9Fh/C01MC83P7PtmSsls86IKWeWjdc92pD3L7OOQjIHe4i/J+7f39+vbU+KJrVa\n9+9w3tDiou+AYz6jJYamPR6PK/1vMpnU021Yd45fWppRxnBq7u3tVWv59u3bNc9JZpH558wHRX+V\n18spqdSOQytmWdl24QyNa+PxOE3XSj4940GWxUXE89n/GYU3xmnXdT2qYXaObsiZnsaeNTzB+8yE\nznJCe9avuM6Kulnk78iEHcYgA3YCPf1uLrvJ6JsVFx4uhMsmZTaoaVJmbcT6MziBEzsLi+aC6Gwd\nBrksO+iYkgUQhdDk5GaxLMCH9clYQFFHBoasrKwsTDxu/l6G+MsAEP9+GZOAv+Fiwjb2/OTO1Wc7\nx6IXR4Vx0eYm6U5bFy4Yy7jbni97a2urhrlfvHixwhtsD/Ytr0U7x+J8dHRUF8Q7d+4s5LHnJuuK\nkW9Ms1k/a6czqegUZTtTIaPznQ5aZ1113aPEUByHVA5DCEGORqOFYCAqOE5ycGWHZVzWpyENHmnS\npEmTcyRn5oh0SpZzW/1g10wryfjQWW5uapnUqtxxFd9TA8tCaDO+sztP4n5aC645DgaDXnrYTJMO\n8RBuOgXjmTx5xM1qUiEJSayvr/fyJ2fljuuEGJbBCA4zOAzjWigdPVH2+N7bgM/iIar8m3FiOb6W\nQUtZKD01z9NgiPX19V5SIyZ88mcRhuDpKTweLspIx1Zo3Dyo1i25KBfHf6bBhVCjdY68NI+SpFM9\nynLnzp3eocfSYkpcP/Hp/v37KWWQ97BchDvjuyyMnu1AOIHzkrRbP22G1h3HCbV6vp+WnMNghNkI\nw7JtaNlkKYXJ+Q7aJGmemZxplj9OBD9nkIOIJnyGzTEIZhmjhIsFB7DDH74IubBjGXBAEj4X3ygP\n8VpyRDk4s/wSLNdpQSrM8cHryzaoLECJ9Sbu7tDCYDDoLVLZAOd3bv77X5qJGRMlO2CAfZqlPlhZ\nWel59bOcExlXnH6BrCzZtczspjiP2/OYcDE4Pj6ui/5oNFpYGBjTQBM7g7h4X8ZO4e/JJGEgT8Ay\nly9frmHsBwcHCwsSIbfJZFL515FD5N69e/UaITcygFhW9qPDfj7GfENlHZhbZDQa1YUwWzCJU2cw\nLMsiLeZC4ZhhGbgGcGOMfu66rseOi2tUQE5bkxo80qRJkybnSM5E0w5YgSwP37WWaVWZNznTYqU8\n1JRarHNa4/ncGT2Mnc5JOhJHo1GPSRJ/M09+9nyWl1ogy5rt6MueFZJpBnT8ZlGEPFqKsI9bJf58\nWkTuDfffZ4wPN3kdYmEdM+chWQMsA7V5OrsIy7hlQjiL1kLGDKFQo2cMADUo1zKXcXpLKb2kQfFb\nHgLszm+Ob4qPa68Dx07m0HvqqadqGUajUeVXE6aLkPT9/f3qdIxrR0dHPa5xjKXQNhn67tz/OFmd\nEFe8d2NjYwEOZfSgO7Qd/uD4LaUsaMd0tJOfTlgz+pyHBK+urvbWjCgrrXRq0g55EXIj/JvJmVH+\nnJbmIdacUKurq73J7xVkA/FZ2QTlQGVGtmzBZNBDHAf14MGD3oLHBOxupjm1LsPHuUhkWHoWdn1y\nctIbCCG+gPIaNw0uiFkGO5r7lBhEw+GwFwzFRd0XUi76UTf+dajncZ7yzBPPOrIPuOn7ZCVOyM3G\nGQFRRodiiJ8Tssj8ET4OMiyez8+ohlQOYsHY2tpayHToMMNplL/MbxDlDSFUEkySa9euVYw9II+9\nvb36eTQaVXogM9xFePz9+/d7LIt4fozpS5cu1TIeHBws+COIc1OJ4+KdwanZ/ODYyCBO+nxWVlZ6\nxyG6QuebiQe4ue8mS3eRZSvlmpZJg0eaNGnS5BzJmWjaoclRE6ZDTeo75uhkownMBDNhMmY7L0PI\nPdGPa7/UwCaTSdWwl3EtqbU4e4S743A4rDt2/KXG4KYqoZB4Vhamnt2TaaHLeLSnOQnjs/PeaTJS\ns2NipIwXT+2C5Yrfbmxs9IKcMn5u9qxMy2S5qe0/LmE92zXToJY5hDLHalYW/61/75+9XMfHxxV6\
nGAwGvTkkLYZEZw69kMyZy3IR/pMeBd1cu3atMkF4NFlAIWSKLMtlnQWehKbtyboyyI2EBYd9lvUX\n51BozKyjWznR3lxH2M50JMb7yf92q5JrE2Gf3d3dBfiu67q6ph0fHy8cuUZpmnaTJk2anCM5E007\nnDw8pcF3/K7rUscDw6rJc+Vu5toeNfmu63oONdfmPCEVuc/xPXdLlsExLD6XjgvmGiYuRm2HNDc+\nJ97r2JuHJGdtQO0jw/kyjJfaLfMhZwmKWE5aM+w7x4xL6UfC0XLxMlJT8WjSaFdq+OwPUvmkRa3e\no0g9JJmOxBCOE1pMGXfZKYNSn97l/RXXSVeLeod2u7Ozs0BR41xyPrPLMg2fY41c9ej/K1eu1KPJ\nXn31VUlzX0/g2FE+Pmttba3OJXeqR7nZBvFbOhqZSpfimjbTwK6urqbOzqy/aCXx2fSd0SEcfRLt\nsrLy6Mi12WxWy8AxHZb7/v5+jRZ1n1z8llz306KOn2jRLqVclvQPJf0HkmaSflbS5yX9iqTnJH1F\n0oe6rtvJ7o9FmJMmJDujj5VhGHDGRMjy73rS9iyogiHPdIz5cWN04pFTy0bmRIp3uUc77uGAo2l1\nWi6MDP7gUUcZE8BDZR0W4m99YfKjsrixueMrc/iSlcIgkviO93CgZmwctq3nMSFPnOOEk4L1Yr19\nEyJk4jCY1A9TznK9SI/GNbMijsfjNKaAmySZQwwYiXJHWXd3d+u4ClN6MBikvHfK41hGIbyXm+TG\nxkbNhheLEPNtc5EhsYBjMb4PyIUc6clkUqGDyWTSg02keUg/N6aQWOg5L6koTCaThbzVg8GglnE4\nHPb615/vRxE6xEOnZTZHV1dX63PZz3QYZ0E0VDQzeVJ45O9L+j+7rnuvpB+Q9KKkX5D0G13Xfa+k\n35L0i0/4rCZNmjRp8g3KYzXtUsolSX+m67qfkaSu604k7ZRSPijpRx7+7MOS/rXmC3kqbu67KUwt\ngZAItS1q3JmpSS01ozll2gW1hIy+xWOTGHW3srKyYKrG+6KspNlJfT60m2EhWeg5d/l412g06mlw\nrokwdHjZzp5xTB8Xcux0TM/ASO2Xdcj6hlo7aXLUZKg1OeXKhZZctDkhD44vhy+8XF5uhx04pnx8\nMa3A2tragoXpNNMYX6PRaMFxy/F5dHRUaXah8YaGGpJZdRmktkyyKMOtra0Fq+zw8LDn5PW0ALTC\naK3SmmEfBbRAbjOhIlphbjWWUnpWJ5/rsB+jqY+OjnoQTvyNdx0eHvb6KSS0Z4+M5XyKayQ2MDoz\n5lDmVM8sRcqTwCPfLelOKeV/1VzL/qSk/0bS9a7rbj4s0I1SyjPLHhAVYmPThI6KcKJkiwAXHqZJ\ndLxsZaWfmjPLW5BhVX4+nLSIp5Ep4pxZcq/X19d7EEy8kxsEFzIf1CwDP2cYKRdVZlPjIsTF1XNl\nLMOGswnOxZVYOMtK77uX3yEwlsvb2jcPx2Dpo+DGxMmU4f7ZQusso2hHbsyZiU44iSyk7DAKLmhs\ne3KXva6EpmazWcVFYwEgd5sLdcY48k0yW9QzfnnXPTq0JHDsO3fu9HLY8LiweD/HbHwmXMH2Irzh\nMCjzr2TQKscpN4soB+tI+ISbJ+tKxYprzrPPPitpfr5ltCcVM1+IuY55jpmQGBOXLl3qpWl9vcE1\na5L+pKS/2nXdJ0spf09zjdrV1qVREh/72MfqwvTOd75T7373u5/gtU2aNGnynSN7e3va29v7piza\nr0h6ueu6Tz78/7/QfNG+WUq53nXdzVLKWyXdWvaAP/tn/2wPEqEzqhYEmgx3QE80FZJppjQtGSVJ\nTdghC2p7GVOFGgOdXdROCd9Qe84808vYEoQEpH5mP2qB2aEAfFemqTv7xJ0fh4eHqQUQ4kwVikce\nOsc+JOPU0sKgczAkS5DEMjD0l1oixxKvEV5zDYvlXsZk4bWM9cQxtyxqTlo87ox1pHUUkqVnuHnz\npqS5s+zq1av1OZmmnUUiZwwFZ1NQU/YEXG4ZO6xTSulBRHyH1IcdV1cfnWS+vr5eIZ9sTBJuIkyX\nxc5FE2UAACAASURBVGXwsOG4//DwMI2VYJ05vgh1vPbaa/Udfn9mTTj7hBaoO7KPjo509epVXb16\ntVqKwdRxeawj8iEE8nIp5YWHl35M0mckfVTSzzy89tOSPvK4ZzVp0qRJk9cnT8rT/muS/mkpZV3S\nlyX9FUmrkn61lPKzkr4q6UPLbg6ciCq/cy3dmZZFohHP4o7O/AB8djw/fktHz7IoSc/x4Rhy5pig\nxkHc1DVO4ojUqlh24sVZgqxM2yN2xmts70yjzd5FPDjDgJ3X7ri6R9U5duyO0AyjD6Fzh9oetfYs\n5zhzKsfzydnNND/ivfSjkOa3LGeKa2yuqbMM/n72LduZmhhzOjtWPxqN0mi/aF8KtW9vey/rdDqt\nc+HOnTv65Cfnhvbv/u7v1rLQf0ONMsrHue3Hv5HSNpvNakpYH9fxfFpEpMp6XUho4Ik7WYQh582y\n/CtxfXNzs+Zf4buYatatDUm9cUj0gLTDeH98v7m5uZB4jfJEi3bXdf9O0n+YfPWBJ7xfw+EwXSh9\nEY3vfeGN69LiIQnOaWSgBDvROZjxbDZQmGZ0khCyyLjNmUPP4R7WLa5li0Q22bkA01yjWe6bINkt\ndIA+jjlB5yIXWjqCYoJl51iurj46nMHN7XhXxozgosmJz2CMLPApg9TY9vw+O36Ni+Qyhgt/50IT\nOOu7eDaftUxB8fuijZgJzp1Z9+7dq2OZJ5lnicV8M3UlhwFwo9GoBs28/PLL+uxnPyvpUb5swiXc\nXLOYAcIBZIQwpoGbuYfq+xz2vuE7OQfZThwHGYSzzNEfv/VQe29bj1+IsmQKBt/LectYhgy6CWlh\n7E2aNGlyjuTMDvYdjUa9ncg1He5U8Zu4NzOxeR/fI/UdCNTgaWZlfGs6HeP+LH/vsjpk5eezeI/X\nJ4M/+CzX9FhWpnF1Xmrcn52CQ60tC0Mn35VULZ4Q41YMw/ddS5T6SabiGXG/O0D5Xr7L6+rtSSpg\n5pycTCZVO3WaVjzHtbjHpRWgEIbI6IV8HjXDjHJKbS9LeSupRiaurq72Igu9vailsp85l6I9jo6O\ndPv2bUnSpz/9aX3uc5/rvZfOflqrbC/OK7ckB4PBwulVUW72WTyfMIdr2owK5elQ7Ac68Dn+vR+9\njdgfnmJgGURJOIsWKsPy3ZnZdV0venKZZSed4RmRNEsyPJCYpXMlfQFmKCpDTZdhqTRrHC9bXV3t\nmS1ZGDFNeJowjueyYxwGiDaglztb7Jct5BlkwYmdsTi4gGf4NBemjPWShYWzPU5OTiq/lrlT6CWP\n52b5uGnOU+Ke0WjU6zs3/bnZsD7xHgrhJAZJhbANoh38b+Z/yWA8b8+MicIxm7Fp/P1RJ/eTrKys\n9JgXMf6uX7++AE05Y8Q3g/F4XDfdW7du6dOf/rQk6eMf
/3iFSgi5RRuvrKwsnIPJNqDPhPk7Mmhs\nMBj05pDUh+HI7ya8w1iL+DwajdKNyRf9KE88M+qwtbXVO7gl2obHw2WxI5mPgmkSCLMyjQOzmDq7\njtLgkSZNmjQ5R3ImmnbXdb182XFN6u/MvEaHXrbrMLzZTXQ6Pmh20wFKs4fX3OHnTjp+7zAC68Vn\nLWO1EM7J6khrwLVfWiDUXjPNlfXOohidS+wsB7YxtVE6uzLeMsP2GflKDStz7Gb8XLZxluyLFgAt\nNfbjsmjUeFYWOkytKeMlk1VCq4Dmtodlu5WUWRNZVB7nCK2o+Ly7u1vb+dq1awsMLM4FapTxdzwe\n19zdX/7yl/WJT3xC0jwK0hOh0XLhOCDzIoRjhlx2zn1aT24J8rDpjY2NBSbKaDTqRWRy7pNZE+/K\nTq7hOAqtm/14cvLodPoQjt+MeEA4djAY9NrEj1tkaPvjTmNvmnaTJk2anCM5E007+KUZhkXthVoA\ncScH5Z0e6LiVY4PUErkLxvfU6l3bIs2KWPpsNqvJYDwXRVw7zZnA35IbTU05ywdCJ0f2WzoMl50/\n5+8nxkoNnho5rQVqJZ4Tgv3hBwpHuTgOXHNkWUmny9rS60JrxDFhUuuW4cwZxYyaFv0ZtEzc+Uuq\nV2ZdOqWVuGfG5Y7PxHbZB+yvSLx0//79npYXz4lr4/F4Icr2zp07ldr3b/7Nv9GXvvSl+l7XDKkd\n01LLrE63rqKspNKGZFYjn390dFQ1Yb6HaXs5pnz8cI5nVinHCXFo9hlTMrM93crY2trqlYX1Da06\n/BHkn5+GZ0tnCI+w4SlZuC4XAzIfQmiSkv+YmYw8yJYLDp0cNFsdhpDUOwYo3rW1tVXfQS94lmDI\nvcrxfi4CrJu0GFzjG9MyniwnOCcYze6MEx737+/vL0xQDlhnjMQikR0NxUXXGQJ8f4hzvn1B8vYq\npZ/TPAtrzoI9MqeR88R9rLpywLZztgPLSEd3dlgG7+OY44LFMeELw8rKo+RodJbt7+9XPn2WGuHk\n5KQudHFs2Je+9CX9/u//viTp85///KmHzw4Gg97GGGXgXIs2IFmA7Z1lziObhk7LjHHEBTGe5WHq\nWY52KjDkVEt9B+ve3l4arMbNjnxrsoCib3hoSrZmhYOXcBPncyYNHmnSpEmTcyRndtwYnUOZFspr\nUt9BmTnZsgg/aurkiGbPonZEulC2W3pCoHhHCJ0cNAnppJAWoQU6y8Is5rPoPPQwYVooa2uPjnai\nk87DfeO3TsPzPN9OqaJ27s4wP+nH29gTcNFK4n1s36yOdKKxLI87TYYwRca5zrRr18qlvrZIp6XT\n6LxetDaiLQ4ODhaiJOO9bHOpzyumtp/xoXn/0dFRz8Eo9bXY8Xhcoxu/+MUvSpL+7b/9t/Xz4eFh\nL4rxcfCUQ2pSDmlxPIQlcHR01HOwe3Iq54Gzz+LdzKedRQJnlFemS2VYOed7Vq7MQZ+tTdT019bW\nahj8+vp6tW7CUmUZ6JzO5EwW7dFo1AtSce+61Oc40+SjCcPGyBYON/ulRe++L9o0/Zl5LOMtT6fT\n3onMzun2jIIuvphkJnIWXJPlcvZQfTfdnEucDcQQ3yx90rA9aQZm/HFCW+Sesj2zNqIPgO3ExckZ\nIfytsxE81/IyeCPjVrNszE+etQdhGY5T+hg8NoBMAkJ5HGtZ3pmMueMpHehPCGUkxiwPUbh7966+\n8IUvSJrzsKU5YySDaKTFbI7LQrwJxbCdIugnnrO1tdXL5UGYwTn0HP8cy5yXHkQXEtBmLOo8X7aU\n0jtPMupC/xHHgceD8D3cLOKZnN8bGxv1+sHBwQLWTqWCrJNMGjzSpEmTJudIzkTTDsYANTpP8jSd\nTpdqWNxl47dZmPoyRyedN/EMavXxrtFotADF8NBQloWOFHp96WH2OjpXmM4m31mpiWeZ+TxU3800\nRhMSqqGDMoS/ZZtS+8+8/jQ1abbz+d7ey1gv1ND5XRZNyjYmJLIsgtOfT+dhdvoJYQSG8nsfRBmz\n6EtaTG49sY3ocGZ9M0531I3C57slGdpcaNcOiUTE48svv1zfT5ZFlIWwIduAVo5zwsmEoSYc9+zs\n7FT2FetIpzgz/4WmnMFgnCu+xjg8R5lMJrVtGEfA8ZnBLlyHMniXp/CEhcET69nPtFSpmV+7dq1C\nVS5nsmhHIbMw9CwEnZAF06kyFDrDbon9MXcIWR7ODmG4d4Yj0QykJ5/n5tEEd1YHn8uFhwtHRv+i\niZ+llJUeDSgu6iw3n0kzzKlLg8GgZ65FGUk/o+nPCehmMfsxgyGIj3IisQ6cHFyQfGJ6u9G8dCjF\nF78s+IbPi0lMX4FPWr+PQRvZuCZ0kEFMxDK5CRMa8+cybYD7GGKBJvwXC8FnPvMZ3bhxQxT2x9ra\nozMcl1EkMzYEF7yAJrI5dunSpZQmurW11cPd428s8NPptJdFMr7PICauDTyf0ecln8UFlTlCuu5R\n0A3DzWNj4QbBDTfmKM/U5IZK31t8vnr1qt73vvfp937v95RJg0eaNGnS5BzJmQXXUIsYj8d158uI\n8ATiqSmH0GtLriTNTJr4NEEyB1RmyjqvNMpCTd3NeWrlmdbumiHL4jxrah9RTj7DHX5er67rehpF\nZobRdKTl4vAJoR6WlRZNFtTjLItotxAGOmSsA2qRGXTgbezMC39uxlShU4n8X2cRucPQx4lL9l7y\ncGnlZA52PpcWBtkh0lwzpRVExlI4+oKp8Nprr+mll16SNE8IRQ086spnBXTAfqIFQivLudM8QZ39\nyHFKgkCWloLXAl7IGGCcVxwTPu7jL61Oavgh5JxnztiQgEG8DNE36+vr1Rm8v7+fji8iAleuXJEk\nfd/3fZ+ee+45LZOmaTdp0qTJOZIz0bRj96QW4SdGSIs8XamvkWZ0o2Vh30x5SFrPaaeMsFzEJ+P7\no6OjquU5bzfKku36Wag+tSbSkBzriudn2gU1NM/1G++Iv/zMnNfxfOKD3jfEod1CYMQX2zU+u6OP\n2rdTtbJ+Po3mR02KyYEYUZY5tIlvUzun38C54hy/pBcSz80cY3xvtNHR0VEvDoCaH5MVxfMzq5DH\nVPH7uG93d1e7u7uSHh0C/PLLL9cc2RnOzPvZL5PJpGqi1MqjvZm/POo1HA574zu+j/oR46Wvib4v\navfR3vQHsA/pAyBtN/NdMAw+6hVWyWw2q23LccLxl60X7If47e7ubm0jphGm8zrKsr29rXe9612S\npGeffVbXrl3TMjmzMHbyYOlsyIIMnKftE4yLCCd+Bl10XVcHGlkQvJ+DNn5LxggHXMbZzvJEZB3O\nBY2Ll3ND4x4ulA4jOIzhphsHGXNW0GyPNmZCei7K7mCL7z00WFIvm9pp3GrfkDM4aRlvOcoR72dC\n/CzQg3Ugo4Nsh4wP7XWLcnvZ4/kZt57XnMHARZ/jLzPnSym9MUUIkO+R+gwXQgpxiviDBw/q4uQw\nQVwjl5xjzck
AZBy5EzfKGsLgLDrPuQkSqiHTyJ/FsnCcEKrkepD1c0h2oALv97Uh7iXrhetMlCtg\npexM2/gcz41N4/nnn9fb3/52SXNHZLwjkwaPNGnSpMk5kjPTtI+OjuruMhqNFjJ8UZucTqdVc7t4\n8eJC9BCdINQCCRdkFDPm4g2hNsdoQNKVKMsi6KJ8jJJcRiGM51BLdCdblhktnuvPokMlc0RG3byM\nWcgz6XKk25GaxKPaHIagRjGdThdoUq6RUQvNOMi0yFyrdkdRlrSKVlp2H8vLvnWNlp+p8S6zFlhP\nP5DYNfZMa6am7do3241t3HWPUiM8ePCgHkMWjsigALoQlqJGffHiRUl9PnOmsdK6yrj9rDsd1wzx\nZvSmt5P3kfOhCSs5hfLy5cuSVKEictEz5zdjKwiJZc7njY2NXkj7zs6OpEfWBGMiaMFubW3V9SW0\n62effVbPPPOMpLkm75kMKWeyaHsSeHYCBz8Hb3ZaOk0VmpfRAB4QIam3SLsJHM9yorvUN+NCuBBn\nXHNioQ7R+PN9cLGMIdmGRhM7K3eGqXPwRJpcPst5w84I4WJAJoBDOPEsBtR4iDdhIy8rF7P4LWEd\nh9Jo/kp9szjjcvN+1iHKn+WYoSKQQV/0N3Chzw4DIOxEaIBt50dteZt4uTh+eYL6/fv36yIdizfv\nI2TGhYo+ET9ijPXhOOGYYLY9boweaEbOOY8Fc/8Kf+/vChkOh/X5TN1aSql1z3B/xiewbzKojZAG\nM32GMPSe/rQQZ4jFAv3Wt75V0py3HhvMcDhcqCOlwSNNmjRpco7kiTTtUsp/K+k/lzST9IeS/oqk\nC5J+RdJzkr4i6UNd1+1k98eOE04QahdZrl+pn8UvgwkyT77zYeNaxoem2c0d3fnjNKWPj497iW/c\nLHYecBaqSg0vcwBlTktqtNRo6JijV1+aaxxZJB21vHjXwcFBz/SL+maaOpkurDP7izCCsyGoyVDr\nyWAAskeizaPucX/GxskgD2p6Hu4t9WMDHFqKezLHMJ+V/TbqxjailkohlJdpfoSu+OzQjg8PD6tm\neePGjeqApAVKC9KTmjEKkqkPJFUOcRxHFu0g9UPLoyyEmAhpZLAPxxznIplcdIp7UjdGoDps6TAY\nNWI6U5l8jWsLj1fzctHiZx1pIcf7NzY26ju2trb09NNPS1JliWxvby+NP3B5rKZdSnm7pP9a0p/s\nuu77NV/o/5KkX5D0G13Xfa+k35L0i497VpMmTZo0eX3ypJj2qqQLpZSZpKGkVzVfpH/k4fcflvSv\nNV/IF4TUGqnP36X2xF0x44syJwDFaXyzWf+Ukwwfit8eHR31NBHX2j2iMmQymfQSw0hzLYGpW91p\nQ02ckV/LNO3MeUdqFK2G+ByayubmZs3Vu76+3nOGEUuU+oeWZomRXOunAzSzDMjZzVKkLktb6xZA\n1/WTPLmVRMoVedY8vo1aC8vi1hGtIJaPeS6yOmaccI45OuQyXNZ57x5/QPxd0gLmPZ0+OrrqwYMH\neuWVVyTNNW2fQ84pdwuB2iTLMhwOFyw5x7SdIklLM57nQguCKXCJb/s1tg0jF4ljM7lUtFd2CDUt\nPfp3Mn/BYDCo7+BfWgtOPWYbDwaD6th929vepqtXr0pSvXbp0qUFa3CZPHbR7rrutVLK35X0kqRD\nSb/edd1vlFKud1138+FvbpRSnln2jFJKL+EOJzsX6ugEJienCULPNAef84oJv9A5SJiAjj0uvhlv\nPDv6bHt7O3VccaHzyermLc18N8cJ5TzOBOchCJm3mmyHk5OTdFHm8wPGCqiIjJHV1dXeBOFgj7IQ\npvIUAzQ/OfE5qX3wh/hhAtxQWYbsXr4341uzjXzT97LSaUnnYYzfra2t3vf+Li4MXHDodOe8YHm5\naUtzSCQ2552dHd29e1fSHMZwOIlwE9kQhDboRM4WvGV52T1mYWdnp7ehexCLJyGLNlhfX6/1cUgl\nvvcYD0JEnkoiy+5HJcyVJXeIMxFW/CaSRDErKGFYBhVFGz711FO6fv26JOn69esVHiEfm+XONrla\npqXfPJRSyhVJH9Qcu96R9M9LKX9ZUmc/9f9X+exnP1sH6tNPP11pLk2aNGnSZC4vvviiXnzxxZRJ\nRXkSeOQDkr7cdd09SSql/EtJ/7Gkm6Ftl1LeKunWsgf84A/+oB48eJCG3hKO4DWaKEyqIvVTMdKE\nDqE5RccbD+alycnwZ6cNOqWLtC53cvBdpC5lGhY1cY+Wiu/ZHpk5n3HVeY0OVL7XtTVP1xq/DW7r\n2tpa7bvNzc1eyG9oCtTmaPby0ONoA5YxgxTYBrRWHBogR9rbPoMkMi2RdFRCTK7NnZyc1PbiIa1u\n9cXfrL055li+05yWHnXn5vze3l51Dr700kv1cwYnrKysVOhgbW2t9l1o59Soj4+Pq/NxZ2dngbJH\nTvdwOKx1X3aavJ+iQwuCjm6OZVod1PpD0yV3nDRVOi3dSuf4zFIosL3X1tZ6YfnxjmhDOt3j3Xzu\n1tZWLetb3vIWveUtb5E0dz5G23Nev/e979V73/veauX82q/9mjJ5kkX7JUn/USllIOlY0o9J+oSk\nfUk/I+mXJf20pI8se8A73vEOHR0d9TzLvuCtrPRPmuYi4qR2DkgPwZb6C4DnzfBwbs8Z4JiiL5js\nJGK3Un/xW8Z0ITxDiCabrBkuykU7W7CyABHnFXve34ODg3Rj4z0cqDHgxuNxNWVjcDrOnLF1MvYI\n24N9n8EE3BROY5/4e9kfvpA6k8U3XG7CznTyUH1uqNyos4Av5+A7ZMGyBMwYbS/NN9bILXLjxo26\nobLsDCHn4hhBN7Ehs7+2trZ63zuf2fHgeAfZQo6Rs0zT6bQ37wi/OXTKjY8waubTIfxXSqnc5xin\njK/I+OWEwZhfZWtrqz6D61SUhXM/oKALFy5UHvbTTz9dN0EG5XBtyRSnTJ4E0/54KeXXJH1K0uTh\n3/9F0kVJv1pK+VlJX5X0occ9q0mTJk2avD55IvZI13V/S9Lfssv3NIdOHis/8AM/oN3dXX3ta1+T\n1N+ZadYwUinjcGYnppDXmXnkufM6BBPvp6ns0YB0yMQzQvibeD9DXJ1zywx61B7oIM3yWrPc1OTJ\nLfU2oFbm3OuMIUOPu/cN+ax00tGxmlkW5NSSz0rzNtMoMkclNVLCDNTgyCrwMHRnN3h7sg2pubGP\nqa2HmZ+dpk3nuX+W+kmRJpNJ793MMBeS8ccjWvHg4KBq2t7PDKf2NqAFSS2XUb7UZB0CWllZSTX0\nuH97e7vCF4TBOCapWYbMZrPe3I+25lh1h/Th4WGt22Qy6WWe9OhEjm8SFuLv+vp6DwpitLVDfePx\nuD6fmTODe33p0iW97W1vkzTnuRNidOHaIfX55C4tIrJJkyZNzpGcSe6RF154Qffu3auOLdL/mJuC\nWFRG36MjKMMXeU92GgcdlHSMETd1pyYjD4kH09nEnZ0WgDs9uYtPp9MF
LJS/pRAvC3GtKISaA9sl\ni77MHDLr6+vVMcV2ic/UoNbX16vWTMyTfGjPt02NhXXY3NxcoKjRIiNmvSwfCX0HGUWS93NMxLVM\nw19GDwxNl3lGQvh+WoXURt2HEe9lutx4J63D6JvIK3Lr1q2aqIhURGkx2RnrQMdrlH9zc7P2LR2s\nxJmpKbPc8aywPBizcHR0tEBh47z2vmHe8Xg+x6r7dyicox5fEEKKr/cD/W0sI9cktlHcd/ny5V5u\nbGnufIz5cfHixV7OEk87S7qnRx27nMmiffnyZb33ve/Vq6++Kkn68pe/3HNKujDbXtd1C789Pj6u\npgYl476ysWl60TMd37OxeHhnPJdsiMFgsOB0ZMPT2eAc7KgXM9z5IuKLtIfuuqPHHWueFCkmUyml\n17ZRr9jk9vf3FzIhzmb9ww6iLIQhsk02g3DcDOTG5yHn7v33TdAXbdadsEr8zZy4Id5eviH6whyS\ncc3pzKXzmYsUj4o7bYMgvHJ4eFgXjFu35mStV155pQd/ZGHgGbOC5eVY5+IWCw4T+Mc1Jh5zbrQ0\nH1N0TvocYDmZ9ZObVIwzLqSEMwmrkKdNp6SLp4SIz8yUSIIB+9bnKA9F2d7ero7GcH5evHhRly5d\nqnVgVkJXQAghjUajlk+7SZMmTb5d5Ew07c3NTV2/fl0/+IM/KGmuMYSmEGYmtZ/Nzc0e6O9aEfNi\n00lHeCVkMBj0dmZyVqV+7mWmdcxCqQmJeMRivJdmlkMxNHnoVHJal7ToJPFwcGpo1E4y7ccdSVl+\nZx795PnL2V77+/tVYxiNRjUMl/xcakqEZeL7kAzyiTKEsO08yRO1UGpYtELoaGQbU+uJ8mcc/gyK\n4Xu9PP5+On5Zd7Y929ktl+l0Wtv24OCgzpuXX35ZknoUP7YbNbdszNDqpMXHcRLaK++jgzc0yv39\n/Z41Gr+j6e8Qp/cRLROHLKj9TqfTajU6lVLq86n39/frc6O/Nzc3exZErD+0sjiHwqJ/6aWXFo5c\nI9d9fX29fg7tmnmxGSlMuCeEdeR8zORMFu3ZbJ7tLU4YPj4+1ic/+UlJjzAwnvhNPJiHJxDby3Cn\nEEIiPGl8WQBHForKgU5ogAPFPc+c2PyeDAR6hVmvLCezL+QUckw5AbNrDD1nnmNOVm42fnDB0dFR\nD9+mKRp4agxYLmasLycgcWy2YdY3XHQ9HwMHv8MuhBfifgZMeXizR6FleSQyBgIhiSwPC8cqJyUX\nzIwpEnJ0dFSx3bt379ZFm4v+MhaRzxvWj7z4+H44HNbnbm1t9WCb6HPmCAlcnaeSEyqkH8P7Y9lm\nxfMgQ5jF0u+T+orV4eFhT/FxhheVtNlsVsPJY/HmvF1dXa0+BId7or1igb58+XJVYGJxHw6HPRyb\nuWCcbeM+FW7GLg0eadKkSZNzJGeiaYf589RTT0mSvud7vqfunLET3rx5s8dAyMx5Rt25syzuk/ra\nnDt9qPHFPcs+S4vaCTVxN/m5W5KnTQ0vM9dpSlIry/jXdFZkWgvfH/f4KfRks8Rfh4Wk/sk1bEOe\naOJaHDWlnZ2dnjMpnkmOMq0r9/p7/b0NqGmzz8mM4DjIHMK0Rqj1ZBYXzfnsWDiG52dZ2vx3rFt8\n73EEk8mkd5p6aLdMvESuOPuUMIHU5y2TZUHtmBx812j5W0ZJElIgu4oEAnL3oyyEQwlP+PhkYiZa\nPIRcGEXJvg8rICzB6XTag3343hDyqOP6cDis94V2PRwOa7a+q1evLiSUcsucY8qTthEezpJcUZqm\n3aRJkybnSM5E05b62tq1a9f0vve9T9Ij58n+/n5NdjMYDHoYFilg8Sxif64xeOQXf+t5SqjlkuJD\nah9x5uwzcVVe8++ddpa1Teb8I+bN3Tqju/k7ow2Ix7rWw8jG1dXV1CEXWgZpjwcHB5XmFPePRqMe\nJh71yhIJsV6O6cW1jJObRbSRXkitOISaNNuYbZil+2U0bmYFETeNMtIBS6HvI6MtRjmkR76evb29\nimPfvXu31j3wU0biXbhwofYTz3rMInp52kv018bGRk9L5Ryks17q5/xZFt3I3CSeT4R+K9JIM78C\nLeTMiXfp0qXUorp06VLvVB9/Pn0bnH/UeDm3A6uOsl66dKlq8qurq/V7+gKySOJsbJBCSUsxkzNZ\ntGNCcqBHqOezzz4raZ5QJRqYJidhiLjfA2Y4cOOeMIdoTjnbICQL1AkhFDMYDHqQSNawdChm35MT\n7kl4WBaHPDix41rGlsmSRPmi4Kbs7u5uHXCz2awmCuLgpfc+rg8Gg16gjdTP2EYTPXOq8gDTrut6\nwVPRBpygbhYz6CgLF6dMp9Pe+IlneP7qKLcHkWSbdNSHTvH4nmyKbMPiBsMxF2M5HLw3btyokMje\n3t5Cud7ylrfUNtje3u4t+uFEC1l2bBcd0sGW8DnIkHK//+DgYGHR5YYrLSoTVJY8tUK0Uyy0TCiV\nEQ8YLzAej3uMINZHWuToexswodRsNusFOcUcYUKoYNCsrq4u5BT3d2Xc+3gXlR3m4c6kwSNNmjRp\nco7kTDTt0LgymCA07e///u+vO+utW7d6IZ2uQTGfNtNzMjQ9NEBqXXR8ZRF6NOd5aguFu6hTuiIX\n9QAADaZJREFU1KbTaZrwJ8S1+8wJwu8Zzp1pqrzPoy8ZXs2EUDSL+UxyakMYaUenUVxnqLJrySHu\nVOERZNT8HPKKspAfHO9gP7MNydf3JGNsr4yHzbajVRj3kO/PvqGWlznuSH3je6Ksg8GgWpiTyaSO\n29CSX3755fr5+Pi4F+krSc8880wty6uvvtrTaMPxH8+kxcSDjEmJpZMuo1NSA4zvnc4WbUHIgilM\no95ZSohSyoI1TIpl5jxk37BtMguWJ9DQoUwLJD4fHBxUTfrChQsLTk1GPbvVyL/xW843UhSjXWml\nvOnwSHQK+aSM2Zekd7/73dUs39vb63nPuVhL8wYkRus4seOXHAQOSTAghjzVjFGSQRQUToQsSICD\nc2VlpZd3xU1/Lgz0qGc4tAfHSPNBQkySE8DhEbZB13Vp7gc/NEDq509huTnxPaSYky7+H+JhwmRx\n+H3x/tgkmYmQEA79Fexbx7ypSHhwlreb580I4UKfMZnIHmFoefTz/v5+hUXiVPWDg4NeTueQuPbi\niy8u8OrjHf7erut6G4xnrSM+z4VlNBot5L9f5ocJ4bwhBMSTzJklkBklM98DNyPPHEm2DscJA3GY\nBiLa8fDwcCEoaDab1XJtbGz08rLE9fi7tbXVy63tbDFukqzjyclJ7T8u+nxvpsiFNHikSZMmTc6R\nnImmHdzcTBOOHeVtb3ub3v/+90uaaxyf+MQnJPW9zLErMqqQzgJCHr6zh2RME2qBNL2jfMtC1z0M\nmBrayclJz/yL8lNTyZyGvMdDwKV8F+ezQkajUc/ZxrZx08u1RSaXimvUIqmVBAc42AzULGluh0lJ\nTq07S7MkT+ynzJmVZRfMtEx
CLR4FG0KNNBtrmXbM8mb3L+PQU6snPBIMqsg9f+fOnR7sE31D52dc\n297ertozI4k5tuhUz6w7jm/OC7c2efwaoRS2CR2zDkPQUck4gc3Nzep4ZbsTEvG0EGxDQguMfqSz\nNdpgOBwuWFeEMTY2Nuq43d7e7jkg4/4QEhOYnzwjVBweHi4wdwgX0TLO5EwW7cCJnO5FGQ6H9Wie\nF154QV/+8pclzc3DqCDzFrDDnEJD73+2GGTvj3JmZmCGUXHRjoWDnUjTnmYiBz3NJYd4HDbwDYAD\ngp7+DKPlwsHzInmiN/NjxLt4hmQIqWQ04wJKmU6neuaZZ2pdHIPl4smNjewMQkCE1OjHiGvsGy83\n23OZT4WYPP0BnjqYWG7Xdb3+8YnPcZLRMQnf0FS+ceNGxa95vFsID40gNBZtPBqNem0U72Aif8IE\nGdyX+QWeeuqpHiMj3puxl+L+o6OjXpZBnyOuPJBF4VAg+46/zY61I/TEjSPaczgc9tol3kEljYty\nLNSDwaAqJqwX29BpuXxulDPa0FNJsNwMWsukwSNNmjRpco7kTDTtCEPNwrm5W4cW8F3f9V36nu/5\nHknzfMHhoHQnWPx1DScLIInvXYPhTslyhTB8mtoBvcyZxkznYqYFU6vJAmoI27AeGdeYpiwTQ4W4\ng8mz/HFnzxxyrAvziDMggHxXamvxfTicvX2zUHm2UXbga+ac9DiALCsinUJubVAry9gl1CypYS1z\nerIs3v+0fA4ODqrzkSerM/sc68D+j+cz42W0EZ3itLgIq8Rc4D0hZHHQCmHfhJCnvSwBEllA8X5a\ndzzIwRNwjcfjqqFnc4Ljk6wpwjLxTB71Ross7rlw4UKPjx2W2HA4rJ/JwKHz0ccBrWmGz29sbCwE\n6tBqzZhUlKZpN2nSpMk5kjMLY6f2m2m00qOd9erVq/pTf+pPSZo7KL/4xS9Kkl577TVJfQyWO3Ps\ncExGc3x83ItUit2ZOzeTT3nkF3d5Os4YlsodNuOIhrjWTivjtARHGd2N2gU1R+KA5I9Tg3fnDLFS\n1pGaUuB8V69erbzfo6OjBcx3e3u7px2HZhf3kCZFbI+aclZW53RHueggi/eynTMfAS0LvpM48Wl0\nNlIseTJN5qTmqS3swyjr8fFxz/lIWmu0MS0j72emeaBzmuOLfGZG4pFuFkKLLotGjb/j8bgX3UtN\nN8Qddd4GDI3nAbwMtY+/tAo9ZJ6HLtP/c+nSpWqxMDEZx1SUMa4NBoMeDzvuIw+b91OT9jnM+Ar6\nb8jvpnZNf9VpmvaZLNrBa+UiFBXMOnMwGOi7v/u7Jc2DB97+9rdLmicil+aQSXiYb9++veBMoNle\nSunxjR0+oNeWphWZClz8okMz5wsJ8lxQOJE42djhPim4KDOXbwbFcIHmok2n52kbJt+VQQPM3Pfy\nyy/XNrpy5Uo18+K9d+7c6eWUiEnDZPAh29vbPUZGvINn+NEcdtYLIQuyS7gAk8HA+vrmSt472Qgc\nG2RAEE5yqI0cZy5i7KMYf/fu3avOx5s3b9bNjewBwmxeH57mTmf/yclJ7RvPcR7lcibV8fFxrTfP\nO51Op7256/XhospxFG3Lgwm4oFFR4FxxR6QfnBH3xcZGOIHzjqHhWfg825mZ++K5pTw6LIOsFMKS\nrJfHZXBME/6gUsk5nDGdMjkTeOTjH//4Wbzm21I+//nPv9lFOLcSFlqTr1/iEO4mX7984QtfeEOf\nfyaa9u/93u9VuEPq71o0HbmD8fSHcGK95S1vkSS94x3v0Je+9CVJc9MqBlgWjk7NktpHiEd7ecTk\n+vp6z/nDndMj+EgPzHjaNPcZIUUNklpq13X60pe+pPe85z0LWibhBNceTpNSygKcFM+I9zpMQToe\nTdG1tbWFrHKz2azmf37qqaeqBsOITXK26UB1xxnrRcdWFkVJR2LwqP/4j/9Y73nPexZ+y/ERzxqN\nRr0UB34aDJNXeXZB14qo+WWc78lk0svi98orr9TP/tu1tbWU4kiLKEu5wDpEe4fVE+IZL0t5lGjt\nzp07NVf0ZDLpHTEX7cb+Yni6l5VUWvYnIRGnWHq94l1MSEaNlg5Bxje4A5SJnaS+QzfaKn67sbHR\nS0Hg1F+PJI7rn/vc5/Sud72rB8PxWbSCafmT2HDafD6zMHapP8G8ATxfBKEB8kWleSBHcCb5/Aj9\nJXZ9eHjYW3A83Htzc7PCJ/T0E9Mk7sRyx7M8JWPcn0EOWY4EwiMMHY7viZty0HPAOO/TeceZEK/l\nZ8/bwUWKmOLdu3fr4hITjMEe5BBzg4pNdmtrawGjZVk4KQgnLbvH8UEumITnCG847BSSMX+oFGTl\nCuEC7/lmpLmicfv2bUlzqI/YrMOGPKmcz8hYRhzfFy9erP2QBTDRn8Bne72jDjFHOF/j82QyWYDJ\nOE6ysH9u2K5kObREuIApYQkRMRsfF11nVTHd6qVLlxYCZQaDQe84Ma5ZGQ+fm7rHi0ynjw5hGAwG\nvc2C7RBtxLHypsMjTZo0adLkmyMl845/U19Qyhv7giZNmjT5NpWu6xZoJG/4ot2kSZMmTb550uCR\nJk2aNDlH0hbtJk2aNDlH8oYu2qWUv1hKebGU8vlSys+/ke/6dpBSyldKKf+ulPKpUsrHH167Wkr5\n9VLK50op/3cp5fKbXc5vBSml/KNSys1Syr/HtaVtVUr5xVLKF0opf1RK+QtvTqm/NWRJ2/3NUsor\npZT/7+G/v4jvWts9lFLKs6WU3yqlfKaU8oellL/28PrZjb2gsX2z/2m+IXxR0nOS1iX9gaT3vFHv\n+3b4J+nLkq7atV+W9D88/Pzzkv72m13Ob4V/kn5Y0vsl/fvHtZWk75P0Kc0prs8/HJflza7Dt1jb\n/U1JfyP57Xtb2/Xa462S3v/w87akz0l6z1mOvTdS0/7Tkr7Qdd1Xu66bSPpnkj74Br7v20GKFq2f\nD0r68MPPH5b0k2daom9R6brudyTdt8vL2uonJP2zrutOuq77iqQvaD4+vyNlSdtJ8/Hn8kG1tqvS\ndd2Nruv+4OHnfUl/JOlZneHYeyMX7XdIehn/f+XhtSbLpZP0/5RSPlFK+S8eXrvedd1NaT5gJD3z\nppXuW1+eWdJWPhZfVRuLmfxcKeUPSin/EOZ9a7slUkp5XnOL5Xe1fJ5+09uvOSK/teSHuq77k5J+\nXNJfLaX8Gc0XckrjaD65tLZ6cvmfJb2r67r3S7oh6e++yeX5lpZSyrakX5P01x9q3Gc2T9/IRftV\nSe/E/599eK3JEum67msP/96W9H9obkbdLKVcl6RSylsl3XrzSvgtL8va6lVJ34XftbFo0nXd7e4h\nCCvpH+iRCd/azqSUsqb5gv1Puq77yMPLZzb23shF+xOS/kQp5blSyoakn5L00TfwfedaSilbD3dv\nlVIuSPoLkv5Q8zb7mYc/+2lJH0kf8J0pRX0cdllbfVTST5VSNkop3y3pT0j6Tk892Wu7
hwtNyH8q\n6dMPP7e2W5R/LOmzXdf9fVw7s7H3hiWM6rpuWkr5OUm/rvnm8I+6rvujN+p93wZyXdK/fBj2vybp\nn3Zd9+ullE9K+tVSys9K+qqkD72ZhfxWkVLK/y7pz0m6Vkp5SXP2w9+W9M+9rbqu+2wp5VclfVbS\nRNJ/Ba3yO06WtN2PllLeL2km6SuS/kuptZ1LKeWHJP1lSX9YSvmU5jDIL2nOHlmYp29E+7Uw9iZN\nmjQ5R9IckU2aNGlyjqQt2k2aNGlyjqQt2k2aNGlyjqQt2k2aNGlyjqQt2k2aNGlyjqQt2k2aNGly\njqQt2k2aNGlyjqQt2k2aNGlyjuT/B5o356YoiwKpAAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "show(transforms.Compose([\n", - " transforms.ToPILImage(),\n", - " transforms.ToTensor(),\n", - "])(img))" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "torch.Size([3, 256, 542])\n" - ] - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAC/CAYAAADuOyeQAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvV2sbdlV5zfWPmeffT7uvVU2pGyE7TISDaaRWs6LeWgE\nNmnlQ2o1ch6sTkcRxEoUgVpCCg+N+wUlikSIlEYhUktRhyA6ogUkkEB4aLktxEcLAt1RghK5Gyyb\ncuGyq1xVt87n3vt8rjzc+1/nt//nP9c+dlE5t50zpa2991przjnmmGP8x5hjfqyu7/u6T/fpPt2n\n+/SvfprcNQH36T7dp/t0n/5i0j2g36f7dJ/u0zdIugf0+3Sf7tN9+gZJ94B+n+7TfbpP3yDpHtDv\n0326T/fpGyTdA/p9uk/36T59g6R3DNC7rvu3u677l13X/WnXdX/nnarnPt2n+3Sf7tOT1L0T69C7\nrptU1Z9W1b9RVV+uqn9WVX+z7/t/+Rde2X26T/fpPt2nqnrnPPSPVNXn+r7/Yt/351X1S1X1g+9Q\nXffpPt2n+3Sf6p0D9G+tqj/H/y89vXaf7tN9uk/36R1K95Oi9+k+3af79A2SNt+hcl+pqg/g//ue\nXhtS13X3h8jcp/t0n+7T15H6vu/S9XcK0P9ZVX1713UvVtVXqupvVtW/5w999KMfrR/4gR+oq6ur\n6vu+uq6ri4uLurq6qq7rquue0Hx1dTU8M5lMquu66vt++OgefzP/crkcytC1rutqNpvVZDKpjY2N\n6rpu+L2xsTHc07WquvFs13W1sbFRk8mkNLmsa8qjtLGxUX3f16/92q/Vxz/+8SHP5eVlXV5eVlUN\nbWc5ao/yez3Ku7n5pCtVZtd1K9dYLq/rI56Sl2rzZDKpy8vLlbqZ2B9+X/X+5m/+Zn384x9fuea/\n1Wdqt/imxD5WUj9MJquDTbbN+aJnu66r6XRa29vb9fDhw9ra2qrZbFbT6bQ2NzeHvtWHckUZ8KT7\nl5eX9bM/+7P1Iz/yI3V+fl6Xl5e1WCzq7Oys9vf3a39/vxaLRb322mu1XC7r5ORkoPny8nKFTsnU\n+fl5bWxs1HQ6Hernh7ojesmjyWRSV1dXdXFxMfBfekVZVn+z7Syn67o6OztbkU/p2GQyqd/7vd+r\nj370owNPLi4uhrxciJFo3dzcrIuLizo7Oxv+U8ao27quvnUZuLy8HORla2trkOUkM2wDy9a1ra2t\nAaPIc/Hh93//9+t7v/d76/T0tLa2tmK79RG/Nzc3a2trqx49elTf9E3fNPCb5fd9X+fn53V2dlaP\nHz+uH//xH78hc0rvCKD3fX/Zdd3frqpP15Owzs/1ff8v3om67tN9uk/36T49Se+Uh1593//jqvrO\nd6r8+3Sf7tN9uk+r6U4nRT/4wQ/eZfV3kj70oQ/dNQl3kr7jO77jrkm4k/SRj3zkrkm4k/SBD3xg\n/UPfYOnFF1+8axLuFtC/7du+7S6rv5N0D+j//0rf8z3fc9ck3El6FsDt/+v0LLT5HQu53CZxworf\n/ruqVial+AwnvvhcmiThhAzzagKKkySceNPEXGtXrU8IpglCTjJy8lHXOBHj32kSUhNgyscJ38QX\n/vaJz9bEXuKFl6f2Om0+2ao8aTLKyydtbFeijxPniXanl5Oq7AtNnvEjOp2XXm6ajCMdnFjU5OZ0\nOq3pdFoXFxc1m80GGdTEuNcvPnDiUs/wOZc3p8vbnnRCtCfZIy18riVrSbeVOLnc6h+1jfpC3iZ+\n32b3O9vhbUg67DLr9ZAe6jNlPpVJ+V0ulyuTopTN5XJZy+WyHj9+PNquZwrQ072qm8pLwPKZbwGd\nl9kyBGnmnECf6GmBlZetejmLzw50QSSNrTazTAJ6AtiWkqldTpPT4O1KtDiPHdBdIR0oyMdUvhtr\n1sP2clWIJy+PdUppCOoqj4Y88SrJaEtuucpEKxs2NzeH31oNkeSHBlirrLxfyG/S6zS1+jwZ+dRH\nLI9laVVMC9xbNDgt7uis0+mxvhnDFfJrDNDHZM77Wb99RYvnVdsI6AJtX2l0fn5eFxcXdXx8XMvl\nst58880bbWK6c0CnZ+WCWHUTYNz6uUDrOu+3Ovfi4qImk0lNp9MbAksB0hIv1e2CwyV2LcFS+RQQ\npyt5t0xjXgm9NQqPL9dUOxLI+e9Uj/932glebsxIZ6uPRDPLdD6zTO8r0ui0pT6T0pyenlbf9wPI\n6p63K/XLWF3ihWRDsjabzWpvb68mk0mdnp7WZDKpxWIxGBbxiv3Ydd2wfJX9Tf7Ru3XD731Dfrh8\nsG/YFk+6zxFF4hWXPNKh4LJYtrVqdWScljKz/0k7607LnL2fWIY/R93xUTT712WA3+p/r5vOznK5\nrPPz8xUHY7FY1MnJSZ2fn9fx8XGdnp7W0dHRjT5gulNArxoPs7jlXFeOe70JpNgZEqK0npgKQsWm\n4Oo59w69Q0m/8idvND3v/PCUhEnXZICurq5qc3NzxfNpAffXm5LH5YJPnrYMiK5R6D2s5Hwa88QS\nLW64Ly8vB+PuozLv79vywelie+SlT6fTury8HDz02WxWy+VyeP7s7Kyq6sZIgQDn9VC+UsiG17yN\nY3KbeCmv3NupsqQXBNh1Mq57/iyvJQekZWhTyIY0JsxIIUbiSXIyVYdf02/fqyIaRPvFxcWwJ+Xi
\n4mLwyo+OjgZAPzs7q9PT0ybfqp4BQGfDUiyVYYWqDL4JSBSP5P2qGjbjnJ+f37DcLuTKJ4VyD0Ee\nhuKi9J6cJtKlOlg2jYb4oWG4nqfXocSNHfyvMunJ0utwRRTgt4R5zLDSqOk5N3ouzB4q4m/l7/sn\nGyq8LO+bNHLyZ8lzyhs3ssnwKcbNZxWOUX1jiTxnmwnmm5ubNZvNqqpqb2+vqqp2d3fr7Oyszs7O\nhk1Gys9yqlb1gA6Fgz49ftFEo+X9yw1F1D13QqqeyIyeT6NclaHf5HFVrci+g63rpTbqtEKVbtyp\nL36P4T8fVVCPq65H8f5sMirEGD5DLGA+ydTFxUWdnp7WycnJsPlMo8blcrkC5MK0VrrzkIv/J+PU\n0dwl2bLWfo1CPJ1Ob3QqlUOW0Yd9FEKWS6ViCCGBuYO6C6N7By7YKZEegoWSGxbftUpvLXk2Trcb\nEwdhKg1pE4+8zQRZL0/0u+FkfvItzXO4opG/bCNBS8NdAXrLoKcY9boJM58U7bpuiJ133bUnvlwu\n6/T0tLquq9PT0xWj5+EV1pNkKNHPtrvBaekVUzIEah/BlSEV8kn/0wjV+zbF0hNN7sjRKUkOlcrn\ndQdq5k36NeacqG+pn9Qz0iAgPz8/r5OTk9rf36+Li4sB0M/OzgaPXbjTcqqU7txDV1JHUJl9Jt47\nSyl5lXyulZ+goW91hgucKyj/O6irfBdmPst2+28KqK57e5OweF6GlJIHm9rJ615n8jSSYV2X3GtJ\nxs3b479Vl8e5CWatur2NyiNv6eLiYuXYhRR2WdfOZCirVldraXSn8IsmSuUlujFszX20gFP1ucfr\n8kdQcrBPbeY96iufawF1knfVq2vkO3mWaEq64uDpvOfzLIeyQJBP7U8y6g5WSxZVz+XlZZ2dndX5\n+fmwikUgr5Fa1fXRJy2ZZrpTQHePj7FCMkvg5BOo9D4ImMrHemQRfTik6ypLQywpNO+xLIIJPWLR\nIvrcK3GBS0LGPMkjIQ+qridd1P4U7kgeiYMwadZz3g+tfG7I9DvxiPnS3IPLh/dpih8TlPgs+y8Z\nB4YFdH6IwgcEV49js5wWeDtf2SaNGieTyRB62d3drYcPH9bGxkadnZ0N/JJhZgjNQ3f87c7KOqPo\n9LlBEG91TefIiB6FfJKBIVC6vCd+pfCqOw1ufNwIcXECaU+hUI9/u9HyunlP+XjmD/mh9ia8IYgr\nxKaQy+np6RAxYLuS4+rpTjcWuRVteYu65mCQPEkvS6nVoVV1o1z3ImQh0/WUb8wTSe130EwK4c87\n3yRQref4PwkZ4+r+SX3FtrW8J7bd86d8rpi6lvjZ8pCcDrYrtSGBBUNwPt8w1tZWch7IwOijmLq8\n9K2trZpOpwNQEJxT/QnQWXcKW7UAyhONh5JkLTkGLUBnXs+zjldJl8ccHc/bkg+/5zru5Xpy/aBe\npfySrfPz8yGkot/8UO5SnWPpTj1095poiWiJ5Q1wA1Dfr67JFRNo1TmM7vt+WFVQtTqho+fOzs5q\nNpsNwkj63JPRtaTw7llUrU6MufekiVq1V/VKAFxRqQycbyAPVW4CQ5W3ubkZQcK9tZZn4DSpr9gn\nnERsDWOVl32TJnsZQqJHKB5pYlfXkoypfioMvVgNg1XedDodymH/i7/OUzoNiU8ESOWfzWbVdd2g\n0PLQl8vl4KX55CZlyGPANFKUgxY4utHkiI+6Jdo1MceYuhuNrltd5aKyPDbO/H6NckR9pt6nyfUU\nHmwZBd3zOLeePz8/r5Soz2pj4jP7Tqe+KqRyfHxc8/l8mBBdLBZD+Rw5SM/TghBPdwroaagmxvhQ\n0b1Pt/RJiQiSusdhUVIyXk/eA4dBFBgakzHAHZtAc6BzwBwDabaTxsWNR/LmSH/iK4fJXq//dq+F\nfGAbuLLB+Uyl59I4rUxKSs9vX6GRZIR8IQ2aIBVwyPBrdQrlbx0vUn38VnkyGtvb23V2dlZd19Vi\nsajd3d2aTCY1n89vOAPuYLDvvB+8b5wW5fcRJ/PQKxdvHVyTwVAZlAW2QW2SXvkSYMmmH1stmp0f\n3hecb0p9kgxC4hH1yg2e67pW8VCWtGqFSxCPj4+HMIu8ddfRpI9j6c5XubgyO/BWra7K0DNk8Lry\nHehdSHSdcUGl1kSU7umavp2u5AElOtn2BAjJe3B6WtdTHbzm91t9Mka/K+pY8jZ58hGBy4F7Xet4\n5DS1gEfPSQY0QUrvkAqc2jymcA4m9PA4QcodpO6Vr5sgTNdbfHa9SLLLdrU8xBagJ3oSLWNy4/3m\n+Vr93HIwUt4x2U/1uAPo+u8Gg2E8AbuWImpDm5fjfHKj0Up3CuiaGCITW6sWqsbjuPTM3KvQM0qt\nGKBCM6qLs8sMyVC5qq4nRvTb7+l5tUOeWWv4ycQhl3hBT1c0O00qU3kYBqJhZNgneTuigfznMN/7\nx5NPOum3h028H3Vdz/Z9P3iyDAGQLuZJXhk9bXpPzjeVoUnSquv1yPT8U/3Jg6Qcsb+VT5Ow3B/x\n8OHDOjs7q42NjSH0ohcpKD9lnRP43seUA8qCe9zsh8QXttuXlnq5VavhCnmtyuv9OJvNhjZQBt0h\n8pFoAmUPV7g8EkfUftHj7aP++ijb+aPRs0ZZqlsTncvlcmX3p3aHOg9TaDLpd0p3vmyRneSgpUQF\nZ1jDwbvrupVJBW88FU+gWlU3FIPgS2V0ul2BGfflPbe0FH7S5mXrOjs5eak+9OSQz/OLnx6TptKQ\nHvd4vO1uyJxP7FPn31g+KnQyzF5uGn67YdJ/huLc4JAOPxJA9x1MGE5yQ0jQ9285L5LFy8vL2t7e\nrslkMih81z0Jv2i47vFh1e9hAA8BulEmkHmfJGeKoT7yOtGhslKbNzc3V/Q5yb2DOelN9fp/N6Kt\nj7fVDZLut5xKtYdhG+mVNqxp8lPrzBUvV9t0MJs2DrkzkqITY+mZCLm4INELoheh/3rGvTF2hO+O\nc+BLnjv/kzb3cJl4LYEIBUYpeT98lpNN7klw2NXyjP1+a6SSDFUrJeVT3Qkc1pVLQPcDp1K9Lb6w\nXt9Aw3qUaLxVjhsPXaPHrCWvWsrIuhMotYx1uiaAm0yulzPqNEa9Fs839bAMgblA13deJgeB18cc\nHzopbJ9+J6CkTjoAjo1Ek0fdypf0qtUPLRD35PME7gyMOSwcAQnIJT8Kr8gw69V6kidNwpNu9lFq\nayvduYcuQZFFc2t4dXU1DEkJ9A7iBF6VpxUWnCXXSgjfUu6eBUGQCqI6tBWZnpF7asmqsxzdEx/Y\naVIiV/xUFsth2bqeAJ2K7QpOAeXz7vEIPHwVBPtlbPTlRralZOxzttHByEcX5Bl/u6I7AKssArrk\nZWtr60b4xEdcqR3JsJAn9Pa6rqudnZ169OhRTafT4fAuTaSxb1WWQCTxTfW3nCHKLEdAHhpzr9dB\nnv3kXraSVvF03fVksPOHoSOWlwyGj3TFFwdfdxZdrhN
4S7bIN/0n3ii/5lwUXjk/P6/FYlHL5bIO\nDg7q6OhoCJ1NJpPhgDatBmN7W3q7Lt05oFMJ3JMYeyaBmVtR/eZwJeVnOS1l1Me3NbNep1v30vXb\nJgfvFo23rYNAlLwXp7tVvvOyVXdq/xi/Ul3u/bfoW1dOS/ETffqmwSIY3KbtY2lMVieTm4d36XuM\nz4mWsVGE60FqV3IeyD+vs9Uvrt9+fV1K9XjfOSa0ym7hB8twZ0cgrnusX+AuQOcacwH6YrFYOYBL\nsuXGyelKo9+xdKeAzsZworDqSWPEgJZHwXKUR98+WaHhzdHR0Q2v31cS0JrruhitN4dXra6IqLq5\ntNK9QLf8XFfPQ4CSd5AmvFg347G6Rg+KS6LEJ4+5jgEV83k/pN238giTAfS+UWJef2s6PUA+y5EB\nJ9RJs7fH4+9ePtutSTqdgqgYML1R7mlQf7acDCb2Az1gtX97e7u67skE6XQ6HQBCIweVwWG/Yv3O\nL4ZldJ/HG1A2uaFJIMUyJPfcY8C+Sx608vneET6jj+/K9glD6mMyTq5n0nEep8D+oD6RB26w9M3l\nq9wMJNCez+fDcbcHBwfDXIzAnCuntKgh6Z/q9P0ZY+nOt/5X5Tg2hYyekpSmKg+FOInjHV9VQzxS\nxiIpAYWtanXzBu+5ZR0TMhd+go3CQ6qfk1wuTGqv08jtzglEqDQEWXqFDmgOmt5vKrc1yaX7qUzR\nwhMleZ+nYbqRJYgLlDzM5PW5J5pkTXk9jKdwhgym5IUbmbysJHstYNc9GmP172QyGcIUWpvOMz58\nRyvlJ8WyW2DIRDlw45Seq6ob+lK1uoqNMqsyp9Pp0AbqCYG16uYGpATges69XBkRPyPKnSIZklZ7\n1T8uFwyvnJycDBOgh4eHdXZ2VkdHR02jIyND/PJ+YXzdeZHSMxFySQqWEhXjNuWmvOxwMZKWjx6d\n8hHQGDdLtKf8NEKp7f6s7nkbCGpJmMf4khSadboxcN61vBWns+UVp/JIv9/3eL3zTNfdmKayEpi7\nEWH5qQw5AXIE0ua3VmrxwRPbpBFl3/fDq+p0JAA9O29jiw8qv2XYnF6Xq1Y/jbUzGQ8+x75LTgh5\n4vW1RjypX1sy2srfaq/PA1EmfH25QJ4j4xSr93i+85NGmY5lK70tQO+67qWqOqiqq6o67/v+I13X\nvauqfrmqXqyql6rqE33fH6T8PoSi18ZOp2UlWGgilbvMWh2hcmXxZJFlebV0iJ5u1bUyqxxd8yNV\nlccPyaIB8c70VRuuAGk7P+twT1sCoGukgclXgzgtnp/C5EcZE9jpPXu7eJ98IC3qd8rF5eXlynGk\nKTm4iwZ5XIx/63kHF7aBPBDvJGvT6XSQIdVHz45yQ3n1CXKCWQpB9f31yFHhpwcPHtSjR49qPp8P\nrybz1TcCGdcf6kfqHw97iDbKk0+qkk9yiq6urlctUX5Upuuo+okypclSB3l+syzV7wCpMlpOV9Xq\nsSKqlzJMPFCYS9jDc8z5fXBwMOwAJQ94HIXK4WS76xB5QnrH0ts9nOuqqj7a9/2/3vf9R55e+4mq\n+kzf999ZVb9VVZ9qZXagG/O0Usfy3m1S6nCn5TYC4fl80syHhVSUljKlstmppIVAsM6rGeMPhSN5\nCYlup8EFzvO4d5X40KKbtLW8s7FykzyN1d/KJ7lR6EVgyjDgOr6kOrwtTh8nSAXafnhXS/FTeyj/\n3me34QNpTPM9+u0GzmVqjF46Wut4mWSSPEx0E0N8ziv1gd+TA8kDtvTRefZ+wBZxwEOQ3o50z+dl\nxtLbDbl0ddMo/GBVff/T379QVb9dT0D+RnJr3WqQg5qSBIfDT3k+/rYiB3OPk6oOPz6Xnhi9G/2X\nUmtVgh/RKxo9xi0axoa07i2xXvdsPIasZ1mGC7Q+9KxUpuhzxeL8gXudyeiqfxj39qE2+5N10LOs\nup6M4uFdzEvPraq9sSfFWVvJwXCxWFTXdSvzHjpO1kEueVWig54b+5YTeQLz7e3tqnoy/6PzXfhS\nDIVgGHv2eLz3lepQn3N+ymXOZcrndrquW1lbzaXIejZNjnMxAuvhTl53eAiMbAd11eeqWLbaJlrl\nIbf6zp2Sq6snO8Y1Sjo8PBzCK2+99daKwee8xnQ6vbEblu2Q3HrEgqPTMQdB6e0Cel9V/6Trusuq\n+u/6vv/vq+o9fd+/9pQBr3Zd90Irs69w0DcnK8V0DlcoEBIwNdbBouWVuPEQQKosDltJI4FEgMH1\ntK7Ivha+NUmo51OM3wFI+QXGare8BwkFQzNsr08QsTwXmrERRzptMAFBMgy6zlCP8nGykXzyzWLe\nP2wf62LIzcHNh7C8TyOk/33fD283Ur9r5QsPkErzLk6rt8FlUcsWr66uhjXLFxcXtbm5Oax22d7e\nXjmlj33FMiVDNCAEEhpB6iUNK8OJ0jvuQfC6q2owPGy7QknOc/FIu3MZQtM9D8dxD4g7KSp7zLNN\nCw8o4wRlhbPm83ktFouVExN5+Nbp6enKWfoqV0cCEMME2hsbG8N+A9bnjpjzztPbBfS/2vf9V7qu\n+9eq6tNd1/1JPQF5plGT4p5n1c1h2VgSgzxG6oDpAu6g5XWvSy36kgC1wKdV5ttJboBcWHmvlW+s\n3NvS5+3lqguWk+pt9cFteMj8CSi9Da4wySslGFbVikfsG6vGnIbbJPd8BWDyuvVCaYZeBIA0Xi2+\nJjng6I488f9uYN1A87rq038af7bR9T/Rk/iS7pFez9N6nuUlnoke9bmvNdeHHnnLeeJEP5/hJ6Wx\ne57eFqD3ff+Vp9+vd133v1bVR6rqta7r3tP3/Wtd1723qr7ayv9bv/VbQ8NefPHFevHFFwerpqE6\nd5a55ZTn9ODBg+HkMloyeR18a41WCWioJW9ciQqUPPuqVa+TySfjWqEFj12zc6+urgaPzwGiZaRI\nExWEw3bW594LvQL3LFVeKtsFjffdi6J3R8PrO+SYv+rmaXYtoBbv3Oumt+4eJssjD9hPzlfFTyeT\nyXD+hrbpqz6Nkkhby6g7oFDZRQtHWjs7O1X1ROYVehG4XFxcDPcdHCUHbCe/tRyT9XNCWXpDurkC\nIxlFBy3pmniqXbfyRjnaZH6GUVhWctz4jK9n5/Np3opLizkvJuOtd30ul8s6OTmp5XJZ8/m85vP5\nMALn6qSqGkKxm5ubNZ/PVzxv9jMxx8NxWtfuCw5S+roBveu63aqa9H1/3HXdXlX9m1X1n1XVb1TV\nD1fVT1fVD1XVr7fK+NjHPnbD0q/zAJMHRYDQM+yQlJ9KlmLkrsjMT2+stepDzxJsPHmMN4VjSKtS\nmlgZ41fyHll+67rzSf/ZHrYh8UrPJMOU6tPzfuxBCxBT8rDYOpkaSwn8dN2XMoovaRWR09DyBv0Z\nhXIkawIIfTi012jBjQRB8+3wIelbkl+uVqKOVV2H09xLpfFMBrtqNaySnCSXUedlSq1wJtvtyxJP\nTk5qPp8Pxt
T3AYg+xdvpLNGBUD5598QItkWrm3Tt1VdfbfbT2/HQ31NV/0vXdf3Tcn6x7/tPd133\nz6vqV7qu+2RVfbGqPjFWSPJydT0NT9xLEMMpJFI2ChOZ7GUxZKPnKIyMEbIcKhtHDFWrSkxPIXls\nFGQKY5o4cy9D7UwGQ3TIY3RQdE8wATc9MdWp++Sxe0EEPgoy+9yNOHnr8XmWl7xB5nfvx/nmz7Jc\nL7OqVhSW13ScLdeHczTiAOj8S3UxeayVMrq7u1tVVTs7O8M7SXl+u4982Mcuj9527yuXV9HhbfX6\nNEpm/4tPLrMeKqJDRVn0ZcEESO9rl+WWDrUcPxlBvVXo6OioTk9P6/Hjx8MLnQX2PlrR/9PT07q4\nuBhGcGoDQd4Xdei+yyzloZW+bkDv+/7PqurD4frjqvprX0M5TSKT5+jg3Pf9sPKgajX2qPw+0UmB\nEwO1Rp0HHFGBOHQlTRwiEfxJo1bOeJjGDQAFkhNrrizclUfg6rqbu9mSV9XyBNNKB283+ewKTiPH\nNqYRlIN81equPZ53oWc4GiLt3j5XUueVG2ryz3kjpabnq/xnZ2d1efnk1WI8JZF9zQ1I3t8pudfK\n9gq4CegPHz4cjtk9PT1d8cYpO+4YcKKX/3kchRs8jsrUNwpXbm1tDUaE4CW+MPxJQFP5klvxejKZ\n3OAlwxUKn5JGDwu6fLnM8D9DHgJp9e3R0VEtFovh+Nujo6NhYlz960dz0HArnKXndcJi13XDHgOB\nvxLzcz9Fag/TnW/9T8rp19iIdRZKHckVFO4F8lkPe3jdHrtzL69qFQySgXIAcU9kjD/Mn7ymxCc+\n7zTrfrrmvxOvx/jvvE31tHid+ieV6XnH5KdVR3q+RZcDW8pLo+3xeZeNdQqZ2u/yIiXn8brn5+fD\neS9V+cRNN36873zy/0n2XA6pe62yUn/y+STHY0a8JbdJ7m+DI/SaeXKi1phrVEbg1odLlH2VHOVA\nIRanubUSijLl4TxPdw7o/E1PmPfpmfmuTaXkqXrs05nFNdWqh56Ney2cNBF9VFrV456C6FLSBBXr\nSKDhEzdqA9drU4nIw+RpJ8Bz5UgezhiAprLozRGQ+Ell+8QkPVy2IXm6BJKWwXKjQaD1NvA/J3g9\nDKbQi+Rta2tr5YUVUsKWU+F0pDbQqZDscPfocrkcHBgdPudhAPdy6Zkno5UAhs9Wre54pNzxftI9\nb6+HTqgzlAd6633frxxQ5mDtZVHWWoZVfNM6cy1LPDw8rPl8PoRZ1KfqF2GDRinT6XTYHyB6FHbx\nDUec1+DoWjTyGeHaWLrz0xar2jFV/new8uGHCyOBmZbcwcPLctDTs7TITARSB2cHWyUKVsubUDk+\nVE58UZkJNJ2nHqt3GlL7+Z9gTD46bYkuKiavOd3sBwo/DSzXH7sxT3Q4n90Y8RlvM/strY5QmECe\nGecs0twmxUuYAAAgAElEQVQKeeC0OS/JQ3qDPOdFB87NZrNhy7n3l8ecx4wr6yE4Ug+cvhbN3ib2\naXI4nB6W6XQmY55GB24IvBzJJZclyhvXenMBMjdNcSJcmCOaGL5k6JZy77iTDJAn56WnO39jUQKQ\nZEGTIuo/78mCOqAruRFx4Uj0OQiyTllnB7CWErkie/taKRk90kBlcyBPQ2y20fmaeMG2tGj3YSQB\ngvTTayPAkefOz1TPGM9SG1pG23nLMpKsuZHU7sCqJ5tHdMSye78OQt6GZGR4j/MTs9msJpNJ7e7u\nDuEWeuja0EOg8bYkg8W6CN7uuHTd9TtaXR9czr091CuXzzFd9Gf4rOul5/G+V99wJCMAPz09HXaA\nHh4eDpu4Hjx4MNB9eHg4zB/4u4gVXyefeahbOqUz8SvhxTqcuFNA50SEUgJNpnTOgyznZDIZXqhL\nrzwpLK26/qeYHAVWHaNOoccuxdEyMr9O2pNws+1V7ZUnvMd8virE+ekjCz3L364kFKYEAK3y6IG4\nB8/r3s9+eJR7g7qezoUeA4jb0k/jn0DfAZf8UmxVcW3xT+1i2ETXPB6a5IBtp7yJzu3t7UHuTk9P\na29vr7quG87fdiMruuglU6dcb3S/5VFqpQvzOY0uf6nvEhCLb1zqyIlZn+h3XdWH3rIbID6zWCzq\n8PCwTk9Pa39/f3ihs55Xn81ms5XjEhxX9JzT4eEn3tf+hhbu3TbduYfucbeq7Hk54LhipXghy1Oe\nFHtvKXoSMhfWqtWD8V3wlY/XfI0160oAruutjiaIUbCSJ6vynb4USvDy1gkaeStwbXneKbXobNHk\ndCXe3QbEW/T5qMJpZOJEGidIx+ZIvE3r5NA9ZgKy1qdrJ6lWgHCo3ypX9Lf433IIWqNNynxqY3qW\nyU9bbPGI3nai20NGqRyNZhhm0ZpzvpRCeXgevgyMAzZ57oa8pfepj4llNGBj6ZmIoa8D86qKw0bG\nxThZwTMmJPT0rsVsHpLjNLhXqfqooGSwH5Tl3passO6lw7pUF/O6t+kA1AJoF2wu//N4ZWozy2C5\nych4nzkQ0JjS0/T2Je+KNLAsz+NtSTxLypTKYzlUJrZBfFQdUvzT09Pa2tqqvu+HI1E5ouDErvN7\nLN5ede0I0HlRDH0ymdTe3l49fPhwOP9IzyZD1fKKKcOq271NPZdGha2RBfnEfvQ+77pu4J9/0hyM\nl+8T6i6jolO6r41CZ2dndXh4WMfHx3V+fl7Hx8e1WCyq7/vhsC3tWt/Z2YkOlO/LYB87sPNaq799\nxHEbUL/zVS5cWuieFgWXjUkeuO5zskwAurm5ubIGnfkIeHpWKwFUB2OFKpuAnD6sww87Uhv17ZMh\nCuekUyQT2LgyKrlAKJ8DCY2b0hhwel4lGiy2UwKq2CLjy3zWwV1lJt5pJYnyCWAcdLx8/+8AS4V0\nXrpxSAZJiq/4rPqfIzv32lv9xL5w+RI9WlUhQH/uuedqNpsNowSBkcqpWt3invqWp2PqVY2kh16p\nG3z2jeiTnrsu6P5kMqn5fD7wmccDSAY4Cm6NJltGyvtL4Q2FWfS6uMePHw9vGtLr5KSL8so1EmM4\nS2UrNES8UN95eI3zIXT2eK/rVs9gpy610p2/sWhdogLRytFqOeiJ6WQkGc4yqnKctTVJ4UNT1dEC\nPJbvAKZ7NERVNw8LY9uSl87nXMi9fckzZDnkwxivvGyWTz62DEOrDE+JXi9fzyU+3IZup81pTsnb\npGsJ0FseZ5IFd2oSQFEXuFtTS+cuLy9Xjgbw5X3uydKoiZekg84NafM5J79ftbo02NtEI0c99bkX\n6nErzOOGzq+rHoXENOfBNeY6LZHb+dPSYF9FlJy4FhbovujhNT+1k/NNqpsnu6Z05zF0ByT+duDy\nDpOl5AQlLbCGogJ3Crd7ZLrWGgUQqFog5qDvXtkYmLHMtNaetCWPM4Ewy3XvTveSR0ild76nIZ/6\nZyxOybJJvxtIT25M6KWIHnptKbU8SeeL/qdQIMticrnQ4U0Cqaur67X
pKp90e9lpjoYGTP+l9DIW\nXdfVbDYbVmLs7u5W3/fD+mkvy2UpGQg9d3Z2Fud9COjukKhsgZ8fEEda6Mkn2tRelzPnkeusrhMc\nBdxaPLG/vz9MhCrMwhM0RS9lJK3jd0eGOklsIn2uX/zQa+d9nY3fSncecqm6OdGYQIZ53EPQx5dR\nOdh5eCKVn7aX63nmSbFQB47UuWwfrf468GBoKpWZPIkW31qgxmcT/7xPfMg5llLcnjQkA+s8Ykpx\n2BSyaU1murOg3/7x66m9fp1Den68jev6PPE19ZOU3XeP6phdGRYvn2Uk54nPu/OgelNIjG1JDpu3\nVX3sOuZy4LRRpl2/kqMjDNBcx8XFRS2Xy2GtuR+XoLxjcltVK6MHevWkRc6k84PlcwNRS/48NOrp\nzkMuHpuuugmGCbz945OH9CjJ/DSBwmtJmNxK8pqXTy9Ldbjyc0iZvC8m5wXpZXJhYHtIh75bysK2\nJ0X3OljGmHfkdSWeOwh7+5PnxpQ8N5epBAypLrazBUYqg5PrVdfGVeeUz2azIc7NMn0kNwbeKSkv\n3z2qMvV+XC290w5IyZ5i17cBdMoP6W05BgT5JFstB8afEUCmkSjpoFdLfSOA69wZhVd0/O3+/v5w\nNouHb5VSmCfhBPnju7mZR88kfRRoU08oB880oHvnuvdBBRDDqdTJS0vbrVU+jwNwQHeL6JZd15V8\nIotDVRcw0ctht77V6VQw8ifVn4CPdPnxpSkWp8SJZwKweybOj3QteXbeHu97/qeCUqDdaLpiKD9P\no1N+Lt9j35NffN7Lb3n9foiVlyFAv7x88lYhhf+qrsMaPtne8gJb4CH6/WTFruuG89Ln83mdnJwM\nKzm8r9zgsx+8rxzkFcqsWt22Lj77VnfxLhkwhajoybbi4aKVc1cMaVCv+n71EDW9yPn4+LiWy2Ud\nHBysHGpG3FGbPJSoxRNV1xO25KGWNnICVYdxed+Sv35omcpKRrSV7jzkkhrJzk/AqmfUeGe4ynbv\nwPPyv3tKrficBM/B2wWQ5RNI+IwrM4VxzFOkgnnYw70l1Z3mBlqA4cYytYmAkoSM+WiIvf7klbY8\nOJ+HcH55W50/znf3jPh9m2dIl9rClUnyVDXRxhip5Jby633nv8l38s49VYVeFH7Z2dkZAM03w7Cf\nKOMt5yXR5HxLI8hWn3ib3JHQfR99Jf0mPxj60ESowiunp6fD99hBWWnEx0Sd5X+FouRkJEeqhRPO\nh3Wy5+nOAd09Pe9sFzrvyLQES8nBaF0cd6xO3aNX4h3qh32RXnqIyftodeKY8iSA9d8sx/OMpcTH\n5Nkl+tO8AHkmhSMPSacbRiqKGyqCIYf6uqa1wzTWySBUrX/ZAa+5MVYiDVoyOJ/PazK5frEzl8hK\n+RU68TBIMsD87fwWiHBSdjKZrLxcWsDO9lA2fRScgEvfNNrer6k/Ux73rt2paRl954n6UPcE6lp5\n5G8akmfuhoxl8vx18j7JNmVV/KWH7aP2pD+pjYknrfRMhVyc2HXEyxvhs2loIibqzAulBHw0LC58\nLItD7qprJfVzXdjJPjPO5F6Ag3kCWDdQLY9Bda5TlmQ0ldcVLqW0Xn9MoZMRovHhvZYRdv4S5DXk\ndWAc47Eb1uQxsg/8mu8arqoVEFWcW0mAruE219cnAzM29yCj0HXdEOKRI6HNRtosw2Ngu64b1q37\nCYr+20GJvPB5Gj8CYwyY3Vgnudd/LzPJsOhJOEDnivV6+KSqhnX4kiWWm+SJ4c2xthJfnLfJqfDR\nXCvd+aQovQKPYzFRKRNDvVN9Y5A6yL+prAJq0uV08L2Mes6F2+lheW4g9BzjZylRIDx8QWXi8jE/\nQ0Z0JGCkB+ttkOAncGl5iqQ7tUWJoJLa7kZS7UphtqQIzJvi5W4Y/H9KDjY+ieXt0vG6VTXEsTmf\nohAeDWYyng5mTg/7Q0B0cXExrHjhhiM3SCk0Rf4RyJL8eBn6zzXVzjd3SDjCYNnMx/DSGKAqrHJ5\neTnMHxwcHNTx8fEQfpFRS5uHfDKTc3IyfI5HctT83a2izXnO/97PyYm4TXomAH1dcg+KFsvBht4C\nJ2q6bvWMdNbN3aFOE4VIZZB292Rv25bWfwq50+ICkLxIppZxGUstgGNdLJNluyJ6uS2jktqpspOR\nUEqemf/WQVnr2ut0tXiWJkmZeJIePT8BDb1xtY/7KFrArvpcF5wf+qTQjceF6akSDKtubq+nwXU6\nyEcCMq87YCW+67p46Lx2Gadx07MMcx0fH9fZ2Vnt7+/XcrkcdoT2fb8SP1ed4hP1jM6D+sadG5+c\nVRSA/CUvUrudVy77NDBj6c5DLhxGJFDS9aqbbyjhZBTv81rqAIZJvB7FHf3kP6el1R4KVwLmludF\nwExx02TN6TVx4oWGLyUHZ6ePXlvLsLTAORkc8TwBubdRz+o/z0FJxiUBXGqLtyH9dx61rrnBb/Gb\n/xXL5bp00uEhvpYurKOXsi4Dsbm5WbPZrLa3t4ft7K3yCawyRn3frxyYxf5NspwcDraJI2zvx+QU\neV0t2Zbecpni2dnZcOCW+J9kNPFf/ab/Am6Fr8hntYttopNIrOLplWyf89XnA6TnY+nOPXTvGF96\npu90Td9p6MayyRQlCr6ssE6qa3mDTis7jnXfxgN1oWS+FK9u8cCvu/K495PANtHHb2+beOllkO6W\nZ838AuxWkvDTcLnSjRkuluNtdJmgrDh/Wg4DDXBL7qTo2mik94LqtXHsjxRGYnm83jI49MDF3+3t\n7drY2KhHjx7VxsbGyuvU6LiIzwonuGy5LI6FRjyPOyGt58RPOmIOZHRedF9grXYdHR0NR+AqzML2\nOhg7XdQ9XaeHrusbGxvD7k29E1Qg7qHYseOSmdJ8VyozpTtf5VJ1MxZLK+7KkwBMjOVQNj3nQKt6\nFW/XutkEpqKNMf7k6eq51uRsaktSFgmxW38vk0KXlKf10XOt+tkXXqeHhVhW1erhT6LdjVRrBOQ0\nJOVnbLbVHoYXkuJQiV0m9J3ALhm+5HGRf5Rnnfgp7033EoB6mMT50AJ1yZ9kRyEMnfOys7Oz4ry0\njJLocr7xt8uC9zHBiTzzvhaPudjA9a2lF/xoRctisRiOPeDZ8C4rPikqOnm2DHEmyZxCZjLQ7tjQ\naKuNrnPscz4rmn2U2kp3HnLRt1tidiBBlpbVO4hl6TcFmqfIJdBlTM07sermG9JZr36zI6iQ7Jw0\nKZUmedhedjbXznp9HOG0PDuCJWlObWm1c11yRfVr4kGa/3C6vc/H6mQbud3e26vn3RPSMwQAyp97\nWYlG56GAp6pqsVjU1dXVsCW/769jxkn5qcwsPyk2HSTG5AU4e3t7NZlM6sGDB8NSSh5ToLzUFY/r\n6xl3TKpuvnyG7SbfPBatetwo8r6/5YfnrZyentZ8Pq/z8/MBxI+OjoZNRIqbJ4eGdXGimmEp6q3a\nx0lRLrpQ33BEqfL9SABvq/
cj+991qZXWAnrXdT9XVX+9ql7r+/6vPL32rqr65ap6sapeqqpP9H1/\n8PTep6rqk1V1UVU/1vf9p1tlu7fjCp2sOi00DYALftXNpVCbm5s3YmiqR4qr/xRgxvlpxWmE1oFc\nAjYKV/KGEiC6p5piau4pOh00WuJTAlPW7b89eR+xjBaou7eTynYBbgm/P9OSLbU30U8aU5sIUCpH\nBpZtpQJ6eVrlopMQJZdcaZHkxPkxxjPRpg1GkhWFex48eFCLxaImk8mwg3E+n98YffqokP+dz+5R\nuvGjLHv/0RumDriXy7K0UuXq6mqYAD09Pa2jo6PhJc+LxWKInSdniXykXLWeFXB7aMy347vTpOjB\n2dlZjLd7f+o/PynEmdL6ZRlVP19V/5Zd+4mq+kzf999ZVb9VVZ962pC/XFWfqKrvqqp/p6r+frcG\n6RwcW5bUgUDX3Stmg+mxSnAcSPxDIUqgw+cSTQ56qezURtHvI4SWcrsyJa9uHS/9fwKPFtB7SqBD\nuhKNTifvuyFw3rsMjNWb7jntLV4luum9+/NjskzHQbtHOVE6pgMtHqSPaJH8y5uUt67Du7a2tgbD\n0nWr67BbckU5Tfwhb0RD8uj1LPlSdXOJbUtnFLLSGTVa679YLIYwi0Ye63iW+owLI0ivT2y7c6Qy\nOQ/hvEzn6LT6PunKWFrrofd9/0+7rnvRLv9gVX3/09+/UFW/XU9A/m9U1S/1fX9RVS91Xfe5qvpI\nVf1ho+wbCuzA4Yqt37SKBHROqEg4uGmCAsmOVEqhjDSc8gOZxoCDMV9/nm1wehRr5T2OEFgfPRv3\nuD2skYDTy+N/tTm1y695Pjd8Xo/z28vTmScOmuyf2xiplM9HdG4klDedC9R1N9fNJ4OoNpJnfd/X\ncrkcjr6V58wRJOtMk+Ru7FI7COJd1w0vr97b2xu8xcViUVU1vNCBoEzZYQhR/OQyX3qtaqOHp5Lx\n03UuD2SdkgGVq1CQjsDVG4d09O3JycnwOjnRlY7m9X7yUZXrjUJCCacE0JRTnnDp0QSGcDiPp3s8\n90U887mIVvp6Y+gv9H3/2tMKX+267oWn17+1qv4Az73y9Nra1LKWSklglC8pLxWOAul16b+eU/K6\nCTCu+N4Oz+9tTN7BGPCpQxPYsmwHRIG/QNFpHEs0AMnLSLR6X7jSp+fcYCWjt45e8shTcgbSqMDn\nZFwWEh8cbKvGt4jzmrai931fu7u71XXXk/KJr7fhQ3rWAW0ymdTOzs5wANZ8Pq+u61bOAU+es9dN\nUFK73PmisaXhpnFQ3tbmN9Z7dXU1jGy0jf/s7KyOj49X3jREg6S+TbKZ6NWzfl5Tkn0lH/lzMpgT\n30oe2mU7PVzqtKY5HKa/qEnR8XFAI7UY5ACt5IxNz7JzxBx1roZLypdiZvKM5NW3rHGavfcwDelm\n23zlQqJ/zANrgc46Piu1vBSvtwXmLWNIDyTR0FJSKp0DByckqaTu/fjEGZXZ2+xGJPWF081wGIFI\ndflcjpfhRlpDeh0cxZUyyUlp0Zb4rGf13z1fxdZns9mwfNLPTVdyWRXvSZPXmfTU+5m0CqiouyyH\ndYlfAnaFWpbL5RDCcuNBnSMvkgEhz7iDV/MbxBHXDe9Dl5FUl0cKkjPH/+8UoL/Wdd17+r5/reu6\n91bVV59ef6Wq3o/n3vf0Wky/8zu/M/z+4Ac/WC+++CSy0/JsqFRV2UtIed1wuFKTiRxStgCN5Sbl\npgByrS7ve7jCAdUVkgaDFr7lcbCdvt2b9FMJnd8sV8m9W9LZCsu49+H9onvkZcvbJn28xvqTt6jr\nvvog9bGXTR45uCalc1rTdXmbVdfrl6fT6YpX6Ou8WwZ/DJhUFunXRqOu6+rRo0dD6EWHh1E/+v46\nHMAQgfpLvGTfen+785TALp374rxSKEWTn5oI1bcfY+t1eUx8DIRZltMlo1K1umSVZ6CTj+SL677r\nLvtbz2jFDvuylW4L6N3Tj9JvVNUPV9VPV9UPVdWv4/ovdl33M/Uk1PLtVfVHrUI/9rGPrTSs5dnx\nvitSAn7lU2KHKCUBZH0S0hSqcMVi3YkmT+xQP6eEZUlI9F/g3lrHqrZyc5QELPGAgOZxdzcoHO24\nd5y85uQpkjdU8NSHLWD0NrAtaQ+Bh0D4vNedDLaD/bpJa+ZNbeKIgV66wmLutKQyEk9Sasm15E5e\nul7XOJ1OB+AcA2p3iLz9yblIG8icj5Rvf04TnPLOBeya/GxhBw2Qh5LGeMm+0DMMn5BGjQqoI84T\nr5fXWgswVO+DBw+GVwtubGzUq6++Gmmuut2yxX9UVR+tqm/quu7lqvrJqvovq+p/6rruk1X1xXqy\nsqX6vv9s13W/UlWfrarzqvrRfgTdCD60/kpiEk+gM9qi0CqvlEfLlrwMf57K+rQ9AxNdgdkR3sSW\nt0wB03Oqh6l1n55A1bWAtcI3t7kuAeW7Vr1uehzM794FDS9jkaxrrI0JMFIopmUExr5b9TtvWgZI\nyedvWkcTkMaWEyLZn8/nw0mMW1tbN/iY2uqx+7FET73v+5WJ0tlsVpeXl8NSRnqg7BPl5ft5Hex9\nHbrvylQ5et7nxVSXyuAkrYBba8tPTk7q4OAg1kMnSP1FfulZjl65Bl3/fV6Fc1KMDrixp2PB86TS\nkme23R0ql8WWoWe6zSqXv9W49dcaz/9UVf3UunLxfFRQpXX3VIY+rRAH8ziw+uyzW+7kSeh6msQQ\n6CaD0VJ2jy0y7xhI+fXWcK7FN9KXPFIHIS/DaVBK4RfmdV56PyXAJa1Of9XNkAi/U/kOzqnOsbJc\nJpJSit7Ud6JBE5TL5bJms1n1/ephUTpHxWVyDMxTf0k3BMYC9Kurq9rd3R1eLK31237eTCp3jAds\nO3XMHSEabiXxpe/7YSWLJkD5Cjk9mzx0B8oUVqR80gnxeQDKKQHd9V+J7W3pM40ajU4rrQPzqjve\nKUovjMypuo67OZiQGfo/JlQuiIxRqn4pjEYMoonDYt13kBYtyapSUH0pWAIAJR45yqTnHLSdV35k\naQIglpmA3w2O87bVB3qGmzDcu24Bk5fLssUXB2JXjtTnXk7iBelk+W7sOTpyLy2VLXoTiOl5yZgm\n+sQ/LtdzMHKeUR7SdT7P0bBCL/5J8wXK5/znvIToVb3J4fHfvhJHZcpD16oWhVp49C2fd0+Z9SbH\nIvWvr4ihbFCnxQdOjnKuy6MDvO/Goe/7G+WwDylLf1Ex9Hc0qZFcYUIhIRMSw3mdCudgmUBeHoBf\np3fiE0SuwH5WMsuuWvWOWEYr/OLeMulQW5XHvQcXbgq586/lOXrfOH1ebjIIKY7ORDocSL1OHzb7\nNnDV4XsTUjtaMuHOhPNF31wv7EBJg+fGXv3NEKOSPNHFYlE7OzvVdd2wTtyXzyWgYnucfjfCNIpV\nNYR4Hjx4UMvlsrquG7bP65RCGRfW5W1neCjJvctpAldd18tAFC8/PDys/f394VhcrWp
h3ckTZpm8\n7qFI8liJTpE2XzEGL/ml4VH5vkzYr5FvfGlGMnT6n5b/pvRMAboLiAtqVZ7M4vWWoCRrp+TMJ036\n7d8thW+lVjw7JS+zRUe6x7a2Voq4x95KBD/nuQNiq5/SUJj3WRbrTQClUBYnaUmTe4NJFsba6vWt\nu9+SxdSGlmyKbt+F2FoAkHj9tSQHOMWLFb/n7lEZGndUmD/xhbKYnI9EP8shH+iV8zRFn+h32XTj\n0ZI3tYejIQIqAT9NEKdVbpRPp8fbPNavSYeeaUBXI2iBJGAt0GbjfZkQPUYemOMAxw5k2VUVLaas\nsU+A6FvCxclbgpCEgvT7EM9X06g+XeeyLiWfZCGP2Cbygh4k62dfkH4+Tx4QdLhTzj0l70N5qvJ2\n6NV53FG0K4mHUnaV56sRRL+ui2aG0xyIvH2iY8zZSENjyqUDiIfA2B55ekdHR8O6cL2cw1c1tQzF\nmCFjvfL82Z7Ly8t6+PBhbWxsDOegyIOUIaVRpa5RPrsuvwRGvHLe0ztWyEm7VrWF//j4uI6PjweA\n56oSp8lH8KlfnW/qG8oUn9NowEfZVTWEqMijtEqHB8UxlEtsSo6NEvk3lu7cQ6dCq7FVqxMlVEp2\npBIFmWW5EPlkpQ/xpTQECFdwDn9TSlbWBUTfLJ984D3yJNXhgM78zLcuButejgO8jzDYBvHGwSV5\nL6mPvS6Cz5jHR3B1ME/leDvJZ+Z1r4nPJMPhsqj/aWJY9xz0qmoALIGI4tt8NZr6InmeSSe8Duej\nypxOp7W9vV1939fDhw+Hw7q4LJBy5fMj5Bc34KQ+ZHvEL20SWiwWw6QsT07UQVyi108VZf96v7j8\n0ZN2eRFdpI3fxCMaPIG2GxKGnBR24SoYyq6De6I3GWumOwd0J1zL57wBHnN2zy8pL5VRneQnwLUA\njRNRDi4t73OsfX7N6fPJplZepmRU3NNVOYzzrwPzVjtYh+pPhsQVxtvVinf6HEGiwfO1QjrM0zJW\nbJf3cQrfjPV5MioEH15XGxNdklN5opILb29LuZ2OMb7ww3g9Qy8bGxsrXjododSHpC05SwQ5JY1O\nOPkpgNdacz+73T19X1rr/ZR022mjDiYccX4xfs6+YX2UZRohlZv6jh8+721O6c4BnWGRNFyngvkw\ni/kSeIhhNBI0FBQSlUWLSstL4HWB4uhBAktrTlpIB+ltxYRJg+rSs9qQ4mU7kHRddyO/exEUSo5O\nyONW36kON3psH3nroMkylIc8TiDrfeMxzsTPBC6tttIwuXdKr83brskzemGUFR1n65NcusYXF2v1\nlbx0Hy0qeXzb5d9/Mw9Dgdvb29V13QCsk8lkWDBweno61Kk3IPGccRoE8YtHAfPIDPYNgVzLEwXm\nR0dHQ/zcR9OUXb6U2T/JYaJDw/ZX1Y3X87VCdjwv3vm6sbExxPnn8/mwgYvtIE2sL3nlDAc98x56\ny0t0T2NMQd0zSt5Sem6MHj1Pg7GuzLGUnnNPOdHiBsfbn/ImL43KnvI5ENzW+2vxwRWL+QmW/u38\nIF0Otqmv17V53bOpLa2237bvk1eWDCQdF57xovhrGmHy+7b06FkaMgHRdDodjtdVLH86na54ogJR\nHRXA8lpt9vAM28sjcLn7s7UL1Ns7Vndqd+LVmPFL3nLKm+7TEU0brVo0jOnnWLrzSdEWICVA945t\nreKouhljo+cs5ialouDSMosGt6op/upemXuAtNAS6nRuB0++o4eRYmyctKKAcfTgiuA0kVaWIX4l\n74G8Eo9aid4wn2e7nC565ZxsShObCfjp4STPXzQ4+LPPUn+Tt6qv769DhtzKT1lyXiRQurq6GsCt\nqoZjdv1EQtXtO4YdkDzu7/0v+VCIRSOFjY2NOjk5GWiTbmxtbdXm5ubKYXfsS8XIz8/PVzzzvn+y\nRJiTvfLOj4+Pa7lc1uPHj+vw8PCGvrszoI+PEp2XqlvXeI+6Q10k9vjIQ8+ojWy/ViqRZsnfcrlc\nOZv3BVwAACAASURBVHKb/adrLRpcFsfSnQO6kisMAYhK4dfcOrPTfZhGZhOIfdKSgkJFEqi4Yrtl\n5VEDDgqtCQ+Pk7oCEwgdnF3IHWgS+CYQd6/alaBVJ+vT/cSb5OmwHP5P/E1K7b/HnlMIoGWcONfi\ndDlvvO+S16h84i2X2ume/9Z9bXOXl9z31285kowRbNhu5y/LT/0oZ0fhoq2trdrZ2amqqueff344\n4+X4+Liurq6G19fJeyZtdKQmk8lgJOhoKYyjFzefnp7WYrEYPHIZCsbt05lKHjokyE8mk5XwifcL\nV+XQixY/HQPoYTuPHWcoEzS6LqPikeMJjb3L1dhL1auegZALkys/lUkNpPK08imPK6SXNZbfy/eO\n8G8l71hed+Hy5ADFMpJ34eU7LaSPAuJtGcuTeOJ1e1kO9q08TGP30rNUYpXvoyJvSwukvW6XoRa/\nXMaYP7UnGa5WHyjs0nXdjeN1b5M/1T0m2zTqAnfFfq+uroYNSFWrB1cR9JKh52hMSfk1+cmQS1qD\nn/qS/9lG99jH9DyNsPx+y4gnnro+kjct3WxhQjJCt0nPFKCTCbRa+i/h5mTEmKDK0moYzAOFUlxO\nllTJhTX9rqqV4Zg8QQqLdtrRO6QSuYdLekgLQ0bigerUBBp5yGG+H2nA8inMHg5p9Y/azesOhFXX\na6jJL/WN2pJCPz7B6XznZBbDNirf+ae6RLcDu/izTvGo2PS60uS0rpH3lGn3MH2vgbxCLSHUmeV+\nThDbkY6poC61nCZek5fedU+O151Op0NIROvTq1Yn5UUX+4WyTT5rB+p8Pq+zs7M6ODioN998czge\nl0fTitfppc3sG1/7L9kjz0kHccZ/931fe3t7NZ1Oh9HIZHK9J8R1XRPd2oil0AudDJ//YBkqn210\nXFDd/v5ST3cK6C0F8pS84XWWkh3FPG7l0/UxjytZTnZMy6q2PMYx+v26C2LiUevamKezrhzS1YrT\nJk8l0Z88Hy/na5EJL5+//RkH5FaZX2tKNLTqGavDPUBdUyxa4ZeqWjn+wMHta6U9ORgqX29R2t7e\nru3t7SHuzbCAe5l0NPThzk9fkqizzNNxHet4to7XY7pMJ8np1QhJOteSLToydDC+Vg+bIZ1Ux5jO\nMt05oKf/3hEEMjaydYhV1ep7CMkkF0QHScZRHai901mGe18JTESP/rsAKDG2R96kTQlKSbD8zBgX\nQvdUSbd+J7pabSR/mIcereKsipM6ECRAp5JT4eQhEahTHh91cIet19PyfJPR91CC89hHFlWrrx/z\n1OpDLXeTN8hYtfMu0a/yXIZ13R0RvQBDMfXJZFLPPfdcnZ2d1XQ6rf39/WHSVInAyD4RnYqPz+fz\nYSni/v7+cI1g7g4c2+l6J76Rp5IPlud8ki5xtMP8ej0gR7S+wYvGljTxWmsTEelVP/MtaRyF87ev\n4fd055OiZL6ShwT0Tcvv4OLWnWDIs44Zcn
HgcKOha2LmZDIZhpmpfgmIFE51ElR9CMr/BAABrpID\npfITLOn9Kr8DsHsl5BkBy++LBy74BB5dayUpt8JmbuTYdzw+lqES8kP1pr5kXW60ObFIJWP4h/zl\nRKnzvepavpIn6YZI+VK73Njzc3FxMRzepT6kI+FeIZODiPcheSb+qDyNCHi87unpaU0mk2HNvPLS\nw1TfqX/m8/mwokU7Pw8ODoZ16OpDgZbTmXjj9JP/vreCxoXeMMNeifdcqaRdvKxTRo80sB4estZy\neLyNPikqepPj6unOY+ju9SQwSfc9pfzu7biFvC1dCTBum9xbSv99U4zTkMrT/WT1vSxXAublfa/T\nn0k00RD4fQfAVrtIVzKqnpLxT8+4h+eeXSrT29vizVi/Oqglr8zl3Gnz8qTUisfzbBDS7vkTz8ZA\nXwZCRkbgvrW1NYRctra2Bnq8XuelgIinJ2oClKtZxmhPsptob7VXz5DPnEB3OfEykvPTwinP4zQm\nQHd6k2x5X7fSnXvoSu5dcjKBw+V0MJDyaDKy6np3qO7TO+U11Z0Ug8MfXXNvjLRSEdJkn5dXVcNm\njvRCAe9ACUNqA1cU8EW5XDqm5+lNi355WP76OtJJPornNB4UeO8nF0Q3JgIpelUOpk5X3/cra3up\nNB53pOfpspO+lc/bznukQzzUxJjy+TJT9p0vQdRmHQccTojN5/PhdXHiMyfQWss9W7x33jlwKfQi\nvgnUT05OVs4x4SiWZRG0dRSuDiBTmTTiCdTVd6qHowLSnsKluk5+cl055VP8Y58lZ0OhQ/1mfpcH\nRgd8U5jqY5vUHsXwq54YVX9DVCs9Ey+4ILAI2Hit6uZkhjM7bcV1a+i//VmBof63LGIKkwg4/XV5\n7GA9zzKurq6GDSQe92eIg2vbWQbjzgLrMXATTc4TPsfwAulm/DEZJ/LNT5xzYHNjTJo9fKN8zgOX\nAdXtYC7lSLxx/ul6y8h7XUoEJ9KVRkvih69rVjiv71c34Jyenq6EEHwHqR9XQfDyPnYZSG3migv9\n397ervPz85pMJrW3t1dVVYvFYugvByvJgI67PTg4qMViUYeHh7VYLFbCHuQFgc31k86L087nyGOC\ntGgU/7Q5ijqqMInjj/cd+esyRGdS9/0EWX5zEpqbkwj2Y3jEdOceuns6TrArjufnbyqPg3nK4/Ws\nq6OVr1XXusS2eRkJ6Jwm3k8eZmpDS6l9Yum2v52e1rV1fUhZaKVUT+u59Cy9SN7zetf1eXpmXftU\nR+IR/7dkVkZJYKPYrLxfevW3oX8ssSx9c3keQy++4qXq2rnSMkStaNFyRXr2SpxT8Mlmp6t1zw2X\nA/o6UJRB5Eg+5XGDzchCosUnnW/TP96Wlmx4ulNAdwukziKD6JEm8KfQtTxXCo97/2S+G5jUaayL\nE6CiUysBeDARafW6eZ3esBsnH8p6W1hWEnoCi4ZvXO1B3okerq8nWKjffJlVixbGW50ettPlgDLC\nZwkgHDLrw/6mUpPP7F/PS57TG/P6kufr8sf+dHBXO9PoyyeANZLTsj/xZTab1Ww2u8ErN1rsI/Jb\nzxAE+TwPvtrd3a3ZbFYXFxe1vb298kYjtlsHbckj15nmBPYUEuSIw+Wz71dfNOH6Lw9YvNY1ts9l\nvLXaSbLPfQTEgIuLi6E+7XqtqgELtra2hme5MEH9xj0pSQ+4Hp3PcZTRSnc+KVo17k07ELq36RY4\nWbRUPv+74HgeggJP0yOdLcFw8GAdLV64Ujk/SGsCwbFE0B3zuqvyhCO9lRRrbPGCYNNqjxuDsVHH\nujbyt+f3mK179AT3sfBFq8/TqKT1/FgbnG4aCMkgZTG15+146+5kCUx0WNdsNqutra2VNwxprbmu\nCcR1Tfn1nGjl5Kj6R/MEXXc9v+Lylwyz0m09Ye8vhnRasp6iAS259+veP44NLby4TXueCUAnA52Z\ntMKJ8Q5Qnj+BnRsIt5y8X7U6GTmbzYYtyk6nx/vds0406Hkl99Td05XXpOTzEOQPaXEhIVDJk6CB\nYP1eNvnKNqRYvbdzTDjdUImGlMYMTQJd8ZBtpDdPOpMhS/KgxJh/yxjTS2zRyX6iYUm8UchluVwO\nk6Rcbki5cGOV2pPopTyr7I2NjdrZ2anJZFKPHj2q+XxeVVUnJyfDAVR6QYWWKOqcFtG+tbU1ALr4\nTSN7eXk5HDuga34QWNL1ljFrORmpb6Tnzn/Nr+kZes10+Dyfb1xS+9hu5mMIrSXHY2ktoHdd93NV\n9der6rW+7//K02s/WVX/cVV99eljf7fv+3/89N6nquqTVXVRVT/W9/2nW2X7agsqmRrK9a18nqGa\nq6vrFwL4yWl6XvnVAR5/FIMZjpDyScDScIcCwHIk/By28wCkRG/LAxGvJDQ+scq6PURQtbrKhxNx\n5LePOPjN376W1/uSfCaAsUzxRu3hK8WqanjrfFV+azqVn3KSJnWV+IwDiPPTZSfxksm9RF5zcF7n\nebGcVKZ4IIDkeSuUBz+YKnmJrfoI/C7zV1dXtbOzM6zI4YFdOvJW4H50dFQHBwfDEsVUto+s2ec8\njoEgyH5SvzqQjvGXfawVRinMq3weMmOITHMLblyIHbymZ1ie+DCfzwedoKHQfU6YttJtPPSfr6r/\ntqr+oV3/e33f/z1e6Lruu6rqE1X1XVX1vqr6TNd1f6lvmJWWF+K/U0epY/U/WWzWwbzMk7wWFzCV\nK4YSMFheUlh6mOzIZH2T1+RecvI01lntlMbyJMBiSh5kSv5My5Pi818L/d6HSY7IayqR97mXR6W/\nrXeUaLxNSv3ees5juVrT3XXdsD488fxr4S3rcxoFMPK0FcOX1ypQZ6zceZ1GXf4MaRdwV60uz00j\n9DHZpSG4jffrPEt1JGMtHtEg+OhChpLOquu4ymzpS0prAb3v+3/add2L4VaSkB+sql/q+/6iql7q\nuu5zVfWRqvrDVDa95ORtqlFkCBunCRsxww+oIsBX3ZxU9TXlCdjZUT6jnyyvHxrlS7PSpJt7Bz5s\nbwE6O5nDRd73drSANBm99DwVqQWi5GFaW19VN66PeZNJwcVHbscmD1LYxI0r28NRodrnSx3Vjwk4\nnF8CLo5GWC+ThxxdplgvJ+HkGWuiUuuVxQPKUwvUx/qPiZvfVMfe3l49evSouq6r119/ffDODw8P\nh4O8fBS+sXF9fC756PJP/rGfdZ+7sVv8F1DqHpcGsu9Vn4Mu9YZ6qf0Gek5zB4r56xm1gUd2kJaq\nGs6xcQxk3/vcwlh6OzH0v9113X9QVf+8qn687/uDqvrWqvoDPPPK02sxOXAwbuqC1oqXtSbufAbf\nn1VH+DBIHeUASEGjktCLczD35xy0WCZpT94VDUkCJim9Qhjknfjn9DI/jUjLm6Px8TIdMJIAkgcO\ndgnEEh/ZVqchAT9Bg/S5cXfayRMqIq/RWJNWB/ykrK2jEtwbY/nJoEhmdT6OhwdcFzyldrfu0wkR\nL
Qr58MRBHoXLMpLsyQlSmR6ucx1Sm93w67fPiThA0khQB+kY6r/6yfVUzzpviRM0Dokml/Xk0LiT\n1TLKTF8voP/9qvrP+77vu677L6rqv66q/+jrKciZXXWTAWxYq8EtxfA6eJ+7IlUvBc6HaK26uQ2b\n91rKkoDd84oeF1zlZ/vG6tI9gk7L2LCcxM/bAILTRXpodFqeIQ1vMm7kn363ACvxpNU+XeNzVGLK\nQGpb1fVOZk+M0XuY0BPLcyPMUaee1SRk3/fDiYjiiUYebHOr/euAQvSIRwKt7e3t6vt+CPfIM+c7\nSJNx8bq1coablAiujEm7LvqIkbzic0mufCSj8kQvlxaSh0kuKIe+4zrxWHn9zCXdU1vckRxLXxeg\n933/Ov7+g6r6357+fqWq3o9773t6Labf/d3fHX6///3vrw984AOs44b16/vVdbFJqX34yrzJQ9Oz\n1r4b36648izYmdyqm2J8CVDd+xRt3nnJ6ybgK2anA4T4rHih513RWkbTlSR5DEqc3GQZ9BjZJ2x3\n4pHzgH3IHX7kiwu79yvf8rPOC0oA7vwaq5PAQR4KrOSVii4HCq1n7vt+WNHC/iCdiqFr/bcApTXC\nSE7KOkBnf/F8F+6APDs7q+VyubJEsaUrST/VbuqP3q2qcIlkvWr1FXfkC50GLgLQs1zNRhlSv2ku\nwB0q7twVT9gutdvpcf6JH5QFjbAUnqHMHB8fD6uJ/qIAvSvEzLuue2/f968+/fvvVtX/8/T3b1TV\nL3Zd9zP1JNTy7VX1R61Cv+/7vq+qMjgoialu5V0R+f2UxpUy/PdtPDR6Nyq/NVR2gG7dJ7ASwDxR\nYdO9ZHRS25W4asfr4O/beGqeHHydZ15nmpz09jldfr9FQ+Lnbdp023a3PN6UUpiwFS4g/frt3mGL\nVoKQluASyNaFp/SdHIFk0Jwf2uK/XC4HQGc4ynXJ5Zd95qMS8pF0EKBdT1zmeH1sQnZdjJr32V/b\n29srq7/4DOlJekus8JCw8GI2m9Xzzz9fVU/A/ytf+UqTxtssW/xHVfXRqvqmruterqqfrKqPdV33\n4aq6qqqXquo/eUrkZ7uu+5Wq+mxVnVfVj/aJs0+TCxM7xmO67p2QUWOhA15nGVQqfhNIVQ8nT33i\njPTx40NjdhDbJWVMwODt9Wv0dhJAULh4ZgUFiArH0Y/uOQB7Pfp2b1v5khHx+GISfJWb8qd5E+eb\n84L0et7UpkSD08P4v+eVF+a8SHMCDNMQxOipJZ47gMzn89rc3Kzd3d1hXbrKcgfBZTSFKMg/1y3W\nf3l5WYvFok5OToYdoWrrbDa7YWCc/9QBluttZd3ytG8D6GwrZdTl2501xwx/V7DkQSMVGTLvZ81z\nuGPpMpJWwmihR8u4e7rNKpe/FS7//MjzP1VVP7W25uvnbygCvWh6fS2PiNeT55eMQ5rc82dJYwLw\nloGhodF1B+1kTPxe1epBXV4HhZkC5HxNHh3LSrSzzORZON8Tz9bl9/80OFRgz+OxbfJK95MhIG+c\n/gQCqa1eRqKX+R1AUgjJJ+Ba/cY2sgzxWfH0yWSy8tpFruFWuR7m8zb5Ne9rd1IEbh7jdr0lGDJW\nrjI5Uc4ylYfn5Dsd1AVvC40a+0HPjjkJruceStVOWA83Ug44GSv5dGcmjdx9RLduFHHnh3M5we6h\n0TtwsE1KzfwJDJO3to5JLLcVTkkGx4E/tXUM5P0+29oC0nXXW6DlBoqAQ1qSgrfqbNHU6i9vWwJh\n0lJ1c+I4Aaordau/E2gloE5tSrwZ41ULNNM1d04SEGsOZ7lc1sbGxnCiIY+U9mOAnb4xA83EMMv+\n/n49fvy4XnvttTo+Ph4MCYHe9c6NYwqRpY+8cn9GbUl94t/eRo6cWs4Fy9d/tW8ymQybvHhaqhte\n9qP6y5087w86szQCY+nOAb3l6fBa1WoYwQWa12glfULOy9HzHErxOR9qy0PwRKvv1psC4MKdjEQL\nQAhcbq2Tx8KyCIxVq29zIdhVra4Pb3k9ThP56jzRUJ70tgCdefVNRSItzlf2qfeh0+N1d11+u5Py\nt7xq0dKK8fK3AwL7xJfGjRk8gjIPefIJdL1CjUc6p7BKCzy9PfQYtTRxf3+/Xn755fryl79cn/vc\n52p/f3/Fm2YolODkbXK6ONrsum5YEun8cX4mYG7xUx9O5FN23BmQrHE+gnNiBFuer+MGgNe4C1oy\nqPoUbiE2KHQ1lu78naJsRPIOqERjHpQDU/KmfYJVz7FsvoDAQZ15GWP2T6LbBVvtdeDmdRqjBBBu\n8JwniVctg5BA1elLXiX/O3Cv67NEO9uY6nFaCKw+ZG15hsnweDyZeRJNSeFZpsq5zXNusFI9fJ73\n3BDIQeHxujIYLQfKdcjbreuKD+vNQ/P5vA4ODmp/f3/YRERgTuW5PqoPOanocurtdbqch36NqQXY\nNP76z9FQaoPo1bUx/WA+n1/zcC7703mRMJLpmTici8xwBt8mHKLE+JsrN+uqWj1hcd2RlK2OGlNs\n9zxSB7cmoVLZLWOn/BQ+Koq+fcfebYSPnnB6LvHWAcONnofBPN6dwF3f5Jfa40BAoEqKkBQ1eX4t\nJ4PtaN33Z5PhbYE4lV4enAOA10ueafJ7Pp/X1dXVytuqptPpjdhtCiG22qtJv+Pj41osFvXlL3+5\nXn755frSl75Uh4eHK6MVAqTz1WXQj8XVWvQx8G5dp97RORvTUfaHUppvSfVzNJG85wTK6b76iPKY\n2vlMh1w48111cyJJwteKG6aOciDgb8UTHUCoMFrHq3sULAK0h0kSGLTAmMqcQiXpWW9Tuk46xV8H\nVE+67p4VhZ3tIt1UWvE1bcTgSXkEdJXl15wup5/yQEOVlo6xnb7eOLWXNLTiqslrc3lMxi99e3n6\nJjAyzJIMc4rFanu9Do+qul5H7TLsH9IlHdTLKs7Pz2t/f78ODw/rC1/4Qr388sv16quv1tnZ2cAz\nTmpyR6WupclQ0iWAIyB7KNB57cbDvf7WpCdH7ex3lpf4RPmjE+k6z2s8eqAVxnMHg5O2qm8s3bmH\n7oJIRUuC5mBMUCH4Ugk8xqm6CEpJuDgzTqa6gUmjAQKFd5i3l20Z8w6dZ248UplUJvKPz7McluW/\n9T/F80mXgNFHBuIdaaABULnpyNHUNuZhf/mzDrDuYXl/pHqSMU39xZCBl8ty3Jh4HS7nDhQJ/Emf\n2qbDsrquq+3t7RvglIwP6ZGR5tuHFGZ544036uTkZNjMloy164rLmtqRdoDSe+amIDdkidcMxbkO\n89tlz/vfV9x03fVRwm5kWafrhfNbNFJuSTcNFfXkmQb0llC5gCeL7GBHAHEQT15e8pDEwDTEZUfx\nN0E/AZwbKW+7t4HJh4z+m/mTV0rl4THEY3xMQNJqiyun153Anu1seU40gKn8tLzvNjQlA+H0OSC7\n9+bfqd2JD6zb5Z797oc4OZ2pPoYMGS/XMkb1Pd+ko3KTLE
i+5ZlrnfnBwUGdnJzUSy+9VK+//nq9\n9NJLtb+/PxiMrlt9EQXBykNUznt5uu700BGi1+4bp7we9mXVzQ1LaV13+u8jV+eR+FxVK4cDel+n\nD8NCSpxo1fyH+kHXxtIz4aEnj8YnGyhkTC6MyQMlgPhse8tTSvX55EULUNg2Nw4JyFsCo45tKbbz\nLJXJey2A9TLcuLrhaHkiVLBEp49w1EbfdDIGvE5r2pTTalvyDp0nHGo7qPtvPedhM+e3Ow/JkI+B\nNssjz13mfNJfHp0mSAXOKZTEbwEHQeXs7Kzm83kdHR3V48eP64033hgmQvt+NQbsDlqSCW8fQc7z\nJYfMJ09ddsgPdybolHnoT884XZ6XdHg4hP2VdIF10mlTHT4f5p+xdKeAnsDKgTUJiD+n/x5vSkbi\nNh5WSygpdOoonj2hOjg8bAlyMgBsV+JDAgZvY0vIEyC1gNL7wndEJh67gROPxoyOA1+KK3ocX8+m\nIxjSULfV7sR/v898bnBFGwGdbWdKwM52ers9HxWcDgn7uHVNYZeu6wZvfWtra6WP3HHguf/ayv/V\nr361vvCFL9Sbb75Zn//85+vNN9+so6Oj0Y1E5JnT1dLTFv9cZpOeVF0beBqtxFPihY/mvT+9/1y3\n1A5fhqh6nCf+rlOXC4a5NMnNHahj6U4B3d++sQ5gqlaH68yTlDGVkYZ+ybN06677rli02prsEMCn\n/K3zyj0lo5JCT3rWaWDbXHlaw/qW4XTPz8MybF8CSgdHXveJyGTM2MYEWGPg7G1hn5FPXo6XnxwI\ndxJcljha5HyNG96W0dJ/hUzkLPAeaUhD+KoaznlZLBbVdd0QeuGZ4myjliVeXl7WwcFBLRaL+uIX\nv1h/8id/Uq+99lp95Stfqfl8Pky8+gQuHRx3CpKBFA/07ZPb1DGCJvuC8zbeHpXjIM/7lAkfibv8\n+AjHwz0e52e/iD8u15QX9ZeOIP5XBtCTkrjS+zUCKYHJlasqe+FkevIyydhUFgXPFco73wWSAueA\nkzwB0kAQcJ7wOQ/xkG8UCFfElqC4IiVlIj/IJ6ffhZe8YZ5EQ+IZE8t02sbyUvGpZCkM4rLptLXq\nS14YZcEBnW3ifTdCDiru2RKw6PH5uen0VgkoFxcXw/ksjx8/rsePH9f+/v7KWS3J2eK3G2fy0Pd8\nJB5Ura7Wcl54GMTBm7Llq+q4B8Z5Rlq8b7Ublv3pRom65e1yPKNOcE7O+3AdmFc9A5OiLU8qNUq/\nHTg8JusA44cc0VMgsCUDo+eVUrxUddELYRv57fS75+n1ujfM52h8vD5vp3sn5JHKSnwWj8UnAp0f\nqJS8I99u7nz0OY/EExqC1E6m1nXviwTkiceJdgd+z+vPJTq8nwSkpO028i8eu3datXq+v4yyVqpo\ndMzlpPosFos6PDys+XxeX/ziF+utt96qP/3TP61XXnmljo6O4lHOoqPFfwIx9YVyouf0jlSFG1rz\nZu7pOs8p+yyDIxpPYyEXlc+JSWIOZUp5UzhH18kDlan5Cp2cSf2gjrXSnQM6OzopmXtKBOp1YFC1\nGitrMd9BXM/5hE9V3QBtVzBe86G9exctsCKAqrNZLxWexi8JkwN6Enz3KFh/Gqbyt9OU6tdvKSiN\nn9OWDJ2Hr6icyRgrDxXF5YjtYDnKz2+WS5nhOSmkmTLAcviM5Esy1ff9cPSt1+ltF/+cVsp34qF4\nzeG7+p7XT09P6/j4uA4PD+urX/1qvfHGGytLFJ1P/jsZH44M3dA5fT7KJABzJNYakbccmdQPTrcD\nOufEKEu+yoZ1rptX8H7hOeoEdF+V5vSmdOcbi8iIqpvKo+GWmOzDJj3LPCrHlT0phgsTwYIC0YpP\n6jkHbiprUjBv99hwyoGCbRHtCiFx+zV56e3y6+4x87mWQNGwOKB7Xhoo3k/1EZTSiEh1kwZXbncI\nktHzPhzju1JqQwJv8dbDeno+barzc4IS8LHN/qw7BpyUpz7J++u6bgAOLXE8Pz+v119/vf78z/98\nmAB966236vDwcIjpstzkpIjH1DF64+Srjyy67vq9qTT+HnJRXucBecplf/683r/qoUS1Q3Wen5/f\nkK/kjLC/qPP0xPXOURoAbjbS3IW/6MLLGUt3HkN3a0yw4zkUSVmVEph4fN2TnzuRJkGVlwLhnqAz\nmIaBgEkakyD6fcYhE0AnJRF9EhINjRk/d09JZXJHn5LHWN0w8Tnyx9vmdCevIz3D/mU/uqftipwm\n09zzY2o5FYkG/h7zmJIcJaPMpH5L8qrkcV+n0Q2Ye7QEDtKyWCxqf3+/5vN5felLX6o/+7M/q7fe\neqteffXVYXkidcQdGOeb0+i6Rp7Q+20ZVpYpnfB5IdYnDKAMk3btoHW9T3k5IZ36sdXf7JPJZDLs\n1HXjp/t6YTTrdsdTZbTSnYdckvK6xXYBYcfofxIixg+ZxsDEQyG0wm4wUgezfAdNAkvy6Lx96b4L\nptfNshO/fIKJnhvLqWovK01pjNfJsKa+S+3p+9XYbOpn5h0bYfDZJE9epoNukhWGpZKj4fn42zhw\nvQAAIABJREFUnzKWjIMDJsvUdTeEiSduRNXfpH0+n9fh4WEdHR3VG2+8UY8fP66jo6OVOK7zyMvX\ndYIQr8vzTWE86pk26Hh/VF2f+6LdmmPGPxlNOkNpBOO0yNi4Y+bX3JhSH/UMnT/JNEcjLdwgn595\nD52W30MkSZCktBRyDl8pLGmopd8ECZ/k0HfK7/QnAUg73sYmTm7j6ZEPDiD0eqpW3z+a6lJKgM3f\nCaw4YnLDS5pb7XKPRjxLQKV7+p2E2esSL1K4zb1Xp0l1u8fn5Xu/sz0pH8GPoxhe83IJVMyT5gLG\ngJy00BMUb9XeN954o1555ZXa39+vL3/5y/Xmm28Opzb60J8fl//Eg6rVuScdh8tRrI9CvX0EeTkm\nyidPm/3BsKfLHMvmNn7ijfLrvsfMSaeWfyr/xcXFEF7RPAvfIKVyLi8vh5dpK/SVQkRq/+bmZn3z\nN3/zDR4z3fnGIn27gCSPp+pmbF2McQ/NFUJ5fejnCp6sZLLyntxzaymUP98qM9XL5ySAaWUBJ2+8\nrhbtHmqh4LqykjYHRjfOeib1JcsioLlH2sqbgLaVRJM7CTTgaruDohtmB4CW10QD7BN6TG58SFML\nnPntoUDyxtvfddeHpUl3tN78zTffrLfeeqv29/eH89QF+DS8DAMyUd9Io8tDWi7o9Hk7FY4i78VX\nvZWJAO6AnvSPfUejldpF2SZv6cSx/ziSEKD7yaBcQuoToJSLrruOwT948OCG/DDd+cYiAhItGxun\nGWAyk0uWqm4u0eLKEAq8BNk7wEEhea6qR/dJS/KS9FwScAqFg6LXTSViHV6e7vl2Yjde7gE4D8Qb\nrldO9bAM5xUBVALPkRD57QajtfwtAQbbQqfAwUztSX2aAJttIL/9SFvnHWl1Y8HrpJEgpfK5gobl\nptEDyz89PV3ZD
cp26KOzWS4uLuro6KgWi8WwYUj/2e/c2aiJQjkTSppgVB4/Epe6rBc3pBEeDQdx\ngJsQBYCike/LJTAqv+sc+44GS3wkuG9tbdXGxsYAvqrfy+HOZfHe9fb09HTFaJ2enq7wmm2RLCrW\n/+53v7sePXpU73vf+2os3fkql6qbw2+f7EggOfZf11woyHgHPoYfdE2JQ3gZBIE4jYR3ctX1MZ4E\noRSnT8YleRRuCBIgOg1evscDXelYJ/sheWW6TgViXhpVr7cFrK6QqU9TOE7JX5pCD4j1OAi3eNYy\n7myLeENwboXZyEsaDeZh3ycP3/nCMhkbFm1V16s+zs7O6vj4uJbL5bAc8fXXX6+Dg4NaLpcrb/Fx\nR0fyIEBVuMFXVxFkSSPXzEv/GFtX3ZRT1etyTHr42w0p6/N+InbQQHH+QuXz8C3nvcsJDajqq6ph\ndKS3Ssk4clGC+n86ndbOzk7t7e3Ve9/73nruuefqhRdeuCGDTHceQ3cFak10JOXz+yqn5S1RSCSM\nvN8q38Hdt+9zopHXk1cgegmOCUyrVr3ftJnDPW/nhf4nIPJ2qW0JdFl+8oBbZUlJWt62K3sr5OYA\n7MbBaWjV50Y75U0y2SrPrzvwu9zyeisunowAR0hjNNAjZL/TcF9dXdXZ2Vmdnp7Wcrms+XxeJycn\ntVgsbuwAdafDjQn7gEaHYM6+9Q8BfWtrawXMvd3uuNCwtwCdcpPi/jR2osN/kyeKHpBG5vF+8VG1\n2qF5Cd8PoDJleDY3N2s2m9Xu7m49ePCg9vb2amdn50Z9TM/EaYtV11aa/z2Wyg5kB9OrSd6ornus\nnRtdkqfqnrAmSah0rQ0/9AZa3njLUySN9Gy8jeSf06pERaQXIq/AjQqH2uqDtMGhqm5cT1u5xWMa\nAipFSyEc4HSd5Ts/uKbXy6JR9b5I/ZDA2j0/71uvL4Ewn+OLEdTH7qnR+Hu/O+/4DEe/lG8N9Y+O\njuro6KjeeuutAdQFXpQFttlHD5IntYOjoFab3biIVoYaqENKDHV5fNz54SGzlgxRLpVoIFQvDRj7\ny/t9NpsN5SqsRBkXkLvhYt2idzqd1qNHj+qFF16ovb29eve7310PHjx4+zH0ruveV1X/sKreU1VX\nVfUP+r7/2a7r3lVVv1xVL1bVS1X1ib7vD57m+VRVfbKqLqrqx/q+/3Qqmx2jIRcZnISDnqoLuTNH\n1/gcQZaKlADO69HEhK7rm2UqJUFKCg4+r9TnwMyhm57l7xZY8ePJDZ97CrxGEG61gzxMnp0Dpo9a\nmFpAT7qdT6zPDaz/9nKTYUypdT0Z2dvkqconR7r88ZqXmwyIG7G+74cDn05OTuro6KgODg6Gw7eW\ny+UKSCa50W8/YIpzEx42ouMjmgmQLh9Vq7Ks+2ni2UeT+q2Qm4dZ2Lfc/el6QIfAZcnb6HXrOkf/\nDLFoY5cbjq7rhonT7e3tms1m9e53v7teeOGF2t3dreeff752dnZqe3v7Bh+YbuOhX1TVf9r3/f/V\ndd2Dqvo/uq77dFX9h1X1mb7v/6uu6/5OVX2qqn6i67q/XFWfqKrvqqr3VdVnuq77S31AlBSnI9BS\ncclsdZQDOC1uAjEyP4FJywOicmgZE/M5rfzvwsJOd9ocdLzDPW7s9FIhXFEIhu55OY3kqZ6V0SNw\nuJKk+2nISa+F9TH5nEXitbfb2+b0jRmBlpEg7d6e1PfJ0SDt3g43gi6zbmy8/50+3qMTIO9cbzDS\nmS76TU809YP6qu/7lV2WvvnP+9755nJNefB+E+ASpF3GUjnkE0f+5JOWAWpS0o0MeevHfbjRJH/c\ngIhH2s5/dXU17D5lPaJzY2OjZrNZ7ezs1O7u7vCZzWa1tbV1Yzexp7WA3vf9q1X16tPfx13X/Yt6\nAtQ/WFXf//SxX6iq366qn6iqv1FVv9T3/UVVvdR13eeq6iNV9Ydetla5EJyp7GKeN5xgQK+66joM\ncHl5ubJipurmsZZo4w1hpuKys7XDLgkqt/cyrsd2JcXTtys1+ZC8pgTuKY/o07eGraLN43nKyzWx\n3hdukMiHxDsHRSqRt8+BMSlvywC6A6A+SOWwXem/g3163vvM28RRp8CwVS7LdJ1QPekAuNQPKkcx\n8/Pz85rP58PBW/poYo70qhxfgSZ6tWbaeUt+MD/boyMH2Hdp4xhDouxTl1PR6vWrXC535KhZxx8I\nXNWWrlt9xZzKbIWAvW8EyHqek570zBOgb25u1s7OTj3//PO1t7dX73rXu+rhw4e1u7tbe3t7K2vZ\nW+lriqF3XffBqvpwVf3vVfWevu9fe9oJr3Zdp+nXb62qP0C2V55eu5FaAv20rvgcO0/M8TAKhYxW\nnsLuwyqvMykdDYh7arrm8c4WEDsfEm9S2GHseedXCyySMXGv3SeYWp5hal+LNjcubhhSGckAOi3s\n+1a9yQixDuchFdbp9nxOm67dlj/raEpGeV1blAgmZ2dnKxOivmmoxQOnQw6A75T2fP6RXDF84m2h\nDvsyV9JBT71VjrzlNGrj25tInzuMY05AMuwCZucZ//s1GkCtbFF4ZTqdDh8/DC6lWwN69yTc8j/X\nk5j4cdd1Lp23l9aniZMiAkIRn2aj0ySQOowgX3UdF9c91aPkQ0mCP2frdU9re6tqRTDlSdATUPn8\nsMNJJzuXQ2TvfAcPb4P+MzTCtd/T6XRQbBpHtp/AJ8+OfFWZoluvNlPsT33mB5LpeXo0vruuBYbK\n63xiXzr/XHZ4jeXpOmmk95f6i7xKxpsAxuV/aWLO63CnQ9fo1TtAVtVKOEJJ/7XWeblcDu8EfeON\nN+rg4GDwljc2Ngbv0ePfjAlTH/SZzWa1ublZx8fHKzLN8GjXPdniLj7o42Cq9vrcTdXqy3DGwni6\nx990jlgn4+jEIu17EW+VT/UJC+gsikcCXe7h4FwBZUsALh3a3t6uhw8f1nPPPVd7e3v14MGD2tnZ\nqdlsduOogla6FaB3XbdZT8D8f+z7/tefXn6t67r39H3/Wtd1762qrz69/kpVvR/Z3/f02o30x3/8\nx8Pv97znPfUt3/ItNyb3aAk5OUcme2yaoMTOFNigXdGCShjlvSRhcK+DAuE0j4Gc05KAzPN7aIH5\nE9A5kAqoBfgUPJXN119VXYerOHGWJn6V2I9K9M7GvOHEC3/O66RcuNeu32ljE/ns9NII+CiQckf5\n8T5xmpPRIgi518eyXZa9HZR/ORpa0XJyclJvvvlmHR8f1/Hx8eCdt8JRkiWBPGkgsFEWlF+ykiZC\nnf5kzH3E7KE/0uj96bF4OklcXUJQd+Ph9Hl4kKBKz1kxbh4XQL1KdGvj0t7eXj333HO1u7tbjx49\nGtaf7+7u1uc///n6whe+sMLTVrqth/4/VNVn+77/b3DtN6rqh6vqp6vqh6rq13H9F7uu+5l6Emr5\n9qr6o1Tohz70oRUhcCWhNScz0sQFO4PP+scBhcxV8k4gkFKxU30pz1hHJK
VN11K7UpJXlGirurnW\nNs3yuzDyeYImvRd6rvzP/kxg5gDmfGVbnXaW6c96Oek/6SLw0xglcGZdY7LAfA6GiV4v38tKZbd4\n56C+WCzq5ORk5dVxHlZLhjQ5PaxTwO994+Cq5zl6GmufnnUa+XwCdKfXecpltBrRMiWj7Y4h+Swv\nW/F6etItfpJ2hWhms1nt7e3V9vZ2bW1tDZ/pdFrf/d3fXR/+8Ier655EMH71V3+1Wuk2yxb/alX9\n+1X1f3dd93/Wk9DK360nQP4rXdd9sqq+WE9WtlTf95/tuu5XquqzVXVeVT/aN9CHjHIvwzshebjM\nTyFSPj6fvA96RiqPMTcOcVQHldLDKcpDL4NlE9zoISSgdCDQPYJIKz6o31yOmZSA7XAw1nPuvVdd\nn3in+8kDo9Flv7AM8tS9a79edb3ihTLgxjqBYgJOz8skGmicvX/Hwncul9q/wBATeZwU/urqamVn\nosr1tf5uTHSetlau6BVyWqaoiVHf3Zw8dV3ns+w3LQBwvtI4ynvVaHC5XK7wS/dPT08HuSA4U88U\nDqF8iU4PryTdIJ2UZ9cxlk8ZdZwieE8mk2FJIRcZMLTEkORkMqnZ/9vetcVIdl3Vtaunu6u7q6en\nLb+k8TgGBWeMhRQ+4h9HAiGILJASxAeKQBFBQkKyeAgQhJiPSAgp8EEifvjhIUXhESEkk/BFEkV8\ngMgDYhND7NhWZCBhPJ6Znn5VV78vH1Xr9qpV+1S1o5gal++WSlV1H+fus88+e6+9z+MuLmJ1dRXt\ndhvr6+u466670G63sba2hvn5eSwsLAzppkdwGZ1nlss/Ayjh/B8t3PNRAB+dVPbg2tRoZdcAo0jL\nUZ6ToxgvM0NEGY+lsr1Tnae8rIxxqPu85ej3pGtLz1Jjo23iaJ3H2dEUTbkB9xkb3nnOW+9xqDFD\njtm9WZmZDo3jKTOkmdwph9I885IulxByxlMGVDhWwimKzKFztouj1KwuWRRBUoc+jh8tKyvPEW9J\nh9XplBB4Fs3rMzNZj3PoWb0U/OjzstSjl6t6RoM+Pz+PxcVFtNvtoYFQ5swn5cszmupKUQ4eqLcj\n+lFEQEXNhMowJJt2l22apYJVQ5U1ir7TkCGVblTkXhwYnovqpIaO5IMtfHYpraQdnrLzlBSRgCJz\nlaPL2aMMzpvVdIyOWzAHT9SpiIholO2i7apy8FSPIiBF4lnKRiMa6o475wwAaJuroXEDQ95YR/LB\n+vGZeoxy0KmxHn1lYzDOo8+7zvRH9YXXsd1oxHd3d3FwcIBbt27hxo0b9fJ+DoqzLPLLQdGI/uZS\np6en9Y6L1EvmoJeXlzE/P49utzukf6rfEf0BU7YH0zyaVyfvx8fHQ9eyjXQ6pSJzj6AvXLhQ56I1\nevKUoOoa7YU6Jz3u+sPn8aOLgPhc3dtc8/7UEfYNbrjFqYlLS0tYX1+vkTmnPWaLoibRVA366uoq\nbt++XTde1glUCdRY6Xk3fjQmWUNSMNoR3Fj6+WzAR41h5iyosFlUwN/qoNTQsA6KiJ0/N14lY5bx\nxkGbLJzmbB2+SFhRnxtZ5fHo6KjeZY4y8zRKtihLjUuGSrMoSHff9LZzNOiI15FTCVlnMi7dn8mZ\nclJZUS7Km/4uzT1Xp++IWq/nQDYHQnu9HjY2NrCxsYHDw8PaQPs9mpaLCOzv79f8qHGjoybK95ld\nLvsMUJQGCF1mrr/876k+fR6BmbeTjgnxGI1maWsKlqN9WB0Iy2u320O2i+3FtlC5ckYQ0fj6+jou\nXbpU79ei0xNLQGuSYX/9mP67SEtLSyNe1I2wD4gAeZrGqYTMS8jNDVQJQTkPpf+ZIcr4Uf5L/JXI\nkes4mlRvVWTP+yvCUN55nCsQHdH7nhUlo1cy3trB/Hgmi0my8/aaROPaoGTE9D53tuchbx8tVzt4\nZtjZFoywdL65TlfVbzXm/Ci6zJwr2zart34ykJO1oeqMjjHoOZfRuH5FcpuijsQHRMf1iyy9onXw\nvqH9RZ06DbamWRYXF7G4uFgj/qxPah097ek0VYR++fJl7O3tYX9/v36fHjDa2Kenp+mgkoboFCaP\n6ZQ7R5ZEJqpsVGTNd2rDKIIl2tRwTYWvA2Y+q0Y7Ip/hoZ8qEuurfGlZigBYH6YJsoUZwFnoTHlo\n6M5rdY6uvhCB9yi6Z1tQPkyBkW/WWUNQDqjxutPT0zrs5vxoT09pe2lqo2RIPCWmGzy5Q83QuDqb\n7Ld2dt7Puis61ShI3/PqbUo94HVqNKlvmdNX3eWgJ18px/nhKjO2qTvYUr20f6g+sW31dXCaAvOo\nrtVqYWlpqY4UtH6a3tQ6kdSQ6XNVfzV9qlE/j1P2uj5D2zVLd6n+aAqXg7mK/ikH3RKBg70XLlzA\n6uoqFhYW6tWf3HCLA6BsY++zLotxNFWDfunSJXQ6HVRVVefLgFEEoYYkI/damVcHRtHGuOsy4j2e\na8+QfIZOtG56T1amU4b0JtUxq1+GZNy4+UcdFXnN0LCvICyVnf32OvjvktEtySJDcZl8Xw96znjz\nMh0NajuX+FejV+IrM+L8plHhviz60X1DtKwSCnbnqE7AQUmGJF3fSao7PgMl42GcTmTRySQd9vpl\nvPogrcvDx+48vaPghM+j/Gi0iciJypliIRjyNnfQkfV5paka9AcffBA7Ozu4fv16PQjnKJeeFxje\nXlINsu4JA2BEqNogjv60HCJvdSQ0ThqiKcpznvh8NrCGaXqMZfE+XS6sA22kUifhOf12xMVjeo5U\nUn4Nf1kuB9w8yqByHh4e1gqsg4msi5bHPDinZili0w6veXcf99D2oyxZJy2H1ymK4/V+jZNGAVpO\nyVHyHZFEbGosFJWrE/f8rbc3edDBQ9UVpla2trbq7XBv376Nbrdb8+PT6NjXVC9UV9mfaGxYDtvB\nZ3c4wnZDT6R6fHxcr7LMHBuP+3nKXfPeKnfWLwNq7pxKctY+SF3yNFcWSXsdld9Op4O5ubk6T764\nuIi1tbX6/9LSElqtsxkvmiUoOYdxNFWDfu+992J7exsXLlyo92RmOOakuVgN09iIFIS+eoukxpdz\ne9Uoadlq8LUzquIAGFIC7SRu2MmfzkihYnp9eA+VxtM/WWPy+WqIsxWR7rjUSOucfD5XQ2du/cmB\nUnWoLguG89xb2xGQG1934u6cNJ+r+53roLQ7ea23ys0dgz+L8tdv5Ul55v2eUlO5uwxZhgMS7bCs\noxp850/rdnR0hP39fezv76Pb7daGnAOhviEUy9QBbI8MVM+zNmF6QB0FZ8ao/NQBU07qDOnMPc3C\nY7o1gctJ+6im33TwU/nTPqDAwsGayoDARMEfja+mEdn/+Tk6OqrbcmlpCQsLC/ULKhYWFupvzpDh\ntZp3Vx7U3k2iqRp0Tqzf29tDp9MZmj8LjO6bAQznUUsG0SlDdY5O/P7smJflOd5xlPE1ydtm5Pd8\nJ2VkZWb1VUPo00L1HlVE7ZS8V
52Nt5U/yymTvV/v7euOq1S/rEyPEEo8ud5Nagd/pj7Do43MOZTq\nrA6UbyDi9ERts1KUO453R+/j6qSgKZujrrLN6qV8utEtoWo/rve5TrgTZTmKuvUaOjRP7/AY66jT\nQFUeOsWR0yp19ae+KMfBSEnG56GpGvSVlRXcd999aLfb2Nvbw/Xr11FVVb1xkBoJRaKKfPhRpMxr\nMlStjQig7hCeGuExonb3zIoO1VCpsdeOo/+VF96j3xli0jL0ejew2qHc+QHD0YYOZPG5nmqhLDhV\njbJwlM/oh+Fwu92uw2N9pg9uk6eso/t/31uGPHt9FARoZ1Fj4Q5dowwaGB0MdoNCorwdvWtbOjok\nqtTBNF6rz1O+vOOTv8PDQ/R6Pezu7uL27du4ceNGbdxZpr4URg2E8+T6q+cVUOmguuo368M0Qq/X\nq9tXUbU6F5av2xFof1WDWzLulLu+yFnL57xxbjBGHZ6bm6uNbKvVH7Bl2/Ml2q7rlA0jkps3b9Z2\nQW1Vu92uJwgsLCzUe7NwN0Weo91yebsOKLgaR1M16BFR7zJ26dIlHB8f1wjDwyVVLCqpKqcbQTV8\nelyv97mpTuMQpPKTefGsjOy4hm6l6zw3OM5rZ3yUEKnmCdW4ZwZWzwH5HuOaCqGx0fnH+jytv/Pp\nxzJDWorQvN4l1OvP1A6UofZSFOjXj4tIlNyZZihwXL1ovDTlcnBwMMJ3Jrdx0YTf46CEzsj7mAIB\nLjzTttG2yp6vz/M8vOuj12OckVPddD1Qh8L/7Is0ttnskogYGfMj7xFR781Cw879WVimGnEdCHUb\nkOnsHW3QW60WlpeXceHCBVy5cgVra2u1t97e3ka3260H1ChYndSvqEGRuXtV/2aHUIOehU2uhJkS\nuZJm4Z4+W6/Rc45Y3bho+arwTiU+spDOOwv/z8/PD40laETEtqD81GAfHBxgfn4eVVXVb48nGmH5\nmiNmeVonN2Rad3UmHt1kxl7PqwxKaNTbRo2YtrvLUw2ClqED+942jvwV5VJODlL0Xs4z393dxebm\nJra3t+vIVpEz73UDrbJxY6n3ettraqLVatWRtL6fl9GBbjetvPt+4c6Hpjv40YWC2i6eM3fdYN/S\nMZesD2vOvqqqevk9XxvnjpL6rXXhatV2u13nyTudTp1LJ2rn+AVnuVAurluusxExNBswo6kbdA6w\n3H333eh0OrWhuHnzJq5fv17nA0mqABx4W1paSlGRd3Y9T0V1dKr3ZPdr51Old4OfoV0NPfU8/yvK\n4fX8rWF9FjmwHgwn9X41XBoRaJjrPBFJVFU1hCg0LaPXE5npoLYiFhorDq7qUm0NVQHUHYXnPCR1\nZ+ht4s6QH0/L6cfDXC3fnao/Q+91Y6F65YZW+4Fe52k5dWS6cIiDn6+99hq63S62t7fR6/WG2ljb\nSTfKoq5Qt9SY6P7rfC5TBG78SUw9Me3GclmGOnLmnVWfCQ7U+aneaXSg+uaRNuvr60e8bXTpPsvz\niN0jEV/FSX7Zj1ZWVmpUvrq6Wo8R0nhzEJRrMLJoRG2AA4cMsTtN1aCTWOGqqrC6uor19XWcnJxg\ne3t7aF9uVs5H3jPUmhl2/VbvrMe887JM7SSu1CpoNzjZf0eg2W9/dnbcy8/QKv/7sczglcpRR+a8\nqzFj56JB1o6i0+QyQ5rxltWFx7LzzqeX4/Usle/HJ8lT20h50OfRsIyjTE+8fjoIyvw5XyPHfgSM\nOhS9vyRf1zV12DplTo0/MJom4bNUn7L/2cK3DHDps0ttqPYAwAhw8uM6hpb1O3XA2fGsXWngfQ8j\nB3JZf8z00PXnPDR1g87KcS7z/fffj9PTU3Q6HXS73RpJZnOZdYDNywPOUG1JcVyp1XF4GoLlckqS\nK7ij4XEfkip4qUE9PPY6eLjviM6PlRyYdnR+PEJQ463PV+RJtKJvbtEIS1cRAhiSZ2bo1WFrm2RO\nJzPgivCza8a1BcvInIaibh8UBYZX8fK/HishL41EPD2g00d3dnaws7OD27dvY2dnp0bFHKzTFMPC\nwkLtCBSZZ+k2jRIU1av8WI4aXZ94wDZWp8566ypNNXh6Dw0jeVDQoOWxHloej9G5kS+NNNh2OvCp\n/I9LAboT4KAqbRj3Ndd90pmJUCfD8vgMtqH2AT5L013jaOopF51n3Gr1c+rr6+totVq466676jCe\nA6XA6AwR5uW8A/jiiVbrbCSbAuNxXq8oyj2kNqw6FjV8jij0eld6/ua3rspz5Oo8qNHNULOWTYXI\nlITXqWFWPhiW0hDoZk5Ehdpx1bDT2DHcBs6crHZO5Z3Xq2GgomuomjlHr7sbF9UFlZW2nfKl6Evl\nzPs0peHGmfeqnD394Zs6AcOLW/Sb6ard3V30er367UPchEvz2Brqc4COxuv4+LjeidH1WFNf+hpI\nLgbyOqjh060cmP/f399Pp+d5TlrTngQO6mx0wRllwfZUvXV5qTP2VAYNri9Sos4QLLJe2sZ0DCQa\na5bJHLqWwWtUV1mmPreqzlJSTK/RObFdxtHUDXqGmjjN5+LFi+j1etja2hrK9/I6FYai9Ow8MDwY\nBQwPZmVoSY2E5/KcsjBKyyjdx3tLx93wj7vezytaccem/GmEUaIM2fO/ohr9qOHLDLCPGXiEoM91\nXhTpqIxLssjQu19XkrHz40Zb+VS983JKdXAe1bDxGB0oV4Xq24fUeEWc5Wsd1ZIfnb2RRbdaB+0D\nWftoXVxeWaok65ee69c8uZfrMszsBzA6+Kt1y8CUf0p9nYZW36PL37qMXz8Z6MraX/+ro3W+xtFU\nDbqiYUWvOvNlZWWlDjP39vaGNvTxXJ4qhs4S0OX8unRdOwsVieQGiqhqcXGx2BAeugFnhssHmvQ5\npbSK5zN5jYb7POa8kBTVKArk/UQ7ulAie6Z2GjfEalg5KMtrtHw1Ctp2Xr7y6h3VjR6vz+TCTqn8\nehRXkhtJ+VV0SX3Q+eR8pnfYjEoGT8smiDk6Oqr1f2trC7u7u7h582Zt2BXFRQTa7TY6nQ4iAgcH\nB9ja2hp6DtE302EuL0+T0IiVdMHrynNM/7hT13QQ/yto8r6iKRV3Mg4W1KlR53RGkfd+ZbMZAAAR\nWElEQVR91V/Nc2ta1aOAw8PDWob6QgruHquGnfaipE+6ylX5538dhFUnVaI7IoeugypU6Pn5eSwv\nL+P4+Bhra2vY2NioFVsFwntYFskRO89nysgyMjSo+fNxdRiHsrWMDCG4QXZ+vBNp3nOc4dP6amRT\nGnNwnifVRVG9lumIjuQ5f4/QxqEx58PrXeLVIzovP0Ngk9C1Gwbnye/1sYcSD14vRWs6EMrUF9MF\nPp6zv7+Pzc3NOqXJeeHKr7aFk/Lr9zlfyn9JFhm5bigi1qjHwZsb9Ux+486zXE/3kQfljSlDlsnz\nnKFFnn1VqJ5TynTW+7m2i45haLnjaKoGXZVDGWUF1tfXsby8XL9tpdVq1QspfEWdIy9tDF
VQRWq6\nvwb5Ud54jrlQ3q8oZhKS1pAr+/iz3AirsdPOmJVBXnzgjQhLz7tzU5757Xlj1k/r74iY/FG2i4uL\nIyhPO67y4x2O/7PQOdMh51Pb3vlTmes93tHVeGu5Gh1425cMt59TRA5gBBWyjajzOzs72NzcxN7e\n3lC6hXuiMBo6Pj5Gr9dLZVWi0nWa9/ZrGRm43vJ+6mGGolWv3Zi7Qac8OI7jbaO/9XnaT9w5cPxH\nNwpzMKXPjzibzqk5cS4c0tWhXFTkdkH1Q3VR25l5cwD1hl0+QDyOpmrQdTDNO0Or1UKn08Hy8jIe\neughHB0d1cZ9f39/qLFoFBRx0PCqMqrxd09HYfFaNqiHg9oAmcfVjqGKqkrsDsQVKVPO7IXBej87\nAQ2380iZaCfTvT5KDoflMDxniO6ycQdDJLm8vDwUsvJZHFylzNWZqANSZ+poX2WYoXF3XFq+Xqe8\nZ8jN5anOic/mMR9U9/Z1OfG4ojHew+1vadC3trawsbGBXq+HnZ2dujwaAT5f9b7VOlumrukVPp8G\nVBdBlVBuSUbeF/xFzt5u2SCx6xsNKEHR/Px8nXp1fhQcsN21/6rcqfua3iHfHNDNtvjgM8nP0tJS\nPVbBxXOcZ84dMckXy/GN/1QP+KYpBUdceETkr+1Woqka9BLS1E7KPFWn08HFixexvLyM3d3doQ6l\n3tjDFyUqI89nwvHOrOU5fxmV0KKez56pysbrSgZfyZ2TO4MMNWZ11JkH7JREfOoEaTiVFzWeGhbS\nqBBVcaCOfHsOmeTIyo/rMzK0nRlkXusy1/8ehfi5khFzWbrMvR5e3wwZsg10Owy+BMbbSsEMv1W2\nel7lr0BAoxCVZaZ7bBvOfJmk33q/gis97lGQEiM+d+L6e1wuXw2z9gsCQdUnR8EKsuhgdB9zfkob\nbmX6rf9VLiobB4MZEMhoqgbdQ3DvxKxUp9PBPffcg/n5eVy7dq1e9kzEwT0sFNGq8QbOOhXPK5LS\nAS83qNqgmVHX866QWfg+qZFVubMOlRkeRaeaDlFZcIBIZUIlZqhHnon2/A3xPuUwy5truottzGXS\n+iJglZPLQe8nyuQztc5aBuujRkPbhik2bzdNuWg6zufLO9+abiiRGzrVb0frqq9E09yfZXNzc2iK\nor9Ozl8Hp0abbaByUBlSd9QYuZ5mqDwihtJpdNqZvrJOp6enQwOK6jTUsagesAx9x62DLF7L56u+\nqOwzpK1AhOd8TYQ6MKZBmFbhjDyd9eJp1qyvqIzY3nQumZOYpGukiVdExAMR8YWI+M+IeC4ifnlw\n/CMR8a2I+Org84Tc8+GIeCkino+I95TKpuHwF9VqZdnBOLfz4sWLWFlZqcOckhcrodvShx5Sl/+6\nh5/00eec59latjZ2hghLz1H+3fHoteokSnVytJfJMkMK2omooCwnW7CkDiOrK3nOBsB4LuNzkrwz\np5vpm8qzVO+SrmWGW59dMuZ+3gdBfUC0dN+kY9nYiBt4YBTx6nnVFY0Y3Nhmr7nz52YfbXdGeJpm\nzPpQqX9p22ZyUH31tldno4jc0bkbcP/t8taP9hUdWM22PTgPnQehHwP49aqqno2IDoB/i4jPDc59\nrKqqj+nFEfEIgJ8G8AiABwB8PiK+r/Iei7MXVGhuVRuHAm+1WlhZWcHc3ByuXLmCiEC328WtW7fQ\n6/Wwvb1dN7oPxHiOmELlCkU9RtTlgtdOTF7Jt1/rOWUe4/283mQ29D97Nq+jwuu1PjKuZei9WVjr\nxtmf5Twp/yxPF02ogjJ81Z3ryB+RLxGbDoS5Y8/QuMvRDYHWjzLK6umpOuWhZHzcgLs8XG48xxAf\nwFBYr2XTgHEREacq6stf9BmKnDO9Ul4z45CF9uRR02Kqx9q3sm/XSW0bGkjVR19BqcZbF0xl6T+9\nx9tUoy/+18icdVQboe1DXn3hEFeE6jFPIWobeJrS+wHlwudwoNYBK8sbRxMNelVVrwJ4dfB7NyKe\nB3CZ/Ca3vA/Ap6qqOgbwSkS8BOAxAF/KyldlcBSkAuFUoStXrmB5eRm9Xg/Xrl3Dzs4Obty4gZ2d\nnRrJMOfGj4ftaniB4dyrDti4EgPDc2szr6t885x+Zwa0ZAQctZTQlT5P+fEO5XVV48dvfaOT10M7\nGpWdbcOXIyvq1ghsbm4uffkzB7mqqqrnx2unZznA8Au3FUkqKRBQ2fnYSeagvD091eMIz9vPDaie\nz1IM7nyYkuACIi7vv3XrVp1i5PoKdZyqG2qYnP9MZ2g4VB9Ylg+QaiqLpGk2X+GsS+xJfJ4adTda\ndHwEaZoq4/W6eZy2lSNrbR+CC+oi9xzyKIN10P3MuY+5o3MaX9UJ1TFP4bqc+E1jXkLl7shL9Lpy\n6BHxEIB3om+c3w3glyLiAwD+FcBvVFW1hb6x/xe57ds4cwBeXs1sds4909zcHJaXl3FycoKlpSWc\nnp5iZWWlNipcaHF0dIS9vb0R1OapFJ3Kx2f5pvYaJYwLnbwcfY4bA96fGQKViyNx/7gClcrJeMl+\n+zG91+ugz6MR4v2+Wu7kpP8CY07BouwZZtIg6Lth1Ri7s8yc5zhFL8k+uy67T0nlmMnGnWRJdplD\nYWfXFaEHBwd13lydn+ue189z3s67OpdS3dWJax6+1Ae0PnyGR8fKqy6aIWnE5Hqr9cjOl56j/9Vg\nu8F3GUXEyEucfV64P6dUntZBwZXe7+WV7MY4OrdBj3665W8B/GrVR+p/DOB3q6qqIuL3APwhgF84\nb3kA8NRTT+Hy5cuoqgrvete78Nhjj414WhIbfXV1FUtLSzg5OcHa2hoODg5w99134+bNm+h2u7h5\n8yb29vbw6quv1mgxiwKA4TnEDK00rBvUe+jbqaR0ifxQVRVefPFFXL16tVhWZmTHKa2WD4y+ts9R\nh+cOnb/MWGpuVTsrr2VUxLEOzsVttVr1oCgXuhCJE9noniEaHXnKw1MLnsYi/97xSUTELi93FCrn\nrH2yZ/iAlcrxpZdewjve8Y76nMqRxLpRVpxzzo23tra2aqOaGV5tJ40os3QKefC9ypV31QUdU9L7\n+Qz+V72LCOzu7uLSpUtDxsr1W1G6b0Wg7U3H7m3jjoW8eDSufaMEbByw0WgvLi4O6TQBB5G+yvbF\nF1/Eww8/POSkPIL0Yzq/3FNP7BfPP/88XnjhhZFyMjqXQY+IC+gb809WVfXpgRBvyCV/AuDvB7+/\nDeCKnHtgcGyELl++jCeffLIYTnjn0YpSgTjSHhFYWVnB6Wl/xP7WrVsARufC+vJ+ntPGAc6Wn+s1\nWaqF5WT8ZkiCHdwNaKlMVzoi3nHoRXksGScnRS/OU2lgRo27XkdFZMrl5OQEm5ubuOeee0b2WHe5\naud2Q+nP9WNunNTAjHO8ej5rv5IT9bLdqEcEXn75ZVy9ejVtZzc0OsjIKHNvb6+OftwxqxHP+Ke+\nZAhYF3XpmIVeo/LS9AaPKS/eN7rdbv3CGvKid
eVvH8eKGH5pRdb2Wp4+W0nTg1n6IstNe15e0zs6\nM8f3aNG+/fDDD4/wrM9VoKXtoM5N2+zk5ASPPPIIHn300bqcp59+uviM8yL0Pwfw9aqq/ogHIuL+\nqp9fB4CfAvAfg9+fAfCXEfFx9FMtbwfw5VIFB2WlFedvT4GoYPl6p5WVFRweHmJlZQVbW1t1SM9V\npvocdh5XDh/I4bUlw+YIUvO0itxK9/i33qednc/WDq2oJevM0k5Diy5U0d0Iej2UvA5uyHmM6RXu\njqm8cvUiB6RVvlV1tssc0Rw7kw4csd1Lhk1l5vL02SEu95KDHuds1RGqzF1f9VqVHfml3HZ2drC3\nt4eNjY16IFTbUY2/11uNhfPC5+quiNk1qkeZzmZOz/PPPM59kzTaVQPuYI33+4I3BXDZ6m/gLAJT\nHdCFi9qeaoh9XIZAg1MT/eXOjOSzeqh81LZ4n+Nx5ueZziFfvvDqvKAMOIdBj4jHAfwsgOci4hkA\nFYCnAPxMRLwTwCmAVwD84qBSX4+IvwHwdQBHAJ6sMleLyfkgNzYuEFaUc0IptHa7ja2tLWxubg4p\nHNEtUWNmPLm5lA4+abiYIcYS36o8/O/GweuXlUU+HQV5Z/ZnkNipfEpm6ZklckSjhoLHdEriyclJ\n3TZAH23ohlO+fJ6pm3a7PVT/LOR3Pijr8+hUqSPqNVrnSfJwY6b8KH+OevVZ1L1er4dut1sP8meA\nhjKe1Mm9ndVY+KvqqEsamSp/Gfhie2c6xfpwcyrer5FwBnR0kNV1OksXsk46FqM64zwTEPAadwQ0\ntNniIRpeOkXKJWuHUjvzN/nQcv36LJKYRPF6OvR3kyJiOg9uqKGGGnqTU1VVqYWfmkFvqKGGGmro\nu0uT8wcNNdRQQw29Kagx6A011FBDM0JTMegR8UREvBARL0bEh6bBwxtFEfFnEXE9Ir4mx9Yj4rMR\n8Y2I+IeIWJNz59r35k6mGN3v51cGx2e93osR8aWIeGZQ748Mjs90vUkR0Yr+Pk6fGfyf+XpHxCsR\n8e+DNv/y4NidU28fnX6jP+g7kZcBvA3APIBnAVz9/+bjDazfu9FfTfs1OfYHAH5r8PtDAH5/8Pv7\nATyD/myjhwZyiWnX4Tuo8/0A3jn43QHwDQBXZ73eg7osD77nAHwR/W0uZr7eg/r8GoC/APCZwf+Z\nrzeAbwJYt2N3TL2ngdAfA/BSVVX/VVXVEYBPob//y0xQVVX/BOC2HX4fgE8Mfn8CwE8Ofr8Xg31v\nqqp6BQD3vXlTUVVVr1ZV9ezg9y6A59FfUDbT9QaAqqr2Bj8X0e+4Fd4C9Y6IBwD8OIA/lcMzX28A\ngdHMxh1T72kY9MsA/kf+fwuFvV5miO6tquo6UG92du/guMuiuO/Nm4Ui4iH0I5QvArhv1us9SDs8\ng/4Gdp+rquoreAvUG8DHAfwm+g6M9FaodwXgcxHxlYjgVid3TL2n/pLotyjN5FzRGN3vx+s5c/Wu\nquoUwA9GxEUAT0fEoxit50zVOyJ+AsD1qr+l9g+PuXSm6j2gx6uquhYR9wD4bER8A3dQe08DoX8b\nwIPyv7jXywzR9Yi4D+hvmQDgtcHxc+97c6dTJPv94C1Qb1JVVdsA/hHAE5j9ej8O4L0R8U0Afw3g\nRyLikwBenfF6o6qqa4PvGwD+Dv0Uyh3T3tMw6F8B8PaIeFtELAB4P/r7v8wSxeBD+gyADw5+/xyA\nT8vx90fEQkR8D8bse/MmoJH9fjDj9Y6IuzmjISKWAPwY+uMHM13vqqqeqqrqwaqqvhf9/vuFqqo+\ngP4GfR8cXDZz9Y6I5UEUiohYAfAeAM/hTmrvKY0UP4H+TIiXAPz2tEas36C6/RWA/wVwAOC/Afw8\ngHUAnx/U+bMALsn1H0Z/9Pt5AO+ZNv/fYZ0fB3CC/oylZwB8ddDGd814vX9gUNdnAXwNwO8Mjs90\nvU0GP4SzWS4zXW8A3yM6/hxt151U72bpf0MNNdTQjFCzUrShhhpqaEaoMegNNdRQQzNCjUFvqKGG\nGpoRagx6Qw011NCMUGPQG2qooYZmhBqD3lBDDTU0I9QY9IYaaqihGaHGoDfUUEMNzQj9H7b50VnR\nNeivAAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "img2 = transforms.Compose([\n", - " transforms.ToPILImage(),\n", - " transforms.Scale(256),\n", - " transforms.ToTensor(),\n", - "])(img)\n", - "print(img2.size())\n", - "show(img2)" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Files already downloaded and verified\n" - ] - } - ], - "source": [ - "import torch\n", - "import torchvision.datasets as dset\n", - "import torchvision.transforms as transforms\n", - "cifar = dset.CIFAR10(root=\"abc/def/ghi\", download=True)" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [], - "source": [ - "trans = transforms.Compose([\n", - " transforms.RandomCrop(32, padding=4),\n", - " transforms.RandomHorizontalFlip(),\n", - " transforms.ToTensor(),\n", - " # transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))\n", - " ])" - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "metadata": {}, - "outputs": [], - "source": [ - "import torchvision.utils as tutils" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "(0.3371489570090489, 
0.24515368371385993, 0.0, 1.0)\n", - "(0.44256409261470253, 0.2971765334316165, 0.0, 0.9960784316062927)\n", - "(0.4061938378436025, 0.32892546338681194, 0.0, 1.0)\n", - "(0.2704159075874486, 0.18337201969836966, 0.0, 0.9176470637321472)\n", - "(0.34992724462032737, 0.2732488478952251, 0.0, 0.9960784316062927)\n", - "(0.3060087387730164, 0.25710693466354395, 0.0, 0.9725490212440491)\n", - "(0.41604116667743557, 0.2388433838705675, 0.0, 0.9764705896377563)\n", - "(0.4606604996988608, 0.24625605326498523, 0.0, 0.9725490212440491)\n", - "(0.4938623460972546, 0.3129965597088279, 0.0, 0.9882352948188782)\n", - "(0.2621004459118315, 0.2239845061390575, 0.0, 0.8549019694328308)\n", - "(0.26454759721430793, 0.11071022852775213, 0.0, 0.5098039507865906)\n", - "(0.4611264388361936, 0.32001783467012906, 0.0, 0.9960784316062927)\n", - "(0.4666066774840753, 0.30674951653607474, 0.0, 0.9843137264251709)\n", - "(0.21249872842918194, 0.2636358923863605, 0.0, 0.9372549057006836)\n", - "(0.2946678490996722, 0.21798154353121305, 0.0, 1.0)\n", - "(0.4658573437985372, 0.28209593857100396, 0.0, 1.0)\n", - "(0.5015995290223145, 0.31443273237117386, 0.0, 1.0)\n", - "(0.3317019086171058, 0.19920514503802628, 0.0, 0.8823529481887817)\n", - "(0.3885838012647582, 0.27673680696400277, 0.0, 0.9254902005195618)\n", - "(0.38839997841690393, 0.22913308841635177, 0.0, 0.9490196108818054)\n" - ] - }, - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAW0AAAB0CAYAAABOr2PFAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvVmsZdd55/dbez7zOffec8e6NQ8sjpIo0pKs0bZkeYrd\ntmPE6XTSDpAAAYIOEgToDvJAOQiCTpCHAP3UjW50HGRyp5PAhhPbktuaLGqiRHGuKtZ45+HM5+x5\n77XysNa9RdJFiyXJciu4f4Bkcdc+e6/xW9/3/4YtlFKc4AQnOMEJfjJg/U034AQnOMEJTvDecSK0\nT3CCE5zgJwgnQvsEJzjBCX6CcCK0T3CCE5zgJwgnQvsEJzjBCX6CcCK0T3CCE5zgJwg/lNAWQnxW\nCHFNCHFDCPH3f1SNOsEJTnCCEzwY4geN0xZCWMAN4GeBHeDbwL+llLr2o2veCU5wghOc4K34YTTt\nZ4E3lVL3lFI58L8Dv/qjadYJTnCCE5zgQfhhhPYasPmW/98y105wghOc4AR/TXD+ul8ghDjJkz/B\nCU5wgh8ASinxzms/jKa9DZx+y/+fMtf+Ej7xiU/w3HPP8dxzz/HFL34RpdT/7/957rnn/sbbcNLv\nkz6f9Psnp89f/OIXj+Xkc889966C94dxRNrAdbQjchf4FvDbSqk33nGfeu655/jd3/3dH+g9Py4c\njYMQf+lg+9cK73W+CqmwlMSSGQCvf+vrvPj8V1nsNIiyAoD6yik+/gu/gu1XsYT9I2nf0fgdtfPf\n/a0P6/bkgrKA7kIbKRMADg77NJotkizncL8HwNLSElEYMR6PEaU2BNdWVxgOh0ilkI5kb2cPgIrv\ncu7iBfYO++xu7+v3CnAcB8u2qFWqAAyHI5SSXL87/kvt/Umb9//lP/87ALzcSxjIVeadIQ13AsCl\ny4/wvmc+wc2bt/jOd14AYBRPWVrp8tj503h+DYCNvSHXbm1w4+ZNJqMxs8kUgDIvwBUUSMqyBMC2\nbYS0ubLQ4udPPQrAd/wWxaTPV1/8ClfPXgBgvjrPP/38Hx2385f/0w/odpeCTqdLZ65GUNVrbH5p\nkWbT5vqLA+58R7/n07/+iyw9WVBXd5lOJADDMOaRqzXqrYy9vX0ODg8BmMqIYbYDIuLU/EUAquVZ\n7rzs8Y0vvonj6Gdefv8yIRG1ap3zl+YAGIxvcvf2If/4P/w28BMx73+pgT8wPaKUKoUQ/zHwebTG\n/s/eKbBP8DcDpRQoBSgGBwcAXHvpRWaDHjLLmCZakPup5PFBn6XVCkqpv5YFXKu5AMRhSX88Za+I\nabQCABrNKmka4tgOC/MtABxbsbDQotmoIHJtCEpZsLa8SJqlZGQsdR8BIAxDKhWX+VaTeqAF0n7v\nkFanTZKmLM7PA+C6NkmcAH9ZaP+kIXZWAFha7/Dyt3cYOYqWp4XuBz65zuknn+brr73J67c2APjw\nRz7EL/7yz7Nz+xZzS8sAfOvGl+mlORevXmFna5ONe1pIpmlOnMcUSIKqniPXcXFnJb5Q1Cr6Posp\nVzoV3vAsVqpaARBi8LZ22rYWLUmaMgvH+BVFLvUz43yXldUG1aCLtajn6N7c+9kb93kiKKjYQwBk\nlnO4n6KERb26QD6n+5n0Bzh5E8Qa117V73fkIVY+z1y3xdKiXkvtrmIy3CSzJtzdCAHo92O2Nsrj\ndj75kacA8DyPOEnI8gJh6cPFtmxc18W1XeJhDMBkOOT85QuUFNQbes215uepOwF797YZTqfUl3Sf\nvE6FQb/HfLtDu94AYHd3j1lWMh0nTMe6P5ZVcPbCKkrB5vVdAMqoZOP2qw9cAz8Up62U+hPgyve7\n75Of/CRnLd2Yiu+R5wlhGDKdzXQDy5KSHM/3wGh849GEaqWOzdGGgyAI8H0fabsMR2M8RwuZuU4b\nyoIijcnKHIDeaMbhIGSjHzHI9H2ptGi35/jaV778w3T7X38IgW0JosmUV17QGsXuxj2yaMaNe/v0\npnrc1y+eZ3tri8WVFfS5+6MX2gf7+tDwvTpCgFSKg0OtVc/PdbCEZDYZ4Vf0BphMR/iej5IKmZjN\npaBeC3AdQVlIHFcL80rVp987YK65
wFyzam6VzC12iZKYdKo3arVSoV6roX3l7w3/7A9eRUe1giPE\n8cjYtl6flmUhzPU3Xvo6jz/1ISxLgNLa29sOQAW2tHFcAUKvT2nr37pKIAotDBGC3ILShoLSvFti\nlenxo/qVswB05+e5cCbgzc0bpCP992FhEUqbm7t9ZubAG4UpO/sD9nb7HI604Ll5+y7PfuKTdKvw\n/+zcxbP0u/xGlXrpsn56jfMXtQa7f3DAi9/8NqEXUQ30YX9uNOWnWqfILl6msaI12NvTtwtt16ro\n9jcCClmQ5DleVV/LEkVvx6HGIpanBdwX/nQHSwiW/pbLWk0L7UYTZnGKGENSjins5GgSWGqtsLMl\n6G/qg3hxvsMs7lFZyFi8uKjf48xoW3MM90aMQv3McJZTpO5xOz/9a8+Y+RTkRcE0CUlS3c94kmAr\njzIpyQbamnHted7/7OPkqqDZ1oLY9318yyN9cpVZnLI3HAFw6942FUux1u3QaeqDpLe9TTqMcHGp\n+3o8pAops4yDXshsqvvole/OXP+1OyJBC+3/5p//lwCoMkfJnCgKj09j3/eIi4TM95Fmn/Z7fcaW\nRzWokaZ6UZaywHEskkJQlArX1h0b7/lUAw+hMoStN4tIJdHBAcO9KXf7eiAGCbQXFh6q7f/kD77A\n1rXvAHB45w3K0mHp9COcvnAVgM7yaYKKw43XnufezZcByKcz7NKh2WnhBFWe/emPA3Dx8iMk4wGv\nvfoi0tAWWZ7w+muvMBn1SDPdzzyzGfQjZlFCYTZstztHZ65+v2FKb3QlLIQCpKYGAKRQoAo2rr3K\ntW9/C4A4DNk66HPtzgG5eUR7dZW8yJGywLI8eAvzopBaWzeCSyAeINPVW/774EWWRFoTajeqWDgU\nSqEs/bsoTglcC99zkYWeeFmU5GQoKXGMgHQdl/7oECEEjm0x6GsBMZ5NaQQVkjAkRs+xY1sMR32K\nsiQPI906pQgqwQPb924oyxLLtLMUAgvxNmpKSmkENzzyxDMUstT3mLES6j6lYVk2JYLBaIxUej7r\nlTo2FpMkJQj05rUrHgqBlIrSCP0sy8jD6fF7v/31rwCwtHSBXBUstnL+3n/0D/S1c2sc9A/Z3tgi\nKfR7rl97DX8y5FzVJzcKzWe7beo332Sx7fAz2ZSio8fG9upUWx0W1k5R7ep98noWMqkq5iU0LQ+A\njy53uP3Sq1xtd5ja+rCdZffbCOBaeq1mMqJarWDZ4mgp4dgeo8OEN+59i71eE4Abm6/iNS5w65kR\nXc3C0Jq3yIDcitkf79CfasWvIerY2YzJMOXseheAy5dXcasNwmzK7oF2rb3y6g2ErFFOAtaWlgAY\nHd7DF/fXwsc+8xigD1qpFJnKkbmeNxmBnbnIqETlRpgGDl7Nx615WJ5en0WeI0sQsoPn17hzd0f/\n3gGk4PzZs7SMpp2GEenkJlFUUhiaslYPWOwscLCXYJtN7LnvLrRP0thPcIITnOAnCD8WTRugUtPm\n62w6psTCrdaPzc+gXoMsQCl1rFVXq218L8B1XGoNc2rnMY4rSPoRrusf0yOFkkyjlMB1qPj6FK3Y\nisXuArmyMMYnspdQcQSjh2j3ZDhgvq1NQNVdQjlNVk6fp5Raa7FkhIwKkmEfZWictYVFTq9fZP3i\nGVbXTrG4qE951/Up2lXWTy1TFFrTTpKY0XBGrzfA8YwGIGw68z5BLWY80WadHzhIVRy360jpkygs\nKRClQpnoSunCcPserz3/ZeJ9rZ30o4RXNw8YR+mxll5tNDl99ixGj77/bBRKlSglEcqYksL86y03\nirdp2g9Gmek5TqMUx7VxXJek0P3Is5TAtui0Oihlm/FIiKIIN/BJUqPdeDYil2RZxmwcg6/b5PkB\nNjaB41JI3YZcKSZhiO06OI6xEoR4S1vfO44oDktYCN7uBD7y+AshkOL4B2b0NH1iWfr9cZJw89Y9\nbt95E8/Tz2h4DZAWcZ5SNevbch08x0UoSKXeB7PpkDIJ+e1f0Q7dKNJWRmatsFwPWGytcfFx7Qy0\nGwG7b9ygq2Jqxq/cDUOe2u+zXs6gabhax8LqhdiOz6eEj23GSZaCfDqjfOMa9vVbALRkyeXWKnmz\nxuKS1mrv7e0SqpBLzdNM0M/s2u+wZIy2agOzwQjbs6gFWqv2PJul9RlekDG3bqyRNty6PUTmFn4g\nzX09pBxTFgmdZkKR6z3XP7xHPLWo1pdZPLMKQC++yf7dTabThPFYW23hsMR1Cup1RX9iuGKZMx3e\n30e1lnU8x5ZlIZSPr7RF4cSC+CACHKSx+oRv4TerEFi4NV9fswVJVpJmYBOwsqrHKQkFu1t7vP7y\nywSevne4PyA8PCSOJMKMe7XdRGTQ8QOCjrZcbLuEOzwQPzahnc608LGkBEuQ5Tm+EbBlmePZNnma\nYpkNXQ8qRGFIgaIS6E1a8S0ajSrjcUQhcxo1bXIUWcJkFJKMIY20SWxbEldKlhoujtMBIEwPyUXB\nQyHPyVK9WKIo4+zlNWZhSGbMpbmFFo5rcenSZT7yoQ8CsLZ0ilarS+6UVAMfx+x1URTE4Yw0z6ma\nyIZOe5EL5x/ljTeuH/OdaRrRanZwPRhPTFQEGVLeFxoFR5tCoIRCWtrxCJBOZrzy5a9x7/qbzIzT\n8ebGHoNBRJaXnD13BoCPfvyTdDoLIGxj1h9BoZQWkq5ZWLawwXqHeD/mbf8KLlweCeOMquVSYh0L\n6Farg0uJKiGoaIqgLBWBD5ZtUTecYRiFOLZLc67FeHeAb4RcmKXUHY/l+UW2dnVESVmUtNstsqIg\n8IzQjOJjAfpecUR/mNG4f0q+BUopSgnKMg46ZaItLEEYRfR6mru/fecOGzv7CEtSwzhmE0mSlsSq\nIDzUAsVWgkA42ApK88w8CwneEtjzqc98GoBap8mi6zBX9UgTvbc4mBLcvcvPrS5hD7XfwlMZNSvD\nLyX21PDkvkA0bNxpjpKS0vQtp6AMQ3wp8cz7Kpag4QdEQpEfjfHBIZdPncIOx9SmeoweMU65+wOo\n3+U5FuNZhHBsZlW9NxeXG5y9WGF13WN3T7dz+eI8h/9bnckQCqVz9qazPrgWrl2h7TfxO0b5UbvE\njYIstbm3dx2A8XRIfweKpA6llivZrGRhtUVQE+wb34rl+Ah5v5mucTpKpVBFiRyVDA76+towpndj\nkwouXkM/s7nYYe7sGp5Vod7UffYaFVIliFJBf3fMwXVNz7z+1ZfZ3dqjt3+AYxTUMitIpxlpaqEc\nveZHlmBwOKTi2Jxfa+s5aghe/C4PxPcV2kKIfwb8MrCvlHrSXOsAvw+cAe4Cv6WU+itd86OJdgpZ\nCDzXoRb4eK4+eaMioVILKLMEabTAKC1JZcEsnlBN9BJqVKvESU6e5chS4iizUBwHHEHFc4+1b6EU\ntm1Tr3gUlhaG3bpPjPNuB9gDUSQxwvCtvldh3Osxv3yK049pR83i+iqu60GRkxdakF/b7RPdP
iS3\nMq6/8hLPXNUk3ceffQalFJPJmI17mvfy3ADPa7LQXWNj8019Lagyi0Mmk552XgHNZpU4jo7bNQi1\nQJirtsiUIkdSNdN5+8WXuf7VF5iNE94c6AW4cTiiTMCvBPzSr/wKAB/56Y8hcZDKRvvQjKauJGmW\nce2NN6gYB+Gli5dwcLR2eSSkj4T22yX+22CjD8lTy4tkWcLhKKLIzOFERlBvIFNJYgRPnhYoS2kt\nW+j+BLbDaDTh9PISa/Um00hv9FGkkAX4VcHakUNse8gsKlC2xf5Yr7kwDB86MkZKdXxIFkJqnlqI\nY+FtC47HQprOZ0XB6PCQw16fza0tRhO9JYqixMImqFSYZfoQLYuEWZSSqvKY4+9Ua3jYBNhk6Ptc\np4L1Fgsr6hnLS1mkFejvDQgjPceT126QjGa0HIf6glZUnLaDF3jg1PHu6d8GAqZtSTq9pa0QWx8k\neZ6jSoGrOPYRpK6FFIqiPyU3wqxSCZiJGXI8wzIOs8H07Yea45mDTECn1SFOUrJY895FLgi8Jdod\nxdqajoaJmefP/rzO7Zsp6hf1vFuqAZZD4Ncg95iM9fr3ShevIuglh+zu3wVAOClBvYtdaTPt6/dU\n/BSLPnlUPfaVzcKQ2fS+1M4T007bIo8yXv/iK0QbepyK/hgxmrHabBMYp6G3nEBZoXF6mZqZFzcr\nKfOSjWt3+dLnn2e4ZRTUmWI591lurYKRIaNyTD+QhLbNONbXxv0pti2pN21Eocfz2Q99iH/5+/+K\nB+G9aNr/HPhHwP/0lmv/APgzpdR/Z6r7/Rfm2rtiFupF2KjXqDTmcVSL/T1tAm70Nqi0qriqYBLp\nDZ3LkqafUypJdERwFDm2SlACVJmRJXrzKuEipMJ1JRVPC3gLC9exSbOUwKgqK90G09zlYZBGIXXj\nxGrOdfnAU+9j/fwlpsYiuH57k0kUMRuN6I/0ot7dG9JsdcFK+aPf/z9xf0tPxCc+/FFcN2d5eRWU\nFrqj4ZTvvvgyjutTa+hDrCgV2WyEbWkHJEBZZvQHveN2fe/aN3SfmqfILZtau4Xc18TPN/7k80z3\nB/TiGde2tXYU5gWecPjkpz7OL//qLwNQbzSIsxQbKCmx3uL8+tJXv8zzX3uepvF6/5u/8RucXT+D\nUgp5FOxgOwihiQfrXYRixYx9FE4pioJ6pUYU6X7EYczKfJc0i2kE2nz0HB+n6qPsnCTWHnvfr1Ak\nBXdv3uGpS+fpTXU/bWUhhUOvt4tjlnKR5oxnITkFg9HUvDtkbm7uvU24QamcYytBCE2VWFiI0nTe\nAtd1SbOMWaTX8a07t9nd3SGcxaRFSWmslFIpmo5HkimmWXk8xmma4nk+jbo+GPM8Jiug4gZIoSM9\n8jJ723m4/ZqOqpWLp1hYrtMY7TJ6UddoS6IZ7UsXqa2tYS9qIUM2Q712G2u+RjGv19d0NCM9HGPn\nNoXvk5pDo7LSJHDqRDduQk+PseO52M0KU1vQN05xpznPeNjjcG8Px9Lj3q+9nR6ZX9Z9UjkI6RHO\nIpbP6GuNto9KVyjcEZWGlgtBMKHWddjcqrO5ZcRSMaZSrzPfCCjCAqvUWqjKItI0pExtVKGtrkJB\nd9lmvN8jzfTYtSoORTzjYCdkEOpDyPYDbP++DCiPrFe3JO2FjF/eIN0z62Y8oe3aSNslirUCsLy4\ngBsponuHJLtae3cCj4PhhOsvvEx1lLLS0fRI6CmSJMZGkpnfV8qQhUqdApubZn3eGWXMVeaYr7qM\nh/pgml9q8W74vjajUuovgOE7Lv8q8Hvmz78H/Nr3e84JTnCCE5zgh8cPymkvKqX2AZRSe0KIxe/3\nA2HOh4rv0Kq4zHrgZPp0ft+Vp3j+9RcYT6bkJuTFFyWL8z5FDtuGp86siLpfstZdoNVsIowzsNVq\n4QqLLB6TGq1Hc8YWwoJGXWvfa1adSfZwXfZ9l9zW3GpcqXNnEvO9v/gWg77W8rd39nFtgWtJ0mPn\nYsZK1+Fg7x5N32M60hrjjTt3WFlZwHUdVtZ1osPq+jIbe5tcf2WTxRV9Qt/d6EEukZmkNNldgefj\nO/c1hGu3XgHgjtqkWm3SqTe49TVNgu28+ioiVry5t8cg1FpHqUqeeOpJ/u5/8O+wekpPl0Sy39un\n3W7TalXJjfVwMOjxxS99iVdff53mnOHYKi4f/uCzLHcXUaXWTlqdOebm5o1j88HjunZ6HYDJeILj\nuqR5jufpe+vVJgf7ezjYtFs6xMx3bXBtvKpHFOu2F6VNs9Vle2uHMIy5ekXTTa9cv0mWFESFpFHR\nz6xUPOzZlKzI6NS19u6onMW5xnudcj2Hysc23LujJI4Flu1w5DcI05jh3h5b21tME02D9AaHWJaN\nEBaOY1NyXysPycnykjjX67NIUxxh47s2hXG4JvEUP6gjLXXsNpXviLSs2EYrJuRcax6xE1GaDNOl\nJ6/QefYZBvsHRHd0TLpvFZCGcDCFKzopqfrkaYb/6ksEmYXw69Qu6moU1kKVXNmMd7axd3Q7G1mO\nH3TwV5fxm8aaqVcI+/v0Dvq4htcdviOh9tHHHtfv8ivYqorn1Gks6PlUToJQa0yTG0zC1wEYzzZo\ntB9l89YcX/3zewDMLX2X5VNLuNY5mt4yrq33sVdZBbfEHx0ghe774XCb0WBKv6fIEt1O6dQoUh8v\nzekaP0h/Mka8JckwMxRHYEFd+ARhwXB/cDz21XoNz7Pwm2Yt+YLhoAeiRErdH8f2SNOSU0GdxYUG\nM5NfMCsTiiwiiqf4hrZd7jbwJMRpycbQxLarBE8W1L0mhWlbr9fn3fCjckT+la75z33uc7xg+LQn\nbYfFdkKRx0QzLXTFyMexFEIoqoZf+/DFU/zm0+fZ2p3xj/5Up+RuxyVVpyAJx1w8s8BqV09Enic4\ntkW11TrmLsM4ReBQbVTABMsXIsJpPNzmrVaXOBjpib25ucnrr72K5TqUxjkZT0NsSxKnE0ZTLZyn\n4Yy7W29QqzS4cuEKGGH+ta9+iTPnznH5ymXmTQagHzi0mj5WMSZMjxxnKfFoSlkmBBU9HrPJlKah\nTwASvV4ImVIWJeNrt7jzghbaapayNU7YGk2MgxL8huDqM+dJ5JhvfudrANQbba7fvI3reayd6h5H\n7ty9u8lBv0dQq2HV9EZ58fqr3N26x0KzzZnVUwB87GMfpdWugQLLup9l9lZUTEJFrV6jlCXTaUJQ\n0RvAtQsmxQglC7Kj6HHLYjIZUicgjg33LaDZqJLncHjQO05PF9jYwqIsM2omDnZvcIBnKU5fOIss\nCzOeEaWUPAymcXKcSOMKiZAlSZowm2mTdmdnk8PDHqUssHzTdyGRZY6DjSUcMEK7pCQqChACaTh+\nzxFUPQ+BJMtMjLkD0iqIRXacJBZnKW+FV9Xr+wNPn2EhsBidatJe1/ltWQhb3/4ecv+A/EA7sNOl\nGl4gcMYZ2Y6hE+fOUvmpD/PyrdepOk0Cw7eq71yj
giDa3qaSmcQ3z+Ub4SE7mwmPn9Xz3mk0KTNF\nxa0hK3rc19fX4OX7pYcePfvTAIRFH9/1adY6lJ7O0tyf7JLFMWncIzPzUhQJlzsWeypGjgzH3oi4\nd2eDbGhz9dwSDUN9lqVLnsX4ToO1eR05kwws3rjzJrOJYr5hojpIkJlD0wvITUz1VApSE0QAOvcD\nwFE+B5u7bG7sUaR6PFzPJQiqWECnrmXN6PCQKNym0agS1LT4dP0qeQEIwSwMCRP9TN/3sUTAtAjJ\nzIatVxtYpSTPXQpD7agixRE5d3a22JlpBaD8owfvJ/jBhfa+EGJJKbUvhFgGDv6qmz/3uc9h3fxD\nADyvwiiVbE32uXeohZwcWggHGk6VpbbWAi81FwkGfRadjKYJAbKlg8JiMCm5vdVnafmceaZgNJyg\nXOc4vK8QNklaUFG6FgVAxQuozM0/VEfbcwvc3LwBwO7dO1TdlHE4ZDbRXRZSMprOGMUJjuHKFpYW\nqTRarJ19ivXA5s5LX9ftFxl5WXLY6/PEEzo55+Kl86yvdKl/6P28fE0v6jQJSF2JpHkc5re3t4Pn\n+8ft+uZXtYaSewlz0qJ9mFAO9XgOJ1NujEIiwBzwPPW+R6guOPzxn/0BO1u67bkEYQd4foDvCUqz\neceTmO3dfa4+/gTdC9pR1O8d4hSKjd1tWk29Ubd3NoiiCa7nURQ5D8LOnt7Ivu/jOA5Jkh/zoBLJ\n/FwLpSRRqhd1FM0oVUkhY5ThlLEsJuMpSZLR703wHNs8s8lBb0iazRgNtVLg+hXOnV5hYbHLeKQ1\nmZpjEUbhe5twg3t3XiI3IWZplh2HIqZGwBYqBwSOY6HMfUe0flEKbOxjTrtQEmHpSIXAtN2xbGwh\n9TOM5VFQkKqULE0pjDCLk5iiuO+IfP/j2gHezTKSUUjr3AW2N7VmGl7fojNNEUlEZpLM6mUbOY3J\ny4xiU0ep7C528Vbm2XMcdm7cxDO6/PnhmLnDMT4zsPRaHpYBv98bcL2/wd+v6wN4cXUBT/h4nSaR\nEaTzq0tvG7881e2fhiPcrk0sthmNtKP93u41LNHCp0G1qn/Xck5ROJJvjl5g0dYVnivqp4nzHDVZ\nYryzQtDRFoFtu5T5DOHMoND1SIi38KOAeBIhpXFYtio4boAT+AxLPW+tTpPEi4/b6bgmeqSQ3Lmx\nyUFvSmCSiFxVMgtj6o6LY5iCcDxkOo6xVYmF3o+lFAjbJ88yiqIgz7WSJixYWZ6n6sLd27f1HGcZ\nSZoyTaqkpraOZTt0F1p4lktloN/9S59+mj/7/IPDR96r0Ba83Ur7Q+DvAv8t8O8Bf/D9HuBaJqoj\nz0FpT7sypl69AlWvgusELJjsrMMw4gs7ezheQceYYBdqDnEYkWQFYRryxm2tTTx66TL1jkeWJahc\nL3DLsXBsm3qtTrupPelS5hDUeRjcuvUtrt26CcDO7i3KaUijVePKpbMAPH71cXYPY+4dhnSX9QI8\nc+EcjflF9ochqneHjXtaGB+O+lx9FD59+SrhTC8cWYLKMl77xte5dOV9ACyttfnGt77C3v6E3PQn\niTOGw/tZZ+XMCI+4x3Qa46YOhbl3Y9RnmhfkQvH4I+cB+OTP/BTVjktcjfBqWnsbTyaMRxFJmNDb\nPiQ00RbDVJIrj3a9zaqJMY+nUyqVgMOtPk8+qUMbL58/wx/90R8QJiGV5jtCvgyEOUYFkixNkEWJ\nY6iMqh9gqYLzF8/hmJC/cJYzHY+ZhgPGpt5DGCaMhhMoc9rzdYTSh8ve7i5RVDKajo81/Z/91FMI\nJan4LrVFTbkMR8PjNfRe8dqLX7gfl+3XwfZ0TQpjuUhLx4zkJccRJbIsEQJcaSOkRS7ux45LWeJh\nUTVZwIWA0rKxLAslSnNfSlpo53tuImx0Jup97uGM6fvg2y/gJzkRgijR86YGY4ZJirAFVeMMm/V6\n2BVB7ewa41TvwTv7d7n+3a/hS0XSG3Jg6JV5W+BbKZEoCE3kzkhUmZSSQiUMYt2fURGjbIXn5FhK\nz9G4fLvkaXuKAAAgAElEQVTb6+7287pNwZCGlyBFTBJr61IUdVw/wFIdMpO5eWF5naG6RcfZpmFr\nhebc8oeozVWo2A1E6aIS44KzbXwlCeMRtqWtz5o9pFZMcK0Jfq7HKD0smWUzqnVFbsJpy7jATt+S\n2WqUoiwV7Gz36M1STIQxgWvRm07pduaPo992dw5BOdTqdfLMzK9IqdS0UhLHMdOJtlLml+do1j1E\nUaVZ0+tbCEEuLIbplMg4m6VTgtAUaKdqwlz773Qj3sf3dUQKIf5X4HngshBiQwjxO8A/BD4thDiq\n8vcPv99zTnCCE5zgBD88vq+mrZT6t9/lr37uYV6kTNpYXiQ4doHvpKwt6SPtkctrnDv9DLfe3CBL\nNQFflmMmFWg2u1xs6lPyvB9w/dY2STjFDwKGJmY1XIOF5S4iHuNyVBlOUiDxAxeTtIWwXZT1cCF/\n3/jKF3CWNGd44eoTVDLJ1UcvceWy5vfKxEZZMSE9HNfUcLDb5IVPOB3QygoK47jbOBgS1LdpNTuc\nv3BWjwsW8Sji2je/h4q1Vvr4z3+WJ548T/zChFs37wJQrdZpte9TO2fXtCMz3JdM+lukWcHBRNMj\nvTRDWorVtUU+/dlPAbDcXSDLZpQSGqauweq5VXY3Q4aW5PEL60wNvXLrcMwbt7aZ9YbcflFr9wf9\nHr5b5d6dDXZ2NL3y9OOPMhlPePPOm7QWOw8cv6NKe3meYdk2liVITcIPStFpN3nisUdozmvN0BEB\nezs7PP/8F7n4lLYSylIxGYekac65lTo9U6Jzv39Dh+FZ7nHolu8LRK7I4xDXZOH6nntssbxX7L7y\nNY6yMObXzmO11xF+7TibFEtiuwpVOmRmG4lSYQud7GShjh2ZgeuRxiGdTouGq9fyvTvXqdSb1Dvz\nCLM3yrhACogd2BtoKzLLBJXaferh8MWXABhdf52a4xApRXXO5DtkY0SaULW9o4hSpiMJFQeZlGwY\n+i6MMpKDPdK5Kq1Gg5qjTfJs0EO2KyjHJUp0O7dFnVMrXWThEJmYZifPWLALPEuQHmUGq7f7DOaN\nv0m6MZPkBul0Sl3oGjxr3TqlPcEPqiS2XnP78atMkgSvVqXV1c7rTvMSFCWD3hRZZMeF4yoVD9vz\nmcUOeaE1WLd8lMWmzcbBCyhHz7XteAhlkQ1Spua3llQQ39e0yyMqQ3kk0mYqBZmxnNKiYCYLCsdm\na08P6GCcYAkPr5qAobqsIqdUFpYdEIbhMYXWbAY4tqReDZg3Dv3heMYoKdiL95i7oKlgETocDoZQ\nkcw19Vzm0/s5Ge/Ejy0j8khYekGALVIoRix1dAOfeuoqC90OC12f29f0ImjXVtjc26Ux38YZ6U3a\nadVZXXmK7Rvfohoobu0YB0oxJUwstrf2aBrzol6tImXKrIyP07vBQZU+D4ODzR7vf+qXAPD9LnM2\nrKw2j2O
AN28OyKSPJUpsxzieVAqFQ5nGqFJSN5ER/VmI5dWQ6n72IhLqQZOzq+sEhi6ymPHE4+do\nt9v8Yfx5APZ2h6wtrh63KzQx6ju9PtE0xi8sdkb6WixsvFrA3/q13+AJEzFw9+ZrDPr7kCs8V7/n\n5gtvMh0q+uOM5Mk1fuajnwTg6foc/9V//d/zxrdfYGVVt92uVigDi3aryb3NuwCMp2P8akCUxDjj\nBxtt84b/LosSx3UpVEEW6E3VrLVZ7jZpt+q0W3ot9HfHpJMZ3UaTUyZWVUpF3KqSpTkLLZtWQwuZ\nG7e2qDQqjOOEwGTHhrMJdpEjcDkca6dOFEXMwofjtPtbd6mb6JOJsGj7bRAulnFcWSQUswjXbSAM\nB1pgkwkPVWsy312iajIqAyGwLAu36qNCzStfXFyilALb9vCMMA2wODg4oHAkXePbQXlU7PvO89um\nbnhe2tR8D9fzGQxMic+kwJOCPEtJTT1sZZeoqSCOffJHzwJwnjpqeo++jFm90CUycc0uArcWkAmJ\nNJz4QAVcffwKNdehuKMTwoIDCyceMSpKDqWeo9W4+rbxO7OuCzHlbLM53SYsU1zfODczxXS2wf7k\nJm7TZMIOY3ZDQWJ32T7Uvgjh3yGexUymU2zbOXYM12o1KrUqcZbeF+RuGzu4yPxiyJxxEHa7p3Wi\n1njC7kgLXTvPsaP7zt2jD2uVgF2tEwmbzCgAGRKqFXLbZq+vDxfbrVGWFoNxcjxGtbpFnOZg+cxm\nIanxQcgyZzYdIaRDy0SvjGcpvdmAxUtdPvq3fwGA117f5fn/+ctMihkV1zjpxbsHTPz4hLZ5VVoI\nRKFoNxu4rp7ord0Bys2oBRVOn9NCYrG7xumr55BWzmi0aq4t0evv0fLnuLA2R/KnOqngzZ27hGWX\n8TSmP9CTeOncGVa7Hco8IiuOCt0XCO/dvbIPQrU+h5FxjEYH+HNtokKSGAd0pdPAlwKSEmVGM8kj\ngoqDJTKk5VCf1+331AC70kF5NlLok1SUNSzbwa15VExoYpFO6W/vM1/r8qu/+PMAvPDSXWZxdtyu\n/UOt7R6OQ6IoR6SK2AgPfIfOwjw3b9w+Ls0q0hDPhuEoptnS/G4YTRn1EzJpE74RYhUm3LK5gKdg\nur9PYsqo5hZ0z6xTOBZ/8eUvAlD3JHc3bhOlIUH6YKHtGcvHrQQoFJMkBrNRmo0G1aouudrb18Jo\neDghnkxYX1pmbVEL8rJURFFClhbUq1CaTNiV1S7beyFplnHhsnZKJ0lE3RJYlsPIFPdP0pQkfnsU\nxveDsKxjL45DTs0u6Y8OiI0C0QxKsnxGu7FIs661qJ6ok9U7NFcvsj0JGb2pqz7a4ZBzlx9llubM\n9rR/5EKrzjhMiGY5wvDk71teYjAecad/gFrWpQZ8u6CS3ec3vxfqfrQW1lnuzhFQkhr+s5aVZHHI\ncNqnYrRnq0zxS8HchbOc/5SO6Jj8xffwA5vznTns0ZhyoIVk0/cJJyGZKpkV+iAZ+xYfv3qF+brN\n4IZO7HEPchKZ8ZIFf2HqePxS+HZHdKG0kAvTQ4bjEYejAYmjf79zd8xodkjulZw+qz+W0HabHE7v\nMstbb1kL28ySjEajSbXeIDM8vzOYUmtVEG5GZiLDdmcleSGZqy6yuKYtk8eufJCSnDJNOZPo/SZn\nIXJ83zfkV/Q42SLAbvhMioKqbawELGK7Ti9T9COtkHUCh8BzyYqEkSlxLOwqtgMpKf0oRym95g+3\nx4hC4rk+gXnPpJhRu9Lms7/9s1x8RlvwrcUOd158jZ3XD6ibBKLl7gXeDT82oZ0biiDwPFzH4/SZ\nyzTmtYnfWmlS83OqlqDTNvSGnTFXC7Bp0DylO7ewvEbv+T/GbbZpr55jbU1rCPf2dtm9s0vpusTG\nhLu3sUnNXqJW9bBcPWD1eoBTfzhH5Mrpc3oDA0kyYX/i4LUXyAv9TOG6xLMZubJwHH1KFrZPtdlk\ncX6EGsRkxjQX0qJSqWDZ9x0gZVliuTbKtpiZEpxCSnzLYnK4T6WqM/k+/uEnuX7r3nG76i1NR4j9\nKbNiokPfTFEaZduEYcLzz3+LdlVfC1SJKkvKoEZsYr8rwRyJO6I9N8+Zsx0CRwvDTqPD+soaWzuT\no6g1vMBjuLdPVOTHYWuvf69F4UGj06DVfnAGV904GB3bIk0Tar5LzwjTra0dZNlFXL/HzZvaWbu6\ndIqmY7OwUMG2j3zfkna7geu42CpmZuL2z51e5s2bLzIdjiiNtmgJD8uxSJP0OJ48TTIs++FoMcsS\nx4K+8Cdk4312Ng+Jh1pohxWF7UhkbYZa0gJWrK2zeuVJJhlsH/YhNnXLmwH98S6pdAhN6NeoFWCv\ndJCeS2nWwtCTnPnAY2x+N8FZ1sqLpUry6f3D+nVjzjdxeWVzj5XlOtLQOI2aw1xniajnUqvpufRn\nUxpxRCuwufeGjjjK+ntIW8EsJi6meKlxximbTGZYwiI1BZ9Sx6Fbn8NfXyc3tENc5ryWe3xhr8f2\nxJQYnrx9X9051MrCnf0XmBSbhEnKVqgtn52tKeOe5NHHLiAs83EBmTCTBbPpJuNc04zS8igsh8AN\nUF5JzSh59WqN6XTMweCAKNVj7Lgu09mMOIAzK3rsmotVVFxSth3qpbZc7MJFvDXkzyzwVq1Kc6FF\nikAYTTtVkhvbPZLUwzM1SlQS0xIK27FJTWlVOZrRaNXxmhWUazMY6uff2hhAVmI7Fso3FtqKx8/9\nnc/y2IceRVn62sUrC3z2b3+CP/n9b5Ae6L1p+ffDe9+J9+KIPCWE+HMhxGtCiFeEEH/PXO8IIT4v\nhLguhPhTIcS7512e4AQnOMEJfiR4L5p2AfxnSqnvCSHqwHeEEJ8HfoeHqD9im3jILI1ZXO/y7C/+\nG1TaOgY4l0Padko0GGB5WiNqLs5TCgvP79JEn7D97R3qzhwv38ywai3WnvwEAIu7f0a40afSbjIy\nNQai2RjHXtW+AqG1gTxKiIuHo0eUsI+dWNF0il+pMJ0MyBKthUWTKa6ARs2n29FacXOuRrddoXRa\nxH7BwJSPTMtdyCPKIkMeOZ8siXBt2nMdZKk1yDIvaLUqeEIxMnU2VD7jfVeXj9s1mRyFw8Xg2ppv\nPXbCKpSEOM7ITEZkt93E96qktkdokpqYZZQSRBzT3xb4HT1Ht+++RK8/wq82SE2ImRKSQDicatZR\n5gMO6WDEwuV1vPkagfvgpaSOih1JgShT6lWXQmm+7vU3dtndn9G8c8jAFFcq3RZXF6u0lmvYJuws\nnEXUmjVsyyaQCkwNjNXFNu1qhQtrSzQDQ8PYHtIWVJoep4zlELU6pLkEbry3STewzEc2iijicPM2\n6SjGPnK8CQfXFiRxRKq0Znjq0Q+xZ/kMhhtYZUjHOAjn2h73Blt0uqeputoxu3Z6Cek45MMxjol1\nVnnJ+dNnCC/MODBmv+NapMFbamWYAmIzx6XISsrNLQITv993FW+O
+mSxJDG1Ls66Hh+sNtn/7qvH\nNXxEmlNMx4xrASJwmPPMV3+ERWZZ2KUiNFmapfDJpwkNp0rVcPw3EsUX7o5x8oLfvKw17M+ceXtI\n5c6+Lu3aG4yRviDwq4QmAbBWPc3CxTrrp+eZJDqOfzgOUa4DYsRkvGfaU6O0bWSeEI5GnD2jHdPx\nJOdwe4v+8ABpQg4vXjlDf/Me+7OQ/DG9D916REKJLSS2qTlkOxWc6n2/1pE1NcmnWMKjwCK39dit\nnuqyt3GAED6XTul5m44iICCPBdJUbGzLFFFJmRN11lfm6fd0PPrNXqjXiW+TFfqZP/f0z/D401eZ\nxiM8k/BTa9T41Gc+wplTl/kX/+T/AuD5V77Gu+G9RI/sAXvmzzMhxBvoL6//KvAJc9vvAV/irxDa\npTEFlJdgNWH10hUqdT0J/b279A7fpNU5g2ey3ZxWh+bcIogGSV/zqtu3v0o1i+k4dV761k1+/Xf+\nfQCeyUpG//cfszvIODAxmKWCUiSQl8hcd1OhsP2Hy4ikyHCMkGoFsN4SPHK+Tf3oayPCIpyMSKIx\nlZre0FcuzbF+5hSWe4bZaMT6ij6crtw5oDkXMNdp4hjOUSpQNgS1KoXJpLIUuJZFQsr8gt4Usygi\nHO0dN+vgUAtzKSWu72A7HqXh7j0sAtdlkg+PM84SZWG5AUUuCarmayOzGe1Om+XuPHe++zJD33w4\n1ZKUto1fgrekN4DMEohiVpotHFMyLckLPNtCWQLpPdhoOzCfXmpUAzzbwhL2/TKpyiWMcwoZMjFC\n6vVr13ly+WkqnoM4iu3HxfcURZFQSnk8dq7rsbyyTGthBXl0MOcpwrKpVH0K8xmvwHJIynf3xj8I\nZVEizXjaTQeHmGYgmU5Nmd6ZRHQaLK6eZ/Wxj+lxS2129++S9u7gpyFLi5qftMlYX+hQa/oITxuk\nF5aWcYRFI1PUqnqOW67HKb/K+vs/yN0dTRc5voMfeMfter+p9fzGYBMccKSkXTUZopWAqR0SJ1P6\nR/xvWfJI1aU/nIEpGGW1AkJVMq0FBJ5zTN91yxyEhUXBKDMljmWDvXs3aCMZotfN9+7u4BQlv/5I\ng09/UCfCnGm/XZSYs425+hlCuYXnxARNHXFVXXiMwt4mzDdQpj9V32dxBQ5Xxmy9vm8mIaCUBUUy\nTx7PuB6ZDxCnJVkyZTbdxzH+gHQuoFlRDPf2ETMdgRZNd0lxsVIfp9Trw3Lc48MYQJgErjxVbG8d\nkknJB545C8DHf+bD/NP/4V/QXF3i2V96FoA//3//nP7eiLJoUJam7HBp4berLNU6nF2sM57q8fzy\ny7fIipRmUKHw9Fo+/cQlvLpDNFXHWa+iTHAcwcWnlnnfJzQV/NILL/FueChOWwhxFngf8A1g6WHq\nj3TOmuwfWcOtwmR6QKWuw7z2917j+W98iyce/yAXzffdZOSQqQLPyZBGmC0vdhnd2+X8qS7D17aY\nGM7w6kc+RH/3Ft954QZFpE/RXj9CxTlO3SI1Hn8pJfZD1sL/xIef5vyj+uOfO9vbrK3OcfnSBZa7\nhiNTgul0RJpHx0KmXqtRrwfYXgVXZsShFoYfePwMZy+fJZc5ymjFhSxQtsB2HfLEJGnkBZZjIQIB\nJqsuzXOct/CyVbPRRSmZRTPyUpGZdHnf8UjSDNvzcI60RcdF+AFWmVL19SafDqeE45DDUhHHJbkp\neZr5Nk61QiEVgREIWILDnT0IUy6e1puvu7xKlinC4QQ/vS9Y3ood4yRbtuZp1StMJxGZ8QeU0iJJ\nSybhmKnRnqfThKrvkEUzfJPursoU1wqQlCRpQWHKgY5nMaNwRpoL2qYsgHBtonhKXo5xzKewpmF0\nLJjeK1zboTRJGklp48uCTqOGVzdO8eUrXPnA08ytX+Cw0AfbeGeb+sEG9WjA6W6b2b6uC71y7jRr\ny2f1YWK04sVOm06zyeWV09RN4kXVt6l4LpXA59mf1mvOdQTuWwpAXyhMvYtKnXyuRSYLcnO47PUP\nKWxBUK+wVOi+n/I9ovGEzYrFEx97Wo+7BbuvXaOYhHT8AFItDG2hEJZApSlTY0kqaZHJlDgreGNf\nz+UsT3j6VIsPX15gLjBCM3v7oTjXPAtAmEYMD69xbr2DqJroJznHxvAV7FqMJbXTMJnBxctL7G+8\nwe03dYRNUQhUnmPNMlRZkByl+1MyG+0yONzGM58OGy0s4Sw4WL4gMo7GcPeQDA/HmcM2iSzKs5Dc\nT1aqV/VBtnd3yPdeeJW5hTq/8us6NLFAEOUlC6e7PPsLPwXAfjzkj/+PrzKLCmSp522WlkxkzmGa\nsNhukTsm9FdUUGWM5/ucv6r3zOqZZQqpPzEnjpKmhAK7IGfKT39Gz9EzH3s//+M//goPwnsW2oYa\n+ZfAf2I07neKv3cVh5/73Od4/vPak/7IeptnnqpRRDP6xoTa3HmRhQWXPBrx3a99SXfYrdJZOsXS\n6joVc0pV55fwKs+Szd/i9CTk3i3tWHn0Y7/DUx/doT+MKe7pU7rtdrCtGqVzP8U6iyc4D0mPPP3k\nIzz2fr2B4scvUGs1kYA6+qqJ7TJXW0ZZ9x0EUkqdnZjnpGnMBVOQp+LViMMxynKOa0UroZBKfxPw\nqH5zFseUsobliGPKY9qPuHdnk8/8phlso0ELz8OVdVR+39kmEFBI6n4H28SS2rZNUpYUCA5NyneU\nZ4R5yjSOKB2bzNR6FoWiUkgECsvUUC7znFEmKYSkZhabZfv4wtR2EQ929A1NSBajEWGaUGQFnqM1\n0DRTjEczBtMxd8yHAC6vtfFtizSKKU00S56XVH1BOMvYuLtDZsqjXrt5jzjLiHMoRqaUqGPRaLYY\n9Ac45hAtlaI3fJjvFYHlBihTuD6XNdrLlzn3+DPMXdWb1+ks4aJIpjMc8wHh842AR65e5Ozy08xV\nXb7zTZ0V+Ni5s5w5d46gUjl2EFLmNOp1avUaVRNZUKu49Ad92u0OzYX/j733DLYsu+77fnvvE29+\nud/rfh1muicHDAYASYABJE2DSbQpipRtUpTpkuWSHGRLRVnWJ9hVVqlcsl0ql21K5bLKpqpkBpsS\nKAgmSIAYDAYzCBMwM5jQOffL79148t7+sPc973VP90w3CVGE1evDTPftG/Y5Z4e1/mut/3+aiCwp\nJ/vVDl0XureGhkZzBh3EOH4kfN+gWwGm0Ji+45AvK/x0Qk9KCtfGvre7i1rfplFmhMqj6xSTZGUr\nYsp0QuoOgjBosLi6yvbFiwxcBcaphZCPn+iy3IsQLgoV/s2ltBvX7SY5u9hgoTdHFET0Dttr0loy\nCbpsDft4TksyGY1h3qe3GNM+ZL9zrjsHieDsa5sYAhbnHeXp4Dpec8Jyr4F0ykoTucHq0mGeOPED\nNFwJ5c7mLg2/RzDfxA9shK2NpqqlhiB0QgRXL77FxXNXefbDD3PqYeuQ/e5nv47WPisnZhGuQOJ7\nf/z7MV7M1158i41N+yy
8KiTzDaOWvd7ta313TwKavmJmocef+Tlb3tdbajNJE6vJylRZSRP6kq9/\n6U1e+kMr6iDeJ914V5u2EMLDbti/boyZtqzfNf/Ipz/9af770a8DEIj4bn7yvt23+3bf/rWyj/7A\nozz9UctgKfH5B3/v/7nt++7W0/7fgbeMMX//wGv3xD+Sn7cQQekp5uN52kEXlPW4Thx7Gq8ac/H0\nJdYu2MREmpZUKmD+8FE6Hfu+MG7y+DMfpTvTpvHm2/SH04aJGY48+eM8eE3z8uv/CwCryz0SWTIa\nViQTe+qrYsLsyr1VOcbNJi1H0N9seOAptDmoHyjQRqML7ZpmbI1vibZqMELSchqTZaWptAItMLjS\nHimgElSeXyvHUOYIXRFqhe+ggGaqMOv7RDd6KsUhJEIpAhmhnfetqwqhDEru48dTzUIpvdqjb/td\nqqqy6iUNqMr9MsSyLNG6YjJyvBZaY5Si9D12HPOcNxgw60nCyMe7g6edFE41fpSwtbNHI4hpOpm5\nwXDM1vYuu5MBsZOrmum1MVXJ5uaAz3z2c/YSEZw6dZJ+f4+tnV1WVm2O4O2z58HvUJmAzHG5eJ5A\negrlh1y+YptBvCDmyvXrd/nErRkvYumIrf1+6uM/ycKJp0j8Lldd4ip5922iyZBZJVh1HuDKkQWW\n5o4y01A0PPieU38egG67Qdhu0Wg1mbi5uLm5xqMPH8Xzg9pb1VUKMy2anTaRS9yPh0P62xs0lyx2\nvHzc/tarb20wXFsnVBA4uOuwHyJTyIZ9ZrR9HiZPkCqnYSTF29aLa2UFrUlG0AgJygIcm+LQVJgq\nw2BItF0n3VaX+bkF1t9+hwXHOPnscshjsx6BzuqIzxygOwXIHLxSpAHdeJlsohkp+9rO3pto40O1\nSNywCb4nH38MPzCsHB0yd8SOsx1nnFh8mPULI/o7I2LH0DlJfZZWenz0B1cJYgcd6ZAZZlm/0Kd1\n2HZURgtt/DSEykO460Fo8nwfbqrc/EzGORLJRz58kmbDvnb+9DU82eTIsVl2ExtxRjNtPvWzn+TR\nZx/j2g2ba/OMR7vps7jYY3lpmc/8pm2Ie+31K5iy4OShDqtPHbe/408wWiMKge9yM4iS0uVQanIw\ncXOH6UG7G7mxTwC/CLwhhHgVC4P8bexm/ZtCiP8AuAT8wvt9zyB3dY5FyXwqGOxlzLqHcOTos5x+\n43kuXLrE9g2LA544cojBcI93v3aGKHTJuKQgJGOlrdgb9/G6jiP38jmWHniUj//Ej5Imlkf47Csv\nUOUZQnn4jWmNpUeWpNyLtbuzGAc7TLIck2VkWc7YbWZ5kZNlBWWpa1a4osiZTCZWrUVr2rNd9109\neu15oiCgcgsVUSIpabcjtjccH3cyQusZBAHaYb2ddsixo/vtzA0XZmdFWR8i0wdeCYlC4Htefbh4\nnusoM6KmKRVOrFcIgZKy3rSLoqSqKoo8Qzuc3GhNGIeEYViz2Q0GQ6qqoNmKqarbEzL1HbHVeGJo\nhD6hJyhcgnBjc9OKmwrNoUM2JG03FTeu32B9ssW5i/agz9OKt95ZJ4x8xskEr9l116kYDkcIGddq\nOu12g/5oQlWWNbyx3e9T3kbj8f3s0OoJnvk+q8dYzR3mKxcuszWY0M4szPJ0u+TRY6ucWD5Eq2Vx\n0Z3dTTr+Mg8dP0ysDG23yflKgO8hlCRycyn2Zxjv3rDPTE8lyBJK1eDdc+d5/jmLZ65fu8IzTzzM\nLz/2DAALR20FUXJ2jdzLaDQCisSOSY7AX8vppQm43oRMFQhRoT2vbvU3ZY6hIk01ujCUrgXdKIOS\nBi19UnfvgmZMMR6Tr9/g8TkbJT+zOkvXKxmXJTiOdy1uDud783Y+NNsRXnScfjrgRt/Cobvb1wnU\nHCpSbOzZip6Ny+coJpKjqzGHD9l1fe3qJsXsCbrzPTbW01r6bmFmBaFfJ/Jge3DV3c+AyU7Bu2++\ng/ZsbubU6hLtzhyFScgdbl+WAnWgGicOHeNkATNzDZ7+2CkabTu/Ar9JuzNmfmGO0OUiqkoj/YrV\nU3Mce8Sux1YU4scBlYEwCFh80PZQTCjpj4e0VnrInt2DEiHw8cnzgqqGQEpEURJ5PtOOrqmI8e3s\nbqpHXoADyP3Ndtf8I+PAeptJknIiXGRtc52LTq6n1Wpz5cJlJsNRPdid7S2Wlw6h0xLtFnlWpfTX\nLlOtJ+zt7tCO7Cl99p1vsz1IeeDEIT70rAXyL337ZYpkgCmrGvA3ukKbnHuxf/qZz1H5zwOwu7vO\nqL+FNJC5aof19XUqbZhdWGRm3o4nVB7jnT1On3mbwWjEqhPSVb5Ppz3HiRNHOeJEEE48cJjZUNCO\nfLRr5UYpiqpEeRIV2oe4dHyeqHMAN6z3IEEQBAgh8H3nXWmN0AYpZN3623AJRau0vr+BKSlBCIzW\nTCZTr8UQBgFRw68FWrWu0JUmjuP6x/M8Ryms91Lc/jD0XFLGkxCFEa1Wm3OnLwK2pK7T6yBjaDbs\n2KXJ2d3tc3Fzk0bPYqCH2j1832Nzc4vtYcqGa9vOK5BhQJ6bulLEjCf4gSJJJjRcaztBhJY+nLsz\nc3KlcZoAACAASURBVNqt9sDDj9Gas4vyzOVLLMQ9ltqGx1ftglzxDE888yQijPjnv2Whv6+99BXm\nF2f5m7/6X3Dq2BESV37qozEOh5/S606yMZvbG8RRg7KwWHGaDNlNBb/7ha/gucX7wh9+gfUrZ/nl\n//y/AmDGqbk/utTm7FZGWkHmWnbTIgNTsleUbLo2dulpolKDLDF6SpNg1cdLI2gAzSlLoSnxtKSQ\nkpE7BQfJgLXrlyn31nnysJ1DK0tdqnSC5yuCnt1g1S1yc17TRhSHjsyxvp3w1ZfeIOra+9+OIq5v\nruF1cgrPerBnL2iuvlXy8e85QkPZe6QnAdqMWDpaceFMQOWaWfwg48hSg+TagCvnLE4/F0Vcv7hG\nM+zRTu1c+trnf49HPvIkC0tPETk2wLysiPz9lnvPjdtUgqMPzjF3dIZKuo7bwyvcuLxLqzOHcuV9\nhc5JKTHGELmD0VCSlTlahchKMjtr54j0Q0rl01ueIXPUsIW2uLqlobCHi9aaOArxvADpaKSr8o/h\naX+nTB61k6g5bjIY93nxS59xvJY2GTBKBhgK2q6Ursgrtje3UEaTO3L+Q4tzNCMfr8hpNTv4Lvkx\n3t2myEre/OofsLNuZXvb3Rl2N7fw8VBTXuMyp27xu0v7/T/8Kj3XkWmqEa9+9Q85duQI844I6drV\nNUpd0ZjtkTsF7fWrV/jRj30fH3rqcSZZinQ1zBcuX+L0mXO88ear9FyU8HN/7mf5xOMPERjJkWUb\n1uVO0VsbQzGFUbyKsLevw5dlU15ny8frHfCqjTEoIwiDoN7Iq6qy1TNqf3EJIax6uBDkZYFydaPK\nKKQSlu/BJUylsN8Rx3F9YGRJguf5YEzNIXyrTZynLdF0Gy2KsmLiEloPnDzFmYtn
-      [... base64-encoded PNG data elided ...] jRnmCCCe4j/P/TCWL5jVFYFQAAAABJRU5ErkJggg==\n",
-      "text/plain": [
-       ""
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "transformed_images = []\n",
-    "for i in range(20):\n",
-    "    transformed_images += [trans(cifar[i][0])]\n",
-    "    print(transformed_images[i].mean(),transformed_images[i].std(), \n",
-    "          transformed_images[i].min(), transformed_images[i].max())\n",
-    "show(tutils.make_grid(transformed_images))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 16,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "(-0.3257020916595745, 0.49030737107138356, -1.0, 1.0)\n",
-      "(-0.1148718173111168, 0.5943530690757043, -1.0, 0.9921568632125854)\n",
-      "(-0.1876123301917687, 0.6578509306606333, -1.0, 1.0)\n",
-      "(-0.45916819203800213, 0.36674404239797703, -1.0, 0.8352941274642944)\n",
-      "(-0.3001455154347544, 0.5464976989913715, -1.0, 0.9921568632125854)\n",
-      "(-0.3879825306551841, 0.5142138738794487, -1.0, 0.9450980424880981)\n",
-      "(-0.16791767110892883, 0.4776867721654128, -1.0, 0.9529411792755127)\n",
-      "(-0.07867900658554088, 0.49251211342491164, -1.0, 0.9450980424880981)\n",
-      "(-0.012275311339180917, 0.6259931231081871, -1.0, 0.9764705896377563)\n",
-      "(-0.47579912012831, 0.44796901896179764, -1.0, 0.7098039388656616)\n",
-      "(-0.4709048134003145, 0.22142046144980368, -1.0, 0.019607901573181152)\n",
-      "(-0.07774712605169043, 0.6400356728895145, -1.0, 0.9921568632125854)\n",
-      "(-0.06678664839516084, 0.6134990363534119, -1.0, 0.9686274528503418)\n",
-      "(-0.5750025513892373, 0.5272717873515015, -1.0, 0.8745098114013672)\n",
-      "(-0.410664308796792, 0.43596309108907383, -1.0, 1.0)\n",
-      "(-0.06828531355131418, 0.5641918783797807, -1.0, 1.0)\n",
-      "(0.003199054510332644, 0.6288654684816006, -1.0, 1.0)\n",
-      "(-0.33659619160850224, 0.39841029565502767, -1.0, 0.7647058963775635)\n",
-      "(-0.2228324031845356, 0.5534736178810422, -1.0, 0.8509804010391235)\n",
-      "(-0.22320004721404985, 0.4582661803925075, -1.0, 0.8980392217636108)\n"
-     ]
-    },
-    {
-     "data": {
"iVBORw0KGgoAAAANSUhEUgAAAW0AAAB0CAYAAABOr2PFAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvXucFNWZ///uk7Isy7Io2rZt2qYdh2EYh2EcERFREa8Y\n4yVe1hhzWzdrslmTzTfxm5hsNkuMm7i5mJtxE+MvicYkJhoVbxFRAREQRy4DjMMwDEPTNk3TNEVR\nlkVZ1tb8/jjFoAajJtnsL7/XfF4vvBTVVc855znP85znVqnh4WFGMYpRjGIUfxsQ/9sEjGIUoxjF\nKN4+RoX2KEYxilH8DWFUaI9iFKMYxd8QRoX2KEYxilH8DWFUaI9iFKMYxd8QRoX2KEYxilH8DeHP\nEtqpVOq8VCrVn0qlBlKp1PV/KaJGMYpRjGIUB0bqT83TTqVSAhgAzgKqwPPAlcPDw/1/OfJGMYpR\njGIUr8WfY2lPBzYNDw9vHR4efhX4DXDxX4asUYxiFKMYxYHw5wjto4AXX/P/leTaKEYxilGM4n8I\nyv/0C1Kp1Gid/ChGMYpR/AkYHh5OvfHan2NpbwOKr/n/QnLtD3D66aczd+5c5s6dy6JFixgeHv7/\n/Z+5c+f+r9MwOu7RMY+O+29nzIsWLRqRk3Pnzn1TwfvnBCLfBWxEBiK3A93A+4eHhze84b7huXPn\n8tWvfvVPes9fC3EcAyDEXyYLcnh4mFTqD5Tkn419dL4VgihGiSOUyAPgdz/8Dj/79tfpaM7T8AIA\nclNn8G8/+AmqmUER6l+EvlQqRSqVGqHzqfs+K+nxBWEA7W3HEEUOAL19G8kXijieT99aGb/uPK6T\nRr1BeWsZEWoATD9xKkObh4jimOiQiDXP9wCQNnXOeve59LwwwKrudQDEAjRNQ1EVsukMAENDJeI4\n4qHF5T+g9y+x7v9Ta/1a7KPz/B1PAPCrfofBaBqt2hDj9Ir8u/e8l7//5y8zf/4CfnL7bQBstatM\nmdrOFWedgmFmAVjas4UHn1jKI/Mfo7KlzPZKFYDQD0AXBESEYQiAqqqISOXitiI3n/x3ANxuFgkq\nA3z9pzdy6exzAWg9YhInXffxETpve/EfJd2hoLl5Ms0TslgZyWMTOzsoFFQe/tkgC2+X7/nGXbfQ\n+cGAXPw01UoEwFDd5r2XZskVPXp61tHb1wdANWqwxXseRIOTWt8NQCaczcJfGnx/7u/RNPnMC67u\nok6DbCbHWee3ADBYns/TT77A8Sv/C3jn6/7XWOvXvmv4AJb2n+weGR4e/u9UKvVJYAHSYv/pGwX2\nKP53EMcxxDEQM9jbC8C8X/yM2uaNREcWqO6RgtycMJErBwfonJYmjuO/mMJ6LbJZHQC7HjJQrtIT\n2OSLFgD5QgbXraOpGm2T5KFNU2Pa2o6mkE8jfMmeURQw/fgpuN4ePDw6298LQL1eJ50+lNajC+TG\nSoG0dkMfR09oYvcelymTWgHQdRXHdoA/FNp/a7C1qQBMmXkM1//XKkpazCJDCt1fTTuZUz94D9+5\n5/fct2ApAKdd93/45Y9vZmWxi5bOLgB++OiN9K9YyXmXXszKFct56WkpJF3Xx/ZtAiKsjFwjXdPR\nayGmiMmm5X0KVS5sTnP1YQcx9QhpAAix6XV0qqpUuI7rUqtvxbRj/Eg+8zl/NcdPy5Ox2lE6JgLw\nTMsdmOUB3m8FpNVxAETeZvrWucRCkMu04bfIcToDmzjYL4A4iQd/I9+vRX0ofivd7UU6J0teOnpW\nzLKhZXhKhaeX1gEYGLBZvizk+ITOD/7fDwNgHGZg73bw/AChSOWiKiq6rqOrOvaQDUBlyxBnXzCH\nkL2Myx8JQLG1lZxm0bOkm83btjHuuEnymc1pNg30M6mpmaZcHoDVq3vY7oVUt+6mWpbjUZSA2XNO\nJI5h+YOrAAgbIc88cfcBeeDP8mkPDw/PBya91X2zZ89m8X/IxJK0aeD7DvWddaq1miQwDAnxMUwD\nEovvxVKFw9M5VPZtOLAsC9M0iVSdLaUyh2pSyLQ0N0EYELg2XugDsLG0nb7BOks3NtjkyfvcSKGp\nqYVv/sf/t63+PxtCoCqCRqXKr38sLYrVy57Bq1d59N519N8h533msue468EH6Jg6FckKf3mh3btO\nKg3TyCEERHFMb5+0qltbmlFERK1SwkxLoVupljANkziKiRxpMRFDLjsWXROEQYSmS7ZNZ0wG+tfT\nUmijpXBEcmtES8dkGs4u3KrcqEccnmbckVlgxdume+Y//AYh5Hs0IUZmRlUlfyqKgkiu735xDUeO\nPw5FERBL6+11CjAGNVLRdAFC8mekyt/qsUAEUhgiBL4CoQoBYfLuCCV0Rx41MPZ0ANpbz+bcu8/i\n98sfxS3tAaAeKNQjlcfXbOKOb34TgNJOl5VzB+lZPUBfSQqex084i2u/PJf2DFz7kQ9xmCLfNSaf\nIRfqnHLaSZx93nkArOvt5affv5W60SBjSWV/RqnKvxRn8Ox5F3LJ1AkAPFV9vdDWlbSkP28RRAGO\n72Nk5DXPidm4UkNlMorRAcDnrluJIgS9d+pMz24BIF+IqdkuogxOWCZQnX2LQGfxBFauEAwsl4q4\no7WZmt1PepLH5AvkM2OtRpMykaGeLZTqm+Uc1XwCVwfJLnzrjmuT9RT4QUDVqeO8JMdpVxzU2CB0\nQrxBmXOhHzSJqz95JXvjgKOapCA2TRNTORS39hI122XtlhIAC5Z0c7gSM729meajpCLp/2k37uad\n6BxKzpTzEcV1Qs+jd8MOalU5RiN8c9H8Px6IBCm0Vz7xeQDi0CeOfHbdXx/RxoeZBrsDB900iZJ9\n2t8/wBrFIGNlcV3JtGEUoGkKTiDoCWPWqJL8TI9JxjIQsUdKlZtFuBGN9b1s7qmyeEBOxKADTW1t\n74j2af/weZ6b9xMA+hY9QBhqdJ5yMaeceykAzV2nYqU1Hrnn2zzz+K8A8Ks11FCj0FxEG5Ph2s9/\nCYDzLngvTnmQe37zM6LEbeH5DvfdezeVUj+uJ8fpeyqDAw2279xNkGzYyZMn0tyS209YLDd6LBRE\nDETSNQAQiRjigKXzfsO8H90KgF0e4LneAe55aj1+8oimadPwA58oClAUA17jeYmJpLWeCC6BOIBM\nj1/zb3nfG4+Ozk5pCTXlMyhoBHFMrMjfNWwXS1cwDZ0okAsfBSE+HnEUoSUCUtd0BkovIIRAUxUG\nBwYBKG+vkh+bxqnXsZFrrKkKQ6WNBGGIv6MhqYtjrMOtA6zumyMMQ5SEzlAIFMTrXFNRFCWCG8aM\nO5YgCuU9yVyJeL9LQ1FUQgSDpTJRLNczl86holBxXCxLbl41bRAjiKKYMBH6nufh16sj7/3Rd78G\nQGfnuXh2QEdxgE1r5dg/+Q//h96BPnZu24WzVwryh+fdwweu+ThnfvFz+IlB812hcurjj9FxtMaN\nl91IMEHOjWrkyBQnwPQZbDxW7pPfPb+Ez2ViWiPYpBgA
fKGrmSfvvJv0MRMoqlLZ1rz9NALoirSW\nvahBJnM4iir2sRKaalDqc7j/mVtZ87sCAI8sK2Lkz+HpcolPXy7vK7YqeICv2Kwrr2RTVVqh40QO\n1auxbcjlqJntAFxwwTT0TJ66VyXo7Qbg7t88zJIoS1ixmN55HAClDU9j+vt54Yvfli4fIQRRHOPF\nPtGdct2iBqieTtQIif1EmFoaH/7nW/lM1uAqQ/Jn4E8kCkFEx2GYWS67eiUAv737aYgEZ51+OsUL\npID/7UX34Vbms3NnSJC4KbM5iynNbfSu2Y2abGJDf3PRPFrGPopRjGIUf0P4q1jaAOmsPI/Uqlt5\nFQU9kxs5fh6ZyzLWszgojvETqzqTacI0LHRN56C8tDBf9m0O1gXrBxrouomRuEeCOKLacPF0jbGm\n1KJj1ZhNk9vwY4Xk8EnU75DWBKV3QHdlaJCJTTKIsbG9k/jgozh+9qUokbRalKhB1AhwhgaIEzfO\niW0dnHryHGaeN4tHH3mAjo5OAHTdJGjKMHNGF0EgLW3HsTnhhDP41a9uRzMSC0CoNLeaWFmbckUe\n60xLI4qDEbr2GX0RMUokEGFMLBILQYeh7iXc8+2vYp8svXebGg7/tWw9WxvuiJWeyRc4dfZsEjt6\n/7OJieOQOI4QsZ7QlPzjNTeK11naB0boyTV2Gy6arqLpOk4gx+F7Lpaq0FxsJo7VZD4cGo0GumXi\nuIl1Y6gIP8LzPGpbbRgjaTLGWKioWJpOEEka/DimsqOOeqiGpiWnBCFeQ+vbxz4XhyIUBK8PAsdx\nPBIHiMTID5LZk+4TRZHvtx2H+QuW8NSi32MY8hl5Iw+Rgu27ZMZJ/lZ0DUPTETG4kdwHte1DhE4d\nkAHdxk55yvCUqXTlLCYXpzP/UJm0peYtVt//CGecfBbZgyVJx9Z38Ml1A8wMa1BIfLWagtK/A/XW\nMXw1MlGTeYpCgV+tEd4/D/Vr3wOg+PsVbDh6GqsKR9LZKa3aJT2rqcd1zi+cRgX5zHb1DScZX45T\nBWqDW1A3KWRPlFa1Yah0zqxhXOjRsis5jTTBgieGiHwF04qS+/qJojJh4NBccBjvyz23pW8JdlVB\ny3XRMWsaAP32fNYvXka16lAuy1Pb4FCIrgXkcjEDlcRXHPlsGwrgcElmtqiMrLGiKIjYxIzliUKz\nBXZvA9CIklOfMBXMQgYsBT1rymuqwPFCXA9ULKZOk/M0p34Ga8as5b4XfoVlyHt7V65gxwsvYDci\nRDLvmWPGIzxoNsdiNcuTi6qG8BQHxF9NaLs1KXyUKAJFEPk+BycCNgx9DFXFd12UZEPnrDSNeh2b\nmLQlN+lYU6Eln8EqNwgin3xWHjkCz6FSquNsha0NeSRWlYgoiujM62haMwB1tw9fBLwj+D6eK5ml\n0fA4vWs6dr2OlxyXWtqKaLrC+edfwF133wfA7x6dQbH4S3wtJGOZaMleF0GAXa/h+j6ZJLOhuamD\nc8++HF3Pjfg7XbdBsdCMbkC5shaAGI8o2i80AvZtCkEsYiJFBh4B3EqNX3/1myx5+DFqdTkf859Z\nw6bBBp4fcsZZ0i96/Zfm8vVvfhWEmhzr9yEmjqWQ1BPGUoUKyhvE+4jf9o8c2KJ9wtgjo+iEKCMC\nulhsRickDsFKy10UhjGWCYqqkEt8hvVGHU3VKbQUKceDmIkSr3suOc2gq3UKK1avkb8PQpqOKeIF\nAZaRCM2GPSJA3y72uT+S2divJV+DOI4JI4iVJEAXJ9kWiqDeaNDfL333Ty1ayCUr1zFUj8iSBGad\nCMcNseOAHU+tBuDgWGAJDTWGMHmmv3gnlgonnCbfecO3vwFA9v5f0aFrXJExcD8mMzjorWItXsxN\nn/oM6pCMW/xH7JFVPMwwQq2+AoBmvguRV9GrLxNHEWEyNp+AsF7HjCKMZIxpRZA3LRoixl8ts3bC\n3he44OSTUetlslU5Rxfns2+YQCk4DU2hXGsgNJXaFsmLHcflOf28NN5MgzVzJJ1dtNJ3cY7KAghi\nqZiqtQHQFXQ1TZNZwGyWxs9x8SrsfMDLrsozPQ8CUK4O8chKCJwchFKueDtC2k4oYmUF65LYiqKZ\nvCvaT6aeBB2jOCYOQqJSyGDvgLw2tJv+R5aTRsfIy2cWOpppmX0ihpImV5BjNvJp3FjQcAUDq8v0\nPiTdM/fd9Ct+t+JWNsztRUsM1NALcKserqsQa9ItVlIEgy9sJq2pnHPSMXKN8oKf3v5GjpN4S05O\npVI/BS4AdgwPD3cm18YCvwWOBkrAFcPDw3v+2HO2VurJCwWGrnGEZZLRxwPQCHaTzo4l9ByixAps\nuCFuFFCzKww4koXymQyq41PyfKIw4uBYTtormgaaIG3oI9a3iGNUVSWXNlh9kBSG7TkTG42FbzXo\n1yBwbETibzWNNOX+flq7ZnDqFTJQ0zFzGrpuQODjB1KQz1s9QOPJPnzF4+Ff/4JPXCqddP/2yX8m\njmMqlTJLl0i/l6FbGEaBtvbpLF3+e3nNylCz61Qq/TJ4BRQKGWy7MULXYF0KhJZMES+O8YnIIGME\nT/7slzx0023Uyv08NigZ8NG+EqEDZtri1ttkOti8+fOI0IhiFRlDSyz1OML1PObdfz/pJEB4/nnn\no6FJ63KfkN4ntF8v8V8HFakkZ3RNwfN201dqEHiJcsLDyuWJ3AjHkUrddwNiJZZWtpDjsVSNUqnC\nqV3HcWJuPNsbcqOXGjFRAIdlBHumyiyEJ7s3U2sExKrC2q2S5+r1+jvOjImieERJBiKSfmohRoS3\nKhiZiygZvBcElPr62NA/wPLnVlCqyCDZgiBECVWsdJqaJ09YYeCwveHixq+O+PgnZI7AQMVCxUPe\np2tjUeJXRuhqbJRBOnPiRNw0DPRspt44FoDKvY/iTHkPxd4yuTZpqGhNGoZlgJbDWDIk51NAtSnC\nrT4uTyGqVCS+7xO/ItBjRmIErq4QiZhgYxV//UYA0odb1MR2onIN5RUpQgarr1dqmpEoMgHNxWZs\nx8Wz5Ykg8AWW0UlTc8zHp8tsGJuJNB/ZyhOPu8Sb5LorcR4UDcvMgm9QKUv+N0IdIy0QTh8L1y0G\nQGguVq4dNd1EdUD619Omi8IAfiMzEiur1etsr+6X2r6T0Kkq+A2P3/373TSWyXkKBr6NeM92phWO\nwUqChkaXAxcPkj+1i2xy8tW9kNAPWTpvMTd87mbePXyyXOPuCl3+GLqK0yCRIaWwzIAVsUNVKdvy\nWnmgiqpG5AoqIpDzee2nPwN8gQPh7ZgfPwduAX7xmmtfAJ4cHh7+ZtLd74tv+gb2TZZkwnwuy9j8\nJLS4yPoeeQRcuuEZ0kdn0OOAxTvlhvajkILpE8YRjX0OjsBHjR1iAXHo8bKTZJ8IHRHF6HrEWMNI\nBqagayrv8lwsS2rTE9rzVH39bQx5P9xGnVxaatmjJhzLJ//5Y8w8+3yqyYngoSeXU2k0qJVKbCpJ\nATlp7RCF8e2
guJxz6VUs/c09APz7Z7+Arvt0dU2DWArd0lCV/+dnv+TmH3yd7IZlcphhjFcroSrQ\n3i6FURh6DAxugDmSrjvmyePr1MIMfEUl21QkWlcC4Huf+RzVeU+y0a5x83PSAq37AYbQuOHGL7Nl\np4yE5/J5bM9FBUJClNcEv77y9a9y87du5qijpGK959e/ZvbMWcRxTLQv2UHVEEI6HpQ3EYrpZO4b\n9W0EQUAunaWxU47d3mEztXUyrreLvCWPj4ZmomXGEKs+ji3pNM00gROweP5TWOefg1uVgkuNFSKh\nsbF/FU6isALXZ+v2Oj4Bg6Vq8u46LS0tb3fJ5XzH2sgpQQjpKlFQEGEyeAV0Xcf1PGoNycdPLHyS\n1WtW8uuajdsfEianlFfimPGagePFbPPCZI5fxnVdDENwVE6eMnzfwQsgrVtEYre8FnoIfOBQAJ6/\n534Apj74NBd25ciXuikN3gSAM28BTeefR/bEE1E7pJDBqxHf+xRKa5agVbonqqUabl8Z1T+YwDRx\nE6WRnlrA0nI0HnkMNpQA0AwddXya6sGCgSQorhUmUR7aQN+aHjRFzvumrMVBr5m/1uOkso99EJFB\nfUeDrtPktXHTTdrdqQR6iXReygXL2kb22AksX5Gjf6N8JkGZdC5Ha94iqAcoYZN8ptfAdeuErsqS\nQJ66ghjau1TK6/pxX5ZZMsWxGoGo0buqzqa6VEKqY6Ga+2VAuO/0qoe4/XVe/NVS3HlSuTS2VmjS\nVU5UdRr2DgC6OtrQGzGNJX04q6X1rlkGvUPbeOi2X/LD3grFZukeqRsxjmOjEvGyLQ2IobBOWzpH\ngMr8hD+fKnm0pFuYlNEpb5aKqbWzyM6tHBBvaX4MDw8vBXa/4fLFwJ3Jf98JvPetnjOKUYxiFKP4\n8/Gn+rSzw8PDOwCGh4drqVQq+1Y/EMmrxpoaHWmdWj9onrRgP3LxR7j53h+xtVLFT1JexoiQjlaT\nwIfuxE/tKQ1yZsj0yW0UCwVEEgwcXyyyUyh4dpmXEqvn8HQGUBAK5HPS+p6u5Kh42jsaqGnq+Kr0\nre5O51hYsbnjP3/I5gFp5U9atQ5dFehKxLp9wcXKy0w1zqB37TMUbvoO27fKirV7717I1Klt6LrG\n1Jmy0GHazC6W9ixH3P0sHYdJC2Dxsn7wIyIvIkyquyzDxNT2WwjzFvwagIXxcjKZAs25PAu++VMA\nVv3oNwg75hdr1jBYl1ZHGId84CMfYtFzj3H5VVcCEBGxrn8dTU1NFIsZ/OT00DvYz9wbbuAnv7iD\n2++9C4APf/wf+ew/fZKu9g7iUFonxeYWWlpak8Dmged1+mmnAFApv4im67i+j3GYvDeXKdC7bg0a\nKk1FecQ3dRV0FSNj0LB3ARCEKoXiZLpXPM/Ous1xF8k0rbsfegzPCWgEEfm0fGY6baBur+IFHs05\nab1rsc+UlvzbXXK5hrGJmvjetThCU0BRNfbFDequzVBPD889v4KqI90g8wb7UBoqQihomkrIfqu8\njs9LfojtvyTH5LpoQmDqB7E3Cbg69nZMK0ekxCNh0+gNmZZpVf7NWOr8+/hWxAO/I0wqTDs/eBHN\n136CwbxJY9Fzcj6VAFavht4qXChtq0zTKQx96QYsT0GYObLnnSrJnJTBj1XK3/0O6vNyH+U9H9Nq\nxjzxeMxCcprJpakPrKW/dwA98esOvaGg9rIrJI9lzLGocQbjuzny/yp58cOag4inU3UepVL/HQDl\n2lJOPG8Dy2+/kJv+bQkALZ230zWjkzOUsygYXeiq3MdGehroIZNLvawVcux9Q89RGqwy0B/jOZLO\nyM8SvGRi9PtMTuIgGytlRLx/Rr3ExWEpkBMmY+p7aawdHJn7w8dl6TQUzMIYyQumYGhwI4iQKJLj\n0VQD1+1hhpWjoy1PLakvqIUOr3gNPHsbZuK2PW5yHiMC2w35xpB8D7GDEQXkjAJBQlt//wBvhr9U\nIPKP1sJ/5Stf4aVV0k906EEaNO0m8HfR2C6F7vaSyf1KzLtEzBGJf+2z757B3R87hxWra0z6zI8A\neM4OyWgBTr3Meae3cUK7XIjYdzhYVTi6WBzxXe61XQQamXyaw1wpTHeKBlr+nW3eTKaTF0pyYec/\nu5ww+C2KrjE/CU7aq+uoSoTtVthSlcK5uuObLLrxUxy5dCUXz7kIApmj/s2vT2HWmWdywUUX0Noq\nk+1NS6NYMHlXUGb7tiRwttLFLlUJw91YaTkftUqVQn78CF2O5BfqVAmDkPK8J1h024/l31VdVmzd\nzbOlShKgBDMvuPQTZ/OjH9zOD34iCy9y+SYemv8kumEwfUb7SD784sXLuf5zX8LKZlGycqP8/OHf\n0pzNM+moJmZNOwmAf/3XL1BsykIMSlKg8Uakk4KKbC5LGIVUqw5WWm4AXd1LJSgRRwEeL8sfKAqV\nymZyjMW2E9+3gEL+CPyXoa93A9UkiCtQUYVCGHpkc7LBZM/geg5TYk6bM5solOtmNxqEUcQ7wXbb\n4aB9eeIiQkQhe1yHWk0eaVeuWk5fXz8PLQxQzGTsIiIK96JxEIrQIBHaISGN4BUQgijx8RuaIGMY\nCFJ4ngwHaZpPpATY4tCRIrFdXlJQkrhHjIzk73+85jTa2g6iNKNA08kXAeB9GlZsuZNoXS/+T6RS\ndzuzGJZAK3t4q6ShEb8vR/pTn+GXT9xHRitgJf7W+PZ5pPkdjSfuJe0lrkdD53v1PlYuc7jyDOmr\nbc4XCF+KSetZorTcTyefMh1+2T0yf5fP/hwA9WAWpm5SyDYTGrJKc11lDZ49Btfux0vWJQgcLmhW\nWBPbRFsTH/vJDZYsXMrDQyqXnNlJ3pBzEIYxvrcbU8tzYqsMwj4xqHD/okfZXomZlLjaBA6Rp1Ew\nLPwkp3pbJHB9Z4TOMEoqKmOT3uVreNb+KI4r50M3dCwrgwI056SsKfX10Xi4m3w+g5WVykE3M/gB\n4Apq9Tp1Rz7TNE3GCYvtwQ683XLDTsgchRK+yt6XD+WbiWsnDlw04bNw1XOsrEmfyN4dB95P8KcL\n7R2pVOrI4eHhHalUKgfU/9jNX/nKV1AelwLNMNKU3IjnKuuY3yevRUMKQoO8lqGzSVYzvbswhX8Z\n3EhH6HFkkgKkRhoxCpsqIcqzA4RdZybPFGwdqjBW10bS+wKhErgBY2PZiwJgrGGRbnnLAs7Xoaml\njcqzjwCw6o5FHKH3sbU+RO3F9QCIKKJUrVGyHbTEV9bWOYV0vsj02R9i5hiVhXd9F4C1q55l4pQT\n6Osf4KqrZHHOeeefzcyp7eQ+fTWnzZM+7Ueft3D1iIjCSJpfT8/zGKY5QtctN8lMFd9waIkUmvp+\nQrhB+oCHKlUeLtVpAImC5yNXX8LhAxruFz7KquekL86P4Lrjr8UwLUxDECab96STT6O7ezWXXnkV\nx56TpAz2b0ALYpa90M37L5SWZffKpTQaFXTDIAh8DoSVPYnFZ5pomobj
+CN+0IiI1pYicRzRcCVT\nNxo1wjgkiGzixKeMolApb8NxPAY2VFilJVWJ5nh6N2zG9WqUNkujQB+T5szTptLWMZlySVoyWU2h\n3vijLPoHWLLoFzycpJi5njeSinhzImCDlT4g0DSFOLlP2gsOe8NdHIzKK4lPO4gjhBKhKypWQvsh\niooqIkBwSBK020vAntjFc7ezN7GebccmCAJAFqtcfaUMgF967cs4W+sUj5tC95pnAKg/tILmn7mI\n3TvxkiKzXNhEVLXxQ49gucxSWd27DmNqKz2axqpH5mMktvxZm7fS0lfGpAaK5OWh0OKy/kGUgaWs\nHicVcMe0NgxhYjQXaCQxpNZpx70uwO+7ckzVegm9XcUW3ZRKj8m5Xf1DFHEXJjUyGZkRUtRmcLoW\ncWnpNjrUJgDS8fXYvk9c6aS88nisZnkiUFWd0D8DodUgSDpn2CswGxZ2pUEUJQHLV9JogYVmmQyF\ncj6Pbi7gGPYInZqeZI8EEQsfXU7ve6pYSRGRHofU6jY5TUdLRGW9vJlq2UaNQxTkfgwjgVBNfM8j\nCAJ8XxqJQoGpXa1M0eHpJ2TPmMB7Ccd1qToZ9iS9dRRVY3JbEUPRGTsoFdO2PR9nq3vg9JG3K7RT\nyZ99eAiCP5l4AAAgAElEQVT4e+AbwEeAB9/qAbqSZHX4PsTg7Q2Ik6NeLg0ZI42uWbQl1Vl99Z18\nfmUPmhHw8+QINierYdcb7PYCdrh1HnhCNgm67D0XcmSzwaueQ+xLIRdrCpqqksuOo6kgI+lR5IOV\nw+bt44kFP2TegscBWLnqIcLqDvLFI7n4PWcA8L5L3seaDbtZ0lenvWsKALPOEYxbl+IT151PHC3k\nmaflpuorDfDMslV846avUa9JKqIQYs/jnu9/l/OHpIvgi9c28f1bv0bPugp+Mh5nl8fmzVU520BY\nk0wY2BuobrPRXY0gufeZ0gBVP8AXMe+/5GwA5t74L3R0XI+d2cXpF0qL+sVKhfLWBk7dob+7j3pZ\nBls2uxF+bNB05NFMu2o2AJmqxdj0GB73HueDH/wnAC44exb/9E//QN2pky4c2EMmEjUqiPBchygI\n0RJXRsYcixLv5aw5Z6GlpUCo7/Cplstsrw9SHpLukXrdobS5AqFP06QcIpbKpWfVKhqNkC3V8oil\nf9ONH0HEEWNNnbBDVvVtKQ3RXDjyHaw6/PZn1+/PyzZzoBqyJ4UiLe1IkTkjfshIRkkUhggBeqQi\nIgVf7M8dj6IQA4VMUgW8V8ArioqiKMQiTO5zcQMZfPeTDBtZibrf9zArGfvgj36M+ROfxrVfpPHD\nnwMQbyoz5LgIVZBJgmG1/n7UtCA7ezplV+7Bheuf5uHubj67fCXOhs30JgqirApMxaUhAupJ5k5J\nZNDCiCB2GExyqkuBTazGGJqPEks+LoebXzd/i7tvljRZQ+QNh0jYOEm2hAhy6KaFEjfjBZIXz+06\nmaH4CSZo3eTVSwA4o2sW2ZY0aXU7U8N/JXYScaWqmHFE3S6hKucAkFVvIxu8iK5UMH35HrcvpObV\nyORi/CSdNrQDVPc1la2JUeS5glXd/fy65pJkGGPpChu3VbmkuZVSkv22emUf/LdGNjcO30vWV7ik\ns9IosW2bamU7AK1dEynkDESQ4agjJX/rQrBXKCx1qzREIgO0EARYxhgmZJI0101DsN9Gex3eMhCZ\nSqV+DSwHWlOpVDmVSl0N/CdwTiqV2tfl7z/f6jmjGMUoRjGKPx9vaWkPDw9f9SZ/dfY7eVGclI35\ngYOmBpiHuEzvlCrt4gunc+Yp17LgsaV4rswF1cIylTQUCu3MKSRNdkyLhx/vxqlXMS2LoX55JN55\nEkzqmoywy4xjX2e4iICIgqWTFG0hVJ1YeWcpf9/72vWoljwuzbnsKtJexKWXvZsLy/JYNnP65Rw3\nxeaJ9Weg6TKt7CC1CT0wqVcHKb4cEHxKauRn1m/GynVTLDRz9rmz5bygYJcaPPiDn3PhnTKQeOXN\n3+X9HziLW2+7kwXzFwGQOSJH8Zj9rp3Z02Ugs74uorLxWVwvoLci3U39rkekxEyb3sE3vnsjAGef\n+wieVyOMIJ/0NbjkzGmsnlRny+aI950zk+oW+fsn+srct+A5ahu38NTR0uJat6mftXqG3/7mIVau\nlO6Vj115OZVyhccWPUZxsjzNjGHi6+avo1XS7PsvoagHoygC15HBOOKYY5rG829XXEyhVVqG2o0W\nPStXctTNc+n4kGSx8NWYSrmO6/qceXyO/g2yReetGx+WaXiKPpK6dZgpOMyP8e06elb6vk1DHzmx\nvF2s/tU3QMhTQuv0c1COmYkwsyPVpCgRqh4ThxpeEoQVYYwqZLGTQjwSyLR0A9euM6G5SF6XvPz0\nwodI5wqMa25FJHsjtAMiAbs06BmUp0jvJUH6yM4Ruvp+JjNvSw/eS1bTaMQxmRaZytfwygjXIaMa\n+zJKqZYiSGtEzissNeXRu954CWd9D9smZsgdlSfQJH97m/qJmtLEmk7DkXR2ixzXn9BOFGgsSnKa\nNd+j7eAAQxG4+yqD49fHDFrb5ckm0jdQcR7BrW4jJ74MwPT2HKFawbQyOKrkuXX2b6k4QxjZDMVj\nZwLQfNH5EIQM9leJAm+kcVw6baAaJjVbww+kBXtIeBlbCg+ztPdHxJpca1UzELGCN+hSTX6rRDHY\nMciDMuE+V0Zs4EQq2yLBS8nJyQ0CalFAoKms6JH7ffNWB0UYTD5iN+xz0wU+YaygqBb1+g5eSuIQ\nFxcsNDUil7FobZFFM0Pl7ZR2B/TYPUycI13BYofGC4NDkI5oKci41d7q/pqMN+KvVhG5T1galoUq\nXNhbonOCZLYPf+gy2i5opu06kycflEzQlD2B5T2ryLceg1Z6AYDm4jhumPoRuh+5hYwV88TzSQAl\n2EbdUXh+xRoKyfHiyMwR/He0h1q4a6S8GzTi8E3OHG+C9cs28NEPy6ZL5mHttKhw/MzZ+EmO5bOP\nD+JFJtU4RNUk474auxwUaISuTRxGjDtauj0GttdRjCxRvImR0u8IclaB0084lmLiLlKocdWVZ/Kt\nm+ZyzcdlQKdn9Wamd0wboaue5Kiv7N/IzqrNmEDh+ZK8ZgsVI2vxizvu5mN/L7srLp6/lsFN68Bf\nh6HL9zy+8vdUh2Ie+slLLHv+JG78qGy8fm+uBUXLcf+tP2LqvZJ2tZImtBSOKRb4zvLFAJSrZcyM\nRcOx0V5MWGl/rBSAiQW5HmEQouk6QRzwsiU31VHZJmYdW6DpH3I0FSUvDKwu41Z20J5/iBmdMlgb\nRTH2KRk816etTaV4lhQyTdc/S/qoNFtth7FJdezOWgU1eBnBoZTKMqizrtGgVn9nPu2BFYvJJdkn\nFaHQNKYJhI6SBK4UHIJaA13PIxIfaICKJwzibIFJ7VPIJBWVlhAoioKeMYnr0q/87o5lhJFAVQ2M\nJBZiodDb20ugRbQnsR1ig8P
V/cHzJ5+TFbJ+qJI1DXTDZHAwafHpBBiRwPdc3KQfdqyGxFWBbZv4\nfycl1dnkmFRdwqbI5oQ5k5nkSTdUGoF+pIUnIqKDpSLZHFscc+XFLNM1bnhKFoTd2qug7SpRCkL6\nIrlG0+wjXjd/s2ZeIenk3Syv3kA93INuJsFNL6ZaW8q6ynz0ghS64ZDNmrrgDrWd7g0yFvHUSQux\nazaV7VXUg7SRDovZbJax2Qy7PXe/INeb8MbMofWFu2hJAoTHHnuKLNQqb2PNVqnFVN9HbezvmigS\n2RACaiZHQ6h4iQHgEUEmja+qrN0olYvQs4ShwuatDlESN8jmFGzXB8Vkx/Y6bpKFFYU+tWoJEWkU\nk+yVcm0P/bVBOt7TzvWP3gLAvfet5ltzbqAS1EjrkucOEXn2cmD89YR2Yo24gUAEMU3j8xyiS0to\n1ppB4lM8slaaU8+QfsiO9umcGp1JpPiUStOSa530D/RQfL6Fc09s4cTr5gHw2POL2RC207fNZmCT\nXMQ9Z53OCe0TCP0GXrCv0X2AMN48KnsgZHIT0WMZYilt7cWc0EQjiHCSAHT6mDxmJGBPSJxkvTkT\nG7ya1ugVHpGikWuV9BvxIGq6mdh4nkhITSrCLIqqoWcN0klqYnB1lYHudbRm2/npLdI3eNtdT1Oz\nvRG61vVJa/eFrXUaDR/hxtiJ8MDUaG5r5ctfehK7Iqtxzpr8NYyfwdAWm8LR0gpa3NhGaaPDS5FK\nXa9zww3SKjcLbXzthu+zbd06didtVH0FJp9+CoGm8I0bpXD/8N9fxdPLnmLF6uVY7oFZyUhYTE9b\nxMRUHBuSjVIYlyfTYqFrgv510rIc6qtgVyrMnNLF9FlSkIdhTKPh4LkBuQyESSXs8dMm83zPDlzP\nY86FMijtODvJKYJ3KYfwVNLc33FdnF0u7wRCUUachxo+WTVkYEsvdmJAFKwQz6/RlJ/C+HFNAGwQ\nObxcM4Vpc3iuolD6vez6qO4Y4qwLL2f7/T61W+YDMOdon611h0bNRyQZPld3dbJpa4mFA+uJj5et\nBkw1IP3SEFz2PgDurMtxFNtmctzkiViEuBulPznrhXh2naHqAOnEelZCFzMUtMw5g7NvkAZA5Rt3\nYloqZzW3YJa2Em6SQrJgmtRfrOPFIbVAKpKyqXDVJRdy9bOPM/iwLOz53nofJ/L4hQLfGJIC6tb6\n6wPRQSyFXN3tY6hcoq80iKPJ4PmqxWVKtT58I+SU2dcA0KQX6Nu+iO33FOlfJ4P8v/55wB2OR36g\nQGbXOLzEz6+JKs+NTzOoe3hJG9XVtRA/iGixOph8oowtXXnVvxPiE7o38unPyf0W1epE5SqL+2Xa\nqJmW86QKCzVvUgkCMqoUsBEKtppjoxfT05AGWbOlYRk6XuBgbJNKSKgZVA1cXAYaPnEseb6veysi\niDD0MVjJe7YFNfovbuK7D97Ev/z7hQBcN3cl73nfFTx/by+5pICo69hzefYPuFLirya09ya5vWOM\nw9C1gHfPupArJ8ojfvGnBbItPhlF0NyUuDdUj5ashco4CjPk0bmtazr93/40+lFNNF10FtOnS0e+\nsWY1q59aTajr2MkRbskzyyirnWQzh6HocsJyOQvtyByVd0D3CaediVCk28JxKqzbprHxlDb8QD7T\n1nXsHTX8QEE7WGrJQDU5vFBgV2uJeKyNd1vSKD5SSKfHoqg/HgmAhGGIoqvEqkItacEproswFYVK\n33rSGdmv+Euf+QAPLVjCwJBUOrmidEeItduoBRWZ+pY0pYlVlXrd4VvfuoWmjLxmxSFxGBJaWexD\nZFl02prIbn0Lx0ycxKzZzViaFIbHjGvmp7efxLMrK/uy1jjMMti8Zi2NwB9JW7vvzjup1Crkm/MU\nm1775bn9yCUBRk1VcF2HrKkTJML0uedWcsLUdsRDzzD/cZkONm3KDAoatLWlUdV9se+YpqZx6JqO\nGu+mluTtn3lKF40v/5TqUInQk7ygCIN3aQovOXtYkfCcu9tDUd+ZW0xRBI4tBWRgVvC2rmXl8j7s\nIemaqadjVC0iytaIO2fJtThpJtMu+gAVT6e7bwB2JX3Lx1tsLK/GjTTqSa7mlqMt1KnNRIZOmPDC\nkBFx+jVXsPx2B61LGi9KHOJX9yvre5Pj/Hh0Pr9sDSccnyNK3Dj5rEbLhE4aG3SyWbmWZq3KOLtB\n0VJZ8oAUmt4jzxCpMdRs7KCK4SbBuFjFizwUoeAmDZ9cTaM914J58sn4BalE7dDnHt/g8z39PFZJ\nWgy/+Jq2wcDCPpmmu3Ddj9gW3EfdcfHrMuvonhVVyhsjLv+7cxFK8nGByKEWBdSqyygnrVNfVQwC\nRcPSPWLjaLKJkZfLZKnWyvQOvkBjvpxjTdfZVqthWwM0kkBkoSNDbIeETRq5UJ5c1EBHvDblL2Hw\nYjZDYVIR1xSIxNJ244iHu/txXAMj6VESOzZFEaNqKtV9rVW31Bh3dA6jcDixrjI4JJ//xDOD4IWo\nmkJsyntrJxjc9Pj3+fAXLhtpX3DeRW189+tz+cyHvou7XtKumIU/LGlM8HYCkYVUKrUwlUq9kEql\n1qdSqX9Jro9NpVILUqnUxlQq9XgqlRrzVs8axShGMYpR/Hl4O5Z2BHx2eHi4J5VKGcCqVCq1ALia\nd9B/5KAkHzJ2d9ExczKfvOV20t+RzWL8aIgm1aUxOIhiSIuo0NFKKBQM8/9SQGrYge6V5LQWfvm4\nh/KhItM/II/pk6+7nkXPDJA+pkBph9RyjVoZTZ0mYwVCWgN+w8HeGu5LeX1biIU6EsRqVLdjpidT\nrQzi7ZFWWKNSRa9CPmtybLMMRBZassxoSnOOVsQ2A+6edQIAbrgG/F2Ewc+J9gWflAihqzS1NBOF\n0oIM/YBiMY0hYkrVkqTDr/H3l3aN0FWpSAulXrdBV6W/dSQIGxNHYNseXlIR2d5UwDQyuKpBPSlq\nYrtHGIHYtYuBboHZLNfo9lt+Qd/aLYzJ5NmTpJjFImKs0JhYyBEnH3BwB0u0XTATY2KWMfqBKyLj\nfe1kI4EIXXIZnaNj6aO97/5V3H5HjUKxj2vmS//zhZuLXDo5Q7HrCFSRpAHWFLKF8aiKihXF8tMu\nwLSOJpoyaeZM76RgJW4Y9TD+WxUcXDDYrUk3UKM4Add/FXjkba35PijJRzaCRoO+5U/ilmzUfYE3\noaGrAsdu4MZjATj58k+zRkkzuHopSriT5onyTDexyeDpwRU0t59KRpefQDvp1E4iTcMfKqMluc6x\nH3L2KadRf/AB1icFYZqu4Fqv6ZWRNBDbrh1K4IWEy1YwNsnfH9BjHi0N4NkRTtLr4gzdwMsUWHf7\n3SM9fITrE1TLvJi1EJbGBEPur1goeIqCGsbsTKo0FwgTv+qQ1zJkEh//I07M9YvL1PyA314gLexH\nT8/ywdfM3cp1CwDYOFjGMQWWmaGeFABmM6fSNifHyedPYqcj8/iHynVW6hqIEi9u7Uno
yRKqKpHv\nUC89zexZZwFgV3z6lj/HxqHlREnK4bsvmsXG5UtYu30He+syaKjnfoRDiCoi1KTnkKql0TL741qO\nLYPiFX88ijAIUPBVOXfTZrTTs7QXIUzec7Jct+qWBmDh24Io6djYFLnsONxlghjHySdM4s7+RwGY\n31+XfGKqeIF85k3XfI3/6r6Eql3GSAp+svksX/32dTz5u37+bs6HALj5nG8y/bzPcyC8neyRGlBL\n/ttLpVIbkF9evxg4PbntTmAxf0Rov6ok+dOGwxlHwbQlF5HOyeyAgZ7F9Pf9nmLzLIyk2k0rNlNo\n6QCRxxmQftXuJ79OxrNp1nL84tb5/DLJf/7nG75F80c+xepNHr1JDuYrMYTCAT8k8qVAiYlRzXdW\nEUngoSVCqmjBzPGCiy9oIvfR5GsjQqFeKeE0ypxdlBv6ovNbmDlrBoo+i1qpxC03S+W0YOFcCi0W\nH/7w+WiJzzGKIVbBOiJDsCcJcl0CuqLg4NLaJjdFrdGgXuoZIWv9C7JpUhRF6KaGqhmEie/eQMHS\ndSr+0EjFmRMrKLpF4EdYGSnMarXtNE04huPbW3nq9rsYMqW/dpsSEaoqZgiHHSezQV59aTc0bKaO\nPxotaZnm+AGGqhArgsg4MCv1DpUAyGcsDFVBEer+NqmxTt1+mSCqU0mE1H3zHuQB5eOkDQ2xL7ef\nQzCNmCBwCKNoZO503eC4qcezu+0Eon2K2d+DUA5Gz5joyWe8LEXDCd88Gn8ghEFIlMynWtDQsClY\nEdVq0qa3FiGa83RMO5sT3/dFOW/uBaxevBi3exGm20dnR5P8PR4z25o5smAiDOlGOnfKcWhCIb9q\nPdlkPYq6wUlmG+Lqj/P0Kuku0kyNw8YY/OD90qX00aTX832Dy0CDQ6KIpkxSIXq4RVWtYztVBvb5\nf8OQHRkdd6gGScMopWhRj0MWZi0sQ2NtYpQcG/ogFBQCSl7S4jjK0/PMozQRMXTdpwC4c/FK3CDk\nrvfmmfKJ6QBc0PR6pZ3oNibkTmdn9CyHajZRQVZUZtquIFC72eM/Q5yMJ2OadBwPL9xcZsW9a5NF\nsAijgMCZhL+rxp6GbKkQuiHe7vnUqmvREheD22IxPh2zec06UjtkCXjjgdW46CiuiRZK/lA0fUQZ\nA4hItrny3Szdz/XhnR5xzbWzAfjSjdcxo+kyxk/r5Npb5di//KkvMbCmRBjkeSUxHpxQwTwmg55t\nZnZHjjOrMoR4w10L8AKXgpUmMCQvn3rV+Ri5n9OoxiNVryJ00DTBeR/u4iOXy+rWu267kzfDO/Jp\np1KpJqAL+bG9I99J/5FjTk+qf/47S8t4qFR7Seekr3pdzz18+3u3ctWVH+e886TvKWpoeHGAoXlE\nSVloV0c7pSURZ580maF7n6ViS3/Wpf/303z0Axdy+48fJtgptWj/QIPY9tFyCm4S8Y+iCPUd9sL/\n8meu4ezLpcZb2X01T33ySv7t/HPpak98ZLGgWi3h+o0RIZPLZsnlLFQjjR552HXpB/3HK2cx+4LZ\n+JFPnEx9EAXEqkDVNfw9SZGGH6BoCsISkFTVuf5eNPWQEboyR8jjgggjao0afhjjJb1PTM3AcT1U\nw0DbZy1qOsIcixLu4YgxcpNXh7ZR37qDF16Jse0QP2l56pkqWiZNEMWMTQQCiuCFlT1QdznvNLn5\n2rum4Xkx9S0Vth4uN9/Rb5i/VQMySHacMoliLk210sBL4gFhpODsCans2Mq2pINcdZtDxtTwGjsw\n04cBEIcvoSvjiQhx3IAgfBcA5doqtta3U/EFTa3yzUJXadjb2BxuxVelMKzWG3j+m8XiDwxd1QgT\n36gTqphRQHM+i5FLguJdF3PxNR+j5eQifT+TXx7fuqqbXO8ycjv7OG1yE9vXLQdg6pmnMr3rdISi\nMvYwyZ+3tXTQXCjgX/MJcln5+4ypcvkVOmnL5N7fyg/O6prgnv/an053TiAty2fSOfyWIl4U4CfK\nZc3GPgJVYOXSdAZyO55sGjTKFZanFa760sflvCuw+rcPEFTqNJsWuFIhqCJGKILYdak6cj3iSMGL\nXGwv4IF1MsV2wHe4ZkaRn1zYRouVCE3v9UqxpTAbgLrbwOt7gOkzmxGZJPspamHp0K84KGtzaCQ/\nA7atBnMu6ORbM+7jyUdlhk0QCGLfR6l5xOFenCSVTiOkVlrFYF83hpD+79Kxx6G1aSimYFfywdwb\nVvfhYaBpLahJIUtsKETsL1bKZaQi61m8hZ/3foiWthy33SVTE7NNgoYf0nbaZD7ZIYX2FR/8DJ+6\n4mvUGgFRKA23mhtSiXxucR06mo7G08Ym85kmDm3+X/bePUCOqsz7/8zxUCmKomiKpmmaSTMZxmEI\nYQghhBBihKjcRQFFUNEVBVR00XVdb68rvPwQFVm8rqvoorKId0XuFwkghBhiGMI4hGGYDJOh6XSa\nTlMURVEcz7x/nNM9E0gwWV333feX5w9IKt1dp06dy3O+z/N8v7sGAW84baF5b08dTKqK5HIholU0\nJTQ4KQkV/unLJjB7/xNbp13Y5kXbQiM/By6wHvdL+Ua2yj9y4YUX8vRHTCR9t8X7ot+5F2m9yrA9\nQi1fdRV9fZDUx/july40D+zl6e5fSP/8RYR2l8r39uOH5xP33sbiRV/lno9+AoC3fPpu3vWJj/Da\n8z7MrfeYXbprl24ulwUyd1fS1PBaxI0J3HT7skfOfeebOf1sM4EaZxxDodyJAnRL1cTx6CnMRcup\nzlRKmerEJCGKGhxjCXlCv0CjNo6WbpsrWguN0kYTsMXfHB+xiUztj3RFG/KoDNe5Z9lP+LK9h7Ye\ntNh1VzwFOpkKtgkEpIpi0I1jc0kdx6GZvUCK4I+Pm4W0nsTUkohKo07mOsSW61mkmjBVCDTScihn\nScJYrEiFomAHm3QCAhFCFMMme4Sf2lcAGLUpWQ+NraMUNUnj5/Fdk7N6Z6wZH6vyWGWcO4fMRD1p\nQRfdjiSqN8jse0uSlHwgqFVj7r3rD8SWHvXXt97Jx1bGNBJIx8zJw3Ulpc59GRl+DFeaxT3Tf2Lt\n42Pb/tIB6eXQlrg+UQW65r6R1535QXpOMZPXbW7Cu+tSmm4V9ylz9H1DmuOUU4/ltQeXGc17XGkz\nf9569GtZsvTN5MKQT3/Q+jZZQqlYpFAskLeZBYXQY3hkmK6ubjr7WoHIlLQ+pb9Ytkf3YkWTL3Sj\n/BDLj4TnadTePvo5hR63HPJphtes0yUlyXKj3jL2+CjOmmHyaUTguMy0ikmFzGTEpM06TbsRBH6e\nOUccwfDddzPRNAvzCbMD/vF1ZeZ15RD2FCq8zVNpB1eZRbJnTp4DunqZ7+foWmCeSalXUfe/x9rK\nOK40J8lGtQZ9Hl0HhpQuNr/ZW+6FhuCWq4bQ+Mw54EAAausfwC3UmdeVR1plpY3yYY7sP5wzln6G\nPWwK5QlDo+S9Lvy+Ap5vTthKKzI1FcoLrBD
[... remaining base64 of this image/png output truncated: it encodes the make_grid rendering displayed by the cell whose source follows ...]\n", - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "for i in range(20):\n", - "    transformed_images[i] = transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))(transformed_images[i])\n", - "    print(transformed_images[i].mean(), transformed_images[i].std(),\n", - "          transformed_images[i].min(), transformed_images[i].max())\n", - "show(tutils.make_grid(transformed_images))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Random Affine transform" - ] - }, - { - "cell_type": "code", -
"execution_count": 8, - "metadata": {}, - "outputs": [], - "source": [ - "from PIL import Image" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAAgAAAAIACAAAAADRE4smAAEAAElEQVR4nJz9+bd1W1IdBs4ZsfY+5zZf//o2exJIRCKQEBJIwgaEMciyQVJK5UaWyxJCHlWj6tf6C1zl8i9VNUYJJNkgIYEESFgCaoAtWbIlCzLpBGTmy/bl69uvvfees/deEbN+WPuc238v8RqZ77vnnN2uJlbEjBkR/EEAAIRN45kvnAJ48ojNgUkKAO3cT9tmpnZMyM+ePl+RvOxk2kXnnbr8xV87AjBrN673jJYlbV0mGxc7GrPb2723lr/yhpOg93ffckf/kf0y6PBLmTee6qTy6j0zW5aJJVfrwPt3R1m+WYstrtxf9nW4E3HlmtLLvewWOzYNgbqaYnGVB4ulaW85jnmQwPDItROde+crV55bEIlhXQDJMPcsZTmstfQUlDnUZd86Jv349CsfuAGe6G1D0rINQmr+fPloXNSKcPEAnLwPmLJTX29G7/iPCz8BOj9x5paXDvyJdtnT4dLv22ncHCAAbE9CWBnGHViMh1cCzCMQGqvhkJKAUV0MEzS+bWC5Hwqv1o1Rpyk7HVVqPWIkD6nENAgMgTEQNfYkqY6pDtqFr4RRJkCob/e7OT+zcLO+8sbTNKrPOPGKgpha8n4VDQgsI9OxfYX5r3tf+Mi1PDPCOtUX5O9v/FHykhV46jpqB3E7aGRAhDZDfuFVjMgAHraEgTNT7dRdt+v4orMe8qZBFyWAmQYZKCDNgxyidMPRYrVUxuRUXYsaJfPeNdTxQRrWlQDX1saisNRK3VUm1+lgVq3ca8qxptl6KMG4cYRUTIAx0tdrXw/d9bsB1Vi//Pyi9acoPZav+ZNZGB0qTJTgSAACtdhfReeS1yIkTWF2QsoR9z/zwUfy9HjrhBD9fa9/oJyToqJAgTK0gT0xA4kEIZM2gmAeifkRsk1Gk9jmBm1z5smnnjeONoMuHMj2YuBlw8xLBQsA2EbwiJZSOAiayDaffPW+W18CIwGhikoZQmsbq62qm1KiUjCahWqtzLgPQEmgGHPpMaR8BaofKmjLWElZqxZaSWWqESj1iLBY6/DuIzDOMyAfG95Y3Ayj9ZykJCmBokhpicMkWEwmpJ2Tc1x9Lh7X9lup/S2xybzf7/oHCgUmT+78AiQCJCQ/c8kmTLXZACiYAMwTz2Z50OYzKG6UB9vuGZIJQJu5qUumbNvZcOn638y1i39tc7hdIJVFAgwk1WWM3c7zz3+m9ykMVidZJijhbohcKamJoiTIY42oGRGeApgQkBNiHMfMrgqKCZP58nC1A44pHYlgRcmOd+qCMSrLA96YF4AoPhWv8KboUlaE01LGFEgRCxwNpdDSLVVZzvQ+MXw+n9yOiQBY22f5v0P8A0BB2zFP38eUQMLgZ1caoe19mnTQ5kmO/0m4gKag2Klf5pmxPf3cM886DYJ+6rSTh7CN58WNloKYaRSYiXAzOCCCNA5xsHv986+5Y6ydWAMhMYz1AYSYkAxIsCQMA5GqIeYEGQOgxtIPU6zbnEBNSkL1iBhkqAIDVulRPRGTDOsVHptfW1T3zJdf6a6K9I7VgSSVs1QVd+yBI9DWj6ptN925L6x+bnzeTvRnEoBBfLhUvKyV8wuJW7XkghV4crgvFThFopoU1gVL/KGntymhk6v4gge8vG32mo1wBAo355Asw9gd3bz9pceQE0VGIMk2ZURECApQbRPJ0ZGYQkxIzaSJqAMslIIBUJoLq4wpagYyCSSVKLXCtB5FQq/117drJLunX37p/TvsCFNsVi4lEIR2eEDSDbOEF9ubbNeNXhzf35/szd/3vn+ylTPdB5lB6WxK6EWXngXG5TOO2U50iOdECOZFSjYrKE8vZsVGiuS8++T5xd62zZMz9+Q0M2B7Y0JJbWcMAeRilQsxNJBCpLG99gSAkWQC8xaUQFI1JKTkCnlmNyXAMUAEAEghO6xpNoZU56kWqOPEmkNkyWS81O1ofiRh75kXX3rfotIsA8z2nBs1IfudAy4AWMAQ6iTT8a4LEK/EB3pSm368XA3/Klo5+wU3egAfttIefrP3epTZuGsb19nfuDFsLr27wLO71uV3bHoS53lrZUSnVekorAHmsLlTUgLkG/WxLcowqm3PIVE0USMIhoDMeQdOTSFlDWpetATzUAiNSSiBo5fev3Mstnef+crLz+wKLItRgNRkDkAxbRcP0gCnIibEej/PLHK+lh9a6rSR/Psa/xNr98wEUJuUDtjlFvx8u0tFtPwi3Oj4XMkoUGmMC5a3hQAHIkHX+QMknn/bM/iDTn4gZQZRNCflnY1OTKPESIeIAJMC4J6Z22tGNAOtKatZiGAJsSlsEtAQJ2QAUjWoQoAj5YkUpTb+4IPXnt8iOq7r9aVXP2AUF1hbU5ohGZuGvYN1OmQJdHjn3UduGtHsplmP8Lf1/t25H4/f86tvrY8BbCfAvGG2jjXk2QvyVI+2OXs5lqPL8Rs0pWeDIVxwmLIhWuAl0+ihognz4t0eHNHUmdZ3dHgu+kOjahKAVCCATYtCAtzgFkK0UbF2vhkhhWZDpl3QKcCiDZxMSVPToCokNGU6acTb3bMABZESbg2vv/pUR6hvD6LZdmn72yInOIyO1NWju3aNbGrpZgT41vTR/fq/T+0Die0kbxvmVomfrW/LzBNjQ538QDMz6NS2dKadPv38/a0JL7tQcClFY4o8BYKcaJcZANsL5AkzSdags7bTAu7rydiBNQHKZlEugGamZGsgjUYjSaPI0rmqDAmlkALMzKwrBGKcagSbxi5zQDlNU6QiMyOUyqrX37b5/VPiU4+++VZIsL5DE+bzuJAA97ohUnQ3LJ9a3rubNEMez/y0e7/3Ln9/y37baNr2EArQrFAAcLqCTnqeMDTmbXn7YYM9zUb+uYewWeqdRA7nO1MCTVsZcFEzQg0lvUB9JJC6WPDMBqSC1An4PJIAGJwnm3WDTU1xL8CYbs1gpAEJ36gfNpvYbcwcACgjTuFyACpNYBFmlMmAwgDAFEDMcFdSkV/prs8ICCU+WV/rH0kHrasxLwSqGXTUnh8ZJpqL5bE79/RYqdasMgKCQfde+PAtQe0Ov682d29rpb3KVrFHkslzCJzOfNj+fk4b08PUkdnEO2/+nz6IeflvuMzw2UAtsNOGombEYYYm4d0yD/dl6wRUWTxy9siIANLYTMgTG8msQvL86zbLjfMG2joyN98D4kZdEYXxy1+zq/lOwuLZeKm7hqS7QmduBS19hVDnndRfrw90q5ut2Y3W6uvPfeiR7dP+ftqpzisAFQ1SIhT0Bva0HfOiRSqYNBsd0hlx3KRLzgDS
2SdTzArGhZv/5gL1IY8+y8iLpk+2HcnY7ND5chtPAK2aqI5iZ0WdojoQfoOmpCBByFkXQ4ApiJoHMzevzs1V51FvWHjCTqg9818kwEzn8ZdHX/5InwCb/bt45itHe3Tz6FSB7bSb3UP9tOpBMkyLx945wI3lVk2ZD1l/bnwSeYFG/PtpsxJIcV691PG4n5O186w++cVZqPhh+h9n2/xhjVsA56L2MKVHTak8+UCU1LzW3KJBYtffSGgioH5BClbl2MybGZzQLK0BSAkmkDE/gdJR25s0RFp5YkrzxD/bXZIAUO6/9D7jxnzK3fcxigRwkbn1i2yET3YRXbPL0D/67kE+shDBjYpLBddf4JNnsfoLehTH4mzGRk6s7SLKCVS4hPafh4gUE+KEB5m4CMunKy724lluZAeVtFOaxvbsttE/7I0u+dFY4UKeoBAoVW8/7WpouSAY2WFfiCBIIomU1aROjNlZudYute2YVLMRCM1yJzNnoTn7MLItHhokWPOnJ+3tnSfAWamm7aao6jDHeuZNAClrHpayP65N5oCH3dDqnUeXCVJoT54G5hfGZ88hOee7pemslGUA9MYcmBWBYoKO1Xmd6ly/SM8/3TmnB8OQmj0UFw7SCV1SPIfmbJ4gQdpDJvZFSiApSed+o7JWqRkCMgFO66tgY6UIm23RC2kRm85okoHNZMCsIJkKjj0jYswjHuGi6mxaKdh2fRNBSK/0j0QTs2SbH5mGLIvVBgva3DxpS63Qzrflo28fvXOrB0jYLFYExIv1+f7sc5/rGAMAqamYSNiJES+JU461U+r5RboY/fTInPpEBEBepuSdmE/NX3T+qCZ4/WH4ti50EloqAYmn6QcKXvFMF6DaQaTc1wDGdEJUg75t1nk2j9S2RGwUuPk/bYugSUC0HTNBJBFlY7JaZ0H4RuoDSLFpKMwU31nuNCTA0FDKJCjZcop2h82FiMSSK4Ggm+zW7Qe40bMEDMgN2KaX4/2LhwEjxtoeJEmRCIkex0NbbMblZgl2oXXWZlECyEv2B4ENAWmexEv1ku2eeImIALHBJi59o4t+teY+FMXTm1KCS5rT6DRKCbpNgAZRwd1id9cO9gulZSDZCQjKJKYnmZaWmHcANsh9fovNnwQUJhIzOCL6diYBBgI+SwDqTtcjmiYAGGRBI6wo2tY8w4cGAVponTCzTPdH7CAfXcyeoWM997X4YA+/tL9yY/YZqQDIJKzpt0aomILME37fi2bT1ujDJTrcDKS1RaFLDkIzH9/LCLzY+Nj+fsm38wTnWfkQ1jf7jDQXNxCoHhBAud71b64Kc/d6HXo8UOzfTE42rgz9OBEcAflO620Fw5DtCWdwsRkQzZgiRXCWGyexs/afdniu3nrKE1sPlTzG4rTsbMjZwNBmmSB3uDJrHdfdyqN3by3mSx7v2m9MH9y/nF+n+f9k47219bPBHYAiePv2hJbYXPnHN1E0kPTYvT+7q0/fdta+tvrFydbAv0yKly3+LR51elfR5gI5X0fnaagE/ASgeeLtg962UYrOoBu7qLJpTQGFWA2GNJeKVZKLxQTrY0lfdEfDjtdK7VxXaCc7mO4cmPlqDCxJeE8J44BU85zIWGeyQsDahkKlbeARhZnx6PajrnlSSsiupgrplkN7W5vFsASi1xAMOkP+yLsPdGvZ1pDNjBqQd1748PU0XbZyHVAasYH/XLEd2lDB8V637UxgM4PnI3n6t80Rtt3UNT/uZrZf8ChtQr0HaHFeOBx/3tKIH+qmOn05pXfONOSW1MoOaaxhQBRxDIPcazgTMpdVxkgiNWZGUujCorg5vU51z8wp3xV47bpPnb1zv47iIHVFysd2lJlHa4PmmUr3UIP7RQPFu921rcuTgBaqmpzqY/Rm4W27NeE7GGcHI8otrd69ua/c9osApN377EduXDj+J/qVG/pY2+a3nVlOy9tZN59NatCaAmyzZDgevXmGnjw1fFYzLwHrZ3fMpQ+aFM4akDxFC9D8zzn8ScBFhieRIkmbjKGmIWQpg8gqAlbEAYR5bylMADoQCDM0b8AkC+8TsZOWHdTtkDlKRQCKpG6oET4Zgw6vy0d2yZxeq4acUJdFiscXQsR4O0CNxWlxu9uFQM7k1bRaLJ22mFEFZBOTAolccigwd2iyW+8eRO638bMN6Cg/fOGDj1y4d7evwrzZro1lxcY0AABXyROLvNGKtCEvbS/QjKtL3b8A7D3wgxNb1kOOIVM4BfXrBJkQmwl4wW10oVYpCLnxPAKkEWqTeiCgxRI4IqHSZYHEWkqGupBIxIKyNC2K0DvcS8SYRSmTi7QFbRc51bZOSIJ7XdJiGECGRPfMft8KIw+EtLWKG7q96xoNG8cCpdWdx3cnZ9lbByWYNYYAyRSxxKpD0mTALTu4i51jLUukIFt/Lh7HJQiJsEVQZ5fYjGISCZTc2nU6KVfagdp+YcLDBo+8YN8/9RRbqXZ29W6bGRQ4Q0Q9FhiN0NGM2XNPkhdSj9oLsNEBA2Y0mGkiMAJUcdgASxaQQyfEroU0M33M2tZRAm4VSwPGgXUxIczIXBQRqk6vPmZ2NHBJGuIoYGzOu4yrVs3CDgVTLt0suuvuqNY0fZFyvbL+kAP0LiUZiNy6SolcagRdMgvesvt5awlsOflEEhg+V58oFw/QBtaktX262S4CLASYHZvAjcmiE9Y/jRu79DLfL90uc9uebmZGXOriBaAU2sVObUrbRwGsbf86+SycwfY4qwgrgUbRkxEb5VN0JoBBgAo4TJTRBS4rKTcQqkYzBQA5rUsLFadxvUJBrBFeJC5IsE7raaAyVaAoC0ijhjBLMb0XuYfOwWFFc9CcQr9r3PGY/ekS8+pzB69lAmLvNAkygMqcX3unVCHkZsVv7B+9c3hiOBJGJOsXXks77hSb/3HC3Mzc0LBsm1EhmMnMvJzcOQXMBuj2Sic06ksEPI+1s8uGdnvkQy1AyR+CAVxKEt2ySM9838Zbs7Db/Ed0TbAYAKSn13CkLzzAKeV9W/kksVp7pwmwhTP74m6aVmaWIfQUfImFNNyviwCEYkBc6ZG0GsxMwYpSi6U71K0H0Q/NCjKv0A0LzSQkQLJn+Ub/5FRgjhTSbKOHNcec7R7UFiQg+S0c3LWdLVzU5LlMX6rv24rBDYYJtr0JGz/qsUi1pqxtiOfafHd6TBqEgNzIo3PNUY1CCvYe/JR8CPzTnohnuW+nnyW3qOb5M4Fz86YpFHQDQhAym+u7ZBJ1ck8v9HU1gAWaZDZ5qTb1mUQJ0SB5uhngIBpi41PDdqLvEa4wD0MN9UDYwoypae1kRNIDuUsFshwQiOx6Af0yemoZgx2Pkz09vlpuVYesq3VjStmsjwll/3DdSVZS4CN2/46upKw5ITU7CfXl6UOdKEBWrYSg2shwrZNQzYFGmZ8Jf7qAFHpq/R//cenA6Myh59vW5HlY4wVr+Nwhv6822w4b9addgGrq8BSEeLC3s04DnSOsijCmUYpCqQNZa6+uoVtkGSsITI29YQsjmZERRdkibHzHSJuO1GBeA6g9E4D1gQVGFAO
0uwgV5R1e2ehNovpnp1e66wK8X8fWG79FNsL2NHYGWUnkdd5N7TjOan0v148sEjM6Nb/zcRSHeDzhjtv5CXCe/mewzEu27gAaAsS8xAM4T4ANDfTimURvXIE462o4dZ0zj7dteVH8YdtgQxQsYGkGEOq6VTqHtNDwYFdrAmHpNjGRC3PBqjePmTw1ZodEKaVz1Xu1UyRrR9GWKh3uHa2nYmNtkql3MTPWQK2TWET1HV3h90dAozkg7nLZQ9P99GVs+2NcfODzL5Ur2SFtMWwwr41mTom7yITc0oDruAPub2DzbbPX4sM7ACAobI6sOYZqDAICfopQcXZcm055St4TpzkAZ04wJA2NyHRioE4ds4khukyScAMCXjpBNkqqzjmoEucVyw1Jgkn6CT5HikZQGJOM5TN9DgBs4SmvQjqg8MqGICCySIucxCQQNZGmIMwp6wyaAmlQ1moA1Bsk5NTIKWZA7BQDZWvQavYFmd2uUeJB5tvjcVcZuqfzSytUsJSOZ1geAlD2MYmAO+k3b4137m0NL0mNyehvfeZwuxaUeYFX/Wxf2blbUaCd+K7Fd16u588TgFs33Pkx5NZVdtk1OJOxL7MRNkGm58daF02AWStARovma8AGkELDgNYCc7FAnQzy0qNKFJdMESEAKctVWlmU4Y2pdA5kLRaU0joHSkdoyEQqw1TywWj7EIGDyqQIc4JXDJBNR0BWmAHc7czAeMA8emfrECANV5+ZXpxUQe8XOmd0UdzzGhGkOfPalfXtw4zY9EG2ue13Pntn2xmZed5HcG4CnLlNW/CphmRra/1fJpjNZvsqjqm4x6rDZiJpuwYvvIZvufjn39sI4/b7Mz+bmxnP91Z7ASoT8ytTRtCBBSugwSlnWJ0ApCcsR4Q6V9jmYVhYWYv5tYWPcupwdEMo4SJ7SyrGVXpRsGi4m6U3KoYHyIyIJKB+n2ayo8EyszhqxZUCiKt1Tjy4jc0+C7ndfOreV6pC9M7NcHqwBPRXGFKklVJw/Wq8e2/mn9CcUmZKdv8zd2yG8d0u2JTP9tVZTHUTGDRv/+9FOOS8D53U4XTm5zPfXXSVh0FMpzek089P8mKZMUN/WWUbw6GFgME0ETE6ZR04JAHnBHdj9hjNvYpgxwnmAfQo+5QVGgNpHkkj0pYgplSiWJ2qkY+UXQNzxvAAK0AuYcjQ3RgV0TtzXOzQAXuwSlB37h3HRsjKk0/eeVU1BO52AnQK2yBkV6ymQNK9u3G13jvcRNHDbPYg2Ooz7/g8Fl8NPnM2/rgxKGfr8uGOeRgbsUOAzscRA1v+8SWW+uk7X2wiKhswd5Fu0IKqH/qSzIRoLVSVAt0U5FSt8TTGLKm+M9ZkZCcCrHDYOmPs5LJeMRXfWxCj92ljEB0Bd7gqI0yooNlOGXa7SFjUFALq6Kp7jpDpiGRlZ+p8tyOL1vc8ANO73V5qw1lOe7q+2T+J6iZrBLZTqLjAncNpkaJZqlzF/dv1xrYzGj1FYUcv1McMIqCgzguB08r6qe77/fJLeQaVO/vzlhN9OQmYZw4915p6c/HkeNgTC6QmDLHxITSjih6YFz4FrU0AFQFI6GmIEiCRcs8KWQE7d8pRwzgdVaBA3nUk87C6WQ1aT4+ySCN5qJz1XeViV6WUxf2JZO0IqL9ubuLhYAmJ8da6cUBalojFM9dfexeAVHYu4Psru31MM0EN3fX96d69swOgUevPvZ4z/eCiDjr9zWUT4GGhPcctA4iHjO5GF9sEAZ1T4mch5X45B7BFIl3ATmOGuzck+Nx7KmlgxLhh+jcZGvAJyHUI7N20BsC+dAVpQl8YMhFmYF9cCkeg9D1jqtM0Md0qSwpe3M2zpooizKjYWdDN61GAkTJ3TcuegRzuJTCpc0D9DmER932Wj8PbwcZXcHPUnWd3vnKHEXTvynneg8H3ParAYg7eujrdX8/vK6pFOhs5fu6lcU6wc4FcPd3Vx/AxeXKDfY9sE1vBe+EtsDHEtr9sdLjtQG1lg5nhREDm+etIuijW0Cgycz713AUaP6vG0LaBdnOlOq8gJieSiZwM9KKUI2VEVV+TYsBpq4nwrOk93XJ9oLQFJDd3W0BCchKiRjoJLEhAdZxpzTRxn1TkvSMXBy8AsO9iot7LKpgQOnxXM+UHCdYrz/lLDyBBXX8e3JDQ72pdIQBu5fqt617rjBo1mpkVJuJLX3kor/pkP578q/X1me8vPm3z+2wvXHbhzVPw9NfbC+R2AlzmaTBk5vkcATRrlm7OnMtz91dC03Rfx2KHTBVWIAdCsuKRhKyjEVWwgjKz7diR4VQbVSOMbuxJCB4sZUkqx+y6LkYzwlR25ND0QEYl5Ixa+hTMxhAaI0XdFYjBe/CcVRPcOzADkDVSYFx9H15cs1bBuuadO/3esP0upgiALtvrxqnpgfNamVnZ8ZUvVHuvLZ0nB2QGVJrD7sTIXdIyzIyyiwyNzeUaecvslNDZ/NEMNJ/jbc0ZF00BmpJmdn6DUOTsiGwwxXl4gEbVWLcYn5nuMEu2XEsBAusA4K6MMAQNhE2N/IaMKsEZ7LrO6xhW6BbJYs2CcOQECQl3GsoSkayHUNZIK2TbFKRD0AOFUCx3vaTxNmsAMplZvX49QWu7mTOvPzG+NDAB6zuTTpPnmYlunxVJ8+JWzLpuw/XisZ+cePWLE+Ly7RmYUdzt+pUyMIfFPuSk7dE0A2iXrH9IJ4hbFxyjDdpN6TjNxbl2uYtAAklregbPPkbzkESM00kOvOClBjlNlNKkAQCLt3A2dYTk1cRS1SEEeImEkUytxqwRqc5p6s2MhlUIg0QjY2nFDFM1KF0kxH030larTJtKVwBcM5lzWGkOLCJz9/lHdgXjHChAPvH4/ZeDAq2/wMMvqtuJgUbRDKWrR7HxxvJ4PIVXP72yS3fW1kVnkgA1JcBwSfTt5iif54eqqIcYAQ17n5/m1BU2/xZVsyp3h3TejGwEkYc4EBVQC+0+9xgtJDdWz+3WRtwWTJZG1gSmSsg7sxGAemUpilTxgGeAzAlSGakF5CQd9DFkXGeaKayXmVZR0nJMOVW5JIw2RFMA3Cz7XSIDhxFZp84A7OzjKGh3gEzBFEB9bNceW87kG0VCfPLmO69CWdO7U6k2CIgmaLkbNUHRSC8ac/P+xq3nNu3tzzx4WIpGseU2IDcckbPtcvifDSl4yNXnC1w4dhsp1ZLIvVds40MMyAZAXD4/4OXJrz39mFYigQEADNRIgFYJBMSFJCpphcyppsDOvJRdF5arO6VZPe6kd17c6ooFtWYpQPqymHRwqCq1uI3c6VOh9Z30EigQdHWRi97qXW8BsEnKn6N2H/cNSxhkds89+uZrlATr55AlApuwS4LRX8kxBFiBsLBaI46Pm7sn7Z1P33uvrRxouz7PyVBcjL0ACjlhFnI+xEcEZDZG29nzN5a/I+BNP7j4KjnHwV7YZgSQ0GX7XObIq+98dGfjoRLEAGTAkAA6cpoIK05XhMK81kCkW2GAPiZRRBaZmbFDVwJpLqcVc1GTlAOIQubuDoFcHTXc283APVLkeoQ4GB
HgDm2peDBhEluMQty8lcTVW5ktSAlO5OKpvdfeQSjULRxtrcxvMffIYj/WVYA5nWUTkIL55/Z3+L3fu624ULk6bibNK+lce1iSj/lOp62vjQvuWK248NQtOLA1NS/equZd8fJnpyntEh4KSVvV/ftX9o8vTtBtDGo0EyBNDSWynATBMyVPsXhlR4dkDmoxRekwpmSZUVxQ7w7WFVpAuQHUjkFWarR40UKF7dMMXKV1it7JutiryMS9Fgfa0k88VwTErZvHb2rM3eeWL90tGSkvFLeqoDDzldnvs4YkKzA3TYJSW/i27Rayw0+/w8sTubS7pbYhrpu2sZsvO0fmsyw6jetoo4zy1OezZ8++uW0QLy9BAWYN87JfNxPALzFDaT51+kraJrMSKPScjHVwSgaNAWTXIYtGwc0oTJAZGOv1xOgKkbqypA13JmWOWQoN1lNmQzJVJ7AHsiwAi5yYUqaknHYLJfCuTIMMNOxDEqZVNwk0yjx2nmhg/mP7mxlgBujaM/byEQVY37VQyzmiKrMlK8u+TLVmwoxAZwFkHuvTpAjJh8+/lQ83Baz1tJ30tLZZdkGfO9uwZAi2jbLatg2HeON15wXm2zyyRtrG8tOF3lyaMmW84FcAULTf4iLAw8yQNqG63bZJZrQWOUjPIGIkaB24pqAMKjNrOgWyEoY1I60CPRNd3JNj1I4jM1CcQum7KbCeEiEYaVgszTAdHIGQ4E74FQqJB2OhoqOAfgdQ6H4i2RQ+6KlrbXqWx7pIYc6+ZLz1TP3Sispk57OPlsqZgCkR6lWHRKYZDa4MkZFb74gBQOLws69fyufF5rizovthilf7sU3g8/3exqZh9Bfi0O0y1sTf9sEu4nM0Je/yudvogeYXTg+SShTLDk9PoGFOTEVXwqYQaVSMoLDokzGRWKTIlJdOGBJTwLqEyvWbDq5LgZvAQmS3MDiGdU0BopnljoHqVxWAICPV7wkA74pEJYVcLJk1690yJjFHajy52QWXjy6Alo8KNOixp4avjHWa4ItejelHOxEUJb+5Ox5BImlGq2vOwVfHFFgoMX7+5feQANww0Dchg811d5FSECekseCUnbIzZvY52/q80D/YXjYyzMSt8D4nSlIzw+HyuetWL45kNjYbi2Q3LQ98WVvyKBjkFoLGNFXJGQT7HvQCsRREqqaZBcs0VYqdAOzsKacVIm2q9J6R3gFWD8yoADpQXBTWGI9SGS2nbC57uinvV3FAzxR3PUEOE0YkjCqs129uaT1XbzkVSYGKFB5/4uCVJk66Fr93qodALztXsQ6EipkV58HdO+Mx6J40IwnPL71S85y03jY7Xqhb9fxyD9tmhryHA/9hqhu5dSFefpVL1NJTd9H2yHP3JwDEFFDB4ZOx/VotFuLImgEVtTkMAIWCFORV6rhSKahCMRFeV2brYWxJZHsCttub2zQJUtALcup2GcB6EGYRLu2Tqjo6AGPVd+4qV6ICvB9jQGiZBZ7ePS7BcPNacpOTRpQ/cfPua1SkbNE1ZONMLB539hoGTkBa5O23bud5yJTxuc/XyzPK2cPCMc83hyLivWtA2KWJvuZ9IhJz0p2HX+iyHxQqRF5s45CA1odeqhbvLgmAyaTRNSI1RESIGANpPqPE6YpMVRbz5BQdgU4h9eO9QYM5jaNUJJQlGRpTVGbCHOpdihiqQErGkr6ESXZXnQccmdzrEsh6hBFBkwy581RspWDmI9d0QvlS/9zVN98whWjdmRDOxllM211GZgadZtjZz/t352RH0VAmABnUy1+adJk1eCJd/DEb6GLzTQTn0OX3gBf4EAbAvNE8NFXYcTtFidkiSO1p56SbZy8ymyKioVvH3p2RYQBkSFiRmJMZaOCYDpZCIJJe2gbmpctMlWkiekCOu2G+mgibpigOsCzca6wr5FOiELJ9gMhVWJ1ll/ZKENRBV1BLocyXVQw/GuZ8hpTXx68cL1mqe3y9nmnsEqXlM8Pru9egpBsmnozWFE2AuIO1eYtbCNzwt2/nzaIZ2sMm4Rb0Uv3wZaVhTnD/LiPezQc62q5v5pfrd5shuiDF99xS3pxOAPhwSSKcnEjHa8BsE+u0TSlw4qQUgahr72p49HHzymRAYxS7JnidnKAjB4J0BjKislOCsL7rVtUwRcBcoB3eqahHIzQNYtcDXDrAcU1SNVWI7JYUM9YkUmGGmvuMWnWUS2T1YrSyECke2Jiax8afO5n4H9k9tWhUXjOAiqvPPoqBESA7Pw2qbWbAcpGWAs3dcOURPLhbCbpxDiqjGUi89pnpEnH6UH/BqYGLBv1Ey8V0ifA+RhQvn0yxha4vdxdx9vOdYIKcMAkytOGrnpEAbCRRW6wHWJ26oLizUiYcCBYlNVZmKGUBGBSZqIR5G5Sy5AjFsEYa082GYDlaF7AOQq+kLZC1TmMqa4V5ZnQuBQ7GFrlvBvlOShrfFTCoKGrsFphyOmJNEilZ3Hz0dDfm7mMOgi15rVlcf/rKNEkSyuKUbrVVgWy3Gxr910hevVHv3o7znU6+8enDizXqrzrR6MwwmtW3S03L7TqdTcvzEPPFRNOzFt9WMdWJQy44+qw7miStrg8msDAoLd++3muOnmJBAkcVhBki2C6gVgpGEMDiGk1cgugMVE6lc40JZIi9pNIppXXUUCV7pNgr6xRH7T7mhty3hGBVQLVi01R2g4QdIVombgH18e7M8svr1wmoSTGa6NoROVWBOxazeAOOgyyFnb7WqYkN8fpN3b9dLxjq8s7v3b+QdGEXqWvHwaU4BtroppYJmarnhPfxSMcmjvdUO+02UJ6Bd05ZdCf8/7rAN62TNs0ZHEChnOr44G7kII2I2Lu3+2i2LcfcIuATCThUg6i2AJCZ4QRkcpYQIsYgeyBR06yOPZUKuNFhzozhSAwl5O7qlkKoTiCjBgDlkoB8PRqyOpWGkpmJw7bJW9LVP3VuKeTjN0Gju1mzom3RJ6gMdv3JoCkJrW4Sy46LUsjdHVdv5L270RCh2Zyeg0fv/c6diwotXebPP7fJ8+E/ty/PGXYzOHn6oPeAeL6KdiJ90WnlmKzr4QAsGt9Sp+yq4+U5KI4wDkKOc66DKQFzyDExrUfTc4Axt4HIqpksnY6qQlPGwgXvXIhxBC0TbqA6H2uN9ZCQTO5Qv6xS2kGAA3pD110zZ80pMAKA0hlPXg2efCeQskevwH1jTLOUnOZ8VPLF2Q6epeyeTZIA7wx25UbefXeyE8fMSrMffPqtC6xmu4hX3Tx+UCS2EIL5lqV+USGYVleDTF2cuXK7aWwUwId4nzJnC+tiXhv9Er47iboeAKhD8pCuqSeWr93LoFJZaUmNR/PRY7YNIJKieSQk0ccjJaeQrANglJc6RVZJ7A1AF1Ws0wQPtXoBPtap1lW2PMsF4E4HJVf3zRhOqCz3INhipVACCAj27MlupBE0R3l0oYyEAnOyeiCoDFm/OJV7rAX7SfS9PpIKGNz8yo14cDdafgPNPicCSFt/9q3z/K3LlfUT3QoASrs4tefmqDmfxfn957w35xI38gaR5uaCvy8pQVMMU
wJkdIvseOC+mqDl9JFMgFNl4dTs4xSUFQAWhZm16QBJpBUXkDkKi5YthOA4tvSH3gkqrDHVcYRSlDvEPjKirgClZGTGviRxFaaxGoFaWCXpyKaWcJiYrh2rgFuKrIDdJ8uJ3gAI77KlZDPf6k+NSjx3py08Ukp5Mfj1m/HgThVsw5nY9u76s6+eW1OmC/W5ExUFZrg30x5quM8r24Cz1O+LuCYX239NwdxG8nx1mUfaHUhMU026AcWDhsUhwC5jGVxMoGXSIqUIgJHK2gzNSKCqOBqhMnIUkhAKIGR6r/VgZhFwkzuBqMPglpHJDsQSkVNEmEyRRmYpLURDQqhAYA9a4kicc6Gb4qllnngBAFCmDPuPAC0AFQDULaBMpJRYdNuldjKIW9ZzyswkDZlXr0/3bo9G6Ayd3sbPv5xndt+HpBqez87Npwuhd2z8zXNidV044LPr5/iXy4wIk2Y6F3WxpnC2lMFcRVAag+5VnkwoS905oEd248LffupzrZ6LRSUPpwJkmtW5GIhjYqvT6wBjqKiVVWkTskiGGDIRY2A5H6QcqkOJLJ2gTgn42Ai51kHa6arg9dCA0d2IspOgbNUPYJOmuLJFAclGYra5Z26O72x7iCakupGmCvM+U235n1yMZOdHLWuY3BDXefe+bhZ6tpx6nMF7Yfh8PO+6NDLobFe3sZqhtcu4wlvptZUlFwMOwEMzBM23nK1dbZPnX/ZYm9YyANRhTLPh9leOZodZyX6qXY5kLN99fKep3h2CvhIh3R8yQXZdy59LT6iWBEuKWUeY9fdfWZWA82hyKRLqDJmmyFprpKmqI0Emaks7lQmzFHciM20NZo0CEQtPgdOEaa4TbPWRa3nijeY8p+3dHrm+Ud5EAwq5IBocsMRM4DkxAURa2VNw5mFD167nnXeG2FCqtB0U6otfyFNb+TlP7GWuoLPQzzamcfsa29G9cE+JGWy8vJ4QzZiCpA0H4CITRRknLtD6P8Yx0vLt11c3LNliBeR+WCyoujj4whSiqTCAGJyQ9T62RAgUmsKXsghkXSvTJtkilOaUeY5hxpC5YJ0A1XWtQgi9UR1rIsfJTZBcUNkJUbkCMMLdgKXIwIScgKCA5BOnXi5k0IYr44/tt2hBUDKjyzuDQmG+UzYdf9xxmZLv5VoA6HTX1Rs8ul91DMgex0a89LnxpDC2k6zMrb//DFXzGIO+aGg1K2wPVdlOcrMfftjGVXjxcTyVJ5QCGVMmPW+/PNxYLH1TlN1zcQALeO31OibJ3b0SMRigfr9PQPBoC4ROgVONaTVBLCZ1vPrUDpIWoYIplX1JkFQkqhlqRWdHh7AGkILAJADUEgI4HqUwFCdVOgHgIdbNEjfE1cfmukobRe2kSF883gMRm0hdohKH4cpqfX+up9tNfYejtlvw1Vvx4O5wprMlIPXyC+sTSJ6dLO9utvX5nxQE2wV3alKcuPZcWuvCETvXLj1S2wgxAecLZ28e0cqJiufMrGOixNsvD4/vfv63ogAsAOh15yiLpSps8cF1iz6aIuvEVi40AIGNMEWXMhA1W3TElO4FboboS06SNKYtYBBqrVO4yWKKrpQiCwJ9RWPaG2jLKRA2HsGqOstxWFhCmuSjKorAMj61NydhZSuUx2PtiarLR2FzvGQbqup+pERI3jtPrFKyBYUxuz1OSQVK6WBXr+X9u7XBcluYToIZXvv8+ngHKWRL/MxtMZKzQw1q5tVdEDCwFQ5fzfA/POKgXW77EJddUHM2ckJUppI23T/sHlt//p3FR3bm5PjMrP0wdZkltLy/zHCwU9Cm2s7FNMfAKAPoIEE1SyqZ4xBkQSaqFWhKh0HslHCl0pghiSx0MxuI4hVJNdlR+qQqDwQGSZgvKwRbdbWCQMJi5+k4pWKdekNQ16a3pU1RYRli8ncOHg+pc2LMjRV4Qi+nyu5B7QiBLuma3b6PG91MKGwlplvYm71RP7q7Oa+0VEImWGo2z+IcmGNNw83zgWBbCZbvGU8I6BKC34kbtVl4su74uYsEZgGVFbUDpvvr/pHV528vP/rU7roKFDKdhB9dQ1147twdnr5DWVGyjNFJZi0i3B1ArWqFyhSt8GuFNbWcJQBbTc5QVbFMojKDqioaw7u09DApK6y2cg/KHkkxB1NGVwjvLS3BujiSXGRyev5mPbG+zzaCt6Y7G2096ElgfM2fqFk7b2SKVoYGaKUqJTLL/uHYt4Wc0B7u3sfNfssi3FYGEd/9zIevzNZUCVDwFmm08cAyNsTC5KaWJHAG4SclntpLHjqy878XaYDbq7QZTx0bRWeO2x6etIgK63x4d7V78/Bzd3Y/+sxOym3y5lHvxulqJarZuNfdefYOKGMo1yAspi4CgFmrp9Byk4WYCAhKy2JKK9Usgk6vQiFoVCKNFKLBpbQqp68hTpPRBSxSrm7FTqOKSbagIITlRCRqFvCxWZXhnObvbJM/Ph7OG20L3bDHhle6m6kwW3IKV26jcyh5ClTZuz91EM0Z4jXefqAby3lKtWzbs6P+nfjo1SZGiqAZn+fmYlv4wARabAbwbLm+Fsay/fO98pUDAC7kph2nspsngAwXJpSbgx8tBcWUpE9vH+7cHD5z++rXP7KHEVmnbJSsQH+0y+DUdxN23nCDu42BHCVlmGLLm1CSrGCOaU1HoOjFJAsYVROmKhYLWtYiWLrFCOsJmZJVDACSFi3zNFMcN6hG2kIAse4PaxIKs7z62Maouix9qrqnXjkytfkhEv64XnplcbWOXfGFppPjoTmkmVn2VmMHtZpS2NOdA9zq221m6udsX9399EeuE9hsATMVqF3Mt3qh1Mw3bRKIn3tKzRy/C3892XITsHCBpNDMI4aETXa8C0MCBbR6B6YMFRtvH+1ev/fF+1e/8YlSDyESXY1WU0kFWVre6VjeeV3qaWu4hkJN6FHVYPswEMacc0hGpjMStAYwEJFQWJV1RHoaUlJRTGE90JIyQuGtkDSgRQYMdQCjdh3FnkmBiaHVFHEMH9if2kAgLs2Ns3jslWi5YrPpO49Or375Q7vMqbMlp7mk8YkOFYgFV+GWYKeIcgW3D/XIxrd0svHeZz/ySLLVOrkc4NngMpe35mi72LI/ww1oL3zBvRpO1dSaTem8Sx4IAJiSgj4dHpQrh28OV55/rNdgi8I0jA+0OZhRuctpUaY+Huk//2hvQQ5rqKV2wAbanKMplQEiI2EheNG8y44yIKbsvFKN0RxJ1UBxKMuU4rL5EuAlAa8guA4ga89w9DKJQz/WEAFjLp59bx6OuP/E6ye0Zjqeildf+mAXSecicjaI52RC82urswfZz6iS7BruHtr1xQV3s6NPf+jpDeB94e0JUAakLhy1rUvnMvAX29oxnIMUDLhkn2hKrYwJnM8HzG3ywkxnTSbRrR6s+r0urz261zcx5FLdUDtMpLo8ONr3ST5af/hIGFWR05QKOLISoIOsNYpFgBHw3OSd7zgrG+t5cluhYKiCPDNcLdaIGWktTc6UcArFq0k+JjVYSVfpkkZOywcpkdFxeuLGvAMkXZepz4lrw1vG
ODuMbqm+sqpE2ztHDj3nU6FolJkyC1wuazSOoaJUkRoBRfwlEQIwH8qsjG5QHMjFytatY4QWzdIQIgZCDzOR82VI3mT6H0TXSdWCu5+Ph/+pFnTzfTEKq5x3d387XxL9dbh40VXlxHYtFpb8ZGOqsjAAAmJHoaACQKuWPXdVad+eTztDfJfd7r3n77av2Bx35pnBnXkz4gl/+3z56u07S8PKw3ry598hf/4H//e//yn/1Ny69vK0K45bKUlRADopBZigEk5WhuXSEiEImwAIbgk2TKLC3YDtHDa3qPh/ObDHy0YY+Uirz3Xaz0ARa724HJVQ1f/wgBmHbLs9dpFre566KUVnt6nnJuU4gTz2Ua9zsWITklMorjgVm3GLXrPgYzcBuVmcWkCCCLmWkWMu177U+IASnq8N1/8Fv2i71b+3VouNPpTpvNL33kzBvbo5s9n3/xNV79P88lRqTxJs/lRTw+u2Yax8fu8clk0HMffm6uE5vxdHm+Pn1l/535T61/pfYEPVwBdCXz0wj20fRW+Dl3k+c+/e//4O/57st/42kbskLEdGoSDQ6AsUJBUc0iJ88mVfQTsUt350QZBjKQMB/bsg+v6jjuTdYHFwG8p4kZUYSz+x4r5nI1Q6aTvpIL5bVhqUSTmJ1xlohf7v4Gz2pGk6bt/IaxZTp7rIYdU0yQfgVYTSiCgpK5QQWLCkl6SMrmItjQykIZk4apz4S9YxoX/Z28aA6jW/pNv+G5DQy3xlLkHUX06/ljL1/5zR+dnnvny3XJb4UbV5djPNgb744XS2dasiJRbMcWpkEnuMSPm/sd5+rIjSwjiPnFuWvnzsqGmWcKIHOfWbn9Sqecvvv00uh6uTMUNIvf8Ut/9hMd//X1MgKGepqRASaWJFD8QqRiHI4y9yZHgR1mBjOncAGnkrVvYhRngVWiE5nwR2l8lEx8wIey/NBxFDkcMFHA7m6/IgqNX++Mj4HF6XL3LLRbyBukUigARjmQjlxCYjMisCnHJoeWvlYIIlsUQJUD8exslgQTaOoiD1leFA45qqz/+KH7TxYu1beXinLdZbkMq9GV3XjmB/7Rr30bjb/jo//89vNLB4fuys6tra52epPmcMX1yswrvRcKc3c3uGKycRqxVLXGQn1r50JxcMNzdNQodOV89+ff6Mrb1z7z5PTaZ3/gtV9Sigvf+su/+MR8vLnSbQKb00AEIwcL8CAEzJvtHYyjCQkIzIqWF0iFCSAHMygoPmLN54mHb5HXjx71bsfhXm9+RzN0ADYqhwriaYzXHgeAqpw//XoOVeYjQ4AtniDdmX2/HGyRcZKSA82oOOKMGRakpnWu0xzme7tkZGxRGTDiKEflngqCslj0/eEkKzM3PFj91k8+e7q6VhRlXO3E4UE+npsfHPZWd1//F5Pe5dt5tfcdr/16lx3tvLY97h1yV/udPmxv5PN+Jo3eNQPuZpZ1Ey/9WNcwsrq5/GV6nDduOQpMjXFc7Ndn82ar+/zoa4f100++ffNMQLP8bV+49GSvurlWGNA4RGL0zUxLrwRDdIPFeri3O25izokSx2AKcBsWVWMYfBntRDD4wcQes+75piIEabyO+Hse+DHt+gizzmbtXZTgrzYEYBrzM2+mkFHra7bY1PeaBCs7gSc6czKMEt9dKhwXUxIlAlWNA/cPTBLRUcSJwIxFhxQ0MGo467phpr3nnvrwhcymPRqfy6s3bq3yxrS/u7pWHErvOz/1+Z/dyD704Z//a/u7ByPLSgnNiICtpeXOikWrt3d73dyII53wfe5eO66HTqY1IDFUNz9//cw5v7HPBuUYWZcEHwioQlPxF2+t08ErfLrhZvFb//1uvze5tSpAYGI1dRaUcgYpS60RebYYDrdVQyKoIJC0/EZEIIGRZeeungCtPRTjnzypb3LLAN0PMHp3s8zVAvVxkhlx8LcO+pGojnE9T+pTdEQsc+8LzBVDZtcYz5zDo8UPYIY7QJz2gaw8TNvYkVUWZ0CCYPAwwMaYa5onPvXY+fkC40BZb//2fO+5N950Poxso1kfjHS3+5/87X/S/PSl/reWTDtbV17bNkgZaLo5HD+xOMjNQ8dSJAEHO/EcdzT3DXcur4KxVtPtn/41Wlu2t6a9CNJGSVbAg2hkIfZeKRcmb2dvxbOBw+K3Xx4183y7WIVzUwbUoBqZLNCkB/YxEPmV5XA4qrTJWFhmcFGpkcNgok5OPs17kJ10VyGn2T2put7TjjQdZh7fI8bJXD6CqfOjAREC7W0tRNNY56cGG/4EfjclK957zZj1x+zFAllLyJlGV2KqXlWQQg1VjyJ3hom/PkAiqYJMJCS0iTIrAxy0Wv3sB1585ZfnOvOnBhL6K4d1dO9b/eL+Ck9DXZ8vdqMdFP/tLzW/NIFkg/kL3/8jr118+7aKxcm0sYPu3JnzHHwdQg0XxdXChnuAZNzF+Sd0vFFmodn5Nz9HuNBv3omJsTmwLCgTBJZFt5UdXv16Xr4Wzweybo7r49XmkFcRj+cWKwMdV8OMiC2A+v2Vc9d2DwMdZfXI28wFfOjgtGSdbeb64ec+zWB7bab0EXeLNPWouwcwFYdr5LJRtMvPGMGqfvfsBs2S+g8yImRp02AUMm5ZWVpX8MhkSfGuZtIBur5u3XRVm+UMLYVBxGDEUz39wmcrR+e+ZDa8kq+u+Z42xLtzH//C1ZX5prb4WLbDE3nyf1ldnFZ13Lz86yvf+bHvrC/+g0uZIO7vzJWdC2unluan+1HHLHs4O1WWjN9Td+neV9W3r9+az7tX/49/Txmd9rtvxkYYIo32evKVtTMVARx38G/euCnUe61+UjkEmV71C9jHKVOmWRCDwDTyDUWT1lhTXl2ZHG7vTRpHjtmY23pQtoeMqVla79yW8z7C6meKBoszRpdHHf+W47tLxobOTnTCHORGTQAq5TMvt6XFYhH3PatI57sjVvEgM1GTqNw+dyQOBCMXDdGqjpjr7yYNhaCmqiBnxojRSKBE0evO2osXbl95Lq58/ksYuOmlq6dOORr3yxvhYxevxDkd2eULB0Mf/qW/tNgVB+/j5J//+iefe/b3/NSrBw2THAxlg9fXnnrSTUyGu9OlbOVrv7K/fuHUwl1FUu5vPblZbITO/PXPX4reyrnDNy9ZoyClWnvd+I1f+U8XGxhv35r8qnZ3YjZ4C0/FBvqRr7z9xAB7umqJ90LNYNTsdE092Sy5SYianVoNh/v7dXDMMru7PTBne3xyHk2Th4d7E0HwERPio7d04TKvjFCG8ZwJh2zrcBDBVeNPl20hlj2weMGywSET0GQEEJvV3rXxqjbSQwAx6iYDuocBrEas0QEWxcDWepAABXr+cX0n8s0dne5drOfF6+Wb5+YYg+WLNz8w/0rsTKHl2k2LRbl1O+/lTOAMl66//3ue/m1fe/vW9c3aAWN/8PoXn/jNH9/Yi6c3t3eeO/3Eq5t+cJiXd7yDe/n0dHt7K6Nf3c1YR+XNn39t30VlmEYr6sP9jX/6A2uTunplu2yWl1/fqLT/VvO+JvDpxc9ffOr0zu2DMz7ayMOor
jsaXlkc+DM+hlnhNymsJppfrMeHk8aO9V7ImpNOwJ0j0vY04/jMeoQJkEQF2B4OM79z6FJdHZcViNmP5o2d0cHNhQDESb6wfNlj5kk+YFux+VtG5hpjApFxIZrejwGOlMKzhGaSseXFIWuynSLYwVTlRDlB9qlsL3QUW9/wC5974+UbXdfpHL72+Fnz82euvf1B9wsLRVi53F2cwpe+GYXceceKcvLK3gvf8/SN5fOH794ekcTB6Wzv584td6TqNzdunF7v1fvni9caSbi1dDs3/+K3v/Yvanpr3FcSW9j79VeZI9VSmvLBVw8Ospv/7KPl9UtvVJmKf0q3JtP+W9KHx9J3fuHS8+f+pfOqVhPDGg2i2Vmebu53CmkPcE1MtTFyfxDrw1G0VCTCqtFwH25s0zTuJyN+D4/3s4FMYe/hUHrYF9UIZtLZZYsoJyBIQ+Hq+wGgivnZd3xbWvbAiWXz5ZggUYkUDKJArZRcm+4TMmtIaiVQb5wogiwYNcSRozBaQuooH+rfkLoQxKuLUrzviZ9/Q/LB3PCtbH2LF8vtr37Lt31+cb7Bux8a//TbvbzmyTT3XhiU04139v/jc39bl+YXhgfxhe/vvfNz1aW1w/5g/dR21mR2e+5m8b7x3rCljiYA7szHn/j6zzxXb+UEcc3579drjWe1UOUTvP7yQt/c/s+MdyZTa0xi9txbGwdh/o1zmmX1wnf9wi980C1SIKUCAMhIXbcbwnjbin7XqdlM3o8ABRXFQl2Np8lKIqP7JgCSw4VHsfyOx7E1Rci+ue9hthg6ZmRa7DYO0IBrFUegMT2bGz2cMArqF8fANGZtuOruL5CRgY21LmBlOTwSiAOIlEFCrY1xRi47bpqYRS6yEN137F6ZHB4ulW/lczcuXLiNr37o2/6d9DpbN4s3NqQcRxcqDSJgQXbwC91v//jPN7d8Obf03R/amNv7kvxWf1t7seiYLVTvnucvD87KjhztrPzps53b0809MXXM/IEXngwiwmANtR1sVgSFp7ECIO/UPbPSndZF/uK3O8S5z6xudQfJnlWYCZPWb16ri8W1Jdq+dmsU2GDQCGJmEYZKubB+erlDMVqSVbhXY24RC/yIGQIWkCAcLdF7Qc3u39KxRChzGFmuU5CXmnf2mZmqQCtzj/QQhCU29nnUFs0o3MZfTGfvYd6bTQC4DpiFSBjRYhsBSIg2XZxvxChOQk12W1mrSc/z9Nb1mL021ItnV8bFS2devHJjKG8UHy1cN3cEa6qmibGOapNffOmTA+S6f+nrf++n+PnP5F/+pU5+EGxnu66bleVLeOL2r/ilFoFARPw9vrnU00iJ9fa5ftiAmRNiDXXtczXA5s9LBZXcWP3TKy7wp3/fB2uhsPCRpbmeJucVZgKtZf+dr726oeXyOu/u3NyqObGD0zHMk8r5U2tLXYoRd26qJ7B832Sod5buw0N8tXt/t22Wl8Yu62UHICLjw5vOQHGk/fXwSEeKdvsK52lGbUd0/Do08wQNNKkI6PpEoIsYokZrZVLFKHQvdB0SYsi5a1/fgdPRXDfD3rVKXtmb3vpEt+q//dS3X39ncnDp/X3xHiDEUNUhxGYsYe9fXX66bkyK5ht/41/tfsdnxz/51vzugZRVFTSeXjjU8+fKue7RDODl/OAdn6h7FYMzuLRTgsCi1owbFo2ANv0LDurFyAX/3ArnH3ly9yUmxKDzrBaDI1NjMovdua7be/Nrb+7o0mPLg2rrxm7DrGpIXi4ZC0GKheVT85HvQE8flXTgEQ78OwZRAJlxDT0qgHT23ZNIsp4UGbvy0KAC0ytGAE0bd/ZRGAgB8/PQmIQv2vdtoa0EEQEJCQOEymC+nyBaYgpWVTVlYTPrPilRBEYW2Ky5eeXKbrZezq/M55PrAV+5fPvg04O6vLj8uckb0+uuT3nJTGYGbYJZIzTd/7c3cmW58MMvLr329e0f/I/qf7PjL44KDHeDugGG/+Gtn/1X0ps9tRP37o2MEv5aFzpbXxl3IzgyIwxJHARgWO99N0ZejIw1+8Ar8VT4mxeL91MMzaAxouDZoCQgzqgJ3prbW/0zZd7vxen40PW6jlu+ZeI2ZCQCzeqp8IkYoD1UBO+94wcASkiQ7G/S/bv7Yl3P4NjdazyTBblVSQQ1Gs72qgflH4+feOGacuSkCJWCwSejHWlSGEkdnFF/WBPDwBaEVBmkZCBc6A8z5qgO1rDRvm92Bi888dYmynF9+7R7benG+z762l7v+ur3v2T51jsxd0wAwxRmnjSb1piwCl8+99v3X4F3P/Lte835vYrdxHatwI7pzevd3TPTkNaKy92lKRMxiDkMy1Bt1QABAABJREFUbr70BQcziDI1FXIGG9igpeRLHA3kLP/gHm68uXItfhABtZIRHBvUBFCnUQHHqDq3L6+vDfqdMNm8Ndct8pTonO2JBhhPJj4r3Cy4kigcH04JfqIxGUGjkBI9GrvMA5qVWXSIBU0zhqps7qxFoK7cwtIVxw+AHXIbsbTu3LaAObj0Olazg1mLcaIkOyJQrRzgy4AkI2sanEWmyMboLFHGCIEt01iL0G7Ip/WT33Hj9Zvkx7dPTXZ2bo8eo/3BNl843Lx0ZRLqKM5gkRkRzricVrnoFPj5jXNf6n9EsjneL1e1ajjrsPWXpqd2Ho8bZ1duJQPFdeKWry2RFDn7ietXfGItU0LUwqVcHrg+//7pO0n9UP3CN372Je8vxw8oSAE0PZhFr7BAPB0Xnli58OOd3f7iQpFbkL1YlD1vGgnEpmRIUE6dTMVnziWHnwiPVtR7tCdbkibBI4ptJYaH2b5x130sy0cw4DQrsav8+MYZA9m049cv3XP8Z+kKA2BGpG5uG2SucUcaMzZLeauxGIjJDNYoG3dG0VLNeaQAYWZVkccGEw3OApv62PjMAtU6HD62Pn/1S1thMunUPPziRZypstHN3d1NspFGJtcoWVKcVXaV5LV6aa721+azhaLsG41dkRUNcvTndWvTcz3MusPkBtL06jQ9ByyvL1ZxppNgosiPti/+3If+nbYpOdv7Z/s7kLmr02VIA4IlehgDqaKuNXdsErE2H/dfyZdWBz3pV+PD/axbJjp4M5qlwhgxTiTLMj7q40dhdaCZ8lWbInzU2E+L4Uo5wvccGdkIBmQhOuHY0LWPkkEqpdP5feK/J/7aQKYLWSBzQTlt/uUJIJG1eFQ1QxNy0zIbt1M9tPwOhrC2Vte1Zr6qswjTqN5PYePDzacdffRnajcpfXSUv+1WDiTL50eHeRNIyWmSEE+sky54VgM3Fyfbh2VWdk7N7XhxXVbjyPnt1z2mU1rrcgDBTTbejqn7rMlZmkbB4ERwJ9nsTXXh2V599NbTvV7n3Zo7tw6XTNVgPoKiODPjaBHCLu8ZYZK5uH+w86FhnO/2wmi8u1N0epI2TIptxI9AOhpxlhfSAn4eNoicwj3tHx+gDZMaQxMSKs29FF+8B/+TcZE0n9jAQjG/Os4aorqWU+Whu9cdjqFBBoYxuv09QBGdAkaIwJGiuCSbwEirTKc54MoJmVgU
M4u+5ffKuFbSmPtGBWaNNl1DyfXw6oe+ceBccyAemAy6W2cxVVCW5SIchFjUaGZC+WlgYdDwgHcvKwS9M6tzvYVeUXhvm6eWnmtwxd5X/Oz442LkXn/91xsigNmsoxrhlZNjlRtls9fW3txwZ3bIxkD+SbxVY+6gX8SW4wzmzJpgtjoY1xNxfTIaTwMpoX/jawurK735fhhdzbqdXuG0hcG2WTYii5OxK3J391nbOlB3Fj/cG9r9kPYI+UTrOCUQQY2cD+X29tkGiHXeP3Vwj/3iHq4KL+4R1YqToJG2+gGzgBgK0ioyWW8YDCBT1hgkiEC1UTZD5VyjJkYWAhdcuaze2n36l/OmGfFcoOlk8drmwmtC5pz3BMAc1Uh1IiwqycFMMWlHJPWbl3s0t+6akD3evzw3v9x7thPCE7+iYuR+4q2hVyESA3INU4gzs0ixcLTgjl6irK/tu1lH1Zbxs3irQZ4ZQGrOKFpmqrGJjZvTSvf7VtcDHs+PK0e+TxubnYXFbvdxN5kOudvPrT0LWseNCNDRUPLcZ8ddB1W16QR54d1JkeBvrhjyqHieZiNxn2aZn1KkKCEjNbXm+mMMo2nPrb/13gDDjGcjQZ3aKwyyQN5V6ehvpwwDpkxKokqJviNMuoass5dywMFZEI2skBFzVFhkCZKO1ClLA+8mb372/Ku+ocMuqV29MHnzU90Ni+yzzDHIeEaIoUZizIDCOMJCBGcuyz/9mDrodNJfeeVf+ueeO7OwXH7uw9P9A7h3bmTEDCaQZUAdp0TEwuTPfCg/UtGOw194bZpWp3FskIHeJ6/XmaiB4CXo0VTXCuYKEFVVvhoPtkdzxIPKs21tFUsLstjE6fDAdcuCtJVBOBpWjeOx80UhM64pnU4OhnXgrN9bKGaw9wdyvr2nPepniWAum1gSeCVyjeLKiwRYHdyZTI9FyGc/jvg9jpv1+pueKYTCtGUxP0nwTAwFq4GnHQJ3DhREzAGBIwJDaO/6OT8hKHsz8tE0SE0FsY+7Fz/w5l7TCfvz8Ncv+I13Vt8QWJ45YShSDQ5Z4lNgitHgiUwXnrWzo5FNf7H7kdgUGSb8xKq9/nf7a2tLS/OH4yVxZsJgMjiwt0jdGjGKutj77k+8/kZb52r80n8YrM9GKih5IzxHrxqpsQUnTUy3DwQ2YFrRgqlzkpdze07VBSDLMY16lVfKfK4aTQ5o0PNGwAlRCAYsNBOXlaWYGmyyU1VMCOOhdleWeqQAsQX5JvJ9jxwdYBi6+xAyxOjI13V2c1gqW9P45fkNP7P5jydAMj3vmGG8sAWCH2XUghmUUoUcgFaqjhUEbTJBUUzN2CIbWyQCGPpatSZmwbzUxoaIRsFE5MOrj7/wr0llP2dqth+78vqbex0zuFzYlIi8UjAyMhUQJRV2piBnnlyZO9y69h/+0ZvfvtpMnYVSzxW333jbZ3lf5r6/7+pAFhgwQUcwPsgyNqiqnXph5eXo217U7bB09KbByHM0e3q06RsgUUPCmFC3h5/X6FhZyKjTVVjWHWqssrLovX5jpbcyVxQ6vBH3fK/rZ5m/43GgOBy5vMgcmuG4qlBp7powmh4srJQKM8gjY31mnsKjfDICUoipwJwoQKE82HxcQVoXxelbmAHdj6D190pU6HzetE5qK2aRwkJJpNCQ4OBqOsmgrl9Z+gBMNYhE4vh2JxslLQlVM4mmFYzgy8NXPv3OK+rHO4uUv/OiHIwaZYJkLKn4gL0GkJGASKmFucj+F7449+SLL3zqoz/38pXPPpMxOHv1q9Gf27eF59Zf2a3ZjXcFJOSIULBNdoTLMneg0dpK2D3KUNdGx65QUHEAKMwnVQdzBpCQcoufNzNzMbRCwjDwmbC3V0dH2j83mFy7trDQLR7DtNrd6nS7CQvfXtsYgCCOJuyL6c7hFFK70FQicU/Ha3PxKMX6SOP6yDsFqwGa+wT+qAsXpLLptacaAHWUMy8/2gWtmLvN5rK6k/IHFGMnAK3nC20la2CNCrS7PwUAQ4SYRnICZnZGQBBnyoGIrDFQxs6/9fR3X5pAtrPMb906+/W5ncOSyZwIwQLIxCWx5TbtQICZOW+7v/rVs+//xO+98gtvZ2tZ5vK1vJw8fj4UPVuIXXMjzoVIYLFhc5LHugk1dbw7FW61uklkqIMcExI0IVEgaBSLBIuFRY3EgDqKAJkJ2NRx44Q4mo7fXllZuDpuovlcBr1mfMgr/WxO62qytZMNurAZ/y6OdDNjmKKI3FBeGeDJaKxbRS+SsxAf8RBoP/QAfACxKZOakQCR/VQYarVnkCldawBQZbxS1HS05h8EN5CFTQZ8k5SwWxi54QQhUSuVVTExlTUpjExjA9FADIM5MiVjF7hwU8AQpwxGvvO17/vMTwRqtlfNfyPsl52q8k0QV1uqK4ksZkmoClCjVsUd4vTdy1/82Kd/2/Wbo3okp56e75cizajyHxhuq+OBAc0YItx/fv0nt+RQMzsYdcv8668dSruLaU3+iPbCKngxMooaxWBQr0bmXYSlVEgCRUGcsJHBQrO50esHDMpIzSQn6VOldFWXBnkv1Ieb20U/k6OCytnQEKjMYj1sqko6ZsD4cPPt/MCfPrtUxEcxBOk9v5z8tyNX46SR1h2SgXTcIxZpilv7AzWKTTa3eN2d/PJ9my2WUzbi6NPBz/dwPogUqAom6hwqyMUEIU1RLd3opgq7zEUjwJgsViAtezduvPjaG3DDXpHvbTb7K246rZQ5JKYZGDw3M5+TZm9ogFGumz/5pY+9+MzemPejzXk9cC7LqL+3qc4sKqnPmJoP/TD9m0y6tWa96SHevRSOzD6rzc06gCxCOK1RGKCkQqDojEyFwIhRmbOEgCIFkcnaaGffw4tZqbtZAVjROdyeXptfyLvdMLm+OSg7pQTFTO4k3dYYZbdZ2BofNDHWTdWYwd4oVp9/ovtwGqgHToAjvUGDRG2tCuYCUEPYmM+IuM73NxZqBqZFdubq8fb/oCMo+vlbsKCWvFkSIEK0zf1Cjc2MJ6HT1B1DWYwMamRmGiBRQDyMrm73mOglmhGHGBpqsvFXfsN3vz0UG2XU6UxG3U4+0uDzxiwRTEElaoqKM+lRIQMARWabP/Xlb/9EoWo3D5YWBlnIYdG25txUzVGRNRHFp953cU9EnFGGEW5Oz87irRSqWFCy1oxCgOdIQNOkTVZSaTCg6igqISpzor5iFQWU/GAiGQ6HK/HU6Y3d3WmWAd3BRw9u3dbuQuHOopkMpVOWCQ42G38AMHV8Tg9u3dyvSQRKwPTdS8sf+eCD0jMPCxadoElKO/osH5m7SDDvhj0jiRavPQsAU5X1RzU8aP42EflGpY2b3/UgbBHmxHTaIXO9aQpqkmqg4BSCSgkAaeMQPFcS2JsGJZHyxs0nP/5vnR91BOxHk1wKUzgfKeUl2KILJq0wWmJaOK6uEGz/q7e+fQF1XW0Nu2UnR45Ll59z7Bhik8g481z31qEDi1JdTdeJ81RrDlCMSEgnVlAMlANkFtqlNcgMlpVKamAokwtRXGXCsnn9wmKENSP0Mie
BYtzMly9cf3U06WtO+fraZOfWdZ2f43JQT8b7Wb/rSI/D9EmBwIwWlp4+3Ly11QhTBHnb+tmb331ci/ueQVDwA6uIjkpMmKAnDQqfjZkM5eEaMayh6w0ZUYhxZf7g0aaA9TtjctDoEoxMrVWKncWKDACbWtV4WMfXYDSElIKJEGsqBxgQxVkgJYQsD6Y1UU67k+/90ttnDjq9SjxPp95l2ri8UgYnJGQkOQqbJiDy8U5JTl+68tTqIC90Ug97PZlUV/cOXReGZgpHzam5yTeaXAUB4eD8k42UhtZwDQ1nZJwKHpoAD6jBYtIBD2YWs9wYTKpkRi6wryDcTPdHfTYlcY6ZM28yenezX57aGswpD6+s5+Xa6enWziRwvygGzWhvs+h1nbXHAAFt5lCjLC49vnd787AyJkDoG/LZh4A0H7AHJPnoVsX7hFdpUh5yhBV7QZgsZrf25iNMp/lgdfdhPMWJDUHz/lDArm4ZcNjuDmRzJAKTNZ7Md2sDMZTNVFVgbKGgFMcTgVHNHHNqrIYZLj+2/D1fG1tdWSSeTs17tSwFgwlQEkUrucQAWdpNZzMP0dzuK4PB3Pxiz5cj7cjeQXPbgahqHCnp/PjaVxyByEnz+Oe2bnWylL0AUAXK2mJeomjmASMEFQNIxcVZb5MlcTMTqiNAp/o5k1qySEgJ6mzvwEPgALz59vL6IhcXFjdvvLE4t9Qpsrlm51K/7OWOtE3aJRQBmSp4caVqbtw8mKgTc6/Ov3g/Ci9jiw+M/RtAbG3V/0mHgrpsqtzfmvRJRN3w9urEzKa9bO2th7iACQgIkoVNI3NVbFEBBNOT1ZkgAlskHRdMsXcYJKZ0R2zIRRJrnLIFB7PgQdZIhaxTaayJ3nms921feNWralA/moojzwedvRni0FIIyATtyLEqjiCyZgDHUT3a2e3neZZ1epb1O44pGsNMs+yXX3nLRQAUn/lE7ypn3EYzmWuwm22bVEUnifE6cdVZAjQzJYroJJQjFIkZfr6lTWUBGnZQl9dNE8K0szhde+LazRu9lVNLTG79w5tbt3r9Ti9m+XhbO53SW0zSyiDEtKFZdPnTjw+3bu+MqJQvLTxbpzzCeypBDHwvjcjZCKQSDBi14l4nvlhk6iNcNu6R85XqjQ8wBYqwM0WUB8iIEVsQYlOLc+WQjTgmTfGZdZ8qnIgA46gE42baARedQwNFIkRSDQK1ulWKZVPHidkGkgXTSKN3zi/80KujweVxacZVERyTKymQwUjUlIhaFVMmMzYOSm3GJd0/NrBQXVeXZf2FEmUKeiFC4K9c256ml9En5w5rFEeev1Xm/ewPWqlkACFMIQ0AOGoTsoYZO6Y5KDHBjE4kbgkwmIhlYRSAcXP+ycvf2LjYXyvzudW1evfWlnV78+iGarytZa+QxDaqLeLTyBAgC3OPH97cGIbqP6zMKUC4izGC7vp510g9MDpsPhsTKfLhqoG08ldqNodY88LclhDufeoQkNDoIILmcwdMyOqW8oO0zmzGN0mzgCIBVpUE9Eat/w4NQlCj6VG5BMFJJESA8xxKxBt72Yee+rV3LxbzZU6hIiZyC3ktR65mZE6kUy1DEZ3cCQ1AE12tICLZ3ukNBrcdR2MVIsonEgMDBnJmVaDctVusxdryowmAJpbeQByjqcIIAoAzwCJMAeNoJgxySetWlazdSswis4KJ5vOYbX9lfm3+sZtztn1GaNJ1K6eq7dvThsvMd5vpZIfKTiaJQXfW08ZAjDS38NTh3ubt66dGRzT+JyZASqPdB8j5YG0ac9lQjLizHYiZg9/cW22ImsoXp247Ae4paEsMGLmoAgYwdxPAsbgfubag+OgvhJRNqWkKtTyfRpNgIIOq1nAN+QgzZ2CFb8xgNbQQB+O9jW7/+Z/baXi3yXIXEMxjIT9Iz54kamLa1tK5Y0SWkoORkDKX1gBCRsCQhDtOUzpOgiBWGtUT1HW4ionmBACoqc3PKDIoBriEDFAzECwqYI0WRjCkI4CILCbIrRm1WEBDq6kNKAtR9NnNrY7ryBkac/Wri6cX8nx9/drh6/XS0iDLOk013udez4sdhwYoldzGQHMLF/bcTs5o3dNkoBKO+3o2VEfVF/cd9pMzIAfItLMRMrAEGd5cVQY3xuuv6D2OFTabBflm8BBd7B0I8azyzZgTDd4x/R0BMKVYZ0a+NyFWNoNZYEC1MVCiDYepEzWySGQFkfnR9tqovHqYTwui2IcGjtpEmM5YthgwYzrpSye2zrYr0n8KRkSMVQOnBDWzKM6oYYlBYK6LWt2RcibHWo82ANKA9Ie6MTY2O8wYFqcDIiRxKCOCN1Xm9DgGMBkDJBAoyJjUqBm8sL9z+8AORqsdLVc2vtRdO9V1kPzC4MaOnysz39vflkNf9j2r8YnEO4HMQlywq0XeK7yZchqJeGzRHZt2bcynBYw/LH7UZZBq5kaFitSKSx9UAQeL672xvJeRki0SW4oOz4bYze+Thcb5lAlqMUGJYoQAY1UCas4mJcQ6eUUmqqaU+JNqA6vFqiPRFM6iqmrkachE+NbjhzfW9qrx/o7Pu4tzC8IWKlNLFpoRHXGPJ/vTxGhW1Ww4uVWq8oR4zzV1E1nIi5BGZqkDAkuYwB/BgaiOvjiaUSFSwRFAY6xK1rKEChsiEGcs0aqOWzEbhSmRCmApM0ZKjiFGy8sX9m5ub8XHjR57cuft195cOsXUXXny8f3Nra1OP+tkPk5HB65b5pYQL7NpDZBEw3i0k/V6hUulB3YiQddO+ba0O6EyHoF7UIt8CgLnoyWFcOAblYuEGNz80oiIcRKYkmq/U+rvxOSyhRswFNMoRnYC6i6AGhEnP6001aaAubJqhTQRhE2dRgZYo0YiIwkugKOEqJYZ7VZvvP3czX0xnU73rvuiv5jVUzoBkk0gNCUGmZGkkgw6ouubeccKo8YiDd04K+tgHJqMx4dwji1SxzUVsiP/mCrw0QSgOlCKCseQLAx2CqgngwJKpkZgp1Y0wyIzTZZwGjtSphZBygDIgqycGm4cRkxeX5x/3l/b2TnTd3VDiyv1zu0NWnIu890wGe1lna4ch4dS76uBYc3uXtbpdNw9+UKP9/2kTvLwymHOJ0Rmnd2GSFxd7uytRiOrCrd+CZTAnicuHxIa/Y7Lard3QEKh6uCYNPTIVEzgWSKAdFrAqHsQQQZLRoBZ3fhkz0EE7BuvARrUBZijZvK1UW99p60lttH+debZ5nPMn2hJq9HIos6eoTVE4/EaidF8dF2bBlWrB2xh4rn2QjrIDmsujrwereKJ7aBpUikogtJRwkNVgHaFkhmRB2W2Oy5Xy9aVEzY1s2TgkhCxMCCw2H3m8BaHa5u9hbk6LmnMrg1PD3JZWT18K+B2t+fFa1Pv7eadPiNVFM7c6nR3q6a7vtctcgp36iURW2AGxDTl4h9+AhCVw6isnc06B1Pg0ZVTxkCtOJsZjNJWztRSm8pRyuVE83P7ovATbcnCdEZtN3tqJgUiUDUZISvGDBjMBI1EaqY5kZJFASLgg4vGikY0gvHyJVcPxAIBDsZZS5
yUYBVHvLStqqCpJqPJeEYVyQCUDDAzDgEuVnXUQGXJVnc926TM62U3BcrjGV1bdlx1EWKeGQBLcSALlLyYma6aATB2CpAeXru6utbX41WANpqEEFsKO4I1Ecgf39rb96KjkszvXOuurXWK3uZy9/XX51eXSueK3nS8tV12epJMmqOzoDW+mp3drOh3POvdtFAtac/DwjjpGQ0oDQTLeFSYMcyufgwA1TEuDnZbitPWqrz/FRduGBlzmKGqU4XQcal7WzMNqzyBe5P2AaI5C23ZNFpzLTK5EAAk+qHqRhPrwbnabFo1Bjp5qLWGUjrok1yb4Sg0HFsRdTISVrYAg5G5UKlp0amYm8gWnewMytKmzREkHNQ07N3R0dcQOQCkjTpTJlKoWnSqpDA1NSgcK8QC+eml6ls4Rm4P6KSoYIjK8Wi1EgC/fnq8vbEZhqt9XXx849Zb7yye3XrtM1jav707mJufz1wnbxzv7RadrsAS31Fr77eYq2p6IN1BmgNtt88yfsfpnvuN/2wCGApWNqNsvABmq/zNca6EWEmxvJ32EFFrU+33adbvHrCRhAyspPeIPBjAHIFJ4Yy6+1MYB4PBorOYJBcDNJEsGjEZNezTHLEmUr9TqNX1ZDieNCZCAEkKfwFGUFTJKIkEoibFUoCjOBYZhHwCp7lDyaqen1Y5oSbACpoQaQUpWudJKVZwbKREZtAa4gygEFgCAeqkBtgdZ9gMpGJmRE048kmUkOqflYyUHdGxLAAzrJFO//z+jU0DDvcXlhfe2dio1wVdF+ut7WJ+od9zSw5WDbf3y05+vCGRkdks2NXs7rtOv5NxioDSIxX2zy7U/i8rDtlIBjsmxBSK3a3zCqDqyNqbRxlIemC20WThAATfAG2Uvqb8DhOFCEZsCHUWIZ0pJ2/ZIjGsAUgTHQlxUviKgE4zZjXEulEp9+mURK5Gk+noYGpHxlEyMWJsSVbMZhrXdOQkpycMCBAiIeezupsdNj7TGAEQlS7rxpqydOoTG2qjAkwJ0GoN52wAa20EIjRp24W6uiYyI4UkxViQGsgjEmAqIMcNxZnm7h06AQRijTSYe+bmAW0dDubz03udbOiMO4eNoR5udPvrXcuEvE3rTec7+ZFPRu2NklBw2Nv3xXxRJLla4OHHfjtKR8G08hCAlnXdiZCg9e3HawLVkU9lcfbI/OAtxRavR4JIkwEgaFu9kjbnFBwwI1ZF1SFD7zBI005XhaV5wwRTZQMHKNhZbIwQo2oIZKNlclmFPOfJYefmrcSynrb4mEp7ACCJ7hFZW4R2Yv7BEFUCXKfu2TBINwt1C2CmItepZb6tojKulHOKxGTECI2m6o0YAYOxZjBETeogajAytYwiDDEZvdbuyZr2gUiJC+9kh7WWgRrngkG+t9spMsyvjypezEajcbCq3uku7WwvLeTmECfjIfW62VGJkIHI2rwHUTPdl86gm7MpyO6jo0mzDMndASLqbQLQXCYFiTTAux8jI2pCtrRwu03wsD0QmMjW6e8IJKuzFuGWtV44tTrPqW+ZrKlyRlYegJXYYErRatDMAD/CyIHVW6zZEEPT5JRlvqjNVVVDK2U2HiJS4qeDInmmakm+lAykfOQGHiHayEAQdqcOJhP1lLM2ykYgkpzrBiXbLGpaG3trL0V1IE+AUR2SKaaiKQEPE4UmdCUQOUKDnsyGtthPmzHZH6+ho82JiMnmP7S/sbEd83pQkC6sbE2m41GlYZJnt66sLC10mLqdejra6nXznPUo+50YR5K3Hw+GUvR7RWbhgexShPdQiVrpAwAuDxZAQONuHHYM0Los127Naj3uGRE+cVme3wU0wGYjeSIcObM2CagEVUGE8hBsRBEWAyE6sEvgruOdhggssKgaQ8xj0e3VAYdT6uS2S8t11Sa4cCQk2uIDiQgQUqVZokZmEUoi9nCl7KpQENGGuCFTstztKWWIZCSmpJMoXmePEkNCBJq2HiV5s+R3RLXW4CGX3CRTmCOLBjiCkakaSbJr4knnmaRNpxNUYuzPP7a7sbEfl7zlF1a3b/leVe01jV+9eHOnO7fQzxwVJNnhru90HUHbcEuKD6kZiBAPD6SY6xUyq96+ozEsmaXvLRGk4hCAFvtK5lBl+5tPBIDGPV5/ZVYPTBruIDo9mZJkwLBwVaFe1bVD0gIebearEcHMGnK1EnGnmMAoMtSCIBobMyEywFmoVQkq3JhzUWFWwU+WBlWdHVqHOEa1vB80AWn4BFSOzGb82kQu7cUMMJGpKYgZ5p369Q2lIvfJVDdAO9lUuXMEZ9TaXDZTZERtVBiBrLZWLO7IvTbAkCQjPSf6QCLjI2hGCpofFcuRoxNL40RXMsXIK6eqS1/vZrTHc+tN7Xt1r2/S8aGaHmx05/vdTrf0salGB3k3c2wz97c9zC15vONx3ul0O8700XmjLOvsE5l1d5oMELXm+tMNgTXKamd69JB3fmkWgJl5+taZ283M1UMPOn65E3EsMoJxz6JVpanvjmfMCRZdJIltMsNAbJZHjdGgDcGi2air/eU46hRzAYGAOKFFOqwjnaxXR3swAkhq4qAEOkdktghVMzJecNIsZBtCPsBI2czYMh0b5yYpy0yh4cIfXXWqnBMACyAQqZoAiEIEJahGQzQQB02nAbmWDpWME184CJjKYvIOqE3iJKoUMLFICi3Fsv/u/ALV2wc9t6xlOaxI/WBfo072t8u5+UEBL3mvnu5p1i1cwldTuwXGlA5yYtPRdt7tlP497sB9dHVA6IiBNHOjjFU02tVARqSN7y1fmR39cveXUiBmNjGIF8cumkfjUyKLZ5xYdgQRNSirhEnOZN3dKCZKFMm0ISJygRSIJIgWM4uhMYuiAEVz+TyNpEDRhOiE1AjF4mg0noDiHZJrbWUSzzwlE41EpuQBtWiE4BzF7pmbNem0iQmdTLlNOfezYAtNa+THvdeYpCBPbe1x76KZujY4Q0IWnToDCCROODdjz0KEoKRIsTxxaZ+eLRxrZ0KaJ2mF6EE1hUz3e3XObuqW/aG4paIZDRupYs9z2I4LZZoDu8g7hZAlLOxRGCB1vQ0PqdMbFP5eXMn3aJaREiDZ4YKR1NFvDbsGinXXn7pyIst4cvjfc12b354AzjUOdwaDgaOy0vSHEDKg6O8BaM+yQJw0aM0EULVQZy6TQI3GEAxZZ06qet5FU3ghKDkK3FlpDg4OQlqXrYWrKVhvsxAhJ9oaZ9HBOYXYyJHTxi0dwkI64skob2rrHKtf1obsKBlMNaVkcAjqohKBOKoacaQIdX3AjEInAgQekKc8ap6RwdAooEYKMmEzPcEIdYRcSCVmKdanFoybd/38fN/RsJ5j6Kn6cG4yHId+r5+5ze0hz88577txOt0O3V7BoFmIiAyAgBqAQdPJtuvM9YQseVsPcuIsz0dkxOW+ErOEfG/j6cYIDWzNge7BbncH6qLdu/POmM1cBczs7qM6cUomWaoPoDDxDPSGIblcFi0QmzKDTcXApNyYMlxy66IVPY6T0hEbOQYsizD2EW6+PzepQx008dJpigAfnwoJu
GNkpDVZZLZizsE8NYW3OA1kpgS2bNrY0ZCDGqP8KO4Ra0t1YRpMQdAokgh6CKYWKoMawkAijHXKDRSYHABRiQfWAgSVW1K89hbHaTwDYMoGgC1+C4h0PLrVm+t3stAT6p0aXd3tNJU/2H9m0F1+/PbOBrp9z2UeYLvIerm0a0wZJkZQExgYTZzuFp350gNKD+SiMtcdg6wptqM3kgb1tWeVmZsmW+nt32n7tQIyJ+y/WZPBNqD5sM5nMhc2M9SPSowIAFsVyajwtVNSGCE2JAmlF8kQTZJ4g3fsAwiEvrNJ6DsSR2xgI4khWgNttKMxVJOmappgigTPOJEgiKBUBmkEc25lPhQOiIKaJFTKibbI56PA+fF71ib5DJFBIaj3yaRVUjIEZmtfzpS2bjuYUViCAphsqcljjO0NB5JykCFqikDcqfR5B2TS0DrZVCGAHLmoB/tZv7dQZLST9R7PNrNOYDMpop467XZublq39POQOK13KOsUnN6a6ASIL7IA9XivKPudTOKDZ0BBAGLGVWYgrXG1IVMXmqy7uuvuOgPoPX5k27pZTcaZJgUJI5t2jnzfo6icESTWXs3NTcDRQCALgZAl/kiQIoU1m5B5AsiiOYSqdGLINKElSViRIYYmGAUnWlWTUIcEelWCzsqTATNiRFWhSP67f0v+tgMInqYBEXCRRYyykXH36KV0rPlRLhBRKSMyoBXKZSUjDiowUzIqNAJEPhUoK8gkKvmcsqIrINMAEDFr654gPRSOdkg+jpsHCyaelIhJ93Z3ljMOu3vlnNU9Gg7NpJl0an/+dLV1ayMfiHkptR7vuizrAFAnJKZi3CbBDOwxrfZ9p9fN+UT1wd2NOhLUSORwXpkt5huTuakiVqVfe2O2xx8Rz8y8pRPODAHQojsVWDHSFJ4CdxJ7XIoJChQE4QjYpGRYp5yCxRSKGtIG8ygKKWBKEpuaCRSsrruYUoeSG8umII5sppwJCFGjiJGLVR3BGtQsc830hF2kIHJg5uqn3/ytn3MAWeOynXzpViRPYpq5Eclx+jfWcDJz1yg0nLJEWhkDKeqPWa+o74SgCvFqZDFqhCcYl5nkuTNEZSNprVLccwROrMwaZuSlzSugboj3h53DnO1w7gyNgqvfXlwQr9ljF8a39qup9DOVMlbT0V428N6pcioBSYGA5COaVtNt3+8O8oRcuEezwjcArB8jiCTy4a0FhcqkL2t5PNqujmftvR4ebmEHZMJ1dsQrQCec4FkszIibplDznemMWTaGDCBJOjJEAHuFIFYCgIJkWg1I+JiggsgUngDNZAhP5hqpvHmq6qY2g1vQMZppJFArs+xIxBW8+Yvf6VJpaTYfZXU7jXJHRpQflUJSCBB/NAEmoXUJmkhGCoslqUY4gyoReyYNgV1UUjIO7J0Ze+dzZ5FYkcGIqPJ8LA180iTT46igeX5CVSS0VnSI4oc3euVin3lvulBMSZqrNwenVroWek+9Gm4edrOlUojzWFfX/Nx8bkYcyRC5xS1biv1S2N4quv1u7u6pOmw+GxJI531kMAULlz9QalQEnu/fzvjEOr+fLWkEHZRTwgTGbVLyTkeUxCJFI60cVwWA0kVLefpGAcBZcpotqjcLMLVI0aI6NywKYRLVpGJhMGUW1CpFE513GjLzJXHjXMiMLIalxUk1rNixACxgx8K+X9Zh7GBmZMhD7LuNyABhEpCJti9IoUZHEkLfDDXSDmAhYSGiuVmYG0QgBwQQnJmRGQk7MRLnhdRYUiyMYO7OzjjxK9GRnd3nyOJaq93PQ6P4+mDa8THPJigZfGa0t7XRX1hbDoUL5XNn3t7ey7udiG4xzQ8PirJfqOKIqXcWhzEisbi/K525fi44GTtrfwx2CdaSBojWeOvzS0/M56S1fSA/GGs6p+yuL518IzZo1h2LlXF0fPE7fchEzigW/LjjDJ18SAokkC2IYaamxBobc+TAZBpNgxZN7Hk5ujWrGQkJoiLn6NjEOMbkbRLMjJrQLHVGUrF4Z0ZC5LxnlxfPfWrepSQKsUcoVzYimxy8JJT7gJbeoVLLKRGGGtBAkoMYzVTBbP5oIXCrDwkSUTOKSkIZGYHA5NCG6YgTe+ax43SS4JeZjMwcQiwQARYjMkIjj5UBc08PO7wrNHXsPBDn+/sHw73r/cXzQh0sLU23b27tFF1vK16b+mC3KDslqc1CsJawmUamDNb9/azT7xdOjwKUyQW1MnEtRIHBhZhfvdbJlx5/+vwgPvXYzvb27l5jd/Ic3zmN2zKQ+V2DZ659Wv0zGMgxMMgAWAbEaQ/G/VFbNxgiTNgcNJqpQadehIUbhcIsr4pcZieJEqkpMbEa+ayhwlLGnQSxiIUyAtmUqENWEAqfgouuYOHHv/eFhcbNmOxMqO6u3Q4Cnva1mzoCACrjbLZ/ktXGOchg0RhEiswFUysoigEaoxlzyCWkRSfsuYnkRIQAVhjI1GB3MrqrHsUwBaTvjuLicj9nxBrMBoPrF+UBehhke8NOmefTZkrz0LGiz4gY7l5Z71CgIKfPTLavb1o/h3N5t6n2d4teN0GW0iHcMh5wSprF/QPpDvqFHFnKyoB6VxGbGUGJ2BS5bO5c/LdLFx4/s7a8HKb7e1u7o4ruR1ORshPWz6Yw9rXgjtBDy8poAJGKGZlOu2TaKcet9m5khhkLjCMUQjX5jMlFhgUy6ziA22SfGXgGB2GnLKqA5SauCpKxqmWAU/YOkcWxAsQgN7/8oe9eiUN27dIIROzrcv12IM1cyGEzUfcJ+IQTMA3eAeAQNKUCeqxGLifVJmmDw1rrFjESwbEZcYtTbMFoR7vy0ZIxNQOYtdpW2M0au9fmzo60ISM2oJifL3b3u+wmN7KBA6KTjEFmmNbWHdM8DyOy3bdWuwj+zJnh5uaYYulJcm1GW1tZP/M+4mQFSXprgkAPh1TOd0o3Y7dhQ1ZOmWJi92FScJzmdWeyceuL3dXHLpxbOnO2rodbt/dGqnzPvLCCjIr+mI3y2iiVCMwQoYlfTBP2SwmgSV2Yue6k9YippZRFi1SSoLWV6QyZiuUZCxBTbXsESISsNrAjdoCRAyAKJkfmoGbOcpb+lD1bBCijpR96Zq4rI8kbp0QMS1SzWciXN9VppVc6HUYqzKqQFe1wMdWBGSCSuo7OYKKaKH4UUDXlpO8nKQNETGBGa/MTMUUOZKKpwkaAGe0nEcRsvHewFxcJTMy6tbMnaiRCYHe4H7rzbETzjZnCRJiMCS431U6jZR/RHbxULJ5eKSIVT2VXwzuuu5gXkXNMxpvcGXiJpDOKWWs95HQq6cGQyvlOz0eDAGTc2QdIY5MRlBulGDqjoCxWXb7Mc6cff3KtXF15ZrS9tbs/NeJ0YtMRJaCRsanNb8HMS3Tp6EsWkClEjUgsBfbJjHiaE1vvYAqTkNjlyMwFA0OkUlYNTUauIoqgUoRiMlIIShTYaSSAWNkHNnMRQiGKIxOA1EQ8uICm9U4+fvI3NFqz08gubU8gqJpDU5zaAiJt7C70+4PSIYaJ8QmfMFDhIxmakOJWekRF0zotKepJ
TSRK4klHaAAiahMGs1VIR/8CxP3d3YNgkgGk6sxAASF6ABRkOnKNK9P2CGph5uw5L8iqpomxcWRuYXL9evfUqeXcPNHaC1u3d2W+JOuUsd7Z7uR95yWmWPCJOgmCEXMcDrnTm2tRpegoEWBqnSoQR6Y+LHAgsCPs7rzcWbnw1Nmlxbkz1XRn8/CgiSyzHH+rX0GAdrMpm7nKt7FgTLP08+j/yhFmoGknA3ynAhlC7lKeJCFZxWcTIsSGPAExeJc5RzTjdCZwguIyyMWEuiQmiQ2LKLlkEYgQoagNnoyN+s+P1YtFARygbVZczAS1LNfgiMnYsrzXG8xZY05miRXUAVk6tGOb3GaA0ieOzTpz0URDVJA5iTYLghIsppRWk/rLYBHkGkBfqxuAE2ugaUymTDCIM5g98ZqN3vU9Avs2jBqZlC2IywsNNfu6FppfPhiN3ro4v3yWyZe9c/XutY093+2aZmUvm2xy0S9mznbaqNvfJJiwjUcbRX/QzUFWkCmRejJmY41UBzYJqaiKWJprV77YX3/ssfW5weKF5nB3b2cc0qiTpAqQxqIreiMG0hkAgEyVk9o8KUCiKgYzI43TPBJ1D9FMaWldLOkqKCkU7GslBCISjo360rGwG0ZO+03KRKZgl0WN5GAeiOTZiIg1ErFzRhQFLC4SNU8/1gQqKgA49vcBJnMWRMDBSEjizg45Z7Aqc6QKA1UxeYHUVGmjYiFFiosl/isAYEc2Q31n1AITjw59A2nDCfMIBqrtXSBWkLQfAlAGGeBBgGSAHJx54ZVQbzylCLUwk0EIxtw0BhIpSFVqYNLtl/kw7G5dGvScTYHltXrz1uYt33WLZFlsJkNX9HKnd5eUW6LoYBuPbhWLvV5e9vcYSLVSBgNCnQdH7SluYBHa3/56ufT0MxdOFWjq6f6trd1xYG7JwEjMW5i7DTLvQ96CCTttL1h7UwBEYpAw6pOhkxtTZ61oQ4uUWDchonGWhQtU5p6ViIiJYGwgSUpsaUNzTGbEjQozBWaDg1HmApG0COrB8vd0Ak0pa+zEBIAJgdRRIxpbOncBYvC898WiO98vfaZSmWQEAHVMAxh9Ev9KFWht7F2ZVGeWKSgVR5Ee+S6g0htAXsPwYPdw7NfaQgkiR8Z2oH0yVvbaOGMF7LXzT1wKrCSsoEAZFBSdC23q33sXDiPMxpQ3Ji6MS5LNbkcarK3Vmxu3rENeQJ4m40Mp+q4TAx+77kaskaBCznR6i/PeIBVsGwKbQsmKvG6KVsXUCJFMJUe4fvULi+eeeWqtWw7OVKOdjc1hFZNtQ4ChV9ZMwbT1GS0KJahHovFJd1ZStmbSIaZ+5fuFGXMC8bKxGpk5RTQ0zgkqyYkpCiQSE6CR1TgYGxknu8rIuRjYiOAAApMRRRAxQ/kjS+Uz63Hq3WTqKIKOJkAKiTEcVcyBXFtQRDDTUO3ecHl30O+NwB0yA6JpBCnAMI2TOUlgonaAU05PjYwsaiu6Ry1tT4p2mCHu7O4MgzkSVWnMiLUeY95QHO66wpHzVRBlqJm+OyiNVbh0RJbkXmMUUaiBNSixROkt7VexY1E6DM3efXtt7VSBSKfPTW5fDjvNYqna7YZ6NPT9jsw0DtLLxxRTM5gQ7W/19woHM3DjSTSGLGbFSAmiUIhpqvmDscPO5lc6q08+fX65KObOT0c7W9vDxlKGzDqLtxSNRJ2l5GbhW2qjmymSg8NMhz019PYaLnS/6otZUrMEp9LXqILaM8a+IKXWeiVTS0rVybm0RBRDzA3BJeowJcCJC0zRMp370KefQsX1FB5jl9OJCQCKDiDA2QHnw1iyJVSCQknMdDy8yZ4Rxh32ZgZnKoam7kIxGZAlsjIApJSRkqoCZB42i+y1+Fxtg4q8eTXkzIgty7lNxlWQPhS+rweRXNmdEsNBieCG6IuKk0qQOyRYLomaparbxjFJfmZuOJpmwzp0BMimFy8trK4tZVpn67dpf2skc33vmMtQ7e9lZTdzsc0SKNIuaqnaLVQDrgsD02hy2pgRNZpwlaWeT9nG9B6RhVFfuvSL8+effGy9HAzWdLKze3t3HFkIPL+tWsheXcwC3IoEQbfjCCIInjCpsihZbzuMd4cfliatFWMiI65NlNVZYKCbQpkz8EQrSiQwJo3GIDbiqEYMJUEwYnIuRM7XTj1VrMzvbs5V5F0DaRpxSkcxWbEE8AVlWQO3Ecs8zxiIkgDfRAxEzeIrWbc/X4wqFoC5KswouqR4ZNKokhI8DEgPJC0OUNkcxVmFdGREDcqUjnujeDiq1XV82WMEUupT04wyCoB4qihrck+qRPUoQ+3KQmIkMjiDBTgigptiyr1+GA73qqr23mSxiMPdd+bX1hYE2bT82HMXb9ziXteDfLeqdjfzuW6GGMER5NstH9Y46nfITUFGIvun2IwVNg35OEv5v/b0a+PLASZesbPx5XL97LOPLWe97pn3jfY2N/erQGVescVsam0AkNq0eeuAiBpYDS6ajTMBOnvN6BAQKEvCz4JFycyFKTlT5dxgRpHY2moro2iOTJkaI7BBOCrYkTIYAoiQLj45v/j4auBs3x92srFk08h57djIhcSYE2lWfW1W+pCdGY/3FS4vumw004qBCMGarRtiFHtGAKIL1qZwiCy2/AAemAUSZwhqm3E2HGXEQDAFpcQqhrpQdAqnExXkHa0nzgcvSlbODX7wW17OVnwFGNDkFOtDdJZyVrAnEDkmQmOQKsJkfm55OjwcR1EMtByGnZ0359fPFKOs6nzL+w9uXNngXuHUF9k+7+wVnfL/z9Z/BFmSbulh4HfO+X93vyrihkqdWTJLPy1ad0MDBAgQJACCK2ywHhszLmbBJW02sxuzGRsb4xqjOKQZjTNDUIDoRqPRr183+unSlVqHjrjCxf+fc2bh90ZmNRBW9qpeWGSGX/ffj/wEM5lF1iQrhT0nsBdLI3IbWjO24MssEmlu63H+eo99UT+5B7Hu7hf/evP1d968OpTR6JotDvaPD0fzSFSmFPsBz2or8bWRsAPmxaB0diurWRg1ShQcALlzUIhQZpFMICV2zyRYiW7QarcBCjBHcOtrQwTq290+Prz/va1BzClbLAbWbJZNKUqBzD2FldMuQ0E9EsO4kCxDQlcvFu3Au0WMArhB3M2ZPXgbAhHgqaedjlafR9FPqEgdK4k6XoOw4OZgN+onSaJEbmJZm9lkD7g1KnOan870Q/FFuzUapHkYeAbKv/UPP/pnP91aVHAj4bnRstFBU/veZcOsGBJBzahgZ0uFw1BVm93hE+HOOFYpBj05+Go7xJxNJx++f/zsybM4GnDYKDm1pydhXBWCnFT6F1RhPq8qNgE8xmZTDcjRwGUvwU0C6MtFMsENThmABD/96Z8Pr755+/p4EKebby3m9371/HxBTEGp/+hw5R4Wum4NPSEMRr2YAg8XiFlZjKxvQjzDODYdxDQVLuaZAsgoOJzQr2QFTqaAOIORHMxsznAHBy6+9Z2YSu3qUVNXC0I9qE08c6kODkAWoouCeJ0PclIbbG2j7rw9hktVxhgAJzd
S6GqUT+YBvdAytFfC5ot706v7MFRf3qqLnaz11cVi0aKY7MCpXLyYzZccpsH8Wv0sbU0H6ZmT2/DDm1/+9//7MU4w7Eakp0vNm3t50ZTnO0KnNI1RACI1WlHPYUC5/cJ5XNQ5eIfhOC8XG/H87q2BJ2xdev/4yeOzYTWEV2XOy2MuhiVM2Ne5FeYsSRzAxvwyCDD446fbG6PIqwBGF58JgIPWpBdh13t3/nDz2juvX9ukyWj3+4tn9x89OMhgJuvHdxe3uWdsqVRjMW5nU4FPTmtwBvU/627OahSKJgXJmimYUR/4/GUvuwZX9k1FL6vUMwKYnMqP3mplGAJn9S6On+kkjJclvItiRQq5V1Dh9XaqD3GBkukLGwyZc3WrW+a0dCnCcKIeoFEvEOgU4J7bobn2CCQFe18DqNuK9rcKdgyzXkMiIzpocZp4fGU0iMmgd04Qqzdq/w6d0d5rD//V+XG5MxgVzsuPv/df/flGLqv2vCGI5/GITnUySJkcQ3RZS4KIGjv5RcIRybx7/WDRtc2yHQyHBtjPvrh8c6f0RJcvfbj/7HwJHgq4zDmdWTUMKr1eF4AtQkwDADo4boQodJmG3WJjGaKwXLibv/q/osar0xfIT45+VV15891bO9HC9MN0/Oje3YO5IcBb7bWXnMjc3NWrcXRpD553H44MYVwz194fZjFTlkzuUlJnpKLqXWDpEfu0wm2KUD9bZ5CBehCaQxTsTvH2O3U3IGWmzJbj1lEsiqIj9m7gFvpcY/04dg147EcARCeHGO0AVI5ZU9fV5zTVmVVaqbkweiaBORABJmNfgTw4WN+gMJzdbY1M5XU/0BNDU3V1o0B9ei5vE/G1S5d2tv70caHsnW5FefvwaR4nAA+7fYOdsufEVmyMbY6NUcpx4MTcBTfKIsxKnOZjW53iGDrpRmVe1O1slpooZAHz2Z3plZtbUbPfvPzz5ujhcLpZsVNRpXTi5biAEWDEWJRFYwA05nZkEFYebMqIDdk5EuPV4Ra58yuqBUTmHLS7f+f3t15/942rE+dLe9/p9u/efXzQgI3IGMYgKNykGhD72aMT08MROY1OgQQy6vcFSkSeyVqvGtecuixZfDW/X50kQd+TWKT+b+3lwljcGPzh1sEuOCAL51K7wCMkdidzzUElLMbsYDD8a2IDEHh5ZdzMmA5mcVyEsjJqLUlanuXjUMVh38YKK2AR0NUmiA0IbG4gJ3DgV4R9Vkuh1T7QLkt7UtfLVq8Swm9dDu7IXV86MPJr39r/xd3OUD38Yqydm5K1luIghZ1hSEVZscOJmcXI1N0Nvly7tDmXS24z4tZmMz0/qXOKXHZS0enRF9NbVzdYOVjxzq07L3yyIcbVIOf6RSyHY7HMyHMuXcVJPS5HBJCxj+aLKbzytKCCiXqE+UUov1Diwsp4zJlxtP/ngytvv3dzu1C+9dpfWTz+4v6jkyyRtNcEVFSD4FI/fZpCpJMbQW04niOteUEe1BgpF6QZRTJNpL2byL/DezcEYvTyW70cpRPgfHWaUzGwJM7cEDWh2i9icFVW6bpCwpxHbkzGBOjKyK/vVMRo+I1fnBGNlsckoSgKYdjWtOmWy+PdjX5hTADIGHAjs5UeHfecIZA5yWoLKuRCYMoMliJEBx2eWqvl6Na1SQPelARI7hxGcIpdRzfnX4Jgi5++XZTuXhVuwl7FfDwqWBdDhsVCQ4CwqzJAna2EYIxLp06DOwaDza3FclErkUo1sub42fDSjcub49MyfPD+7NHDAxqN2biQjvPp6WAoTLRLJCmqIw5mu8ZC6jSuj9uP6IUWrSFZCP3KdrWRWb88q4bJwW4gEUr37vz+9NZb71wbmpbvvp+PH31172COyJzNiorA3dMnDVeg0J3tiNN4yZ0rw6EcLGZTzUyhawKH7GZdlPhSe2z1293FGc5ZrNfA7Z38RAfVcoKu8iaaajjdyobJi2Hp7XKalOd0LeQzG+GlY8crE3sh3a8X0t74/vHs/PRkPmffGymq0tHNVqqZnfh6uYqLisicEzncnMzIYSuMmwOWPERYWmAbfLYoBm/e3hrE+V2CgshILEWHgysFMgTwUH7xt1+/XyKOli0R88LLomuaJQ0+2iAyxAhjo8IUaLqiRzHAS1DOsa85J2NtHh9RbLLFotpoF0+eji5XYt35cPzRBydPnuz7cBLyuGLtmhmXg6IMKJKRgzafGwicLAxKNL/1/qdfPlqGKmdrGaGXAQO9ijRYZ/i+CXNw9KMXfz688uY7r22z+talH9T79+8+OlgkmjCTHj4+lyEARD/fEreqqFMWUK+6RtQLCBZeL2MF5OwoBKtOtD995KvN6iuPoY9OxFTPi2U9Gu02oW22l0303WF1NAjCs2FSPaKQuPYBnNeyDavyXszBjjlIh5PKOTWLe5+EgPS8iMUgXlIzY3ItGK4Oo+zCSyd37w1onKgSYCBKpRMxmFkNkWxZN10eO/mtNx+cvHuzzZrQ62UQGGBmOEXP7owE4er5r65/NQppUSeA67Mwp2rQFaU7ZHnAYTpgHgT3ZaKuiQ5ASbgEWxqsD3OY1sccawecilhZ19wZ7oZ2uZ07TPc+Onr0Yj+MRkGtrLxulxaGE1IA0CHakokpcRzOv/lB+Pav28//8HkMagmdR2HvHzdWzNT1KeiX5P1x5BLd3a/+YOf1N966PkRLN2/91uzp3S8O5h2aZ0eyzeQdOdAkMgqjeZsLh5C6G3MmuDqCoCOylDzGFfllBQoH3Blixm4s6nB2mLE7w+t2jrZZSLykVHfTRbXYCsXibM+powJ63gVvuaHRK+kM/TwZThRyZkoD7UA03JrjNe/Gt0/O5yYy2uzRKuIOXyHFfTgUKAcP7jJdoRXYchGhBjOShNlCLQESCX759jPtkjO9rD3WAHSOBELlBNCwW54XJXMuoxNtgU2T5+k0iFu52aVjFIPNSUVtS3Y25D4oehRD+zKe2YCxcn4zF4mDSXKhP392cxq1od3L+eDh6ZJDELWy9K6bnXZb2wryIPMKQcmc4+ivlS3s+n9S/l+kylxh3lAbV0gAvDLc8b4+7kNBXxw6B+wf/On48jtv39qWlEcfvv83zh7e++lPzgZjqFvBMPfZLnnYPK3bHoGhLiCQIAuZxK4uyJsRItZhDqCLc0fEsAtdZVoNgczbOYVrj7/ySVgMBo0NZntDSq0o5yhVdSdQorLm0UvuVx9fQCBWoTqPxi4ATM80Ntj+Vrs4PX7xpKW+FJae1dmXAk7q2WHVAHqaATeN77kfL9gBUiXYySJKUbCWALlaZnEwr+eqHMUBN+ISmShkAvBbX22VdwasTQkID5QNWVVPaLyRIJtEXDezcmdqgGoz9l4CIcYW6eIAwINo6aphTZcLTCTzn362dfPaRrSOLl179is/PNzcHosijtr2Id9UIkM52+tl4QI+/LAj6Fze2P6Yfyc/jlIUmjNQhD599npQK2lIcvRYqIuVoxDqO18U26+/99alUrNufue7f+PxV5/ef9ESCIqMOgXXMKiXGwCcydhJrUqawKGFNfBs/dZGSVbiWD
2a0SG6jgggI6YVXzk1m082t88fXN08E+lmG0UVmuJkU7rCOy+7EH3pgzOfXFAYjQDpjw8Rl1rsTzfVQD43NioRtrdvP//xvC+2rbQ1oBPA/oyzE+Ure94974fLYweWS+GeQwoMqwixrr8rbBeK/76OAAonNWJTWFU/ewN+9vf+f4+3Brkr40DhseDMwd08z/dE973sbm3TYvnsdBzIbVGt5DlF4N1LkgGKYhE4r7XFQICblF375On40o2rY9J2EHzwRvn4xWQS2WOcjgElS+VCGRAzH/1OUQvl7JffuDf5+90fPEbBHJBzm4oLfXp3gNdNor8yMnIHnIX98OAn4+s337u1xTlX7374d46efvH5w5M2cGHWBnKfPj69rmRErJnZNBQpuYiAmgzvqG/vCHQxknK6UNsgEnViZ3YiRmY62918fiXycnd7fnVsjbYlt14XBjc6OAkj0aUM5jLKK28TuNNqa8lAId3xp+9uOpHWkYkGZO7erFy73MjdfU3uy70Og0c2L51SAkUYSiYhFttg8DC5mrqlvnP4GiwD8N5E3hxiGTb+O18kkh8Nhvc2tSkqMmUJzMIEQs4worxc+p3BZMw3rXPKXVhMSZ3IuJxT8/Jvd6nmgVccAHIwzB0sJtzefbBx5fqlIY/Ows0fHj98dJRG40Cjj9LzAEdl9ZDAbt3Oay0RYbkxfPOP/+al8++9+ezOKQvFoFprGWhNPwGROZmsSvFXPl8fIoRnn/zqf9154523rkRvdOvy95oX9z/78skC6EbkXhWP3iFbs+Q4EUfvVD2YNZbLQXxF2YjWwzta7eLdpIegX2Dd/UhSXdGDeFNnRWy8KzePSiss86C58ssQwqReWjXnYj3BJekhLauLDnb+6bubDu3Gkr0wJqcu90vUHunQV0sOrdjUnVgcFJ2goCjJiblXWHS1LpO6uYPhciEQoBcSrgZhcpKCHfBv3fwSHv4XHjQyjG5mDHFEB9ClIrLxQMm65mzMy6tlJj6x4Z3RpcIUoQTV+YKM4VwZSwJceTW5NSJawItqTMvPvtq6eqOw4vS0ev/92dP7Jz6Uk10N5i5ej5wNebA83egMpN3wzevf1nBUvzb5N/u0iRAKXQ46C4HXe1525z5Bc7+yBajHzDDMDRTsxdMfjW6++86NTc5K11/7vdOnX3x652Q5DMabz053skPZkztbciu8VWXmrFkGq4p/jSfAGoLRoyTWWyLOJEqB3M9jTLaz9XxyvtibaairpHf3tGypCINxqJAZNfN8VGJ1nauV9eqzCPn55+9taddWGaHASh6GALj22LX4Eu3p6D+wdX1x2kuUGYRSvwrXi8D4MkHjoo8kXwU2VgXQxpDBM4ksvK6vCCDWNmzAM1EMkFJ9oYfP9665hfrs+EfXbl6fGEVQznH1lpBT6Vy07vpSvRNSWFvlWkbR0vnxF9uVNyotjd57+/zp44NuzE5uMljs9hTB5umbHUBm+bWtGuXO3SPsHj0a/YM/OWAZBHbriHrIooO+psy3Zqat4o8SObFg+cmn5d6b7765W1qj5e33/87xvTuPn811cPpwZ30vzV1jnaLBlTjlPChW8hsXH4MAQcbq1hHgFNSNAOeoSjZHt+NXD+ud+5tNW3WdjR9Piky5q9oc6gIYxzmFjorVttKc+qbMiCgYi59/9t7ucTtWxADAKPeEfvKADAJIV9YQvclYAcseLAOrgwL4ai6gcCJ2EmfvMbZfSwEBCriC2LID8qINVAxIeiGZrJGYBd7mwaDX/4oMQEBN6J5fikSTVMmjpxuX37hcGbdtaT2Fn9UHgkipCJ7XNScQor2utc7DYLiRl3VJ+PKNScgNbWy/e1qm4ATC4MTJnbPiwW8EA/nD93d2H7+eth/lYjzZ/J3fix+fnogzJObcEkvoCcEr1GvfE3xtSOBwhhEoIj999KPx9Xfeub5BVmNr5wfLJ4/uPG4+ubarSuBVLR6ThxZu8JYHL1GtBCcoi7oT6Uq5zskYTmYMF7gpawrzPG4zPqiebCzZwHF4PC2jL2SUw7Juo8RqXsrCh6/mlNVEixxCfP7xN7tcKEIBgLxDzwh0ApHmwpzcSbP12gsBTqHfVDBArnDBykGDACZH5GxYiY28DGVgrBTWCyggZ7+6RC5UupJbL6Qn0CbHjbCaKodeyiYzMTTSs/sflUWOyy/uXdqNqh3revvgVZmKFeJhVeK4+k6+yed1WjZdUVVGLl98eunGjRGsw7anakkgLbUtQYQcns3HhrD/r9+cvvEZdHy1XgzDW78xv7oMB/f3TUCRoZqoEPY13Mz74I+vT2su8HPgiPPjjweXbr/zxm6hrfHb7/7O/NEvW267KMYpSuysaJRBnoBud+BrOvaFyMB6uu7wVfLxfhLEZmJWsMfWy3D2WbVM3dMoZbV3J9RXOyxdAlnhnpmXkRoa9BNBwlpeUXsfSo/Lj2MoOsQCAGclgivLxeYXMCfPqjB2MzOSfjHHRNY7TJsxO1ZWBryiyfP61K4OQM82coWIucuDs8vEZtpjCQI7iJvEG5EzgWHiJO6KVgXUpYH4l/TNkEQqfXp2BZ6i2UqfgWLsAps5O1wZTmRCmx9t+uJ8tqxrXXIQQlzefTC5fPPyCMnjZM7EWnJdgplczg+nivCLXx5O3/43x7t//Efvb79254PtVDU6fW+2v9gSYS/sOHLHcZWiHeTrsX2/Nnftl/gXYp6EIN3du3+wdfPdN69PvKsx+sZ36vPDg7N5Le5e1qmITWQDWHVT3JhX1iCA+sodxFzUyFmSBQuJQoLDvSuDsWRQc0QjXrTzgu4dvT/b3Bid1y6UOUDIDVnQVaFDkF5SGt4zj3r6igv7LJdlQ1EMoJTA7IAlkdZ7P3cnqILUlSiqoaDkDid3y849Ncp6BjGBdM2VWjfJfVvITD0Mzgt0FmePXGPuKTWBhE2JPI2qHlvR4w9ATp0zKGk2i+XpJ9unCkKESbxzfmMseXWKgwUmhrLLEW+SA1SkKvj2NJ2fLRdtl4WBnSbNTx5Mrt64VMjkOQCncr7lIOrK/Oi2huc/Xdx963pxsP0Hd7vystTmG9PD2jdPn538p6NfLhUWipyURV6VIKS1exaBYeR9LQiA3dyVAtvhi58Mrrz13hvTmBvj6fbt9ujw8Nl5PShrL+cOBVh5vFKksYtXj/pVrJk7iTupRidzs2CprwzZQbbwLGLnXo0+GV4+Hh7F42tnqGJgwHOyUCxml4PacAVt6Lcq1DuckZN4WZpaRQCoTcTsMNOin3L1xZsZ9zsKJncmN88g8wvVqr4YJoD/Ii5/XSsRI4N1vV/7/IgMXAiBhWGuiRD3egd2dicDApMtUSUo6XLbIgbPFmWHDMkaZPnJnb0blyKZAzw4CtLT146eFcWA4E42HytQXNppZqfLpjEwhm2EnR9/Pr36+jAYgbxagABJ5g8blz/dpy/+0vbVJ/mO1X7Hnp1PaOtYaOtgvPW3UjNbHE8ixZBhKo6iL+XopTiAryiyq0NPK8wsOyDS3vni97dff+edK2NLHeTqNZ0fPtqfz/JEXFo4dBLXIvsv7
5wTwxRGRJxBK3EwdzYntlI9Ziciy/VWTajr88/KcrTYg9PDWzvB2VQzS2rSs70B2oKdnNiJzbgv8eCAUKIz7dWiOCmgLsSIAFYuEAztedh+QTUzcSFXh7o4gbmXCRWCi/QEYfK+fQaJk7t4cidzFjE+/CIHB5MREUEtL6cOoczusmKNgrmuJZrk1MIBYYTHN8hArim4hPzs6fTK1U1xlZFwqNWCcRyUvRcAo+HelLIa7i7nx/tUNOIcaZi604OPdyYGwKrTthC4p/BivvXizwhfne+++y9+Zj5j8OzxR/nK4ZmMi/J3N8/G9e7O/GkXLJCbmroI+yrxrb+cBD24fNW2YQWmcwMVfvDsT6bX3r19czOkzmkyfbs+e3J3fnRWOGJHG71vWR8wYUQKsPQRgXupbiIXU/G1InQERIUgVHeuxufFi08vl3J4uw3lLARy1cwBdcbCrhSmgzUCdP0sV+c2ZIrek1Y0r3AX9jWV0BWVoDeSB9x7iIGu2LHMvX0JrWW1VoS5df4nUA9LIAcV0OYXbZUNQCB31aQyLmz1SfuA4nCfL4alcm5TtMZMOFluCgQ3TwMntcgnJ/d2rl4eUEEo5zmL2GSY1+uH2RqmqTTZ3JnVoRouukTlxD2fH4XCyFHSsiAiWDh/tvfjg8qfP917+795xG2rXqQn3/Dh5RMZja5+rxvbTy6PRnGRYsgU2LMmjsJrgdQLMOxFPLwot9ffMaGI87NPy9233nvjcmFdcrl05dvz/bsPHx97Fu7zivqF5a1jxTTzVU7twa0qSdldXMOFX4i32VCw2f5Ed8/mW7qxXQXKmlU4J0dYPL9UoR3CzUC6Op2razZiBYzJYd0KeKXKxiuMs2K1G4UT+UqS0SFQA4jFGdLvSJmceaWDQs5fE3NmgvBK9PbTZyHDzIjNUkth1IvVkbCLOwjszpSGtIBlip5JtIiLDsu2JDgU7qxKAfriYLR3Y0BeoA+NXDiJu5s0iXyt+6nVxgLxynLW5NygnGzWYIa6lMutDBdDd3D0Z+I8+/KjS7eeJGpTofSwKfK40jCtS5fX0+EIZbF/tn0zdxC2owIdhYL+Qivo4rrqE18N5atHCyZ9+uSPNl975/aNEefOw3jjnXT88M5nh7ktwWTIFHz1fMiB3iIWTp4phmzkWkoOnEQAIknuQaEdiBaIXNfbh2F/pzyKNwOZZhl0Syc4zTcqteWAWNeadithcwecy9P0YLSXmLoe3u8e2R0+ZMAhF5swWh9NAnq+4Gpf1CtgrI3tff0zr3wx9ZKmjrA4XC+8UlavKiGYARQT+v2hwR2ic6ZA6s6xTsBkfxmDLZui5OQE7lebTPOzh1uQKCs4n1MfP6jpORurl3Jokm/y6el5k9u2Hg68AwEYLB1EpNkfnx0M3HEnjd/4s46RReTo6GaajE59evfe95vr/5E2xUY9Pm3/8fN/qQmWQ2WqNSReRABavxxrtPR6gryKiz3nGtFPj386uHr79pvTyrtEtHfth4tHdx8+XzQS3S766osKigHuraM5SZ8pJPWnAs7kMI7ZTYM7PZhcOpkvr4RPh8FMKXrrbAbd3UgEt4GEFYDrQmzfQcCkmX/y/uWcOgdIHD4htlU7I7DMvWVc70tp6kQubkAP3kIvGy6rRAUD/TtGnAyFc6eI0mYYpUxEDRWluDtpdoBF+pMWtBurh4oteSxz3XqKGC4xHefc1a3wLhsyCxE5U8DRhgj14jnamwk6UVoO7WId7UMiW1wf7i1Pz5dNTqc9ZNIGp10Ek+b45axw5PDg5MrtcWvWlvD6yU0NGydxc3T/+xhd3T8r9p41wx+8Mfx5q3W7SUQBlloV4rUGqnHfAhF6ladXv/qxpRtlJ0G6+2Wxe/Wjd66MOOXExTsf5uMnjx7un6KKYn1cAbOpwwMAU2Y2R5KezEE5uEOdqFA3ln4/5tx9fuZ0aWt8/jSklIvQdHDzPLoMN3jSCa2Bzm4vFX09jJvlJ3Y598LUIFYygzF6ALOTkTr3KjA9NJXZYeiVyfscxb5mCYD8L2ourxO7QSiRaw9NG7EQaZFJCCQBoWcq26HsUBikFCccsgVjkFsak0kQtbRQJjZH6EFmkByI4Cb9KoVhZpQXey/FfnxcZsygMt3olsfLtGxV3N0iLTedCJ107kbEL+5fvrF7TNYayB/9wPnGU5rsPJgNMf35r956d/to6zd188pXcTSik8QOLszVKDIDWMsL9b/W/sIr4Ot9EuAGDsH3X/xicO29d27uhpTrhja3v9EcPLn/fJ6od4UMDFMNIJh1TvBgnphSBWLpLDhLMo4Ks0jJKZfkmPmNo6MuDI5CcolWOxyIV0JH3nmZdMprkNPFth9O8GjLX/FWl/vGjwKv5/raC0GTwRGluxBC6k3aXzouEjl6Y3tbh/yvHwCjULuhU8vEqgZIqTC+Wu63kQGOIGID64uD684cBhTIwJHLWVvlUE1SHUeh8I4Aoq5icyEHxEk4tu7wLD2C2y3S/JUkRIPBGc8zPKOI4zybLw8TK8CynBpIzIlgJOiOfevW3Y5zKoyfnU+6XDeT3cdP3u9G5Runn4/t9lspvHO+WHoVamZxYig0OYWCfHUv1+f9L94CAN5LZ1O/2wmov/ii2H3zvTeubGiXWpIbr/36bP/xw/06gYQZmoVBsKSQEBxuEpSIiKQTNiZziHcA9ZgRYVHf/Oqdcn4aEkrMMhkhXR8mZD/qplXnU+6Zv4RV6raVaJA3v3hN10wwdXDGSq5IDXABZSE37XmAWNuFmRMLwaxP5QQwc+DeQPECEeRCBhYDPz3jYL30tYNscHOrmSVhZee+uNSnp5ulkgVhAqt5XC5bxu6392vyszgoKssF83wxKgOMnLOLc1WvoGz9K0jgV1bG5GF4yuddv6UMYbjdyGMwFMPGnSh0GgUE951rdfn2j7qcU0A8fbER//DulaPDfP8DH9w+odPzoeZKL12a5aYbp7P9rREHcRfJy6qT2PP6BACMuKcaf+0lQL/vdQdTL9QT2J8//qPpjfdu35pI0s64evOt+mT/4ZMDNSeXXmS9jSKBs7rnqBrhCMlhkUVVjGBF8KgU2aH15vHh9+QgWOC6ZYfpznaGU+06y1V7vnEBPF7NK/qL45Dnn5UAVm4gfT/oqiu5cQJWTtH9CV9BDFcfBmtmPH1tH/jKO0j94OTsS+E2UcEEIvLJzbG2qZIL+0JOD+tpkTUgOoiJctl5ml1F9etnn91rks5iGbqhl6N6viyqcqVWQUVP6vZ1eFJu2vLi/jtNnBfNRi+XaJk28VzJyQbnSUBkuWA4SL+30RRvTpaKdgBKD98//PHB05lsPGqIxi+KjS3/4vH7zZj2OdNgPvSHt//Rzx4RBF1ThqyQyGtENQiEV/uD1Q33FaAIgBPIFBTo/Be/GOy99d6bl0rPmoh3r3yjef7s6f5pTZEJlnuKMzkoF2tG54r4RWKBESKjVmHPOJ7Qz27dfhpC4TW5w4srgs6XRYmB1ZyXw1VbZxb64t2ciFWINUk2Ba9LRCEnNVnbolC8qE6dxeFE0hPmV4QaElo1lWQ9
tX2lcaSQHlxA9+feUskhqbuUW9cmqT4vYhHIEZI70v12q1A3YiFmeBFJR79+ZUFJr1+hF80saYNNcqlAua3DaMBsBpJS1sTq/omjbaqXSxVMRHS+uTpkjlwOzghOhXYjZ2Mnlky6+4Mu643L+0QJYniov3xU5Cu32weHV9M0mtMWvvoQ8W23k+VdHe3qD/+SnCO1WkQuolmuOfRQQgYgZl/zFSRzkJjzxVWu1qmESPnxoz+c3nrv7Rsb1GVTim+8mc+f339wlCBmbMbsDpLssYO5uLiTROXA5GJcNCSmwQCcpOf//UdVCLZ0KNivDVSopu3zQUHWkvOgXwmv5/X9+01u0TvJ7tx35e7BnchTWYbcj+d6LshLEsi6W6WLvf8rp/2VZUDfj5E7HT0AFebeQ8rfuZRmJwveoJwYAAz61C6TcQQoRnIQYhfHf7n5uVg2tvm3Bp89T3kBsugUgqX5vBxFM4mT/lb2pa0zKC+3bNWCk9soZp2vMxLB4+BEHE6XvBf1ScQu9r29utONNz9VzZ047z/4ifp3r2F27941HYzO2cebDxbBNiouZfMXNtz9fn1zMhtOdD5lcwiZJUhkpvVm7+v74osdyRr8SX3j4EZO4ieHfz6+dPuDm5ejJm+ZNnc+aA8eP3q6v5QgQbQ/4LHL0ZwCEbjMiuBmIBYQUiBXOgo3tpe7QRatgZH2RimYt9edPTJr7ZQm1Fsu93JE63yePKBpq35FD8relAxXD1U8O+pYQeK6NhEnt14ohOAkbr6GkFqv1r0eNLI5jPqOHooHc9YEiBOxYTg/nCVJiyQsPfzyqJ24cYDDmZ0gDLFyWbB7AsYdf+vt5/fuukGgkQpn166JkzGEzTNMnCCuBGcseOUvw2RWDI7l1C86bJfNZyAn3zAjUGhMQvbt72bOWt0eNKxd0bXy/31YpNbzYPTJd5g3z4Cwd+fZG+1kckS29d1Pmu+81u2+fiel6dXj54AbAqtZa1KSOwmxvjoQ9JV9Vg8sX40HpR+yOqsTRXT37/3+9M333r469qwpEV+7/v3z53cfvVDXXggSLCZBLUIimJ1BobNEbAHelC7u7+1t5XnQ5IDr6CrYbb49OBcLzCXPYLoRjIO6X/SCq3lAYAU54WC7UsIi9nIwKC9P90+ciQm8EpMnOIiN2ATrLdbXZ38AgXoBPwKI1Fm0NpAQE8VooBeHT2vhTEVVjQtyw9H5iI36+RfIEQIFSZYHIVNHNOa6rd5649sPzgAcD8sQ2a3I+WRYUqjdjCX0I2WD42m1N3RdfTQeH4VFW9jFpW3GTHBSjavShkP97as1d6q3tk4dOp81w0ct2ZPXIdsvZtu9R+8WvnrT48ZxkeMZ+DqseOvuOJ83G4cLomgwEvGca/RayCDCq7WgE7nTyqv1wl8Ca4yRA8J+fPBno+u333t9o9JkCp5sfVAfPX3y/KSzzFVGmRCMAoSco7ORsKZoiOqqQSGP6s8/SGGu7G7haoBiVm4tsWedsUmxNLEt7u3i7JVeGQ6LGwoHshjYY4TxblINXr2+/XymAu8NJIliUJgR9wiFVSZxk1XhR0CvaexM/X9If2CUWbRpmUtzH0+/ffpjZ23aejoix/5B5WAKMBCoeJ2/SKXmcV1vMVlLNKLGPdt082dnVMU6xSoSo1Alozjz0txUmEI/c6k/e7i7t1GquTPRJqOZ715oiPuwSETuJCpEoBzZd36oTNZVl64/ztQUtc0v15lPu+DbD55cnv3rm6XaZHyvEZ+kBTenk/p5ct1+NLy8kU8lzp9PtkERICG4ZpQMwoUp7cssQLReILwMDuvqhWCgguovPvmfdt5+/63LpVvyDL5y/bvzoyf3H507YcAk6lqSp8DcK/Zm8khSswc0NJ/gyUZIcHO9NXbiWXutXUwCNa11XUJd4WQaVoM8vRiYszTt2DrATKJrr07lUiqWobCN0ckTmKmTWp/DNIP6QywEX82YHX5hE+PsDnU2h7Novy6A5qb6tYPBwKi4u/zu1qSNIR8vkjsfPauKwAw2dCMqP/jhd3//F+3Gex/+H2cxZO6cR7HuCmPj6Dy8erbokEMlyBEAdfPBoIA6kwUiJRAt7z+d7uyOYHDaCJZn2xchyqrhHE4OgQpxMGL/9uXEQPLRGz9tKE9uLmCSZHZ4XcfVVz+498vJBMabL/av6+Qk7owmP8OL40tp63JetNMRpxG/6P7J/i/ZQUJWizTMYeVtsn7SvC6oVzP3l8GyJ531tqDZWUSfPvuj6c3b797aQM6eEg1ef/PXzh89ffJCRUPhqtFyhmeQ+6ApFIiNuSN0OK4GO4HcoTu7IKTzS8WJFeZl0No9NT5qT6fSY41WhOTenze4EhnlvpIid4K5mFJdOV8aUb6gytGKcI+VstFq/m+JVvPw1ceiVc7wNYY2J6rG7fjoxU3xsngw2/juD376J36mLHpAIAYHsEyAyP/L8j/9u91wcHwSpWq4MyrLtiOAuXTo3m/ff3DSqsZBlTQFDDWfxrISVwUTC2BgPTiodi7tRLOqaHB2Ee7IeeMFyMkTx/5KbfsHAEB1qt4eL5HacZko1ty+uO5h89Hpn88Pb7jkMj241ZXvnoZ0v77+7M61evDRY3+0XxQsW/mtv3S832qdmb2lnQxtheJaSbafDV0A/+jrxeGqLFhxEGBAEJ/98hfl1dvvvTUtLBkSaGv3O8sXz54+aDoUaqLKIQlzFrbQRRotzM2L3Kn9IpibDq8A7scbo3o+ciGDSyGwhkfpZCMYCznMVQgAQ0WIglPoiH2NB6VFjGFgmQINYD10tJeI6nGfTmuAjCu7MDkMtJoWraZeAIjNxVyTDAj2leVAlG+8tV1uXnrr1/7Kpz9+YoAwclFoEi52z+DXlv81/a2qLX+xv4tgnJTLQdM54KiIkLauf/PBved19qpkFpOBpzSbV1Vkg4fgMIYTmifPxlf2hoPRIsyVX84+puKAI/YzbmQJIyeANOfr0+PESZk9kseTRNg6+FdfxheJT7H95P6vsV8/t7QzLZ7f+/Uqf1B3LKl1TE5+4IPrX22jPl+GUeeCoNkzBeF+cb5epf17eiYTKElfk/UyfYC5BLL7X/2LvTfevX15bFmRE4UbN/PJ0cNHx8vMITA6E+JUpCJ5V5TOgTPqxflZAJyuVO7huNxtjoO0BZyjOkIIabGhZxsRF5pYAEx6lWuCZ+4JaAQAI07MhbSNhE424snMQLTijfSWUhdZf+0w8cpEHKuTEMRAoFAEgmVXg3L+5q8vtD37bPPW+3/9f/gjpiKx50ExaRqcMfStb/yf/9urPyD8vG69cE4ai+G8JoBQBgm5LgffeHf/i0ez3AwGRoVnLsq6a8KoiG5OqxaLmGazh9MrBWPeDtYX69goG4YDKQQnIo3zZzsKwJNeeu2eSc4lWZQkJ8tN2yz+9SKevhjqRrH95HQnT0Z1GV1HT/ev6N72omqI3cKNb/jw7S9Pq63ds1Or2AEqM3lOFAMudoKrwusvNIjr2/a12sDNwKUf7P94fP327Vub0dQd7tt773ZHzx8/O20DR6PeKyEwcrk
4mp+eLZsmWQDsytQhi+UtnLablJW8y0Qy9BSb+RiLSVzxdvsnFcQ9AwRP3Hcm7IAX6IgIVdG2iwlVV8ZdRzDhlVJEn7x4/WI5s/VQoF53aQ1pBqkThAXuXTal0o33HxpiYfPudPvbP/JAHMnbyT/55v/pZ3Ulaf7B7/63//zWzsExNxphbiSD3AuvV6SUW04tXbtycv/L+dlgCA4wUETOcy9HAiZTcjCUBWn/kAhpOTYmqAdXK0Ytw5CjOxGzkj390AnwZjx860dNsjrCEJO3B9s2nJyK5ccfbBLvPrl3yeqzYcupGD+5fyPL9UebC4GRvnulCW++cdg8HW3szB43gXuOJZumxEWP9mQAznhJvcf6Gz012AgrtDHWTGQIo/n8s8H2m++9tTs0NUARr177znL/0ZN9bUIZLEXw7OxoNpunrgf/B+j0shHS4V446LZIhSjNWnglpaRiSZtYjvki/gBENvMCDMBijxd0AKS9OrzRIIo3kTbeff6sEyaY9TMjgrnpyjpD1zAhrCeeq3O/yhcs2hhHRgDzw7A7roJG2KyJLb712bwkPf/nv/WPv3K4zfDr//Lul9VXXnSpMsndmEaWAIdXnBkNiF19Z2/5yFSMB7LsxJximbqTsDu0XrGKBEb9ktqfTmO/sQZ7sXHYC/bCic0t0aMkRqDO7I3RjNBMCChb6WrKg81n5DipRG0av/pO+YtPf31GIW2++OLX2XbPtp85iDh3sR1eer6TFgfldLPOLOIMZQmak1IkXqs/uL9SKq2Cg4NovarzdU/Q/7CSk+Snj/5447Xb79wcwbOLuQ/fems5e/7gydnsfFmfn5wvk/caHnBFyOU1VsjhxsZ8NqnURdp5JqNmUAQRsm5AyyqQA8qr9zj0Ji+rjZ8VIHcU2XtpOg+TNmWW4rVLz170vsErZVQ21378Y74W18C6BujdonhV/jChRACIyOXkRxtbm1vT0cZkk4i6fzL9r34RQvrV//V/8/3PnWl+ojtPTw/vDmPKgeAdMJI6s8NDzMx1HzXVi+WHl++yLDd26jorG/FA625g1otj9CoWIHJ+cHbl8oTEjNh5o1+MU3YGs7s8P90xgK0Lly/vk+RckBecqx/GOm+EmNDWEV5uPznZ+snxsouRt8bP9i+najwdLphRvHjyjsUr/2I22fT2QAd2dLq9kSgCLqzmHShEWj9tEOnXKkGspH/7AnE9KyCAjNzIKUQ7+/nPxpdu335ju/TsbIbi0uWPFodf/uGXD09RcmHWr+hcEHBpYCRnNE0nRaFMlJpOjTDYaw1caO1DNANa0yrh8IBe893cQebMCvLjUU8YJwKVRaNGMnhjJ/V8dRX0+kv9eXYCX5jR9hHNSdQB6q0QCk8sa/EDYLl85mHntUvXJ0tC/NHv/O/+9L87nNT/8qO/9NAI8/rcc7O/HIa6K4XQOAbeGBO5FIuAdtW/ytTbvccd6cPJ1k5edK5KRQgkWNgYpMxOvSiE6PzOw63dvRHc3TdkpQzsMHA2Wjy/nAjmi0R7QpwMplIs3v7gPtJotHBZ7G93HnafHS4e4vDSEW3T1r17VxyD8XQBAzVf3JbFcOdgcTzZ3GnbdrT/qPzr5ZeZxCHiaNvYSgi0mqo7r4ZEF/QC84vj0bcOq5G6c09/dRa09+78/vbNd96+OrHUm08O33zrrx5+/m//7Zf7KpF6ARBCuDRV5+XsurzIm04Oa2tVF9lWsoScoc5WD4L3XYcTJ8DYiFhLAO69m0AMdbGunAyVtxmgLU29fdgqRr1saL82C+wHQqt/nEh3Lv9cGYA5ZSFRlery5Wk3o+tFbdP9n7/zX/zXP60W/+w//w/+J3O9tMzBjj3EZZqIeUc+4jpHkIXSII32K0SahLmXjb++8/mTwd5W1+ZWnQOBaLksRwIiw0p8hcWPjh7u7G1VqtVgJuTwzNEBzqU//ghwR2OnMmrV6yiOov3hcEjCeyfB/cVtQLfix10rR1d2mYrdB3e/T1TW208d8HB3VvmN95+Xdno82N14HrdOhn9z9785WcxX4tx1SVZzjESOl4HyFV0ofzmZXY2K+lSwTgmmxCL54PmfjW+89c4bU07ZgyfDpet/9ezBJ3/+87sLD71hXNgVwI63wlm3WQBAWmbzwHuxU4O7y5TqMlHR71PVKDMpIOzugfs5qYOE12MrOMSNh13uxEioKhbCTmBZy4WDLAH2NRdm0gwwKCQEdvug+Yz7o2Rk2k4++OblMmo/etr9B/f/dOMfHD/l/f/Xf37nY2AjStQFlePzOsaGFF5VOY3NmYfs1KUeamzVsLZSQ37r1rN7L8LGhOtlA1fQSLr2VMqh9NWXEymDqHn0dHTl0mi4eSYA+TJVTI6s9riDEmCLT08rhnRODnntvbRRdrwRYycni6FZtfWrealP3xzrMm5Onp7u6PTOaOdFcPDBgw/Ibg7MwfWDGDA+/eZlurm8cf58oURMqhVlTR0Hlh5CvdoJrW+WvEQSObDmcTH56tUiZTcwld589slg951339gr0ClJTije++Y/PPn0Jz/9eL+RKByElA4GG/X5kDQ4dNmpBWxXrabkCWErnCoFzZFXjSmtpUDzymqof/D2ymvtIHgRNCdtNydvHR5r72ew2n31Re7qYn0tMXLRaRLB7RvLB4WDxQBQGQ/+ZK+oqkjWSj7cH//lmfz9/8cZfvI/bmtYzJthlxU0sCUFC42jKlMvQVGSI6VhJjj5aDhvRoSmLm689vFPFjzZ3Cw7AzlRWWpbL6tBNCeCuMEBEp+d3d+6URGcjfJi24mILByc7CWQhUefN2XRshqI+Afjthy0vjmoW56fjI1kax+G02ajqavh9r17l7rR3r96xzOY8mfvk9648iAaMTthOPpm0b75y6fT283xSYtqo2QXdtcOIfKaq/VyGv+yAHvZNNI6KVyUjKQwIHh+8uyPNm7cvn1zLG4Gb1se/ebvzp9++qc/vXOuwbOc0l4+E3Fy0qZRsE83Ou+yZ/dxPFdexoEpBVcmDbJoNvt+1MXJ1IUIRMFBLv21KrETmGNckOd4bXo0M3EA/Q4JRuLE3DeQxOzO6mI93MrJya1675kSrcgksV7Yl8wepRiyL7jzKf3O9J89Kv6H10qt53OZkoKG1qIAWgtSzlp3dq8AyV0fP1EMzpph4LYuE2/45ObBw/HmZm4NDCMZe9vWPKpSEDPz3mKXJT9/IQIy8vFxWxiINdYHV5ITUVhkKohyrmC0CYTJDHHrlIhf3BDwVpFrbvcvc2Dee3jvO9R1n/MkkXLx8HTiW7fv9yFcBbfeS3759a/2B5OrmydnzQTO5gzxLmcvgjiclS9M6npQ2UUYcF6hnFdjFpd+Uc9wQAkifnLw0+HVN964NI5RgiiU+a33/v7Jlz/5cSBezq7xoQ41RrVcG8gG213ujNR9sjlPnrsmTU2JYHC22Lsqwlj6wANyF1ao9q4FuPDVLCgqiEfV8uTcwD2Zx3thsxXFae2ip31tGFx7UbGBqMPYtZebBcyxTHxlzGePpkMIffM/+f3Pjo6HWp+lQjgB0RoqXFQljo9bNg
fKkDk1qwwaJk/Pb7hrvS0+DN0b33/64EU7HrZGVVIDlVWqZ/NyMGAyIxAsuHGhvVuwB6orA5FbfvQRkZNOq46qmLmtIunDbwIb+1m2nrZqHjJsY9JF9efvdhLzZPzwaO/wsyuzoTuBT+9+eym3/7glp2wsvrflXr7+JDb7g2p7vDir2RlGRJGQui4EYedXV8Yrcqj1SdUvEAQ93tWIYCvdY3cyI0Tq7n5ZjEcb1WAYy3JQlaWUg9ff/UfBu8PtwXE94RgJtmzNqdpNmjsk9XKy7CQlTbVPOQkbemUwI4DsJc4ftKBB//t7buAqEKlUltRYJqPzg77GIvS8vjUUYpU3wnrS7dbnE9YQ6rBKMZmAnPPGD3afHUn7RKuU6vm1f/pn/7dmSIvHvEVGwJA7VIDlkodoQA4vigSuV9MGnnAdRLFg9tHo/Gzr2tXZw7snI85F2Wp2cplgXi+3J4zELoEUMO/JTPBQLbZBTObypGVy2ODSUSyi4trWM8jjZamDoqPNqnb7wc7jwnh6HJMcPrkEltHWnfs3v3xyieuBOzx9+e0qX7n6VQEkHaicHtyyfK2ebYktrKio7VhiVicGQjDtPPRWfC9BQr0258taesWzWQkT8NqTYI28c4cELM6fOSEGFKAgVeRQhXQy2VqeDbyI5Na0BuYdypZMk/BONyNPqprOS4quZG5E7hbgpNRr3rgTaQgGgvH6161BThQkJXXG1rCuSQlO1DvvGMmKF9ADB3tHLXFjuKbIoG8dfyUsnAmk2RyDid22F6GdPzraG5ex2P0n/n/nIC++bN8C4ENpcqXk7RCVLhWAh6DkjfdWOjySRooajcPjZLZA9sk333/4pUlXjiilFtl4OMzugDFM4T2DgHr9rMFc+30Y75/vZsCLvY9DCE7f37v76Pzk4DUtJjMaTc716ofdaULYCJE4/TwuR6/bqdz54c+1HmsSco2Pj3bb4XtfwTxGd6o/ec3z1s3fP9zd5bbLXGWcNNMhMrszSNwyPAYW0zWQejVNN19VYNxDbXtDShj+Peir/oVTUaVEIIcZIZzbTjopSwSx3NbJxbeKzrusCbSDuWjqslq5mWooibohr2sMCeRkzkpAlAQxjfz1GsXhVBa5S+wy1ux2sRboydF9GLj4vz3hsS8Qg03e2vx0bnCLJAJ4efonf74zTPbfpVsf3biyGcvyb//ZcWh9tggGeFGkLjpSxzymNkcnl8LEF9qzk30kCx8ueankNMECxKb8+rNjwflgMNQypc4IMThk1g0rMpD1mBYnkA2PU+hfssWTywmgvFMoDc5e/+aDa1cff/noDfeRgKf7/tH0ZC7sozKL+flVOTqNkV/82VeUEtUjcg9nX11t8zuTBZzglvHl75QZH/1iWNXFMKAtG6+eHE32thcrFj2VmjVxCGu0LdZV9EVnwCsk+aoc+PeAzfv02/NQiQTg4M7tZZxiRFUw+LJjyRvjhKRmIlOcwHPOSXkUfH6illxhCxUnwF1WeFYGxNXV1dePVdYBoLe2H5aezFGORmwACVFPz/HevvPlH3GG99pxIlB+97f3OmfOKjHGmFFJZ6Tt6NaA2nT4yaf+H0ftwnZVOoAqpLYM8A5WhaafkAwkUme9/JlVg5xHRnUCwgbVmZzIfeTgUk6Oragmo1LYOyeOPDs6y9RrHjuJGyzSkpzYlfypGkB5PLEwoF/bfwp6a/e+CsaF0U6x9408ODoXaHYCYpc2JgG0+Oc1axMimanSnS7Y7q3Ugz4sHN4vON/6aFwVdvjwiU0HNtzlk+k//e1NdhIhplAVkpvlUkl66BQUgSDcf1ByYmFav0tEK39CopVm1Po/fe3w3XWp02y8M553gxwiwRZdhk52FF2yBJuO5g7PORtPB5rnyxZuXaLhCjGVxd0d66mlsa+FAHwdAXrlWBjKYbT6xOPurSnM/RV9uPUcu//D3BtXuYOjKvLu772pxLlts6pzWLFclz/98y/v/z//yz+k6bdHGp5UuwUIXhS5i9HRQUehSQARSiFq29UuoyrbdujctAxMQtMwQJCRw0Z/+Td2Z8daTjY3C4jBebxVdUeHdXbV7D1PKpZzWUlwPerY4SjHieytt/74kbhd2j8WFBuSTv3qlRTeQXP3T5cSXfKSegPdxgwaTEHi9PQgGH8Q+00ePH2sQPVW4af3vnxy/5NHHdFmNfrW9R9++M0rhfWll9PcInXLWp16Ebje1qt/0foem1bUa+/XtN7/RD+iX331J6In52XNYeP8fMxUkGM5z6TlTqacTckmwzOnlHOXZTxQXUq11AHFRZqoEaDIFUCm4iD3ZRAHovQmGLgoT6xXzXIeJq3UvbwyPpmr6wVYuMc+mjnATiDLIZoZB1YF2ej6EVCkBCLp+x0wFs3pvbn+R38zZBEt7k935gW7x7JpAoNaRhFzPQY7lUaUV4qxHgeHs02mtNyCDQdtswmH0wY7191rN04fPno23ojDRVYQK5Vl1xzxYNz78DgBw6WBXcz5+PRS5wraYLc3H385TWyT5umedvvH9dal0/Ntv/oHZx0VGpMjG8BmFEMi02DsTrz44orbpbKBkWfjeHd/N+O1P3o6TyhKnEllZRneb0ZIby73TxbEBGtsMWbLbUdFJFtrw9BF799jSV5+m301J+xLQl9/n4Dcr2eMnNNZEYph4dTNknG8Zq0m1eSDySJBc9dlmYxymvmQxDqlKs1MiXzlFgNlwAhSmLv8uxiGlfAb3GVz05SMNq9fH4Xi4qK99xjup1hu6/jmVLgRkRMbsVRsqelUNQNOPMWJfy/+4u5XP14gUzx5YcaIo9xKBHXmRZUyE4iH4uL16lpk7IuKyZeADQZp4WBiGxUgXUCn3/7L3y32D3lva+BUwDQXo9EwHR3W2rvv6bDL/WabZ8/EwYwRWXz8B/n8PNh4cM+1oqNtp/OnIY8GNcMoCPSNq8ZwNy4IlkIfm/PnifnRkphSa+NIs88C8tb4uQ82BuxsmcLo1lWr3jx4mK/fvjVycxkXA6cgRYFmsczkzFhLLVxUBSt8BpHjFUT/q3s30Kp77CkkfKIlBc7ezJIH7AWPKSeluJMzacpJeTy0fJaHIsKmTuNu4URO6kJ9fAG1uRJfyRB8vebgNbTJyeNGkTUrb968UlzY8/REzTVfUno/JVVa414YsDKmohQmcjUGuD47OZx8Mx58+T//rxLgHYeFqsnAOimdWvUw0v6lqYqLA0COIS9DZTJ3eDnKCyJidglKNifSrnr7r/z21v5zTAQehiUryuHWGGf7C3Ui8mgtO5Fl4FEmYkJZOM9OQ3dMFi49WaIYzO785AXumg/eAplzCIi/+5ECxKCCQIlJiSyX+0+l++WcuqWNpuNpQR8vxOVbW6MCcFNLoMEHhekbe+l433ZuXhoWItMR4CJcFMHautULsAh9/Z73j/yCmO+2Fiha/YTZamvgQMhjEcqus9qD745aadqkiLtd7dp1qcNwTDq38UqFW80nz2gPcPLVHMggLxavbzIRKa3mOn03st5Wr//NkbukgXk66ro+ZvXxSt0yERwkSAxyhZP2D
tIgn3z3iycIDnVkBaNpFmHxh1c8VDvnzt3TXc/TF1e99CUVoE4LGVrjcHgIrt70w2uizbjEcEkLY8iQu952oxwtiJY9EJUuX/+3H5/HyEzjHHNnLZVFSnUZDOQSZhvqxKr0uBUnR4xLjuXS9t8NuTg5uK4bc4WEZ4vSrhd9I2pX394ve9R5FGV7PrteKcyWn77x6CuboxoOigGW/OzLbzXp9s2HQZnYLCt4z5SHbxwizQjjYb1IwNp4zQs3XXIUwRpF+eo7Tug52GuRQrz8EertuS9+mgdFZLDWTTbfnCbUbc7gLWk059R2Ptj0NEsTIQkCjtTUx21B3vMagRXm7/STB23xyrqvzwXuL7kfAKCGaoimYyon5OsLxNpPE9Qf3744KJAAUCSQbvzW96cRyXKX1CGlo1y8eNydxy3JZIvTg8UXNOAxt7kCaSKqrHMCIIU71b7C+A2LTktI0zHRhGctAELYdMiiI4IzZZvq6IOJERfj0XA8CFp3NBj2/SpNZr3OGeTkROAcytKJhcLR5x//+F56LDYcu2Q/fR7TlZ0OcET+xmhvNzN5VikctDxawmHOX6U7Zyj2tjc3iqMH+/AvnHT8tkHMAEYSuZME/ro12qWurnm8Mco9TIVAEmJZet10aTVefzUIrCr/VWGNVV22Ph7u630BEcBVkAhtm0Q82s3uOSf3nbiAprZNVm2SLhabkUNgJ0Zu69n2lAjkzHBoP1Ao0p0vmldkP1es4pe/q/+uk8fJ0OvkLLE0YpD0UulYnReBMZk6KHh2Agubo6O3/9pf+f5b2+MhG4zz3Gftpder69OGAOZZe7p8dsiVdFYC1hGG1hoAl8qYu0wgJrIypG7k1PVtQN31QIEhi9Qt90lRx1V7/XaG7y+H08l4a6vEapkAWNV1AGBGy6fiBAy2xcDOy18+Pk94aF6MVYH6AdnopgMGbH0jD9709uTFsxkFmA7eet9TV8/4+PNP1Gmj1MN7j/abRI+OguPDsTqbwT1pfHJYULd76WA+r1sSuJdVzlk5ggVEJIMBp7ZtUq+A/2qe7y1laOUG98rBgL0MAP3UtgUsLec1pLpM7o1mzRtlo6pd21ncFK3nG6GncQfJS6eNCRNAuZc97NfAEsPys0csXy9J/mJJSEHEvZiUmprjPD74pOut5mC6Pq0rQW9HTySD9jsPTnLpg7/8XjfeImI/pTzYPv2ff//UssMYcnJ49IsXUlLOJUg7wjgsExGBhwTpUv8EvRp29QikS3IdFqvuwEfB0dWr1QTGk3Q0CW5jf3ZSbE4m042RrLTRrdCGAKZO9QnIgWJauAVRELmFF6fEYyKYPGiEXu8R99evZL519HzOY9EyAKGaz2ZdGCD/jw8C6qMXDx8fpmLkfHIveL75ete/CbBEiy/ITb5Vdc1itr8kJi+LKMcPzz2sJugcY8Ha1XmttbaOv6un/bXXD7TuFJ361TwRgQshT00G06XQpKZLhs3N2izlTr2YSrtcTobEzBxCyEsHYikEQAYGchv2yGBI1C9/OWe6+MVmq8x78fr7ahhBww1v2cweyxUIHB7cFSyACbKBReGrw8Ts5EqBNNOolV/bMp38Xmx9/7PmtaGfzciMRsPjB+NLVsa2LQBrCcPQdUzkHoU8NavKJEx8XjKsIWhZpnl/YYOoyPUablNNMBtWwPbf/MFw/4DKyXS6PeGevxvjnCGgbHhSM1yKARu4UlOF4/yJoCJ35aND0RtT0/ocy3nIN9+cXtocB5WCWHA+unZ5b7uk/SWhe/bsjLcGZOr+WUsWPhCA3IyQLHx+DuRb14hdDz794jBX40JlPL97/yCSihAThKkMaOZNMls1UP0b1dOx+vrv4pVUkIhQz9zqvxcCQZsms+0Vc7RdVh/uNdlyatocNqqclsPSIQJiNDV4rQLssQAADoQeXkQhNE92tsWwZoGtov6roakPSOZjrXMTiba6x5cDelVkJ1oLiGQwgmUQMdh6SVqCR7bh4Cjo5lv1w80PLm9vPrrfC+p0oaifjgdl143ZqYFFaTMAxoCctV0h0GmIuorqcyePw9Nlf6/KqgXm68kVbz8+DRv7xUl+7cbZpw+GmyOypmu6/nTMHU5EWhwdXDf3EAdz40C9O2p6+BEFN3OfPbrebl76DNmL86fbzfTDI7gaFUULlm1KmlpDEVomLapIEbAcHh5eTvre3mEAiB1Zi8N732509N6TYEGPj15sTreqpIOd/Tr/480/3FfqB4AUIF3OiMS9GKuv0QFErzaAwHpB0LcJPaOHKaX5UoW2xgvkrK2V222bU9u0WaZVas6ryikIEVM3B0NS4PXarh85AgYR9VjtbPlpfaFi2lf5fiGCQxdHgUBxo9Jl59p98fPnHty1NzqlACU3M6boCcTkImS5/zsC5yTw9l/tfjjaOKhHz59NBwDnFwdUD7coFNbF4N4B5UBrBxGVDMICfQ/DA1qgNF4YSMa+zL0wwsjIF7qeq41CnSbK7VLzzns4f/RoESdbvcY8DZtEYIby8oU4PI4uBUCC91YZT2oxA1G2+9nDLR1ML4fmvju9aVnGN256jJ43pqfzxfnJUUMDAFKJZjOzJMsvBLZ104gDOxGS2ycdyN/ZyHR+xqF+9sXHTwGMy3j9gw9/78OrQ3NiYoCkqIJ2i7rrE/xajKNvB149AGZmBl7JFBMTBfO2TvDB1pJTmzqSLWs1p7ZLmBS5Ow3DlaQnpy6Qhfpkp9d8NiG4rnDp4hxCOayiZo2V2Qq78GoXQK60EvhwdueyaloNUWafH43zGhHoIgoiyU6B1IhMhCivjB+K0GoBCvmPukn61rXnR7tNYkU86wr7xh44aiuFcmsug6YjI3jBGflCBmYjznU0wzwFow1a5sgwyDALLVK5UpXeLJp606k937JuMD6l8/Phxk41B8htQN3QAAL54++DxCd7jxJIErpgLkcnU9Zl6vKl/dON9M7rS3iKj+Zle/1m3grI0cTtG+/+rIOyhkpiWhNAYVn5sx8GX54w9W2bq/H951c17b71E91PDpCdHN64qoPx6Zvj9srTzXr/sDPTQO5MtBwOctfGYuVbwiuG4auivH3VZq7rKh0ORjtrGeXlDMuWDFuhtdzVTcLGWLvzYuwURCQUtjRyak54uHrDAzu8J+yThBhCWZRs5/d+9TDJyyAAXHSHvUaW9+WpkVeToquT48Vdgq1alT4HEICA3gySAe8XkC5RLcIhw9/85q/d0NkGF5rJeYD4994mj4NuGQpQZx6HXePkQFWae6OrzDgss41MUsfwcdG0TESwkTjVDff1iY8mdrZZuJ0SebnFUWjx/LMjQj9WmBMAcZPHSwbJ8PJVAxdMpm6oHxPr0ZmXfP405t2bTU4tHT6PefNtak/2Dw2RyndfG42KcjShRMF7aCQBZIlfPJH49IGQ9/MzzbL8JMD5vWJ/0b9HIict8bR4s6JNexyvvn5p0FOkyWaylFiI1ovW0CdT6sE5L+P/xULIXxrfcrtIbrwDpbZtjLbLhaaubTubbJjOw5hZAGJOc7CD8nAqyWktcSzuTnAGEUefP/n8
- ...(remainder of base64-encoded PNG data omitted; PIL rendering of the scipy.misc.ascent() test image)...",
-      "text/plain": [
-       "<PIL.Image.Image image mode=L size=512x512 at 0x...>"
-      ]
-     },
-     "execution_count": 12,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "img = scipy.misc.ascent()\n",
-    "pil_img = Image.fromarray(img.astype(np.uint8))\n",
-    "pil_img"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 24,
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "image/png":
"iVBORw0KGgoAAAANSUhEUgAAA6gAAADWCAYAAADcga8EAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvNQv5yAAAIABJREFUeJzsfXeUHNWV/ldVneNM9+SgCcpZQsJChp+NwQb7gA+LbYytNT7GGMwar0ECZCxrJYRBRgGEMDl4bRDCIqxkgg0I8FokgQBJo5xnlCbHzqnq90fvvXpd6pnpUTAK9Z0zZ2a6q7qq+t133w3fvU/SNA0GDBgwYMCAAQMGDBgwYMDAFw35i74BAwYMGDBgwIABAwYMGDBgADAcVAMGDBgwYMCAAQMGDBgwcIrAcFANGDBgwIABAwYMGDBgwMApAcNBNWDAgAEDBgwYMGDAgAEDpwQMB9WAAQMGDBgwYMCAAQMGDJwSMBxUAwYMGDBgwIABAwYMGDBwSuCkOKiSJH1TkqQdkiTtliTpjpNxDQMGDBgwYMCAAQMGDBgwcGZBOtH7oEqSpADYCeAbAA4CWAfgh5qmbT2hFzJgwIABAwYMGDBgwIABA2cUTkYG9UsAdmuatlfTtDiAvwC44iRcx4ABAwYMGDBgwIABAwYMnEE4GQ5qOYADwv8H/+81AwYMGDBgwIABAwYMGDBgoFeYvqgLS5J0A4Ab/u/fSaWlpcf9mdFo9Lg/AwAkSQJRnyVJAgBomsZ//6twsq9Hzyk+o6qq/JqmabDb7Sf1Hs4kNDY24kTI8emArq4uaJoGWZaRSqWQSqWgqiq/TzIlyzJkWebXbDYbACCZTEKSpKwyrp9/2coQJEmCqqp8viRJiEajSCaTkGUZdru91zmrl/eTCU3T4PF4Tuo1TgbOVFnu7OxEKpXKkA1ZlmG1WvmYgej6bGsFfYYe9Ll0XCQSYX1LkGUZNpttQOtOX/PleOXb7XYf1/lfNE51OY7H4wDS41xeXp51rNva2hCPx6GqKiwWC8LhMPLy8pCXl4fGxkakUql/9W0DyK6bRZtCb1voXztWWCyW4zr/dMWpLssizGYz8vPzEY1GM3QrkJaR3uTWZrMhGo3C6/XC6/UimUzi8OHDWfXpFwlRrun/45Hts0mmGxsbAaBN07TC/o49GQ7qIQCVwv8V//daBjRNewLAEwAgSZL285///LgvvH379uMSZBIyk8kETdOQSqWgKMoxfQYZ5frXczlflmU2vgci8Ppr9nWcLMsZ9yNJEpLJJLq6ugAAVqsV0WgUgUAAxcXFCIVCMJvNkCQJo0ePzvmezibceeedOBFyfCrjiSeeQFFREcxmM6LRKFKpFKxWKyKRCBKJBEwmEyRJQl5eHmRZZkcSAOx2OwYNGgSfz4e9e/eitbUVQNqIzs/Px6hRo9De3g7giCzTHEylUkfNh94CLJIkIRQKIRKJYNeuXdA0DRUVFWwAinJPfyuKwtdMJBJIJpNIJpN8HVEXiNekRVacszT/Refg61//+okchpOOM1GWV69ejc7OTmiahpaWFgDg4IqmaVAUBaqqoqioCKlUCmazGWPHjkUkEoEsyxxUAcBjTDJJ+pM+Q5QDvZzqdXsqlcoI4oRCIQQCAYTDYTQ2NvK92Ww2jBgxgmWVPoNkTZZlxGIxnnPiMSSL4vxJJpNIJBL8vqZpRznRF1988b9mcE4STnU5lmUZBQUF+OEPf4hEInFUwGLOnDmYPn067rvvPvzud7/DnDlzMGfOHDz44IOYNGkSLrnkEsyYMeOo80TkansM9FyTycQBQVHmxMAlydLxGO+92TVVVVXH9HmnK051WSb86Ec/4jWd9AuQlpfm5mYsWrQIEyZMQHV1NQ4fPoxp06Zh2bJl8Hg8CIVC+OUvf4n7778fl1xyCb72ta/hjjvSfVZPViBGL5ui/qP3RHmmtQAAyz4dq7erB4qamprjeJLTA3feeScANORy7MlwUNcBGCpJUg3SjukPAEw7Cdc5Cn0JRq5KWjQwSNhUVe3XUdULNf3Wv5bLZ9D1+jtHNIJzdU7pXsTsE70WDAZhtVqhaRoSiQSsViuSySRaWlrwla98BevWrQMALF++HN3d3bBYLOjq6oLdbscvfvGLnK9v4PTEypUrkUql0N7ezs4pAASDwYzATn5+Ps8Xv9+PlpYWRKNR2O12Pm/o0KFob29npd/d3Q1JkjBy5EhIkoSenh6Ew2HOeAHgRaK3uUhzFkhnflwuFwoLC/m9VCqFaDSK9evXQ1EUjBo1ChaLhR1g0cgzmUwwm808D2VZRiKR4B9xvokOAM15Mtbo9bfffvu0c1LPJLz22muIxWJQFIWDD+SYms1maJoGt9sNi8UCSZJgNpthMpmgqiry8vKwefNmhMNhAOkxLisrQ01NDQKBAOLxOCRJYjkSjRTS5dkySQRRljRNg9PphNPphKZpqK2tPWodIDkOBoNoa2vLMGro/smJJQOKnOtEIsH3RUwD+kzxvun3u+++CwC46KKLTtxgGACQziR8//vfR2lpKRKJBOs5sgMOHz4MAPD5fNA0DfF4HMlkEs899xw6OzsxYsQIyLKMH/zgB/jLX/4CILsRfzwGc2/sFU3TOCBDx5Aci/aTeE5vn9/be/ognx4NDWkb92xzVE91HD58GKWlpRmBuHg8jldeeQWtra245ppr8Pzzz+PGG2/E7NmzUV1dDVmWMXPmTMyaNQuff/45JEnCeeedh+XLl2PJkiWYOXMm67ETiWwyJtrg9L+oF0nuxeOz6fVcIDLAJElCfX09qqurj+OJziyc8BpUTdOSAH4J4E0A2wC8oGnalhN9nYFCjCbrQQYEGRgEOj6XLKpIFxQj2uL7fYHugYyEgQg6ndNfhFK8hv5YVVU52iUaNqqqwul04uOPP4bdbsfmzZsBpLNM7e3t/Dlr1qzB+++/j4cffjjn+zZw+uDdd99l55CMcqKdicZIQUEBFEWBoiioqqpCYWEhB300TUNTUxOA9HywWq0ZEfYtW7bwe263G8XFxRgxYgTGjh0Ln8/HDjAAdi7oeP2c0c8fcjJdLhemTp2KyZMnQ1EUbNmyBevXr8fmzZs5+2SxWPhaZDjSszscDrhcLqbk6BcYcZE62fRhA7lh2bJliMfj6O7uhqqq6OrqgsViyTBCTCYTy2MymURPTw9nJGVZxqhRozKczKamJpSUlGDw4MEYNWoURowYgfz8fNjtdp4LorzqkU1XZ3tNz3Khz3U6nSguLsbo0aMzdDaQzoxqmoa2tjZs2LABe/fuRUdHB0KhEBRFgd1uh9vthsPh4HskR0Ov/y0Wy1EUPQMnBqlUCqWlpSgoKMhwTlOpFP7xj39A0zREo1EsW7YMAPDggw9ClmXU19cjPz8f8+bNw/bt2zFhwoSMIMiJhj74TcFIMesvQmS89Ie+jsnGRMsGclQNfPFQFAUjRoyA2+3mgB6ts1u2bMHu3buZoRKPx6FpGj788ENmc8iyjLa2NgDAxo0b2SHNy8vLYHfoAyDHimyf0RuLkfSiyZ
SZ1zuWABBdNxsN3pDnIzgpNaiapv0NwN9Oxmf3BaKcZENf9AAyVHKl9IpGqOjUinSubBRa/flixF38jFzugQRcXCT0WVGCGOUkRzrbxCRHgt6naHsymUQkEoHVaoXZbAYAVj6pVAr5+floaGhALBZDMpnEk08+icLCQrS0tMDr9eLqq6/u93kMnLp49tlnOaNusVjQ09NzVP2n3W6Hw+Hg1ygDpSgKrFYrEokE4vE409hIxsTsJdF79VBVFWVlZXwtSZLQ0tKCUCiEYDB4lMz3la2i68qyDLPZjEmTJrHBpaoqOjo62CAnR5ucUVmW2SknyqWeMkm/9bXcgJFF/SJAlPT29nZEIhGEw+GMKLgkSTCZTGwAkQ4n/Xj48GG4XC5YrVbY7XZEIhF26t59911ceOGFPL4lJSVHZZMoe7Bnz54MmRCdYz01Ul/XRBAZPcARA97hcPAxJMtAmr1QUFCARCKBWCyGYDCIlpYWruki2R4xYgRsNhvXh1OWGUivqbFYDGvXrsV55513cgbpLMQPf/hDDpiJzmkoFMKKFSuwb98+vPnmm5AkCVu3boXf70cgEMD48eOxYcMGTJ8+HbNnz4bZbIbT6cRtt92GRYsWZQQDB4reMpYiG4TmjN6h1tssuTDA9BDlfaBUyYaGBiOT+gXjuuuuQ3d3NwKBAMugLMtYuHAhOjo6MGLECITDYdTX10NRFNx7770AgD179kBRFCxcuBCxWAzbtm2Dx+PBjh078Le//Q3f+MY3cMstt2DOnDkZ8tZXAFif1exNtomiTucARwLL+kQTXfd4GAkEPfNRn2Ay5DmNL6xJ0onG9u3bAWQqNtHx00NUhqIQA+jXUe3NORXf6wtijQYZxqJzmyvE+xd/ZwO9l+25JElCIBDgrAJNFpvNxvcnSRJTORsbGzMc4ubmZkiShFgsBpPJhGAwiGg0ClVV4Xa78eKLL0LTNDQ3N2P8+PFIJBKnfW3T2YJVq1YhFoshFotB0zSEQiEA4HpkAPB4PLBYLBxsIZo4USedTidisRgCgQB8Ph/C4TDcbjcKCgrQ1taGYDCYUc+UrdkSgTKlBQUFKCgoAJCee+FwGG1tbVBVlZ3WbEEh8X9Rhikjm5+fz8cQqyASicBsNqOjowOHDx+G1+tFVVUVzyX6bmgOm0wm/i7IoaVneueddwzZ/xeBKOlNTU1IpVJMcyUdR8ETt9uNVCqF8ePHIxQKYd++fXA6nUgmkxzlF+VJ09L9CTo6OjKCotkCITQPRowYkfUeNU1DJBLBvn37MuRRX9MsGk+i4SQaX+L9iSwZs9kMs9kMl8sFTdMwaNAgmM1mnm9Eee7u7kYoFOIIPj3z8OHDYbfb8cEHH+D8888/OYN1FqG9vR3Nzc0YMmRIxlipqoq77roLNpsNLpcLgUAAN954Ix577DHMmDEDs2bNwogRI1BXV4cnnngCiqIgPz8f//jHP3DuueeipKQEjY2N/Hlkm/RmU+h1bTYDPlsmSZ/ZHGifDiBT/9J1CH0F0ftCQ0MDFEVBRUXFgO/HwPHhuuuugyRJTEd/8cUX4fV6cemll8Lv9yMUCuGnP/0pZs2ahZUrV8JkMnGvh4aGBpSWluLQoUO49NJL8f777+OOO+7A3LlzMXXqVG4KNnr0aNTV1XEALVvpRDYnU6yFFmno9Dn65I4od8cr5yL0PofePtHDoLCfQQ6qvhmFPnIiNj4CjqZO6bOiuWIgtBq6Bjl95OCKUe++HFTRcMl233rQsf0pe6IyUv2SSM+RZRlutxuapmHbtm0AwPcqZgCAdP0TdSWkjoOBQICdW5PJhG3btiGVSrER6HK5kEwm0dbWhuuvvz7n79LAycWjjz4K4EhmnmiPyWSSs+hkIAHp8Xa5XOzQORwONDQ0YMiQISgtLUV7ezvL6Y4dOzBhwgSUlpaiu7ub5UhRFDQ1NaGoqIjvI5f5paoqbDYbGyY0T9rb2xEMBtHT0wPg6CCSaGiR3tAHnUR6o9/vh9frBQBs27YNoVAIyWQSNpsNI0eOZGowBbjE74rmqaZpWLNmDb7yla8MbEAMDAhvv/02ZFmG3+/HgQMHMsaF5MPn87EOtdvtSCaTGbTzQCAAr9eLffv2oba2FsOGDcPWrVs5EKdpGrq7u+F0OjOuLerl/uRXkiQ4HA6MHj2aM/tEIQ4Gg/D5fAiFQujq6uI1Qx/AEdk4esNH7zSTjIsOrslk4prrvLw8VFRUZLzf2tqKxsZGhMNhfPrpp7j55ptP0Cidnbj88ssxdOhQAEAgEIDVakVdXR2WL18Os9kMVVXx61//GrNmzeL65ieffBIA8Oqrr6KsrAytra0oLy/Hf//3f2Pbtm1oaGjArFmzcNNNNx2ViSS5EKncYtBMZI9RYE2UIVrvjyWI3hvEkgyRXg4cW1MnMVC0f/9+DBo06Ljv0UDusFgszI4ym80IBoPYt28fvvnNb+LnP/85fv3rX6O5uRlAeryLiorQ0tKC2267DUuWLMGwYcPQ2NiIwYMHY/Xq1QiHw0gmk7j55ptx9dVX4+abb8Z1112HW265JYNlKCaVxFI3kqHedgugY8UeEydKtkWnWbzH3hiV/eFsdlRPxj6oXwjELpxAZnZRdDh7i44MRDDJwNFHbXK5x2zRGpokud5DbwaI/hgyaPqLRKZSKd6Wg7pXUvansLAQ3d3d6Ojo4GcuLCxEeXk5SkpKkJeXx9+BJKVrlii7Fo/HEQgE2LGJx+MIBoOIx+OIxWJ8/YMHDyKZTOKFF17A66+/jpdeeombcxj41+PVV19FVVUVjxNthyFJEjtceXl57JwOGjQIX/rSl+B2u1nGyQiijKLJZGIHlgI0iUSCs6xAWpbXrl3bb2QxF2iaBr/fj6qqKowZMwZjxozhWlZyKES5peuIBpp+nsuyDIvFAovFguHDh2PixImYOnUqxo0bh66uLuzduxcbNmzAhx9+iE8++QRmsxkWiwVOp5NrHKmG5f333z/m8THQN5599lnU19dDVVV0dnYCQEZjC6/XC7/fz8cnEomMWtThw4dnGMxUu+l0Olk30nurV6/OSsMFcpddmh+igVRSUoIhQ4bA5/OhsrISY8eOxbhx4zBmzBiMHj0aFoslI4MqdpjsLXCZrfdAtnsUacik76urqzFq1ChMnjwZH374YU7PZeBoXH/99fD5fNxksLi4GBaLBatWrYLD4cC3v/1tpFIpLF++HADw5z//mbNM5eXlCIVCGDZsGGKxGIYPH46mpiace+65mDRpEgKBAG6++WbW0bTuZ7MBqPyCIB5LENlnoo4eKPTXFm0fUU71ZSP9QfwcvbNx8OBBHDp01OYRBk4CtmzZgp/97GesL1RVxY033gibzcbye80112Dp0qU8PrfeeisA8I4AEydOBJC2JVRVxYIFCxCPx9HR0YGRI0cyM+Db3/52VnuWgihi5lRRFJZzPatE/K13KLPhWBJRwJH5JNoTx2rTHDx48JjOO51xxjiol112GSup3hZd8TeQ6Wj2BVGY+2pG0JfgSZLElKpjKe7WG8u9gQxgvWOeDTRxu7q6OHoqG
ko2m40zvAcPHoQkSXA6nRn1BQ6HA8XFxSguLobf72c6Gd0vKQrKzFJUi+r99uzZwzS6trY21NfX81Y3L730Ep566iksX74cy5Ytw6uvvjrg781A7li2bBmefvppxGIxdHd3c2ZdpB3a7XYUFBSwnNhsNsTjcaRSKXi9Xq49FY1+TdMwcuRIqKrKtctEGy4rK2OKJXA07exYWsvrA0ci7ae0tBQ1NTVs8MuyjLy8vIytPihYROfpnWj6EaO4JSUlqK6uxpgxY3DeeedhypQpaGxsxJ49e/DBBx/ggw8+wNatWzmII9YNGjhxIEp6NBpFY2MjB8JE55QMeIvFwk2NVFVFQ0MDj3l5eTkcDgfC4TAkSUJ7ezsSiQSPm2gI6fX5QPU7NTXqT9Yp60VO9JgxYzB+/HiMHTsWEyZMwNixYzF8+HCOtOsDMHSv2ZxU/dqWjYImHrN27doBPaOBNA1SVVWUl5dD0zQsWLAA999/P2RZxnXXXQez2YwpU6YglUph69atKC0thaZpuO222yDLMmdH6+rqIEkSNm7cCFVV8eMf/xiPPvooli1bhtraWiSTyaMauQCZRjyNrWhAE/TvHW9WiQLl4rX0dX/0dy7X0rPNVFXNqIml95LJJOrq6rixo4ETj4MHD3KSYuvWrTyuiUQCV1xxBdrb27nOvaKigu3Hnp4exONxzJw5E4lEAo888gii0Sh++9vf4nvf+x6vsUuWLEEkEoGmaXjmmWeYeZStiy6Ao+ReL096PyBX2e6LpahHtpK9vsqW+oP4TPv37x/Quac7zhgH9ZJLLsHChQuZokUCLC76ZISKFIH+MqAkZLmk6fU0FT0G0sBAFHx9hKevrGku0XsxW0QOAzm29Ky0oLS2tvIESaVSKCoqQnd3N7q7u+Hz+TiLRs+en58Pn8+H4uJi5OfnIz8/n7NoRCGWJImNSJq4kUiEmywlEgkcOnSIG3uEw2GYzWa0t7fj5ZdfxltvvYUXX3yRuxsaOH68/fbb0DQNsVgMzc3NvEcpcKTelLp/aprGhr0sy+jo6ICqqnA4HGxAk2O7Y8cOrnMDjtDJ6uvruXmLaECL9d/HGmnUR1OzgRyC0aNHo7KyEuPHj8e4ceMwYcKEo+quRIq83lAX56S+a2txcTFqamowadIkTJ06FbW1tejp6cGBAwfwwQcf4KGHHjqm5zNwNFauXImHHnoITU1NiEQizNiIRqMsT/n5+SwblZWVqK2t5ax4KBRiB9Bms8Hr9fJ8IHkNh8MYNGgQ3G43X1eSpKPYHsfSvVnMAPUHsTaVziX9azab4fF4OAAzYcIEDB06FCUlJXA4HL3OLepULdZoZQvKiNnZjz76aMDPeTaD+mSoqgqv14srr7wSoVAI27ZtQ3V1NYqLizFv3jwsWrQIkiRh0KBBkGUZRUVFkCQJbW1tkKT0FlxDhgxBZ2cn4vE4Fi5cCLvdDp/Phz179mDRokWoqak5yoYQjfiTtadkbxCD2scC0WkWg4Nkp1DQiY6lGnGyubZs2YK6urrjfxADR4HYSg8//DAee+wxlrOhQ4ciLy8P8+fPh6Zp+OUvf4mrrroKsizjd7/7HYC0bUFsLZvNBkVRUFNTA0mS8Jvf/AaKouCyyy7DwYMH8d577yGRSGD+/PkZgUFRZ2ZjjZwoZLNH9OV24n0dCxNMf9zZzg44IxzUl19+Gc8//zw0TcPvf/97XkhJedGgk3GSK32kN2oKIZsx0VumFkCGodsf9JSEXM8TnYFcPlvcSBkAR181TUNeXh5kWcaePXsAAE6nM4PqSM01VFXFkCFDUFxcnPHslE0tKiqCy+WC0+nMoF6I2TKxM6Wqquju7uZMaiwWQ3t7Oxuc5BAFg0F8+OGH/GPg2PDoo4+ipaUFgUCAu+1SUxkyBFwuFzugBQUFHBWn7TioVkrMnkejUW4gRJREormKm9LrI+KU9aJ5TDKRK/T7CfYFsRaLnFbKSI0ePZodbroPfQBI/HwxuCNSJOk+TCYTvF4vSkpKMHr0aNTU1GDFihU5P5eB3kGNucjRIpkhuaO1QJZljBw5EgUFBRn1xTSO+/btyxhzCsSQThRLEwCwI0zQy2kuDqfomOayNg3kWFVVYbfbUVRUhGHDhrHTOn78eO5Crde92Z5DfP9Yu8SezdizZw/eeOMNZmNIkoShQ4ciFAph7dq1kGUZN998c4Y+vOKKK5BMJvHSSy9BVVU88sgjSCQS0DQNI0aMYH0UCAQQjUbx8ccfsyx+9atf7bWESc/C6k+OcjWu9WwCmnfHQ2uk62ezq0TbTpRXTUs386M5Sw55KpXChg0bjvk+DGQi21hs2LAhI2tYXFzMDdnEvZjNZjO+9a1vQZIk+P1+yLKMGTNmQFEU7Nu3D5qmcSBw+PDh6OjoQF5eHtatWwcAuPDCCwH0bxfnoqdylU39Z+nZJscj4wAy1ijxmtn0vaqqWL9+/XFd73TAGeGgzps3D08++SSam5vxv//7v7jvvvswf/58dlKzUf6y0ZzEv0XHkBTkQARQPF/8yYbeHF0xOpMLDVkU5P4mDAl+NBqFw+Fg+jFFIjVNY3pbd3c3gDQ9jvaqrKioYIOtqqoKqqrCarUiLy8PlZWVMJvNGU2UHA4HnE4nCgsLuQMr0UGpQQfdbzgcRiwWQzgcRiqVQiQSyahblSQJHR0dSCaTaG1t5fM++ugjrF27Fh988AHWrl2LN998s8/vzADwwgsvIB6Po729PcOhBMDjUlBQAIvFAk3TMG7cOFRUVGQ4m4qioL6+HpIkobq6OiNDb7fbEY1GoWka74lK3ftoe5mioqKMjOTf//53vr/+WAkisrEicjWkxfkq6otx48Zh5MiRbNh7PJ6MjJL+fPEeSaZFRgcdY7fb4ff7UVlZmdP9GegdK1euRCKRgN/vRyQSYbki3e33++F2u1lmKRtqMplQVVXFwTIKzABpWRo+fDh3JVfV9P6p1PhKHM9YLMZbERFE3d0XyHgeaBBmoHItXocCSCNHjsTEiRMxadIknHvuuTjnnHNQW1vL85SOy+bQ0Fw1AoO5wWazYd++fdiwYUMGDfKaa65Ba2srtm3bhlgshrlz52LBggWcQSKaqsfjQTKZxJVXXglN0zBkyBBIkoTbb78diqLg7rvvhizLWLp0KZ599lmMGjWKA8LHa8T3Zpjroc+SUsCov/OygQJ6onObrZY2272Fw2EuMSE5NpvNTNE/Gwz7k42rrroKv/jFLzKCW+SMLlmyhMfl8ssvRzAYxIMPPggAGDZsGK655hokk0kMHToUkiTh3/7t36BpGg4cOAAgHczRNA133XUXgPRWX7IsY/bs2WhoaMALL7yAK664gse2P7kaqHxnA8khyZ/4//EGXwh6/Uzyr7cdUqkUmpubYTabz3hWwBnhoK5cuRJf+tKX2Cj561//CkmSsGTJkowInjiZsjmcopEpRoHEBTlXiFnK/gyPbAIuGgcDMVz02w30dX9EgdE78slkEnl5eejs7MS2bdsgSRLvgUmOKNHBKNtAmQRFURCJRLjpkt/vZ6rjkCFDOGshyzK8Xi8K
CwvZgKT7pqwHGfgiZbOjowNtbW1Myauvr8+Irqqqik2bNkGSJLhcLqxevRrvvPMO3n77bbz99ts5f49nA5577jkA6e+bDHvgiFNoNpvh9/shSela0/z8fK5DpfHVNC1jKxVSpmJ2Zs+ePTCZTCgrK4MsyxzdpkZJqVQKeXl5GbWuImVL7CjaF8Q61v5YBHrkkmnVNA01NTUYP348JkyYgKqqKuTl5fF3RsfRb1HfUPSYrkW6QZIkfPzxxwO6VwNpvPPOO3j88cfR0tKClpYWpkCKmRtqhmSxWGCz2RAKhSDLMhoaGjjDStvN0L7P0WgUyWSS9VA0GoUkSVzTWlxcDK/Xy0yQ46G7kpxmY+z0hb56IRD0a5a+J4E++09duaurqzF58mRMmjQJkydPxtSpUzF06FAUFxfD6XRmrKmSJBlOaj84dOgQNC29NdbDDz+MW2+9lcc8aqHlAAAgAElEQVSltrYWPp8Pb775JgdQxo8fj/Hjx8NqtWLp0qXc1VfT0rX8iUQCS5cuRTgcxu7duxEOh7F//35EIhE88MADKCsrw2uvvYa5c+dmjFNfGKhtQxDnmmiviIZ8tuxnf9AnE3Kth43FYkwXJftEtPVaWlpQUFCArVu3crDdwMDx+OOPw+FwYPr06RkMI5/Phy1btmD79u38vU+fPh3hcBjPPfccNE3DxIkToSgKHn/8cQDAggULIMsyVqxYAVVVsXPnTni9XrYPXnrpJSQSCTQ2NqKurg7jx4/Hp59+ih//+MdZg8R6DNSB1CeXRHs6G514oGwSfYkhzRWx1pTscOBodkAwGGQ72mQyYePGjfjss88GdA+nC84IB3X8+PGIRqN46KGHsGrVKng8Hjz77LNIJpNYuHAhFEVhQ0TPXdfXlgHH3k2OQNcQDe5cob9utghotgkh1tbm4kwrisL7RQJHGg1QzQZ186UaLIvFwplUt9sNVVURjUZhNpvZWaUmS5FIhB1Iq9XKjqbD4cjooEkbw9PCTJlVqhWj8RIVAdF86fqiI0Df2/jx43lbG7GuVpIkdlT/8pe/4PXXXz8rGy+tWrUKjzzyCG/DQgYFUbc9Hg8KCgrg8Xh4DMVaYQBM93W73SwHtEcqNZIhKi+Qli+ig9tsNqiqiv3793MWXtwKIVtUfCCLgUhNzgW51v7p78fv97PDSpkov9+fQR2le862uNGPqqqGkT9AvPHGG2hsbISqqqwTiAVCOs3n80GW002wPB4PyzeQlk1iC5CuIV1WX1/P8hgOh1FUVMQsDmKdaJrGQRUg7YToZU4ftOgLAzVyKNre32dSgG8gARvSl+Jakp+fj5qaGowbNw6TJ0/Gl7/8ZUyePBmjR49GWVmZQZ3sBbQ/JJAeM9o2iLr0SpKEn/70pwiFQli8eDEA4KKLLkJ1dTVTVVVVxZ/+9CcAwH333YfJkydDlmVce+21ePPNN2GxWPCnP/0JkiThvvvuw+rVq7Fnzx4oioJ77703a5anv/+zgWwkvfMpyrc+29PbZ2eTXXJE6RoDDTICQHd3N+tf0aajzFNeXh7XAm/atAnPPfccj4WB3LB79250dnZi7969qKyshNfrzRjjkpISLFy4kOUiLy8P5557Lpqbm5FKpRCLxTB9+nSmo4fDYVxwwQWQJIkbFt56660wmUyYP38+l8Y8/fTTiEajKC4uxoEDB3DOOedwAB3ovflWNmQr39PX51NwQwy6ZLPnB+oAi4kdMQCjnxPZAorEahTPpXvdsWPHgO7jdMBp76Du3r0bO3bswJQpUzBt2jSoqopvfetbuOSSSwCkhf/ee+/Ff/zHfxzVVl1UtPrXByJ0IrVWzEQqipKzks123d4cTfEYcYuCXLO8iqKgo6MDJSUl8Hg8ANICTvUxmqZxoxF6PpvNxls30IbviUQCRUVFTMOlqI/JZEI8HuemHLTfaSgUQjQa5TpHu92OwYMHs3EvLk4+nw95eXkoKChAXl4eOz00hrTnpKqqWTNQFMGjTJ743UiShMLCQp7sr732GlatWoVXX331jHdYV6xYgcOHD3NNTjAYZMdSURSuFQbSAQSz2Qyn04lYLAZVVdHY2JjRsZecTSDd0U9RFNTW1gIA06qsVivC4TAbuuJYEh2TMjNkSHz++edZ7z+XrNFAAkJAbsa+eG2xIyX9T/JVXZ3ejoOyT4WFhRkOq94xpfMkSTKazuSIZ555Bjt37kQgEEA4HEY0GuUaaElKN2orKCgAkA6WVFRUoLy8nOU7EokgHA5j7969ANJbG9CiT59DtaiTJk3iwJjFYsH+/fuRTCbZ+QWOOHRiLaqob/qTx+NtCtYbRL03kIBNNtaP+Fmk56khEwVpDBwNi8WC73znOwDAzJCSkhKsXr2at0CSZRnz5s2DJKVZX1arFZMnT0YqlcLcuXM5q1RWVoZkMonvf//7HBwEgDlz5kCWZfzqV79CIBBAMpnEjh07sHv3bkiShMmTJ2c4l9kyqtlkT+xSrj9HH9gXgzG56N++nFZaA8QSof4+S9PSexK7XK6M18iuoqBSR0cHiouLOQjV3NyMrq4upqAa6BuVlZVwOBy4/fbbcdttt+H999/HjBkzMpI9lFiYN28er3WXX345otEolixZwvstq6qK3/zmNzCbzRg2bBjMZjPuuOMOAMAf//hHli1JkjBr1iwoioIHHngAkiTh8ssvR2dnJ2bMmMFbf4noj5Yu2uiibIsyT3IvymEuGdtsEHW8vsldLpCkI41FRaq7pmncGBYAPvnkE+zatSvnzz3Vcdo7qLW1tYjH41i3bh2ef/55fO9730N7eztKS0sBpBv7rFixAsOGDcN//dd/ATi6FXU2DNRgFYW1L1pVtvOBI5RAkc6Yyz2ItEwAvTrEYkaW/qcaTzE7pWkaSktLEY1G8fnnnyORSMDtdiMcDkNRFJSXl/NkoawYbUdDGVfayJ6cQ4fDAavVipaWFsiyjHA4DKfTCU3T4HK5YDKZEI1GYbPZ2Ji3Wq0oKipix9nhcMDv98Pv96OoqCgjwyoGAuhZxowZw9+lvmshKSSXy5WhnOgYclTp50xo7f3BBx/ggQceQFNTE+LxOFNrydA0mUwoKipi6m5NTQ0GDx6MqqoqJBIJWCwWBINBAOBanry8PMTjcVitVs6i6g0UMvr37NmDVCqFgoICpqmoqorDhw9z4w8ae03TsGvXrgwlnKsyp4VnoJ1UczGqKNuW62drmoaqqiqMGzcOU6ZMwXnnnYfS0lJuvEPHiL+NTGrfePnllznAEQqFeIsWMiaotl2WZeTn53O9KDV4I3YIOQuhUIgDLaKxTceRXvH5fJyZFx1WsSHW66+/nvWec6GmH0sWP5djjsXp7S3zpb8/mmvk3Ovrpc52dHR04PXXX8eUKVM4CAKkv7fCwkLMnj2bX4vH45gxYwYAcA3f4sWLYbVaMW3aNEiShCFDhsBkMmHevHmIx+N47LHHAABr1qyBJKX7MkiShLlz5+KJJ55APB6HLMuYNm1ahhHfm3OqD47rA2jZzqXn0X/WQECOM9lletZYLjW0sViMKffAEWeCyk5sNhu
OuuiLLoDEdtba04E3x/HTSTYpCSkoL8/HwASDjMVER0YnVwRyXINvV0ZpJpZzpQKCsrS6hNiUbjbey7urqkCQ+vgZ9DQ8j6U204aCjnzZsndUrJSyu6/43AVO/NdA51eXk5jh8/LtkHraQYcOsB07zH5Guj06cV0akCWv5OB/R68e9JRbnkkksQCoWk0VQgEJCxMhzhkpubKw22bDYb3G63ZBSpiLja2trE8WPN0RlnnIGJiQnJyFosFsyYMUPq8jhnjYEsA6Oenh6Rdypw1oh5vV7Y7XZpypCZmQmn04m0tDQcPHgwoUaCBoT7w3/JGajkPaSTTwc/NzdXHHwGqrFYfGA7A5fBwUGp2yJNVme/9OfxjNFRiMXiHU9HR0eRn5+P7u5uaed+00034YorrsC2bdsEJEiu4Vu0aBFqa2vh9XolQ1BRUQGfzyfZFp3BpBPHe6WcUV4tFos0SyHFiRnTRx55BP39/SLf7H64ePHik8olg7Lpzt/pgkvcLz7bycl4U5mxsTHMnz8fkUhE9G5+fj4GBwcxPDwMt9stz4eyUltbi4yMDLS0tGDu3LkSMFF3+v1+GWvB2sBgMAiXy5WQbWRdEZtIcD9jsZjMY6T86mwUn0FHRwdisalxGl1dXeJ0Tpcl1f+AROBFy7Kuk6atsVqtyM7OlrEWw8PD6OzsPMEJYhaBwROBWGaR+Dr+03RD7gvfkyDp2NgYXnzxRflZSkoKjh07hpqaGpSUlOCSSy5BSUkJ3nrrLfz0pz/FgQMH0NPTI+fwk5/8pDxPPmuWHESjUQFrta7lc2DWpaOjA62trQlAALNdvb29CIVC2LFjxwkAABBvKsNgrqSkBCaTCTU1NdLl1WKx4Oqrr8auXbvEvvOZGYaBDz/8EE888USCflmyZAmee+453HHHHTAMAwUFBdJLIhwOS4aLDVLYqZyZ70gkArvdjr6+PqlNpQxQ17Juf2RkRPwD1kbyuel9AiAZdTrrZrM5obEi2V8cZccAgGdnYGBA2GDa4abDyky33W5HRUWFjMEg28Uw4qM/2CySPweAGTNmCJhE4IF2Us9Spb5iAJCSkiK9K3gtzKKSeRaNRgW4oa/ArsDa3rFZD+WfCRHaaPoutBH0+dhHgbaP54TZJJ3J5n1pW56cHXY4HJJp5Jkie4BBtF67du1K+Hs9e9kwDFRVVWHHjh1YsGCB+JsahN+3bx+WL1+eEHBEIhH09fVh3rx5iEajmDt3royG0XptdHQUNptN9DMB7Ly8PHR1dSEtLQ3p6emora2V88s90HacNqqjowOFhYWi8wmmZWZmis2mbmQnZXaF5b1nZmYKk4nyzywtZUM/D63TJicn0dLSAsOI9zNgsy/WzY6Pj8uZ41nTgDhlmMxKn88ngBu74VLvskSHtcq0p7ymrKwsSTLwLJLZmTzvl5lVzY5xOBwJDDbeH2WT2WbufSAQQGZmpug5p9Mpvj9fyyZp2lZFo1F87nOfS9hTvXie6b/pmvJ/mQxqNBpFT08PmpqasGzZMmzatEl+fu+992JkZASlpaVyiH/1q1/BMAy0trbC6/XC6XSipaUFBQUFSE9Pl3lzt912G0ZGRjBz5kxYrVZcc801EqA6HA709PSIY8qAj0jdsWPHRFEye0rlHQ6HkZOTI0EqFQKNCw2IRrP7+vrkIU5OTgrCQ94+0Sci0hQk1tTxcBJ14WEAkCC8dDy5Z8AUImQYhtwTFTKFn91Xi4qKBKWlseCh4/fsWkxDQfScWSoeEu1s6a9U4PPnz0ddXV0C7UQr15Nlbz7umi5rmrw0apr82RxEzQyMdlaTl76X6VayAtBK61ROPzO6NOgFBQXSIKGgoAD9/f0YHh6Gx+OR4IadcdlRt6enB5WVlZI91dRQZlzN5ngn3Tlz5sBqtaKxsREVFRUYGRmR+Vw8h3rwuGYOsFU5nTCz2SxBwuDgYMLMSGZIuOeUY1KvklFJGj+9T8lgAB1utpLPzs5GeXk5xsbGJEAm4MBud0Rk+X563qhGafkzGlNS4VwuFzwejzjRzz33HB599FHJ4jU1NSW0lrfZbDh48KBk53i/2dnZuPTSS7F///6EJjPcWxp9yg2NEs9tQUGBdHdlp8EvfvGL6OjoEH3EgJlNnE61NB0uefGMf1SgqksMTKZ40w120aUDyQxZJBJvcc92/C6XC+FwGH6/XwKL1NRUacQSi8Ukc84Mf0dHB8r+0a2xoqJCRpGxflUzAOgAUPZpB5xOp2Q36KDMnDlTSihsNptkqWtqaqS+iDpD63RtrHWwqsE8fc41KyAWiyVkVDMzM1FZWQnDMNDe3i76mUEoQSPaEI7E4L1RfyWj3DooLC0tRUtLi8wVpK7X7KH09HS8/vrrsm+PPvoo7r33Xrz88svweDxYtGiRMApoW7Qca2eUNjSZ4g8AR44ckYwr9RrPLj+bzzB5GUacNud2u2G329Hc3CzU2kgkgssvvxw7d+5EaWmpBC9cH3zwAebMmZOwR2QtLVmyRO5l7969Mm+c3V+tVqs4gHR4ta02jHi5BIFhLtapkvGgR3AQGJyYiM9V1ZkK6kZmkeiDMGDRcsZ9djgcsNvt6OnpgdVqhdfrlQ7YGRkZMj2gqalJmsKw5hCIZ054jwyqNHOL9pa6lkAL7ZRhGNI/gOefY2Q004uTF0jzJFuImWxeLzNIABKyfAQMmTHTgRptJa+Hn6mzn4FAIEFf8H4/97nPJVB1eWZ1gDjd0kwQnkmWBXDxPXfs2JGQMNDlVKSrNjc3o6SkRBrtUBaGhoZw5MgRnH322Qk+idlsxt69e7F8+XK5RgbMtNlFRUUIBoPwer2wWq04dOgQcnJyhPVSXl6OgYEBpKWl4fjx4wKCkpJOueN10jewWq3CdiCwAEAytwyMdTKEGXeCqrx/zkfmc4pEIsjNzUUsFhMmDUcyMTjkZ3J/fT6fPFMyjWgzKSsEsin7HIFHO8Zu8jyjnZ2dyMnJSQA63G636Hyz2SzJAw0IMGZgRpK+NX0g6gGed4JYrJemf6/9Vj2lw+PxwGQySXDNM8QJHslBpclkwlVXXXUCAJO8CHBRt9hsNgQCAXi93tMGsOW9Ptar/x+ugwcPihJZs2YNnnnmGRw/fhwbN26UZhH33HMPHA4H7rzzTnz1q19FX18fHnvsMfT19YkDum7dOjz88MNoaGjAN7/5TTz22GO44oorEIlE8Prrr8Mw4q3Cx8fH4Xa70dXVlTAnj8aLB5qbzoyrDnAcDgfS0tIkg8XMEQ9DOByWA0WjSoScRogUFSLeRDmZAaBAA1OKkygVa4B0pic5O2gYhtTesU6GNCB24KNTzsDdMAxBQdkQQ9ONeACi0agIeyQSkSCV1Cn+40Hi9XBPjx07hvnz5yc4a9rQJge1p7uSjYM+cKdasVgMFRUVOH78uLwHA//GxkZkZ2cL8qpnyp4sQ6r3ST8T7Rgm3+d078Nr4RiAjIwM5ObmijPhcrkSAtVgMCgZSipkzjikwebnkCpP2qc+B62trSgsLITb7UYoFEJHRwdmzZqFiYkJeL1etLa2IiUlBW63G729
vRKoms1TNRwc/5KamiqOBQMsIo+6GUVWVhYWLVqEPXv2CJpPmUkOXnVXbL1/OqDlzwkOcf5bOByW2j4afaLENDgABBjSFB8+N6LdlBE9p/Wzn/0snnvuOXz5y1+WexwbG0NeXh5qa2uxY8cOGc3BbrQMrMjOqKqqQl5eHrZt2yZyQF0CTFHXnE4nsrKyUFxcLNlfYKqswGKZ6oLK58DxLHRMplsfdV40APVRS6OxExMTOHz4MJYuXSrOgs1mkxpq1lNzoDqR5YyMDASDQZnPqAEd0p1aWlowf/58eDwedHZ2YsaMGfI8HQ4H2trakJqamsAAIPJvGFM0dzpX3GuLZaqLZ3d3t8i0zWbD8ePHpRkTHa3x8XGkp6dLVo3PhbKjM3J6jzTQqdkEdPhY29Te3o7y8nLk5+cjHA4nUP60Q8uGGKT+JrNkdBZW6+D6+nrMnTsXIyMjAmQASOjGSXvC+/zNb36DvLw8XHHFFQiFQmhra8OcOXMwc+ZMjIyMJAA+OvvPTLV2ZLmYyWXgTXvIHgy6eRRBN673339fQLScnBw0NjaKzQoEAvja176GXbt2JdTs8d4OHjyIqqoqCSq5VzU1NVi4cCFqa2sRDodRXV2N8vJyjI+Po6+vD4WFhUIj1eCHHlWWnZ2Njo4OCXh0QKYXsxmcWej3+0Uvud1u+Hw+2Q/uATOXtL2a/se/pWyRsUFghQFnMBhEXV2dBAk6q8/rpD/BBk/aXrI2j34Knffu7m5hQpCCyMwmnzv3RIMogUAggU1DX4wjQdgJmSAHg35moBjgpKSkCA2YI43a2tqQmZkJu90ubAz6TOzTwcCWe33hhRdKQ7tk/aZ9Dc260IsZdmZytZ3SjeGSF5kADFZSU1PhcrmwdOnSE1gbDQ0N6O/vx8qVKxOC07GxMRw4cACf+MQnRLarq6slSGJn2rGxMcl01tfXC9jR3NyMsrIymEwmLFmyBAcPHhQgjg1G9TnnfbC0xmKx4G9/+1tCaQp/ztmgzI7Sb6WNozwCU03eGNgBkKwjv2dtNeWFcpaSkgKXyyUZY15Hsn8SjU6NUCEjjPfEucaVlZUyAUTbZ7/fL3qNPgOTYmzCxd8xCOY55L3zfnlPjC14vbQxvFed/YxEIuLnGYYh5VYMaLlXwWBQ+jvQ7vHZTAfAaDvOM0/9wn3Py8sTwOhfJoPa1dWF0tJSFBcX4ytf+Yo0Oenu7kZra6sU94ZCIfz4xz9GKBTC9773PQQCAXg8Hhw/fhyVlZV4/PHHsXbtWrzyyit44okncMcdd6CyshI2mw0PPPCA1Fhoqh6RC13XQWVUVVUFk8mEQ4cOicKl4iHapOmSzG5R6BgQ8v2INLlcLulOBsQVR39/f0Ixd09Pj8xnogOmBZDXmRzUMdD1eDwSiFKhaiSVKBsPC40Gm38wa8rMHe+LQQgw5UDwvUhbACAIEJGdZHTmu9/9LjZt2pQwpzM5M/Y/WTrw0wHg6SweeI2gmkwmLF++HO+99x7OPfdcoRnxPqZbuobqVIeUhv9Ur9FI9OzZs5GXlyfBKR17ztsj5cPtdkumnc1XBgcHUVZWdsLcOnay5D4xM2EymVBbWwuLxYJ58+YhNzdXZicODQ2huLgY7e3tMJlMyM/PRzQaTRhj4/f7BcXLz8/HxMSEzMH1eDwA4rU1VJbZ2dkoLi5GQUEBrrzySphMJvzlL3+RbD/3nE5TcsaUSyOVyQEdZ0empqZi1qxZ0nCF+oBoIWVcj7zQ1DL9Gn4mz2Bvb6/MFfvjH/8Im82G4uJimWFos9mwdOlSHD9+HIcOHZKGE8wutLW1YWBgAM3NzQn6hPcVDoeRl5eHgoICoc/zuWkAiIHzzTffLIEgGxCxCzrf+2SyqYPA5HPE77XRmo6iztfSiPX29iItLU1GhhiGITRGXUPt8XikBIEAAhDPQPDZAZDAhaOvamtrAUAowF1dXViwYAGCwSDy8vJgGIbQUFkHSfo1nyu7xLJunxkxnilef1paGkKhkOgNp9OJyy+/HOFwfLTCtm3bJJujqZx0TJLPPmWKoBF/r8Euszk+N/Lw4cNwOp0oLS3FjBkzYBgGhoaGhLbIrAIDFwaqPDvagdLZAiDeXIt1TxxvQIonnRLaNU07JQuBMkqAhFRe2i5m8XQgosFdyguBM+3Ek96oZ1zqMWBAPDjl3mVlZcloCerlG264Ae+++y6sVqtkSbnq6uqwYMECcY65752dndLgbXJyEvv27UN2djaGh4cRCoVQXFwsfS3efvttXHzxxVJblp+fj66urgTaOpupUVfxGejldDqxcOFC7NixQ8BFAML44LVwogCprXy23HMGqppSm5aWhoaGBgEY6NPQrrBxzZIlS8Q+sJ7RYrH8H+beOzqu8swf/9y5ozIqI416l1Ws4l5xBxeazcbAHrKBABsIm7AmCeTLkkAabEJbQpKzJNndhBTOjwRCQjphgYDptrENtomLbMvqvYy6RrI0M/f3x/B59NzrkS0Z9py85/hIGs/cufd9n/d5n/J5Po+NKEfLJ+s0KavsFZ6fn49AIGBjMiWkmw4X78GyLGn1Qv3PgBsDQERSaFQTM7p8TgbcWZfH+U5NTUV9fT1KS0sxNDSEoaEhQUz09fXB7XaLTUKkT2FhIS644IIzILg6KOrMHkcb2g7j0GUyGm3Cc5rzxHXz+XyIiYlBdXW1DbUBRAJobrcbq1evtqFbTNPE+++/L07r5OQkmpqa5Hv7+vpQVlaGoaEhJCQkYGxsDD09PVLff/z4caxfvx69vb1Cfrhs2TK0tLQI1JeOCZ+HgV2Px4PS0lJUVVXB5XJh3759SElJQW1trQTxdF3rddddh6efflrun2gZBn/o8PG7KG8M9PI80EFrnqEsaaETOjg4iO7ubhtBK218tp7ka6mpqbZuFiQkYrlPT0+PTY9p/UUCQK4FAEH0MOAGQPQuZYX2POfTMAzpcc+M79DQkJQ36qQYB88THQQhIkK32ZucnMS2bdtsdoETNcYzgmdVeXk5jh8/LvrL5/OhsbFRGM9nM/5uHdQHH3wQx48fx0svvYTS0lJ85zvfwQsvvICSkhIh8Xj++edRWFiI5557Dh0dHdixY4fgu9PT0/GpT30KTzzxBJ555hlkZ2cjFAph586dEuVgjYbH40FycjL6+vpkAhnZpzO1bNkygcD09PQI3lvDYzs6OqRHHrOhOsOoDRIAAtVlnRsNybq6OltE3efzwe/32+rQNASSwkeMO+t6KDwUIL5PC5WO2BB2rOsseL90pFNTU21GOBUmr0WhZV0wIUrcSHRQOXT2kGx+nCd+hu/RinomQxsxTuN6psOyLKlF1RlNy7IkukwoC+8x2kGksx/63qYbvE/nvfJvv9+Pvr4+VFdXCwtvamqqkFsVFhaiv79fYLjMBtKIp9FAR8XlcqGlpUWyVAxScL5N0xRygWAwwljq9XolUsem84T8UlmxN1tHR4dE1sgITIeVcsleZXSq0tPTsXDhQltNxVVXXYX9+/eju7tb5lu3P+K8cq65pzifOuJvmqYcaocOHUJhYSEyMzORnp6
OpqYmG0GCrqU4ffo0kpKSxGDWEUX9kwGdvLw8ZGVliTM+MTEhRg+zsSdPnoTf78fq1auxadMmfO9738Pbb7+N1tZWqW8EIgcYDWgGkBITE5GcnAzDMOTeqH+4Z/j52NhYdHR0IDExUeq+eYjm5uYiJiZm2vYc1IXOw845dObtbO/jvMbGxuKCCy6Q2l8e7tQfk5MRdlRNh89sLw99fsY0I2Qqo6OjMre8h9raWsyfPx/Z2dno6OgQYzMQCCArKwudnZ0SGWe2nzVA1JF0Rn0+H8bGxoTZ0LIi0K3W1lbJGDgNofT0dFx99dUwTRPPPfecbR6cmVS99/i7vp4OCHZ2dkorFwBS+5SdnQ2v1yskZzRUdUCFrNX6rGJmXZPs1NXVYd26deju7pZaQBqQPEOIGNB1rKync7lcAnULBoNiFNHJ0IFKBgA4JxqGqedCsy2zHjMxMRGxsbFCAgdEavYYeGZGjeuVmJiIxYsX45133kFpaSny8vJs2WO279KZbc4R0Q5AJJBJgjPLihB+se50aGgI6enpaGlpQVFRESzLQmlpqTjQhCwTPUV7QGdZg8GgkEHu3bs3qi6jXNDppw3DLCp/EgpJJ1AH8EiKQnlnAEyjVg4ePIjk5GTRbXPmzEF/fz8qKyvF2eV99/T0SMBeO9wTExNC5tfV1SX3xzOfpC38nTJBvUvII9t7UQ/TZmNWNBQKSbZO16Qy20T7raamBmVlZWK7UA8MDw8LOgmIIHpM08TmzZttiQ2n/qOczGQwaRFtnrgu+lp6/3L/MBO3Z88eXHnllXJfExMTaGtrw6pVq2wBG7fbjV27dmHp0qWyt9577z0JQIXDYRQXF0vw2uPxiPPI3uimGemdzD1NWbniiiuwd+9eW0sVkvVwDycnJ6OiokLs02XLlgEAysvL4XK50NjYiJSUFOzatQsejwe//vWvxaHS9qxhTDHy0qGj00gHmXLEftmcPyBiPxw6dEhsnsrKSiQmJiIjI0PKRyjHhK1qvURCQ8MwUFVVhZGREbkfYCqrybOe5VBcTx045LpQnxK1qPUV7R3qM2aV2ScViMCzPR6PlHIkJiZKSQhg5zVgkH9wcFC+m2uUlpaGtWvXniGvtIP1XPB17i2uFe+bSKRoZRdnG3+XDipZGEtLS5Gfn2/Luj3++OMCVdm+fTssy8KGDRvw1a9+VYy53NxchEIhvPLKK5gzZw6am5uxZcsWzJs3D2lpafiP//gPUQapqam2ht+pqanCPsasEaNtbW1tNqZTbjguNkkpyDqpjTkqWGDKKSQkSStbvpc9Ghkt0REMjRnPzc0VI4JRF20gUojYWoSwZP7TxgRg78fHCGIwGBRWOr2xaGCwVowQCxp/FGKyYdJgprML2NmMec3LLrsMTz/9tC2rwPk+m9LXa8Hn1lkdJ7R2poOHJQfnt6qqCrt378aFF14ozsx019evT/ce7WRN95xc13A4jLVr14pBkJiYiL6+PoyPj4sMa8OejiUQUeQDAwOorKy0Ff5T4dFQpiKic60jZ2xfRMfI7/cjIyNDjEQacMy289Cj40lHmjAXt9sthD08cAmp03MfDoexcuVKtLe348iRIzaICucHsLPq6cACBw+p+Ph4nDp1SpwcOttk4WWvOM4HI/E0KvhMNAx1xouGRlZWFtrb20X+eNDQuWK2LSkpCQMDA9i5cyfuvfdePPXUU9ixYweASD30nDlzJJNlGJEs9fj4uGSfCa0C7AEyYErnVFRUyPcwGMaI58DAALKyss6aQeXzRcug6vdpWZ1uUO6Kiops8C2d2XG5XMjIyBC4OLM71K+tra0oKSmR/achxgy8UMcZRgQNkpeXJ4d8X1+fQNZJasX5YwsyGml8pmAwKDBs1l2xzyphklzPrVu3npFVsiwL27dvx3PPPSfwKa6dzr5w8Azg0M4cACHf0SQ5NKpYr+fxeFBeXm4jb6JzSceb88eyFhrrPAuZ+aQxw+80DEMCszybOU9cs1AoJHWZzLbyfrk2+p9TxnTwk+cj70sjkXjOEAKtZc15HjMYlp+fj9HRURQUFNgy1SSfWrdunU2ugAgMkjV7gUAA2dnZcrYSFUFYKIniiBrhWTZ37lw899xzMgd0Uul4ct70Obh9+3aEQiG89tpr8Hg8EmSic8n2K9dffz2efPJJWyCDAQMyCzPbRLikaZqiz3le6jINjcoaHh6W7DyDGVlZWbbgNxCB93H/6T7nfr9fHBs+G+2ncDiMvr4+kQHqAJJIaj3r9XrR3d2NpKQkkUsG8HlNAMIOy0Cr1+sV/eL3+xETEyPoDTpB3BdE+PA8I4kOZZO2xtl04tkGHWvOjQ4Acj70/3FOtG6l/RcTE4OdO3fiwgsvhGma2Me1/eoAACAASURBVLdvH9avX29zTgEICzeDu01NTQJ9BWA7szMyMkSvaaK+rKws1NTUCDw2JSVFAkvaOaXc6SCKz+eLOhc8YwsLC2EYBrZt2wbLsvD666+Lzc4+x865puM1NjYmTmJaWhrS09OldjszMxNNTU22oCCRJkBkX4fDEZbprKwsCc7k5eXB5YqwXrPzAa/B9QOm+thSLrhnKFepqam2/dzd3W0juOLck4eGgRKWnnCv0VazrKnuCCSQYgA1Li5OYOrsZc2zhvPMUgjqHertDRs2iF2h5ZTPHO1sp4wyMET/IyEhAeXl5XK+z2b8XTqo+/btw8mTJ/HYY4/hoosuwic+8Qncd999GBgYwKpVq7BmzRp897vflcPqjTfewAMPPIBPfvKTWLp0KXbv3o2EhATcfvvt6Ovrw4kTJ/DnP/9Z6itXrVqF//qv/xJFxsOYEQsS/lDxW1aE5ZMLQwgUDeNAIGCLenk8HuTn5wOA1KMSsmCapsDUGK2kcmEEQ9dpWpZlcyo9Ho9klShkJAgxTROdnZ2SuaECZlSEUXVCXChQZOBrbGyU2q7ExES5B8KGeO/Hjx8Xh4ZRSe3UsDEvDxN+t84K6/fz4JicjPTgWrdunc0R5jiX8ud1qOCdDulsMqd6UAlQOevXs7KyzjBeprs3wN4aJdrQzmk0x8owIjWNnZ2dAg8hJHpyclIgkYZhiFLSxigwBfnhgWwYhrSpoGFHBUNyHRqjExMTAs8zTROHDx8WQgYS07D+JDU1Fd3d3TBNE5WVlXC5XDh8+DA6OjoEkkZnpLm5GcCZ7TiiPb9hGMjJyUF2djZefPFFkUHeo55LstE555PPR0eaUddQKEKCk5mZKYq1qanJllHW0CE6FXRWaWTyuUzTlD6ydORNc4qchw4QEQymGWnx9M1vfhNXXnklrrvuOrzxxhvw+XzSS4ysjcxCMQvJ+6AzoQ12yl5fXx/q6uoQHx8vhll/fz96enrk4Hb29ZtONqcbMz2ELMuSWumkpCTk5+ejp6dH+ABI/OJsYcSsLw19HcQgGzCfmzqAAYyenh709/djYmICixcvhmmaaG1tRWFhoQQTLMvCwMCATV92dHRIjz7qbkKJqR9ramrEqGbQwbmHtQG8fft2AJHzbmBgwOa8acdNwzM11I/zTJi6nnv+7OnpEZlJT0
9Hbm4uXC6X1NXr0hAaLzwDNILG5XLhlVdegWEYWLNmjSAwaFyRmAaArZaXRlB8fDxSU1PR0NAgrY4YkOS+437l+aCzJcwIaf3AbMLk5KTs4fj4eDkvueZa3hgU4tm6fft27N+/Hxs2bBADit97+PBheZ3DMAzs3r1bWFANw8CxY8dscGX2Oy0qKkJnZ6fojH379mHFihUYGBhAZmYmXC4XrrzySvzlL38ReC9h0hyU77S0NOTn54vNc8kll8h9xsTE2DJOeXl5+N///V/J8lNGmXnkdWnPsAxEZ6Co/+jIEV2QkJCA5ORkCdro7BTb8+ksOvtnGkaE7Zc1fIsXL8bY2BgyMzPR2dmJcDiMxsZGCRroALplWXJmcT/TfomNjUVOTg46OzsF2shrEDbNgD7rDpltZ8aPupO1gERhUNbIgm0YkV69tO0oKzPRiWcbdIhIXMjnHhoags/nswWsNAJNy6phGFKuxuDeu+++i4ULF4qscy8dPnwY4+PjWLlypWTE6fAYhoHe3l4hIPV6vZKkYGArFIr09B0bG0N2djb27t2LTZs22eZixYoVqKmpsZUKUZ8x634uu0nP58aNG+V8p51IuaHs0tYEIPuIfBvMitMO4b/4+HhkZ2cjGAxK+6JQKFLffvLkSZn37OxsCdCxjMYwIl0BgIjNz7ZywFRLHdbxApEgG9EY1Js5OTliBwEQhmy+h3wLdFRZLsVuDJSNyclJCdrR5idRn8/nQ09Pj3DCcN8SKaGTRD6fDxs3bjwjoBEtcDrdYNkT1zAUCp1xPs10/F06qC0tLdiwYQNM08SWLVuwZ88e3HPPPViyZAl27NgBj8eDxsZG5OXlIS4uDqtWrcKPf/xjxMTEoKmpCRdccAFM08TVV1+N3/zmN1i5ciXWrVuHN954A3/6058ESsLGuGQDTEpKQn19vdRa6AWnsRkbG4vy8nKBU7FNDTDF9EkogV5Ut9stkR8ANjp9pvAzMzPFyOXGNk0TDQ0NYjgGg0E0NzdLDyNtqOh6Ewq4aZrC3EpHlQdWXFwclixZgmuuuQa9vb04cOAAQqGQEDYAsLXFoXAyGumM4gH2rImOuPA1TdbAmiMypBmGgddff136c81EmJ1RSx3JdDrGsx38nGVZKCkpwfvvvy9/8/plZWU4cuQIli5dGvV+9NCR1miDmxmws1bq+yGEndGxtLQ0jI6OSp0QlSKdUxqBQIRkgzBdksUAUwqlt7dXnFjTNAWaExMTI06vDkb4/X4kJCSgpaVFou+WFWEYLS4ullYrgUBADt9ly5ahqalJZJcwd5IIMEJ98cUXnzFXWp54H1u3boVhGHj55ZdtkFZ+jsY9P0+ZYCBoeHgYaWlpAteiAcDIY05ODoqKimR+6PAzIDU0NCTRe+qEUGiKBp7RWUL8aNxxrxJ1QGOa9+v1evHiiy/CMAxUV1fLYUK4EPcG9zT3p66bpD7iey0r0n+4vb0d1dXVAq07cuQIYmJiEAgEJCo7mxFN5qeDqOsRDEba6+Tk5CAjI0MCHIWFhejp6cHExIS0EODhrwN2zJgQzkgjjUgYBg6576hTuc7Nzc0oKCgQlsHh4WEUFBQI7JUQLgAiBwcOHEBSUhK6u7sl8kxSFp0FcblcuOiii85AQ2iIP39fsWIFXC4XXnjhBVvQQRs7et8BUw4Gs6PT6Us6R5o4icRghmGgq6vL1oOR30UuAqIBeKbExsZi9+7dmDdvnhhGbL9BZ8nlckkWlllA6ifuETrNPDt15oVzo2HNvDfWSNEZcbKdMpi2Y8cO+Hw+fP/735f/ozHG9RwYGMDevXuxdu3aM8iPDh48iGXLlgkSgvO7e/duqe91uVzYs2ePGJKE+/n9fsmoDg0NSd0/5Ywwdj7r5Zdfjr1794oRqvUUz8f09HSkpaWd4QzRNiHzNjP23JOBQACHDh2Sno5kx6Vhr+WEAYaJiQm0tLQAgLSpYQ22ZUVImvR3GEakFra3txfJyclSTmIYESIkIsGoT4naIWsp15/ZbK/Xi4mJCen3roOOnGvdVoTZLAZGqdeZxbMsy9ZDmfdHeQVgc4z9fr/t/GCw77LLLhOHmXs52hl9Po5qWlqarQctET0k3uRZPV3Q3bIiJREkLdq3bx/WrFljI2+yrAg54ODgINavXw8AaG5ulnWgU1VeXi5BVSZRyG5LRAV5L/Ly8tDd3Y3R0VEEAgHk5OTIehUVFQmpJr+/sLBQmJ1nO1daLyxdulT2SW9vL4aGhlBbWystIPl9POepV2tra22Eejk5OZKBZB0tE1VsFUUZI/M2s7E8V4gKY30q9xcDLgwqhcNh6SLAtdRtpJiYIiuwZvUl9J+yT9i7y+WyEc4xEXb06FFUV1cjJiYGnZ2d0vqOrPb0T3jOhcNhrF69WlANeszUMeXQaBfarMzaztYWP6eDahhGIYAnAWQDsAA8blnWY4ZhpAH4NYA5ABoB/JNlWf1G5A4eA7ANQADATZZlHZjpDf30pz9FUVER8vPzBfp1xx134HOf+xwOHz6M++67D7fccguefPJJpKSk4OKLL0ZFRQV27twpB1o4HMa//du/4eTJk/jKV76CoaEh/OAHP8C6deuwevVq3HzzzTBNU9qE+Hw+NDc3w+fzSQaCbHTMbDJtnpmZiZGREQwNDaGnp0cOS92jiUqdta3AVI0lhUHXTPG9ZKBjjR/rQEZHR2XTDw8PS5SZSp3RRK/Xi7S0NABTCp8pe34vBYcRUFJNl5WViSGjHVkqbe1sU0nQMAHsLF6APVLCA5aKXzuPOpvqcrkkk7dt2za8+OKL55SX6TaPNmpmM/QzAlMRe0braQDx+RgAYIYtmsLVjlW0+ZnNIIQ2JiZCB97U1ISSkhJxsJiRYEaGBjp/7+3tRWFhodSeMnvJqCAz+qyHonHF9hCZmZlS90NjgbWCfX19mDNnDrxeL9ra2gTSR/gXa7wLCgpQXFyM5uZmjI+PS6SVjt3y5cvPMOw5nEEHzuMVV1yBY8eOoa2tTaLhDHxEWw/+ZHsMADaYMPc+CWZ8Ph/S09ORkZGBhoYGgbsTfsN7osNC2aZTFQqFpAaFBxsPFh4YdHi5F3itQCAgPQnD4bA0TtfPw0NRO2IMqDmN2mAwaMv2adIZl8slxCvOA2U6J2g6mdfXcF6LmZvk5GRxTicnJ5Gamoquri4Eg0FkZ2fbGKiDwaAE6YBIywBGsE3TRF1dnbyXGRIgElCj0aGRLsFgUFoMVVVVISkpCbW1tZg3bx4sK9JKqqCgQFpfjI+PY8mSJUK0xP3GdeFzeTweVFRUnNGmhPepgyx6brdu3Qq/3y9OMPconT+tO/ld1P2sG9XrowOr/DwNL4/Hg5SUFGRlZSE3NxcjIyPo6OiwOQM8D+ig0lhPSEjAkSNHkJGRgTlz5ojjQ2eWDjuzF7xvtzvSKuX48ePSdsHJ2EoHgM/BdiKUZ86hzpZYVoTEKhAIYPv27Vi6dKmtNpC6SgdOY2NjkZGRIZkQyoVhRDKiBQUFNli1aZrYtWuXMN0DkbYzRCrR+GYgjllN3ntCQgJKS0vR3NyMiYkJL
Fy40AbpXrBgAVpbW8WY0865z+dDeXn5jJA6eu8ZRqT0Y926dSIHLleEeXh4eBgejwdtbW1wuVy27CHlk06rJoorLCzEwYMHxQiNj48XtvDc3Fyb8U00CAm5+DrbRVF3crDmk+uRnp6OxMREKZmibPT29toYmvVZoQMyMTExNuQL63F5b0QMUZYY6OWZSd25YMECFBUV2TgxzhZkPp/BjJ/W1T09PZK5o+xyjbSNwv3i9/tRWloq8vPmm29i1apV8Hq9cs/19fWSIWNmXEM7c3JyMDIyIkmNkZER0X/cH1VVVTh27BgWLFgAALj00kvx1ltvIS8vD11dXSgqKpJgKs8M0zSxcuVKcQz1/E1XUnK2wc+Hw5G2Q2lpaZgzZ45cLxAISL9Y6m4dlGBrn56eHnR2dkoGmSUwhmHY5FmzuRNOz3nt6OhAUlKSQKD1+/Pz8zExMQGv1yu9qnnNiYkJYdbV5xXr1qmvmchhuzCitLinyAoOQLKvJLUi8oDywnOCjPNApIRt8+bNZwQp9d8cM3Ey9RnEa2jSr9mMmWRQgwD+zbKsA4ZhJAN4zzCMlwHcBGCnZVn/YRjGPQDuAXA3gK0A5n7wbxWA//ng54yGaZpoa2vDCy+8gLVr1+K3v/0tTp8+jYceegi//e1v8ZOf/AR5eXlwu9148MEHxaHbunUrXn75ZQSDQaxcuRI5OTlIS0vDmjVr8I1vfANf/epXUVVVJT302LeOGHMa68wi6I1FQ7yvr09qMgCIAPE9uq7MNE1hy2VknRFjHvimOVUHQ7gf2VjpTOrBQ9bj8SAzMxM+n89Wo0IFTOGqra0Vx5lKbP78+QCmjDZGjRiJzs7OFgIX3h8zXRypqak2wdcGBQ0cHWWksmdGTjsZfEYNkeJ8zibC9mEypRz6WWiw6LVeuHAhDhw4IAqd31tcXIz33nvvrPfGoTMn0Ua0/9cGB9fZ5XJJYT/rJicnJ4VFEoD0vqMzQLifdrqpGPkZKif2QCV0hYQgOsrHfUFHcGJiAgcOHMDSpUuRkJAgEWE6DJmZmQKdjI2NFSZbEo65XC5kZWUhPz9/2nWfzkmanJzE3LlzkZ2djf3798u+c0IttePEzIBz0EDRMsBscWpqKkpKSjA5OYnm5mYJXITDYYFH0hnVKISXXnoJpmniggsukDoWvZ/ZW5KHDvUIER5JSUlC6MN7pBHJ33VNPPePhvLxH99HI0075MwyOOWOn5vJftRQe2egSP9kuUJvby9cLhdycnKEKTMvL0/6wZKASztBhEo7jTqygmpdwoAhjYFwOEL0EQ5Hat0MI9JGpKCgAJmZmejv78fg4KDUIXu9Xng8HmE5Tk1NRVZWFlpbWzE8PAy/3y/rFRMTA6/Xi4qKiqg6KZpzquctLS0NF198MUZGRnDw4EHRRzQ6NTqAzgzJjHSk3qmPneiN8fFxkaWcnBwkJSVh7ty5sCxLMlf87lAoJLJJRA33u9/vx9q1awUVEAwGBZJGcibKpMsVqSUk2U95ebkNYsnr6vt0EuVxX7MMhfCxO++801YX+9WvflWI1Dgneh4YiCPXA89j9mkm1Jtjz549wigOAPX19QLj9/v9qKyslDKbtLQ0dHd328pwiouLBRYdHx+PhoYGJCYmoqKiAkDk/GW2mbWWHIWFheft+HBox7G6uhpARMcsWLAAbrcbAwMDGBkZwdGjRyXQDsBWBmKapq0/KO0WIrwIJ66srBTdS9ZzYMoJmzt3rgQ2iYZobW21nUW0LcbHx9Hd3S3terjeycnJElCkXmpoaJD6eh2Qp/1DB4sywIxrOByW3q/UyfHx8YiNjcWWLVtszq9GrH2UQweGgKmEBnU85UEnCLgvGLBj4MTlipAMFRcX45133sHWrVvR2dmJo0eP4sILL5Q9tW/fPtkzfX19KCgoQCgUEqKxrq4uKfcgquPCCy/Evn37hPnWMCJZcp/Ph5GREeTk5KChoQEFBQUwTRNlZWWoq6uTgJuTz4PzCpzJCnu+g2iuSy65RNZ7z549ggLTwWJNpubxeAQ5YFkRcp+8vDxJTGnbS59jqampck6wvWRHR4eUH9DBnDNnjhDwEVlAGDqfW3PIJCcnY2RkROx6IitdLpckDdxutwQyuafoa5C12DRNKbXisyUkJCAhIQGbN2+W15y2pzPIrF8719BBI33d2dro53RQLcvqANDxwe/DhmHUAMgHcCWAjR+87f8D8DoiDuqVAJ60Inf3jmEYqYZh5H5wnXMONl6+99578fWvf12yhc3Nzbjvvvtw1VVXYd68ecjOzsall16KK664Avfeey+uvfZatLW14ZVXXsE111yDm266CSUlJfjWt76Fhx56CGNjY/jJT36C1tZWcQapRBnNY60oISk0yDU5kI4u0ACksGulQmXITCwAiTgahiGbhc4wo3h05Ji14XcwRc46yOTkZNTU1CAuLk4Y5rQj+MF6ncGapQ1StzvSg2rx4sXo6urC9ddfj2effRZ5eXkSuafxwU2sDaQ//OEP4qDz+bjRqBiAKZIPZnc0pl4bUjSaBwcHpWZvuiwaDR6n43G+g9fQzqfOvvG5qHA5+KwFBQUCoeP7nfeuN+l0z6ajolpBAFN9uZhBMwwDaWlpUsdIWAgPccLn/H4/3G43RkdHxbkCIvPY1dUl5BpUhoSDED5DI5UG58jIiDhULKS3LEuiuy0tLSgoKJBnOnnyJJYsWYLu7m7JhN1yyy0oKSnBiRMnsG3bNqxZswYAohI6cC1mYhykpKTgkksuwfj4OPbt2weXyyW1NcyI6OAJa1p0tkRHyXWWZnx8HF1dXdKXtLS0FAMDA2J4cl9Rt5BIg4dSOBzG3r17sWLFCnH6uVcJbWYQR9eJJCYmwufzCUs4Dx3KiEY4kByJLNpaFwFTTNiE2fOQCwaDAq2L5lQBsNUrnmsdnPLrlOVgMCiU/G53pAcuIVXp6eno7e3F+Pi4QNhZgsGgx+joKMrKykS/Es7OdQIgGWzDMCRzTUI87hHuFxJ9hMMRUpDExEQ0NzdLjRDbTtD4tawI7J/teVyuSJ0qHcxo+kifG2cbhhFBw2zevBmjo6PYs2ePnCtO8i+uHSFe/Lyeax0soDHP/RQMBsUpS0lJkVrgQCAg8Dw6WsyiMLpPNMcLL7yAbdu2CTycKBPKPQnRmNlKTU2VNWMm0/n8/F5gijSEcsxreTwefOYznxGW4H379uGHP/yhyIFTv2p9HhcXJ2VCu3fvxurVq9HY2Ij+/n6sXbvWpp8PHz6MuLg4gdH6/X4MDg6K0T5nzhxpGZGXlydB6NHRUQkgZGRkwOVyoby8HEePHkVfXx8yMzPR3t6OnJwcmKaJ4uJi1NfX2/Z3dna2ZHk5zifjFG3oQCWZwPPy8kRGueavv/662BXj4+NISUmRPQZMGbfMyLDVGADJYPp8PpSUlIj+GR0dRXV1Na677jrs27cPTU1Nov80Moy22KlTp2wZ8LS0NIFL0wHIzs5Ga2vrGaUTvAfaVwzE0T5jkJS6My4uDmVlZSgrK7PZfVqPzmScLdjM6xI1oPUy14bnlg44MUAM
TAUCeQ1mp7u6ulBZWYmBgQHExsbir3/9KzweDy6++GKEQiGMjIzg3XfflX0yMjIiaEWeWz09PVLXyP2/fv167N+/H4sXLxbobFtbGwKBAKqqqtDU1ISmpiZp58U6c/Y9JrLobPPFOaHsaR0326H1yAUXXCByPTk5ifr6eixbtgwHDx6U7LlzbeLj49HX14f+/n553bIsVFRUiO3a39+PsrIyW0tI8rcEAgEpm+J8zZkzR5ILAKRFDoM0RCzoM1ajGrhGiYmJomeol3kGUMYZfOjp6ZH7Z0Bm2bJlKC0ttSVhdKKJf5+vbe1MQPH+Z6u7ZlWDahjGHABLAewFkK2czk5EIMBAxHltUR9r/eA1m4NqGMZnAXzW+R2EsFVUVCA9PR2dnZ1YunQp3nnnHezevVsaCy9evBif+cxncOONN0oE9Etf+hJ6enpw0UUXYfv27bjrrrtw9OhR/OpXv8Kll16Ku+66Cx//+McFysA+eM3NzdL+gZBWGvQ0Bqi4+DujHlRCdDxp8NCYoDKm80OlwNo7Kh8amRR8w4g0pPZ4PDh16hQMw0BGRoaw/R45ckSMb5JZOOFdOoNBjDuLq7u6upCQkCBKgwXcl156qU1gmelwCq4urHYqUSfMZ3h4WFjbnHWCPIw/kAkAwJ133okf/ehHQtsfTUaUHJ1X9pT34TS0z6ZAaTjq7+U8MxPN16dzrJ1KMNrfzmfRckbnlAY7AFGGzBIlJCRIpJvQPM2Qyu/r7OwUhc11YCaBfzNjwT6qXq9XslkaUkqIGGFrtbW1CAQCWLFiBXJyctDd3S3w+Pj4eLzzzjuYO3cu5s6di+PHj+PgwYP4yle+Mm309FyKjYcY793j8WDTpk145513ZG6pJ7QDSsWu59ypmLmeDLKwvQlrJ+ncsJcmP8M6PpYAMCq/f/9+rF27VrIWzEBTh+j1IjqC96ozdTqjBcCmk6LVifMZOA+sR+bzE+ZNg84pg/pntMHv1ms13d4cHh6W76eODAaDSExMFIeGhF9ki2ZGhHpVwzgHBgYEMswstl5fYKqxPeWho6PDVqPFWuvm5mZ4vV6Ul5dLfRudqsTERKklLCoqwm233SZBlo0bN9p69znHufQUDQptWMXHx2PTpk04cOCAoBbohJMBXsuvYRiyvvr7nHrOGYAhtHtkZARud4Shk20K2traJHBBp4zBVQYff//73+MTn/iEBJSZTdVBAtYE66b1zGCnp6fbnELeN+eD/8c1uv3225GRkQHLstDV1YV77rnH5oBOF/zj8yYkJKCiogLHjh1DUVERdu/ejYyMDKxfv972WbZ2Y7uFiYkJnDhxQvQjCbmGh4eRl5eH0dFRjI6Oore3V+yAxYsXo6amBlVVVTAMA/PmzUNfXx9qa2uxatUqgdu63W6Ul5ejoaEBQIRISAceOJznJfWwnruznWPRhtYR+qdhGJJlAaZktKurC7W1tRKsIIzSOc9EsLBnLksekpOTsXz5chQVFeGZZ56BaZpSb5mQkGBDZ42Ojkp2lp0PRkZGMDY2Jhk+0zTluzTrKZ08Pgvl1e12S1skOqdpaWkIh8PYsGGD6FK9X2drY0Q7y3RwyIlU0WRvALBkyZIzMk+0ObQzwfkOh8O48sorAQC//vWvhf2cOpHP87e//U304PDwMIqKijA0NCRzMDQ0JL3UqcsXLFiAhoYGZGZm2uCadMoOHjyI+fPnIykpCfv27ZP6Rz5TQUHBjGVSZ974tw7Eno+jo+eQZEkAkJeXJ8GAN954Qxw+HYDRIxQKoaGhQfYy3+v1enH33XcjPj4e//RP/yT9xekU8lwkGSR1g8/nQ05ODgzDQHFxsQ2+ferUKWFNpxwzu8+sKe0E+ieUqdOnTwvHDoMYtO9IsHYuyPr5OKYclElem3XMRUVFs7rOjLEKhmEkAfgdgC9almWjebQiEjUrjWhZ1uOWZa2wLGuFfj0UCmHJkiXo7e3FTTfdhG9/+9u46aabcMstt6C+vh7Z2dm2KPk111yD73znO7KoF1xwATZv3oxdu3Zhw4YNqKqqwt13343x8XHce++9EpGLppCZzWTWlBOckpIiRclcNNbX0GjSm4oCQQYuOgmElNBQ1c4djc/6+no0NDTg1KlT6OvrQ1tbm63hMYlo6FQw6qczQPw+QoxjYmIwZ84cLFq0SCCNbHo/f/58xMXFYXJyEuvWrYuqkKNlSzhfVJQ8ULTDSEVKI5TPS2gQ547RP2DqsKVxPt0m0fc0241EZXE+UJ1wOCxGrT74nAd7tBHt/5zOLP92Xp/1plRAdBR1pJGU/zq7TCXU0dEh0XF+j65nZuaD8kgnmOtCx469Stlsno4NDwzWrlDu2dM3HA7jb3/7m7QuqqurwwsvvCCyceTIEdx3330AzjTA+PvZ1lk/s35tzZo14sjzINLXo9zq+XcGGSiz3GsAJLtMwyklJQVz5861HQZ0Lmlg8cCfmJjAm2++KSzZNHJJYMLvoAFGaGtcXJwYDnFxcTZUg7PGjvBgPieDRRq6zOwCD1PqkOn2O2VtOpg6DQcdOXWuIZ2M5ORkcUCJDqFzzGCebukFQHRyd3e3QC31IPpFr5eGQBuGISULhEIFAgF4vV7RSQxymKYpiAjDiNQlaii21+tFT08PgsGg1H29zN/FaQAAIABJREFU9NJLZ8iTHufSU04DVY9ly5Zh4we1Y3w+sj87r08ndrprcb9znrX+pm5sb2+XvwsLC6XeSme4uM50VF977TU555hFYD0UzyMaUkRqDA4OCkMyZYdywvOWgYVgMIhLLrkEjz32GDIyMhAMBvG5z30O9957LwA7s7Hev9PBy4aGhqTFDPfzkSNHbO85ceIE5s2bByCyL06cOCG/s0aZWX/KE41Hy7KQnp6O4eFhFBcX25yeoaEhJCQkoK2tTeSa9zhnzhwkJCSgo+NM0JnzrHAGuHlvTufnfIfz8zy3MzMzsWbNGlxxxRVYvXo1qqurkZiYKOUITn1NgifCshMTE4X3orq6GuFwGK2trcLCq41t1kqnpKQgJydHkC8pKSmSPae+0PuDc0T7gnMVCoUwPDxs49tgjfqmTZsk4K+dwg879JmmkQB6nzKoxO+rra0V5ICzjRpgZ7rnMzO4d9FFF2F8fFwC2pOTk/jzn/8sZSncE9nZ2RgbG5PANt9L554Olt/vh8vlQmdnp9wzeVLI0J2UlITm5mbk5+fbkH0AbARQMxlar+nfZ+qczkbumTDZsmULrrjiCqSlpcHn8yEhIUGSM5x3np08U3TfZGZBuYZxcXHwer1ITU0VhnHqQDqOIyMjqK+vR0tLi+gPBrb1PqJOZ2stkiFRBwBTreUCgQD6+/tt5R2mGemkcPnll58hz049+WEHbQ19/YSEBOTn5886Ez6jDKphGDGIOKdPWZb1+w9e7jI+gO4ahpELoPuD19sAFKqPF3zw2jnHE088gfXr12Px4sV46qmn4PV6ceWVV8Lv92Pu3LlobW3F6tWrMTAwgKNHj+JrX/saent7kZSUhKuvvhpf+MIXcNVVV+H+++/HggUL4HK5cM0112Dr1q3
YunUrdu7cCWCq9ymj8OxrpB1Q7XRoNi0dcYiLi5PDiAYOi5PJJuZyuUTIGT0GYIvwM+3O9+rv1vdDhZqSkiKZE2ZdWDwdExMjECUAZyjBgYEBdHZ2orGxEbm5uXj//fexZcsWAJAo20wGo+M0CnRED7AbuVQouvibz6sdV21Mx8bG4hOf+AQef/xx2yZ0Xvt8hjagZztcLheqqqrw/vvv2yLFM/0scPZMis7wOB39lStX2iATzKIYRgSO7ff7cerUKRQXF4sRznpHOgYMhPDarAkGIBT8NNTp4JIEivuD8Eit2GhYAJAsErNXnZ2d8Hq9AsHKz8/HyMgIXn/9dezfvx833HAD8vPzceLECXzzm9/EQw89JE6vPoTP5fxHm9NQKIRly5bBNE28+uqrUt/FA9hJLKObvus9rweNC8IdBwcHkZSUhKSkJJSUlCAQCEh2mvuWQS9+Z0pKCt577z2sXbvWFgjQcGNmwHk/hKaymTrZHbXT7Mw48Tm0UxkbGyusonydMsLnm27wuueS++mcXML2GDwbHBxEfX29OJwMTLHuhsYiST1Yl6rhcO3t7WKMkSzC5XIJZJnyGA6HhY2UsHdei4PrQ3TLe++9hxUrVkhNLFsw0Kl55JFH8PWvf12YqV999VX893//t61Zu64fO9c4m26wLAubNm1Cd3c3ampqxGgdHh4WVngNh3OeWXo463epc6lrJyYm0NbWhri4OCQkJCArK0uM2d7eXtEb/C7WZP/ud7/DtddeK44C+RZYH6Wdp7GxMZSUlOD48eNCHMR74/NSZ33+859Henq6nFH33HOPtNbSc0sZjxZEoeHEZ0xISBDZIyKls7MT8+fPh2VZ2LdvH7xeL3w+HyzLwu7du+Xzp0+fFj02MjICn88nZCsk2wkGg5g3bx4aGhqQn58vPXhbW1vR2tqKVatWSRa/o6MD+fn5ops539NBRPXf0TJO1AN87VxGva5znOmgjk9JSUFKSgrmfEBSQ3kLBAJ48803bcEzAIKUsCwL+fn5aG5uljIr1sHqQG1ycjJ8Ph+ysrJw+vRp5OTkiHFOBAHrzDlfPp/PZmvxHsLhsHwXg1Rutxu///3v4XK58PTTT59xzpyPk+/UkxoerNeFg+gmvZ7V1dUCl+c+8/v9EiziOeasrTUMA3/84x9RUVGBsbExtLS0SDu8uro6W3CK51JWVpYgAbq6utDR0YHKykohXvL7/ejo6BBmarc70rKMwaeCggLU19ejsLAQLS0tEtjRARRyZcxkTGdLAlOyqs84Pbec79muFxCRzVWrVtnW6eTJkxgYGEBXVxfC4TBKS0tlfxHu/pnPfAYAsGnTJgCRc6SjowMLFiwQtIjuYUqkhWbwJzKDz8t2Y7omXKObaDczEOvxeFBbWysswy6XS874LVu22PS1fm4d2PqohkYC8Rw4fPgwFi9ePKvrzITF1wDwMwA1lmV9T/3XnwF8CsB/fPDzT+r1zxuG8Qwi5EiD1gzrTz//+c/jU5/6FMbGxtDW1oY9e/bgqaeeQllZGZ555hkAkUmsqKjA5s2bReE8/fTTeP3119HX14enn35aouSWZWHu3Lmora3Ff/7nf6KsrAwAhG0rIyMDLS0tiImJQW9vLwYGBsSA5QTrDJWOOBNyxmJ9Rnx7enrEEIuNjZUeY7z3D+b0DPIKDWnRhiXrOrSS3LRpk0SqaWTp4fxbf5aCrgkZCHtpaWk5qxPgdJx1VFBvLB1x0sYyC7f1Z3m/OtvKbCB7pmnnVRtUs91Q/F5nhGe2w+VySXDCaXyezZHSCnW6oaP9/Kd7CgJTNb26WbhlReAi4+PjqK2txYIFC+RabW1tKCkpESfEsiycPHlS6k3IcEcFxiglZZ3w997eXmEypvzScbAsC16vVyB4/B7SsR8/fhzV1dVISEiQ5tSpqakYGRnBk08+iWuvvRZlZWU4ceIE7rjjDpimiUceecRmqJxtnMtp0sY9+5tF+0w0aKQzoEHHkVF5AGKoZmVlITk5GYmJiejo6EAgEJBDifNKQpnExES8+OKL2LRpEwzDEHQH15Z1qswg0airqamxEcxQf2hYDV/j92rjhRFVrjWjstRBd911F370ox9NO4+U0+kMB65XtPUwDEPq73TWtLm5GaWlpcJIyGegIUriNr0X+B72JGV2ngYpa92JNKCDw7XTAUkGY4Apwjg6/3v37hX4WlpaGk6dOoWlS5dK7feDDz6IG264QYy0z372s4iPj8fOnTvx1ltvzUhPTRcMcc69YRjivPj9fhw9elQMVs1A6ryOM5Do1NFaH2pHLhQKYXBwEIODg8jIyEB8fDzy8/Phdrtx/PhxOWu5TgMDA3j88cexY8cOyUbrYI/LFWHOZGBoeHgY+fn5OHr0KIqKipCUlCQObWxsLD796U8jLS1NHO8777wTIyMjNni3nhv+dGZd6Lzr9wJAQUGBBBNoQL/66qsS7V+yZInoLwASAOG+HBoaQm5urjBP9/f3S5axuroax44dw9y5c+FyRRg9Gxoa4Pf7kZycjP3792PevHk4ceIEFi1ahIaGBhQVFcn99/b2Civ/bIYzIKHXl9BS7lOdKfyoBve/x+PBpZdeCpfLhZqaGiF/dLvduO222yQY9Pbbb4shTl2ln2FiYgI9PT3SI5kw4aKiIkEMEQ3CDKDWuxrhNjg4KJkxrQuZPDhXIPRcQ9twWhajEcw5v0vzLPCeSTrI4C/3JfeTdgRcLheOHz+Ouro6FBcXIy8vT1jOOzs7kZmZKfNKNn92jgAiqMBgMCjn+9GjR3HVVVdJH/ChoSH5XjLvEy7d2NiI5ORk1NXVwefzIS0tTdrPlXzQC9flipC9sSQhWqnWdOgc55jOfptOh57PulL/0W/g3J08eVJs1KSkJLjdbiFR+vnPf25D77A8j/pq0aJFkhllvanb7baRNbHkhckvBqKJNnQGnYnqO3HihNhgrHnPycnBvHnz5KzX8+HUkefSAzOdw+kCos7uAzMZM8mgrgNwI4DDhmEc+uC1ryLimP7GMIxbADQB+KcP/u9/EWkxcwqRNjM3z/Rm1q1bB5fLJQQfW7ZswX333YeLLroIP/rRjwSyVlFRIY2B7733XrhcLnz3u99Feno6br31VhhGpI/bY489JhvqYx/7GGpqahAIBISohQcWDRlmhRYsWIAjR47IYiQnJ9v6D5qmKQcVAHR3d9uMeo/HI6QJn//85/Gzn/1M3kuhIiyPvxPyy9qrxMREJCUlCTEDFa3T2JltpIjQaEJtKMjBYBCXXXYZ3nrrrTMcUSA6E21paSlaW1sBwAah0ffFzcRIOj/P2lkak3wvnfd33nkH//AP/yAK2xkhO59oD43QmbKRnm1UVVXhvffes0Xz+B3RBo3qs923zhzon4z2O6PizHZSxkOhEHw+H7xeL+rq6uRAcgYxjh8/LvdNiKphGNKUm8yWhhFhqNP9dScnJ5GRkWGDpZ8+fRoZGRkwTVP6hFpWpCWBy+VCU1MTvF6vGChAxMAbGBiQNis/+9nP4PP58OUvf1kyEQ8//DBqamrw5JNPfmj4CW
UzKysLPp8P+/bts/X907W3HHpNox2INDx4QExOTkqAKi8vD3l5eQiHw2hubhbZ4z4hLDIhIQEnTpzAwoULRc5JSkaji4Qr4XBYdEJfX5+8R+8NfTDQQdBZVe5DksIlJSXB6/Xi9ttvFznW5QLOcTb51Xt+OiOBkF7Oq2EYooMaGxulPyf1g2VZkvGgrGtoeX19PYLBIFJTU4U0gjBSZkt00CccDkuAZHJyEjk5OZJFGBkZEcMwJiZGZLmkpAQjIyOoqanBsmXLkJubi7a2NgwMDGDevHkYGhrCk08+KTwILS0tCIfDqKiowNKlS/Hoo48KSdZ0g/fmzOjqoXWyZVlIS0vDhRdeiOHhYRw6dEgMdx244Do4s8W8FmVOGz58fm0IW1aEydqyIjDNlJQUYSpua2uz9fgOBoN4+eWXsXHjRjnj+M80TeGWIFqAbM4HDhzAsmXLEBsbi3/8x3/E8uXLYRgGXnnlFTzxxBO2emzAXudEY905T3rw+3WgeGRkBBdddBH+8pe/CMKD7OIXXnghLMsS54j7d2BgAPn5+fD7/cjPzxf4Ym9vr80ZZn15c3MzCgsLEQwGhRgqISEBKSkpkv2ur69HWVmZfD/nnS0/PsxwOkHO4JEOiEdzGvg56o+Zktbws2T5DQQCSEtLg2maUrYQGxuLkpISxMTEoLa2VvQkM+XOQBLlCQCampoAQHg12M4uNjZWGGPZz5prHR8fL4g2p0N8Nr0300HdFw6H5V51MCja/PB3n8+Hvr4+GwqA+psOqsfjkQCf1uscgUAAubm5iI2NRUVFBUKhkLR84973+/2YM2eO9BEm5Hl8fFzIv2JjY7Fy5Uo0NTVJdtflivQdP3nypOjYmJgY5OXloaOjQxIeHR0dWLRoEebPn499+/ahoaFBmKhJDpqdnX2GU+o8f89n6CAsYK9h19BcfSbM9Dsty5J2R93d3aisrJTWejwnnn/+eXE6dcCM9sXx48dhGIbY4fPmzUNsbKzscUJ32Y2B5xxliKhMJ+qHn2FSa3h4GB/72MdsgVd9P04d8GGCVJRRrXd9Pp8tUcGgSGlp6ayuPRMW37cBTHf3W6K83wLwuVndxQfj9OnTUvsSCoXw6KOP4sYbb8TPf/5zAFNw1eHhYbz77rtYvnw5wuEwFi9ejH/+539GXV0d+vv7kZ6eLtF0DV0htIQ9EvXCM+oxMTGBv/3tb/K3jtBr5ys/P1+UiXMzMLsZDAbxm9/8RlgqeZiyToNkULodhnNox+J8MobaSCGkGQBaWlpQXl6OY8eO4eMf/zgmJiaEll8Pp8GuBZkOPu9NQ4w4J3ROmV3iZ+gAOWHV/Pyf/vQnbNu2DRdddJFEWM936O9g5PjDDpcrwsh46tQpueezvXc6o10PpwHJmsnm5mZUVlaekWXRip11iboNhGVZOHXqFCorK0Uh0vgcGRmB1+uV2i9+LwvwGVUm3JFsvjqjSMeJUN6xsTGpW2EPMWZp6eCSTKiqqkoi3sym9vf34+GHH8Ydd9yB+Ph4HDp0CJWVlbjtttvw2GOP2XomRpvjc60XjYWYmBisW7cOhw8fFqOFz6wJZ/RcR4sw0qjj2lIZh0IhacOUkZGBkpISBINBcVx0nebo6ChqamowPDyMTZs2CdSS8kCWZjLMkvgAiPRam5iYQG5uri04pA8FOmK8dxrZbrcbd955p8DcDMNAU1MTfv3rX0u2yBl04T2fS5anc04ZmAKm+iHyYGbZQnt7O7Kzs6XOm+yPrFVk9FnrGDJYA5EMOJuyU/doSDCp+5k54HqQOTknJ0eyfDTItWHe0tKC3NxcxMXFISMjQ1iduTYPPvgg7r77bmFjDgaD+Pa3v439+/fjj3/8Y1TEC9f6bKiO6Yw3y7KQlJSE9evX4/Tp0zh48KBkNQl71oY+gKgOrP5ufZ7pzBsNeMLXMjMz4Xa7paWE3+9HT0+P9PTu6emR1hXUO7xObm4uent7bYHe0tJSXHbZZbj00ksRDkdIQr71rW9J3SvnjQamDg46M8LaCNXv0/JrmiYKCwuRnZ2NT37yk/jVr36F/Px8We+//vWvWLdundgDvG5xcbEwzRNC/t5772HhwoUSrCYRUltbG0ZHR1FUVISamhoJDsXExKC4uBh1dXVYs2YN9u3bJ2yy7e3twoI+Pj4ue8S5H51Qx/Md+vx1BoN1sJR7Tjt1GqHhhF3y5+joKOLi4lBRUYHu7m5cf/31EqAfGxsTtA/bHPEzb7zxhm1fMJuvg24AhJdjbGxM+szyTOH1aPPRztJ6m+fauRBk0w1toOv9pPXwTAaZzDmo0/Xe7+/vF72lg0z8OxQKobS0FKWlpXL+k3wHgARVWIZmmqYQzFGWTTPSDo2yfPDgQbjdblx22WWor6+X+mnTjDBPt7a22myTBQsWoKenR4jkwuEIkyyJJDWiUD8D50v/PlvZdupJbRdoxJ+WqWiyzPl33oMOQjH4f/vtt2NiYgIpKSkYGhpCZWUlMjIypBf96dOnJRFA+5fnVX19PYCITiWkn9079D2RQV4nc1gylJycLA6xaZr49re/jYKCAjz11FPTlgicj0Pq1Ds6g6vnnQFmPXeWZUnAeTZjViy+/5fjZz/7mdRFpaam4p133sH+/fvx/e9/H/fcc49EHC3Lwr/+67/igQcekLq2559/Ht///vfxyCOP4Ac/+AGGh4cxNjaGpqYmlJaWoqurS6LhZLAtLy+Xhs+63i4xMdHW4xOALSICQGrAdNSNhyijZikpKSguLhZHNCsry1Z3qo286ZzTmQ4d+XQabxwU9OzsbHR0dMh9aiIisto5DW7npqfipKHM79L1rjpKoxWRFmJ94DoVPCESFRUVeOutt857TjRlOTfwR+GgAhGK8Pr6+nNm97RRNBMnlXNGSOO8efPOyE5pZUEjlSyfhmFI71E6BTTOamtr5X5Y+2gYkSxWKBQSSnKyOrPGh7JAOnrCgmnUT05OSnNqEqLw0GcjbSASGHG73Thx4gSqqqoEknXkyBEsWLAAIyMjeOSRR/Dxj38cCxYsQH19PUzTxFe+8hU88sgjouB1JkU7ZtGcSA6nE7tw4UKYZoSgie87ffq0Ta4p69oQ1oavNo6pC7gPLCvCMJqSkgKPxyPRw9raWrlmKBRp9dHY2Ijf/e53uPrqq22HEPcQ61B4OCQkJKCxsRFJSUnIyMiwEd/ws4yE8xB2u9248MILkZWVhcWLF8vhcscddwgkTreZce4T5951GrD8nJ4f/p9lWUL0xVpqnVU0DEN65Q0NDSEtLQ2BQAAej8dG80+Ui2EYtgAhI9v8TgYbGdRghpUZrNHRUVu/ydOnT0sQkS3DwuEpdu6uri6kpqYKlItQtvT0dAwNDaG1tRXLli3D2NgYHnjgASQmJmL16tVYt24dampqkJubi4cffhi/+c1vcOTIEdF3Tr0aTU71HE7n/BtGBDK2YcMG7N+/X0iIWMOmSVZ0jbPT6NDr7DxDnOvPM5V9KdPT08VZb2xsxAsvvIALLrgAy5cvF/kcHR2V54iNjUVycjL6+/vx0EMPyfN/9rOfFcgw9
4l+Vm3EOuVQyyLnsb+/X5wYfa4ZhmHr1Tx37lyBODY0NCAnJwdvv/22lNmQzZztdVyuSD9C1gUePXoUoVAI27dvR2trq0DAS0pK0NDQIEgYt9uNwsJCNDQ0IC4uDjt37kRKSgoCgQD6+vowf/58W9ajs7NTekNrg17Dtj+qoeXLGZzmT54ZXAcO7QDoz5umiaamJmljVFVVBcuKlBY8/PDDNjnjZxMSErBt2zb5DsOIkPLs2rVLYMrAmX1y6dAx66edUKLUNAyevzsh43o/TDdPnAsnsk3/PZukgg6+8Tu4D/S663ni91GeJycnUVVVJT02T548KXrWsiKoJgaG09LSpASNWbhwOCwyblkRpn/TNLFx40bs3bsXXV1dWLp0KUKhEObOnSsJJc5ZWloaent74ff7UVVVhfj4eLS2tsp8cz3b2tpsreic66/XVO/Zc43Z7gV9RjnXintM24ycr0WLFglDMu0rlgqwOwGvv2fPHpkH+hnMjlIf89wJBoPo7u4W5BoDtyxT4XrzbKM+4fnucrnwyiuv4JZbbvnI9IJOhNBmiWaDAlNrlZiYKHLGwTNoNuPvxkH98Y9/jEOHDuGXv/wl/H4/wuFIfef3vvc9iTyUlpbi05/+NAzDwFe+8hV0dHSgo6MDN9xwA1577TUkJCSgsrISmZmZKC8vh2VFsPJkkGPWR9P5a+be1NRU9PT0SN0cHQUqSF0bwV5EpNEnPBmIzoalD//zgadGG/oQ1odDNMHk/7MGleyDsbGxaG1tRXZ2Nqqrq22KL9rhD0wpycrKSjQ0NNggkvy80xl3wqsmJycxNjZmg5Lxffo1ft9MIJ76Xp2HxnRG9/mOmUb2nAfOud6rlT0DI1p2OMc8cNxut5AlsXcpoTuEM/H9zF4RQs/30xhiEIYOkGFE2gKQuU/vGSpUKh0e/pYVyZ66XC60t7eLEW+apkCDgAhhQFVVFfr6+tDR0YHCwkIcPnwYy5cvx9DQEJ555hl4PB584xvfwPj4OFpaWnD//ffj+PHjePbZZwHYWTvPNr/nimJXV1fD7XbbmHU1hE1HXPWe04aulnun8TQ4OChw5vT0dMloNzU1yXeFQhHCJZYg0IBjBppwOBq9zMqcPHlSDDGd3SC8l7Wud911F2JjYzE+Pg6Px4NvfvObQojB+6Rzeq6yAR3pnm7unVkOygaJP3goh0IhaYGljaOOjg5bZpg9RvXctre3IxQKSVZUOybM6FM3kYyCfaR11oOEXF6vF+FwWEhAwuGw1HfycKYempiYQFdXF/x+PyoqKqQOsa+vD8XFxejv78eLL76IsbExXHHFFRgeHsbhw4exadMmeL1evPnmm7ZzaCZjOufUaRwvW7YMbrcb7777rk0HRwvq6GwF101fS8u30+hm9pMN712uSJ0l0SXd3d04cOAAenp6sHXrVoRCISFcI1HWtddei4yMDMTFxeGuu+4SIhJnsEXDd2eiU5m9BiD1yfo5+TmSCz799NNIS0tDYWEh9u7di5KSEnR2dkpNKO2DsbEx2UNk56aeNk0Tq1atkowSERMMZDJoZ5om2traRJYTExNRUFCAU6dO4fLLL8drr70mUHfON6G/WudpPcQzUgfMZrKXZzvOpUsBOxM/jXKigZw9IaeDC0dzTtjiRD+3RnHxuxkI0Oez7rvutOs4b6yZpBEOTLXV0VlKnR2Nhno4X/vOeT+WZQnDOO8nNTVVnCS+R/+Mi4tDe3s7BgcHcejQIQkYsaf02NgYhoeHkZWVhZGREQCRrCAd4VAohNTUVAm6jY+PY+vWrdITOTs7G6Ojo/D5fJLcobxlZGTA7/djzZo1sCwLL7/8MlJSUuB2u4WPgeUZpmmiubkZxcXFZ7XtOPf6jNXz8389KJ86iMEAfSgUkhZM1dXVuP/++88I6oRCIaxYsQIulwuLFi2CaUZ4anbv3i0MzbSLaSOTh4EJg7GxMemVrFt7kUCR88Nz7+abb8bevXvPCO7Ndugzz7kGWsaj2dwsSdJDO9kzHR8dbdOHHKtWrcJjjz2G3t5emGakJ+CuXbvwi1/8AhMTE8J65fP58D//8z+IjY1FXl4efve73yE5ORlvv/02AGDjxo3wer14//33BbbHCDwzBFT0VGbEfRtGJKvkXPCEhASpcXC7Iz3irrrqKlx22WW4+OKLsW7dOgBTyvGjGNEUnz4YtIA4ayDPdg+M9pBVcWJiAsnJyZLJmamxBEyR9VBBUaC1EufmYxSPCkpHIPlezjkNgrvvvhsAzjiEog1ej9+nI3YfZmin2wlHtiwLK1asONvHbRE6fi7a0JFRvocbWh+m+gBz/mSPRq/XK/BPZsVDoRCampowOjoqxlUgEAAAgeN2d3eLEk5PT4fLFaGtD4VC8Hg8qKyslFqY0dFRyQR4PB6JpPJ+2tvbJfhDMgBtWPFgSk1NlSJ+MqUSdTA2NoaHH34Yv/3tbwFEWO6KiorwhS98QaBIM3H6zzUofxs3bkRlZaXMJzPQeu2ch6PTGKH8UZZdLpcQSQQCAWFCjImJkV7PjOKHw2H84he/kNo8wzDkJ6GrJLBITU2V9lP79++XNgmhUEgczSVLluBrX/savvzlL8vaPPPMM7j++utx6tQptLe3256HBsq55pPPdbbhdDBcrgjZFomeNLqB88j7oCNZV1cn8qgdd94zyygoxy5XhLmXUHIgEjghFJUkKOFwWBwp6n8AYmxxLUjuUVdXJ06Ex+OR2jfLihC7VFdXyz5PTU1FU1MTMjMzkZycjN27d+P++++XNgLd3d1YvHgxHnjgARQUFMhzn2tMF3Sc7pwIhUJYunQpFi5cKP0dqeMB2AxcDu0Ecv2cmSatY50GEtdsYmICIyMjSE9PR3l5OWJiYvD4449jaGgIpmmivLwct912Gz73uc+hubkZn/70p3EFTozsAAAgAElEQVTjjTeio6PjDFidU/c5kQJavizLkjZMrJHT8+fUwbz+M888A6/Xi2XLlmHu3Ln45Cc/iba2NuTn5wOA9H8+ffo0Tp8+LUY/SZJoS2zevFmgbQcPHkQgEMCWLVsk6M39yr6HfMb09HS0tLRg/fr1eOONN7Bx40ZBd+mzp6OjI+q+03o1mh3wUQXEZzp0MIOkiEuXLkU4HBbIZ25uLvbv3z8jh5fX1D3RtQ3BoRMJJPPTNoaWXdotbrcbXq9XIOrOOeN+0XpqpgHn2Q6umT7X5s+fb+NuYDkD503fL+9reHgYR48elZaC/f39gvwYHx9HZmamLShA8sRwONKmkU7s2NgYioqKhLTU6/UKC29ycrLUpDMLODQ0hDVr1sj6V1ZWIi8vDyMjI9i2bRtycnLQ2Nhoe87R0VHb/DrHdPKuzyGnDv0o5V0nBU6dOgXTNIXcMT4+HmvXrpXnoJOvP+u8H5btsD6UOkqflXrP0xfSAWQAonupf5lUY7Jt/vz5H8o5ne5sONvZr18vKys7431c89mMvxsHlYcPJ5wZVNauAZGmug899BB6e3uxa9cu9Pb24ujRo3j00UeRnJyMhoYG+Hw+jIyMIDc3F5ZlSbTWMCKwx8HB
QTHEtEDpQ5GZRd00mr25WD8yU8V6viOacDnhKRQAJ+nP2TZoamoqAIgCZwaChsFs+hTpKDXnjhuF96UFUsNmTNOUukidIYzmaJ5L4ej3fFQZaq6tXgfnmuiI3nRDPy/vL9rgfWvlTfnXc8L3auiJzkoyw0bFyswBFUtvb6/072SGaWxsTKLLvA4AgZXqA7K3t1cCMZz3rq4u+WxZWZnU37E/WigUkhpWIAJpr66utj37/PnzAUQMgtraWnn+wcFBNDQ0iHy1tLQgISEBd9xxhy0wM92YTh6iRb9DoQhpWGVlJQzDEDip05hnZJJronUIB/WZRmKQnIPzz6gzjfjY2FhkZWXh0KFDUQ8qHlaE+AAQp57R2FAo0vZhw4YNuPzyy4X85+abb8b111+PV155Rfa5U36d2eDp5nMmw/k+khXp7Ar1BR1O3RydDb1J3KEj2KFQCI2NjTh9+rQYa4RVO1EaXAvqbK4bs6pARI9lZWVJZks/vw4euFwuWTcgAllatWoVwuEIIRK/KzU1FQ0NDQAisn769Gn88pe/xMmTJwEAfX19aG9vx8033yxst9ONaIY4x0zq6ZOTkzF//nyUlZWJ4e5yTcH++R3AVP9U4MwzREfOdSBRw8u8Xi9SUlLg9/tx4MABaUpvGAbKy8vxxz/+EaOjoxLQ/etf/2ojMtROJD+nM7vaEXMG9Ig+CIfDUjLDe6Zc0a5wZmgLCwuRlZWFtLQ0kRX2cwYgCAfqS8OIEO6w5yT3HHU9jUTDMKQ7gA7QakfI7XYjKSkJq1evxltvvYXu7m709fUhLS0NHo9H6iJ5zzMZ02W9na85nfyPemgZIqxx0aJFsKwILPzQoUNnrPl0w7Isqf9zBrSdssD/I9GSXmvtcFCmGGDi/9EGBaZQRc7n+b8YvL62J0KhkI0bhLpOl0bwdV6DsF7Ke3p6ukB4+VogELARXDKgxfOdbbgqKiokacS5yMvLw8DAgARELcsSYk89R1lZWRgeHhaIfGNjo9S+c/T19Qnrsg5AnM/Q59lHvVYMBkxMTMDn8wlSikF4MvvO9HsZaGZQFJjScVr+gDPbR2kkp97TOlBHbhTe+0wG9Z0OsmvbYzbJHup+HcxhwGQ24+/GQV2yZIlNwS9YsADLly+3bTxi2V0uF3bu3IkvfvGLMAwDlZWV6O/vR21tLfx+P/Ly8rB//36B0BmGYWPH4qKyljQUCgkjKgWP2dK4uDjEx8cLuUF8fDyysrI+VGYumhBTOLSwaqVwtizibO6FDnpTU5NQ/bMJM2BXyOcahIHyfvVh4NxgPGAIi9awBqfBwL/Zx+zWW2+d9vl5Dc7DTA+8mQ6nUUSnGZiKFK9YseKsc3Yuo5/3rmFFQ0NDttd0tJTv4SDDKTB12DCSz1FTUyO0/jprNDo6itbWVtTV1Ql0l+04mD2Ni4sTh4FRWJfLhdLSUrhcUxDhvLw8CQpRtrivmOWyLAtlZWVnrJFhRMgV2COUBmd6ejqCwSC+853v4Mknn4RhGGhoaMDQ0BC+9KUvSY9TZxRVK+1oY7pInmVF2FFXr14t1yG0RgcIorH+6qCK1lvcxzra6ff7Bc7IbCqN3GPHjqG5uVkyrzz42KeWrTjYg2758uU4efIkYmJi8K1vfQu33norNm7ciLvuugtf+MIX8MUvflHumz91xlTP0bn2j2GcGzao553roA9WpyPE/cHDmvoiNzcXwWAQnZ2dyMnJkftzuVxC6EHZM80Ie7ppmtL+ye12S/ZQE3+x7YdlWSLLukcnA5SWZaG9vV1I7QzDQG1trayJZUWYN3lfFRUVKCkpARDRjQMDA0IENTExgT/84Q945JFHBM594sQJDA4O4mtf+xq8Xq/IRzSn4lzzPN3/U2fk5ubiwgsvlGcnKzhLPpxrrI19Xst5bukzioFJn8+Huro6eDwejI+PC0ywvLwcL730Eq677jocOXIEN910E5599lmRBWeQT8uh83f9GjMXiYmJNp4KwhoBSOCG54rznBgcHMTKlStFtp599llkZGTAsiIMvrm5uUKyw9potteiM7pgwQIhuDl58iQSExNx6aWX4sSJExgaGkJvby88Hg/a2tps5IxpaWkYGBiAaZrYvHkzkpKSEAwGMTg4iNHRUaSlpUkmNRwOo7W1VRBhs3UqtSw5ZetsxvW55Gy60dTUZAugsv7UMCJlIo2NjXJ9Dq2Xnd959OhRuWfdMkhfQ+tg53Np51PbeUePHpXXKRca2cXxUdoV0QYDdU47qquryxao4fxpR4y6nM4l5YVBQCKOWOfPgF5TUxMmJyexfPlyDA4OIhAISGa0qqoKp06dgt/vF92al5cneoXBrtzcXPT09Ej/YK5HU1MTFixYgKysLLz44ouYO3cukpOTxfbgM/b29uLdd9/FyZMnbYGz2QztqHEdnTa1fu9sM3lsF0PUYWxsLJYtWwaXy4W8vDwJtswEQWlZkZ7KPNP1/UezYXT3Cx1E4TNoHhl+jnB1rcOdAR1gSp4YhHXO/fkGrmgvanmm3M1m/N04qJZlSf0XNyCp6+nEsLUCEHnwkpIScajWrVuHmJgY5OTkYM6cOfiXf/kXrFq1SiIJzBiyzoi9gtatWwfLsqTtio6sxcfHi2HpdrvFKJxps2EO7bDp6Af/5ogGWdTZhvMZzusRtkTH3OVy2VgCZ+Pg6Uitzl7w72iHBp9Ff5c2UJ3RTv2T13I+nz50P2zkTBvW+nf9zM5DSxvc0a7HZ4u22fVrPBDj4+OFsY0GM59P/9TKkBk0w4jUPBJKrY2L2NhY5ObmIj4+XgxiyhxhPaOjo8LeNzw8LPAfwzAEAhwTEyN9OEkbHxMTg8LCQiQnJ6O3t1f2ommaOHbsmByW8fHx0leMg89oWRbKy8sRHx8vDHX19fUCEa6vr8dPf/pTYQnt7OzE8uXLccMNN9hIjGYiB+f6f9M0sXr1apSWlgrM37lu0Yx3zrUTPcD/oyHPCHdnZyfa29sRCASQl5eHyspK5Ofn49VXX8WuXbtE5zAQQMc4OTkZmZmZWLRoEZ544gk8//zz+Pd//3e89dZb+OEPf4gbb7wR/f39Z0CPuJY6yKL/TYdY0LJ3rsNdZ3uoB5wHKA1XGosul0vaFpCefmJiQhiMNQOgZgKenJwU6v3U1FSEwxFmZE0ORIeWmRutowi5io+PR0dHh9wnewTSGElPT0dDQ4Ps9cTERCF70TKTkJCAhQsXiqNE9EBqaioMIwIb/973vifOEwmyduzYgf/3//6fGJ0zkd+zZdW0TuSwrEgd3/Lly2UOkpKSMDIyIo6qNmq41jqzHe26zqyTy+USQpxbb70VP//5z7Fjxw7cdNNN2LFjBx599FG5b57t0Rzx6fQl60D/f+a+PD7K8tr/+86SZSaTyUz2fYEkEJIQ2YRAULQVtCBS1IsViwhitVbr0tZbrKCIVVxusVe9tb0ut63XLral/bSlFRRBBASEkIUEyL5OtsmsmZlk5v39MfecPPNmJgRL+/N8PnyAZOZdnuc8Zz/fA4BtBgDsQCYkJCAzMzPEVrBYLBP0itgiQXJt//7
9zEderxc5OTncDkGtEYRQTAHt4uJizry3t7dDrVZj+fLlqKurg06nQ0ZGBsxmM4xGY0jJKJVFlpWV8TNdffXVjLA+a9Ys9PT0hMxSlySJe9Db2trgcDgu2dhWrqdSfoWTZ+EMaJHCPQPtEbU3EEI2AfgpnwMIRXhVPpNoRJOsEm0mkTeVzyPOEaW+Y9p7qmIId+Yudw9vOBLlI/EenUEKgNCzORwOBvaiz4nPSGec1oqmVVDCxel0QqvVoquri4PYWVlZ6O7uxsDAABITE+FwOHDFFVdAo9GwHSJJErKzsxEXFxdS2qtSBduAFixYwO+iVgf7SwmVeWhoCNnZ2SG92y0tLfxO1ELkcDhw8OBBnDhx4h9eU6UNqtR5SvwKkcLxMqHwarVaBtS78cYboVarYTQauWIm0nWU96KqINFOiOQXGI1G3gOSG2KGk/wTrVbLLS5UpREuwEIBHqUDH07vfF6/QwSKpGtUVFRMqV1PpC+MgyqWK9EcH3ET8vLyuAwQCC7y6tWr8aUvfQnAuHJ69tln2eCluTsUYaWs1KlTp9gwkiQJVVVVKC4u5qgDjYkhgykmJobHbQAT0epEog0QHdFwjqcoRJUH4h9ROOFIzPwNDQ1BkiRG9wTAERpqep/s/krFBSDEEBAzvqRA6HdieR8JCZ/Px0EI2n9S4ipVEO2NjDHl4RH38B9ZG6WDq3QEJyP6bnx8fNjfkxKJVCIhCg0yoK1WK4O5ED+JayYa9eT4khNitVpZoJFwIHRJuofJZIJer0daWhqSk5M5EkqZWKvViqamJgwPD8Pj8cBoNHJfGRn+FJAghVdeXg6Px8OjFJKTkxEXF4e2tjbmD61Wi/nz509YA6URUFBQgBkzZgAIyoK2tjbY7XYYjUYePUGZYJfLBbVaje3bt4eMU/lHAjri/qenp2PhwoUMtEZZNxE9UjwT4tgMWiNREdHfItiHw+FAX18fZwWNRiNmzJiB1tZWOJ1OPk9arRY6nQ4xMTH41re+hfvvvx8bNmxAX18fvvWtb+G2227D66+/jjNnzkCWx8te6dnEcxKuP22ytaPPT/W8iZ/x+XxobGwMCaio1eNQ9eJ9KXhIJc/x8fEoKysLAeNoa2tjh4AcHJoB2t/fz/xEWXzRAaHeU5r3B4B7Asn5p9EoPT09iImJ4ZIuQlGWpGD/a3Fxccj7UiCKelzLysr4rPT19eH8+fMwmUyQZRm7du3CCy+8gI8++ghqtRqdnZ3o6enB1q1bsW7duinJnsnkNK1NONJqtbjyyitRWVkJIGgAUTWMiLIr6mPxuuEMGjHQmJGRgS1btuCPf/wjVq1ahf/8z//EAw88ENIqIH4v0vOL70m9pbSPhCyek5OD8vJy7vmNiYlBe3s7/vznP6OmpgbAuBOrNErFjIdGo8Ef/vAHeDwe1jXkgHo8HiQlJYU4CsogTCAQQEtLC7xeL/Ly8tDW1sagUFqtFqWlpbBarVwWGQgEkJaWBoPBEKJ/ent70dvbi4yMDOzZswdXX3019Ho9Ojs7Q+5JIDanT5/GyZMn/+HArLjuYuBNGVgSg56i3CO9Tj+z2+1c1puQkACXy8U6UqPRYPfu3ROuLepf8RlkebySQZxHLz6feBaonFpcExGMRrS9tFotPvjgA0b7Fp9Hed1/Fon7SvJe/BnNjgWCTj8lWYhH6Q8F5gCgp6cHOp2OganEKqBz586xnCJ909vbi6GhIZw6dQoVFRWIiYnhCkSy6Ww2G1pbW/n/KpUK2dnZ3DZGazYyMgKr1cpBgIqKCsTFxaGvr49lTVZWFstqOgtOp5Pbii4XPxMpHVbxb5Gnae1F3qI+6r6+Puj1evh8Pnzta1/jqhyaaABMHGko3kPkKaVNK35GdBzp/xS0JTtStCVEu/sb3/gGgPEAuUqlmuCTTCXL+48S2YnAOF+Mjo5OCExdjL4wDiptWCAQwIoVK9Dd3Y3u7m7ExMTA4/Hg3XffDQE9IManOUJAMK1cUVGBPXv2hBgMIoQ2bRKhaIqlFKtWrcKMGTPYSRUZhhwNioQqs4JkiIqNwEph+88QdmIUUSlUxXsDQUVKUTMykgHg2LFjzORkhIupf1pvur54MKiXSzzQJAxIcdP6kGIhRUW/p0NDjpa4Tjt27OD+CDrM9M5iVHUqJCpEcW9EZRsu4hTpWqIyLykpifjZyZ4vnJFHaNTkcFEGSIxik7NK9ycAGYfDAZPJFFJGRp+hkki6D/UeJyQkICkpiTNJYqbb7/ejs7MT58+fh9vthtvtRlFREVSqIEolobWpVKoQBE4qiSQwJbVajZSUFB7boXz/cAEcyrSSUWKxWNhQ3LlzJ/72t78BCGbUzp07h+3bt+MrX/lK2OuJFClYQHugJL/fj6KiIlRVVUGlGu8lFUcT0P0o2BPJMaR3EyOfIlLsxx9/zBHyrKwsHDp0CL/85S/ZyFi2bBk2bdoEnU6H3bt3Y8uWLXj00Ue5TFD5DsoqBPGMRnrfcGtH7zFVwDL6m3r2CclQlCeiXJRlmdeOysnEoeR0zdbWVuZNh8MBp9MJr9eLrq4udHV1ARjP3FCpOiEVE/9Tz5NKFUSbJeeCAkJpaWlISEhgIAoC7BLneFZVVU3IXCgdoEAggFmzZjEYUlJSEjo6OjA4OAidTgebzYZ9+/Zh586dLHtbWloQExODzZs3h/T3E9+IRlQkmizAJgYOJUnCokWLsHDhQjZwqGecDF0lf5BRJgYa1Wo14uPjUVBQgOeeew6///3vkZ6ejq9//eu48847cfLkyQnXEzOnkRweWZZZ5lAWSK1WY2RkBOfOncO3v/1teDwedHd34/Tp0/jss8/wwQcf8Lxc4jXKJFGPmzIgqdVq8f7773OVFBmjJEfJIaBZ4c3NzUhOTuZWJJfLBavVymuSmJiIgYEBDsxlZGRwRoMyWWlpaRgcHAxBzvd4POjv70dlZSX6+vqQlpaGU6dOsaNL4Fy0Pnl5eXC73ejt7eVy2ctBlxKAiiS7ybZyOBwc5Fy/fj2A8Squi+lX0X6ggIQycCEGf8X9JiIeo2SDkt/GxsawZMmSEPT1fxWJdg49D+lxYDzZkZOTE2L7iO0/ZDORA04AcVQBQpk/q9WKQCCA3t5evmdUVBQWLFiA7u5uAONrFRUVhYaGBta7xGsizoEkSbyPFEwGgjzc3t6OiooKPsM2mw0DAwOIiorC0aNHkZ+fj/b2dp6DTHK3rKwMJ0+eRE1NDTvTom09mV6fCs9GIlFfifqa9oTGx/h8PgbrI3Tv7OxsPPvssyF6Nlywhf5IkoQzZ84wz9Jnle8nXoP2mK5FtgDZ8KJMLysrC5nlTnaiMpv7eQP4UyUKaoq2FoFpXgp9YRxU2qADBw7gqquuQlxcHEwmEzdRv/322yGLOjIygu9///shBk9rayuysrIwNjaGPXv28O+oZDA3N5cHsH/66adwuVzo7u7mCJQkSZgxYwauvvpqpKWlcSSfNl+tVk8o9aN7h/v3P5tEJ0L5M1GBiJSRkQFJklgwjI2NITk5mb9DQp+cH/Hg0WERDxv15YqOOV2Hojf0PX
JeqP+J7kOAVTQCSHwnlUoFj8eDwsLCCWsrRlmnQsooLz3rpdKlfGcywaoUFHq9Hh0dHfjzn/8cYsCLglOc0SlmqKmUMDk5mR2nQCCAhoYGuFyuEBAklSpYDknRfY/Hw6NqTCYTkpKSYDKZGHWSggAErX769GmcP3+enWYyaqkciPoFT5w4wUaBXq9ncBRxbSZby9jYWJSXl/OMVo1Gg/r6eh6/cuDAAezYsQNtbW2QJAlNTU3IysrCzTffzEpUjC5ejIg3Ihn/fr8fVVVVmDFjBmc0qbdW3C9SOMrSyHBnkuRLeno6Ojs7odPpuCdYkiQkJibid7/7HVauXIk77rgDra2tePXVV3HHHXdwplTs6aHnFNdXDD4pjSJRkXk8HjidzpDnJqIzL2ZllU6v8t3ETJxoAIoBNMpKiYYmlVHR3FLxfaj3j8qeRZnk8Xi4BNPhcHApmtgDBwSdXMpWS9L4vNjo6GiUlpYCAFfTUJtHc3NziIxUBjkmO+fx8fHcn01Ob319PcxmM+Lj4zEyMoJt27YxH4+MjMDtduOJJ57Atm3bEAgEQiopREc/HIULioh7Eo4WLVqEyspK7nGm+X7KTKHBYGA5TQGEZcuW4Yc//CGefPJJWK1W3H333XjjjTcmPUuioyA+E1VFUPaS1p90FD0Dlbh/4xvfwH333YfKykp2IkmXkFOckJDA66cMLNLZoRnOpCMIZZz22e12o7W1FR6PB2q1muf5Op1Oln0qlQpXXXVVCKiMWq3mskhaD5Lj8+fP5+fxer1oa2vDlVdeCVmWkZmZybgZZHdkZWVhcHCQ0ZjHxsYwc+ZMxMTEhPScKY3efyWJSQBZDrZs0PklmUyO+lQCy/T8R48e5f+L/C86L/R7k8nEehJASMZV6fRoNBosW7Zs0qBlJJpMpyhlgRh8IRLL+ennBNRFtpFKpQoBdCMgMrfbjeHhYe4FJawIWQ7Oug8EAnA6nZx91ev10Gq1IX3T5eXlaG9vhyRJDKx41VVXMfhXf38/NBoNZsyYga6urhCbkICSZs6cyWeG2m7y8/N5LbVaLTo6OrBo0SKMjY0hJSUFe/bsgdlsRlRUFPr6+qBSjVeEERL2K6+8wmCB4nor91C0WWmvLxeRvqSs35w5cxivhjK/Ip9FImVQmDKy4tgp8dyK70yfIdlBVT6iA0xnjarmRFBJ5fv8q4jei/aV7BSbzXZJ1/nCzEGlTfn2t7/NUNTR0dFob2/HsmXLJnjeZ8+exRVXXBGy6CqVimcaXrhwAQ6HA4FAEMWMBq0PDg4yZPbQ0BBHp7q7u5GRkQEgKPQyMzORkZEBq9XKykssx/lXkZLRRCN3Mgct0nPSgGASNmq1moGjhoaGOEsgGkCRjCFlxIk+K/bLiFlSMlTJWSIjJzExEQDQ0tKC3NxcNtplebwvmUbhfJ71Fw0lJbDA57meaHiLPxPvR+8f7vfK65CQnTNnTkh/KBlcVH4kKi4xAkf/p9FBZMBQUzplYOnfBOxCYDImk4kzAkNDQ7DZbBzpJwNVCdZEWQ3KKNbX1/PMToPBgJqaGlYWUVFRqKysnNAgL5YKK0msWkhLS4Pf74fNZkNaWhrq6upQXFzMcO9vvPEG7rjjDhQWFsJmsyEhIQHbt2/H9u3bee2UQYnJhHWkzBN9JzExEQsWLMDx48eh1+uhVqsZDIfeiZxy5X7TuRXLfGiNurq6kJqaitjYWGzZsgWLFi3iTPT3vvc9HhkkOgxKI4+uLWYnlT8Xzyw5BAQCN9ma0POL60jXDkcUzSenhr5DDqFY4kvP5Xa7odfrIUnBqH9ubi6Xo3V2dmJsbAw6nY6NMQAhiNT0rCpVEC2Z+qnpfJAxJMvB8R6BQHAkksvlYmOgo6MDTqcTarUaSUlJIb2scXFxuOGGG0KCglOpjImPj0dFRQUHdoiPNRoNCgsL4Xa78ZOf/AQ6nQ733nsvz86UZRnf+c538Mc//pGrhS5maET6Pb37ZPJ81qxZaGpqgtVq5RmMJCdozcmwnz17Nh588EGo1Wo0NzfjgQce4IAUnYPJnlXUX5SdIVBCkv1JSUlc9jc2NgaPx4OmpiZkZGRg165duOeeexATE4Ovf/3r2LBhA6xWK773ve/B4XBwhRAFLeLj4zkQTfckGSXqHJo3TDrK6XTC4XBwua9arca0adO4LNtgMMBoNGL27NkYGBjgUkVZDgLCUVkkrV1qaio/B/F9b2/vBPC41NRUNDU1ISkpCTNnzsTHH3/MrRwmk4nlhyzLOHPmDPx+P5YtW8Z7qQw6i0FhZQD0chmvKlUQRZZkg16vx/DwMEwmE4+le/HFFy/KG8prUnBKDGArM6aiTKOSSDEorQwI0/4SjsJUSek0iXJQfAbRFqL7KWWokgiBms4q2QCUjaeRI3StQCDANtXIyAhiYmJYV5JdRe1yYgViVVUVj4Lr6+uDJEm46qqr0NnZic7OTsTGxiIxMZHnoIv8StcZHh7G3//+d6SlpSElJQXDw8Ow2WzIy8vj+zQ0NPC5WrhwIfbu3YuSkhK4XC5UV1fj+uuvR2trK3JzcyFJwQrF66+/Hnv37sW+ffsm2PjiuhM/ExEq7uUi0kcAuApTkiQ89NBDGB0dhdlsRn19/SVdT6UKzqmnVgWlE0d7SvYP/QHGM9xiBSPJYqK4uDjExsZO4Du6/z+bRDuf2gVFm2MqDr1IX5gMKkG1d3R0cDSHlDMZKLTAZGyLjNvS0oLy8nIAQWWTm5vLEX8ySEgwp6amcnR4aGgIR44cQVpaGpqbm0OcLlmWkZCQgMTERP7/pQIkfR4K59iITEZK5vOQyWTiqC6V0Z04cQKyLDPapEjKRmuRZFlGYWEhgNDeNDo49Bn6t0ajYcNBkiRkZWWhpKQEPT09aGlpYSAR8RkCgQAsFgs0Gg0L6Ush5aEUBXukA3upykr5+cnWjJ5BLNkhp7G1tRWBQIAHaouONO1VJEfXbrfzyA76PAV6REeTzkMgEIDNZuNrkqMJBMuFab4jGakajQZJSUlcni1mx5ubm7nniIxAMaOk1Wq5T0Vct0jOqWhc0ftmZmby+6WlpXGPLqFevvvuuwyRb7fb0d3dja1bt3KWR7le4WiygAUZAkRqtRqLFy9mUIuEhISQMmzlvUR+E6OlADhgkJ2djZiYGHzve9/D8uXLkZaWhmeffRZ33313SG8SXVc0MHPzJXsAACAASURBVCMRfY7Wknifxs+Qw0dR3cnefypGpRiMIpRMMiZJDpNhLRqNIniMEqyO3tFut/NzUMuGLMuIjY2F0WhESkoKkpKS2KmnEijKGAQCAe719Xq9PDKGHKOCggL4/X5GDyfsgVOnTvGzRZK9kdZF3OdAIIDCwkI+j8nJyUhMTITT6eQeUK/Xi9dff517eMhhXrlyJcrKyiZE4y+VIp038d/Tpk0LcZZILhDCJpWoPvjggwCARx55BM8880yI8x8p+KS8l9/vZ1RacoBlWea5pFSJMDIygg8//BBHjhxBX18fqqurYbVasXv3bvz973/nwEd8fDx++tOfori4mPfJ7XbDbreHZNkog
0FZJarqIQeQstZer5dBR8hIDwQCSE1NRXd3N2eBtFotBgYG0N/fzw4VfY+MNco8abVaFBcXhwRYyGClddFoNLDZbKiqqkJaWhqXqRcUFLATQvw4bdo0eL1enDlzBqdPnwYwkR9FO0hEBqfPXk7j1e12Q5IkzpaMjo5i+vTpkCQJOTk5XDJ5KfekZyYZJbYj0e9pXykoKcpHUeaSbAgEAsjKymKbaCrZ5nAJA6WjLzqsyute7J1F+4ScEAI6AsDYKKT7xFJYsgGUfaomkwkGgwFarRZ6vR6BQAAOhwMOh4OnOsycORNOp5PnNVMwKDExEV1dXWhtbcXAwABGR0eRmZnJTpYsy7BYLGhoaMDAwACKi4v5HdxuN3w+H8rKyvj9GhoaWO77/cEROlTWTHKfKvLEoHCktRJlK633xSqzLoXOnTsHtVqNgYEBjI2NcYAACAZf9u7de0nBHQqyiLpP5E/RvqfkmlLXi7YjEf17bGwMv/jFLy7Lu38eUr6P8tnDVWdNRl8YB9XlciExMRH9/f282DqdDr29vRxdAYJMWVtby70bpAxcLheDYlA0ln5Hc+6oX2lsbAxGoxGZmZnw+XzQarX405/+BJPJBJ/Px5kKYnaTyYTs7GwGNZgKhSv3Ep9X/IwoPCP9oc8rHdZLJVKG5KAToI0YaRWfO9x7KN8TCC3XVDpf9MwUgaYSn9bWVh5vQoZmIBBgB5YY+ujRoyxEp0rKZxHXUTzk4UipgJRrIiol0QEgEpWlcq3IsaM/69atw7p163DzzTdzqacsB8vYxKia+Myio0LP4HQ6kZqaGuIQAeBMJ32OeJzKqmNjY1mpWSwWPjeFhYXsFFPAIC8vD0ajESaTCSaTCQkJCbxnFFiwWCyor69nA4BKqJSRTTGIEW79w61rYWEhSkpKIEkSl4u43W4YjUZ4vV7s3r0btbW1kOXg+AkCnSkrK/tcxof4c2Cice/3+1FaWspDqePi4rj8WFQ+ojGoPPN6vR4xMTEwm83461//ij179mBgYAB33XUX1q9fj+bmZi6Dp+uIBlC4Zw4nd2jeLSkISZJ4xrNKpUJKSgorxkgZZLG0KNIaEZHcraurCylFoj9kPJETQe9A5Z3t7e0wm80ctbZarfxZ+plarUZ0dDSXo9NzxMXFISkpCYmJiUhKSgoBuaNS9dHRUfT19aG+vh4OhwN2ux1qdRCMh4xDAnehCLVGo2FgIZEiBbvC8Tc5qdSzLssyj3QyGo2M4P3CCy/gjTfeYIOos7MTy5Ytw2OPPYaEhISIRlgkPhdRdpWfVwY6NBoNTCYTrrzySuTn57NhazQakZSUhFdeeQXbt2/Ha6+9hs2bN6O/v5/74ETnnfhJDFbRmpDTKUnB+XgUyPD5fGhubsZDDz2ElStXcnksBSlFGVhfX4/q6mqcOHECO3bswM6dOxETE4OxsTH84Ac/wG9/+1sA4J49cRYv2QEej4d5u7+/n/uN6ZlHR0fR09MTYghXVFSgpaUFsbGxzDcFBQU8/ooM7KysrJC+V3HPRPuiu7s7pCxSrVajsbERCxYsgE6ng9PpBBCcMX3gwAFYLBaW1SQDbrnlFoyOjmLfvn1hjUDKtIiyjBw9ZdmryBuTBa3CEV1XpVIxaE98fDy++tWvsnNMQaNLvSZl7pQ6WdSTFPwSZYRKpWLnX8mLVJURiUS5Tfem74ezy0R5P5WAnpJILpKTqDybdrud9R7ZSkAwS5qVlcUZV+pNpHaJ2NhY6PV6FBUVYeHChejq6oLb7WYQpeTkZNTU1IS837x58zgoSEFUr9cLm82G8+fPIzs7m7FiTCYTvF4vTpw4gQMHDuD48ePo6uriEmBZDmb5V69ejZGREaSlpSEnJwfHjh1DTEwMV4eQLF2zZg18Ph/2798/Kf+JZ0a5b8TPyj9TJeIRv9+PsrIyBmSkShwaJTZVXpakYP8pBW7FAJGSp8g+BBCCyEtthyIPi9lUlSqIq/CvJuUeifax+DOz2XxJ1/3COKhkdJKiCgQCuOmmm9Da2so9bQC4D46ARei7ZHSTAUKOUFJSEhsZJLTo8Gu1WvT19aG8vByJiYnw+Xw4cuQIDxsn5UKlkWNjYyGjCMRnD/f/cJE7UZAqP0M0mVD7RyNDpLwyMjLg9wfn8b3//vuQJIlnvl3KPZKSkiBJUkhpipjlAcYHl5tMJoaal6RgpNPj8cDr9UKn0/G6iOA/kiTho48+QiAQwKJFiy7aYyBGbiZTEuGczkiOqPIaoiGndF7ClRGJEOGrVq3C2rVr8dJLL+Gpp57CvHnzMHfuXMTFxUGv12NgYAAVFRXc16lSqULKfMkgIWMbAM8LJeFOvacU/aezReseHR2NwcFBeDweHjxN667RaBixlCKaJpMJM2fOhMlkClGA1KNKDitl4L1eL4qKigCAe/vCrX8kmswwUqlUjOYtyzIjxBoMBkRHR+M3v/kNdu7cCZfLBY/Hg/Pnz7NhH26P6Vkmc2AnK4kEgiiwCxYsYCVuNBpDIqRiSY64F8uWLcPPfvYzvPjii9iyZQtqampw5513Ys+ePSHnQOQnMrCVBpIyACNJwV5GWgcAjIxL7xMbG4usrCwkJiZyBjIzM/OiGcJwCll5bmJjYzEyMoLc3NywTrX4nKKTbTQa+ffx8fH8/gTIRaWT9E7k1FFvqkajQUZGBvLy8jgrSQBgRqMRycnJiI+Ph06nY+OOeharq6tx5swZHi/j8/lQX1/P6xoTExN22Hgk3pmsykWlUqGsrIx129jYGDo7OzEwMMDlkBcuXMDjjz+OQ4cOQavVorOzE42NjbjrrrvwwAMPsBFGez+Z8TWZsawMponVMImJiaioqMD69evxox/9COvWrcPmzZuxefNmHD9+fELwRHQAiPfF+xNSPCGyqlRBMLSsrCzk5+fDYDCguLgYn376Kfx+P7761a9iy5YtSEhICDHogHGeq66uRnV1Nc6dO4cnn3wSTz/9ND744AP+PPX1Wa1W+Hw+eDweLjUj3T4wMIDMzEx2WslhNRqNXF4syzJPGPD5fNz6c/XVV8Nms0Gj0SA2Nhb9/f2YOXMmrFYrf8/r9SIzMxPd3d1obm7Gxx9/DK/XywFxsfSNgIVob2bPng2n04nBwUHY7XbU1tYiISEB7e3tITr25ptvhkqlwsGDByPygbh+yn8TiUavMiAl7m84oqyaLMsoLS1lG8rr9UKj0eDQoUMhujncOC7leaIzKGZ+6dnCOdTi2DexUkPkT/r9zTffDADcQqZ8L6UtE24dI2WgP4+tJspE0XYUW0EsFgvy8vJ4n3p7e3mGttfrZT1EspIylIFAcLIFjZSi7OnChQt5vjM5QLm5uejp6UFPTw8AcJlxWVkZEhISYDKZcPLkSdhsNsyZM4dRtgkzxmg0wuVy4cCBA5w8amxshMPhwP79+/HBBx+w7tFoNMjJyUFfX1/Iet56663o7u6+ZCd/MlIG0EReVvJhTU0NZ3nJrtqyZQtkOQgaRXg2U91n6gumShQKhBK/KoMhZLvRswJgtHr6nngWyB6tq6ub0vModXakz0SymZQk
ygpaS/ou/T8rK2tKz0b0helBLSsr41lC1Jt44cIFNnSJhoeHUVpaGsK0Z8+eRUVFBVSq8f4HEpJ+f3DGkiSNz1Ylwz8xMZHLRufPn4/W1lYYDAZGMEtLS0NraysyMjK4llqj0aC/vx9ms5lLtYDQyJlIojCMJMj+mSQqdVkOlih3dnYykwPg9SGhAkytL0Us3QXGx8OIPXDU+0NGpsfjYRhzKmOlUivRsCGivVKpVFySp1xTkZQHOtKaK42cyRSQeG0S9ErFKEbyyPl2u91YvXo1li9fDrfbjbi4ONTW1uLo0aPo6+vDV77yFYyMjHDP07/927/hnXfeQUdHB0ZGRpCZmYnz58+z86/VajmwokSlptmgADgS6vF4QoQL9Rp3d3dzyXRycjK8Xi+Ghoa4hIVK4Lu6uuB0OhEbG4uoqCg2zMxmM5dDNjQ0YGRkBLGxsWx0yrKM2tpamM1mrFy5coKhLvYlhCNlz2i4fSkvL8fp06ehVqthNpvR2dkJo9HI6/7CCy/gkUceYdAhv9+P73//+3j77bfR09MzoW8pEolOn5LEtZXlYL+Z1WplZR8VFQWr1cq8Qs7h7Nmz8dBDD0GSJDz77LOccSZjebIScWUgS8m/LpeLHS+VSsXgVyqVCg6HA2vWrMGMGTOwZ88ehsfv6enB+fPnIcvjgEtKIqcl0r4ozyWdAXISRWNA6VyTLKKz7vF4mMfpM2RYiiXplAWjctiEhAR+1/7+fng8Huh0OuTk5ECtVrPiFuc+S5IEh8PBhrVYSt/e3o4vf/nL+Pvf/47U1FQsX758Ai9frAwtEn9RgCk5OZkHvVPmrba2FsXFxTCZTBgZGcHevXtx4MABbN++HX6/H62trYiJicG3v/1tdHZ24pe//GVIT+5USdxLkZdE2XbbbbdBq9VicHAQ99xzDwfcRIdXLL8EEAKIRf3qPp8PcXFx3CO/detW/OxnP4NOp2On/NixY1yy//777+P9999HcXExvvrVr+KRRx6BRqNBbW0tnnjiCb42BY4pK3f06FFuSzhy5Ag/DwUWSMZRXy3Nv83NzeWyd4ryU1WB0WjEwMAAzGYzMjMzYbFYGLyrsLAQDocDra2tDPxDMx8/++wzxMXFISMjg8eHOZ1O7lX/+OOPkZ6eDqfTiTNnziA6OpoRTktKSniNt23bhttvvx2BQADl5eXo7u7G8ePHsWjRIvT29iI1NZV104033oj9+/df9n480jO0z8qABlFDQwOAIOgUBTS3bNkCAEhPT8e+ffuYb0TbhM5/uPPS3t7Oe03vpQw+k9yk/mUxCE5YAHRtMbt27NgxXHfddTh48GBEo/tfSWTMi9lxYHyNHQ4HFi1aFIIlQn36drudsSTU6iDaL7XWkM7NyMhgACSVSoVFixbBbrdzzz05TlqtlisHNBoNdDodpk2bBo1Gg76+Pqxduxajo6NobGzE8PAwBgcHMTQ0hIKCAj6LbW1tiI+Px8mTJ/HJJ59g9erV2LdvH1SqIOZLUVERli9fjsHBQdTX16OkpIT7aCUpmES65ZZbeF0uh6M6WUuR+G9RLyUnJ8Pj8cBkMoUAfP74xz8OccSAcX4RQT7J1qCqIjFJJdqNYuBXpVLxzG66nljpQM6oGDBat25diF1/MRJtDfEsiusRzk6i5450Nogv6XoUbAdwyW16X5gMKhC6mampqRgcHORGbzJq2tra2MAAghtL6KEAGBgGCDpeXq+XMzti9kGtVvOcNNoUm80Gr9fL5RJ1dXUMT97Z2cmHV5aD8yZ7enrg8XgmzfiEy7xdTpKkiaMfRNhpIhI0Yk8eCXBSyGScTfac4u8m60sjR87pdOLcuXPcp0OCksBloqOjefQDXUt0BGVZZkeWjHjxHmKUN9yBudiai88/lc+KkVwi8b6rVq3C7t278eSTT+K1117D0NAQTpw4gX379uGPf/wjvvOd7+B//ud/sHDhQgwNDTFKZUFBAc6fP4/c3FzExMSguLiYZ38S7yqFtFod7M8ym838e+Jtt9sdkrmkviVyAigrQBkD+h0ZWf39/YiKioLRaER+fj4CgQC6uro4SEBZLFkOgopkZmYySAKVRxKw1aXQZE4Q8TTt9dy5c9mIpHK7gYEBznq8/PLLsNvtDJdfW1uL1atXY9GiRZPeR/k84c5wuO9S0Gv+/PlsSNEMTQB47LHH8PLLL6O4uBjr16/Hhg0b2KCjz4igCJM9k/gMFL0eGxvjYIJKFSxBSk5OZth5g8GAl19+GcnJybjjjjuQmJiI/fv3o7Gxkc8VnctIa3+xZyJ5RAqUkEhJcYoKT1RixLsajYblPl3TZrOFrA9ll6g1geS20+nkdZdlOQS7oL29nfl7+vTpmDZtGqKjo3l4PJWsm81mRrcOBALYv38/gNBqCfGdJwu0TGZUiYE3KiEjgL/09HQ4HA7U1tZCp9Ox/nrqqadQU1MDrVYLn8+H1tZW6PV6/Pu//zvmzJkzIThHJBpC4Z5R1F0kn6+99lps2LABr732GrZs2YKtW7dOCJ7Q98Ktgej0U6AiPz+fgweffPIJNm/ejI0bNzJiLgUzVKrgnFq73Y76+no8/fTT6O3thcvlwowZM/Duu+8iLi4OsiyjoqIC8+bN48w7lWl/+umn+PTTTwGA95OcF3KYo6Ki4HQ6GTFXBJahiityOEdHR5GamsrllVQaGRcXhzNnzvBZ9Pl8yM7ORmNjI1JSUmAwGBgU6/z584iKioLJZEJvby8yMzNhs9kQCARBfXp7e9Hc3AyPx4MzZ84AAD777DMUFxejrq4OJ0+eRHx8PNLS0mCz2TA0NASz2Qyn0xmyv+IMwstJkXhILJ8kHTlt2jQG4CPQv+joaM6SAaEyg5wqsqXEDC79nki0X+gM0fkkGwMIrXwT5Yvo4JKsTklJmXB9pYy/XE7SZERyi+QlPbfFYgEQzEDRfhMoIsk56lekyigCmwPGbTWSyRkZGUhNTYXT6cSFCxfY6VGr1bjiiivQ0dHBwShJkrjUv6ysDNOmTePA4OjoKBYvXowbbrgBpaWl6O/v5xFaZEva7XasXLmSs4fU+vPRRx/BZrOhra0NHR0dsFqtGBoaYl0WCATQ0dERsYXicpJSpxK4nkoVbH8ZHR1FeXk52zaNjY0hyOD0XbKjRPtC/L2SX+n/yiAuJcNor8W2MWWlANnGc+fODVuRQM8g2sjE//RzsukmS/iI359KYF9ZrQBMXlUU9jqX9Ol/IhHACS3+8uXLJ5SzdXZ2orCwMGSRKNpDTEGlvn5/EA5abNgXDSKKhFCjOfWlkNHS2tqK2NhYuN1utLW1wWw2o7+/H729vQgEAnw/u92Onp4eLh++nCQKqEi/V0ZhlM6T+H1y9ihrQqWcFy5cYACRFStWTHD86Dpi5EQsjaB9EpUIKSyz2YycnBwuATWbzRgcHITZbOY6fGXmlJxTMaqzbds2yLKMW2+9NeR5lMrr85Do4ItrTYpS/BwpPKUSo+8NDAyguroaf/nLX3Do0CFIUhCUy2w2Y/HixXj++efxu9/9DocOHWK4cgL8IbRGt9sNq9WKioo
Dc3F7m5ubzHxflSKBR48cUXsWPHjusoseI8kG0TAebVEmRkF+kaiNpPgKcIxoiAtgh+yWQyaDQaOJ1OTpIQo8XlcvH50NHRgdbWVpSVlfHni0O0uyLgcaN1eqP1Lt7naoGrx+PxCU7/1vH/TQaVnHSq4xQpGdSomqgl1ONLkry89IWFBUxPT8NsNmNlZQUGg4EFVVZDsuhrOugJ9SgrK0NdXR02bNiA5eVlTExMMD+d6lxtNht0Oh0SEhIwMDDA9F1CQAcGBlja/fHHH0deXh5qamrw6KOPorW1FXv27MHzzz8PvV6P119/HcXFxTAajUhOTkZdXR1ycnKwbt06DnSpr6DJZOLesG63m4MuCizcbjf36CQHdmBgAGlpabBardBoNGhoaEBCQgIjy9QiZ2VlBTqdDqWlpbwZ9u3bh6CgIAwPD3Mtkb8AlXgwd3V14dixY4iNjcVjjz2GjRs34r333rsuC0GbYLVMkSh6QHRk6uVG62FhYYEzehRUkrozbV5RIIEyYOPj4wDAjr9/gE/Gga6PModkCOnncrm39YBer79uHYsZVADcosDhcHA/UqKxbdiwAeHh4fj5z3+OiIgIzM7OYu3atThw4AASExNx9913c1sai8XC9cWkjvzoo49y3eDtt9+OzZs3IygoCH/84x/hcrm4piYyMhIrKytQq9X8XvGelpeXuV4VgI8hJKMsCk2YTCYEBwdzeyZx0Hp0OBwwGAxYXFxkR2J2dhbr1q3DX/7yFxQXF0Mul6OkpAQKhQKxsbFMh7x8+TK6u7tZ0ZOuHQDXTlAPR8AbCA4PD8PpdGJ2dhbHjh1DV1cXzGYzB3GkTOvxeJg2SnVZFJRSPeHi4iLUajX27duH9PR0LC4uMjiRl5fnQxmlTO1tt92G1tZWtLe3o6CgAJmZmdDr9WhoaMD8/DzXExJTgZ4/7T/K7tC9StK1/rq0zklBfGVlBVevXoVKpUJpaen/pu69w9usr/bxW5JlWZanPORt2bK8VzwynUkSshctpCGQQMNMB4SZDmihtLR9CwVKCgVaVsIICXEgCQnZiZ04ieO9LdvylLxlW7YlWdLvD/UcHjmBt31/73td3z7XlSuWh/SM8zmfM+77PhgcHGRaQG1tLXPCzpw5g+TkZKSnp7PiKQlPnT9/HlevXsWPfvQjrFixAteuXWPo+tmzZ9HU1ASj0YhDhw5h+fLlCAkJwcGDB+Hr64tz586xEjF1MAmeREEEJWIVFRXo7e1FUVERTpw4gdOnT+Pq1as4d+4cTp8+jfLychgMBlaZJiXmdevWITo6GsePH+dKOAnujI6OIiIignnVVGwhtXLihgv5Z9ShJkg3+TGxWIz4+Hh+DsJDLBZzx+5mdu50OmE0Gnk8y9WrV9HQ0IC6ujooFAooFAqehevl5cWCd9RJJEE64dgOSqZ9fX1ZVbSvrw/BwcHQ6/VwOl3cZRqz1tPTw5wlshkS21myZAlGRkYQFxcHhUKBlJQU7N+/n4M8kUiElJQUnj3e0tKChIQEt+vUarVwOBxoa2vDqlWrUF5ejrKyMsTExECr1UKj0eDgwYMsUiQWi5GTk8NVfiocOBwOdHZ2smgL3U96JlSUoX2CknXytXV1daiqqsLo6CiUSiX7UplMBpVKhbq6OhgMBuj1epSXl2NwcBB5eXnYsGED9u7dy2tiOrSXkmuh76LZkbS/NjY2oq6uDup/QminxxDBwcG4ePEiQy/pOmivEfoLk8nEnx8dHY3a2lo8++yzUKlUeOWVV6BUKnH77bez3sCHH36I2bNnQ6fTITs7Gx988AF3rcnHUjeVkuzx8XEYDAZGkpSUlPB5kv8k3zk+Po74+HiIxWKcO3cOMTExCAkJgb+/P2JiYtDU1MTcaJPJxLodHh4eeOqppxASEgI/Pz+oVCouAgoDX6fTiYMHD+Kxxx6D0WhkRXhhwA64Cmwk2kdrT/hspgf4BCP09PRk1BFBIynhX7FiBceJxHOmQvenn36KkpISN5+uVqvh5eUFpVKJ3NxcnslLisp0TcKvhc9Z2GWiGEzYoZ8evNN9uFlCerPuKfBNB5Xsl3xccHAwgoODmS8dFRWFw4cP495770VzczNCQ0OZn65Sqfj9hoeHWfuB7GB6AjY9MaFYUCaToa+vD1qtlm1n3rx5GBwcRE9PD7766iukpqZCp9MhKysLfX19sNvtHE8fPXoUDoeD5/hSF1KtVuPq1atMiyoqKmJkBEFoqXBH56JWq3Hq1Ck4nU5oNBo+Zx8fHxiNRrZLGn8jlUqRkZGBgoICAMCqVatw8uRJtwkX9Kz6+/uxfft2FgadflBBbXR0lP+eYixhkjkdpk6xs7e3Nyv4EuKHFIeF9iUWi7F06VKkpKQgKSkJdrsdbW1t0Ol03FGempqCSqVCYmIi5s+fD71ej+DgYAwPD7s1d25mc9Of980SceHXwvcRxtDTf296YYfsGP9pEF9SEXQ6nQgMDIRCoWBFQ2GSQ5UXhULBSpoUfFIFg4JWkvqfXi2gg15TF6O9vZ1HzRD8IC4uDgBw+PBheHl5ISMjgz97zpw5HPxOTEzAbrez+ARVGaOioniGaFtbG7Zs2YL9+/dztzciIgKdnZ1ITEyE0+nk0SBdXV3Yvn07ioqKGIpJQRrNJBMuAA8PD4aF0dHV1YXAwEDMmzePVb2I10pwSAqMb7nlFsTGxuLUqVNYsmQJhoeHUVdXB4vFguLiYkRGRroNOqdkTVgAcDhcpHcSUnruuefQ19fHw8CnJ7W0aAk+RBzD8fFxqNVqpKam8vXR5jo2Nobx8XEuUNCYD1rYVM2lAISqS35+fqzgJ3QWdN4UvAo3QWEVkaqyFosF/f39mD179g12PD1BBVwdHYvFwnwW4oSKxWKUlJRwxd/DwwMDAwM8b/Ps2bMsHuLj48N8LpLIv3jxIrZt2walUokzZ87gpZdewvnz5+Hr64snnngCnp6eqKqqYlgqQeeI70X3HHB13AMCAtDX18ebFBUG6F9cXByCgoJY/Vr4LB0O15xS4lvTeVLlmmYY0qZFgSrB9CorK9He3o6GhgbMmjWLiyHC50AdX5Kcp04KXcfk5CRkMhkiIyNhMBhgt9sxNDTE3F25XI7+/n7mdpByJgnHhIaGIj8/H+np6bh8+TIyMjLgcDigUqmgVqvR0tKCpqYmpKamcvGDIOiHDx9GVFQUsrKycPToUcTHx3OAWFtbiy1btiApKQmZmZk8o5fuHfFQaV0RHwaAW2XZ6XS6zQMlYamtW7fy6CmCPQ0PD+OJJ57AX//6VxQXF6OgoID5yyQW5XA4cPDgQeTn5yM1NRVWqxV//etf4enpyQUsmqXa1taGzz//HE1NTaivr4fFYkF1dTU/49zcXKxYsQJ2ux0pKSlYvnw5dzAKCgqQn5+PvLw8rFy5EllZWUhMTMT3v/99tLa2orq6GkNDQ/D19UVYWBi6u7uxfPlynDlzBlu3bsXq1atx5MgRXj+EtiBu9tDQEHNcyPcTF5F8O9myUPiLfr+npwcqlYo1DaampjAwMMC8numVcrvdjs7OTobxenp6
oqurC/n5+ZBKpcjOzoavry8sFgtDkUWib1Q0yZ7Jh9HII1KUpGsiKN3U1BROnTrFyQfpF/T390Or1UKpVKK9vR1ZWVlYs2YNcnNzMWPGDHz++efIzc1FeXk5GhoakJSUhNTUVLS3t6OiooKDnLCwMKSlpaG2thb19fWQy+W8hinI02q1OH78OGJjY6HRaHDx4kWEhYXBx8eHKRXHjx9HcnIyABcPjgqHtEdN37eo+0XUGSpUkr+ntUGUAqJyTE1NobKykmMEq9WKyMhITrw6OztRXl6O0tJSdHV1obq6mlEj0xMG4ddisUvwLDg4mJWNbTYboqKi8NVXXzE8WxhDEBWJ9sfpAZpwf3E6XdQK6mbU1taira0NO3bswPj4OC5duoTXX38d/f39GBgYYNhec3MzJ4x1dXWsfq3T6WAwGJCbm8uf98UXXzAkd9WqVZBIJBgaGsL8+fNx5coVhpBT8CyRSNDS0oKkpCTodDpGV9H7RUdHs8AJjeagbicpAPv6+uInP/kJz6EWJmu0Zg4ePIhHHnkEnZ2dHKPRvRGLxbwnCwP36QEw/U9xHlG6qClBezQV4ihmIJ0FgpaHhYUhKioKDQ0NDDX38/ODn58fi2AODw8jPj4eTU1N6O/v53sifL7Cgsr0ZER4vtObJHTNN7tGOudvO86ePQuVSuV2HmKxmIUhzWYziw/NnDkTnZ2dMBgMCA4Ohkwmg7+/P+sfUOFerVYjNDQUer2efebNkhjh9ZMydVhYGCYmJuDl5YX7778fvb29qKyshIeHB3Q6HYtNJSUl4cyZM1i1ahW/d0NDA2JjY/k5ikQuvio1J1JSUlBeXo6wsDCmFFGCSEkqzUUlBehVq1a5nbO3tzf0ej3TL0h/QiKRYNeuXVwQPH36NI4dO4bQ0FA31AOdV2FhIbZu3YrS0lIuMAufW09PDyYnJznWomcsPITdSSrI0bUPDAywDRHKzcPDA+np6UhMTERERARCQ0P5mYWEhCA0NBQvv/wydu3ahVOnTkGv10MqlSItLQ2LFy+Gl5cXZsyYwfdRuI6mP9ebfS20SeGzn56MftvfUhPoZp/5H5mgBgYGcjBM1WSTyYTk5GQO8Cn5A1zOiHDXFJwB34hYyOVy9PX1ITw8nBckcKM8ON1AuVzOgXt8fDz0ej0CAwMRFRUFh8OBsrIyqFQqBAQE4MqVK8zPamhowMjICFd1qJthNpvxxRdfYM6cOVi6dCleeeUVyOVyeHt7M2cOAAwGA2/okZGREItd4yq6uroQFxfHarw9PT0IDQ2FyWRiLiptMrQ5x8fH83UBQGNjI5KTk5mXFBsby+qYIpHIDdbb1dUFs9mMkydP4nvf+x6Ki4tx5swZWK1WvPbaa+zohdBa4WYidMoSiWucx8GDB/Hss89i3bp1UKvVuHbtGuPqSa2UoDyhoaF8T4xGI/R6PWbNmuUmHkPPniAbEokEw8PDEIlEDGGdvkHQ31Oxgsj4lGzSQQGSSCRinhptmlTVJzt67LHHbmrHN0tQ6TzGx8ehVCqxbNkyzJw5ExcuXMDIyAiCTV4w7wAAIABJREFUgoKwadMmiMViN0hzXV0dJBIJtFotbr/9dtjtdlRWVvKGaDKZ8P777+PKlSvM4Xv22WeRlZWFuro6vPPOO9ytoWuhkR2kIkzXTV2+8fFxTE5OwuFwICEhARqNhlVqyWlToYGC9fb2dh6A7eHhwZ1ggiWT2Imw0kiFApPJhI6ODg5qIiIi4Onpibi4OOaF+/j4wGAwoKWlBZGRkQwDIx7wxMQEKioq0N/fDx8fH5w5cwZGo5G74GNjY+jr60NISAiLI0xMTGD16tVYsmQJLly4wPzP5uZmJCQkIDMzE/v370dGRgYA14ackpKC3t5eXL9+HYGBgQzLBlyzL318fNDV1YWoqCgUFxcjKSkJXl5eiIqKQmFhIYxGI+Lj4xEVFcWdXHL0JChFY7KEIwGEEDYhHD0wMBAjIyMoKSlh7jmNMpFIJHjvvfd4jmFpaSmSk5OhUCh4U/fw8EBJSQmKi4uRnp6O6Oho3HrrrRgcHERXVxdsNhsWLVqEoaEhhg6vWrUK165dY9VAgqYajUZUV1ezONv169dhNBpx77334uc//znOnj3L/uTSpUuscE2wVII/EuS1tLQU9913H7y8vPDmm2/irrvuwunTpzEwMAClUskdTyr+EKyPOv3UMSNoPaEhqMhBiSd1Umne3sjICPtvYWdtcnKS7XFiYgIdHR1YtGgRIiIiIJPJ0NzcDK1Wy3uOTqdDTU0Nuru7odfroVarGY5K6yQwMBAtLS0YHBzk5KajowP+/v5obm5GT08PBgcHudBJ0GCHw8EKvYmJiVzIHR4ehlar5eJjWloajh49ioULF8Lf3x/nzp1jLp5arcZnn32GjIwMXnvp6eloaWlBT08PGhsbkZSUBACczBPVpaysDJs2bcKxY8dQV1eH3NxcKBQKZGRk4ODBgxgZGeEib35+vltwRD6DePG0F0xOTkIul7OqqjAIEotdCpfUHSP1ay8vL1RXVyMhIYH1FBYtWoSkpCRUVFRALBbDYDBgeHiYfR5dD+1R04PxyMhIzJ49G3FxcXA4HKisrITD4UBOTg7EYjEXLWifMRqNeO+99zA5OcnPWejzhYkwfY/WjljsGq1y4cIFGI1GJCQkIDY2FlevXoVIJMLXX3+NhQsXcsGmqqqKRffEYjGuXbsGiUTCXW+pVIpr166hp6cHy5cvh0gkQnFxMTQaDRQKBdLS0ngW98TEBMPmpVIpWlpa4HS6ZuvSuYrFrvE1hLqhTqpIJMITTzwBLy8vnDx5ErNnz0ZsbCwiIiJQXl7OnHE6aM88cOAAnn76aVY5p/tDHPTR0VG3ovv0mIL2KvpaoVBw3EVFHxKX8fDwQGpqKiYmJnjvowI0xYZtbW1oamqCzWZDQkICWlpaUFFRAaVSiYSEBFy+fBlLly7FgQMHbihqCQsb04/pyet0O7hZF+pmHdabHZSg0r2QSCQICQlBQkICbrvtNiiVSkREREAikaC1tRUjIyNcdHY4XJxpijGFHV7yhbW1tYzgEZ7f9OukdUyxiqenJ1pbW1FUVASNRoP3338fK1asQG1tLfLz81FXV8eK1CKRi9vpcDiwYMECPofr168jMTER3d3dWLNmDex2O65fv85qxBMTE8yFpjygr6+P9+HIyEgEBwfzMwAAvV6P/v5+vgYqkt1zzz0IDg7G73//e2zatAkjIyPYvHkzSktLGeUitEFKnh999FFGGwl/JhQund4ppQYMnRPBcMlWLRYL05yAbzjUEokE6enpMJlMXMD28HBNWUhLS0N9fT2GhoagVquxZs0abN26FQ888AA2bdqEhIQEjI+Pw2azISAgAHV1dTckosJ19q8c35XIfptNfxcSAP9pCSpBrMiBU4ePWt3EpXA4HDwHb3R01I2XJlw4JPBDwTvgDq8gQxIuQplMhtHRUR6sLZfLERERwQRziUSClJQUnDx5kqFMra2tWLRoEZqbm9nxkjKX1WqFSqVCZWUlNm3ahPLyclRXVyMmJga
NjY2cnA4MDECr1WJ4eBgKhQLt7e3o7e1lldKYmBiUl5cz1Fkmk2FoaMiNRA0ALS0tSE5O5nvQ3NyMzMxMvmapVAqtVouWlhbY7XbmndDsTBobs3btWvz0pz/lMQxKpRJKpRKenp44fvw4q1/eTB2MnCE55mPHjqG6uhqJiYl48MEHsXDhQhw7dgwikWvIe2BgIJxOJ8rKyrizRs915syZnHxTsmm1WjExMcGBZH19PeLi4piXS46bkmDaXCYnJxmaSoqswoRMmGyTrZBKM3EonE6X6t23Yeq/LUGlrrC3tzfUajU++ugjOJ1OVtWsra1Fb28vSktL0dPTw7zZX//617hy5QqWL1+O4OBgnD17Fu3t7RzMJSUl4Q9/+APmzJmD2bNn49q1a3j33XexePFibNy4EQkJCTh79ixGRkagVCoxMDDAxRDqANE6MJlMyMjIwJw5c5CSksJcPgqmyKY6OjpgMpmY7zY0NMRVaZL3Ly8vR0JCAlfGHQ6XqMKCBQug1WoRGBiITz75BBcuXEBcXBwaGxsBfDNfkgpTBGE0Go1QqVQsNiSVSrkzT11Q+pzk5GTodDqGBxP8lAJsf39/bN68mbljeXl5aG9vR1hYGADXLLOEhASkpaVh//79iI6OZvgnBTKdnZ2cTNPao82prKwM0dHRuHbtGpqbm5GVlYWcnBzo9Xp0dnYiPj4eoaGhyMnJQVVVFdsZBQihoaGQSqVc/ZfL5W6FFNoMxsbGOGjs6upCSEgIbrnlFvj7+6O3t5fFUKi6W19fj+LiYtxyyy2QSqWorKyE2WxGaWkpjh8/jqamJqxcuRL5+fnw9/dHSUkJ23JHRweamppQXl5+Q9dJKpVCpVJhxowZLGAzZ84cpKeno7y8HLfeeis2bNiA8PBwrF+/HrfeeisuXLgAnU6H/v5+3pxlMhknZFarFYcPH0ZPTw/uv/9+OJ1OXL58GRaLhWdYk0osrVXh2rVarSykQ3wj8lN0zpSwBAUFISwsjP2GcI/o6enhEWEDAwOIjIxEXFwcZDIZGhoa0N7ejqioKB4ndv78eSQlJaGqqoqLbYGBgejp6YHdbkdcXBwXIxoaGrhzU11dDbVaDaPRiL6+PjeNASoWpKSkoLa2lhEGO3bsQHh4OBeQBgcH0dvby5xciUSCrKwsXLlyBZWVlYiLi0N1dTWPUMvMzMShQ4eQmJjI60er1SIoKAh6vR5tbW08xoFszs/PD5mZmTh8+DA2bdqEiooKVFRUMMIjNTUVYrEYn3/+ORdws7OzkZiYyMU1AKxKTx1JKgDS0Hrq5AhtzWw2s4gSrQk/Pz9cuHAB7e3trNewc+dO3HfffdDpdCxER8IyZANCPhnZT1RUFNLT02E0GiGVSvHmm29yEZw6tUqlEnK5nIvCTU1N0Gg0SEpKQmRkJI8+AsD74PR1S+/p5eXFszhHRkbwwAMPsH8lEa329naEhoZCoVDg2rVrSElJ4WB+dHQU69atY3s9duwYlixZgtbWVmRlZQFwidDk5+dzbEBFFqFaOV1/RESEG/yTulxURCa1cZlMhsuXL+PLL79EXFwcNBoNxGIxqqqq0N3dDalUyhxaiq+cTtcs6gMHDuCpp55CU1MT76fU/RImttO7qMI9mfy8r68vj9OhvYkU6KVSqZv4Gfnp8fFxBAYGore3F4sXL0Z9fT0iIiIQFBSEnp4eJCQkoLe3F9HR0QgODsaf//xnbNu2DQcOHGDNgpslpXRu05PRmyV402Ml+h5dx3cdBPEFwIr0YWFhyM3Nhbe3N/72t79x95/GiAUEBCAgIAA2mw2hoaFoa2tDVFTUDXbp4+ODsrIy6HQ6jheEXW0hGkAkciH2RkZGIJVKUVdXB09PT6xcuZI5oBUVFYiJiUFsbCyuX7+O+fPnAwDOnz+PyclJpKenM1KDuKYUB4eEhLASOgmQHT9+HDabDZs2bYJOp2P/SA2F0dFRN4oC7RkikYubSt1RmUwGb29v/OUvf8HExAS2bt3Kgnbh4eFuI2koTwAAk8mEtrY23H333YyIpOJnV1eX2+/S59MhbOLQWiAFbUIoUCGIhE8J/kv3nChQCoUCCxcuRF5eHioqKtDd3Q2VSsVjJMViMQYGBhh1cvLkSTf7mx6fC/c7IQ//2+x8elHv237nv0MC4D8tQRVCWKnSRUkR4HpIXl5eGBkZgZeXF0M9hV04oegBDcWemJjgwPBmUIvpzsTb2xsjIyOQyWSIiopCQEAAjEYjQkND0dXVhaSkJJSUlLDj1+l0CA4ORl5eHkZHR5kLRdWb2bNnQy6X4+jRoyxEQkaoUqlQW1uL7Oxs6HQ65vxUVVVhamqK8fQAmEtA4wREIlenmCrEBF1oampCfHw8JBKJG8yZ3ocqPgA4ECJOEOHzt23bhn379vEw7nnz5nEn684774RcLkdPTw9XtoSS7tM3FeIGXrt2DWfOnIFarcaOHTtQXl7Os+QMBgM8PDzQ29uLiIgIWCwWZGZmIiYmBjabjRevzWZjURNSZ7RYLAgMDORuofA66XyIp0XjbChADQkJYbw/wZjIGVNHlRY0FUVEItG/naACLlGHiIgIXLt2DRqNBgMDA3juueeQn5+PsLAwrF27FsXFxcxZfemll3i+6Z49e5jfS3xMq9WKkZER1NfXY8mSJXj00Uexdu1arF27Frt378bLL7+MM2fOICIigiXQg4OD0dvbC6VSyXLocrkc0dHRyMzM5GsUOhfqsJLz8/b2Rnt7O2w2GwYGBqDRaFBdXc1BOM27rK2tRWZmJiIiIhAeHg6FQgGdTsdV1KKiIuzZsweZmZlQqVRoaWmBQqGASqXC6OgoYmJiEB4ezvwuSrZGRkYQHR3Nc4eF/D2z2YygoCAkJCRg3bp1mJqaQmxsLG677TaEhYVBo9Ggo6MDNTU1PDKHOgcU1HR2dqK+vh5JSUlIT09HaWkpC8U4nU5otVrU1NSgvb0dg4ODLM1PBZeMjAxcv34dGzZswLVr1zA4OIiEhARER0cjLCwMp0+fRmJiIhwOB0voC6kNtM7JyVOhbDpPD3Dx9QnubDAYUFNTg/T0dMTHx6Onp4cDfhK7GRsbY3hnSkoKCgoK8Pbbb8NqtaKrqwstLS2Ii4tDXl4ePvzwQzdbppEO5G/9/f358ycmJliZ2mq1Yvny5Xjvvfe4m0pqvhcvXsTJkydht9t5ZEhQUBBDmqhSHB4ejuzsbGzfvp05msXFxQwRNZlM/PlBQUEYHR1147LT+iBBLxJeoy6Kp6cnEhMTeWSTELJlMplgNBr5milYo64TKcd6enpCqVSio6MDs2fPRkxMDAoKCnD+/HmevUfFVpqhOzo6Cq1Wi0uXLiEgIABWqxXj4+MQiUTo6upCRkYGBzHkq0hluqGhAYsXL0ZKSgoWLVqEffv2sfCeTCaDRqNBU1MTamtrGWoLADExMcjNzUVJSQnmzp2LsrIyNDQ0ID09HWlpaThy5AiqqqqQkpLCELL09HRWxg0LC2NtALLTpKQkHDx4EGlpaQgJCUFZWRnKy8uRn58PHx8f5OXl4e9//zujKLy8vD
B79my0tbVxQZBUuzs6OpjyQD6YdASouETBE3W/JycnefRUeHg4+vr6MDAwgLa2NrS3tyM3Nxdz5szBxo0b8e6770KpVN7AEyS/LpFIEBoaisDAQHR3dyM7O5vHW1DxlLo0KpWKdQ1ovba2tnIhdGhoyG3kys2COKfTybxIs9mM4OBgzJgxAxcuXOD56G+99RZuvfVW1NTUYMGCBSgtLcXy5csBuFR66+rq4O3tzZQeShp7enoglUqZlkSwfgAoLi7mfYh4wAMDA/D19eU1LZx3KpFIuHBI+yXxm6mTvGDBAnz00Uf4xz/+gdmzZ6OlpQUrV65EY2PjDXEV0TQuXryInTt3skhRe3v7TRFuN7t/wq4qjVMSIquIrjFv3jyMj48zrJ9QI3TvqYFBSDQqtMfFxaGlpYU58f/1X/+FxMREpKWlwcPDA7/5zW9QUFBw0w7SzTqk9LPvOujnwtFL33acPXsWwcHBjJ5LSkrCunXr0NjYyPPjCYVhs9nQ0tLCo7mCgoIYUk9ibsL1QAWtixcvYnh4mJE+089TaNv0HiEhIZgzZw78/PzwyiuvYNWqVdzxP3fuHOLi4qBUKuHh4YHKykpYrVZOWJ1OJyorKxEZGYnu7m7Mnj0bDodrvB99TTH8ypUrYbfbER4ezmPzhOrSer2elamdTidaWloAgNE1JKxXX18PiUSCefPm4R//+Ac+++wz5ptTcYViYaFd9vX1YXJyEosWLUJbWxtEIhF0Oh2P+ZruY+hrWlNUhKAusNPpZGVlugYaBZWdnc3oP7oGajJNTk7i0qVL6OrqQnBwMDfRLl26BIfDgdzcXIyNjTFlZWhoiM9HiEik8xMmpsLXtA6F61NoDzdbo8K4+7vsGP9pCaqfnx/De00mE6xWKytkChWv/Pz8IBaLuctGxGKCPQHf3JycnBxkZWUhPj6egxiqnNNBwQk5UercTkxMYHR0lAfVWywWrngajUZotVqIRC5pf4ICenl5ISsrCx0dHW7S7AMDA9ydpVl1ly9fRmJiItrb25Geno7Tp0/z+1AwI6wIlZWVQSaTYfny5dxdos4DdYvoWjo6Oli1MS0tDYC7o9TpdADgxkegURK7d++GSqXCBx98AKnUNeB84cKFkMlk6OnpQUZGBtr+ObyaupGkjDpdRImeBQUDFosFV69eRWFhIV599VVs27YNPj4+aGlp4eCUVNeWLl3qVlmiiqvD4VI9ow1eOCeXHDDdB/pcYQeUugskGuTv74+AgACeZUuVeOqWCjusWVlZiIyMhPomAknAdyeogEu188c//jFSU1Mxf/58vPHGG3jnnXegUqm40kVz2XQ6Herq6nD9+nUEBQVhw4YNqKysxMMPP0wLHHa7a77dhg0bsGHDBvz+97/H888/j8TERERHR/OwdErkSJhFoVCwgBDBv4VBOgB0dnYypJIGzZN4CW36tE6pK0XzZwMDAzE6OorW1laGL3p7e8NkMqG7u5tFkZYvX47XXnsN6enpsFqt6OvrQ39/P0JDQ1FbWwsvLy9kZmbCYDBgbGzMrcNOnFxa0xTYGo1G7owvWrQIcrkcx44dY06pWq1GZ2cn2traeDOjTnljYyMsFgu0Wi0uX76M5ORkTl77+vo42EtNTWXhKsDFuQO+2bxTU1Nx5MgRrF+/HpcvX0ZFRQXS09MhlUpZcIj8QFZWFsMgAXDiQsgRCsqpsEIdQeGGMzk5iZCQEFgsFp4RGRgYiNjYWBiNRvj7+8NkMjF8srq6GleuXMGcOXOwbds2VFVVwWAwoK2tDbW1tSgvL4der7/BlgmCSR03ClA2btyIdevWQaFQIDc3l0dzrV69GmvXruXxDvPmzcOnn34Kk8nEo6GIb9ff34/HHnsMfn5+uOuuu5CVlYU///nP+Pjjj1FUVITbbrsN6n/SBMjnOZ1ODAwMIDw8nNcXrQvyPaGhoTwL+ZZbbkF4eDjzeYRV8MHBQUxMTMDHxweDg4MYHBxEfn4+1Go1B1C0lry8vJCbmwulUonu7m7+fAqmKisrMW/ePA6iKEAJCAjgBJDWF6kxkl2TYq7ZbMbExAR8fX0hk8kQFBSERYsWoaamBoODg1i+fDmuXr2Kzs5OLp6Ehoais7MTjY2NzL0mOyHILwlr1dTUIDk5GRqNBhkZGazCTn5Aq9Wivr4enZ2dKCsrQ05OjhtvkMRlrl69ittuuw1VVVUMraVZuqdOneKiid1uR3JyMtLS0lBRUcHdauqE0V5D/GEArGYvVKmlz6fkgpIJgq8PDAzg1KlTqKysZJ9M0D/qXggRAARfXbx4MQwGA9RqNaxWK5KSknDx4kXExMTw/koqtsJgPTY2FrGxsSgsLERsbCzMZrNb8D79oOdhsVhYuOjatWvIzs5GVFQUPv30U6aAZGRkIDQ0FHV1ddBoNHA6nbh+/TosFgtWr17N+9nY2Bg0Gg06OzuxYcMGOJ1OFBYWsk7C4OAguru7sX79ekRHR+P69esIDw/n4hbtm62trewTr1y5wr6GmgQkqEjX8fzzz+PFF1+EVCqF0WjEz372M7S3t6Ozs/OGEXUU4FqtVlRVVeHhhx9GZWUlDAYDP29hN0dY5Kaf0R5OSt4DAwMcE1DxVKVSsRgOaVBQfOhwuGbp0lrR6/W8hr/3ve8xvDozMxMlJSV49dVXUVpaimPHjuHzzz/HunXrEBER4UYnm/5sv+v1tx3T7fvbjrNnz4Li5ICAANxzzz2QSl1jEYuKiqDVann+eVlZGTw8PHi8lc1mg0ajAeDyQzQ5QtjllkgkWLBgAUpKStwg+MJkRPhMKOYCXLHviRMnsG7dOnz55ZeYO3culEolmpqakJ+fz2g6i8WC7Oxs3r9FIhHKy8sxMDCAmJgYBAcH83hJ0kogQSXiX0skEm6QUGwnXNMhISE4evSoGyyWngd12wHgtddew0cffcSFoV/+8peYOXMmSktLOWYSdj/FYpeOTFBQEDIzM9He3o6mpia3orHQXumg9yC7EQrbDQ4OcnxJCDRPT0+Eh4fzM/H09GStEtJh2LhxI8rKyripQ+Ok7HY7IiIiMD4+jpGRETQ2NqKiosItRpleUJneXKJ7JSxECLuu09f09ONfQQLgP23MDMF3qBtKmxcp1wqrZ9RVGxgYuGnFgmCJlLx4eXnxsPHpTm96lYMOUpWsqqrCqVOn0NHRgcjISPT29mL+/Pk3QDQAMHwGAFJSUiCVSlFWVoaUlBQYjUaupA8ODrLkN52PkNNGfDpaYF5eXizVTgZN4hHUXaJxGQQHoM1XKB0OwO2cyZAImhwQEMCFAOLvicViXL16FcXFxfDwcA3wTklJYWd399138z2eLtVNn3ezruT9998PDw8PbNy4EU899RQyMjK4+BAUFMQbvfA+kxoZLRIqKNA/oTqnSCRiJyxU7iWHRUE/vSb1v/DwcMTGxjInKDo6mrkdPj4+2L9//79s0992iMViNDc3o7a2lh337NmzsWvXLvz2t7/Fyy+/jJ07d+KBBx7AsmXLMGvWLFb4peonFT+cTidee+01noM5e/Zsvs+0qatUKh43Q59N9
kEwbvp9grcHBwdDoVCgo6ODk0Wz2YywsDCeUUrw8OjoaA4w6X00Gg38/PzcNgQSeCAo79GjR5GcnIxTp04hKysLNpsNq1evZuhWbW0tvL29kZGRwbMdqZpvMBigVCoZ4UCjFAICAvD9738fEokEtbW13CUneKrT6WSoXGVlpVu1cObMmcz/lEgkOHHiBFeIx8bGuGLvdDpRUFAADw8PlJeXo7u7m9+Drm/58uUoLCzEypUrIRaLsW/fPg4YZs6ciXPnzjFMafv27UhMTHTbPCjAA9wVpIU8fADc/e/u7uZNr7u7G2azGUVFRWz/iYmJ0Gq10Ov1KCsrQ2NjI1566SVMTU3hj3/8I8bHxxEVFcV+9duOsLAw/OhHP8LChQsRGhqKjRs3orKyEn/84x9RWFiI9PR0PP/889i7dy9++9vf4qc//Sl+9atfYffu3XjhhRd4LSmVSsTExCA+Ph4REREMqVu/fj0A12gbUsTdunUrZsyYwSqjxFuk4ISeO3VKqGNKkvsajYY7UMJNme6pXq+HWOwai1BfX4+EhAQEBATg66+/RltbGzw8PNDW1gZvb2/Mnz8fM2bM4DWWkJCA6upqtvGcnBwsWbIEADBv3jweTm61WjE8PMx+NDMzE+Hh4ayRQN3drq4u7rBRZ46E3s6fP4+CggIeq0UjSKhQQl01mUyGS5cuue2ZDocD3//+93Hu3DnMnz8fdrsdn332Gf9s8eLF2LdvHwetHh4e2LJlCwudCH9G/8LDw6HRaFBYWIiCggLY7XZcuXKFubwbNmyA2WzG6dOnGTInl8uxdetWhus7HK4RGR0dHfx8CE5KiCrinVNMQIWAvr4+RgnJZDJO5mnvEh50TUKoIgCesf7FF18gJCSE1anPnDnDo41oRmdtbS3PjKT3o4J3SkoKrl69itraWl6zQt8nXMfCIjiJDjU3N2NiYgIGg4HRHUlJSZicnERubi53yoeGhmCz2dz4mBUVFbxmKVYSBtf19fVunUalUskFHtqP6ZpaW1vh6enJ70d2SYkKIduee+45Fnvz9vbmQvLY2BjmzJnDNixMOmmfJvj11q1buUg0vYsqtN1v+x4d1AknChYpDZNQIt0vADwjlMZ2kQJwYWEhbDYbampqMDExgd/85jduauDbtm3D6dOn8fXXX/M89O/qEv1fHXK5HKGhoUhKSoLT6cSnn37KHf+6ujosW7YM0dHRiI2NRVpaGpYuXQoPDw/k5eXBx8cHMTExHN/R+QuTDtofyadN/x1hN5CQHoArQSV0n5eXF+Li4mAymZCdnc1/T2gUITKQmihms5k58VevXmUUiFjsEg6i6QeAy5aIe+9wuLQ16FyFYyZpDVLjiwoVS5cuxe9//3vYbDYYjUZGSojFrrnChKyhGItiULoXR48e5fhXKpVykX76IcxNhPdges5CyCT6Ha1Wy6hGYfOMrmtkZARO5zejIWtra/leW61WtLS0QCaTsc8Wi8VoaWm5KeVA+FqYQwmTUfIR5AfoECa2dP3/XXL67x7/z3RQhaqFNP7hL3/5C9577z2Mjo5CoVBgcHCQlQ4BsEgSwfyAbwJtcrZ1dXVobGxEV1cXV/diY2PdDE9o0PQgKIEhSDFBzSIiImA2m6FQKNDW1sazpCQSCY4ePYrw8HD09vZCJpNh5syZKCgoQGVlJcNodDodcnJycO7cOSiVSgwODkKj0eDy5cvIysqCRCJBc3MzDzUmA6moqAAAzJgxA4DLEGbOnInJyUlO7okrS85XJHJBxzo6OqDVagG4knfqrpKTpU7ss88+C8DFNdq7d6+bGmZRURGUSiW0Wi13k0coalmfAAAgAElEQVRGRrB06VJUVFRAoVDwfEzaIISLgDYgoYM7fPgwDh06hJ/85Ce45ZZbWL2TFil1S2kjocLE6OgoZDIZWltbWRqenh8tMqvVylh+mr0nlAAnsQvqHBM8SCqVsvPz9fWFUqlk8YzNmzejsLDwW+34v+ugAkBpaSkqKyvxzjvvcBARHR2N22+/3a1aVVFRgerqaixbtgyRkZFwOp0sgEAQmvvvvx8ikYgDjunVMQDMSaQggIJMgvhOTEwwN4kq+1VVVTCZTKiqqoKPjw8UCgWCgoJgtVqh1+sRGxuLvr4+DtAmJiYQHh6OtrY25jFJpVL4+vqivr6eO0xUCGhtbeWAVC6XQ6lU4sKFC1izZg3mzZuHr776Cs899xzsdjuam5uRnJzMmw3ZEV0LBUcUjMjlcjQ1NSE8PByDg4PQ6/VYtmwZxsfHcfHiRSQlJcHX1xeDg4MYHh5GfX09vz8AJCUlYWBgAMnJyWhubobNZoNKpUJERAQuXLiAgYEB5vBotVo0Njaip6cHvb29DJGj+xIVFYWTJ09i9erVaGlpQXV1NXJycuDv74/c3Fzs27cPSUlJkEgkiI6ORl5eHq5fv862MjY2xhB+AFz88vX15W4hBaMOh0vBl+x8dHQUHh4eaG9vR15eHkO0o6Ki0NbWxs+yvb0daWlp6OzsRHh4OEOO+/v7v9WW6+rqUFtbC4vFgqamJoyOjgJwVaRJQCwkJAQZGRkYHx9HUlISCgoKmJtJvnfXrl1obW1Fb28vpqam0NPTg7179+LkyZNQqVTYvHkz7rrrLqhUKvz1r3+FQqHA4sWL0dbWhqGhIYyMjMDf35+hTtRNAlwbLikZhoWF8dqn+0NKz9T9J9XU0dFR2Gw27pS0t7dj3rx50Gg08Pb2hk6nQ0BAAAd4crkcw8PD3EnV6/VoaWnBqVOnWJ+gp6cHBw4cwMcffwyLxYK6ujpkZWVxR474gLT+yXaEwYFcLseXX36J+++/HwsWLEB1dTXMZjOWLVuGq1evor+/H9HR0QgKCkJkZCTa29sZNk+dSKfTiZycHBQWFiI3NxcjIyNobm6GTCZDYGAgsrOz8cknn7DYktPpZBGkjo4O1NXVMcqHfExUVBSSkpLQ19fH8yMvX76MsrIyZGdns6rx+++/j+TkZO5+z507F8XFxdylIagp7WOU+NFeS/QeWo80WsPf35/vD63hsbEx3HHHHeyTCTIPwE38juaQpqenY+bMmbBYLEhISMDo6CjP5Z2YmOC9k/YjIReRnplarcalS5fcOPO0PoX/UyAnkUh4GoG3tzdycnIwNjaGRYsWoby8HCMjI0hMTMR7772H+fPnw+l0zWbX6/WYOXOmm0ZAc3Mz+vr6kJqaCj8/P3z++ec8j1wkEvF4ubCwMIhEIqjVagwODvJe3dvby/SOoaEhRrHR+ZKPl8lk2LVrF2699Vao1Wr86le/QldXF3fDFixYgNbWVly6dAk1NTVM16G1R4dYLMbJkyexcuVKrFu3Dl988YVbMCzs3tA9Fib3CoWCfRQlvdSQSEhIwMTEBM+OpsRJLBYz/1Amk6GqqoobBCRW4+/vj9DQUOzduxc1NTXcyZPL5aioqMCePXvw9ttvY3x8nHmN0+GR/+7xr3ZPAZctR0ZGwsPDA9u2bYPT6WQOeVNTE86cOYP9+/djdHQUc+fORWJiImw2G8LDwzlxID8jjH2no6diY2NRX1/v
Bu2nY3p8IZVKYbPZ0NDQgIcffhgHDhyAl5cXUlJS8M4772DRokVwOp0YGhpi4UuKT8ViMfbv389IrOTkZLS1tQEAJ4AtLS3o6+vDihUr3M71zJkzXDSnRgyh8Orr693OkRBWgYGBeOGFF6BWqyGTyfDiiy+iu7ub19KCBQtQU1MDkcglMKZSqdziSopdbTYbenp68OCDD+LkyZOcewjjWwBu95XiF6lUCj8/P8hkMhgMBjchP7JNjUbDBVhCANBIRT8/P1b6LS8vZwqJl5cX00/UajWcTifq6uoYUhwVFcWw35slp9PjZ+HvCF+TLQhjzemFpX/FjvGf1kGlgJOgbdnZ2aiurobVakVERARXQshQyMCFgev0iiA5JWGi5eHhgaKiIsZlk4MTQhiEzpFgWUNDQ5BIJDhy5AgLpzQ3NyMoKIg3c6PRyBxBmUyGtrY21NfXc2ePeBHEQW1oaHBT9SMDdzgcLCBB9yY2NpaN1Wg0cpJNgjZeXl68Yff392NwcNCtavzVV18xRJAqu5SYURWTKqzC+0wVT29vbwQFBeGDDz5AZGQkfH19OSEl4Qi5XM5whPHxcQA3yrPT+5LgkdPpxL333ovXXnsNcrkce/bsQXBwMD7++GNOoklxl0SvnE4nz38UynoLHSklp2QPBJ2ggyCTRJynkSjCeZEBAQH45S9/ib179+KZZ5656WiZ/8lB3EDi8ixatAitra3Q6/X43e9+h9WrV8PT0xO33XYb24aHhwdaWlrw7rvvoqmpCbt27UJycjJ++MMfYtOmTXjyySc5WPvpT3+K5557Dlar1U31EgAryAmrg319fbhy5QpKSkrQ0dGBmJgYyGQyrF69GklJSdBoNOjq6sLAwADzrqhgQEFOS0uLW8UcAItRUdIDgLt8SqUSIyMj6O/vh9VqRWJiIsbGxnDp0iXMnTsXycnJCAgIYLhvWFgYlEolK7ESp+T06dNQq9UsmGOz2XDo0CEer+JwOHDixAme81ZXVwexWIy8vDzk5uZCJpNx9ZxsPzExEeXl5QgODkZLSwuOHj0KkUiEJUuWIC4ujjncYrEYd9xxB2JiYjA0NIR9+/a5VVr9/PywefNmlJSUwN/fH/Pnz8e7776LxsZGTE1N4e6778bRo0chkUhYZGnHjh2cdMrlcvj5+aGzs5M7S1RcoEo1+SxKimkUiVwuh8PhQHh4OOrq6uBwODA8PAwAmD9/Pu699144nU5cvHgRL730EoqLi5Gfn4+33nqL1cC/61CpVNixYwd+9atfYefOnXjmmWfwu9/9DmKxmOG75eXlMJlMqK+vx5EjR1h07sUXX8T27dsxNTWFxx9/nIOXjIwMHuP161//ms+noaEBO3fuhL+/P+644w7U1NQwPBFw+ejOzk5WKk9MTMTs2bMxY8aMG+B43d3dMBgM8PPzQ2VlJVpaWtDa2gqxWIz09HRotVqYTCZUVFTwyBEKZL29vREfH8/cJrLppKQkeHt7o7S0lEchzJw5ExkZGTh8+DBmz56N3/3ud7j77ruZM19TUwOn04mwsDAuNtA6N5vNaGtrYxGk5ORkFBYWsqL66dOnMX/+fBgMBhQXF2PRokWIjIzEwYMHIRK5OIlr1qyBWCxGZWWlWyfdbrdj06ZNmJqa4lnGZWVlOHr0KABg8+bNPEuY9oioqCjceeedcDqdeP/999Ha2sr3lDocqamp6O/vR2trK7Zt2war1Yq9e/cyeum+++7Dl19+ic8//5y7/o8//jjPRRQm6ATRpz3FarXCbDYjIiICgYGBmJycRF9fH0PEBwYGWJsgODgY9957r5utTk90aF8MDAzED3/4Q0xNTcHX1xefffYZTp8+jfDwcGRlZaG7uxtRUVFoamrCyMgIDAYDbDYbIyyE+41UKsUzzzyD+fPnczeRjuldRMAVvJJ2hKenJ/r6+nDkyBGMjo5Cr9cjLy8PfX19CAsLg9PpgiafOHEC4eHhbrz3kpISaDQaWCwWxMbGQiKRICcnx00ASSqVIikpiZP+qakpNDU1cXfRz88PRqOR9wSyb/p76tZu3rwZSqUSdrudaRKEVnjssccY7eXl5YW///3viIuLc0PVTE/WH330UQDAW2+95aauTLGbsCtDsR3FLpRw0R4mk8mgVquZ001JrNls5hFFNKnBYrFg8+bNvI9R3EYzm4uKitDa2soFCdIkaWpqwowZM3i+ZHd397fCHP+vDm9vbzz99NPcIf7ss8+4aL5jxw7MnTsXOTk5TLOxWq1uYkLAjTMvxWIxqqurcf78eUbprVixgn0AoUWEz4LsgmxIqVQySmj9+vVob2/HPffcw4WgkydPIioqisWBJBIJzp8/j4ULF2JsbIwnGVRXVyMlJQWAyx/SOCBat5SQbd68mefqEnqK7IFsgjRUaM0+/fTTbDcqlQrl5eW872/dupXtV6fT4dNPP3UbWUj3i86joqICzz//PP70pz/d8Izo94TFC6HtU5HWZDK5NcacTidmzZrF5yvUdiF9FUKeEBUoMDCQ9RBIuMrpdCIyMhKvvPIKvvzyS4SGhuLSpUvMm73ZMT0ppXO92feFr4X+7H+7ewr8P5SgBgUFcXJKkK2enh7mmQLgqqRIJOKqO23uQtib0CFStwxwiYoQhE+n07FzpZ8LFx59DiU5YWFhrDJJCrhOp5MTMcC1eIVBWXNzMwtXOJ1OfPXVV7jrrru40k/jEEjwBwAHmgTzISer0WgYqhIREcHiApSs0RB74raRo5+cnOTE4fjx4zhz5gwb2PTxPHSIxS5ejtlshr+/PwtHiUQixMXF4fHHH2fDdTpdwjEEyaivr8fatWu5OyfsaNyswkL3/9q1a3jooYfwxBNP4JlnnsG+fft4w6XnS51ZKgaYzWYO2ikRnV7hoaRTCMkRJuDklO69915s2bIFL730Ep588km8+eabeP3113le4PRK4v/fg5T3/P398corr8BgMOCOO+6AWCxGQkICDh06hD/+8Y/cBWxubsYjjzyCsrIyFBYW4i9/+Qv+8Ic/wGQyYWBgAL/97W+xdetW7N69Gz4+PpicnMT27dvR09PDlTq6LwRJ0ev1HHzLZDIMDw/DbDbDZrPBZDKhr6+P7SM9PZ0rkSTgRdw3er4+Pj4oLS3lZw0AcXFxPA4EAC5cuACDwcD8MbVazTw86noHBQXhxz/+McOv7XY7mpqaWMFQWOgg+GVgYCAWLFiABx54ANevX8drr72Gzz77jOe3njt3DmvWrOEgBXB1l1UqFSQSCU6ePOlW5Fq4cCGMRiOysrJQUFCAAwcOYHh4mNWEjx07xvabm5uLVatWQSQS4dNPP72h2rh69WosXLgQDQ0NuOWWW1BSUoKenh5YLBZs3LgRn3zyCd58803mxz300EMM8ZFIJAgMDERHR4cb73N0dJQrrsJNjtAkPT09PHd3amoKV65cgU6nQ1tbG+RyOdauXYtPPvkEfn5+6OnpwaeffooXXngBJpOJBeD+u6OwsBDPPfccQkND8eyzz2JgYAByuZyFWGijJchlf38/HnzwQVYXf/vtt3HnnXdCrVY
jJSUFnZ2d6OrqwujoKA4cOIAf/vCH2LlzJ+rr6zFnzhz8/e9/x4YNG5Cbm8tq1MQfIvVI4pRTV9fhcKCjo4Mhsw0NDaipqWEVdErk5XI5ysvLMTw8DB8fHwQGBsJgMCAsLIzRK4ArkYqNjUVFRQUHN06nEzExMfD29kZwcDAGBwd5Zuf8+fNhMpkQEBCAs2fP4p577mGBjIaGBjQ1NSEtLQ0ikYj9FIkJ6XQ6Ho1w7do13H333dDr9bjzzjvx9ttvY8mSJRgfH8eJEycQExODjRs34tKlS1yUXLFiBeLi4mA2m1FYWOi2r8XHx2Pjxo0IDg7G8uXLMTk5iY8++ggA8L3vfQ9TU1P44osv3AIk8k+XL1/m3xXaXUFBAX7wgx/g7bffRkFBAUZHR/HSSy+hs7MT3t7e2LJlC9asWYOXX34ZV65cgd1ux/r16/HII4/wXizsoFEXgYqGZrOZk1aVSsX+MzAwEH5+flizZg0WL158g50Ku0VUzAkKCmJ+qUajwdWrV7F27VoMDw+jpqYGAwMDsFgsqKysRFtbGyOTYmNjb+ie0muHw4FVq1bBx8eHi0rCe06HMFgluDohDU6fPs0d/JKSEoa8d3Z2QiKRMEeU/EtbWxt0Oh2PBTp06BDUajXvWV988QWrRdP5UtAqDPCpE0YBPl0P7Q++vr4spBUaGsrQZ9IW8Pf3x+uvvw6TyYSnnnoKV65cQU1NjdtYOeG1A6546cEHH4Snpyfy8vLcePf0+RQkCxMjwNXpnZ4sqf85Jkf4fKh5Qe9FY7zeeustiEQiRqmUlJSgvr4eIyMj6Ojo4KSM6FQ2mw2/+c1veJb4kSNHEBERgdbWVj6H7+qm3uxn/073lI6cnBxMTk7i5MmTMBgMDGtfsGAB9uzZgzlz5iA+Ph5dXV0oLS1FTU2NW9xks9mg1+v5WVDi09HRgVmzZmH//v0Mq59e2AHAtAqhgA/tMzabDWFhYXA4HDh8+DDT1nQ6HU+QoPeUSCQ8k5mOvr4+BAcHM9WrpaUFU1NTWLlypdvfnTlzhm131qxZ8PX1hVjsmqcrLFwYDAa3ogadb0BAAKPKqMNuMBjYfn/xi1/AZDJxQkiaJMJ/drtrxN6ePXvw2muvcTIpLLDQIbQlQitSw4xoaFT0onyFONRSqZRjaEL7EeWAeNhmsxnnzp3j9x4eHsbSpUtZhG7fvn2IjIzE6dOnma74XZDkb3t9s+P/CtrL7/9/8q7/g+MnP/kJfvGLX2DZsmUICAjAz3/+cxw6dIiFfCYnJ+F0OtHf34/JyUnmEgjhOsLOHKkfUhWMXk9OTmJgYADDw8Ooqqr6Vvw48M0GAoChYB4eHjh27Bh37rKysji4IMXUjo4O7lRSxZwCaUrAIyMj0dfXB8BVfSVBJLoG4rUBLkhfV1cXJ6+9vb1cRaUFERMTgw0bNmDOnDkstAG4eJs0zJqCXvocum5yUrTw6Pypqy1M0EpLS+F0OnneHc3+E4tdHK69e/dy0ECdIUoIpgcKN7vfRqMR27dvx69//WvMnTsXmzdvhlarZWciEon4WuLj47mLIqxyCT9DCD+g+0UBlVgsRn5+PjZu3MidCrvdjtjYWBY4GR8fZzjUq6+++j8x7W89ZsyYgcnJSdTV1eGxxx7D4sWLcd9992Ht2rUYGxvD5cuXsWfPHpw9exYvvvgiurq6UFxcDJPJhPj4eOzcuRN6vR579uzBL37xC3z44Yd45plnsHv3bvzsZz9DdXU1V/OFSAFyigEBAejo6EBaWhp8fHxYPKarqwvZ2dnMtzp//jxEIhGCgoJYNKq1tdVNXp3+ztfXl1Uc6b4nJCRAp9OhtLSUNwvqDnh4eGDdunUYGhrC0qVLce7cOSgUChQWFnJnnhLj3t5eREVF8fcdDgcnjE1NTdDpdMy90Ov1uP322xmCY7VaceLECSQmJuLKlSuora2FSCRCamoqd5sKCwu5Mi0Wi3HbbbfBZDIxj7yoqIg5M0qlknmkgAuqXFBQAAA38PVsNhsUCgVSU1Px9ddfs4DMO++8A6fTiTvuuANz5szB3/72N1gsFjgcDjz00EOc9EokEp7jCoC7q2NjYwzPpIM6ZU6nk5Ngh8OBzMxMKBQKGAwGfP3119izZ48blN/hcKkkCwOGf+V45plnUFZWhgULFiAoKIj5ogQP+/GPf8zIkRUrVkCv1+ONN97Ak08+CZlMhr179+Lhhx/GPffcw3Zqt9tRWFiIBx54AHl5ebhw4QKWLFniBq0MDg5m9Ve9Xs/K4mTjfn5+6O7uZpSFzWZjSgDx7KOjo5nDOTw8jLCwME5sAbDKKImIAWA4qb+/P1pbW/mZFBcXc+GF+NeTk5Pw8fFBXFwcurq6oNVqUVZWxoEzqUUODQ0hMjIS3t7ebO+UqNGe8ec//xlPPvkkLly4wPbxwQcfICsrCwEBAThx4gSGh4eRkpKCTz75BIDLxyclJWHWrFmcmBD1guwkOTkZRUVFiIqKQkJCAvbt2wen04mMjAysX78eX331Fft3ALjrrrtYJfb99993Gw1C++ADDzyA4eFhrFmzBgkJCfj666/xyiuvMLz3kUcewcTEBI9rsNlsWLJkCXJycngvJxumGX5SqZQVl81mMxwOB/NzzWbzTRNTOoS+SDjDe/369ZBIJIiMjER+fj4+/PBD5OTkQKPRoKenB3FxcdiyZQu0Wi0KCgqYHkBdKSHaihLqqakp3HvvvW6q2/TZwj2PfAPx+Ds6OrhgvXbtWuaCkt0VFRXBbrff0IHKy8uDyWRCYmIiB8lCCoRIJOKkmmIPsVjMiJJZs2YxnLyvr4/PUdhJnZycdBNilEqleOGFF+B0upSMQ0JC8KMf/QiZmZkoLS1lX71kyRLYbDYWeBEW7mjfmJqawo4dO3D//fdzEi0segq7zna7naHIpD9C13vnnXdyrEIB//TklHQt5s2bB51Ox7EUCQCOjo6yyNx0/j9NKyAF9MHBQbzxxhvIzMzkRIKujf5W+B7/WwF8QUEBtm7dCqfTiT/96U+QSqVIS0vD7t27MTk5iezsbNTW1sLf3x8LFy5Efn4+Jza9vb1oaGiA0/mN1gTgEtHKycmByWTicVTt7e08lUJoE9MbQRQ7Dg4OQi6XY8mSJaitrcUDDzzAYn9XrlxBeHg4goKC2I9cuHABixYtwsjICMPxz507x3soAJSXl3PsSXZw6tQpLF68GKdOnWL9AkJ20XQLKkKpVCpODn/wgx+wAKWfnx8effRRTE1NISgoCCEhIfjkk0/YfolGlpmZ6SZCSjYpTMoqKipw6NAhfhbC5052RzYgLL4Rwk8sFnNxbu3atQDA3HvKNywWC8dsYrFLaf3LL7/k+dDUVR0fH0dzczM+/vhjtLe3w+l0cnPi888/x549e1BeXo7i4mKecfy/cfxvvc/Njn85QRWJRBKRSFQmEom+/OfrOJFIVCISiZpFItEnIpHI85/fl/3zdfM/f67+V96fFnllZSUyMjJQWloKAMwPINw6ddLISITQECH++5/nwoPuhdU0Mvjh4WFWO7tZwnTDzRKLOfC4cO
ECnE4n8xBGR0dRUFDAYjy0iVksFjz//PNwOBws9GI2mzFjxgx4enpCo9FgaGiIRxuQUQphdjRag6ryzc3NbkGpVCpFe3s7ABcEZPHixTwPTy6XQ6FQ8ExKoaPs6OjgTUt4/a+//voN33/jjTdgt9uZPB4YGMhw5oSEBFitVkRFRTEMmhTHCELrcDi4akQJ4ndVaDo7O3HPPfegpaUFS5cuxdNPP42AgABYLBZYLBZ0d3ezMtl3PS9h12l6ZcvHx4cTM3IgZrOZOUgksjVdXOt/81izZg2SkpIQFRUFnU6HXbt2Yd26dTAajXj11VeRkpICvV6P++67Dxs3bsTatWvh5eWFmpoaXL9+HUeOHMHIyAieffZZaLVaHD9+HL29vaisrMTu3bsZ0mS1WjE2Noa+vj6IRCL09/czDEosFrNYCwWulJQ1Nzdj3rx5AAC1Ws0QOyG3ge4piWsQfJ2er91uh1arZeEvp9OJpqYmKBQKhIWFwWAwYNOmTWhvb8fDDz+M1NRU7qTSmjabzVxsoeKHyWTigJM6dgcPHsTu3btRWloKmUyGhx56CFqtFvn5+VAoFCgqKsItt9wCpVLJsH2Hw8GcLeo00fPOyMjArFmzUFZWBj8/Pxw+fBiNjY2YOXMm8vPzsW/fPrYtgkJ6eHgwFJJsjgLhHTt2oLS0FNu2bYNUKsV7770HLy8vpKenY+fOnfjwww/5b2JjY7FkyRI3WJvVakVjYyNzVmhN01qg+yUsLNntdvj7+yM6Ohrp6ekQi8UoLi7G448/DrFYjFdffRU/+MEPsGXLln/bfvfs2YO0tDQsWrQIDz30EOLi4tDZ2cncVIPBgD/84Q948cUXsWrVKiiVSjz00EOYP38+Hn/8cTQ1NSE2NhaBgYFITk7G5OQkQ0X37duHW2+99YYxWeRDSPjCz88Pw8PDDI+12Ww4ffo0i/Y0NTXh+vXr8PT0REhICFJTUxEaGorg4GAuslksFnR2drJaqvBfd3c3J5Z0f+Pj4xEXF4fm5maMj49z94eSOa1Wi82bN8NoNEKj0WDx4sVISEhAQkIC00JsNpubXfv4+LBID+AKVN566/9j7rvjorrS9587nd47glQBRQVUjD3WGLvRxJqiiWYTE00z2W/MGtOMSVZNUaPGFk2ipKqxF0RUDCgiCtJ77wMDA0y5vz/G980d3JL9bbK75/PhAwzDzJ1zz3nPW573eXbCz88PNjY2+Otf/4olS5ZwRWfRokWcaImIiMCNGzfg7u6ORx55BF9++SUnBl1cXDBt2jSu8JNDBFiQS5MmTUJAQADy8/MZ6kqO7OjRo3Hz5k2cOXMGgMXRGjZsGObOncvXV1VVdQ8MbvTo0SgpKUF5eTmWLVuGwYMHY+PGjbhz5w7/vbi4GBs3bkR2djaOHTuGcePG4fnnn2fnTS6Xw9/fH52dncjPz+fAg9BCdD/Isft7Q1pxJH6Bl156CTKZDBUVFThy5AguX74MtVqNI0eOMLNnTEwM9Ho9Mxg7OTlBqVRyclpKkpSbm8vOO+03Cq6liW5pJZES7KR3mJmZiX79+sHOzg6//PILf67y8nIoFAqMHz+eP5NcLkdVVRX3yQNATU0NJ7VEUeRzmORo6AyePn06rl27hqCgIPj6+rK8FFW8aI8NGDCAz2+SNBJFkdFnlKykwNVgMHB/vp+fH1d3Jk6cyI40YO240+9PPPEEPvroI/aRpPdN6tzTeUW+HQBOQNG1E5meyWTioJSqfq2trQgKCkJJSQkSExOh1WoZSVZdXY309HQOpslHoffX6/X461//iry8PMTGxsLb2xt79+7ldhd6f7o/5JdK4eD09/9fp767uxvr16/H0KFDMWTIEPTt2xe3bt3CW2+9hV27dqGyspIl2uRyORobG2E2WyRUioqKEBgYiK+++or7mU0mE9zd3eHn5wd3d3fExsaio6MDaWlpUCgU3JNJ8/D3fDbyofbu3YtffvmFE705OTkwGo3o378/P1ehUKC4uBg3b97kNdXY2IiZM2fy35ubm6FSqTBnzhyr+2prawsHBwcYjUYMGDCANXUDAgK4ugsAeXl57LdpNBqu3srlcmzZsgWdnZ28J6OQCU0AACAASURBVGpra/k6Jk6cCFG0EDRWVVUx+oP8C1qv0rV77tw52Nra4p133rFq8ZHOG31RPEJVeooXiHlXythPCQBaO7SmXV1dkZaWBq1Wy+jKlpYWlJeXo6amBkeOHEFjYyP3rUoh6u3t7SgtLYWTkxOTmv47vu0fGZwC/1oFdSWAO5LfNwDYJIpiKIBmAEvvPr4UQPPdxzfdfd4/HYJg0RTy8vLC888/j7q6OqvspBSmSfAO+htVCeh1yGEliCDhuak/kiAt5FRcvnyZme7oNei7NANK311dXVk+4OzZs7wJ2tvboVQqsWzZMnh4eGDVqlVISkrC/v37AYCv2c7Ojns+SHiemt21Wi1MJhNcXFz4kM7Pz+dKL2n40QYDwIECDZ1Oh379+kEQLCQWRPZDjg0dEMReJj1ARVFEW1sb3N3dObgjTTppkEebq6ioiPs+9Ho9Ll++jGeffZY3dFtbG9atW8f9rpRlls7p31sPAPD+++/jxRdfRElJCRYuXIgPPvgAfn5+iI+PvyfLSXNB1009rNIDDgCvpY6ODuTl5bGWVVVVFaqrq9HW1ob29naGs1JW9o8YJSUlWLt2Lby8vBAeHg4fHx+89tprCA4Oxtq1a3H06FFkZWUhNjYWeXl5yMzMZHhhZGQkpk6diqFDh2LUqFEwmUy4cOECQkJCcN9992HevHnMohccHIzGxkb4+vryHsjLy4O9vT1KSkruEYem6oWTkxNX86m6bDAYmIgiLy+PddcogCotLYVCoeAeFrlcjpSUFF5Ltra2WLhwIUNyWlpakJ+fD51Oh5ycHABAVlYWevXqBU9PT5ZkIKkYQRC4Z7i9vZ0JxebMmYMHHngAr776KubNm8eMl56ensjIyIBKpYJOp8PJkycREBCA69evc+BhNBpx//33QxAE/Pzzz1ZwMxsbGyxevBj19fUYO3YscnJyGGIbHx+PH374waq/a968eRBFESkpKfcw4prNZjz11FPYv38/li5diu7ubmzZsgU1NTXo6urC008/jYSEBJw/fx6iKCIiIgKjR4+2sneBgYG8H6lK0traChcXF9YrdXd3h6enJ5qamlBdXY2MjAzY2dlBoVAgPDycD6hXX30VV69e/bfW8I4dO3hvffTRR8yY7Ovry3IkdFB+9dVXuHbtGkaNGgWj0Qg/Pz9OZhkMBnz66ac4cuQIzp8/jw0bNuCFF16Ak5MTFixYgDfffBMvvPACO5gdHR1WbIcEndJqtRg5ciST1bW0tLAeant7OzOht7S0ICAggOHlVGX19fVl54WcCjs7O1y7do2dBaokhYeHM/yaEi/kKKSkpGDixIn47rvvWGT9zp07zOzb2dkJZ2dnZtbs7u5mdIBWq0VnZycWLVqE06dP486dOzAajdiyZQtqa2vZNo0fPx4+Pj64fv06Ro8ejdOnT6OtrQ2PPfYYEhMTeT8Bw
IwZM6BQKHDs2DFkZWXxOSkIAvz8/LBw4UIolUrEx8fj5s2bSEhIgJ2dHSZNmoQBAwYgPT2d15tSqcSzzz4LmUyGU6dOMUSdzgdBEDB69GgsXboUmzdvxvXr1xEaGooLFy5g48aN0Gg0mD59Op5//nmcOXMGAQEBfD1/+tOf8Mwzz1hV/Hx9fVFeXs7QPjs7O3R3dzNj8j8a0qSGQqFgBlQ661QqFcPax4wZg/Hjx+OBBx5AUVERn7tSX6BnclUul6Nv375ITk5mvgaqWtNZRIkiKURS6vR3dnZCrVajT58+UKvVCA8P5+rUjRs30NXVxclwer0RI0ZAq9Vi1qxZEAQBFy9ehJ2dHe+F7u5uBAYGWs0B9dMrFAoMGDCAEWqAJblNSa3S0lI+N+VyOaZMmcLXeeDAAej1enh4eEAul7Pkj0KhQFxcHDOGOjs746OPPoKTkxPUajUjkeh66Dv5A0888QQ+/PBDbl0gqCPBR7u7u1kmhhxjk8mExYsXWwUCBFeWcmrQuVFbW4u1a9cyEWBtbS23V1FLEgUGUv+TfqZKVk1NDd5++23MmTMHffv25fOiZ5Vc+jP5Iv+OU19UVMSkQhTI9OnTB4IgoLGxkaHKdEZ2dXXhl19+4cT1Tz/9hAcffBBeXl64ffs2J7gEQeA+8M7OTsTGxqJv3748N9IqKs1Fz89HRZkHHniAn3vjxg2EhoZy8kMURZw5cwbDhg1DW1sbHnroIQiCgAsXLli1ayUnJ7MOMr1WTk4OfH19ceTIEUyfPh0mk4n74Uniiv5fWuWkViSz2Ywvv/wSOTk5TBhExRMKhIcPH87+i0qlwueff84kilJbII0HAEv/r4uLCxOGSf9Ge18URe4pJ3+JXot0YSmwl8J9peeQXC5nBnhCcyQkJKC6uhrFxcXQ6XSorq7m96Xqv8FgwPXr1zFjxgxuASJf7x8Viv7b4zcFqIIg+AOYAuCLu78LAMYC+O7uU/YBmHn35xl3f8fdv48TfsMMCILAjGkeHh749ttvGUNOcJmGhoZ7smh0M6XBk/QwoudKoRe0uUaNGoWpU6di5MiRVn1XfwuOIV10oiiyXmd7eztSUlJw6tQpzoIUFxdDoVBwpocOWJIoKSkpQb9+/SCXyzFw4EDOYJKD07NKHBwczPpHJpOJD0EpjIcqWRRkZGRkwGw2Y9SoUVAqlXxQUYAKWDLrNFfSgJ2e+84772DixImc0aQ5oPft1asXPvvsMyQmJloJh0tvd2xsLJMCURDe2NjIDvY/GvR56+rqsG7dOixduhQ7duzAmjVr8Pnnn2PYsGH3UO4TvENaPaJ5JAdIevBStTQ3NxcNDQ3cp0yEQ3V1dWhqauLs7x8xkpOT0dLSgjlz5mDKlClYsWIFnn/+eXz66afYunUr3n//fVy+fBmff/45V+0LCwuxcuVKREdH4+GHH8bUqVMxa9YsvPHGGxg3bhzq6+tRVVUFtVoNLy8vREREYOrUqYiKigJgMYROTk4MeTQajQgNDeXqjSiKzBBNfbDJycnIz89HSEgIw+UFQYBWq+W1RIaV+l4AID09HWazGdeuXWNqeDs7O9x3331obm5meabIyEjY29sz211mZibc3d0REhICo9EIGxsb2NraIjU1Fc3NzQgMDGSHLC8vD7t378bWrVuh0+mwbds2duTNZjM78vHx8dBoNEhOTsaECRPg5uaGEydOQCaz9F3PnDkTMpkMhw4dYgeHsvVEINOvXz8kJibixIkTCAoKwpw5c3DgwAErKCSRAB07dgwnT57kfQZY9uDixYuxd+9eRl0cP34ce/bsgdFoxIIFC1BaWoqjR49CqVSif//+ePnll9m+kYNA/e/US6nX6yGTWbSE6+rq0NHRgbCwMERFRWHAgAFc/bGxsYFGo0F1dfU/rT791rFjxw7s2rUL586dQ0lJCYqKipCeno4vv/wSu3btwvr167FixQo88cQTVuRfcrkcd+7cwalTp/Dyyy/DycmJbXpdXR3eeecdLFq0CEFBQQzznzNnDkP91Go1V7QqKythY2MDg8GAzs5Ohq67urrCxcWFmYop0KG13rt3b+4FNJlMKCkpYXtK0PiamhqubNGaoLVN2X5bW1s88MADmD17NpydneHv74+0tDTMmzePK+1RUVHIzMxEWFgY23Bvb2+22SkpKQgLC8OwYcPwzTffYM2aNVizZg3WrVuH77//Hk888QSqq6uRn5+PlJQUpKenw93dHVOnTsX+/fsxduxYXLlyBbm5uZg6dSpEUcSJEyf4nCAJpoKCAvz888/3VHaGDh2KpKQkzJ49GwaDAefOnYNOp4OHhwecnJzw5ZdfcsKuu7sby5cvZ8dyy5YtaG1tter5UiqVePHFF7Fs2TJUVlbimWeegclkwocffsjtGcQiu2bNGtTV1WHPnj3cD0ZnNvFS0HqRyWSYPHnyb1qbUuSMnZ0dZs6cydXY48eP4/r166itreWkhiBYes/Kyspw6dIlTq7SnpOSwRmNRjQ3N6OlpQWOjo4oKipCRUUFVCoVWltbUVlZaWVHKOik66IqG8FbTSYTNm7ciOjoaHaSqXorrb5+8803zMlB9nPcuHF8XQRVp75RwHL+xcXF4ciRI2zTEhMTAVh8EmI7puo+wSNNJhNzGOTm5qK+vp7PVbLx8+bNAwB89913TIqXmprK7+Hr6wsPDw+roI8+izSBkJCQgI8++ojJtZydnRnOT/JmCoWC0QaLFi1iH4v2IABm2iWSHKVSyRWn6upqeHl5obGxEUVFRUweVFNTw59HFEXu7ye/iIodBJmtqqrCxo0bodVqERoaCnd3d+zZs+eexHvPZMbvMUpLS3H27Fls2bIFffv2hUxm0f90dnZmGbWWlhY4ODggKioKZrNFEovg2sePH0dQUJAVr4tMJmNCP7JLcXFxaGhouCexIPWRaQ2TNGNqaioEQWAyO0qSU0W3trYW+fn5EEWRix8UcFIBCgAefPBB9tlkMhny8/MZaUj7gWyRFE3X2NiI6Ohofn1KOIqiha+lsrKSC01dXV3o168fJ5T37NmDuro6Zkam5IiTkxPs7Oys2tVokL1LTk7G+vXr4ejoyG0aVOCg5D3xTFDgSee9NJFFLQ5EDkpBt16v51aUS5cuIS8vD1VVVdBoNDy3RPxE95NsTkdHB86ePYsffviB1QXKy8tZOk+aQJG2xf2jdftHV0+B315B3QxgNQCqbbsBaBFFkQDpFQD87v7sB6AcAO7+XXv3+f/4QmQWvc0HH3wQhYWFVhuCMhe0WKSDNgdVUXsaQGnA1DOwJTZeDw8PK8MjNZ49X49eUxQtfU5EKEOHsdlsZokU2ggVFRVYuXIlhg4dClEUceXKFQ7Q2tvbubdPEAR4enoCgJWhTE9PR21tLWc7vby8rCAlBFOgz9rV1cWSNaIo4r777mN2VqlhIb1JAJytFEURJSUlCAsLQ0JCAgCwjAsZQQCcodTr9QgICOBDIjQ0lHskAfAcxMfHo6qqCv369WOhbSnTb89B95K+kxFISUnBzz//jM7OTowfP54JRqSbil5Puunp
+ul3aTazpKSEtfucnJyg1+vR3d3NWpQGgwHbt2//Z0v43xpUDTp06BCam5tRU1PDmlZ79+6FKIrYunUrBg0aBF9fX0RGRiI8PNxKN5jklEaPHo2ioiLU1dXh4sWLuH37NubMmYM7d+7g5s2bqKioYIeLyDeI9Evag2kymVBdXY3o6GjOsJK4ObHx1tXVsTg4OViiKHJV6erVq6irq2NHv7CwkKnSZTIZhg4dips3b8Lb2xu3bt1CcHAww2xlMhlDke3t7VFeXo6CggI4OTnB398farUaAwYMwIYNG/D222/jm2++weuvv86SPTU1NQwzvHHjBsLDw3Hq1CmMHj0ara2taG5uRlBQEIxGI7KysgBYDpsRI0ZApVLh9OnTvA9pvUyePJnhonq9nvUEFy9ezAQntPaeeuop1lIkqBQNmUzGpDZE3iTN1i9btgxlZWW4fPky34sXX3zxHqggHYB0f2g9eHp6MuS6ubkZjY2N7GTqdDq4ublh1apVv+sapgPX3d0dAQEB+PTTT/HUU09xf2FsbCzKyspQWVlpRf61evVqXLlyBevWrcP69euxfv16vPXWW/i///s/Jpuh+1VfX49Tp04x+zPNAzkRJSUlXNUxm83s5NM9zM/PR2hoKGsB03WTzSb7n5ubywgTqr7U1dXB3d0dBQUFbEOogk8BMCVvmpubmZDv+vXrePTRR3H16lW0tLRwUoeqhnZ2digoKIC7uztWr17N1cP09HSEh4cjLi4O/v7+0Ov1uO+++2Bra4va2lpotVqUl5cjISEBSqUSQ4YMwcmTJzFixAiGnA4YMAA6nQ65ubk8TzNmzIDJZIJer0dmZqbVPXR2dkZcXBx++uknzJw5EzU1Ndi/fz/UajVCQ0Mxf/58nDhxgpOWBoMBDz/8MMLCwqBUKnHw4EErRAudT3K5HI888gj27duHlStXoqurC++++y7u3LkDuVyOK1euwNnZGQcPHkRzczOThBBEkWyT2WxGUFDQb6qc0pBWPgcOHIiOjg5cuXIF33//PcLDwxmFcvr0aQ4GS0pK8MADD2DkyJFWPWhUIaFAk2CuHh4eGDRoEPr374/U1FS2o1KNZPreM4ChM0uv1+PQoUOcPKSzuKurCzNmzLCquMrlFrk4SoLfvHmTz2aVSoXs7GxmV6X3IPinKIqIioqCXC5nuTAAXABobW3lYFIURfTq1YsD1aSkJDQ1NSE0NJTvx5IlS7iPtq2tjWXFfHx8YDQaER0dbUVoSfBsGtJq0ZkzZ7B792588sknKCwshFarRVpaGjo7O9lGU5UzODiYA3vpmS/dx3SfqH2ntbUVtbW1LKVUU1ODEydOoK6uDg0NDfeQOZFvIYoiJyZaW1uhVCpx7do1RsGUl5czaq6pqenvrkWywb/HWLp0KR566CEm0ikqKkJ5eTnKysogl8uRm5uL4uJiDtKjoqIQFRUFpVIJf39/huVK19XIkSMZjUdJLep7pvml50qh2jTvgMUmUBsSAL5HABj+29DQAAcHB/aHiehIoVAgMzMT3d3d/L7ku/n7+zO7stFoZNulUCjg7e3N80K9p1TRJdg7nRft7e3cgz1p0iSsWLGC55B6M81mM4YNGwZBEFiqi5IWPdcHBW9fffUV1Go1PvroI5YrpGQ9/Y/0jKczhJKtZOMISUn+KbUM0mf985//jKtXr6K5uZmJW8nvr6+vtwompSgZ8u8JnfrNN98gMjLSSt2CbAH5ynRdfxQJ0j8b/zRAFQRhKoA6URSv/55vLAjCMkEQrgmCcA2wBEsdHR0IDg5mKAjBvwhLTvIF0iifsit/q3oqnWy6SbSgZDIZjh8/zmyAHh4eyMzMRF5eHoBfq3cEZb17zVabWRAEpns2mUw4ffo0C4kbjUZMnDgR9vb2UKlUyMnJYf08Nzc3rlr5+/sjIyODP2NeXh5nLul9goKC4OPjg46ODoY1SJ0uuVzOh4IoWuDG2dnZnF2yt7eH2WwhtomKiuK5o55AcsAoACaHqbCwkLPG5PQ1Nzdj06ZNaG9vZ2jvjRs3eJ737t0LV1dXpKSk8NxRNU6v13MgTu9Dz6H57FkJ77mhTSYTfvjhByxduhSCIOChhx7CX/7yF4jirxAMMgq0UaW6rJRNNRqNVvI/dXV10Gq1KCsr435o6hOQ9q3+UWPixIm4fPkySktLsXv3boY9rlmzBtevX8fHH3/M7HtXrlzha/zggw+QmZmJTZs2YefOnUhISMDy5cvh6uqK6OhouLm5wd3dHWvXrkVVVRXDaqmJng4v6uNxdXXlTDrtoWvXrsFstjBZG41GnDhxgh1SnU4HuVzOkFxp9aS8vJyrtKWlpVAqlZg5cyZCQkKQkpLCVZj58+cjKSkJgOUAmzBhAmevy8rK0NDQADc3N/j6+sLb2xubN2/GM888gyVLlsDGxga7d+9GQEAAtFotAgMD0dTUhG3btsFkssgh3LhxA+np6Th48CDmz5+PvXv3IiYmBtevX8fNmze50kQOlJubGyIjIyGKIg4ePMjGWSaTwc3NDVFRUTCZTIiPj8fevXvZYXrsscfw448/snGnSinBaw4cOMD3m2zHs88+i8zMTNja2mL27Nn44osvcOTIEZjN5nvIZDo6OjjYo3tDSSIiSrC1tWVSBTs7O7i7uzPbKbU22NnZ/UsO/r8yZs2aBVtbW6hUKsyePRvTp0/Hvn37MHDgQJSVlSEnJwfLly/HsmXL0NjYiPXr1zP516lTp7Bs2TI4ODigpKSEe227urqwevVqvPzyy1iyZAlOnjyJ9vZ2huEDYFvl7OwMBwcH2NjYoLKyEr6+vpDJZCxFQJJIcXFxnKAjfeqgoCCWpBAEC/GHi4sLI3jMZjMHzMSIDFiScKSn29raCrlczhI3hOZITU1FZGQkE7CUlZXBwcEBgYGBKCsrw4wZM/Dxxx8jOjoaBoMBL7/8Mn7++Wd88cUXSEpKQmFhIcaMGYNdu3YxRJgYfqlq2bdvX4wcORI//fQT/Pz8kJiYiMLCQsydOxc6nQ7Hjh1jW7ZgwQIolUrk5eXhyJEjjCwRBAF9+/bF3Llz8f3332PatGlwcnLCzp07UVNTA5nMItG0ZcsW6PV6AJYzeMyYMXjiiSdgNpuxY8cOJCUlWXEYCIIFcjhq1Chs3rwZMTEx0Gg0OHv2LO/rjo4OtLW1MfmXvb099yRSgkWhUPzmyimNuXPn4qGHHmIis23btqGjowOHDx+GRqNBfHw8Vq9eja1bt2LatGlobGxkJ7GlpQWdnZ1oa2vD7du30djYiPr6epSXl7Mj7ebmBpPJwsTfq1cvDBw4ECdPnmS7QRUhGjTP0uq12WxmiKWLiwufO1R9lLarkMbonTt3OECRMvLrdDqUlZUxQzWdpVevXmUHNiwsjM8QURRZNYCu18fHh6uHgwcPhiAIyMzMhCiKePbZZzFu3Dh4eHjAy8uLocLZ2dlQq9WQy+XIzs5m3pCmpibW0Z4yZQo0Gg37DjQf0kT01atXIYqWHlpKPOXl5aGtrQ0mkwk1NTVoa2vDhAkT+H/JB1QoFGhpabEqOKhUKmi
1WmYSp3U1duxY2NnZoaOjA8eOHeOkFgAuhkh1I8nXVKlUqKysxJ49e7g6R2iswsJC3HfffX8z6f5HDJnMwqrd0NCAPXv2YMKECXx2kX7runXr8MILL6CkpAS9e/fmBLWjoyN6S5iP6Zqjo6MRGRnJ7Mx0TkuHtBAgTUgRrHj37t0wm81YsGABFy9Ih3b27NmM5iDUCs27VqtFc3Mz+4b0+ufOneMgdv78+QCA6upqJhwlDhZqx6DkBK0Bg8GA3bt3A7AoKNx///2QySwkYQUFBZykJwZhQRAwceJEtLS0sC9IrSvSuaDPbjQaYTQa8fjjj0OlUjHC0snJiYmYiIysqakJ/v7+CAsLw9NPP23VQkY2k/aCjY0Nz19bWxtKSkqYyPDGjRtMVEnsvpQwk8KHyQeXyWR85rzwwgt44YUX0NzczMirnlVyQjhJ0U40/hPVU+C3VVCHA5guCEIJgIOwQHs/BuAsCILi7nP8AVTe/bkSQC8AuPt3JwDWjVgARFHcIYriIFEUBwGW7J2XlxecnJzw9ddfs/NA2RjqPZH8PzvR0l4PmkSaVOo3lAav9OXr6wtPT0+EhIQgMDAQS5da2mgzMjKsoDiAtd4PVYroMYI2UM8TCUTff//9KC4uhl6vR//+/fkgKikpYdF6k8mE0tJSNnKUBaUMrCiKuH37Njdry2Qy7lMl4yzF4tOiJIgaQZko+KQgBwBnpyl4o88kZQ1LTU1Fa2srZDIZbt68yaQ01dXVSElJ4crP+fPn8e2330Iul2P58uVWmWLKYpnNZtaQvHPnDl577TU2jGSoAFhtlp7ZZroHMpkMS5cuxaOPPgqTyYSXX34Za9euhZubGwdNVOGgTCBlkOh9aH4pYCsrK0NycjLr5DY2NnIA8J8YxOKZn5+PEydO4OjRo3ByckJ1dTVeeOEFrFq1Cu+++y7q6upw+vRpNDY2QqlUIjg4GE8++STeffddGAwGZt+VySzajrGxsfDw8ICrqyumTZuGyZMnY+LEidxf0t3dzXuMsoQeHh5We8vX1xcKhQJBQUFwdXXFyZMnERoayjAZwEJkRBl9Wpe1tbXw8PBAnz592Kg5ODjA3t4eV69e5Qr1iBEjoNfr+T2pd1Gj0aCtrQ1KpRItLS146623UFNTgw0bNsBoNCI2NhY2NjZ4//338cwzz+DYsWPIz8/H2rVr2VFwdHSEyWTCmLui4cuWLcOlS5cwbtw4VFRU4OzZsxg4cCDOnj3LklBhYWGYMGECBEHA4cOHea0IgoD+/ftjypQpuHDhAqKionD69GmcO3eOe1n27dvHlROqpPr5+cFoNOKnn35ip5AqEHPmzMG4ceNw5swZzJs3D2VlZSRmjbFjx0Kn0+GDDz5Abm4u9u3bhyFDhmD48OH3HBzUe0R2gioVlIhpbW3FmDFjfjdY798bMTEx+Oyzz6BSqdDQ0ICff/4Z169fx/vvv4+xY8di6NChiIuLw9GjR7F7924OZKdNm4atW7cyq+2kSZPQu3dv7NixAwcOHODe6oCAAAwZMgSOjo4IDg6Gn58fHnjgAYZ75+bmQqlUwt7eHgaDAWFhYdyzLJfLodfrkZycjF69eqGsrAxZWVmoqalBXV0d624SxKuzsxPu7u5sa41GI++v4uJidHR0oH///ujs7ERISAj3AQmCgPDwcCviGY1Gg/Hjx/Mey87ORk1NDQRBwMMPP4z8/HysWbMGoijitddeg7e3N6qqqnDkyBF88skn3EP2008/8b13dXXF4sWL+exycXHB4sWLodVqERsbi+zsbFy8eBGxsbEYNWoUdu3axQm8mTNnIigoCGazGQcOHGBEC60pEqL39/dHXFwcEhMT8e2338LHxwcrVqzAjz/+yFI0dNY+99xzEEVL//qHH34IwHLOXLp0Cdu2bcPx48dhZ2eHnJwclkAjIh57e3s4OTkxyzIFLZSsdnJy4v62/58xceJE5OTkYOrUqQCAN954A76+vvjmm28QHR0NtVqNH3/8EZ6enoiJicHFixcBgPsuc3NzceLECdZlLikpQXZ2tlULkclkkT6KiIjA2LFjMWzYMNbGpTVEP0vPNnLgiaBHq9Wy40woIbI/t27d4laIrq4uXL58Gb169eLrOHz4MLq6uri/D7AgLWxtbZGUlMQBL/FnBAQEMFGM2WxhtyanXhRFxMTEwGg0oqKiAp2dndi8eTPMZjOGDx+OZ555hhM1BMmmpDNJ6pH82Mcff4wbN27A1taWE980yN+hvUJatocPH2bEDrF2G41GDB8+nOGPFITQ+xqNRrS2tjKihJLpOp0OISEhcHFx4fMkJiYGXl5eVr4hVbYAMNSZBp1rL774ItRqNd59913odDrY2tqirq4Onp6ec//qwAAAIABJREFUDAvvOX7P6mnP0dzcjMcffxxqtRrffvstB2rx8fFYuXIlHB0dkZGRgYqKCjg6OvL5TutD6iNXVFSgq6sLkZGRmDlzJmpra1FRUWG1Xv8WEoDmh3wG0jKm53799dcYNGgQDh48CJVKBYPBgJs3bzI6QKlU4tixY+js7OS+aiouERGoNDFOySEp8zohz4hsiOwprYH6+npuYbO3t4coWhCO1JNPkjaJiYno6uri/W0ymbBp06Z7ijo0F8CvMcnixYvx9ttv44svvkBZWRlu3bqF1NRU9gnq6+tRWVmJ2NhYK84UKjq1tbWhq6uLWYepmKLT6VBbWwu9Xs+oRiKTbWpq4kICXUvPQZVYtVqN8+fPY9OmTXBzc0NgYCAjkuj/pOiinp/xPxWcAr8hQBVF8c+iKPqLotgbwDwA50VRXAggEcCcu097DMDhuz8fufs77v79vPi3ZqvHSE1Nxbhx45CQkID29naGsUp7DOm7FCtNrLKUcSSDBfwKz6BNQkaQMu0TJkywTMLd57i4uODVV1+Fg4MDa6FJszhkwKUZO/rdycmJSUmIhEIQBKxevRoymQy1tbW4fv06V1BJHoMcKXq9oqKie7IZnZ2dKC4uZtgsifJS+V4ulyMwMJANrFar5X4N6bXa2dkhPDz815sv+7XHSloxpoVKByU50pTpoYqoNFA0mSyyCcSMKq1q0/8A4GBv8+bNkMlknO1TKpWora3lIEmaXe6xHvlnuudLlizBhg0bkJGRgaVLl+Ktt95C3759mbSKWDqpwkqfzWw2W9HDl5eXo76+Hrdu3eI+J51Oh7Nnz/6z5fu7jUmTJqG4uBgXLlzA7t278cgjj2DVqlUIDQ2FjY0N9u3bhw8++ABqtZphfBs3boSDgwPDoejwrays5DkTBAH9+vWDVqsF8GtwTkaxq6sLarWaRctVKhX3fAGW/UmJFwoqk5KSrPZpVVUV7O3tGdoiiiL3sNI6+eqrr2A2mxEdHQ0bGxukpKRApVLB398fERER6NWrF/Lz8+Hr68u9P+Hh4SyZQ/1pEydOxIYNG7jKQ4x0J0+exJkzZyAIFkimq6srZDIZQkJC8Nxzz2H48OFWgWR4eDi6u7uRlZWFmTNn8kFAgV5cXBwEQcDXX3+N5uZm3mNKpRKjRo1CTk4OZsyYgZqaGm
zatAkKhQJLlizBjRs3GCIuiiLGjRvHhD1btmyx6usmUrShQ4di3759eOihh5CZmYkNGzZAEAQMGzYML7/8Ms6cOYPY2FjY2dlhyJAheOWVV3g/mEwWHcDs7GyrpJQ0SCWWxP/E2L59OwYMGICAgACEh4fj22+/RWNjI9atW4eqqirk5eWhoKAA77//PnJzc3Hnzh2kp6fj9u3bqK+vx+uvv85f77//Ph577DGMGjWKpbkAi8RXfX09HB0d4eLiwslBV1dXfpx6bNzd3eHu7s72Xy63EHcJggAPDw+o1WoUFxejT58+UCqVuH37NmuaymQyRqGIokX7urq6Gp6enjAYDMjIyMC4cePYvhYWFvK8BwYGMjw3Ozub5btob7i7uyM7OxsffPABdu3ahb59++LNN99Ea2sr0tLSuB/KxsYG9vb2qKysRFRUFIKCgmBnZ8cMxVeuXGEkAAAMHjwYp0+fxoMPPgitVosff/wRLi4uePrpp7Fnzx4+I2NiYjBq1CgoFAocPnwYt2/f5vk1GAyYM2cOvL29cfXqVZZqIAKvRYsWITIyksmjyG4vX74cAKDRaLB9+3YkJCQgPT2dUTSAxf5oNBqWUwCs2TGpkkD77f777/+HUjL/ytBqtaz9GhwcjIyMDCxYsAAdHR2IiYlBRUUFampqMH78eDg7O0On02HKlClcIY+IiEBmZiZ69eoFDw8Pq4qDQqHgx1QqFezt7REXF2dFDCitkNDnpp8dHByg1+vx888/4+rVq9Dr9fD29uZ5INKZ/Px8xMTEcEARGRkJAFw1kjKaAhZynQcffBBKpRIjR46EUqlEc3MzQ3lpUCBL10oVHLPZzD2FhBK7dOkSw9kzMjKgVCqZLdrDwwPPP/88srKy2G/o6OhAaWkpvL29+f6Toy/1pcjPe/zxx3Hr1i3s2bMHzs7OCA0N5QR6fHw8P1e6bghlRuu7s7MTlZWVaG5uhtlsRkdHB1xdXbln0mQyMaSVbDENSiLS3+i+yeVyfPjhh9iyZQsmTJiA6upq7N+/n2XTzp8/bwWRpOv4o0dLSwtqamrw9NNPA7BAazUaDZqampCYmAiNRsNoEb1ej/z8fCv/jCQYb9y4gby8PG57WrJkCRctpHPTs+JGxQYqhhBMnR6ztbXFuXPn0NTUhMWLF0Ov17M0mFwuZ19To9FYVQB/+eUXDBs2DM3NzYiLi0N3dzeOHj0KlUqFwsJCRj4pFApGf9H+omu7dOkSDAYD225BEPDSSy8B+FVPmP7HbDZj9uzZ+PTTTyGKFkjss88+i8DAQE4u19TUWM09vQ/5xFStHTFiBLy9vblaTYPkNOk8orltb2+HXq/n4gEhAGpqalBZWcmtdTExMQgKCoJCocA333yD9vZ23jdSgi+yQ1SgEQQBZWVliI6OxqeffsqBcXh4OLeISO8txVQ0/lbi5Y8c/w528VUALwqCUABLj+muu4/vAuB29/EXAbz2W16MYCctLS1WgQM5qTqdzqqKKoV/ArAKaOimS7NiFDTRQerh4WFVXaVhMBjw0ksvYfjw4dwc3rPvi26gNKtJ10rQhGHDhsHV1ZUzet999x0zpmm1WmRmZsLb25ulBqSvQxl7+jxqtZplSAgi0LNH083Nja8tNzeX6b3poCLsPGVzAVgR4lDVQC6XY9KkSRyEUCaV6NrpPamKYGNjgwEDBnAPgDQZAMDqAKIg1mQywc/Pj1+nV69eiIiIgLe3NzQaDevWSjcZjZ5VdMro5OfnY+PGjXjppZegUCgwdepUrF27lqGigiAweQoAzr7q9XomoqBqqVKp5IPXxsYG9913329Zwr/bePLJJ1FZWYnKykrMnDkT4eHh+OGHH/Dee+9Bp9MhNDSUK44XL15Ea2sr2tvbsW3bNpw/fx6RkZEYOnQoB300VxTU0h7p378/JyNoHmxsbNDe3g57e3urveHh4QGtVgulUgkXFxeoVCqGqHt4eHCSqKSkxGotyuUWqnlHR0dcvHgRjz32GCoqKmAyWaRn7OzsuGfHxcUFSUlJCAoKYokGAKyrWl9fD71eDxcXFxw8eJAdGmI2dXZ25v139uxZNDU1wcHBgdlrX3zxRdjY2LADs2zZMmRnZ2Pw4MHIz8/HuXPnEBUVhfPnzyM9PR2iKCI4OBizZ8+GTCbDxYsX2RZQ0Lt48WKcOHEC06ZNg6enJxISEiCXyzF79myWrwAsh+eCBQuYtOzjjz/mZAytz969e2P16tX4/vvvsWLFCjg5OeGdd97hvZSfn4/S0lIkJSXxfRo8eDA7eyqVCgEBAWhoaGDCD3t7e4wbN+53c+7/lREZGYlffvkFU6ZMwdSpU9HQ0ICJEyfilVdewZtvvslax59//jkcHR1ZK6+5uRnFxcVsv3U6HcaOHQt/f38YjUZcvnwZx44dw7Fjxxgide7cOWRlZXGPklqtRkFBAWxtbdHS0gKZzKJfSgQWdA54eXlBJpMhICAANjY2SE9PR2hoKGsUm81mrqxSHxLZ8OrqavTp0wdBQUGsq6hWqxEREWFVSfXy8kJkZCR69+6NgoICODs7s35taWkpOyBeXl7IyspiSDK9X1lZGezt7dG7d28EBwezhBIl9nQ6HbZv346VK1dyQOPk5IQlS5Zgx44dXDE8ePAg2tra8NRTT2H79u1sm0m/VhAE3Llzx4rPQBAsEjbLly/H1q1bMWnSJJSVleGTTz7hcyk3NxefffYZdu7cia1bt2L37t1sS1taWtDR0cHyMFQ961mNAayRM8CvkMpx48b97muT9A5PnjyJJ554gtFL165dg7u7O+rr66HRaHDu3DnU1dWhqqoKwcHBmD59Op8bXV1d6N279z0+xODBg+Hq6gq9Xs+VnJ4QSeDX3ksadEYSFN9gMGDu3LlW80Ws6SaTCVFRUVCr1Vw9FUVL77/BYGAYJV1XTk4OCgoKuB1JLrdolDc3N6OgoIDfo6GhgW25QqHAmjVrAICZQYkg6cKFC4iPj+dAl1p/aF2mp6dzhZdIGtVqNQYNGsTX0DOIkM4D7bP33nsPer0eO3fu5AQzwSelyTn6TlV5vV7PXx0dHfDy8oKfnx9rZRLckTSOaZ6kyLWeSXK6X3SWvv7665yomjt3Lq5duwYHBwc899xz3HJFBYb/5MjKyuJz9datW2hqakLfvn0ZZk3B/qVLl2A2m7F//35mEM/IyEBUVBTi4+MRHx/P9sHBwQEZGRloaWmx8lUB695KAIzQoLUvCAIOHDiA+Ph4dHV1sR3Pzc3Fww8/zCi+H3/8EY6Ojpg9e7bV56moqGDCRIoNVCoV2xTaQ8RvQetu2rRpPPe3bt3iQLiiogKnTp2C0WjE559/Dp1Oh6eeegpvv/02ZDIZLly4gG3btsHBwcGKcJPIQCMiIu7hgaHvZFMvX76MJUuW4LXXXkNUVBT72t3d3eju7kZoaKgV8hMAXzetf6p4NjU1oaOjgyvfERERUCqV8PHxQWRkJJRKJRITE61iBvK9KI4if1cmk6GlpQV79uyBTqeDRqNBZ2cnbG1t0d7ejmXLlv2hhKD/6viXAlRRFC+Io
jj17s9FoigOEUUxVBTFuaIodt19vPPu76F3/170W17bzs4OAQEByMvL4+yEwWDgXk0Kou6+B0f2BNmSQgEoKyDNYkkNEP3N3d0dAHjh0pfBYMCoUaPwzDPPoLS0FJmZmVYlfWkWVLox6UAn+A1RxYuiiOTkZDQ0NDBMsrOzE4MGDYJGo2EhbVqwBJWRXk9hYSFCQkKsAjZBELhfpqWlhecyMjKSNUmpyijehfJIFx+JMBO8iDYibRK5XI7S0lLo9XpcuXIFxcXFvGHGjx+PtrY2vr62tjZuvhZFEampqfzZzWYztm/fzj+3t7fj0KFDkMstEh50GFOzN30+aVaop1MjhX9Js6+tra1YuHAhnnzySdjY2ODVV1/FmjVrYDab+b3oHlGlkPpmAHC26dq1a3zIXrp06Tfvkd9rPPnkk3BycsKyZcvg6+sLLy8vLFiwALGxsXjooYfw0UcfsUHMyMjA5s2buUdImqAhw0gjJiYG165dAwCeB8rWFRUVsV4dZcHd3d3h5ubGVS7STg0LC4NMJsPZs2cRGBjIzgEFqf7+/twv5ODggNTUVDg6OuL27duorKxEVVUVbG1tMWDAAKbOByz9YsnJyXByckJwcDBXA4iQrLq6mg89Yrsjp6SyshK2traYNm0a6urqOKimQKG5uRmvvvoq5s6dy7CdyZMn4/Tp05gxYwba2tqQn5+PadOmwcbGBg0NDQAstoMc+IMHDzIsk9Yp9Q8//PDD0Ov12LFjB0wmExYsWIDbt2/jxo0bvHbDwsIQFhYGjUaDffv23eOcdXV14ZVXXsHGjRvx3HPPQa1WY8OGDUhISEB0dDTLnbz99tvYuHEjMyt3dnZy3zUF5HV1dX+Ic/+vjFWrVsHBwQGJiYlQKpXo3bs3/P394ebmhi+++ALvv/8+EhMTMWvWLLi5uSEgIABhYWH49ttvWadv5cqV6NevH06fPo2Ojg6MGDECU6ZMweTJk3Hy5EkUFxdj7NixePDBBzF58mRmh5UGo8ReaWNjA7VabdVf3atXL652UgKAJDQoyKioqGCCHHKSzGYzs+g6OTlh3759HKSGh4dbrWsXFxdcvHgRvXv3hqenJxwdHdk+U1a+u7sbnp6esLGxgShaJFBEUYSzszMGDhzIOoAqlQoXLlzgniiSvDCbzSgoKEB4eDiioqJgMBiwYsUK7N+/H/3794eHhwdOnjyJ3Nxc/OlPf8L27dtRUVEBwOIcPfroo/Dw8MC5c+dw5swZPjMpAHvppZeQlJSE+++/HzY2Nti2bRsSEhKY8M/e3p4ToZQMcHZ2Zr3evwU/k2br/9YZSyinP2qEhIRAr9fj6NGjuH79OhMmUp+3ra0tDhw4wIQ6NTU1MBqNiIiIQEBAADvIdH7TuhoyZAhmzZrFUjBNTU3Q6XQAfnUgpcEpDUrukt0k+KkoikhLS8MjjzyCjIwMhIeHw2Qy4dtvv2VJOblcjrS0NERERLBNByw9qc7OzkhLS8Ojjz4KADh16hSTNkrvj7u7OyOppDC/3neZrgFwQjciIgJffPEFrl69isjISA5Krly5wiguqrj06dMHMpkMTU1NjJQiaRApLwUNqZP+yiuvwMbGBmvWrIHJZMK8efM4oSx15AnJA4Ad7s7OTtZ4pbUJ/OoHUmXPx8fHqm2K9ibdE9r3dF1GoxFNTU0YO3Ys6urqGK1VVFSEyMhIRnr0tO//qZGamoqWlhZMnToVkyZNgtlsRmBgIJKTk5GSkoIDBw5g+PDh2LVrFwYMGICEhAQ0NzcjNzcXdnZ2qK+vR0lJCReG1q1bB09PT4b6SosjgHWQKggCV8cPHToEpVIJBwcHXL16Fb6+vli0aBFMJhNXcGUyGfOEREREMDJQFEVUV1dj9uzZKCgowAMPPACFQoEjR45g5syZ3DZGCQW6lu7ubtjb26N///68Nw0GAxobG7klbdCgQSw/lpKSgt69e+Py5ctcyRVFC7RdFC2aqJ6enlCr1WhsbOSef0rs96wm09oELCzvH374ISdCysvL8ac//YmrptLnk99F5E5ms4W5t6KiAkqlkvu2aW51Oh1kMhkTokqT59JiGiWkKL4YPnw4/P398fLLL7PPR2tYq9VyXNRz/KcTLcC/V0H9XceYMWPw008/wWAwwMPDg2Us/l6WTRAE7u/pebDR36U9WrQRBEGAu7s7VCrVPRNO/0sbxM7ODv/3f/+Hrq4u5OXlWR0o0h4Z6TXRYqWAz8HBAR0dHWhoaEBYWBhKS0uZdIAqtHV1dQB+hQj5+Pjw5yHn3Gy20ISTAaDg1cPDgyGC9P9VVVVMtECLU6/Xo6SkBCEhIQCsacKl2cza2locP36cP5+vry9cXFzYiNNnM5stOmc0bxqNBr6+vjx3ffv2RVNTEwwGA3Jzcxki6uLigj59+rDzn5aWhtTUVIZwCoKlx0YQBCZ5kEKOadC9lFbSpWtFEAQ8+uijnDlbt24d3n77bdaSs7W1RXd3N2fiAEuShEil6L3/XY3If2fQoVtdXY0BAwZwFjo+Ph5arRYGgwHNzc1oa2vDiRMn2OmlIZfLsXfvXl7n5PwEBAQgPT0dgiAgOjqas9wuLi7c11pfX8/GX6lUwtnZGfb29rhx4wZcXV2hVCqZETIxMRFBQUHcs0R9esS0S9CSmzdvcv8EsfmJokXrMz8/n6999uzZyM3NhYeHBydlpPdVr9dzJhywOMPBwcHo7u7GoEGDUFVVBVdXV9TV1SEnJweVlZXIysrCZ599hu7ubqxatQpXrlyBm5sbbG1t8dRTT2Hnzp0YP348bt26hZMnT6J///5ISkpCWloaH3CzZs2C2WzG+fPn2TaJogU6P2XKFOzfv58F7bdt2waFwqLp2NjYiIMHD3KwNGbMGGb4/fjjj3kuGhsbkZqais2bN8POzg6bNm2CRqNhCBIhAEwmE/eFq1Qq9OnTh/s96ZpsbW1Z+uG/PbRaLbRaLXbv3o3CwkKYTCbs27cP2dnZ6O7uxvfff4/Ro0cjLy8PV65cYcfz66+/htlsxqZNmxAeHo5Zs2ahra3NSvNOqVQiKCjISqOUklCABfZGmplNTU3w8PDgwImyy3fu3OG1TMR8iYmJCA8PhyiKrN1cUFDApCiCIDCb7+3btyGKFkhiQkICnx3SIFUQLKQgaWlpHKBT5VuamCSJK1tbW9ja2uKRRx7BtGnT8PPPPyMuLg6iKCI8PBx1dXWccack4ZQpU1BRUYG33noLffr0YW29p556ikkI77vvPmRlZUGn02HFihXIzMzE+fPneY+NGzeOne7t27ejtrYWtbW1OHToEPbt2wdRtBCWmEwmdHR08BqkTLzUGZI6boA1iz6NnglX6Rk7fvz4P3ZhSga1FLS3t6O4uBi3b9+GTqdDVFQUxo8fj1GjRuHKlSs4c+YMSktLkZqaiszMTIYsSx30rKws5OXloaGhAUlJSXj66afR3d2N3NxcqyqxNDCnQfNBFZrKykr2YW7cuIE7d+7AbDYjLi6OyVikjL8UOJLtNpvNOHXqFEaOHMl+EvVjuru7Y/LkyVbJdgcHB7Z3RqOR
7dz69euh0+mgUCiYnEYul6O8vByiKCI6OhrJyck4ceIEJyyqq6uh0WjQ2tqKYcOGQavVoqqqihOnP/zwA/dCUh/h36qmms1mPPfccwgKCsKKFSsA/EpiJN5FUDU3N3Py3WQyoampCW1tbYzKIqhyzyqTIFhYakNDQ+Ho6Mj/T3NF6gZ0LXQvKChau3Ytr5nhw4fju+++w5dffomQkBCIomjVZ/vfGLdv30ZOTg4qKirQ0dHBTMNhYWGIj4/H008/jQEDBmDRokW4ffs24uLiEBISAldXV/zwww8QRRHHjx9HZ2cnnn32WavEBw2p7y1FQ1DSdNu2bcwmPnToUJhMJmRmZuLhhx+GKFpg1Pv27WNUEvkBGo0Gly9fRltbG9+HxsZG1jAFfmWxB4CoqCgr7pjDhw/j8OHDjEISRQu60GAwYODAgfjkk0+4GGU2m5GWloZTp07xumpvb+fPS0kQo9HIiJpx48YxIpCuWWrzyA/IyclBQkIC++10LeSf0xeRgNHa7OzsREFBAWtoe3l5cXBqY2MDf39/uLu78/ki7Y3tSQorvb7U1FQ8/PDDcHR0RF1dHQoKCrgVKjExkRFbUtv93whOgf+hANXDwwMtLS3o6uqyYpcTRZGNj9SQArDKnEgnU1pBokE3UNqrSo3aFACRBAANUbSQPQCwYnijsnzPG0nXRgvE3t4eWq0W9vb28PHxQXZ2Nuzs7BhyRrqhJONB10kwCgBWC7e+vp5hxPSZy8rKOACghe3n54empibOEoqiyFTWGRkZ/NoUYEohLdu2beMFDwABAQHQaDS4efMm/P394ejoCAcHBwwZMoQDVaPRIhgvrVhERUUhICAAKpUKn332mVVje1paGrPJ9u/fn2nIifxp48aN9zDh0SHRM8MuHdJMJx1gn376KR577DFOGKxYsYJp8+keSnXtVCoVV247Ozu5P+W/MRYuXIj6+nrcvHmTq4ZGoxEpKSkYOnQo9/LQKC0ttSIMACx9elICLXJ+iMCAkhDkOOn1eobJ0tql6gj1NtLeJCgaACY3oftUUVEBtVoNPz8/q6quUqlEXV0d5s+fj6KiIigUCobSJCcnc9JHmiQIDw9nwo3evXuzo+7s7IwxY8Zg/vz5GDlyJKZPn47jx48zfPiXX37hrKBGo8HChQuxaNEitLe3489//jNXgSn4/P7772FnZwej0YjS0lI88sgjSE9P58qHKIoYOnQoVCqVldQGze3ChQuRkpKCxYsXc7+wKIqYNm0aRo8ejdOnT7MsDSWiVCoVMjMz8cknn+C7775DWlqaVUVAp9Mxm59areZKgLu7O9sZguD7+Phw4mfKlCl/3ML8F4dOp4OLiwtKSkq4x4kq6e7u7nj11VeRl5eHKVOmwGAwMMMzfW6a52vXrnGGWVptoXVJTMbEVE57hpIf5Gw7OTnxa1KisqKiAjKZjPuF5HI5KisrERwcbFX9KysrY5kXeozWx7lz5zBv3jwmoqOEFxFtmM1mTJo0CcnJyfDw8IC3tzfmzZuH+++/H3K5HA899BBDsR0cHJitlvq3k5KSWK+bxOep+tXZ2YmxY8di+PDhMJlM2LlzJ0NYz5w5g5EjRzJsWqPR4OjRo5DJZJg+fTp8fHywc+dOnsuGhgbI5RZtzsTERJw5c4ZJO6QC91QV6OmcSaGR9HdpL5/0bJZWYuiMFgThPxqc0jAYDAgICGDoHEldjB07ltn1iQeCEsPV1dXsVwiCgOvXryM/Px96vR4ODg4ICQmB2WzmnmFpsr3nXEjnRCaToauri5mXVSoV4uPjkZmZyX3Mra2tTL4IADdv3oQoiiz5BVjWjp2dHS5dusSJ//z8fJavoPYicoiJaIh8hzNnzmD16tVW/cOCICA0NBRXrlzh9wgMDERWVpYVV4WbmxsOHTqEuro6GI1GvPfee3xdb775JsPACUUgZfPvOcxmM8s4UT8k7Uvy4ejaKfkvrQxT0CytCtO10p4ICwuz0p0k/4DsMQWuUnsAWJBeZG92796NyspKRjf8L4zW1laUlJQgIyMD9fX1aGtrQ2hoKEthFRcXo6GhAaWlpSgqKsLFixdx6tQpvP7663B2dsaIESMgiiJ8fHyYbE56pkt9cqkvTOShBoMB58+fR1tbGxdSsrKy2I8wmy0M1v369QPw6z4gHdejR48yyuXatWsYPXo0ADBaQSb7VVeU2tna29uRmpqKlJQUdHd3o62tDTKZDL169YKbmxsyMzP5bI2IiOCAUQrh/umnnyCKIv7yl78AAKNhuru78ec//xn9+/fnBAkFgj3vudlsxttvvw2DwYDY2FieO1qrlLSjqqkUEUBM2NSOSIlXQo5RoEtoHgC8j+ge0ON0n+jxTz/9FH369IG/vz+0Wi3a2trQq1cvRmZI78N/c/zPBKgxMTFITU3lANJoNMLb25snlm4m/U4Licgo6EYTfFcapEoPAJVKBUdHR0yePJl7fUg/yM3NzSrLS9cxevRo9OvXDyUlJezYA/cGpoA19FQms2iWEn68rq4Ov/zyCwsm00FAWUWCJ0dHR/N1EI029fvI5XIrzadevXrx/9F1VFdXc18gvQdl66Vi3EQAQgb3xRdfZLIZ+oqMjERgYCCioqK4dyo1NZXfmxwPLy8vxMfHM1MlYOmb0jSeAAAgAElEQVRbaW5uhrOzM2ekWlpa4OTkxPNG5CNkXEwmEwIDAzFu3DgWmKaNqNVqrSDKUvIlej36neaPNutjjz2GQ4cOAQAef/xxrFq1ihksCTpH10Nz6e3tzURD/60xfvx4TJgwAb6+vgAsDjJVWr766isOsEiPt6Sk5J7qOAAmEqARExODlJQUAEBQUBCvHWdnZxiNRk4ayGQyODo6skyJTGahtScHOTAwkCvNwcHBUKlUaGpqgp2dHW7dugUvLy82pgqFAllZWbC3t0dWVhaqqqrw1VdfwWg0Ii4uDmq1mgOsiRMnoqCgAD4+Piz+PXv2bBQWFsLFxQVjx46FTCZj6QfAov05f/58hIaGQqlUIjQ0lDOvnZ2d2Lp1K4YNG4bY2Fi88cYb8PLywty5c6FQKBAcHIwlS5ZAEATExMQgOTkZAPD8888jISEBt27dgkKhQHh4OObPn8/auNLMoo2NDVatWoXt27fj8ccfR2trK86cOYOUlBQ0NjaisLAQW7Zswc6dO7FlyxZeW4Q0oAQNABaFp0BdihIAfiXwIDtDlQ9/f/9/WYbjPzGioqKg1Wrxww8/4PXXX8cnn3wCjUaD5cuXw93dHS4uLggICGBHMCkpCUajEe+88w4EwdKLlpGRgdjYWMTFxWHlypVW63zgwIHsFIqiyGQaMpmF6ValUsHBwYF1fzUaDVxdXVFeXo6amhpkZ2dz77SPjw80Gg3KysrQ1tYGJycnvi6DwQBPT08EBgbynlSr1UhPT4ebmxsSExNRUVHBclXEMn3x4kXeA/Pnz8f169cREBCAK1euwNXVFdOnT2d2S+pTvn79OusTm81mzJ07F4cOHcLAgQPh6uqKkJAQbgdZuXIlSkpKsHjxYqxfvx6pqal44403mITu/Pn
zmDdvHi5cuMDV3S+//JLPR41Gg2+++QYHDhxAZmYmOjo6UFVVxXaDoOMUvEudsZ4IJylxTc8kLqF1pAmGngHrHyWB9FtHa2srOjo6oFarkZSUhIqKCoSHh2PBggXo7OyEo6MjS+7cvHkT2dnZOHjwIPR6Pfr06YPCwkL4+/uztJBer8ejjz7KNpGcaXJUeybFAbA/oFQqkZ6ejk8++QR9+/aFKIp44oknIAgCDh06ZEXIpNfrmbma7HlOTg6ThlEivLy8nLUhGxsbef7p7NPr9UzoRgSBtAb9/PwQFBQET09PnD17FtXV1XxmUGuWVqvF0qVLYTAYYG9vj6qqKjzzzDNob2/nKiY51dQbSrrjPVFpNMxmM9auXYvu7m7MmTOHfUKqoHV2dqK7u5t9DEq+UjWf5khaAaW+feoz1Gg0iIuLY5gwvb7Ut6O5oPkQBAEXLlxAc3Mzzpw5g7a2NqxatYpJof5XRnh4OCIjI2FnZwcnJyeo1WpUVlbi+++/R1ZWFt577z1UVVVxP/KsWbMAWPwoer7JZMJf/vIXFBYWWs3B34L60s9dXV0Mt3700UdhMBiQlpbGskYymQy7du2CKIpcsKHXysrKwqRJk6BSqTBhwgRuBSMI6vnz5/n9KZlNtofuIWmaGo0Wfe7evXtj4MCBSEpK4gDZxcUFGzdu5LmSyWRcUT969CjkcosyxY0bN2A2mzFixAgIgoBz585x8EiFgb/ljyoUCmzfvh2LFi3CrFmzGM4LgFGLXV1d3Kokk1nI0Pr06YOQkBBOdEpfn9YwIW3oc1Oyh36XIlmkPrZKpcLy5cuRnZ0NHx8fGAwGZGZmWhEskY3+b1VPgf+hAPXs2bOcCQeshegBMNabFj9V66TGWBqISh+TBpyU+aHePOoLbWpq4mw6GSIyMu7u7ggPD8fgwYOtgh7aHJRxk16f9DNIdc10Oh06OzthY2MDhUKB1tZWREdHAwAbcHJ8KKsZHh6Ozs5OpqmmgFYmkyE/P99q0QqChb1UWkEl9rJZs2ahf//+bNSl1QFpoGowGHi+r169CldXV3bMevXqxZnJPn368GcluOzgwYPh4+ODJ598ku+l9N7QBqHHamtrre6zjY0NBzT19fUIDg6Gp6cnZ4Ip0KX56UlPLw2uaVMSXOns2bN4+umnsWHDBqjVasydOxdvvPEG9Ho9cnNz0d3djfT0dLS3t0OhUOD27dv/Ew3j/4+9Lw+Pqszy/tWtqtSSpCpLpSr7QiAQEhYhbLIoCK4NboAoLkFFENDGxsfdz3bGftrWGbulexq7e9rWQcVdAWWR1cgSCDGs2ciekLVSSSqppFLb/f6oPifvLULPfDPO19Dd53nyZKu699a95z3vWX7nd4gB9IMPPsDnn38OnU4HnU6HG2+8EU8//bQCSSBJEp5//nmFA5iTk4Py8nLFMQmSDQTvfVJSEq+PtrY2HnlDjHcENSfI79GjRxmKbjKZMDg4iN7eXmaYJmeeeqLoemQ52NPhdDphsViQlpbG0LSxY8eirq6OiTbmzZuHjo4OREVFYXBwEIFAcK5aVVUVAoEAZs+ejYiICJw6dYohLZIUZMwmZ4T6WLVaLe699140NTUhPz8fzz77LFavXo2mpiakpaWhqKgIADBy5EgcPHgQ0dHR+Oijj3DhwgWsWLECAwMDik2Igoj33nuPA2uHw4EtW7YgPDwc77zzDsLDw1FRUcEsq36/n+c6U+ZXo9EgJiYGer0eJpOJ18ylEAKh+k0M27IsMzzzcpWFCxfi/PnzaGtrw4YNG7B7926kpKTg448/xquvvor9+/ejoKCAIbnHjh1DY2MjHnvsMQVSAAC2bNnCkDwgaIvHjx/PcGkgqPcAWKcp0UIEOBqNBhMnTmRIbVFREff8UfKtqqoKGRkZMBqNcLvd0Gq1KCgoQHx8PFJTU9nhpcH04eHhsFqtaGlp4eRLXl4ejEYjDh48yM9r+fLlOHjwIFJTU1FYWMi6TWO4Zs6cCZPJhLKyMlRUVAAIPv/169fjk08+QWtrK2Q5OA7p8ccfh0qlwnPPPYeOjg7ccMMN3K90+vRpnDhxAkVFRfjoo48QCARgt9tx/vx56HQ6vPvuuzzPk/Y+nU6H8PBwRcVEdBxJxD12uP/Rd/qixKhYgaFnR6/9a1ROh5Pz58+jvLwct912G0aNGgWdToeOjg6kpKRg27ZtsFgsqK6uViR0GxsbIUkSXnjhBcTFxSE7OxuVlZVctXz55Zdhs9nYwSehexP6N41GA5fLxePd/uM//oMJTQYGBhjuJ8syvvnmGwBgQjey5c3NzUj/86xLarkIDw/nERuUbKbnQX6K2I9JCXBqNXC73Th16hSio6NhsViQkZEBtVqN7u5ueDwevP766ygoKODP1N7eznwTdEyfz4fq6momfiS2fUKDhQoF8xs2bIBWq8WyZcs4qUdz2gcGBuByuXg2JAXcVNEV90hK7JLvI7aCjRgxQvEsKEkuJsTFiRBtbW3Yvn07GhsbUVVVhePHjw8bZF8OsmjRIrS3t2Pnzp04fvw4V1GXLVuGJUuWYOrUqUzgAwSTp1S5I9934sSJAIYSUWK/LqAcyUgQWb/fz205xcXFmDNnDuucWq1GQkICFy4A8Izmd999lwOuyspKLtZIkoQLFy6w/026Rj2kYiKEeDVcLhdaWlrQ3NzMFX6y4R0dHaivr8dzzz2HV155Bc3Nzby2iLCQ/PybbroJTU1N7HsHAsFxeKGjKUn8/uAc7B07diA/Px/R0dE8Xqunp0fhc1P7TlpaGvtRdF66L6JN1Wq1SEpKwvjx4xUVYPqiQF30uVWqIIR/9erVsNlsqKmpQVFREbZv345x48Yxag746wanwGUUoLa1tXF2UpaDFPyBQAAOh4MNjJiVFX8HoDAyFOCIwanYj6rT6diomUwm9Pf3Izk5GSdPnuSsGikFQXNyc3ORnJwMrVaL8+fPw+Fw8MMWlWK4DKAsB6FYlAE6cOAA+vr64PP5cOzYMR68XlpayjO8gCESm5qaGqjVaoZiJiUlsQLV1NQo5hepVMGxHDExMXx+CioAYMyYMYp7Qwabsrr0erqf4eHhMBgMkKTgqAXaIKmaNnPmTOj1eq52ExGBaPjF6i59J6NPGVwx0KTn1N3dzdA7m80GAAwv6+vrYwITEdJAx6JnIUKxyRBUVlbi4YcfxptvvslZwY8//hifffYZzpw5g23btuH999/nTf+vLXv37sULL7zArKhA0OB0d3fjk08+YablQCCAmpoaPPnkk1ydo43GYDBwpREI6kliYiIKCwt5zEtERAQ7CtXV1QwpBYJwcEqq0PMhUpHo6GhMnDiRf09MTGQDTBl5mj1GekfU/xaLBW1tbQxdz8nJwb59+5joZtq0aRgYGMDIkSNx8uRJ2O129PT0oLi4GJIkYcqUKfD5fNixYwev9VmzZiEhIYEz+qNGjcJjjz0Go9GIJ598kiH0Xq8XTz/9NAoKClBQUMB9vXfccQdXmfbu3YujR4/immuuwQcffICvv/4anZ2dOHz4MCcK3nnnHezZswdbt27lYw
8MDMDr9XKvo9FohMFg4CH1w2VahwtK6f/ixhdacVKr1f/rZDI/lKxatQrx8fE4ffo02tvbUVBQwARgNpsNBw4cwL59++Dz+dDX14eTJ0+ipaXlogRLW1sbjh07xhsoBYnx8fE4d+4cgCBMNi4uDnq9nhmsqR/N5XJxooCq1Wq1elh0wNGjR5GZmQmr1YqGhgZERUWhuLgYDoeDky/07MrKytDd3Y3Y2Fg88MADjGiYOnUq9Ho9Dhw4ACD43FauXIk9e/YodLuvrw/Hjh2DShUk8iBikpKSEgDgHmq3241JkyZBkiR88sknWLFiBUwmE9asWYPe3l6cPn0aJSUlcLvdGD16NLKzs7nPknoLqUpIbJi0/kOdPhIx6BSrgGIASjaX9FO0/WK1kOwzvQbAZZlcOX78OMLDw/G73/0OfX19qKqqQn9/PyZOnIirrroKDzzwAMaMGcPzuAsLC/HOO+9g//792LdvH2bMmAGr1Qqz2Yzu7m5s2LCB+y6BoaSyuH+La91sNsPn8yEmJgY6nQ6PPvoo8zrk5+fznltfX6+A8wFgYpXq6mpYLBZIkoSjR4/illtuUfhDYtsTJRxpBiP9j9bZ/fffD1mWUV1dDYfDAbvdjoULF6K5uRmxsbHsVFdVVTEBFxCEu1MVjhzgzz//XOF4015NkEkSUb+8Xi8efvhh6PV6rFmzBtdddx0jq4g1OiwsDFarlZOUoi6KeqjVajnI1+l00Ov1iIqKQkxMDDPNipUn0mVKNIri9/vxzTff4ODBgygtLf1BdfCHFuq7bGxsRF5eHo+2GzNmDPsNfX19XJSg9jDau++55x7Y7Xa+r+LoEdJl8tm1Wi1aWlqYeXrjxo08AlGlUuGPfwwO/xCJvtRqNRoaGrBixQpoNBosW7YMKpUKx48fx7Rp0wCA2/FEMi+fz6dIZMqyzIGjWq3GjTfeiLFjx+LIkSOYP38+qqqq8OCDD/KcUUIjbty4EY2NjQrfkj6X1+uF0+nkvlxqCRLvQ2iQSvq7detWNDc386xVkSuBJlcAwZm2breb+VFosgQF6lQlJf8DCO51NK+YUJNiUiEUgUrn/fbbbyHLMh544AHMmjULDz74IMdglwMC4LIJUA8fPsz9kh6PB3FxcQCg6DUAhhwysX+CgpvQXhdRWemBxMTEMHkEZT2JJl2c30jHi42NRW5uLqKjozFixAhmxaytrcWxY8d4YxGDZXFTp0ykLMsc+HZ3dzN73IgRIzhYTk9PhyzLzCCpVqvR3NyMnJwcNt5OpxOpqal8fGI9EzOElFkixaSgQ8zEiNcmCv2NjkfV0+7ubhw4cAC5ubn4/PPPFcE5AO5poeoDLQyxukkGizbFPXv2KIJr2gx27drFfSYVFRXYs2cP94+VlpYq2CyJwVnc2MnBos9AzzS0f+TcuXO4//77UVVVBbfbjYqKChQXF3Pz+3DZsL+WLFiwALNnz2aHmJ5vd3c3jxbp7OyEw+HA4cOHGYJGMnHiRGbNA4KbNdH/U8A0YcIEvs9RUVHo6emBTqdjhyE8PByxsbG8Tuk4Op2ORygVFBQoKrMqlQoXLlxAREQEGz6VSsXZTIfDAavVygyo4eHhPN6BzjF9+nRs27YNo0aNwsmTJ3H11VcjIiKCyZ1uvvlmqNVq7Ny5k/UgOzv7Iic+Pz8fBoMB69atw7x587jX+sSJE3A4HHjrrbfgdrvx9ddfw263o7y8nBk1//SnPyEyMhJdXV346quv0N3dDZfLxTYIGBozZTQaYTKZONNLGymghOiIG6uYXBHbDADlSC3adEiH58+ff9lUnf6rcueddyIQCODgwYOYPn06Ro0ahfT0dIwdOxbr16/HL3/5S/T09HBG+cKFC5BlWQFTDwQCmDBhAsrKytjmA0BaWhrbKyAIbSN7EBsbi4aGBq7Q+/1+xMTEsDNLX4QOkGUZKSkprN8xMTEwm83srLe3tzPZDHBx8mXPnj1oa2tjYqipU6fCaDRyMOHxeLB69WrW7fLyckybNg0WiwW7du2CSqXC2LFjMXr0aPT09ODgwYMAhoJbus7o6Gh8+eWXWLBgAUwmE2bOnIm8vDzk5uayo0Hz9SjpaDabERUVpQiURHtB5wlNjJBQEhAYSg7T38X3UwKUXkfBrbhvzps3768yBum/KoWFhZgyZQqMRiPPi46Li8OsWbMQFhaG7777DiUlJdizZw+Ki4t5VBHB+SQp2PtGbRIPPPAAGhsbuQ+NfByVSsXoJlrfot2gxLUkSThz5gxf365du6DVanHPPfco/J+CggIkJSWhqakJN998M4Bgiwgdm3qSKalAxFd0XWLQTJUYIrq55ZZbsGTJEthsNgQCAfzpT39CWFgYr5ekpCQel+f3B9ncKQi999572X6qVME5ri6XC42NjQpCFzGZIepLIBDA/fffD6PRiIkTJ2Lu3Lk859zlciE3N5fRVgD4eGJgSkgGCgpUKhUnv+Pj42GxWJCQkKBAllFSnfSdfCWy77IcnGNPRJSXu9x6663Q6/WIi4vDXXfdxRwdqampSExMVBAEieJ2u3luqLhHUfBE9510OTU1FT6fj9sRFi9eDACK5EBycjLvcWFhYdi9ezc+/PBDJibq6elREF19+eWXCgg3wbxFP1SsqFIC02azcbKQ5mK//vrr6O7uRkpKCtxuN+x2O7N5d3d3MxdCIBDgES/EZlxdXc3noKq8KKEJvmeeeQY+n49nRQPgKRi9vb04e/Yso6rIT6biEtkSImbyer3cMy3uZ4Ay7qAv0a6QP3z+/HnU1dXhu+++w5QpU/D+++/jueeeu2x838smQJUkibMdYsAiYvnpphFhC8GYyIiQIyf2n4qVOSCYyaNhxefOnUNBQQE6OzuZ8ICCKtEoiseKiorCc889h2uvvRaRkZEMDaTPQEIKIFZ6qYGbKqlvv/02jhw5wmRFRHIgwobPnj3L/WUajQbR0dFcUSWIr1j5BILVhZaWFs4QkuEGhhyL0Ky3+J0UnyoCkZGRPJYl9BgEC6Mso0ajwfvvv4/+/n5mhKVNYePGjUxKpFKp8MUXX3DwQ8+2p6eHWY3PnDnDmcpDhw4xm+y6detgtVoBBPu/3G73sFCE4apS4mKlsRyvvvoq8vPz8fjjj6OoqAiPPfYYTp48+VeHN4TK5s2b8cknnzDMQ5Ik2O12GAwGmM1mRQVk7Nix6Orq4vf6fMHhzna7XaGnKSkpOH78OOtpenq6okpKRrK5uZn/TpWn1NRUHDp0CPHx8QqGubq6OiYeIAj+mTNnkJiYyCOYZDnYP97W1obGxkasWLGCx65cffXV0Ol0OH/+PGfU161bh61bt8JgMGD37t0YP348Nm/ejM7OTvh8Pu5V2bdvH6/fVatW4fPPP2cnft++fbj77rvhcrkQHR3NrLiSJHGPTlVVFVeZyIEhmJPX6+XEkslkQkREBKKjoxVOi6hrIiRM3MhprdDvZKPEahPdf2AI3ia+X61WX3GBqSjLli3D119/DZ/Px/1IVPH5+c9/jt///vfsnNCcY6pGkv76/X6MHj0axcXFij3jqquuYnI7n
8+HqVOn8r2NioriKovT6eQEnsVi4awzoQMI7hsWFob29nbU1NRg1KhRfE20Lmw2GydfAHDyxWAwwGq1oqOjgxl/p0+fzr9TQEK6PWrUKOzevRtjxoxhZ4Fm244fPx5er5fJ+rRaLWbOnImOjg709/dDo9Fgz549UKvVcLlcrKsRERGIiYlBVFQUzGazooUm1DaSHRa/QvdBEvF9YtKTfr/U/0P/dyXpcG9vL6655hrceuutaGhowJkzZ1BbW4tz587B4XBgYGAAycnJyM7ORkREBKNPOjs7eW+VJAlWq5WJjMQvWtdkN0gX9Xo9z9uU5WDbDQV/KpWKg12j0QhgyA+pra3lnlSCDtKcSFmWUVBQwD4WOcG0P4vIM6rcEHT86NGjChZsr9fL40xWrlzJVaa4uDhOrOfk5DDrc0xMDLNP07WMGDECb731FiZOnAiVSjk7dLjECQDcfffd+Pzzz3H99dez/zZ58uSL5tuLFVQKZmisiVjYoKQ0scSnpaXBYrEgJiaG30OVQbIVwJBf5/F4eLzMlSIU8FCfoyzLTB40XOKJWO8lSUJDQwMfJ7TQQccm31yr1eLjjz/G2rVrOVGwadMmSJKEnJwcXhtAsKVp4cKFPJ8UALZv3869x6STsiwzAZ5KpeLJDKFwWJVKhUmTJuGbb77BuXPnkJOTA51Oh9mzZ8Pr9WLhwoVYsWIFj0Iym82su6RL5Kfed999TA4qIkD6+voUBTISWtdk9yRJ4lmj+fn56Ojo4ORhZ2cnJ0iI4E8syBE6gXSQ5tZT8cBsNiMxMZF9JvEaxHtB/goQHGt39uxZlJWVoaqqCuHh4bj33nsvi+opcBkFqOSAiY6Yw+FQ9HuKjhq9hkR0EEkRQgMUUpYLFy5gwoQJMBgMuOmmm9hIpf956DaxsInvFY+rUql44LzNZmMHib5IIcXAWNykyRiSETh48CBvNFQRBoIOblZWFg8Fl+UgURDBJbVaLfeaihsaVSQIikLwNbVarYCt0AKQJIl7P+h3yrYYjUY2Inq9Hn6/n5m+VKpgX0tDQwMbd9rcZFnmvkZJkhiaJMtDjMJUJQ+tflL1g+at0b2orKyEzWZDbGwsMjMz2fGk+0mwcFFCe9dEEc9JlYZ33nkHb7zxBtauXYsnn3zyL+rsX0M2btyI1atXc8bS5/Ph3Llz+PTTTxWMpyaTCZWVlYpstE6nQ2Vl5UWO5pgxY/Ddd99BpQqyM4pU8vX19dDr9TzKICIiAhEREUhKSkJlZSUMBgOOHDnCSQyCB3s8HowePZorvUAQwm42mxXQQGIK/uabb7By5UpUVFQgEAggNzcXDocDe/bsAQDeSJxOJyIjI3H69GmsWrUKW7duRWNjIwKBAPLy8uDz+fh3tVqNJ554Ah0dHTCbzXC5XNi+fTsSExORm5uLuLg43pBp9hll+QnaHhkZyZlWSoyFOvViJlnM9Is9KfSa0GQJfRcTRiQiZE9cI7L81yeS+SFkzpw5PFeX4L5077u7u7Fq1SpFVlqSJNx3330AhqqVlDgT17nf78eMGTNQWFjI64GSN2RfKSHq8XiYII3QAXQsGnXkdDqh1+tht9vR2tqKpKQkTjLIsozTp09z8oV0u7e3F6WlpbDb7bDZbOjs7ORkYk5ODux2O8rKyjiJs2bNGmzfvh2RkZHYtWsXrFYrbrvtNrz11lvQaDSwWq0YOXIkiouLsXPnTmzdupXnHg8MDKC/v5/HP0VGRnK/eGhAKDr7oq0UnXjaH0gf6Uus9Ik9TaLQ/hGKnBHPQ8e6nKumf0moxaW2thafffYZoqKi4PV6ceONNyIiIoL3XuI1IEQUMHSfN2zYAIfDoUgS05c4H5GeQ0JCAjuyJ06c4CoUObJ6vZ73VyDIUxEREYHjx48zCV55eTmTQ5IzTVUUAJxgJjZfAApfJCIigmdjAkF4IPXnE4JFkoIMrYsWLcKCBQvQ09ODefPmoa+vj+3td999h7feeovPqdVqkZ+fj76+PtTV1cFqtUKj0aCrq4v9LRE1AgzZ06+++goFBQX47W9/yyNKRHZ1Egpi6BmIgSkhKojpV6/XQ61WIzIyEqNGjUJSUhKysrI4oCbUF+0HKpWKeSseeeSRH1bZ/j/I0aNHmT1eo9HwDGMSshlbtmzBpk2bMGXKFK4uhkpowoX0l2zvt99+CyDouxKJ3eTJk/nZXLhwAe+88w4KCwvZd6QiBwVmH3zwAQAodJgCKvI1yD65XC5ERERwEExonAMHDkCv1zOqZOfOnbj22mtx7tw5zJ8/H7GxscyMnpmZyQUeSZLw7LPPKu7NypUrsWnTJu7Npbm+ooTGI+vXrwcAvPzyyxgcHERtbS08Hg8mTpzIiX76jLTPEHSX5nhTyxoFscRjkZKSApPJpCj20TWQ/tL1DQ4Owm634+DBg2hoaMAvfvELBWT7ry2XTYDa2toKvz84141GMYizUEXjQlU+CvDI2IRWHkTacir9m81mtLW1weVyITU1FRaLBTU1NTh58iQGBwcVme2+vj5mCQ49B13n0qVLMXLkSFRWVirK7RQMDhdQ0/+pp7O2thaffvopV5AJckPOj9Fo5F7ciooKJCcnM4TWarVe1PdD7ItkACRJ4qCDAkm1Ws3ZPnIqSMQqTnx8vCKI1mg0OHz4MACgv78fzzzzDBsPMiZklMrKynhR7Nq1i6utdC4RiivCfx0OB4BgMCa+lhY+McO1t7cjKyuLNyS9Xs+DpUnE8w3nTInPhhwzYsWlQPpyE6Ikp3uhUqm4MkmG68svv8S4ceN4jiutialTp+LIkSOK522xWBAREcHvTUxMZAMZFRWF2tpaDiKAIeMfGRnJx1GrgzN5IyMjufKv1WoxevRoTiwFAkHSBEqwkLE+e/YsIiIiUFpais7OTpSWliIyMhLjxo3jEUcqVbBndqt0eFoAACAASURBVPr06UhLS8OFCxdw7NgxPPzwwygsLIRGo+Eg8/Tp09i1axe2bt2Kr7/+GrIchL3T4Hbq3yCDHh0dDZvNxvonJkaGk9AgU9SrUCcoFBECKGf40toTHX8xOxtaxZLly4dI5oeQ7du3Y/PmzTh16hQqKyv53jz55JP49ttvFU53TU0NNm7cqGitAIJtDjSvFhhiOExLS0NxcTGA4LglguipVCo4nU4FgQfBXaOiohTV1tbWVthsNq481tfXIyUlhceMUGa9tLQUSUlJTD5Dz7O9vR3t7e2wWCzo7OxEWVkZD5Hv6upi3Q4EAlizZg0aGhoQExODbdu2oby8HFOnTsXu3buxY8cOVFVVobe3VzGwnZIoBO0S17UYdJKEVk3Fv9O9ECsnlASjNU+fTUTRDHdcMQHa0dHB/Zt33nknnn76aVx//fX/bZ25HKS1tRVZWVmIiopi5uzk5GTk5ORg1qxZSEtLQ3JyMvR6vaI6BASfxyuvvIL6+npF4DVcAovsAyVqtmzZgpSUFH4eNMrq/vvvZ99HpVKhvr4eCxcuRFhYGLN6k2+j0WjwzjvvsI0iu+/xePhnYIgEp6urCyqViknGaHxcT08PUlNT4XQ6ObCmyifx
a/z4xz/mEYJE9vXJJ5/AZrNxdRIAz96mHlUgGBz39fUpEiXifaGv3//+9+jr68Obb74JSZKY4JH2nb/khwFgh52I00REDI0Ds1gsSElJgc1m4/eRH0JItSeeeOKHVLH/7yKiAUMT+2QXCBo7bdo0jBw5UlFpC92rRCGfpbq6Gk6nE++99x4nJsRKa11dHUaOHAm73Y7Zs2dDkiRs27YNN998MyRJQlVVFVcRW1tbGalFeivurbRm1Go1mpqacPvtt+P222+HRqNBd3c3Zs+ezYSLpLtkz7OysrBy5UosWrQIFRUVXEVdsWIFj1yikZiJiYlwuVzo6uri5AoFvuTriwlq8mUfeeQRBAIBWCwWGAwGDtSHq/4DQ33aIsKRqqMGgwEGg4HnpUZHRyvYfEUEB8UOYrXX7/fj4MGDzIdzuchlE6AS6Y5YpQSUFVWVSsXQmO7u7osUUoRdUMWIFoxer0dsbCyuv/56pKSkoLOzE5IkoaWlBTExMYiOjkZMTAwsFgtcLhdiYmI4+xLqgMqyzP1KSUlJuO6666DRaFBTU8PXK0KOQwMjAJzdjoyM5Iwc0buLC5zGdND31NRUnscUFhbGjGTi9VEGhRTa7/cr5mPSIunr62PniBaSCINRqYIjawKBACZOnMhkSTT/jmixCwoK+HNrtVomPiLnUpKCxE7k/NN1UmApVn/pmfv9fvzpT39SbCyULQ4LC8PVV1/NBFHR0dHsNBEZDWW8Qu/9pX6nhSwakctZdDodfD4fB4m//e1vuV8YCMJkxDmFBF2npnraWGj95OTkMCtdcnIyV6MlKTjCRK/XM3szrVUaJi/LwQHYERERCAsL4wzg6dOneX4nJV7q6uqYiAwYWtctLS1oaWnBggULUFhYyAHCuHHjmEkYCFbdd+/eDZ1Oh87OThw7dgxmsxk7duxAeXk5JElCZ2cnw75FQoGYmBgenyOe+y+tU1FCURR0jNBNebhj/aVj0zOiNUAbiRjA0vv/FiqnofLVV19h1qxZ+OqrrwAM3Y+GhgZmkXW73WhoaMA333yj6H+k10dGRqKmpkYRCKSkpHB/fyAQ4PnH5NAQLIt0neyTJElITU1FXV0dWltbkZqaiqioKISFhcHn8+HAgQNIS0tj51ocd0G2iKSzsxMdHR1obGzE/PnzOWFEa9PpdPK+5/P5MGvWLHR1dcFoNKKrqwt2u50hxQMDA7BarbBYLLx+gKG9hCRUF8XqHNm24aqa4vvFJGVo4hfARQ5/qH53dHSgtLQU1dXV6O7uxk9+8hP8/ve/x4gRIxSO8JUuEydORHR0NMLCwpCYmIjs7GzuVxNJUADlyJ3Kykq2h8OJ+AzoXlNrhWg7iWxRlKamJpSXl6OwsBAejwcejwc6nQ4ZGRm8r4ttOaNGjeLzidUV8drDw8PR39+PWbNmIf3PbSA0wsvpdHJbhAjp7OzsREZGBs6ePYtAIMDjaFSqIJNreXk5uru7OYkntl4YDAbmmbgUwkS8rx9++CECgQCPzwnVb2Ao6BIT8qLtF5148jOioqJgNBoRGRmJ9PR0REdHIzk5mZPYdH5x9N+VLJcKUvV6PaKjoxUzqCkIEgN+2rPEhBbZHPI9KBHt8/n4b7IsM8KL5pxS8Nne3s6FktOnT7OtI5+WglBKlIgETXSdNHPdZDKhp6cHFouFn59Wq8Xtt9+uQBZ8/fXXSElJgcPhgE6nY7ZorVaL9PR0xMfHc5tZIBBAR0cHvF4vzGYz23NKJNLnE+0y7Rmtra146aWXmAlbHBlJ7xP1luaji8zalFChMU1hYWFISEhAXFwcYmJiFH3t4pQAj8ejgP56vV6MHDnyB9CiH04umwA1Li6OsxeyLLPykaKHiriBAuCFIjp59HBFeMenn36K9PR0nDt3DjqdDr29vYiNjYXT6URvby8zbTY3NyuCPDoOSWNjI+rr69HQ0ICRI0fi+uuvh8vlYrZFukbxM4gGlhYZOVc9PT3MNika1PLycjQ3N3P18fjx44iJiYFKpeIghIwGff6GhgZ20ClwI6y60+lUBIR0jygDJX4BYIU2m81obm6Gz+eDw+FQ9GGQkRErQLIs46GHHuLPQvPfCOYjfkZxEyYiH0mSUF5eroAaiRleSZLQ2NiInTt3Mm6fgjVJkmA0GtHf3889rMNl9kKfyXCZv8tV7rvvPqbtl+XgmAEaz6FSBeHxn3zyCSZOnMhz4YCgvk2fPh0nTpxgh4mMXENDA9rb27Fjxw4EAgEO8vV6PTo7O7lvj56ZqKt+vx/l5eWwWq0MNRwcHMS5c+dgsVhYLyjQSE5OVtxv0qnPP/8c69at45lcNHt3//79OHDgAPbu3Yv4+Hi43W44nU5cuHCBjbbBYGCijqioKERHR8NoNF40IkBcxyJBkSjD/R4alIZWPEhoQxad+NAqqvjeUPgPrQeyGz6f77InkvmfyIIFC/DRRx/x7FBycgcGBhAfH4/m5mb09PQgMjISra2tyMvLU9hZABg9erSCpRoI3uvJkyfj22+/5eReZGSkgqrf6/VCr9ezI6JSqRAfH89jWCQpyHyanJwMk8nEwQcx+xqNRkUVVa/XIzk5WYEOobnAX3zxBdauXYs//vGPCAQCuOaaa5CRkcG6vX//fpSVlfFMSEqukGMVFRWl2NcAKKDGoYkW0eaJ+1doQCkGK/Q++i5yMlBwEXqu2tpaRUBGIxpef/11bNq0CZs2beIe3b+l4JREkiRmFSebSkkUsRoty8Geuffffx8TJ07k0TWh+w/dV7ESD4DhecQHQO002dnZfG6xCnX+/HlkZ2cDAA4cOIC5c+dCrVbjo48+4mfqcrlYLwwGA7OQk+2hcWNz585FQ0MDXC4XMjIycPr0aaT/eXwNJSmpJYaChb6+PrhcLtTU1LAfQXOuKfDzer2IiYmBLMvYvHkzjh8/jra2NmZxp2sZzt6KvlRBQQG+/vprTJ48GfPnz2cfJNR/EAP0UF2mfY38JYL6EoNqREQEkpOTkZqairy8PFgsFixfvhzZ2dncevC3IEVFRbxOxdawm2++GatXr8by5ctZr+l/wFAyjH6m50b639/fD7fbjdOnT3PlUSRV2rdvH0aOHImPPvqIK4SSFGTqJ3+kt7eXX09EWBSgAso15PV6ERYWhvnz52POnDmoqqqCJEn45ptveOY5VVfT0tLQ0tICSZIYNen1evH8889DrVbDYrEwgu/kyZOorKzk8XiyLPOca6fTyczowyWvxd9lWcazzz4Ln8/H1X9ChooiTqkgKC+td7rPRN6m1+u5mJCYmAibzaZotaJ+aWrdGxwchMFggNfr5dm3l5NcNgEqVe1oExYNMxlACkxcLtdFVQdgqC8JuDjYoIf7wAMPwO/3Iy0tDUePHsX48eO5wb22tpYhM7Nnz+bASxQyWGVlZRgYGOANh2adkqMS2t8j9gINZ2hNJhPDWT766CNIUrB5nUYGUPA1evRodHR0QJKGerVCmRM7OztRW1vLzp7X62XGPKpAA0Mzjujei1AYul+tra1M6ERzWGlRfPDBB9wnK8syuru7sWXLFtjtdhi
NRmRmZrKDTZ+XAmU6h0oVJMURM6VerxefffYZtFotUlNT+RgU5PT29vLPHo8He/bsgcPhYHheS0sLV80lSYLT6Rw2CKWfSZcuBQW+XCX/z6y01KdQV1fHlRki0YiKikJpaakiKJOk4CBqGlkjScH5dj6fD+Xl5QgLC4PdbkdlZSVXPsn5DA8Ph9Pp5GpSVlYWBxRut5urrbGxsXzv4+LikJaWxqQ3RJtOTKlJSUlISEjgMVP79u2D0+nE3r178c033zD8hioBkZGRMBgMTH4UFRUFk8mkcDoAJeV9aM+c+BqqQA1XORB/FollxPOIxw69BlFCg1yS0ABZRI0AVxaRzH9XTCYTnE6nYvab3++H3W7Hzp07GYaekpKCuro6HjtFolKpMGXKlGHn/SYnJ6O4uBgqlUpBxU9ZbiITInihLMs8C5CexcmTJxETE8Psjl6vF8XFxcjIyODWDAAMT6c+VdKDM2fOwGAw4KuvvsLo0aNx6NAhHDx4kMeWuFwubsWgYJQSUGJP3XBJNtE5G+7/of8j/QytIolJTmAI1keVMWobGRgYwPfff4/vv/8eJSUlTApEiaBXX30VP/vZz7iPjY5HMzf/VoUce3IcqW8fGBqddvz4cUZeTZs2jVn8Q+0BPR/R0ff7/TCZTPB6vfj1r3+NwsJCeL1e3HDDDfxejUaDyspKTJkyBRqNBjNnzmQnnPY56n0FwD4HQRLp/KRXtBbj4uLQ2trKkPmzZ89y8CtJEoqKimAwGLB9+3a+jry8PKjVaiZECg8P5wBQDA6JkIbWn8PhQFdXF0pLS3lUzaX64kRn/+OPP8b333+PadOmYdy4cVwhIoI72gfEdgpAmXAcNWoU5s6di/w/z8p+5plnsHjxYlitVoZS3nfffcjMzMSSJUvgdDpx4403/jAKdJmJmEwi6DMlwB988EH2MYgJeTj7A4ArmuTTUrvdXXfdpUicNTQ0ID09HYODg3j44YcBBIkhSc9aW1sViQdCBGg0Gg4qgaCPbjAYuJ2nt7dXUdmMiopCZGQkM8CfOHECYWFhqKysBBAc83LXXXcpkkyEHqQ2ECKl/PGPfwwgWFihubLUdiXCzUVkjygajQYPPvggvF4vxo4dq4Dpi60VIrERHZvuBf1fhKdrtVruR9XpdJwgpMA/LCyMk5+EMroc5bIJUMl40EOgfjFx46RsnsgyBighG/SdFgEAdojNZjMcDgcyMjJQV1eH4uJinD9/HgaDAf39/bj22muxfv16hvmKcx9Fx5KCqszMTGRlZWHRokVoaWlBamoqRo8ezRXY0EqLGBCFOsEUpBID46effgogqMBjxozhkRhlZWVISEgAEAwwqZmf7pNWq0VMTAzGjRvHWVKNRsNVA7pfIjyaHDYxeKSMOlXKKJNO9zQQCKC+vh5arRZfffUVk4icOHGCYcrx8fEXOfNhYWGKDUqtVuOGG27gTZh6Fo8cOQIAHNwAwU2qs7MTmzZtQiAQgNVq5c9dXFyMbdu2cTB18uRJvn6C/IY6AvQ5REgzPacrCUpJG7DH40F8fDwCgSD7rNfrxXvvvYdRo0bh0KFDAIaqhwkJCejv70dBQQH27t2L2tpayHIQudDR0YGRI0ciNzcXDQ0N/MyJrp3gXAaDAX6/n6tSKpUKhw4dQmpqKgKBAFfLjxw5ArPZjHHjxiEzMxNjx47l5MLIkSO5h4eqAgT9MZlMTPhiMpk4Y3qpyiMwvAM/XPWShAx96HtDg0kxcRPq7Ic6O6EBJklociQ0WKZkUiAQHKNw7ty5v9mq6XDy6aefIi0tje8hEXGYTCaMGzeO1/qxY8eQmJjIxB4iDLWnp4cTUvS/zMxM3oTVajV6e3sZHRAWFoampiYmzAgEhojhaHYkENxPLBYLjEYjM/729/ejqqoKmZmZDPmTpOAIEIJZpaSkYOzYsTwOjJAtpOM6nY6r/SJUd7gWg1D9IWc7VDcBZWAamvyg94TuQ+K+YLfbcebMGVRWVsLhcMBsNuPZZ5/FunXrsGHDBn4eubm5Cqb2NWvW8M8iQuNvsXJ6KQl17IGhShSNx6KAKyEhQcEADSiDrlBb4na7eQTFmTNnEBERwUGDShUkWrRYLDhy5AgnCCorK2EymRSQQDpHamoqBxC0H9O5aT+gMUoTJkxgPgJJknD11VejrKwM+/btg8PhQCAQQEJCArxeL3bu3IlRo0YxQyxxU5AvIsqdd97JMGHSS41Gg9jYWLz//vvs34jTHEITkeQ//va3v0VTUxN+9KMfYdy4cfD7g6NzqMomjuGhz0Hnk+Vgv/Tg4CDD6KOiojBhwgSsXr0amzZtwsaNG3HTTTfhV7/6FV588UX8y7/8yw+iM5eriNVUQDnB4Y477oDT6eQkgrhnhv5OwRSNBgzdGxsaGqDX67Fjxw4MDAywztAIMK1Wi3379vHrIyIiOHlOhKOir0l2++qrr8bu3buxadMmxMfHo6KiAunp6ZyYWb9+PfR6PbRaLdvgEydOYOzYsRgcHOQJG5RkoeAxEAgo1h4hIR0OB1QqFY+LCe2jJgn1Ix599FHcdtttyMjIQCAQYD+Akn5i9R8YSoqL+x8dl1qazGYz71eTJk3CmDFjEB4eDrfbzXPAyb//yU9+8sMozA8sl1WASg4CBabUm0lftIGLm7eYcSAjSxkPMkaUuaPZhK2trYiJiUFvby/0ej0GBwcZlkjHoffQOSiz39jYiMHBQcTGxjIJTHh4OFwuF7Kzs5GamorJkycjISGBsxWihFZSRVGpVExN73K5mOmOKqgqlQptbW38Whq7QGxqtDhpgDYtKoKIaTQantuoUqm4WkCfOXQ+HTkrlEmlY+l0OhgMBixZsoQD10AggOLiYsTGxvKCpeOQwxIWFoYFCxawIxQIBPDiiy8qFpr4HpUqSDdPVXWdToeBgQGGC9E8WGr4pmx/XFwc4uPj8cwzz0Cn0zEsIiwsjCvJYqUrdMDycFnAy1WIzVGSgkzMbW1t7GBT79rx48cZLkhGdu/evWwEaWMmp5kIg2bMmIGVK1eiq6sLshzsEenr6+NMqizLMBgMsNlsMBqNiI2NZSbdMWPGYOTIkZg9ezamTp3KGwiRSlAVwWg0wmg0MgMw9bFeqgIpZiKHc+JDDXhoRVOsgNL/Q89FTpLYCxK6+dLfyN6Ebjh0LrJNZNvIYaqsrERlZSWamppQX1+P9PR0mM1mvPTSS3jqqafQ2NiIdevW/XdU4oqW6667Dunp6Zzt9Xg8+NGPfnQRMcSXX34Jk8nEM0Xpvs+YMQOnT59GdXU1H5MSTvv378fu3bv57+Twms1m1NTUcK8RANbF2NhY3kcOHz6MhIQEBULA4XCgoaEBqampsFqtGDNmDHJycrg1hJJ+ZH+ISIwgjGT3SI9ENNBfEjGZFvozMKTD9PlJ90Sdpuv//vvv+TV9fX04f/48PB4PlixZgjfeeANPP/00Fi9ezPsJAKxduxarVq3iirJGo0F+fr5irdE5CKnx9yShMEkg+EzuvPNOPPjgg1iyZAmmTZsGnU7H0F0S8RmJQn+n+cs6nQ7Z2dmKNp
mioiLceuutaGxsRGZmJmRZZoIYAJz4BsAkieTkEzkh2TraR5KTk1FYWIjt27dj6dKl8Hq9WL58OY+T2bVrF+Lj41nPyU8YP348XnvtNdZxrVaL+fPn8x6kVqt5NM4rr7zC10J6+8Ybb6C3txe5ubkALh6RJwr9zePx4J/+6Z/Q3d2NpUuXIi4ujvt2qVIc6kuK/mNycjKPDCLkHqGFaL8UyTd/85vf/A815cqQ4ZIuAPDSSy8x+kW0N/RFf6P9PxAIMArgD3/4A7/+iy++wLJly6DT6TB16lQAwLZt25i7hKDoYtGH3ktEqnQelUrFfl5WVha3AEmShO+//57XwhtvvIHs7Gw89thjaGxs5ECXdPuf//mfFRM3yM6JFU6LxYJf/OIX7EeSL/naa6/hZz/7GfutJGJMIYrX64XD4cDixYt5rBG1mFC7B92/4fQfCCZbzWYzo+rIz6LZ8gkJCRg/fjxGjBjBhay0tDRs2LDhv6sW/+ty2QSoPp+PG82JAAlQ9m8S5EmsqgLKQeFivykJBad33HEHLBYL4uLicOONNyIzMxNpaWk4c+YM4uLiUFtbi4GBAURERLBS+f1+nDlzBseOHcPRo0c5Ax56TiLXoJEE1GMB/OeVE/FnqnjSscPDw2G32zF16lSoVMH+KHJ+CEIrOjsU2FOPLS0kYmAVmWljY2P5vJSlEa+FiBEaGhqYSp++aEHSgqGMUnR0NOrq6lBWVsZwU/GYVHWj6+3v7+fPTH/zeDyIiYnhDYQCicHBQXR3d8PpdMLvD85ApJlPZITOnj0LSZLwm9/8hhva09LS+HnpdDrOqALgai4F4aQvV5JQTwglURITEzl7TvAXt9uN/fv3Y+vWrdi7dy87w16vFw0NDUhMTITdbkdMTAwMBgMyMjIQHR0NWZaRnp7Ofc6UBNJqtbwx9Pf3Izk5mUmTqIpNM3/1ej0iIyMRGRnJjoJ4j0PhJaGBJ/0uVjtFEQNO+plIPMQ1JvbHAEqiF3LeRX0OrUyFBsiUte/p6eHkVW9vL0Psi4uLUVxczAEABeN6vR7h4eGYN28efv3rX+PNN9/EHXfcgYceegg+n49hdH+vcuONNyIyMlLBkNnU1ISMjAxOiFFrAY0NIJFlGTk5OdxPJMvBloFt27YpzuHxeBAeHs72hQJRo9EIu93OOkTrgbLhNPc3JSUF48aNw6xZszBy5Eio1WrExsbC7/dzD5DFYmEnY7gEBzBU/SG9Cu0/Cv2bWP2k94euJwCK/REY6sErLy9nONqJEyfQ2NiIqVOnIiwsjElPnnjiCbzwwguYM2cOBgcHFb294n5BGXi3240VK1bw+ciWFxUV/V0Gp6KEks7QfmM0GpGTk4P8/HxunRFFfK70s2j7yLaJvfzEhEuJ7by8POYWoGO2trbycanFJ3T9kA2kfX769OloaWlhX4zmUVOfcSAQwNKlSznwCwsLw6RJk9DX18e6o1arsXTpUixatIiRWHSuxx9/nPcAuj8xMTGMoKirq+NARQwoQ++TeG/Wr1+PwcFBrF69mkeMUb+5yLdBeyB99ra2NkRGRnLA1dLSwrpOSSRKXm7cuPF/qB1XlgzXmwoEk+ShI2dC7R09Owq03G43I9oIpvurX/0KLpcLV199NSRJQmtrKyZNmgS1Wo13330XQPAZ0dxfeg7izE9JCvKP+P1+xMfHo6mpCR6PB/fccw9f78DAACNgaDQYJZBPnTrFPd2iz2i1WrFx40a2bVTcefPNNzmB7/cHZxFnZGRwnzbpGxWGRH+Y/Fq/P8gG/9xzzyEQCGD16tWw2Wzo7e1l6LKYnKGCj1g9JUj97bffjpUrV+Lll1/Gv/7rv2L58uXM7hsdHY2srCxkZWVh1qxZmD59Ou66667/JW35YeSyCVDFDVeswIVWKcSh1/SQRAl9vZiNlGUZn332GT788EMcPXoUWVlZeOyxx5Ceno5x48aho6MDDocDRUVFbJyOHj2KtLQ0nD17lqGJ4oBiIGhUb7zxRsybNw8LFizA9ddfj6VLl2LMmDGoq6vDqVOn2OkhEfvWxA1C3OSJRMBoNKKkpARarRZNTU28OdXV1UGtViMlJYU/J1VGZ8yYweQddCxysEnRaeAwAAXEl66JHCez2cx9BrThUEZKp9PB6XSivb0da9euRXZ2NubNm6fI8lBwSVkougafz4e3334bHo8HN9xww0W9IbQARdYyqnqEhYXhuuuug9/v54Xc29vLwSttQHS/UlJSmIWWrmFgYIAd3uEcwytJHnroIYwePZoJWsxmM2w2G8/goyCK7mNvby+TCGRmZmLmzJm4//77kZeXh76+PpSWluLgwYMAgGnTpinm9lKwSv0kZrOZA1dq0r9UEiY0SAxN2IQ6ZfRagmFfqrJA76deanquJKFJLUCZzKJER6ijL8LRKTlVVlaGU6dOoaSkBM3NzfB4POju7sa1116Lp59+Gs888wyioqL4/Hl5edxnFRsbi6eeegqvvvoq7rzzTsXIIAB/9w49yb333stzfQOB4GxkSpBQYuyzzz7DnDlzUFhYyPdakiTExMRg9uzZKCkpQVFREQoKCrgKQgQWpLfi6KvW1laGRvX397NexcfHIysrC3l5eZg+fTpfIwXQhDqgmawkwyUgQ+29qP/DJV8AcB+fOHIn9Fih+u1wOHDq1CkUFxejtLQUpaWlsFqtePLJJ3HTTTchOjoaPp8Pc+bMYQZ7g8GAm2++GRaLBf39/aivr0d3dzfi4uLYUSJiHkkK8iEsXrwYd999N9sGeg5/T5De/0xCYZKhCbnXXnsNnZ2daG9vZxs5XKVE3NeILfqrr75im15UVITHHnsMgUCARyhVVlYiKiqKHXtx0sHo0aP52BSsAUP7rs/nY7QSkSdR9dBqtaKgoACyLGPq1KnQarU4deoUAKCurg4LFy4EAB79JUkSrFYr2trauBJF47xCESjh4eHo7e2Fz+fD9u3bmXOC5r5TFRS4GAZNx/L7/Vi5ciVUKhUeeeQR3pcoEU8BKAXplDh1Op0wmUwYHBxUwI6pBYASW2LF++9NQnV50qRJSEtLU/h7ou6SnRL3OfLFPvzwQ7zxxht44oknIEkSNmzYgEAggOrqathstovsmt1uZ3QjBaYej0exZsgHvOmmm1BRUQGfzwe9Xo+9e/di8eLFuHDhAm677Tb09/cjMTGRkXmBQHAmkPJe4gAAIABJREFUsNfr5Yo5EFx3jz76KN566y32p4Egi/D58+fZX6Hq6v333w+/34+9e/fCZrPx+amqSyIif2htrVmzBn6/H2vXrmW0IhUeCLUWijSk39va2mC32xkt2dfXhylTpuDpp5/Gb37zG/zud7/Dyy+/jLfffhtbtmxBcnLy/46C/IBy2QSot956K4AhljdyHOjhi06o2DtD0BHRqRV7CknIcC1duhSRkZFITU1FSkoK4uPjMXLkSLS0tCA3NxejRo3CVVddxVj5cePGobW1FVdddRXP3tTpdOzMigGywWBAb28vampqcOLECUyaNAnXX389YmNjGYoWCikUF3Go8+L1eqHT6dDS0
oLBwUGcPHmSz6nRaJCUlAQgONwYGILA9Pb2oqSkhD+/Wq1GUlISO0O0EVVVVfG5yHCLGwVVw3p6ergKmZCQwPeSmuS1Wi1sNhva2towd+5c9Pf348CBAwgEAoypV6vVKCkp4U2JroUqfRcuXFBkNek5E+SOYL4iy9m3337LG6B43yRJ4tEkx48f58DCYrFwlb6rq4t7wq704JSEIFlEtBIdHc2N8jabjY2dLAf7g/v7+2EymeD3+1FaWsobwaxZsxAWFsY9zn6/H2PGjFEw3pITI44vImICQAnTFu+vuCaHC1IB5diM0OrBcOgIckjETCqgZAC/VJWA7AUhDwYHBzE4OIgLFy5wBYiIT2iEjsfjQWpqKuLj47Fs2TI8/fTTePHFFzF37lz+3M899xzcbjfGjRuH8PBwdsqWLl3KAb4IFwL+4dSHypIlSxgCOTg4iI6ODt5U6R5u2bIFsixzTylJf38/oy0AMEs7OSJpaWnIy8tDbm4uOzQEGRb7rPV6PXp7e3nDB4J6bjabFWPMSIb7XdRBMRAQnRNxvZCIVaLQvj2xgkB94z09PczKWFdXB51Oh/DwcKSkpOChhx6C1WrF4OAgMjMzkZ+fz/1VdI02mw1OpxOnT58GAGRkZCAxMRFAkMSK2gfEfYVmsYrX+w8ZXkIde9qLvV4v8vLyuBITmoQj2ycGtoFAgPfrw4cPQ6VSoaSkhJ/7TTfdBJUqyDh6zTXXQKVSYd++fbyvi6SDkiTxPkqoEK1Wq0Ay+f1+3HPPPdBoNBwc7969mwNWr9eLxsZGvPfeezh06BC3BI0dOxZAUF+jo6PR2trK7xF9J9H/keUgu6lKpUJvby+TzsTExHAyVAwQCd0AXNy/u379eqjVarz00kt8r+l4dN1iNVeWZVRXV0Or1aK9vZ0DU5r7Ste6adOmH1I1rjgJhfw++eSTaGpqUgRQwPDkbGIRigoexMZPCZGSkhIsXryYE8NAUIf6+vqYgI4CN7GSSGRI1APf0NDA/kRLSwvS0tJw6NAhZv2fO3cuBgcH4XK58Prrr/Oc2y1btihsnUoVnGkt2ujQRAmtJQpWiYeGCP3EuAVQJtbp8/n9fjQ3N2NwcBA333wzgGByiGauUvWf2l0o8NdoNOjs7ERUVBTz1QDB3llCENB9BC5NOna5yWUToALAtddei8WLF2PZsmWc/SLjTAytYqaWDHxoYAcMbQAqVRADPnr0aM7Ga7VafP/992hubsbYsWNRVlYGo9GIjo4OyLKM5uZmno33xz/+EefOnUNLSwsOHz7MEIK2tjY+BzmbDocDg4ODSEtLY+d45syZWLVqFdLT01FRUcEOO202YslfpPGn65TlIMupx+PB0aNHFQQHNPg6OTmZFytlSEePHs3X4Pf7FTMC6f2kpBQYUiBBhtpoNHIAWllZCVmWcerUKa5CiIHkl19+yYxhM2bMYKNP2S7KvovPKRAIcFM5DVAWnR96drQACRrk8/mwZcsWfP311wCGMqf0OlmWUVZWBlmWMWnSJGzbtg0HDx7kgDs9PZ17yCjZIBIoXUkESaJMmzaNP78IjSeWUprnRkY8MTGRDV9rayv279+PoqIinDx5Eg6HA9HR0Zx1T09P514IIEhqMDAwwCOSADDLLkloHxyJGHDS76GVAvqbCLMNPQ71A9HfQ/tT6fxUTe3r60NPTw88Hg/a29tx/PhxFBcXo6SkBOfPn2dIUX5+Pmf6A4EAZsyYgYiICFitVvT39+OXv/wlHnvsMTz//PMcuIvBMF3H22+/zWNy1Go18vPz+frFXsPjx4//Izi9hCxZsoTJs1pbWzlJQAgIWZYxY8YMFBcXQ6vVorq6Gnv37kVhYSFXTBsaGhAXF4f+/n6uFJItSktLY6IYWZa5h54SM7IsM9kXoQZEyHmoDBdgkmMi6vSlAttQZ472BfrZ4XDgzJkzKCsrQ0VFBQ4fPowzZ86gtrYWI0aMgMViQXFxMX70ox/h2WefxfPPP48HH3wQiYmJmDx5Mic9d+3ahQkTJvDnIaKmffv2wWQy4cKFCzyDleyuOOuRPru4/wEXVwv/IUoZrjdVkiQsW7YMFotFEQTRd9EGitVGo9GIgYEBnDhxgnucCwsLodFoeG56d3c3bDYbJEniCo4kBUe0qVQqxRghuhYAbMeTkpJw9OhRyLKMQ4cO4fDhwxgzZgx0Oh0qKioQCAQJ3QKBAN59912UlpbCYDDA5/OhoqKC9/esrCy43W589NFHinVBVWMKMsjfIB9kYGAAp0+fZjZ3McAhCU04ij93dXVh3bp1cLvdnDTUarUKjhF6DoTOaG5uxqFDh7iK29zczCRQg4ODl0Q6/L2JqMuUzAu9P6IfPpwvEAgEyRQ3b97MbOharRbd3d1cBPq3f/s3fg+RaIrM4nQc0X+mgM3v92PhwoVM9KbValFUVAStVovc3FxkZWWht7cXP/3pT9HV1YXly5cjLCyMizdqtRq33HILV1kBJTEiIWZE/8TtdqO9vR3h4eFobm5m+0kFLBLxPohr4oUXXsDg4CAmT56MSZMmQZIkbjukvm+q8FJCXaVSITk5GQ0NDejq6sKFCxf4vNRqJZ7vSoGnX1YBaqg88sgjWLNmDR599FHcddddWLp0qQK6F1o1E1m2xOoIzTOlbENubi4PEK+ursZtt92GMWPGsOMzbtw42O12REZGYtGiRZgyZQqmTZuGsWPHIiUlhSuwdA0qVbCf49ixY/B4PKivr4fVasXAwAAaGhpgtVrx7LPPMnkBAIapicoufi6xCqxWqzkbHhYWhn379kGSJA4aadGKAWN1dbUimCfyJTFbT2zAdL8IdgGAqxayLGP//v2MuU9KSsJ1112H/fv3Y8GCBQCA9vZ2BbFNXFwcH7e2tpaf0e7duy+CqdHrAoEA/4+cOLVajV/+8pfw+/2Ii4vj50gbFbEzk9Bn83q9qK6u5koWQVw/++wzjBkzBl6vF8nJyYq+Sp1OB5fLdcVkli4l06ZNY50RA/ZQ4h/6vyRJPISbIOWBQICrLSdOnMDOnTuxY8cOGI1G7ju1Wq2w2+2sW6Svfr+fHdvhglDReRCZRIcLQENFzDSKzprT6cTAwAD6+vpQVlbGDnxtbS2OHDmCY8eO4dChQ1xB7+/vR0dHB2w2G1JTU/HCCy/g2Wefxd13343ExEQ4HA6sWrUKBoMB8+fPR1RUFBOh3XzzzTxkvKmpCXa7He3t7ejt7VVA1zUaDcLDw3kTFefkiSQTf+ujN34Icblc3M/Z2tqKpKQkRd/mZ599Br/fjwMHDqCyspKz2NTbl56ejqqqKuTn5yMvLw/19fUwmUw4dOgQJEnCjBkz2FlXqVTco67T6dgxBYIwQKqiku6GBpb0N5FVcrhAVnx96N/EANbtdqOiogKnTp3C2bNn0dPTA6vVih//+Md46aWXOJE7e/ZsqNVqxMfH4/nnn8f06dMVST1ZllFbW4uoqCi4XC7k5+fjtttuQ0REBK6//npMnz6de8l7enqQkpKC7777Dn/4wx8Uuj3cZ6G//SMw/a8LMVCTrZZlGT//+c8V
c1EBZVBKQj4OJZa9Xi/efPNNZGZmoqOjA1OmTIEsyzh9+jQjmEgXAoEA7HY7tyuJCCIxqUczx6dMmYLS0lL4fD5MmDABjY2NmDt3LiOZMjIyoNVqmZSJkh2U0CA9pvVEQlUmAHxttC8B4GraiRMneI3U19czUR8ADobIFogJQlqDPp8PfX19ePHFFzE4OIjXXnsNkZGRXImic4uJGJ/PB6fTierqak4KdXV1QavVoqGhAb/73e/+FzXjypOioiKo1cHxUnfffTcAJXqJJDSxQMkxQmo99dRT0Gq1OHToEB588EFGshCKxm6387Ol4FVMUANDY5PCw8Oxb98+Hp1XVFSExMRE6PV61NfXs7/i8/mwcuVKGI1GTJo0CQaDAR0dHczr4vV6MXnyZD4ufY7Ozk6FDyXLwfYmQtX8+7//O86fP4+SkhLY7XYuvmi1WgXUl/YH0f/xer1Yu3YtZFnGddddh5kzZ3J7m8fjgdFo5Dmr5LsODAygs7MTHo8HZrOZW2Lq6up4LQwMDDCB0pUil3WAOpw89dRTeOKJJ/D4448zTlusqIqOARlHlUqFW265BUeOHME777yD3bt3Izs7G9dddx3T5W/evBlnzpxhQpmwsDCuhH7//fdMxlFaWoqKigpUV1czmy0pyTXXXAOfz4ekpCTodDru/2htbYXb7cbs2bOxbt06hp+GVolowZCTCyhnqHo8HkRGRqKyshKbN2/G+PHjuQJDi59eX1NTw72AarWae+Bok5JlWVE1pF5PMZNDRsZms/F9KikpQU1NDSIiIiBJElJTU6HRaDBq1ChFxbqtrQ2BQADTpk0DcOn5o6LREo0XVeecTqeiUi3LQQjzuXPnuGJMGzUdo7CwEG1tbVx5kSQJ1157Le6++27MnTsXCxYsUFRPaXPT6/V/MUC6UmT69OkKKLnozNDvwFByhYy1+D/6Hh0dzYZXrR4akwAANpsNXV1dkCQJ7e3t7GRERkaiq6uLKdhJhssYEsyKro+um5wGgmsXFhbi7NmzOHz4MAoLC+FwOLj5v7S0FFVVVWhra4PBYGAH/vnnn2dDfsstt8BkMjGT9P/5P/8H69evx9q1axmimJCQgJ6eHu7NnjBhAn9uSZKwcOFCqFQqTpRYrVYkJiYiNTUVZrMZLS0t0Gg0vLn5fD4sX74cq1ev5oSLuDn8w6H/r8m6deuQmpqKnJwcJCUlwefzIS0tjXusJUliRlKdTofGxkZGC0RERECtVmPWrFms64sWLUJFRQXcbjf3Ht9www2MECCbEooQoJ48yliLTkXoFzBkQ4EhoiPRkabgQ9TvQ4cOoaioCCaTCbGxsUxUZrPZcOedd+KJJ57AQw89BIPBgIGBAWRkZOChhx6CxWKBz+fj8WKh8Enqwz9w4IBidMPs2bPZ7uXk5GDevHnQarXo7OxEdnY2JkyYgL6+PvT29qKpqQnV1dU8R1PU53/o8v+7HDt2DMePH1f0lL322muM/iERE8+AEnFChDAejwc2mw1+vx9Tp06FJEkoKSnBokWLoNFo8MUXX7B/RMgDQhiI8HhZDrbuEGPz/v37IcsyRowYgeTkZOaSWLNmDbOEUjKeEnP33XcfVCoVzpw5w3vMXXfdhUAgwA48VUw1Gg3MZjOAoT2BfJ+XX35Z8bmbmprg9/tRX1/PcEcxUAn1McQ10NzcjDfffBNutxsbNmxQnJt6Suk8brcbzc3NqKurw/nz5xkaqdfr/8ERcAk5duwY68lwibZQlJEoNFuZiC4LCgpgNBrhcrnw4YcfcuGCSOhoZJDD4eAEPEF2VargXN28vDxOYMyZMwfd3d1Yvnw5fvrTn0KtVsNisWDVqlWQJAlxcXEYPXo0YmJiWG9FG+3xeLjyT3pFvo04+UOWZaxduxYAcOLECSYAPXXqFAeQpLeUzA/dR8S4YN26dfD7/Vi0aBFSUlI4aUQQd2qxIF+WJny4XC4ePajT6ZifhVrArpTqKQBo/vOXXN6yYsWKi/728ccfs2GjvoPPP/8c77//PvLy8pCamopJkyZhx44dTEM9Y8YMFBQU8MD2Dz74gGdKTp8+HadOnUJUVBQ6OzthMpnwH//xH4iOjsZPfvITbN26Fbm5ubDZbLjmmmvQ1dWFffv24d5772WniAyz1WrFK6+8gj179vBg4OEWrRgoir/39fXBbDajt7cXR48e5T5ZMsY0Z2zWrFm8KQFAbW0tB4Pk/DudTkVlSyShkWUZERER6O7uRnR0NG9q5eXlPF/J6/Wip6cHMTEx2Lp1Kx5//HEMDAywg65SBdkxASh6PYhhlYKR1tZWxMXFcUBC15iSkqIIVAjqUF1djby8PMiyzPM8xYVOz502HbfbDY/HwxWV5ORkZorWarUoKytDTk7OJUl4rkSZMWMGjh49CuDivhzSFXLGgSEnSDSSFNjS86DnR5VUgg5LUpBIq6uri/vaNBoNenp62CDT8ehcfr+fM5WEZpg+fTpMJhPOnj2Lzs5O1gVicnU6ndBoNJg8eTJX0U6ePIk5c+Zg+vTpClZWgoC+9tpr2Lx5Mzv0ROAlQuUkSWKnJDExET09PRg1ahTGjh2L3bt38/gcqtqfOnUKWq0WSUlJXCUFMCwEkjLyYiLmH878/7vMnj0bx44dg8/nY1iVyWRCIBBQMGwODAxw3xtB2AnJQYPeA4EAEhMTIUkSdu7cybbMYDAokiq0wUdGRrJzQGuIkARk64BLV/+pX46COa/XixMnTgAY4lDQarVMyjJlyhRERETA5XIhLS0N8+bNY70mW0ii0WiQnZ2NgwcP4pprrlFcixgME0HMLbfcwpUjseWD1rjJZILNZkN5eTkcDgc6OjqwY8cOREREIDk5Gffccw/bcJJ/6PP/TCjomTJlCvsWL7/8MkNdRfgroISMA0G2fZ/Phz179vDPYWFh6OrqYv+hpqYGQHBPzszM5IBOq9Wir6+P7ZMsy+jr64MkSXjggQdQX1+Pqqoq9Pb2oqenh5MZra2tkKQgIZksy6iqqkJqaiqio6MBBHvA9+/fz5UjnU7Hn1Ps+SSkCY3O6ejoQFZWFmRZ5rmUlNxJTEz8v+19eXSUZZrv760tqcq+AdkggRDWII2Igsi+qahcRQb7IEq3Q5+mvcdu2rFvy/Rmt2fGO3br7dHp0zoiOgdacNqlEZUlLLJFtgTCFrJQCVkrSaUqSVWqUst3/6g8D+8XoBunhSTy/s7JqSWVqq/yPe/7Pcvv+T0Ih8P44IMPcOrUKWzevBlGoxFer5fbka4G8gdKS0uxefNmLF++HGvXrsXLL7/MAS6NHaTJBcQI8/l8GDp0KLq6urBp06arMggUIqA9jcQX6RzL/q38mPYRj8eDuLg4/OM//iPGjh2LqKgo7NixAz6fj+0sHA4zBZhEq0i4jWCz2dDR0YEVK1agsLCQNSUqKioQDofh9XrZPzEYIqJd1dXVGD16NCwWC7OcXC4X77P33HMPgEhLE60RavWgwJkCVurHp+9OiRYS+WxsbOR+VLJZWZ9D3o9pHa5ZswZvvPEGHn/8cZw5c4bfl4pVQgieK0uf4fP
5MG3aNNTW1iIlJQVutxtxcXE4efLkgGNsfSNX27Jly7Bs2TIsX74cjzzyCIYPH4577rkHL7zwAr71rW/hgQcegNFoxJ49ezBhwgTuqUxLS4Pb7YbRaMTChQuRm5sLISL9nNnZ2Uz5KyoqwurVq+F0OvHWW29hwYIFSE9PR3FxMd544w2cPHkSBQUFXMqnII0uNMFgEJMmTeJxElfbWOUKJgUKZMBerxdWqxVOpxNmsxlVVVW6IMTn8+lEb4jGaTAYOKukaRpnjChLRGNp5KoazVA9deoUhBDIy8tDRkYGtm7dCiEEZs+ejXPnzumOnS6MckWBgvSoqCg8+eSTAC47bFS5IBo2/S0JN9DrY2NjERcXx8cJ6KuBcjCvaRo++eQTCCGwcuVKHiZNiYKhQ4ciMzOTv+vatWuRl5fHtOVvAqZOnYru7u6rjluRHV65F1mmLva+uAghuGc1KiqKnfmamhp2eClLTyq3e/fuRVlZGTweDw4fPoxDhw7h0KFDOHr0KC5duoSqqiq0t7dj8ODBvJnSeff7/UzHJfpKYmIi8vLyAEQcuu985zuYO3cuBy0kjkDHEggEMH/+fHg8HhYLkfs/qTpvtVq5wkQKvEII3HvvvZg8eTK/1ul0IiUlBVlZWUhOTsY777zDI5yIetbbiaGsJ6Cc+b8HvXusKclFVHaDwcAJA/kcE4WdmADkSFC1m5wEs9kMm83G/T1RUVHMEKC1QEJcpPLZu02DKosdHR04ceIE2zzpB8THx+PkyZPsxPj9frS0tDDjwO/3Izc3F5oWUUcldUw5uSPfB8BVAtnu5OCUHtP/jf5ncmBKj2NiYlBbW4uMjAx89tlnrDmwbNkyFsmR2yqUPX99oOkBtK/K2hCU5CL0dvqJqvvd734XAPDSSy8hLS0NJSUl6O7u5nPmcrn4vYkJQ9dNUvKXq/2U5JwyZQqqqqqwatUqnDt3jplYpGcwYsQIpk5aLBZs27ZNpylBDjyxZYSIjNp5+eWXeQQHURBnzpyJNWvWYPfu3bymiNEwf/58+P1+7N69G1arlSuhvUXS5AQkfcdwOIxdu3Zh165dMBqN+OEPf4jY2Fh0d3fD5/Nx8GM0GhETE4NAIICysjIWeFLB6fVh4cKFmD17Nu+Jsh/ROykGgFmICxcuhNlsxjPPPIP58+dzy5nMigT0/aZUURVCsO/+/vvvo76+nvfi8+fP48EHH0RUVBSKi4uZgUN+z+LFi+F2uxEbGwun04lTp07x582fP59VsuXCR2ZmJp599lkYDBH1d1ILBoBXX331ir7ucDiMuXPn4p//+Z+58kkTKQj0XeW/DYVCWLt2LcLhMH7zm98w24A0RXw+H/d8+/1+XLp0CfX19QiHw7Db7Whra2O/baAFp8A3NEDtjREjRsBut8Pr9SIlJQVlZWV48803cc8998DhcODYsWOw2+2YNWsW2tvbcfbsWYwePRoTJ07EjBkzUFxcjClTpuDChQv47//+b3R3d6OjowP/9E//hBUrVvD8OJfLBZfLxQIEDQ0NugqlrF63ceNGGI0RZdvS0lLd8ZLDQwGETJWg5wOBAFJTU+HxeGC327FhwwYA4EpXXV0dKzD6fD7O3Mh9pt3d3bogWL4YAOD5ojSmQdMiFOf333+fF5DdbsfSpUs5oPV4PEhOTsasWbNgsVgwfPhw3kBoo6KqAwVCaWlpEELoqnnUAG4wGDjrRcFlfHy8rl+XLr5ynxQtUIPBgBkzZrDCHFGUJkyYwFSo1NRUhMNhTJ48+YbYX1+i9+gdClblYBQAK0LKdDKiodAPAK4yAWAqbEZGBlpbWxETE8M9VCkpKdzrYzKZUFpaCp/Ph0AgwNl4l8uFjo4OJCYm4o477oDD4UBRURHPrKP+KiEEi3LRMHnZ0SbIyQrgspOekpKC2bNnX9F7IV8sgUiVafr06XzBpL+Xq59JSUk4c+YMmpqacP78ecybNw8VFRXweDxoa2uDz+djZgKgV95UzvzfD7nHWk749U5Q0S05yXL7BHAlncpoNDIdnILYUCiE2NhYbvkgh4ISd7Qv0ezbc+fOoaioCAcPHsS5c+cQDAbh8XgQHR2Nu+66C1arFTt37oTL5WJVRfm4ae25XC6UlpayY0THR7ZETgclFnt/Z+p7Bi4nRen3fr8fTU1NcDgcqK2tRU1NDQfdRqMRJ06c4DnfL7zwAjs9LpcLp0+fBqCfc6rw9eL48eM4evQofvrTn+qSEWTDcr+oDKrGvPbaa1zpCwaD2LFjB/7whz9wMic2NlZHVSSblp1jILI+tmzZwp9dV1eHU6dOISkpCR6PB6FQZGTcypUrObHx/e9/n0XGqEc1GAwy0+Evf/kLf59gMIi5c+fy9wIie2tmZia2bNlyRbLabDYjKysLM2fO5O+elJSEpKQkVlgntoSMqzGCNm7ciM7OTkRHR2PhwoXcf0rUS2I0xMbGcjWVtEMUrh/yNZf8TDqfMgPAaDTC4/Ggvr4eEyZMgNFoRFdXF0pKSnR7IyVViJYuB3Fkd6mpqeyfA8Djjz8Ov9+PyZMnw+FwoLW1FTk5OXj66af5eAKBgO6HPpN8jM8++0zXqkHtQevXrwcADBo0CImJiZg7dy42btyIo0eP6nxp8o2/973vYceOHXydMZvNOpulvZs+n25dLhd+/vOfIxQKYeLEiZxYpWKBLHTm8/nQ0tKCs2fP6iq5W7duveHn+0bglghQr4b77rsPI0aMYLWrUaNGYcOGDTCZTBg/fjw7CpWVlcjIyIDP52MnurGxEcXFxfjyyy9RXl6OmpoaHDp0CJ9++imrRNrtdt3sSHJ66KIzYcIEpgMEg0HY7Xbd5io7TjIlU/59OBxGcnIyOjs74fF4OOi1WCyYMmUKB2jR0dGoqqrSOdoA+OJEvSMy7ZYuVvT3ra2tECIi+56TkwOr1QpN07BgwQKMGjWKFc6IB0/9IrJYDr23XJHQNA1vvPEGTCYT1qxZo8sck4NJVRGZXuHz+XhQfO+hxZqm8ViJYDCI9PR0hMNhFBcX45VXXkF0dDSGDRuG8+fPIxQK8egK6kX8JmHx4sVobm6G3W5HVVUVnE4nWlpaWFhIVqaj80RZaQoK6YcyhGRjlLknR9xoNCIhIYHFM+Lj4xETE4OmpiZ0dnZywoOoMUIInkmnaRr279+vu6BRtV+uhpGCc0VFhU7RWhZHIzl4+h0FNPRYpnHKzr8QAnFxcVfQbuRgaPz48Zg4cSKPPTp48CDKysrw+uuvw2q16rL6ctVUOfNfH+68806dg947uSb3XxN6O/uAvgebnqdzR0GbHBhER0dzgsRut+PIkSPYv38/ampqeKQN2TMlYCwWCzo6OhAbGwsAutmKcj8gJd8A4NNPP0VjYyM2bdqkq25Sr2FiYiJCoRC8Xi/a2tp0WgJElaRARP4fCSHQ1taGiooKtLa2oqWlBampqbpZkBaLBZs2bYLP58Pnn3+Ol1/qSQShAAAgAElEQVR+GT/60Y9QUVHByufKnm88jhw5ghkzZlxVW4PstPf1HIjYNAkBrlq1Cg8//LCuihUbG8uOvl
zF6c2YGT16NMLhMLPAoqOjmUa7Y8cOhEKR0WNWqxWdnZ0IBoPw+XxYsWIFLBYLM5aEEPjBD36gSwbR2p04cSIHn/Ixkj8yefJk3b5Oa9tgMODYsWM8No0YVVQRJvRe57K43g9/+ENomoYxY8bgqaeeAhBJUFJgTNcpi8WCpqamK4oICtcHqqTK5x+43J9KoD2XxrFRwoBAjC26FtP+TDZBieyuri4OMj0eD95++22eIf3JJ58gHA4zG5IYTzExMdyXf+jQIT7O0aNHw+/3o6ioiI+DfJ+uri5OHgaDQeTk5GD79u04fPgwz5ynZMewYcPw6KOPIhQKwWazYfjw4fxeva9J8n3ZJ7Pb7di1axfmzp2LSZMm8WsorkhISICmRdryKLlK0z9kP2mg4ZYNUHujsbERM2bMQFNTExobG2E2m5Gfn4/FixezGAfNP7TZbHj88ccxe/Zs3H777WhoaEB6ejrGjRuHlJQU5OfnIxwO86ia3igvL0dVVRUmT56M/Px8TJs2jXvqAH1PUO/KKaBXkKQMJTlGpaWl6OzsxJdffokhQ4awkVM/bWtrKxsrKfvSRcFms/HnU38IAGRnZ+Ohhx6CpmlISEjArFmz0NzczO9HmSBN01BTUwOr1cpBQkxMjE5BuKamBsnJyQAuUzWSk5MRDAbR0tKiq4KEQiGeY0jy8AB02c7e1BH627KyMs40dXR0cPVUztiS80dqvqmpqV+LLfU3rFq1iqtH5ERTr0I4HJk129LSgqamJng8Hu49oyq2HOjJ1XVKFoTDEdVfCvzofYcPH86/l3tRKeiTs+yff/4508vkRIZc/ZGDifr6ehZmkqu+QgjugyKKsEz30jQNFotFV5mSHRnqUywuLgZwuSpPn09jEZKTk1FUVITq6mrMnDkTS5Ys0dFMCcqRvzGYOnUq35fPkewE0/5INkbo3X4gg/qJiCru9/thsVjgdDphMBi4UpWdnY3ExESeF1lbW8tri5I5VquVK1N+vx9btmxhASY6VkoA0h5PNuTxeNDd3c37LB2bwRBRb3c4HDCbzaitrWXRkJMnT8JoNKK2thbNzc06hgQAnD17Fnl5ecjJyUFSUhJKSkpQUlKCQCCAF198Ea+88gr27duH3NxcxMXFYf78+XjllVdQX18Ps9mMoUOH8pxChZuDmTNn8j4FXLZvOYlGiIqKQjAYxMGDB5GXl4dgMIjc3FzdfgfoxbvoPSmRTNcJElwyGAys6ExCY0Q7p323o6MDRqMR7e3tzNCSZy6SEqtcPaP+2nfeeeeKqhrt+UTFlANGcvw1TcORI0d4vI7JZILX671ilB1BZsvQmvj0008BREbpbN68mUfZUJCakJCAO+64A5WVlV/T2bw1IQepgF6ZmgJM6i+lUYyFhYWcyJYFhSg4lf8+Pj6ez/vDDz/MrBKDwcAtYQaDAa2trYiOjuaxYeQb1NTUYNiwYQiHwzh+/Djb4owZM/h96LOCwSC6urp0+h7kd5DS75QpU3QzS+Pj45GYmAhN07Bp0yZuzQMia0NeK4B+xBg9DofD+Pjjj2GxWLBkyRJd0oYKMVarlYVRW1tbOelJrYQDESpAvQrq6+tx5swZVtmjsRXV1dWw2WyYPn062traEBsbi5MnT+Ktt95CXV0d0tPTMWHCBBQUFGDOnDm47bbbAFx2rOknIyMDNpuNF4s8l0sOtGih0H0AXNmUK0TyQPnCwkLExcXB5XIhLy+PM5S1tbX8ObQ4EhISdM4/ZSDpfYlGcerUKabVhkIhlJWV8dgZ6t2jC9vUqVN5OHhFRQX3CFD2X1bLo4V96dIleL1ejBo16opNzO12c0ZVXrChUIgDX+oFkGlw9Jlerxfr1q2DyWTiUSQkdx8MBll4wWQyDeiF/LcwePBgWK1Wdqi9Xi9qamoARP5XJEbQ2dmJlpYWOJ1OVFVV8exQqrjS+fP7/XC5XPD5fGhoaAAAbtYnGw6Hw0hNTeWxA5T9NBqNumHpRDWXHRDKkJLDLle7KNDevn07XC4X24XJZML58+fx8ssv8zww6s8AIoyByspKOByOq1IjScL93LlzMJlMOHfuHOx2O/+PgIjYGIktmM1m1NfXo6mpCYWFhWhpaYGmXRaQUcHpjcXUqVPZVq6VIZb3Sfk54HKiT06KAHrVVBprlZiYCK/Xy0ErAGYJ0HgqUnSkfn7K3JtMJmzfvp1tkZwPeUSCvG7oOKqrq/H555+jvb2d35fElAKBAHw+H9LS0tDU1ISOjg6MHTuWRzZs3boVJ06c0LEDxo0bh/b2duzfvx91dXXIyspCRkYGvF4vRo8ejfj4eCxevJgZRH6/Hw8//DA++OADlJeX49ixY+yYKdw8kLBbb8iJGDn4DAQCyMjIQFRUFNN06VorO/uUmCXEx8cjFAohPj4eI0eOZOrg+fPnkZKSwlUsu92OQYMGYcGCBbqqVWFhIYQQaGxs1FGRQ6EQO/503IsWLUIoFEJiYqKuv5ZuiRVFPeDUA0q+TmdnJ9ra2nD+/HkAkTVLs1/pPeSKcO+APBQK4YMPPsDBgwdhNptRXFyMS5cuYePGjfjzn/8MIJIUb2pquiKAUPjqmDNnDgeqcpKAQIKbDocDhYWFOnovsU+owur1evl80jgiIQQmT56MU6dO8bV92bJlMJlM+PGPf8z+4ogRI/DEE08AiGhGNDQ0oLCwEIsWLeLCFADdLGEZd9xxBwBgz549vIfTd6JWEFlZWm5RMpvNzGAbMmQI+6GymBlw5Vx4Yg14PB4899xz8Pl8WLduHR8nsTCpYkvUfZ/Ph4SEBNbIGIi4rgBVCGEXQpQKIUqEEMd6nksWQuwUQpT33Cb1PC+EEL8XQlQIIU4JISbdyC9ws9De3o78/HxMmjQJI0aMwI4dO1BXV4ezZ8/yIOxly5YhJycHoVBkbqfRaOTFJcPv9yMpKQlTp07laktqaiqMRiP36fWuBBBoMVCFVQb1TFDvVFFRETRN42zKiy++iBdffJGdbblnk5xzGbTZm0wmtLS0IBQKISMjA3v37uUqJqlj3n777UwZam1txe9+97srKEhGY2ROH2VDiepLi+9qmxZVfNPT03Xv1djYCE2LyIvffffd/DxlXyl7tW3bNqZuEkWJsm0FBQVMMxro80//Fh588EHu46BAkS7gQkSEwEjEZfDgwUhOTsagQYPg8XiYvmi32+FwOFBZWcmjN7q6uvj/aTabuWenq6uLs4cxMTFITk5GcnIyzGazjiIsb+Zy1Qu43OsmB6b0WNM0VFZWYtu2bXA4HDyweuzYsYiNjUVDQwOqq6uZgt/R0YH9+/dzDyw58vT9q6ur0djYiP379yMvLw+JiYmIjo5Gbm4ujEYjqqur8bvf/Q5tbW2s+hcfH481a9agrq4O06ZNw9ixY1VwepMxffp0eDwe7rOWq6a995PeF35ZTEumawGXq5VEP6f3o3YNekzVxujoaH4/chgoGKVeIbmPiUbcyA41oNcfEEKgqqoK27ZtQ3t7Ozs0I0eOhN1ux+nTp+H1ehEXFwen04ny8nI4nU4Eg0GMGjUKY8aMQUdHh
65CFQ6HYbPZcPz4cd7vExISWBBq7969iI6Oxuuvv859YSNHjuRxPgp9gzlz5mDOnDkArqSuyr4CMYL279/PoydIX0AWjouJidHNQCX2ixACCxcuxBdffMHv+4tf/AKhUAi33XYb/H4/pkyZwgEwJRUp+dLd3Y3Dhw8zQ4aOp6amhtejxWJhmuKXX37JlSlKfNL1WqbNh8MRQSaTyYRNmzaxLV+8eJEFjeh/Q9eU3uyb3giHw3jrrbewcuVK+P1+lJeXIxAIIBgMYtOmTdi0aRNaWlpu+Lm9lUA2LIPseMiQIWhoaMCePXt4v6QEMSVTKDkHXB5HRDY8dOhQVFdX83teunSJfVvqyaRKezAYRHV1NbKzs3HHHXcgEAjA5XJdUeAgWjD5NzSiUG4DIXszGo0cIBIDhxKoKSkp6OjoQFRUFMrKyhAOh1mjg7Rkevv5gF7cjhhc//7v/85+L7USysyKcDgMp9OJjz76CNu3b78BZ/Hm4atccWZrmjZR0zRSkvk/AAo1TRsJoLDnMQDcC2Bkz89qAH/4ug62P2HMmDFobGxEfn4+HnroIaSkpKCtrY0DN7lvTe5HAyJN1ffddx9ycnLwyCOP4Mknn8Sjjz6KtWvXoqWlBc3NzUxLlY1WvsjI2Uh587XZbOjq6mJK7M9+9jP89re/RVxcHNatW8cqf5RN7U3blCu19L5EVyBJeqfTyQqW999/PyoqKtDc3MwOnsPhwI9//GN+n7lz5/JFaP369brvRd+J5qjJKn+dnZ348MMPEQqFkJqayq8FLotBPffcc7pjld83FAqxNLfNZuPsLwVXQ4cORU5ODrq7uwfUbKj/KR599FF2GOj/RbN8AbDYl8PhQFdXFweTMTExOieInAkSyqB5pLRJE2WH+ptIyILo6DSbkgJVomCSg0MVbTkDLjscsmqr3W7n8QrR0dEIh8P4/ve/j5ycHJjNZsTFxXH2kqpFMpWbnLLMzEx4PB6MHz8eR44cgd/vx8WLF1FYWIgTJ07AYrEgKysL58+fR2dnJ7q6uvDAAw/gvffeg9lsRm5uLjo6OlR/Xh/gvvvu44TExYsX4XA44HK50NraqrtfU1OD+vp61NbWor6+nmniJNZFjovsLFOrBjFXiCVAjBBNi4i2JSUlISUlRWfbcm+83GNHfydTGukx2TfZvdFohN1uxyeffMI9UwkJCTxSxuPxsGOdlpaGhoYG2O12NDQ0oKWlhQMF+lyqik2bNg3Tpk2D2+3Gnj170NnZiUmTJiE5ORmvvfYaZs+ejePHjyM3NxezZ8/GpEnfiDzzgMe1KlGyn9DR0YHu7m4899xzOhol0Wxpb+3t6NO1taGhAVVVVQiFIqPAioqKuHd13759nGicMGEC91kLITB9+nQYjUa0trZysFxQUABN03Do0CEAYIddFjgjW6f+ffpuxPASIjJ7mpJQH330kS5xv337dvYhgMiaodE5sn/U+1auOj/99NPYt28fKisrORh67rnnbtBZvLUxb948zJs3jx9T4o9o41QtNxgMGDVqFNt4VFQUt82R/0HtFEJEpgl4vV6kp6ejra0NpaWlyM/Ph8FgQGlpKVJTU3H33XdDCIHdu3dj+PDhqKioYF/ywIEDvEdPnDgR4XD4irm3ZrMZTU1NuvVns9muSKzQtSQjI4Nb2vx+P19nzpw5wy1NRqMRNptNJ1hGkCv/tHZKS0thNBoxe/ZsbNiwARs2bMDp06eRkJCAQCCArKwsPPDAA/y5Axl/T0r0IQDv9Nx/B8AS6fl3tQiKACQKIdL/js8ZMKipqWEH9WqOKjkuFPDJGT4gEiC+9NJLeP755+Fyufh5+TUUBFIFTBZ/IdhsNs7yDxo0CBMmTGAaZlZWFvc8yX2cxO2nha9pGgdyHo+HKTqULaXX+P1+3HnnnYiLi0M4HOZ5sh9//DF2794NADo6p9wcT45gV1cX/uu//guapjFtlzYhIHLhJKEj2dmj46G+LxmaprHAA1EfNE1Dc3MzXn/9dQCRzeaPf/wj9yPcCvj2t7/N/3dyUsiGelNnyL5I1TMUiqiX2mw2CCFY3MJkMuHChQvcn+Z0OtkhcjqdMBqNSE5O5h4pGpBOATBVHWVmwLVomWQDsrNht9vx/vvvA4jYitVqZRVTct67urowdOhQGAwGbNmyBRcvXmS6GxCxBRJPIGeooKAAw4cPR3d3N1paWhAdHY1HHnkEbrcbxcXF2L59O1paWlBeXo4//elPKC8vv8FnT+FaWL16NVcyOzs74XK5UFNTg8rKSp3zSiyKzs5OeL1eeL1e3iecTifKysrQ3NzMLAGiS8mznTs7O1kduLGxESkpKWz7ZrMZ8fHxiIqK0lHaydaILnitamRvJ1oWydi6dSsqKiq4EuB2uzF48GAMGTIEBw4cQE1NDQYNGgSXy4V7770XI0aMQExMDFd0SQSpsbERJ06cwK9+9SvYbDaMGTMGXV1daGhoQExMDNLS0vDFF1/g4MGDPDtaoX9hzpw5OiefWChUiXK73UzZDYVCOtFCWRBRdvTpOu3z+Ximc0tLC7Zt24ZVq1bBZDIhJycH2dnZ+Pa3v41wOIyPPvoIFosFFy5cwNy5c1nwEbg879pkMrGoIiVf/H4/J3HI0SdKb3Z2NgwGA1paWrhSFR0djSeffJIrpOR7kC9E36GkpIRZDm63W6dnICevCXTNCYVC2LBhA37605/izJkzVwguKXz9mDdvnq5YQeeQ/DtKBtIIFflc08/gwYMRDoexePFi9mEbGxvhdrshhMBjjz2GQCDAoyIXLFiAYDCI5uZmxMTEYM+ePZg7dy4CgQDbLYk8UuKF1kh8fDwCgQDee+89nXK8wWDgmayUWKHvlZaWxirWmzZtYr+7urqak0Dke9IapOAcuNL3p+NbtWoVWlpa4PP5sHz5cpw+fRpvv/029u3bh+bmZpw5c+Ymnskbh+sNUDUAO4QQx4UQq3ueG6xpWkPP/UYAg3vuZwKQNblre57TQQixWghxjCjD31T8tYCVnBeqsFKQlZCQgFdffRXNzc26GacEmfpIt1ejiREfvrq6mrNSVEmUA1EAfBEgh50qv7QAx4wZw79rbW3lAJkGXN95553cDzh+/HgcO3aMLwpDhgzR0e/MZjMef/xx3fFTlY4ytfLFJBwO4/bbb+fRDvJ8NZJ/p5mmcsaptbVVV12lIeb0vvQZFovlGzX/9G/BYrGwcADZHCkiygEbOQMZGRn8WrnyTsqNVBklJyQ2NpadeLI36n+1Wq1wu90IBAKwWq2w2WyIiYlhZ/5q1HbZtuVRRHTxMJvNaG1txYcffsgOfVJSEjvv586dw9GjR+F0OpGWloa4uDhMmDCBE0WAXhjs5MmTuO222xAdHY26ujqe05qamor/+I//wMGDB5GdnY2YmBjMmjULCxcuxLRp027a+VO4OogZQA6pnPwgW4mOjma7sNlsPPOWKo1UYZQdeKpiEtNAHmuVnJwMg8GAtLQ0nvNMao3x8fGIj4/nv5HXGwB2ssju6DjlagIlaygR8+WXX6K0tBSVlZW47bbbcPLkSWRm
ZiIvLw+dnZ2cYNG0iLBMSUkJjh07hn379vGamTZtGhYtWoSsrCy43W6MHj0ad955JyZNmoT29nYsWrQIS5YswWOPPXYzT5/C/wBykAqAkzEk+mI2m2GxWDB48GDez6nKKVMCU1NTObg1Go3MiKK/pepVMBjEoUOHkJmZCYPBwOJhO3fu5FYJ2V9ZunQpK1/THkvCTyRIJ+/1OTk5WLduHVeyKGkeGxuLrKwsCCHgcrmu6B3XNA1OpxN//vOfUVBQoPN1KOgh0DWM3kMWXmpoaMAzzzyDX/7ylzf+5Clg/vz5WLhwITOzUlJSuOhADBYgIujp8/mQlJTEs2uFEJyIPn/+PEpLS2EwGHhOr8FggM/ng8fjgdlsxocffgghBF566SXU1dVxgp4o6LQHjx49GuPGjUNRURH7mpSop+SmbDsUTFqtVggh0NLSwjZmsVh4je7evVvXZnXu3Dmu/NJak/3T3klMOckSCoXw7LPP4r333sO6deswa9YsCBEZbfnuu+/e+BN3k3C9Aep0TdMmIULf/YEQYob8Sy3yX/tKOsaapr2hadpkiTJ8S6B3wHqtKisA/Mu//Av+8z//k8WG5Kph7421N3+dnqOmcrfbjZKSEqbJ0AKiz6Lsk0xdIGEio9GIL774gh0solJSlSAmJgY2m40DWrvdjmHDhnEmaMiQIfyeNBKB+qmIe08ZS3Lo6HtQFouodDJtJxwOo66uDgAwbtw4XR+pwWBgKgb9TVxcHH9f+m6ZmZkYMWLE132a+zVWrlyJ1atXQ9Mio5SIOiJnm2WRIvq/0sWeXkM/Wk+/R2lpKeLi4nhDr6yshMlkYtGCQYMGcSWqq6sL7e3tfN5jY2M5aAYuK63SZ9N7yps0VVJJAbq5uRnt7e3YuXMnQqEQO+8jR45EcnIyGhsbceDAAe63BS5n0IUQSEhIwIEDBxAdHY0XX3wR5eXlTAk9ffo0jh49CoMhMmaju7sbQ4YMQX5+/k06awp/C//wD/8A4PLaJufGZrMhKSmJ+9ADgQCam5u5ikOtBZR0I+dDCKHrNaKRZADgcDjQ3t7OPU1GoxHp6ek89oJGf1GvEVUygSsphnSsvfvkyDZlCnBdXR1Onz4Ns9mM9PR0zJw5E1arFbm5uUhJSUF5eTny8vJw/PhxFv0QQuD+++9nhsS4ceNw7NgxxMbG4sKFC9i3bx8HARTYEp1Yof+DKJO0H0ZHR6O+vp7777q7uzlpQcFaIBDQzUSn5EVJSQkqKio4+UgaGqQZ4HA4UF1dDZPJhLa2Nrjdbhw+fJiV/AcNGsTMmmAwCK/Xi0uXLrGNU5uQwWDAa6+9pqtQBQIBPPvss7hw4QIzF0hkjKrDNIquvb2dBfw6OjpgMBi4N4/+B7SeriVyJK9D2e+ZPn36TThrCjIWLFjAFVOqMlJbF52/QCDArKvo6GgeCWe1WllvhfbsYDCIRYsWcbU+LS0N8+fPR3d3NyorK5Gbm4t/+7d/41aJ/fv3A4jYxLBhwxAIBHDx4kUA4EBzzZo1rLpL+zX5EUOHDsWSJUuYuUe+SzgcRnp6OvvOtK7k4hBNHSD9Amqf642rsco+++wz/OQnP8H69etRUFAAIQSef/75G3CG+gbXFaBqmlbXc+sA8CGAKQCaiLrbc0tSUXUAsqU/z+p5TuEa+GsV1kAggPXr17PDIGcCe1Md6UfuSxFCIDExEX6/H7t370ZRUREHGaS2R5u/rF4ZCoW4DzYQCGDo0KEcJFMWlnoGrVYroqKicPToUURFRWHQoEHIz8+H3W5HOBxGUVERHxdlrIYPH86bEYnsGAwGLF68mINnuZoHRHpr6BjoYllRUQEAeOqpp3TBDAVPckBL9L6LFy8ybfWBBx7AihUrbuDZ7d+Ii4tjx4UccRLZ6l2RpsdUKaVz2traitjYWH4tXWBSUlKgaRqr11GASSOVhIjM1SUqudVq1fWe0oZMwSrd9qZzk907nU4UFhbirrvuQnNzM+bMmQOHwwGLxQKXy4WxY8dyIqW7uxsnT55Ec3Mzdu3axb25ycnJePDBBzFz5kx0dXVh+PDhGDFiBGbNmoWlS5eyI//OO+9cccFQ6HtQn7Xcyx4IBOD1ehEbG8uiQXKihRwNmqdIexudX6q003xRIJJII7s1mUxoamrS9WTTXufz+Xh/i4qK4iCVHBSZySL/yPYvUxTD4TDa29tRXV2NkpISrvBnZ2dj4sSJ3HMVFRUFu92OcePGYciQIejo6EB8fDwOHTrEPdOlpaWYN28eUyodDgeWL1/+jehduhUxf/58ruiQXQWDQW4XIrotBXTy/FGiwNMcXQA8S5zs0O12c99pKBTC5s2b4fP5cPHiRd14JFnbQtM02O12fs5oNCIzM5PpjPKenpmZCb/fj/379+v2VvpOW7Zs4UD6atUl8oXq6uo4mS0LMcqQ2WGyj6DQd1i6dCmfi87OTi6EAOBiSzgc5ikEVLHs7u7mqjz5dePGjcPtt9/Odu71ejFlyhQeb+Tz+RAfH4+77rqL34MKNtnZkfCltraWryHEimlvb9cxveg6smbNGlacBi5ruNDr9u3bh7a2Nk6GyqKQ4XAYP//5z3XsRbLbqwWl8n1iFDz//PP41a9+9Y2jpv/NAFUIESOEiKP7ABYAOA3gLwCe6HnZEwA+7rn/FwArRQR3AXBLVGCFv4GrVVj9fj9+/etfY+XKlTpHpbcB93ZyZAc/JiYG7e3t2Lp1K/9dRkaGbj4lXVRIVIY2AABM2QUilTQSNYqOjsaGDRvws5/9jBfVsmXLkJaWxpvLiBEjkJWVBSCy+KgHlhrM5SyuPDNV0zR0dXVhy5YtCAaDuPfee3XVBrniS4ILvftqNC0y043eMxwOo7GxkfsNYmNjuQp7q+FHP/oRXC4XB2zyIGw6vwB0Dg/dEvWFstUGgwF+vx92ux0pKSkIh8OwWq1wuVwwGAzcD5WWlsbVSqJ7G41GuFwu+P1+2Gy2K/qFyIGXqey9GQNk0y6XC9u2bYPf70coFEJBQQHGjBmDKVOmwOfzIT8/H/v37+fqUnV1NUaOHImLFy/iyJEjmDRpEgYNGsSK2FOmTEFKSgrPgly2bBmeeuopPPjggzf/hClcF1asWIFQKMT7jMFg4Ao9OcnknJM9UdKNHsv0YLLHjo4OlJaW8r6YlpaGixcvMrvDYDCwGntmZqSrJRgMoqOjA21tbVwBiIuL0znUcvKFcLU+JLJ/6pEluvrBgwdx+vRp/P73v8fhw4dx+vRpZGdnw+FwYO/evXC73XC73Thx4gTuvvtumEwmFBQU4NVXX0VqaiqSk5ORlpaGyZMnK4GvAY4FCxbA6XRykoT2WgA8poiStWazmdkEgUAA8fHx+M1vfsPsqba2NkRFRWHJkiWc9KiqqsL48eO5z3/ChAlYunQpnnnmGQDA5s2bOeFDc1PJpsi+CwoKUFRUpGMGWCwW7hml6zUJ5tDx7969m9VVKTigxKJMlacAm9aL0WjUVaXktUbP0Rq7msqsws3DokWLcP/99+tYXbSPUwBHlVOi3JaXl6O6uhrhcJj3RTq/Xq8
XdXV1XCHduXMnkpKScOTIEYwdO5Y1C2pra9mnJEo7JWKEELBarfD7/bo2OLLBtWvXwmg04siRI2yDQghOxD/99NP44x//CCEECyySBoLRaMRrr72GUCiE/Px8rg5TwkgG+cREh5aLNXV1dXjllVe+cdT066mgDgZwQAhxEsARANs0TfscwL8CmC+EKBcOTWwAAAhUSURBVAcwr+cxAHwKoApABYA3Aaz52o/6FsOxY8dw9OhRXLp06Yqhx4B+9AwtGvn3tKEnJiZyBp8UKeVKAS28xMREdHV1obS0lAOSqqoqpvTKYguhUGQI/ahRo5i2W1xcjAsXLsBqtcJoNHJfCx1rb2UyciLLy8uZsglcXpB0cUlMTLzCqSMn0u/34zvf+Q6AyxegUCiE9vZ2fj19PgCUlJRwdfjtt9/+2s7VQAPRIuncAuAsHw14p4s8gCuSAuTst7W1YciQIUxtBMC0alItdTgcHNAmJCRwYEsVV3rvhIQEXZVUTjrIlXUAVzjxDocDbrcbY8eORUVFBXbs2IHi4mI4nU40NjYiOTkZLpcL9fX1yMjIQGJiItxuN/Ly8uD3+5GQkICTJ0/io48+wsSJE1FVVQUgQuGhzK6iP/Z/rF69mhkTsohab+cCADvsZGdyX5ucDCT6emVlJdPRMzMzuSefevDT0tIARATpSKDNaDTybDqDwcAKpbQvycrYdEvH2TvhKIRAbW0t9uzZgwMHDqC+vp5FyKKiolBZWYmKigqUlZXx3L3u7m4UFBTg8OHDePfdd9HZ2cm6A/Hx8cjJycGxY99oOYhbBkuXLsUjjzzCARyNsgPASWHaNzVNY2rg3r17WciF+gCHDx+OcePG6apODz/8MIQQKC8vx/Hjx7Fx40YOMGkcXSgUwtChQ5n9RCwtmg1Mc6QJQggsWrQIJpMJRUVFnGD0eDw6SiQxIAiapmHUqFHslwghsGvXLlZtpyqprCov+yBXS3gq9D2WL18O4LKuAI3Tov08Pj6eBblo1B3t57GxscjOzmZfMi0tjenib731FoLBINra2lBeXg5N01jMkxgw4XAYb775pi4xSKySV199Vbcfy6KRxCAgv5LouiSiJPtMoVCIRzUFg0HU1dXB7XbzWiXfWq78y76ObLfEQvwmUtNFf1iYQoi+PwgFBQUFBQUFBQUFBQWFG4Xj2nXoD5luxpFcBzoBlPX1QSgofEWkAlCTvBUGEpTNKgxEKLtVGGhQNqswEHEz7HbY9byovwSoZdcTTSso9CcIIY4pu1UYSFA2qzAQoexWYaBB2azCQER/stvrHTOjoKCgoKCgoKCgoKCgoHBDoQJUBQUFBQUFBQUFBQUFhX6B/hKgvtHXB6Cg8D+AsluFgQZlswoDEcpuFQYalM0qDET0G7vtFyq+CgoKCgoKCgoKCgoKCgr9pYKqoKCgoKCgoKCgoKCgcIujzwNUIcQiIUSZEKJCCPF/+vp4FBQIQgi7EKJUCFEihDjW81yyEGKnEKK85zap53khhPh9jx2fEkJM6tujV7hVIIRYL4RwCCFOS899ZTsVQjzR8/pyIcQTffFdFG4NXMNmfymEqOvZb0uEEPdJv/tpj82WCSEWSs8r/0HhpkEIkS2E2COEOCuEOCOEeKbnebXfKvRL/BWb7ff7bZ9SfIUQRgAXAMwHUAvgKIDHNE0722cHpaDQAyGEHcBkTdNapOf+LwCnpmn/2rNAkzRN+0nP4v7fAO4DcCeA/6dp2p19cdwKtxaEEDMQmSX9rqZp43ue+0p2KoRIBnAMwGQAGoDjAG7XNK2tD76Swjcc17DZXwLo1DTt5V6vHQvgTwCmAMgAsAtAfs+vlf+gcNMghEgHkK5p2gkhRBwi++QSAE9C7bcK/RB/xWaXoZ/vt31dQZ0CoELTtCpN07oBvAfgoT4+JgWFv4aHALzTc/8dRBY6Pf+uFkERgMSejUFB4YZC07QvADh7Pf1V7XQhgJ2apjl7nKSdABbd+KNXuBVxDZu9Fh4C8J6maX5N0y4CqEDEd1D+g8JNhaZpDZqmnei53wHgHIBMqP1WoZ/ir9jstdBv9tu+DlAzAVySHtfir//jFBRuJjQAO4QQx4UQq3ueG6xpWkPP/UYAg3vuK1tW6E/4qnaq7FehP+DpHirkeqJJQtmsQj+EECIHwLcAfAm13yoMAPSyWaCf77d9HaAqKPRnTNc0bRKAewH8oIeWxtAi/Hglg63Qr6HsVGGA4A8ARgCYCKABwG/79nAUFK4OIUQsgD8D+KGmae3y79R+q9AfcRWb7ff7bV8HqHUAsqXHWT3PKSj0OTRNq+u5dQD4EBGKQxNRd3tuHT0vV7as0J/wVe1U2a9Cn0LTtCZN00KapoUBvInIfgsom1XoRxBCmBFx9DdqmvZBz9Nqv1Xot7iazQ6E/bavA9SjAEYKIXKFEBYAywH8pY+PSUEBQoiYnoZyCCFiACwAcBoR+yTFvScAfNxz/y8AVvao9t0FwC1RfhQUbja+qp1uB7BACJHUQ/VZ0POcgsJNQa+e/f+FyH4LRGx2uRAiSgiRC2AkgCNQ/oPCTYYQQgB4C8A5TdN+J/1K7bcK/RLXstmBsN+abuSb/y1omhYUQjyNyMI0AlivadqZvjwmBYUeDAbwYWRtwwRgk6ZpnwshjgLYIoT4LoBqRJTQAOBTRJT6KgB4Aay6+YescCtCCPEnALMApAohagH8AsC/4ivYqaZpTiHErxG5CAHAC5qmXa+IjYLCV8I1bHaWEGIiIvRIO4DvAYCmaWeEEFsAnAUQBPADTdNCPe+j/AeFm4m7ATwOoFQIUdLz3PNQ+61C/8W1bPax/r7f9umYGQUFBQUFBQUFBQUFBQUFQl9TfBUUFBQUFBQUFBQUFBQUAKgAVUFBQUFBQUFBQUFBQaGfQAWoCgoKCgoKCgoKCgoKCv0CKkBVUFBQUFBQUFBQUFBQ6BdQAaqCgoKCgoKCgoKCgoJCv4AKUBUUFBQUFBQUFBQUFBT6BVSAqqCgoKCgoKCgoKCgoNAvoAJUBQUFBQUFBQUFBQUFhX6B/w8hl4s8XSiyPgAAAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "transformed_images = [None]*5\n", - "to_tensor = transforms.ToTensor()\n", - "for i in range(5):\n", - " t = transforms.RandomAffine(degrees=(-45, 45), fillcolor=128)\n", - " transformed_images[i] = to_tensor(t(pil_img))\n", - "plt.figure(figsize=(16, 16))\n", - "show(tutils.make_grid(transformed_images))" - ] - }, - { - "cell_type": "code", - "execution_count": 31, - 
"metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAA6gAAADWCAYAAADcga8EAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvNQv5yAAAIABJREFUeJzsvXmUZHV9Nv7ce2tfuququ3qffYEZhgEchlUFj2ICxJgo/jQaNQbPKxwT17igCGQAYTgsvqgYFzhGE4+JJyQk6kFkE0mUfcI4MMM0Pd3T3dN7LV1VXXvd3x/t8+nvvVNVXQ0zyuvc55w+3V111+/y2RfNNE04cODAgQMHDhw4cODAgQMHv2/ov+8HcODAgQMHDhw4cODAgQMHDgBHQXXgwIEDBw4cOHDgwIEDB68ROAqqAwcOHDhw4MCBAwcOHDh4TcBRUB04cODAgQMHDhw4cODAwWsCjoLqwIEDBw4cOHDgwIEDBw5eE3AUVAcOHDhw4MCBAwcOHDhw8JrAcVFQNU37Y03TDmiaNqhp2uePxz0cOHDgwIEDBw4cOHDgwMEfFrRj3QdV0zQDwEsALgIwBuApAH9hmuYLx/RGDhw4cODAgQMHDhw4cODgDwrHw4N6FoBB0zSHTNMsAfghgLcfh/s4cODAgQMHDhw4cODAgYM/IBwPBbUfwKjy/9hvP3PgwIEDBw4cOHDgwIEDBw4awvX7urGmaf8HwP/57b87duzY8Xt5jmq1ilqthnK5jKmpKWiahmMd9twMmqb9Tu5TqVTk3dxuN3Rdh67rME0TpmmiVqsd9VymaaJcLkPXl+wY1WoVLpcLmqbJT6VSQaVSQblcRq1WQ7VahaZpMAxDxpLXqNVqlr8Nw4CmaXIO/3a5FpemaZoyRuq1+Lz2c/lZrVZDrVaTz/ksmqahXC7LsdFo9FXPd7FYhGmaSKfTiMVir+pajXA81slKrlkoFODxeI461+VyoVQqIZfLQdM0uFwuFItFAJDja7Ua3G63rDXOX7VataxHl8sla4nrxDRN+Hw+aJqGQqEg82cYBnRdh9vthqZpyOfzluur5/BeLpdLvgOAfD4va4fX4rtxT5imaVmjzaCuSd6T+4zgGuTvUChkWbsrQX//8bP7PfPMM/h90WQHDo4VnnnmGUSj0Vd1DZX/HGs6rMobx/La9a5lmqbQO/vnpLWkl+Sd/F491n7u8UK98TZNE6FQ6Khjy+Wy0O9670iQpvN9S6USNE2Tc+3w+XwNvzsWGBgYaPlYhyY7+EPAM888AwCzpmnGlzv2eOSgngvgOtM0/+i3/18FAKZp3tTkHPN3qRSq+MY3vgGv14u3vOUtMAwDAHDdddehWq2iUqlYlDe7YvRqGIpdgWv1HPV3s+NIhEdHR9HZ2Yk9e/bgnHPOAbD4Huo1+J48JxAIwDAMlMtlpFIpyzhks1lUKhV0dXUBAHp7ezE1NYXnn38e1WoVmUwGuVwOuq6jra0NpVIJPp8Puq6jVCqhXC6jvb0dpmkin88jFAqhUqmgWCzK56lUCrFYTBQPr9cLn8+HiYkJdHd3AwAWFhZEuD9y5AjC4TAAwDAMuN1uLCwsIJ1OIxQKwefzwev1wu12I5PJIJvNyvy9/e1vt4xFPUW6lXn5zW9+g5/+9Kd473vf+6rnT51DrslWnmcl11fXXrPjdV1HtVrFI488gvPPP1+EF4/Hg3K5jNWrV2N4eBhPP/00arUaBgYGcODAAXi9XpmrdDqNVatWweVyIZvNwu/3I5vNYm5uDgAQi8Wwdu1aGIaB8fFxlMtlzMzMoL29HYZhYPXq1XC73RgcHEStVkMqlUJ7e7tcs1gsYnh4GLlcDh6PB5VKBdu3bwcAPP/889B1HcViEX6/H9u3b4dhGBgcHEQymbSs+3PPPRezs7OWfT45OYlUKoVNmzbB5XJhYWFBzqk3X+l0Gi6XC263G6VSCYVCQdY/j1GVdAC44IIL4HK5VkwPbrqpIUl91fhdG+ocODge0DQN73rXu476vBEtrbfueaxqWGp0bqO/G+0ll8tlkSmWQzMewO9IX6rVqvAPACiVShgcHMTAwIBFfqlWq+js7MTs7Cz27NkDXddRLpfFSNwMjd7Lbkzmb96XdJCGcxoAaRRXDef8m/9rmoYdO3bIu6njSzo7Pz9/1HjwXWlQprGzWq1ibm4OfX19R80Vj3vTm9607FjY54EGeD47DZcqbrnllpav69BkB38I+C3decY0zTOXO/Z4hPg+BWCTpmnrNE3zAHgPgP88Dvc5JggGg3jyySdxzTXX4JlnnsHhw4exa9cu3Hbbbbj00kuh6zo8Ho8QHXpEXolyymvQ+7NSYrOcUqx6CycnJ3Ho0CFkMhmEQiGcffbZqFQqABY9W21tbYjFYgiFQvD7/fB4PKLElctlFItF1Go1eL1eOa9cLqO7uxv5fN4iUBuGIconvaj8rlKpoFqtolQqoVQqyTsXi0Xouo5KpSLer3K5LBbccrmMbDYrHlxe3zRNZLNZOb9cLsv7UznhPTwejzxDuVzGwsICstksDMOAy+XC61//egvDr8fQWpkTADjllFOWPXYl860qMa0op1xPrT43IwdaWcfValUUPh7PfUDPqa7rFmEoFAqhVqvJfHAN6bqOQqEgXuz29na43W643W54vV6Uy2VZM/TI+nw+5HI5mKaJZDIJn8+HcrkMr9cLwzAwNTUlY+tyuRCJRKBpGkqlkqwlr9drWRf8zjAM8XbG43ERyvie3d3dWL9+PSYmJlCpVBAMBusq9hRA2traZJ1yTxWLRVn7HCt17H/5y1/iqaeecoQPBw6OE8gbVeWhHtS9TZpGJWk5esn9befTzfY1afarVU4ByPORDtsVzGq1Ksopo0J4LL2EpIX9/f1i5O3q6kI0GoVhGBgYGEBXVxf8fj+CwSC8Xi/8fj90XUcwGITf74ff7xfazOdm9Ik9YovGPEaweDweBAIBhEIhhMNhtLe3IxKJIBqNIhaLoaOjA7FYDEeOHJEIFfW6hUJBvKyqckvl1zAMeV7SaI/Hg87OThw5csQyh5w30zTx0EMPtUyf7R5olX+rY6LySwcOHByNY66gmqZZAfA3AH4G4EUA/2qa5r5jfZ9jhZ/85CcIh8O45JJLEI/HUa1W8dJLLyGdTuMNb3gDdu/eDbfbLWGtrxSNmEYrYUPNvK320NZKpYLBwUFomoa+vj5s374dgUAAPT096O7uht/vR6lUQiqVwtzcHHK5nCgPACRcc35+XjyhqmDvcrnQ09MDwzDg9XpFcajVaiL0V6tVeL1e+btcLqNUKsl9KpWKKJzFYhHValUYCIl5sVi0KCvJZFIUIlpAAYjCWavVsLCwIApvoVCQkE5e0+/3C9PSNA09PT0Nx7RVUFGrx2waeTQbCUe8Xr3rL4dWw8/sCuZyTJfMfefOnUd9V6vVZC4oYCwsLACA7JdqtSpGj3K5DI/HIx5YAPB6vXIf
Pg+PMwwDPp8PhmFgYWFBhByXy2UJzc1kMvKdpmno7u6GpmlIJpPi9dQ0TTyZXGt8Pl3Xcfrpp0PTNGzevNkiyPK9EomEHO/1ept6UWKxmAhKVLxVYckuANVqNWQyGczMzDhKqgMHxwGqskEDsYp6oaSqUtoKDaaHrF6EhR1UENVnqwdVeW12XVXZVN9VBcNgVYWxVqsJPSVP4HekuUyp0DRNjG5erxeBQEDeBQAikQhisRjWrFmD9evXwzAMdHR0oFKpIBqNYu3atdi+fTvWrl2LQCAAv98PYFGuiMfj6O/vRzgctqTj8Prkmfw8n89b+BfHiTze5XJZjI3q/NHwTuM2j2VUlzpu6rp48sknG86TCq4vwzCOCjdW00SayQEOHDg4Tn1QTdP8qWmam03T3GCa5o3H4x7HCrquY3p6Gvfffz/uuecebNq0CaeccgoMw0AymcSBAwewa9cu3HHHHVi/fr3kvDVjWPWUFRIqVRkEmodtqIKs3Xpbz0rLPMHe3l4MDAzA7XajWq0im81idnYWyWQSxWJRvKMk6JFIBG1tbfD7/QgEAggEAgiHwwgEAvB4PJb8wWKxCE1bzJ2rVqsoFAqYn5+HpmniKQIgiiWVU4bS8LhCoYBKpSKCPD1eDCGmskkPFD9PJpNyj2q1ipmZGeTzeWSzWfFSVSoVzM7OyjiVy2VkMhmkUinLmKtzoVpilwOFATVPthlUZrvc2iFzbTVct55gtdyz8PrNLPdksJyv5557TtYM31vXdfFKM7dzdnYWprmYN0oFNhwOy7F+vx/lclkMGzR0BAIBzMzMQNd1Cf2uVquSP5ZIJCw5oZ2dnSK4UHGkoMXfU1NTkv+qaRpWrVqFUqmEl19+Gfl8XvajYRjYvHmzvHtfX59FkNV1Hdu3b8czzzwj3lg1L8kuaNZqNYRCIczPz4vi7fV6LetWNRTw3D179mB4eLjp/Dlw4OCVoxG/tX+mRgepxqpmaKQYNnuOZvmSgNUr2ox3tGKg5LUYOQIsRd7QiKdpGsLhsMXjTCMv+TgNv0wHUiNl6BFdWFiw1CNgtAwN5FTeqBz29fWht7cX1WpVlEy/3w/DMLB27VqcfPLJFjnI5/PJ96rXular4fHHH0epVEIoFLKEZ6v8wzRNhMNhFItFVCoVuN1u9Pb2Yn5+XoysnCPS8Hw+j8cee2zZuWV0UqVSEV6pRgRxvhxjpAMHzfF7K5L0WoFhGDjppJNw6qmnYtOmTZLf5vV6EY/H8cILL8Dj8eDFF1/Exz72Mei6jv/4j//Ao48+KgqgndDQY0cCp+Ye2K2DRDNraj1rr0qQqbxFo1FUq1VhDh6PxxJuQ48lz6vVasjn88jn83JdMiJ6NVWrKhkWFch8Po9yuYzp6WkAEAWQxW7IgJnzyc/pGaVlltZLej+pELMAD72pZE5kGCMjIxbvLe9dLBbh8/lQKBSg64vFd7xeL5LJpCgrF110kWUs7fPQDKonrNk59UJBW4EaDksFqtmz8Her4b18pkYMUs2boVKZSqVEMFAFpkKhgKmpKXnmhYUFS/4T51ENezUMA8ViEYFAAOVyWXKVaeigB5WhWnx/joVpmujs7ESxWEQikZB15vP55H5cK8lkEh0dHbJOaMihcOR2u2EYhsVYEQ6H0dbWhnQ6bQnHPeuss/DEE0/gzDPPlHvxvexwuVyIx+OWkPJarYbp6WlRsO0CkGmaGBwcRLFYxObNm1c0nw4cOKiPlRgf1YJoK03DWcl+paK03HPxGdSicfWOaUbP1VQR4OjIHsMwkM/nUSwWYRgGgsGgKI6kt8ViUbyvKq9h/QjKGD6fDx6PB6lUCh6PB9lsFsFgUFI66KVk2k4gEEClUpF0HRpEgUWlLhaLwev1yjvm83kEg0Ex+nm9XqHTxBvf+EY89dRT2LFjh/AYvhvHm8fHYjGk02lUKhXx5M7MzKBWq0ktDjXqqlKp4KGHHsJFF110lMLL8Vf5tcq/eQ0ntNeBg9ZwwktAuq7j4MGD+Pd//3fcf//9ImCTmNx///14+OGH8W//9m/Yv38/vvnNb+Kd73wndu/ejZtuukkUQDXUUyWWdk+J+pmKehY1NRyQ5/EeuVwOpVIJv/nNb+D1ehGNRi0MiOE3hUIB6XQaiUQCmUwGCwsLYjXk9ZnzQY+p3++XHJBQKIRoNCqVVRcWFmAYBg4cOCDnstgN83VVZZg5hMzjLRQKoiRznJmTyPxTfp/P55HJZMQby2eoVCrI5XJiqSwUCnJdeol5XSq3s7OzMo60zhIrDbUhw282n+qx6twtB1X5A6zMrpm3sxUhjGuHa83uzec9+D2FFAoK3d3doqyVSiUZQ3oKVWWLc8U50nUd7e3tSCaT4jmnAJBIJGSu6N2v1Wro6OiApi0WvAAWld9gMCjGiFqthqmpKVkbALBmzRoUi0UcPnwYLpcLwWAQwGK1ZgpB9J7SuHTJJZccNQ59fX3yHqrwt3PnTmQyGSn+xTGzzy2Ppye4VqvB5/MhFovBNE0x6tTzjoyOjmLPnj0N59GBAwetYyWGQjXnvxXDJY9rVTklrW6mUNphN6LWQ7MoLPIqVT5QUzyYdsEoFdI8AFIxt1KpiGeTyioVS9M0RUagh1BVSGksd7vdSKVSku6j1iEAlqrhl0oleL1eKd5kGAaOHDkiUTq1Wg2rV68W/jQ2Nmbhx6Zp4nWvex327t0r0Txut1sUShXValUKNTIdpbOzE4VCAZlMxjJnqjz2wAMPCN1Ww8DtyqddOXW8pg4ctI4TXkFdv349/vRP/xSf+MQn8Na3vhWrV68GsEiUstksPvzhD+PQoUNYu3YtXnzxRdx///34r//6L/zgBz/Avn37cP311+P2229HX19fS8pEM6iJ9Halg38XCgVks1mMjIzA4/Fg8+bNcLvdyGazciy9qMBijp9aECkYDEpVW3ouFxYWkMvlsLCwIES8WCxiYWEByWQSk5OTogCQubCgjFqFkN4oEnG1fQjfh5ZWFsahAkuLNXP2eD7fh8eSUajhPvycXlYqOXxe5iiSsa5atcoyps2EC/U7NaxXzQmux3TshoV6aPR9I4GnVe96I9jPb1SlMZ/PY2FhAZOTkxgbG8PCwoJ4HdX3UvNH+ZtzSAs51xPnO5/PiwDIOeNxNDbQQh4MBlGpVGT9UZmjkppOp8XTrwoglUoFmUxGKkDXajVEIhGUy2VMTk7K/Q3DQH9/f93x1zQNW7ZskXuqYdFtbW0AFoU2Vo+2j606t4FAQKINfD6fFHGipV49n/M5NzeHX/7yl45A48DBq0SrvFhV3OzKRL0Q4EbfNUIjT2Yrz9voWJXf1oMa6cOoEVXRYpguAMmBd7lcSKfTACARM7quIxwOiwKpKp+GYYhhmsouaSb5ulqV3+12I5fLCT/o7OxErVaTQnQ8nwX0aGBmZNTCwgICgQBM08TExASKxSIGBwePGuvt27djeHhYKrirbdLsiEQioqTSkFgsFjE/P2+pb6AaYR966CEL/20mR7wSedCBgxMdJ7yCOjIygp/85Cf4yle+IuGFJEL//M//jLvvvhv
XX3898vk8fv3rX+POO+/EY489homJCfT19WFychI//OEPcdVVV+GOO+7AOeeccxSjawQ1VBdYYiZqvinDEJnQT2K7ZcsWyYcrFotSXdQeBkQim0wmxetaq9WQy+WkmBBzN1UvJRkwv/N6vZiZmZE8QXohc7mcFFxi3mo4HJaftrY2hEIhtLe3IxqNoq2tDZFIBKFQSH58Pp94b30+n0UZoBJFJVQNkVHH2TAMS2VC1XNNpsxrnHXWWTLey8FumVWty83AZ21mNa2n4K4krEwNPwWWZ4KNGCifk15sPpNqODBNE0NDQ5KrZPc6k7kDgN/vl+JXtLSrobfqemOIFIsQlUolyVmlkYSedcLv90tRIYYT00OvCk1qfrMaoq5W5TzvvPOOGiPVCLJt2zaLQKl6tqkc18uFUueWwhaNOz6fT5Rc5koDsAhCwKIx6rHHHnOUVAcOjjO459Vqs60qFStRVltpJ0Wa0UooaCvPyGOobKlGZXolqTxShpienhYeQG8nU3RoFOY1mLLj9/tFuTUMA5lMRngF80GpmDKEloUXXS6XGEHJO2k8JE1maHBnZ6fICHNzczJX4+PjFvqsaRq6urqwZ88eeb5GMplpmmhraxMPssfjQUdHB/L5PMbHxy1eeDXc98EHH7S06XPgwMGxwwmvoH7qU5/Cl770JVx55ZWSi0lG9Za3vAXFYhHXXHMNPvOZz6BQKOBrX/sarrvuOszOzuKOO+5Ae3s77rnnHuzduxff/OY38f73vx+333473vOe9wiToQKlCrhq6Kdq0eQPi8eQYO7bt0/CJFUrXqlUgsfjEWGcymWlUpH8B7/fL+Xa29ra4PF4pCS81+sVyyKfN5fLYXp6GtlsFrVaDTMzMxgZGcHCwgLWrFkjxZGSySTS6TQ6OzsRiUSQSCQkxBZYCtlhnoo9FEbNk2WYcTAYRFtbG6LRKCKRCCKRiFw/Fouhs7MT0WgU3d3diMfj6OrqQldXFzo7O9HZ2Yl4PI54PH6UMhwIBEThUhWB5Ri8neG1KrTYqw4uB46JvRhWo2PVdaMKVc3eo56Xj0phPp/H9PQ0UqmUWPkZ2hsKheD1ehGLxcRgwD6fXKu0rNNL7nK5JF+oVqtJkSx67L1eL4aGhiSPiLmkqqeUkQBUfmnBZ4i6atip1Wro7++XwlkUgJgLSu/s/Py8JfRYbVOkRiyoc33KKadYxo77hOFmpmlKT956YWQc62g0Kop7IBAQrwLDfdV8J6JUKuHnP/+5I/w4cHCcoIbpLqc8qjSnXlqFSrtV3qGmVjRLKVENXMs9Rz160QimaWJ0dFT+5j1YIEiluayarr4bI6j8fr/FeM7jaFgEIMUaSec8Ho/FqMkiSapyzNQq1h+goVTTNAwPD0PXF6vwm6aJDRs2AAAOHjwohkfW02BuPxEMBrFjxw689NJLkvPajI+HQiEpBuX1etHZ2Sn9vDluam0FAHjsscewf//+lni3AwcOWscJr6DefPPN+NSnPgWv1yuNmnVdx969e/Htb38bt956KyqVCm677Tbs3r0blUoFV199Nf72b/8W+Xwe119/PX74wx/ia1/7Gubm5vDjH/8YQ0NDeN3rXofbbrsNZ555pnjwVK+aHSrjCwQC0HVdhOmhoSHs3LnzqIJMVEbVc4FFhjs8PCwhNgsLC5ibm0MikZCCALwnLcdsLzMxMYFCoQDDMJBOp3H48GGUSiXE43EMDAxI/p3L5UImk5FS8mRIap4oFYtcLifX1HVdmBFzEek5ZWsYVXGmkk1lVg2z5XiSMahjS0s1w45o3WW11pWEfanj3Uqu0XKe8+XOXU7goFCiGiqWA+fafh1apUdGRiT0WoUafuvxeLBhwwYRYlhYA4D0JqXSpxpiaNDI5XJyXb/fj4GBAQCQsDDmlvJ5qczRus/CSaqXnP9TuKlWq5idnRXBq1AoSA/UbDYrHgTDMHDyySfL/dRWBry/iv7+/qPGlOuX4eO00KvXUJ8RWOoPy+eIRCJIp9OSS2u/N8/du3dvw7l14MDBylHPSLwcVD7eyrE0IKo9OxvR65VEz1DRbfW5S6WS0HZ7qGqhUBAaapdP1PZvasVh9kJnsTtVlmAah6r88lhW1CcPj0QicLlckqJEIwHrerAIZK1Wk5QSRkXl83lLZfRSqYSurq6646G2PVPrTxCqAZfFJnkeQ5LtkTRq0caJiQkLf3PgwMGrxwmvoHq9Xvh8PnR1dVlCSKlUfulLX8LnPvc5FItFXHfddfjsZz+LYrGIb3zjG7joootQLpdx/fXX4+abb8bk5CQeeeQRTExMYGxsDHfffTc++MEP4uabb8a73/1uEUrtIZm0yAWDQYTDYaTTaQwPD4tFcuPGjVI4yB5myHNVy6bX60UkEgGw1HO0VCpJIRo1JIWENpPJIJvNwuVyYXp6WjymmzZtkr6nrO6rWoF9Pp80/45EIvD5fNIWplwuY2FhQc7j81DRUHNZ1MqtauU7lVGoChR7ugWDQbjdbvT19WHt2rUoFAqIx+MIBAKSw8gQUU3TcOqpp9b1JjaCypSOV/W9Vgoc2ZluKyHEPI+MnnPHQkG0qk9OTmJmZgarVq1CNBqV61EAmpubk6rNrOYbDAZRKBSkQBYNEYSaBwxAimYxvJeCC73s3G/0cjPHlFVw+ez0WubzeVF8mY9ULpeh64uFurhfOjs7JbSWiioV7G3btsnzLSfotbe3S64Wx4b5sM8995yMGT3GqgFBNSCxGT2/c7lcWLNmjey7ekIqi0H97Gc/q7sWHDhwsHLYw/8bRT6ovwEILW0Euxe11XDdlYYVt0L/eV0A+MAHPnDUM7LwIZVV9jYnr1WjtChHsII5DdKhUEjSZ1j4jsooeYXX65Uq7Uxz0HVdCuGxzgCNwGvXrrU8J8OF6/XN5vju2LEDACy1A4BFXrlhwwb85je/kXPp1bWPIeWhSCQixfTcbjcCgYClpkK9KKYnnnhCQqMdOHDw6nHCt5kZHR1Fb28vDh48iE2bNgFYJGjf+9738P73vx/f+973cOutt+Liiy/Ggw8+iN27d+OSSy7B/fffj0ceeQQ9PT1wu93YtWsXLrvsMvzLv/wLfvzjH+O6667DT3/6U2lx8o53vAM7d+7E2NgY7rrrLvFKaZqG1atXi4czlUrB7/dj7dq1EgoDLOW8qYqJWkWPFk3TXKxeOzIygv7+filOQ8KqenPT6bR4vUzTxNjYmFgnN2zYgEKhIIoAsKSwHDlyREJnKfRTyWdf0lqtJnklqqBOSy4VDgr9xWJR8ghpfaUizOqAVF75zvyb3q9SqYRVq1YhEAggl8uJ944GB3ptVSznhaSCt1y7F4LP1kjQsAsUqsGhmbChego5LsvdS70H1w+Z7NTUFIBFxr9u3Tp0dXUBWPRmzs/PC9MPBALiefR6vRgcHIRhGOjq6pLwbz6L6mHm/PJ+NFaooWvMY2JD+Ewmg87OTlE2Acj8UyhSPbrqugqHw5JbTYu9KtCUSiUkk0kZE7uQ2Yp3fN26dXjhhRcsY24YBnbu3Ilf//rXOOecc2Cai7lM7PPKuV
jlz5qChoQE7duwQA0nno34ag+cLsCLr8f/qOdVnc7J50FEWnTc12Uh2XhONZAZYKkc4Hapvsn3BSJG+TxryoVBICiNxXxOwpMzSzh2jpZq+FQgEUFRUhPLyclRVVcHr9cp59Pl8WLhwIY466iiUlZWJ48OIxtKlS7F06VKpEqsjGRUVFZLKQSOOEY94gIgRQ6aNaENOy1tem1HG4uJidHd3S4SZ68PR29srOamTDe38a6CR93jDDTcgFAqhoaEBQ0NDOP744zE4OIi6ujqcf/75GB4eRlNTE84++2zYbDY8/vjjWLVqlYBwGji22+147733LMChYRiS+kCjNRqN9b2kzKdsIiuI8tzlckl1dNM0UV9fj5qaGtTX16OkpARlZWVwu90WxhXTfWpqalBbWysVPoeHh4U9ROdQ61mmdDidTqmgC0AKz7DIHfcz26KxNZ7NFqsVwGtovURnW+f0MTLDyJkGQ+i4kELJCC0rUgcCAVRXV6cVxdP74GCM63gZlmqYponDDz/ccuY1O+Soo47CG2+8IeeYuYS8vnYgPmmQMR7kS3dovZpI3ul7piwYGhrC1q1bpagXo+EsykZZxigqKf4ej0dkEAsPmeb+dnTc50NDQxLRpIyk/OF5jD8T1B16D9Jm5plhehRbKGqwj+8naATsB8C418l+1IEiynntjH/hC19IOIdTWUv+O3PmzAOu4fP5EAqF8MEHH0gnkPhCihoc4v4kmAbE6NY+nw8VFRXYu3fv31XklONz76BeeOGFuPHGG3HRRRfhuuuuw/z58/HEE09g9uzZuOWWW7Bq1So8++yzaGhowKZNmxAIBFBcXIwXXngBr7zyCq666iqEw2HU1tbiW9/6FrZu3YqJiQm0tLQICh0MBiVX1Ol0Ynh4WKhdrIgHAGVlZaiqqpLkeSKrdBCi0VjBCZaddzgc0teMeXp0biloiOCPjIygra0N7e3t4sTm5uaisLAQc+fORSQSEUW0e/dufPjhhxgcHBQjKS8vDzNnzpSIaXNzM3bu3ImGhga4XC64XC4RJJqexGIJr7/+OgBIYaDJDiwFEQ+9dtjjET9N/6WAKC4ulntnsQYqUlI19Pif//kfRKNRnHPOOXJvukqpjtqmGuFwGCUlJVIsiPdKIMHtdqO/vx+9vb1SJCISiUhVRa3YKViGh4exfft27Ny5EyMjIxJtZGSZSKEezFUlFS0SiRW+6urqkqJDLDxASh8LGWVnZ2N0dBR9fX2S9B+J7C+pT2ouUfumpiY4nU6phkyKMZUXk/7tdru0KaLDS6HpdrsxY8YMVFdXC3LJNSOymp+fD7vdLsqDuRdaqWohq+m+cWn8AAAgAElEQVRz2hhIJ9LGfRX/+mSf006ZHuk4OmykTUWrozFLly6FYRhoaWmxMA8+zZEoIhz/Gs+lBgtS3asGmaYy9NlPZ3A+E61DvFOtneNkeyBVHjMNPZ3jqfcFI/4sKOf1esW5cbvdIutyc3OFLsrrcu19Pp/sk+zsbGFnMMrD79WVsBmdp36ggcbzZrPZUF5eLvngpLKRjcA0D8C6B9577z3JH+O96/1Lo1n3Sc3MzMS0adMwODgo+iI+ujsyMoLXXnvtgDXQDqMGnQhiaqMSANatW4dvfOMbOOmkk2AYsX5/P/rRj2C32/Hoo4/illtugc1mw5NPPokLL7wQExPJe1nrft40aHUvazpq2lDmfmG6DqM6jKbyemQ6sWUbz5GmLDY0NGDv3r1oaWlJ2cua+tMwEvey7u/vRzQaTdrLmvmj2nmgXGQva+a18v4T9bLmfuCa6F7WlBX6fuN7WaczdPT2YB3UqTjC0eiBvay1rE7UyzqRLPykIlX62eOj5JMNnn/NDtP3x2tmZGRg9uzZaGhoQE9PD7Kzs7Fy5UqUl5ejoKAAOTk5kl9KoGhsbExSh0gDZ5s7AsvcVxkZGXC5XNizZw+cTicCgYD0dGbggTYu7SSCZmQkci9qGjrPFWm8jM6y0wBbbtFOpA1tmib6+/ulWjDPbDgcFnCH9qnNZhNqN+Wqzl09mMHnBmKtbNh/Va9NdXU1Zs2aJW0faRfp6Hk8q4XgNm1MMu7KysqkuFUiPf9Zjc+9g/q73/0OTz/9NO644w7ceuutuOyyy+BwOHDbbbcJQs1m5ieddBJCoRD27NmDc889FxkZGbj33nuxatUqOJ1OPProo1izZo3kyRlGrM8bCydQCDAfD4A4rmzYzfL+5JVTCZMGzI1KxaYpTkRQaMQzWsbS9Kw65vP5JG+0oKAAIyMjouAodMPhMAKBAKZPn47q6moUFhYKaqarAOvcuszMTHg8Hnz5y1+GaZpSmTUUCuHtt9+GaZpoa2vDYYcdllAh8P9aUNrtdixfvhx2u92S28Noqv4s56uvr8/SuoYCizmL2lClIfniiy/CbrdLYjuNtnhnZTJH47bbbsOf//xneL1eEcpMwNc0DDqNzNWlgUhn2mazoaurC++//z7q6uqkZDufX6OCdDTpJObk5CAajYphEgqF0NHRIc/PtSLYwrnMyspCf38/2tvbZe44b8xfYw8uGkMApN8t9wLvUT9Xd3c3mpqaRHDzPX6/H9OnTxd6UDAYRENDA+rr6zE0NCQRHkb3vV6vADjJnB86AuFwGJdffjmcTif+9V//NW0n6GAFNM9BIsMgXUdq8eLFYsxqZ880TcyaNQvNzc0YGhoSZfRpDz03iWhEfC1dipmmEk7FSU0HLNJDR6QnG7yfVCNVVDg+v1VHl8kAMYxYUTfuU9LhPR6PzInOB9TGECs6c39kZWVZdIxhGOI8kMKv14N9rVlBWNNTWaSvtbXV4vjk5ubC4XBYCp9xXlesWCHtrBwOhwCrfHa9D9xut8ghh8OBoqIiqY7P5wGs6P+LL75o+U6uvY6mcG11RJ1rGA6H8fzzz+P555/Htddei4yMDNx999247bbbAACrVq3CTTfdhGg0il//+tcpe1l7vV6pJEpAIRrd38uaQESqXtak/Zpm4l7WZEfweUzT2suarYRS9bK222PVRZP1sh4cHBRHMVEva85dql7WTqdTvoMOue5lzWdM1ctaR1QT9bJOV25ORVZrGXCwTqJpWntZa/sjUS9rgsmHOhLJSO6veEppKnmqo6JA8gJKhhGrqjw6Oora2lqJ6hPw6OvrQ3d3tziQPBuUHSzmxqAK8+XZQpB6hLqec0fbZnBw0OJ0Emgjw4x7jWdgbGxMHE2yCAhaMeWJMpFnjrZVX1+fFESLRCLo6urC6OiogD4aFKUMYjS5t7dXvuv444+3zHW8TJtsTfgZrWMZcNFrSsDM6XRicHBQUgQ02BCv82jX0d8gqMdAhb6PTwpAOZTxuXdQv/e97+HDDz/ED37wA7hcLqxevVpaygwPD6Ompgbf/OY3kZOTI7moHo8Hv/rVr3DaaachFAph9erV+NrXvgabzYabb74ZJSUlePDBB3H99dfj9ddfl+IH2ggjmsxCMIODg2hvb7f0tWOuqKZ27dmzx4LOEJ2j46PpNTyQjLxmZ2dL2w+Hw4Hu7m40NjaipaVFqDV0mAwjVll4
fHwcu3fvxubNm9Hc3CyRMG58UjUWLFiAM844AyeffLLkQ0WjUUGneA8DAwM477zzDjBcgP2GR3ykg+gu6bD68Om8Lq0k7HY7AoGAGAy6PQmFHeeUP7/97W9hmib++Z//WT4Xf8gnO7QrV67E66+/ju9+97uora3F+Pg4GhsbJeKdkZGBbdu2Yc6cOQJC0OGk4GxoaMC7774rzde7u7vFCOCcUUgTbS4sLJTeoqQFUjmynYBpxnKVKyoqUFZWJvMYDAbR3d0tbQjC4bD00yssLJTG81xTIuFEw5nbqiNGPp9Pcku7u7tl/W02m/TGraqqQnZ2NsLhMHp7e1FbWyv5XZrWSOeexZjoULMaNNdVrw2FtGmauOOOOywGc7Khz086aw1YjQDSkA9VsC9btkzuQ0dPGEn94IMP0orIfpIj1TPFOwnpfIZD59XGK+hk3zUVKh/XJ/76ya4NJK/Qzb+lKrDEdSMljJ/hvzwHNG5MM9YTsbu7WyKL/LyORPDskfkwOjoq+aE00HReKwvfsVUU89pJT6NxyegXzy+fm1EG6p6SkhJkZGSgqakJO3futMxlJBLBnDlzpP0MIynJ9mhOTo58L0G1kpISNDQ0iCGoHWqHw4GXXnpJerhq4CbZGmpQcd26dfD7/XA6nbjzzjtx++23Y2JiAjfeeCMuvfRShEIh3H777fjGN76B5cuXp+xlTaZJsl7WAIQySBBRR1bYSi5VL2vqo2S9rJlfmqqXtV5rRmp1L2ueh2S9rHUEioBCfC9r5qQm62XNiFOyXtbxldkPtpf1VOVtIqcs3aHtDN3LOv498b2sAVhaoR3soF7TqRMaAEt0JpI5RomeXwPyw8PDGBwclPNsGDFWA6nvtCfz8vKkej+ZF4w6MhWJjiR76g4NDVnSsTg3brcbIyMjYn93dnbKPiJwRyp5JBKR3vKUjQzckMLL3qnBYFAc2pGREfmdjh0d23A4jM7OTjQ2Noq8JtjCZwFiud0sKhkPFtNe0jKI/081EkXX9Wfmzp1rAVr5/vLycrS2tooOYI/UeFtEf04zefT3xDPSPuvxuXdQbTYbrr76ajz00ENYtWoVQqEQ7r//fhx55JH4p3/6J+Tm5uLxxx/H2rVr4Xa7sXbtWtx4441wOBx44YUX5MD86U9/QnNzMyKRCGpqasSw4AZlLhIHHUsaGDo5e+nSpVi0aJFUauT7JiYmxOAHILmgubm5oqB4eCnoGcEkBWJwcBBdXV3o6OhAT0+PHOSCggLMnj0bTqcTHo8H4+PjqK2tRWNjo+QYUQFlZmbC7XZj9uzZ+NrXvoZTTjkF1dXVFoSVpfIbGxsFyWdVWirpeBqdFv7xwpNRNG3AaEWoFVY0GsXAwIAoTUbyKLBo+GsqmGmaePXVV9He3g6/338AGpnuOOGEE3D77bfj9ttvx5w5c6QibW9vr+QKHHnkkejs7LTk7gwPD+Ott97C5s2b0dHRIQVTdHsAnSOiEUrmLY+OjmJgYAB9fX1SUINthCisiPjbbDbpW6vLrUejURQUFKCgoEDyRZmXQaO1r69PaDu6cMro6KggxYWFhVLlEYg51Pn5+ZgxYwZKSkoQiUTQ3NyMuro6KQbicrmkbQYrRLMd0tjYGOrq6tDe3i7XYrSAxg4VGA1tns09e/YgIyMDF1xwQUo5wH0wFcNF70fOxcEYPnrY7XZUflzAhUMDN8uXL087P++TGomMnMkcylSIMc9qsvcnU5I6qjTZIPDD70n2mfhoQrL3xhsGiYYGkAhIxtO1NeOE+1ZTyEinpzzlM+jCZAS7mJ9os9mk+A738MjIiPTtpC6inKQTTDDUbrdL3jwAkUv8nflgjLzV1dVZ5s5ut2PZsmV45513MDExYTFUExlmpC/TSXU4HCgpKUFzc7Pcg3Y+DMPA+++/L9XRE61Z/Droz1599dVYt24dfvjDH+Lmm2/GZZddhlAohF/+8pdYuHAhMjIy8Ic//AF/+9vfUvay1tEgyiXdyxqAvJ6sl/W+fftS9rLu7e1N2cuaER2uW6Je1lzzZL2sfT6fREa5V3hOmJ7CayXrZd3X12eh58b3sibTi9HS+F7W+p6S9bI+//zzLYDQocpWDW5PBUzUe04zvNjLOpFc0L2s6cgeai/r+OdPBxRM9BqfXZ8RXU9ifHxc7p2RTr6PzzA2NiZpQExjYw4qr099Tto481AZKSUgR5r/6Ogodu/eLd/T09Mj9h3zPsPhsKTG0S4kAERZSH3OqC7rfjCqqiOp2hYk4MN9TceV7e54nngP4XBY7HGHwyGpFxxTdfaSsW84Dj/8cIu9zH8PP/xwbNu2Tc4emW60GROBF/wuynnKOc7HJ50jfTDjc++g/u53v8PatWuxZ88e3HLLLTjvvPMQDofx4Ycf4pFHHsHatWvhdDpx880346yzzoLNZsO6devQ2NgoCqKrqwumaaKiogI9PT344Q9/iI0bN+LWW2/F7NmzYbfb4XK5LO0+NGKh6agrVqyAz+fDBx98IAKdlAceUGC/8mSTawp6Rs/Gx8cxNDQkyoMGB/NWenp64HK5UFBQIBWH29vbxQAg/YeGDR3X3NxcnHrqqTj11FMxf/78pI4cG3/n5uaKEPrd736HiYkJyb8E9gvcySIcRx11lKUABrBfyOroMWlKTAQnEKAVIdeBwpTXGh8fxx133IGBgQGcddZZU1Je8f/XVWffeecdiaBT8NEo7erqwmuvvYatW7diZGQE3d3dksxP5J2OLAUv20jQSBoYGEBXV5eFBkxH1DAMKVQwPj4u/XBbWlrEECbFo7KyEqWlpTKfdEAJMPT29kpOmt/vh8/nkwJYNKJI1WVOs9PpxOzZs1FRUQEgRgfes2cPampqxKBmZJSOqdvtFqr27t27sWfPHvT19cHlciEjI0OQS649gRnu8XjDIxQKYe3atZb9ED8OBTXk+UoVWZvKME1TUOl4Q5wKZMmSJZ/Idx3MiDeKpvpZjnTQWp5ryglNtQISo876/ekatOm8Lx3HiNcaHh6WqKg2JAhGabYIgRbdloIGlja2XC4XRkZGpKgRi4x0dXUJUMMzog0S0v/s9lj/VdL+aZDRqGHPSzaDz83NBQBJA6Acokzq6uqSZ+b3HXPMMVK4gxTlZFEdpkEA+5lEJSUlAtZyzrSTu337duzdu1eeJ9U+5DXuvPNO1NXViQF5yy23YO7cufjWt76FQCCA3bt3IxKJ4Mgjj0RGRkbKXtZ0ppP1smZknGsBHNjLuqSkJGUv6/b2dvT09CTtZU0gzmZL3su6p6dHjMxEvazZxsw0E/eyJkU4VS9r7p1kvay1/krUy1rnbibrZU2HmoOf1zngU2VT8DNTkWEELxPlVi9atOiAAjV8H3tZsx6HYRx6L+uDkb18dgJUidgivb29GB4eRmNjI5xOJxYuXAiv1yuVrLn3aJeyyI7P54Pb7YbL5bKwnEzTxODgoOSccv0Zfezr68Pg4KDsU6YckOGn9w/Xi2eekXoGTgj4EbgjoENZCcBS7VwXOaK8BPYDi5Q5vL7u58u9QBub92+z2XDCCScAQFr6R/+
N6xLP3km0R2bOnGnRDZybFStW4O2334bNFksH8fl8lutqfQPsr/+Rk5Mj98+zpoHdz3J8MhbV/8Pj2WefFdSvtbUVixcvRmFhIR566CEYhoEbb7wRnZ2dKCgowDPPPIOGhgZMnz4dlZWV0luUeYukWpqmiddeew15eXloaGgAAGnToYULqZQ8ILNnz8abb75piXjRiMvJycHIyAhcLhcmJibQ19cnThZzkrSjq5Fp0i0YpWLFXuabNDQ0yH0xl8bv92NoaEgczSOOOAKVlZWToiq8d8DKkc/KykJTUxPs9lihnNNPPx1/+MMf0hbSLMREw0oPHVXUBmEkEsG0adOwd+9eaaLOnBs6KvGH1jRj+RBFRUVJ0X9gv1OijWIAGBwclIqZvBev14uXX34ZDocD/f39GB8fR0tLC3bs2AEgppyp6Gmc0UEkFYNVMqkII5FYSwFGVHQlN4/HA4/HI8K2tbVV8qOY9+ZyuWCzxXrG5uXliSFD45nI/+DgoDxjSUmJJTfS5XKhvb1dConQYOY+rK6uRk1NjeS0Dg0NiVJhQRWiqEBM4fX19aGxsVGUEJ+XRjQNMxpUzDem8iRtRQtYh8OB+vp6ZGZm4oILLsADDzyQdJ8djPKPp3IezCCdE9i/l+bNm4fNmzeLDOCeSoeu+n856Ixr+mK6QytgLStSvT9eGSdySvle/R2MfqX6Di0zJptTbRAnMzA5N8PDw9i3b58YExw2m02ibgRy+FyMptHZIHBEQIbXoePIyrGUQ9yHHo8HDodDityNjIxI2y2mS7S0tMj368gbKf6kirHom81mw44dOyQPllEIHVXjWLRoEWpqalBdXY2cnByRIfHzZZqxYk+9vb2SOsJqmMwF45xpWdvQ0ICRkREcfvjhaYFCmZmZePLJJ2V+L7roIvj9fixbtgxHHnkk9u3bh5///OfYtGkT1q1bh5tuugnXXnstzj//fFRUVEhU2uv1ilPFXta7du2y9LLm/ZL2S5ptSUmJMJl0ZIOOqd1uF91EJ9cw9reC4/knWMz+0eFwGBkZGRgbGxNHQhujZMSwWjsA2Z+jo6MoKytDOByWvtM8a6wkn5mZeQDQQjmclZWF/Px80TNkV42Pj0sHAe51ABbdrdNrtOzTgOP111+P3/zmNzj33HPxq1/9ShyKdOSGHpwLnZ88lc/y33j7QoMohx9+OLZs2SKf0zLymGOOwbvvviuF7qgDD4alpe8jHbnGwfvW4BcdZlL9c3NzYbPZMHfuXITDYakwzdSheLCJaT78Pu4L2hrU4ZrBomUM9zsANDU1oby8XOy07Oxs9Pf3o7+/HxMTExIRjHfKtP0Vr4e45vyhrcRzQZlJHaaZJfHAaLzO0XqYYCEBpPj3Jxv6frme6eh1p9OJwsJCidzq7zv22GOxa9cuzJ49G6FQCG63W+yu+DXQ9+DxeKSWCINfBA8/y/G5j6COjIwIrcg0TVx11VW444470NPTg4GBAUEeKWQrKirQ0dEBIMaX7+jokBYeHR0duPDCCxEKhbBs2TKh1HJD8FDoA8soid1uR01NjSgnvsfr9WLRokUi8LgRyZXXOUV0DJgDw+RpRkHpLFKY1NfXo7OzU4QI0SIKr0AggCOOOAJnnHEGysrKUjqniQxFHSlk4jmF9oIFC6aE0ESjUcydO1d+10PTffl/wzAEneP90enj2rW0tBwgxE3TxP333w/TNLF48eKkCk07xVoZXHLJJbj99tuFYk3Ds6urCyMjI+js7MRf//pX7Ny5E6OjoxKxJkpOx9A0TXG8WFSABsrQ0BD6+/tF0Jvm/jYOLElOI4E0LlZopvFnmiYqKyvF6BoaGpKIiGmaUn3PNE3k5ORIn0AqnYGBATQ3N8sckAZOI4x017GxMfT09Mj9Mpc2NzcXRUVFQvlhbm5DQ4OlGqBWSBq9Zhl6jRDqaES8wROJRPDnP//ZUsQhfhwMfYyA0MFGT3VkMP4ebDYb5s2bZzEMee+HSnU72BEP6qTrnMYbFJMZWPGf5ZjsM9wPNEhSvT8RYj3Z0IZ1snul40CDVBtRdJxZYIwyG4A4paQEct/yM3Ra6ADZbDZLhVCuBYGs4eFhiajRcSDgo6s1MlpmmqY4W5QRBOpIVaYhZxgGjjzySFRWVlqMK23gAZDcRW3M67kCYi0TKJepg5gHqddK51+xrUo644c//CG+/e1vS9TjkUceweOPPy7rP23aNKxfvx4rV67Ej3/845S9rNva2gAgaS9rzkGqXtac82S9rClzKVfje1kzdYU6O1Ev62g0Cr/fn7SXdfyakkLOc839kqqXtc61S9TLmns6WS9rDSgl62X94IMPCtNJO1cckwHmXD/O11RHuo5DNBpN2ct6wYIFeP/99y3MsYOhUN5yyy0AYMn1jdcF8aC5fj3eOQ0Gg+jp6bGwdQiMj42NSUCEe0rbHDoqznZwXq8XxcXF0taQc851mJiItUTifmdRRMpM5qiyuKTX65X2iJqBxnnlc+o15pywP7Pb7Raqscfjgd/vRyAQgNfrRX5+Pvx+P/Ly8pCbm4tAICBdCsjmYi0MnvH4aDR/T1Q0K51xMHnUJSUlB7xGPVBYWChrZJqmhTofr0vjv1fbTZ8lCM7xuY+g5ubmYmBgAD09PfjNb34jSumaa66BaZr40pe+hGOOOQZZWVl48MEH0dTUhNLSUtTW1mLatGmoqKhAfX097rnnHqxevRq//OUvcd1118HpdKK0tFTK/wP783q0s0ojQ28ou92O6upqqfpKJ5KOJQ8zUVdGpGhc0JihI6CL27CsPyNYGiFklC4nJwdHHXWU/C1VJDGR0OfGDwQCGBgYwMjIiLQlYQVS0pImG9r5KygosKCoACyGKAUo/865LigoQHt7OxwOB/Lz85GVlYU9e/ZI5JA0KsMwpG2Aw+HA0UcfjQ8++EDug88Wf1/67zNnzkRbWxvOOussVFRUYMOGDfLs7e3t2LRpk6CWVBJUCDoXaWJiAn6/32JY9vT0yPewQBEdYK/XK4aCYRjo7e3FwMCAUNF0DgcAaYkQDofR0dEhRjD3SWZmpiTbc42Zt8G9oqtG8/pE7Vi4gM9Og4OFqwi21NfXY3BwUBgANJCAA3MI9ZzzmQKBgCCJdL55X1Ro3P9PPPEEvvSlL+Giiy7CY489Joh6MidrMsNE04Km4qBqB08/b/wzEpktKChAR0eH5fXPasTvexqNkw3KNe5tntNkRqNel0TOzWT3x89Pdm/p3Lt2ACdzeinTKyoqJPrJZ+BZJdWe551OACN8nBca+6SQ8m8AJKeLr5MdQ2CMTp52/HgPfBZGKemkAUB9fb0wLABI6yjmYWkAjWyh2bNnC7jK81ZdXY0tW7Zg0aJFUjWSRVH0nBJ4YrqA2+22RBG1LNBOajQaxebNmzF//nwUFRWl3BuGYWD69Om48sorkZmZiXfeeQfPPPMM1q9fj3A4jPPOOw9VVVX4+te/jjPPPBN2ux0XX3yxJfeOvaR37dplAQvmzZtniSgTdM7MzLS0fKHjEo1GUVdXJ04XnU8WqdLthkjr1VFTzhuLw5B+qI
1yv98v1zeMWKSTPWcJAOiUH0av2cua+5IAB/OPc3JyhB3Es9ne3g5gv2FLm2XatGkSvWlra4PP55O+29xDfDY689oZ5Hy98cYb+I//+A9ccMEFeOKJJyzgdyqnMx605DXTjVJxpOvU2mw2oUQznUd/V2ZmJqZPn44dO3ZgwYIFwoab6uju7sZdd90FwzCwZs0aS94lYC00CRyYlkCHkwBvKBTC9OnTLe9hwIO2hy7cZrPZ0NjYiHnz5okNQRuGLAx+L69Hirdmh/X394sNVF1dLcWHyFbo6OiAzWZDcXGx9G2nPUF5lJWVJTJF95nXLBD2aNWgHv/OFCN+hsACAzzx+5H7Npntd8wxx0ypvYzWcenqUY5oNCp5p1xjndKlwUeuqY6IaluCz5OdnS09kTlfn/X43Duoy5cvR0ZGBubPn48rrrgCP//5zxGNRrF+/Xps3LgR4+PjeO6551BWViatKq699lpUVFQIEu7xeLB27VrMmDEDDQ0N+NnPfib90nhQGIllpIkb0maL5StpOlMgEEAoFMLevXstSplINB0IFkPSBTZIj9A007GxManQSkdBHwY6IkuXLpU2NvFIZaJBo0cfLm1wl5SUoL+/36JUfv/73+Pcc8+VHFjmOSYbmqpH2jKVoY5+8HcakZzLkZER+Hw+9PT0wO/3Y2RkRHrHkTpFo4AGdDgcxqpVq3Dddddh4cKF2LZtmwXZjX9mPUwzRmvOzc3FmWeeiezsbOkRSIMjPpIRT8f2er0SlWNfOa6HLsrDNjI0DFktkXuCVGYgZmg2NzdLrysWUqLQJT23qKhI1oWgCaMGWumwUFFmZqa0PGhsbITP57NQBnNyctDX1ycoptPpRE1NDcbGxmTPk2LPNSRowD2m51rn6WmKkN1ul8Iy8dEzCmgAeOGFF3Dqqada0PhkiiGRIcP51U7tZE5LqmvHf0e84jNNE1VVVejr67NQKadCq/0khqYzxf+eztCRV41AJxsaHU/3+vEyK9W66jM82dqlcy9aTvNMt7e3Sx42ZSUrtrMoSEZGhgAsWrbQcAiFQvD7/RgeHhZ5H43GqmsbhoGOjg55X2FhofTjJP1OF98LBAIwTdNSoZLylDIkGAwK7Ze6ZmJiQmoucL5I++OzH3bYYdi1a5dlXy9ZsgTvvPMOFi9eLAVSyMqIn1sgBkB2dnZaKsmzaqjf77fsF873Rx99hFAohLKysqT7iUYl12bJkiU44ogj4HA4cN9990k0taKiAmeccQYMw5BcWLZTo+PZ1taGaDRWhI/zSgPY5/OJgUrGFdeSHQG4VpR/NPJp4OviUkVFRejs7LS0Kuvo6BAgj/fmdDpFh7POAtksLJLFZ+faskhVf38/CgoK0NTUBJ/Ph9zcXHR1dcl8sZc1HVUyxbgXyNyikzt9+nR5ntHRUQuwwaiwZv/ovOZEeyIajeKee+7BD37wAwtFdzK5kEy+Tuac8rkTvS8dJ2LWrFnYsmWL6CDN4CooKEB/fz9aWlowbdo0S7vBdEdbWxvsdjvq6uqwevVqOBwObNiwQdhMtKV0FNVutwvo09fXh66uLlRWVmJ8fBxer9eSC61lNIEKBkGA/Xmn/C7ud36GhZb43YPpDAYAACAASURBVLRfCNS0tbXJnpk1axaCwaClJyojqwBkn/Ks0dbQ0XZtAzDiC0DumTYTP0OHmfIOgNwzZRsjwgTcmcJHB48U5t7eXkxMTIgDz0AD9+lkekWz8A7WhjjiiCPESeU66N7EtB3JzIkHTvgZAAIU9vf3H1DU9bMan3uK78svv4wtW7bg4osvRmVlJS699FKhY1522WVoaGjA5s2b8dhjj+HOO+9EZ2cnDjvsMPT39wtCSke2paUFy5Ytw3nnnYfvfe97uOGGG8Q54abgwaHzwAOho5S9vb1obm62cPxJu2EBAgBSaGFwcFCcEzZMnpiYQGdnJ4aGhhAMBiU/kQeROSLHHnssTj31VHzlK19Bfn4+gAOjJHrQyZ3MUQOsVcLY3Lmurg7RaKywx9e//nX5vvihDxy/y26P9UTlcxMN5n3Ffw6ANFV2u93o7u7G4OCgBcXl0NFDOvSZmZlYtmyZzL8WOpM9s2EYmDt3riB9ubm50tNL90OlAUhjkH0CWVyAeQ1UBAQw2BfLMGJU5sHBQYyOjoph5HK5kJOTI/nSrJycl5cnxgGNRafTiYqKCskZi0ajaG9vR3NzM9ra2kQZTExMwOfzobq6GuXl5cjOzobD4UBjYyMASJVmom8XXHCBlKEfGBjAzp07JY9N50uzmAERaCoTTWHiT7zyMwxDojxkFOh8Eu4FCudnnnkGkUgEF110UVJDJNXgek0F7Uw00jGU9BlcsmSJtCWaahTg/2JMJarJuZoqpS2VHIp/H/doOmMqhoDed5R3qd7L88g+0vGRDK0L+DoNHp4xzbTheSJtlOh4a2urnGEaJACkUElPTw+AGNOC8pLFMHRhEuas837I5jEMA0VFReLUMHpLOXziiSce8PxML9C6bMWKFdi6datcU1MJ9d7h74WFhUIddDgcArDxefg+HXnYvXs3du7cmXR/XXbZZdLuTFesBIDvf//7WLVqFb7+9a9jx44duO+++1L2su7u7hYDN1Evaxq+qXpZa+cuEjmwlzULz0SjiXtZM+84VS/rnp4esTWoP3Qv62h0f640cGAva5vNhqKiopS9rHk2kvWybmpqQkdHR9Je1lq3xw/aGADw4Ycf/j/fy5rjUHtZ//GPf0RxcTHuuece1NbW4uGHH8aVV16JDRs24Hvf+56AYQQUsrOzJZjR0NCA8fFxcU7pwFFu0Tbk64y6abba2NiYpUYEa4OQBss9NDQ0JB0CxsbG0NzcjI6ODhQWFqK6uhqFhYUSWNHz3djYKFE8zg8BOaYuUL7w3FCmaqeM0WJgfzqDTv2hLKc8oHyjLKbco6Pe2toKv99voUbz7w6HQ4ojaec0lb6gPmAAZqr7lPcMxCr7clCuUGdpxglTziYDbdgi8O9hfO4jqAAwffp0nHDCCXjggQfw7rvv4vLLL8eqVavQ1taGcDiMc889F2+++SY6Ojpw6623Cg2K/HjTNHHDDTdg9+7dePLJJ/Hmm29i3rx50u+tr68PhYWFQsNlWXg2+aWRrvn1pJmSdmuapiBRdrtd0GgiXDREmPOqqQzAfgoviyQdddRRlshmqqGdPl3UA0gttOnwmaaJlpYWzJ8/X5zjUCiEefPmHfAd/B4gcYSFbXe0Q6qRMm388LtY6IOCjxE/CitdEVJHVVatWoXrr78eCxYswPbt29MSInfddRd+9atfYfv27SgsLMTQ0BDcbrcYhhSA2dnZAhzQeNM0bSLgWrETbScqxogKo/J07PLy8izGfW1treXeKfgZbS8rK4NpxujfLIhBR5pGaVlZmexRoodEgJkHV1BQIHtw3bp1AGLCs62tTSIGjDrYbDYxpm02G4477jgxRp9++ml5Jr03dLScim1oaEjyvMgioIGoUV2+PxqN4s9//jNOP/30KRXN4DV0hOtgRjpOcbK/z58/H5s2bTqo7z3UkcxZT
NeBJLMhnSiEVqzaIU/2XakYH/GAlf4OLf8mi46mugfe5/j4OILBoMhrAJgxYwbq6urgdDot4AkdB20gMoqm740RLYKYPOP9/f2iXxjBi0QiKCoqQjgcRl9fn4Wur6tTch/TcC0rKxOAFYBQ9FnRcXBwUOieuuhIvMyOp9xzXpYuXSrOGynIzHeMX0cgVgGeKH5GRobQJvfu3StOsP5uRmZaW1tx0kknHbCWeXl5uPrqqyXye+edd0rFT0Y85s2bh7Vr1yIzMxMvvvgiXnjhBbz88su48MILMWvWLLS0tKCiokLkybZt23DyySejq6tLAE9SsiORCBoaGtDb24toNIre3l7YbDbLvgD2R3bpiLPqJteGhbQo6+nUOBwOoWTzu4PBoDBi+EzxRRwpe6lvIpHIAb2stb3g9/slRYnrr8EO5sGNjo5KL2tGTFlHgNFVfi91C/c27R+dX6/XlUWz7rjjDlxzzTVyb8lkgQartJ2SauhIJxk7hwoALlu2DFu2bDkgosnz8Pbbb2P58uVTvm52djZWrVqFhx9+GOvXr8dHH32EhQsXYsGCBQgEAlizZg3q6+uxadMm9Pb2Snu/aDSK0tJSkTOUR9pJ1YAuGUmMpOncxKKiIosupHNHWcQ9zErmZCcQ2NFF3yhLWltbRX/v27cPFRUVGBoaErvWbo/1dM7OzhYgnvuGVHXeJ+WhtldpN9HG4BzYbDZLHRey2eiEulwuYbwMDAxI9H9oaMgSZPD7/bJvqCsm0/U6Wj3Voa9rt9tRUVEhzBheV9uMBDGzsrLkLOsRrz+9Xi8GBgamfF+f9PjcO6jsI1deXo7DDjsMlZWVyMvLw/3334/7779fFPGCBQvw+uuvw2az4bnnnoPP58PAwADq6+tx/vnnY82aNVi+fDmWLFkCmy1WLbWtrc0iJCkUWO2O1zaM/VX77Ha7pcJs/GGgg0qkTPdImzlzJjZv3nyAk8YCOitWrBBqlkYYJxPG3OzaQUh3eDwejIyMSOP40dFRNDc3C1JOQaGR7VSUXyJjutiNpt/qKmp+vx+NjY0IhUJYsGABpk2bhtbWVgAQQ0JTcXQuIRtFO51OLFu27AAaRbI527ZtG8477zxZuy1btmD69OkWh5L5Qaycxj1IRBCARMuYC0IUnQ4jKSXZ2dmyN9xuN/Lz80UBsP3M4OAgPB6PJWLPPcD8N87bxMSERDdZNICI/cTEBFpaWsTBZjVRFsug8RaJRPDKK6/ghBNOwHe+8x0Eg0G8/vrr6O3thcvlkvYTxx13nAA12ujg82ghqvccFaBhGILu656SWrHymXS+9R//+EecccYZ+Pd//3c88sgjaRXaAPY7MlOJnsYbTOmMZPvLMAysXLkSf/vb39L+/k9jpBPp1JHvVEMjwEB6kZBUjq+OYMZfg/Ij1T3Ffy4R+kzknvR9yifDMPDhhx8KfT4ajUofPd43r6GjBSwqwygXDT89WHiNhhedG02dpK4hGMTWItyPjJ5SD4VCIQSDQWHX8B4HBgZkrux2O44++ugD5oFnkkAgc+GB/QVdtIylc5xsfemkGoYhLUtycnJQW1tr6bnNqA/v7y9/+QtOPvlky/VsNhvKy8vFYL3kkktgmia+9rWv4aKLLhInjU7mCSecgC9+8YuWAnTsZc22aexlrWXj8PAwdu/eLb0Tacx6PB5L5JNrbhiGRBcp80ZHR4XySGM/EAigq6tL5CSLSmVlZaGjo0Mo3cD+QmpkonAfMdJOA55zT4dR672srCy5V7agoW5lbikdua6uLnR3d0tkia1mWOMiHA5Lfu3Q0BD27dsnlGTmEHKP8z6oQ3RPX93L+r//+78TnlXtZE1FRms7TIPVhzLs9lgv6/r6+gPsLdM0D7qXdTAYxIknnoi7774bxx9/PKqqqlBaWoqJiVgVXtOMsbCYbsNq0nSgTNOUPGNtj8anOxCgZvRdM9VIu2ckPp6N1dHRIUD8zJkzpcWMttU0IG6z2aSYJ6OyNptNqNAlJSXYt2+fAB+M2A4PD0uKGCvV6miktu/09wKQZ+I9aZtIg4bsD+xwONDZ2Sm2NPcIg036XKfSVYn0STr6U49ErAOyz/Q86Ehub28vioqKMDg4KAEiDfRTD3IuyIz4rMfnnuJ71FFH4ayzzoLP54Pf70dlZSXeeOMNTJ8+XQ4Pf0455RSceuqpGBoaQmNjI/r6+oQa+bOf/Qzf/e53ceONN6KpqQkZGRnYs2cPDMOw9GLSdF6NpvNQ5ObmWlAeopM8MF6vV3pQ0ikBYvz7ww47zBI59Hg8OProo3HmmWfiK1/5CgKBwAEG2WSCmEYPD1E6B0nPWWlpKQBIRbdoNIrnnnsOpmmip6dHKvNq2jCvkWwsWbIEpmlaqg5SwNDIyMvLE2OBz0jBR8NQ50bow8x7GB0dxW233QbTNLFw4UKL459s5Ofnw2azobm5GePj42hoaMCzzz4rjh8LE/AadPLYWF2jcLrnnq5iR2FPw4HCyW6PVYLu6uoS5JR7Kj8/H/Pnzxd6FY3gSCSC3v+fvfeOjqu81oefM0VtZqRRHcuyZcmW3LsxxZhmig02mEtJCGkOpPGFCwEuYHLpBoMpYZGwQhJyCRBuIOQGCIQEbLAxxmATx7h3Wd3qZTQqI41mzvfH8GztOR5Vk8Ba+e21tCRNOec9b9n12Xs3N4uC43K5MHPmTMyZMweFhYVwOBxoaGhAeXk5amtr4fV64Xa7MWrUKBEeGnbj9/uRkpKCf/zjH+LtfOSRR5CamoqcnBycfPLJWLZsGc4//3wkJycfV93SMAyce+65MVAzrhn3hPW1YDCInJwcKdxEoUUDH4itGtvb24tXX31V7jlQgSNGe3mdoXrXec742eEoTPGuRYpEIiPyvJ8oDXTure9ZvbsDzRfnRXubhzJXWqgO5TsUxPzcUBwG8d63jrGrqyumR57D4cDRo0fFuGBdAO4FeuKBvrQCLSNY3E5Da7XsIM9KSEiQ9AX2TaZiwj7TjHSRV3R2dqKtrS1mPrKzs8XppWVSZmYmTNOMaTHCM0CeznnQhgEQhTHyebWDgAYAlUZrtMFKbHWinbJjx46V2gyUJ1o5BKJ55pqvaIeX3W5HYWEhioqK8P777+Oyyy7Dt771LSlCpq8Vr5c15bWGBOpe1q2trWhoaBiwl3U4HB6wl3VXVxfC4XC/vax7enpQX18/YC9ryrr+elkD0chyf72sOQcD9bI+cuQImpqa+u1lTcOzv17WdIKS12t4u44Emab5T+9lze+PtBq7lUzz+F7W+nc4PLJe1r29vVi3bh3uuOMOfPTRRzj99NOlQBDPucvlkmKXjM6TH2lYKx232iFKY93pdMo+0VVg7fZoAbWEhATpJsAq0uXl5SgrK0Nvb6/AeBk11ZHW6upqlJaWoqmpCW1tbWhqapJoH3mk3W6XmhXcMzSsicZJTU1Fdna2OM54zjSihs+g06hYPM5ms8XwbwZLDMOIiS63trbGtMvx+/3C3xwOB84666xh7QvqLTrgMRzqz6FeUFAgckkXOLLb7fB4PFi/fr3IA11Z3eoQ53x+
Gejf3kAdO3aseGq6u7uxZMkSzJ07F9u2bcMLL7yAlStXioALBoN44IEHYjaUy+XC7bffjptuukkU+kceeQQ//elPsXfvXvT29spm0B4qK9PSArWtrU0MEpLT6ZSKvOFwGLW1tQgEAgCiDIRRL0a2CgoKsHTpUskjGmzDxXv/82DWjJbRw2y3R6sSA1Fv4IIFC4Z9Xyb8awWSa0ShumvXLqSmpkrhB3r06A0j09QwX61oUZlitc158+YN6dAyIsk+g8eOHcPWrVtF+AIQw5OvUZnQ0QwKCb7PqAiZG/cVhXhnZ6fAZNrb29HZ2SlGK/ODuAc1xEpHgAoLCzFr1izZo7W1tTh8+DA6Oztjyq2PGTNGKoYyB5X9fhn9CofDokCnp6cjJycHl1xyCcaNGycCpr+Il9frjYmKaE+fNdeYeVRW5wkVYe211bR27VrYbDYsW7ZsUAGhI6eDRdusfw9X+PC7+ntWJZ/K+hdN/Z0H6zMP9P9wkBzWa3A+hqKY6ohsPD7c3z3ijZ3njS2UaPQxKqmjOCx4RoMnHA4Lj2Ckje9pRU4/k2maEtmsqKiIUaS1kUckCGsj0Ng1TVOUMCpyHCcVOkKCNW/lNVjcjRFIPTaO2bpnJ06cGHNeyM/YniUSicQo7/HOCaOnfE/nMDJ6bF0bfm/9+vXHjRPo27ORSARZWVmYMGECDMPADTfcgDvuuAOVlZUx+4OR5JSUFCl+Q8SPaZo4ePAg9uzZAwBSeI5F4jgm3dPV7XYjJydH4K/hcFhkDHk8odWJiYkoKCiQFAbys5qaGsm1o4GRnp6OvLw8WaPu7m5Z/0AgIC3zHA4HsrOzkZGRASDKPzlWplXwXBmGgdGjRyMhIUFyWtva2lBTUyPniAZ2WlqaoKSY5nHo0CFUVlYiISFBYJpaOe/u7hZHKfUa7UjiWWNUtaSkBHa7HcuWLeuXD/fn7BiMhoLUGIysDjPDiPZH1dE5/bmRGAH33HMPHA4H1qxZg7POOguFhYVyxnU0nO1VDMNAVVWVBDJoVHGtOef6u9xDlO/Hjh2Tvcd1Z3cAmy2a7lNTUwOHw4Hc3FyMGjUqxjCkjtXc3CwVrFnng84aohXIxxh5ZxTeNE1J72LKFNcrKysrplWiLlRGY5SRYAY2rGhAzccMow/RSJ7JNeX3+T+vEU836Y+GW4thqGQYBiZNmiRj4loBUd6Zl5cXU2RNo8p0fq7Wvb5o+reH+G7evBl5eXn44Q9/iJ/97Gc4cOAAvv/97+Ovf/0rsrKysHnzZlx++eU444wzMGrUKCxevDimtPP8+fORmJiIzMxM3HTTTeJ5Sk5Oxh133CGQXQoIbnoeloSEBMkVYVU9bnSPxxMDH+jo6BCFxjAMgWC4XC4kJydj7dq1WLJkiQg4fbgGIm2YabjcUJm1VTG3euT4f2lpKSZNmiQRg2AwKN52fWgHg0iQeeo8Uj2Onp4e5OfnCwPmvLrdbuTn56O8vDxGIdAQNJ2vyLVav349zjjjDFEorBANziHHHolEcPjwYfzP//yPKDNaEDDXlDBvep8JcQEg8GJCrahIUNGjccqeYfwM4d6MkrLCHJ+5s7NTKk9bI5QulwvHjh2T/rBcJ7vdLi1v+LwVFRVobW0VBY4FD7inQ6EQ3nrrLVx22WUIBAKYOHHigNBtTZwrXdQrHjHSxM9kZmaipaVF1pw5F0BsKxiubV1dHfLy8lBcXIwjR44cNz7u5XiQmnhk3Rf6HFgVeGt0SV+DShnHbP0NRCtu/ytJQ++pYGmBbjUWrM9rfU/TSKPL1qjoQGTNDxruPfldKj/V1dXIyMjA7t27YzzokUhEIieMXE2ePBklJSVwuVzo6uqSyuZut1uq+dKY0PPW3d0t8NpwOIyMjAyBS9JZZYXOWeWMzsGio7S7u1scTORNNPiSkpIEpUFeRL4IIOZZyYu1EUpyOKItvVhlmOvFZ6V8cbvdAkuL57iw2WwiG8PhsLRRaWlpQX19fUwvTuv5WrduHQBgzZo1KC8vx5/+9CfpYa7vM2bMGNhs0YrvP/nJTxAKhTB79mzcc889MtbU1FR8+OGHUsF048aNCAaD6OzsFJgqcHwVVf5m5IJ5ryzWp40E8rGsrCwxhDXiJisrC01NTQID9ng8GDNmjOxnFsujcUQ4uWmagrLh/k1ISJDol9PplK4EbI3E/FBCKTs7OyVKSzmQlJQkhZEIM96zZ4/INO1II5/Qxg9holSYucacRzpu9Nl99913cc4558RAGK3ndCTE9RuJgaoRXNoo4FhmzpyJ3bt3x/BIHUEbDq1atQoPPvgg7rjjDrz33ns4++yzY5zrrLNBPcBut2P06NFilGnjT7en0bKF60PnRm5urqDVaNiaponq6mopeFhQUCCfoeOd81FTUyMGK/dkUlKSjIE8UKdbtbS0SFtBjj0pKSmmECTv3dPTA7fbLcYy9wUjsTROuW+1I4571zAMgRDrVCGgTyemPs29mJycjOzs7GHpy5y7kdBQnC9OpxOjRo2S9k/6XuPGjcOuXbukVQ/5O3kEP68dRF80/dtHUPft24eioiIEg0F84xvfgM/nw09/+lP4/X5cc801mDNnDjIzM3HkyBH8x3/8BwoKCvDoo4+iqqoKfr8fZ5xxBj799FP87Gc/w3/+53/ivvvuw44dO7B7927pGakr4QHA1q1bsXfvXgDRza/7QxmGIc2O09LS0NvbK5VUmcMIQAoeud1uFBcXY9myZVi0aJEo5EMlbkTt0RtMedPeaKsgtv7N67FSK4Uz83MJYR6ON9EwDIHH0Gjjj34ezimjyzpXhkKRkQXtRdRRglAohHXr1sFms+GKK66IWSc6CRISEjB+/HgA0VYJwWAQx44dEy87c8godLu6umRPeL1eTJ48WYQJGWc4HEZTU5PkIzEyTqOW5dkp0HUZdQ0Hq6mpQV1dnXj3U1JSkJGRgbFjxyIvL08q03V3d2P79u2orKwURcTr9SI3NxcFBQVITk5GZ2cn9uzZg9LSUsmhpaDVhqTT6RQPummaUt16qM4SIFpkgkYRjWgqw3qduZ5+vx8ej0deo9dUtzHgmlJxv/vuuxGJRHD++ecLM9Zj5DoMl1HHY+79/c814w/3lTaW40V/TgQyPBKiI4D7VK85n8Va1GogJAS/H8/ZMxTSBW76Iz1Hep/GM6it19aKBB07vb29KCsrQ0NDA44dO4aUlBQsXrw4xsFHAxAAAoEAgsEgduzYIQo90Ac3I08nUWEMh6Mto5iXzrm32+2ieBI9w77DAGJSAYLBoERQaASxXQPnOiMjQ5Q1XayNRl97e7t8hzKKKQm8Rn9n2jCi8GHd65Hn1uVyYdeuXaL4MZfNGvHkfmAxEw1vZs4tDeD+IrAA8NZbbyE7Oxu33nor1qxZgx//+McSwdQOlqSkJOTn58f0sr711lslQtja2oqOjg6sXr0atbW1AlNklFm39uE6ezwegSE6nU40Nzejvr5eChvS+KKyy0r6CQkJaG5uxrFjxyQSBECKDtEI5FqXlpaitbUVPT096OzslJzA7Oxs5OTkxDgS6+vrpXC
d7mXN3FHuKc6LbkWSkJCAzMxMZGVlISMjQyDtW7ZswcGDB8XZq38oj7n++iyapinXoSOC+5DniWclEon2srbZbFixYoVcZyC+MRjv5rkaLjxYO6a1o9dqeDLyN3/+/LiOyOGS3W7H3XffjauuukqMKDqnbbZoj1L21qXBV1paGpMCQGeC1fDSRiIAqV2hi1t1dXWhrq4OR44cgcPhQEFBgeg8nBeuV1lZGUpKStDe3o6mpiY0NDRILQwajTqXnC2NnE4nWlpaAABHjhyB0+lEdna26KjkRexq4Xa75XlZAVunHrS3twtqhPek4ctzRTQCzxPzc7kX6Sgj0oRBglNOOWXIsngwufN5UVZWlsga7SR3OByYPXs2/v73v4v+S+i01VmuI6pfJP3bG6iJiYnw+/3iibzxxhtRXFwMn88nicRjx46F2+3Ggw8+CJfLhZycHKxfvx7f/va3cdttt+EHP/gB2tra8POf/xx33nknzjzzTGRnZwvUKhwOCzzB4XBg7NixUsLeMAzMmjULQGzEs6amBjU1NWhoaJBcEvZm8ng8KCgowLJly3DhhRdi4sSJw1ZataKpFayhkFWpjGekamISP1ucGIaBv/zlLzAMAw0NDTjllFMGjWpogWAYhuTnsAos0JfTpRVpbZDS2KECposA6OJD2mDlIf3kk08AAJdeeqlcNzMzE2PGjMHpp58uUOWxY8eivLxcGkuHQiG4XK4YyDbzSpk/unPnzph+e1SQaSynpKQgNTVVeiFq5sF5o7FsmqY0wWarGioZGRkZGDduHMaPH4+kpCQxcAnDZsW8UaNGYcKECcjLy0NSUhL27duHQ4cOob6+PkZx4ViTkpIwb948MZ4JKaKXsbu7G5dddlncNdaQKO15Zl9DriMLi+n9xr1EBRyIQugorAlz41j1fHF9a2tr0dPTIxWm441rKBQvihRvD1vfj2e4akWuv2uONFIwUtKQKP0D9PXa058bzANLRUbPb3+f10YQFUqe14GMYPICPXZ+z7qu2ukE9EXCdC9gRhPy8/Nx7rnnSqEXQvB53YSEBFRVVcme7enpwbFjx8SgJz+3oj/I44nsYCRTO1i0gR2JRAStA/Tlj7EQDvkJi68x8kqFRFdf5VklDwQg0VogajDReNI0kBJjmtEevlaovmmamD9/PrZu3Sr/p6WlxTgV9X7g2OiwJd/3er1wOBzSp7k/6ujowA033ICNGzfi7bffhtvtxv33349HHnkES5cuFcNL84XU1FQUFxfjyiuvjOllTfgkiw7RmcTCd5Q3brdbxtfR0RFj7Ou6Bx6PB1lZWTGV3hsbG9HS0iLrw77m+fn5SEtLk6q4LS0tKCsrQ01NTUyun8/nk1ZgdKDSKO3s7JR56e3tRUZGBnJzc6VlTnl5uRSwAiDFqUzTjOllXVpaih07dmDHjh1SrE8bnjrP2IpKonyz2+0xPRcZ3eNe1fJXn/V33nknpu6BlV9az3W81ygj9GvDNRj703viOd1sNpvAL0kjMVaSkpJQXFyM1157DcXFxVL0UQcumBNKCO3UqVNFl6UDgroPx0gDlEZrIBCQfR8KhSRIwuJb48ePR0ZGhhiL/H4oFMKhQ4dw+PBhdHV1obGxEe3t7UhPT5fcYw37JX8mBJx6EfW6pKQkJCQkSCssFkiyVuDlHHMvsFYLDV+eU0ZfdQtH5pLzPGt4K/UHHVGmg4xIsaGQ5v0joeFG3IuLi2OivXp/n3LKKdi4caPwdwYkgPh794ukf3uIbyAQwObNmzF37lysXLkSNpsN3/3udzF58mRs3rwZO3fuxIwZM5CSkoKvfvWreOCBB3Dvvfeip6cHy5cvRyQSbRYeiURw/fXXY9euXRg3bhwuueQS6ZXEQ8RNQiHHdGdCbgAAIABJREFUQ71r1y7ZgN3d3QLz1ZuypaUF6enpOP3006Vam/baD0S8DoWGVtj1Z4ZCOuoxVIaenp6OyspK9Pb2ClSIBR66urpw/vnnY+vWrXGVTQ2f0UqvaZrweDwxEVh+Np4Sy7/ZnJrFIrg+uiAF0OftcjiijdjffPNNnHzyyQKNy8zMxCuvvILk5GQkJydj2rRpAIDbbrtN8sA4LjJDzrEW2Owrp6MJ9MAzYtjY2CjGJNsP6GqHHDMhY5zjlJQUpKWlSRVKmy3al49J/q2trRJBYTEPenwPHjwo96GhSKKh6PF4cNZZZ4nhHwqFcPToUTQ0NCArKwuBQAClpaXIzc2V3BDrPuOca2WDe5sQX31+9F615sWEQiGpDq2h1brwjPa622w23HvvvXj22WexaNGiGFTDcGgoxqmmoShDptlXft56/X919BSAeKitUFlW/QQGroqr1xYYXs6pVcDyenyvPyLP0/Onv2eNaPAzOh+qurpa0hF0bjSfVTu4eB651z0eD4AoqiI7O1sipPwcnVaMKEQikZhCIBwTox38jubj/KGyxvOq6w6Qb9CIokHL77FFCXkbo5QsnsN5Oeuss2IUsqE6RWfOnInDhw/HRIttNhtOO+00fPTRR5g/fz4ikYj0iraiC7g+NBzpdKPx1N7ejrq6Ovh8vuP2DABs2bIFDzzwAB577DE4nU6ceuqpKC8vx759+3DJJZdg4cKFSEhIwG9+8xvs27dPeKFhHN/LGuircKrRN4RE9/b2wufzSRSTirS1GBzzNTlOVldmagKLw6SkpEj0iHtPGw5M8WEvaw3vDgQCaGhoEEWVMjA9PR2ZmZkx1YvLy8sxZcoUtLW1ITs7W6qXsmpuR0cHWltbUVNTI/snISEhRk+hMczIEyHZes2t55JnMisrC42NjcLvaaDo9adR8+qrr+KCCy7AihUr8Nvf/nbY/JDrdSJwSz7HYO/r67MeRiAQGBb/0xQMBrF3715897vfxUsvvSQyNRKJ5pPv2LFDnMN+vx/d3d3S5pDOKuZI68gxz5N2YkUiEXF0Uy8JBALIz88X45b6lu5fyr3JPr2EwlsdhGlpaQLDpSOeveLZYsnr9YrOQ0co9wi7IOion74W+0aHQiGBBmvdgigEINYhCUAchJxb7memRrlcLjz11FP4zW9+M6z9N9T9xnOho9vDIdM0MWPGDKmDw2sC0XO4cOFCbNq0CWeeeWYMog848WJjnyf920dQq6urUVFRAbvdjtraWgSDQTz99NN47bXXsGHDBimOEAqFcOzYMVRUVOC6667DK6+8gvXr12PWrFniJc7OzsY555yDiRMnYt++fVKF1zSjeHgd4dORL6BP2DNaRC8fvUvTp0/HBRdcIN7MwUjDTXTEie8Nl3itkTD0SCQixS6Ye0lvGJ+ZyoUVKqPhM1aaPXs2gChsiZ5bq3FhnQfmfxFSRANSt3jROQiRSEQ85Lt27UJPTw9GjRqFZ599FrW1tdi3bx+2bt2KV155BQDEQ9jR0SGKLgUCoX0ck1Y2CwsLMW/ePIwfP168u2Qafr8fjY2NCIfDSE5OxsSJE8U7SIZLpYDFtIA+yGAkEkFjYyMaGhpQV1eH9vZ2dHR0SJTA5/PJd2pra7Fnzx4x6jgWu90ubWyWLFmCiy66CGeccU
5YmCI5JO44ENmtPT06UHIXMvWIKcSL2KTKuHKyMkah5jpDD/rLe3V4rWqFRB4ETJchW94eKis0Vkbvr06ZhzvE0MHViWzC8tLUVLSwv++te/orKyEt3d3Xj11VdhNBqxfft2Mb7ZHobViOvr62WsjDrQ8COlgIftyJEjpTfZkiVLBEGKiYkRChyLw7S1taGlpSWshyrnibmtACR3CwAmTZoEs9mMa665RvquGQwGDB06FPHx8VIFlUappmnYvHkzPvroI5SXl+Pee++VFkJ+v1/Kto8ZMwbBYFAivwcPHpTob0xMDBobG5GbmwtN07B//37MnDlTnM/IA1jNT6FTRAeU70SlA1GJmUwmKTSkaaF+cxkZGbjmmmugaRouvfRS5OXlifGj67pUheQ9H3nkEQwZMkSehRGmhIQEOJ1O1NXViWFqs9mE2gRAKFFqBNftdsNutwtSx76LJpMJnZ2d+PDDD/HWW2/BaDRi1KhRmDdvHgoLCyXaZLfb4fV6UV9fL3RkVr8NBoOoqqqC0Rgq8JWXl4eamhpcffXVkvuh9hhUIx1cb6oBSSNkz549uPPOO7FixQrccsst2LZtG+6//368+OKLeOmll3DjjTdKD0QWJvryyy9RXFwMi8WCiooK2b+s9KiCUXTySdH1+Xzo6uoS50A99FTjiPtQRdgpBFMYIWxpaUFDQwNiYmJQXV0NXQ/lvLa1tZ1WS59vU4LBoBRS27FjB26//XZYLBY88sgj+PnPf47e3l489dRTqKmpwSWXXILY2FisX78el19+OeLj4/HGG2/A5/PBbrfj1VdflarQsbGxmD17NhwOhzgRjDQySs0IMw1IRlJnzZqF6dOnY8SIEQICECBk9NTj8SAhIQE5OTnynrhmGFmhsUgjJBAIiL6vra2F0+lEV1cX4uPjkZeXh6FDhyItLU1AJ+avUQcaDAbpN2y323HBBRdg4cKFKCgokKgLhWslJSUF0dHRcLvdQk3XdR0jR46U70Uagn3JxIkThRpP44hgqXou0RhkHQA1b5pRANWooi7jnn3jjTfCigydrqhgbl9mQV+GU3/RDc5JX1GWSGdV3bsAwgptEUQlzTqyHYnay5rSXy9rztNAvaxVoSPDaBEj/kVFRf32sh42bJg4Uv31smYVU66ByF7Wzc3N37iXtaZpYcyDyF7WasQ5UvpyQviz2suaxRH5TtV9oIISBP766mXNvdlfL+v29nYBh/rqZW2z2YRW3V8va6YaEWCO7GVNW/Gb9rJmPYNIu0IFiaKjozFixAhMmTIFixcvliJ0fHbufQJ7kTbpQE6L6oSqe/hMnFLVVooEzc7UVeB9s7OzoWmaAH6xsbFSt8Pn80mxTPW+/P9AYydIT7uX+lp1/Cl9RZBVm0QtQsd1yT0SDIb69V533XW44YYbUF5ejo0bN2Lz5s04cuQIXC4Xhg0bhuzsbNTU1GDr1q2wWq2oqanBihUrsHHjRrz++ut4//330dvbi6effhqTJ0+WFjbPP/88XnvtNbGlGhoaUFVVhfr6erz00kvSQvLfTU6H4psB4GUtlIdqALBa1/UPNE07BOCvmqY9CmAPgD8e//4fAbyqaVoJgDYA1/wDxv2tSUtLi1TerKmpEaOpq6tLyjqrlRzT0tKkDQcdSdLtaBSolE21GAU/4yHBfwMnilxoWihfZtSoUWIgqRRSCjcBI3lUVEycZ4P4qKgoiUbQcNE0DV9//TU++ugj3HXXXWFtBmiQuFwuZGVlobW1FTk5OZL8Tee4sbFRiidlZWWhsrJSHNa8vDzU1tYiLy8vDAW/5JJL4PV6sX79eowYMQK1tbWicFmYik6TruvSK5AUvX379mHUqFGSE7p9+3ZMmjQJ48aNE+VLurau61i7dq1EBsjvV+eO741KZsiQITh8+DC8Xi8sFgtef/11PPDAAzCbzSgqKkJ0dLT0jp06dSoWLlyIiooKOBwOFBUVyRrg88bExGDQoEEyHp/PJ9WTWYCBeXZE/1i8inMwZswYZGdnh9FkacCTmsRqtaTmpaSkIDMzU1onREVFCRWS4yJiDITKmDMvsK6uTpxw0k3a29vD2prk5eWhurpaDHOr1Yq4uDiJNP7tb3/DRRddhEWLFkl5/c8++0wi1x6PBwcOHMCkSZMExaWyj4uLwx/+8AcEg0G89tpruOOOO5Cfny/7gGCBmjcGnCgCohoz6uH52Wef4fPPP4ff70dKSgpWrlyJX//61/D7/YiOjsbatWvR2NgohWJUNL6srEwMSDrwcXFxaGpqgsViEbYBo+j8LvefSinmeFRDgfu7u7tbClPt27cPSUlJSElJEWe0qalJ3vH69esxYcKEM1N230CuvfZacSJ7e3uxevVq3HfffXjwwQfx5JNPYvr06di1axdsNhtWr14t9NbVq1fD6XRKQYe2tjYMGjQISUlJSEpKwtKlS5Geni4tsKjHSJei0ej1eqWat8FgQEFBAdrb24U9wR54LMzGOY6KipKcbV6T9DpN0yTqpDpzwWCoJ24wGBRmAoEop9OJkpISMVgcDocU1CPQaDQaMWPGDNjt9n7zR9V1SiYOaWBr167FypUr0djYiCuvvBJPPfXUSZSw/kTTNLkO9QMQnufJtUlAi+ea2+2W84NAGa9BHcm1vH37dlx++eVYvnw5/vznP/ebiqKude5H3v90jNq+nFP1TFHnQ51TVTwej+g/GoMdHR3SloSMGQBhlVD5fbKD6GTQCWVOc3JycpgOBk70suYcno7xR3tBfU/sZV1dXY2SkhLk5+fDbDYjJSUFNTU12L59u9RwKCkpwddff42hQ4fCarVKIR6z2SzMF3Xe6OC0t7cLHdHhcCA3NzeMhtjXOKkfecYbjSd6WdPJT05Olj7a/b3XvkAI7jueTxxLJBWc+5trmbnmFosFWVlZqKqqAgBxGNlWhWs4GAwKW8HtdktQIhAIIDk5OYzGqdLYASApKQnNzc1ITU1FfHy8OB60PzQtVLytrq5OdAUptiaTCUePHsXKlStPuSYi5euvv5bWVxw7AQIW0VGjmrGxsZg6daq886amJinQFXlmDiQq4PpN2DucU/58unpgIGFUGoDYRQTOaKP39vZKvQsVOD6V6HqI0cJghuqgqteJtNFbWlokJY4sNupl1pngWcNrXHjhhaiurkZ1dTWuuuoqGAwGvPDCC3A6nTh8+DDmzp2L8vJynHvuuXj//fcBALNmzcLGjRvR0NAAv98vqUGPP/44pk+fju9///t4++23ERsbi9zcXAFTJkyYIMGJgoICdHZ2Ij09HdXV1Sc9379STqeKb5Gu6xN0XS/UdX2sruv/c/zzMl3Xp+i6PlTX9at0Xfce/7zn+L+HHv992T/6Ib6J0EFiXmVeXp4cFKxYywJJ6enp6OjoELSSirGurk4Unnp4qvlzfSn7vtBDAJKvGB8fj5EjR8qhF7loeC9SfKlEVZphcnIyBg0aJAghqXEsjLFz5068+eabMBqNqKioQG1trYzbZrNJJI8RO0ZYSWPQNA0ZGRkSBWXFTP5eNYQ0LUTVu+KKK3DkyBE5IBn
BIHLK8bNiJu+7fft2ea5AIFRtmdX1KisrYTQasWPHDvkbFf266aabwipa6routGWfz4dPP/0U77zzDtxuN2w2GzIzM4WKe+WVV4YlyAcCAZx33nmCiHV2dkqfNBo2QMjZO3bsGCorK8UQYMSY8xITEyMFIZKTk0XRMmKo0oBVoXNNIwgA5s6dC00LtUnavHkzjEYjUlNTxXg2m82SZ+33+1FXV4dDhw4J9Uc1kLiWLRbLSQ3FzWaz5PnwHXMtMOL5wQcf4KWXXkJNTQ2Sk5Nhs9lw/vnno6WlRfKiWV6dEXhG2rl3ent7MW/ePNx6661YsGCBOO9AyKlW95RqRKtjoqhOZ2trK/7nf/4Ht956K7Zu3Qqfz4cFCxbgnXfewQsvvICCggIphkIDhQ4m1w+jUowSO51OGbtqbKkIfyS1KDLKQ8O5urpaKFt2ux2dnZ1ITEwUJNZkMgnD4Z8lzBHmPBO0uvbaa6HrocJZzc3NYly2traK8USQTNd12O12aeHU1tYmLRz4PHQuuQa5FgCIM2mz2bBnzx4cOnQIwIk9QN1DA42ABh0Lv98v+e+q3lSj2U6nU6pr2u12ibL5/X4cOXIEtbW1YRW+qVOYLzx27FgsXLgwrP1NX6JS7mhENTU1wWg0Sq5+Z2engAJnIjabTeaM0Qk17YNrWF1zPLP4H3W50WiE2WwW+iav29vbi8bGRpmH/iTSsTyTSB1FBZsiaaHq3Kh6SP2cOdyquN1u0SV0Sjkf6jU4d8zPJ7jM84rGsDoedaz9nfEUleJLR0o14nX95F7WnZ2d6OjogMHQfy9rXdfR1dXVby9rVeisGgwnelkXFRWFOcscP9c7n5lj7KuX9ZIlSzBlypQwAz7y3UT+m8Je1jwPuQb6W0NqRE/TwntZ0xEj4Mq5piNNx5H6je9bnRv+3F8v68TExAF7Wefn5yMrK+sb97Im6MVnIwDKoARbFKlAihp1TkpKwuDBg5GUlBRG2x9IvolDql6jL4f4mzpBkfs/EAiIzdrd3Y2mpiYBMAnSnKkMGzZMQApVL6j3VsFnk8kkz6lSuVX9qxay07QT7D12EeB7HTx4MEpLS8UXmTlzJg4fPoycnBz09PRg8+bNkqeu6yGGldvtRiAQwLp16+D1etHQ0IDy8nJUV1dj2rRpePzxx9HR0SH1daxWK2pra7F7927Rk5G6618lZ1Z7+f+HEhsbC5fLha6uLgwePFiqudpsNsTExAilLjU1VWhejPbQUKSiYeSNB3YgEJCKenPmzDnp3lzEkQtB0zRRzE6nEx6PB01NTWHfVQ1hNZeGtBV+7vP5xHE5fPgwjh07BiCEGttsNnz++ee4/vrrcfjwYXg8Huzbt0/+NjU1FTU1NWF5N0SXGTUyGo3IyMgQykFjYyMyMjJQV1cnER41x46HgMPhwPTp0/HJJ5/A7/cjNzcXPT09aGtrE6UaCATQ0dEBi8WCnp4eQUSZS2k2m1FdXS09/VauXBlGrV2yZAnefPNNeV9XXnklnnnmGTgcDnkmp9OJ6OhobNu2DU6nEzExMWhra0N1dTVSUlKwa9cujBs3Dh6PR6K0LpcL69atw5QpUzBv3jy8+eabcoipeZM89NS8FOBEYSsKKWVq3iWNaDVPSHXImHPHfoyMtu7evRvr1q0L68fJQ4hz6nA4EAiE+rVyfVdVVaGgoABOpxNFRUUoKirCsGHDMHToUBw+fBjDhw8PMzR4b645KjQa2GlpafD7/Th27Bj+/ve/IykpCb29vVi6dCmAULugoqKiMAeE42R0ubi4GFdeeSWqq6uF4sIxs1gFaXlqEQ11H1Fo4HD8HPerr76Kv/zlLwgGg7Db7bjtttvwxBNPSO7npk2b8Pnnn0uVYpWRwAORh2FGRkaY0RZpRKhjUh1fGhN0+omGejweJCYmIiYmBpmZmSgtLZV57KvgyD9SdF0XlsPkyZMRHR2N/fv3Y+PGjQKmkKWQmpqKzMxMeL1euFwu0WVLly7F66+/jujoaNjtdsybN09aa9CQIMDBOQIgfWNJf3S73TJ3RKNHjx4tlVV1XZdiVF6vF83NzYiOjkZSUlKYrlT3GdF1NX+TBnt7e/tJ7Rjo4GqahqysLJx77rlhOqA/USNTXIOZmZmoqKgQUNTn80mRKJfL1SdddCCZNm0avvjiC4kaReoAILyKLZ9n0KBBgqCTbt/R0YGamhrk5eUJA0TTQnUBHnvsMfzqV7/CtddeizfeeEOuo4qayxpJfzsd4fXU6rsqUKRKf9dtbGxEfn5+WASora0NeXmhfoWMKAGQgkN99bJm6gfH01cva1LICV5Q+P54DhgMBgFpd+zYIecWKbO6HkrdYG67zWYT4IJAHgGY9PR0+bm+vh5utxvjxo2DyWRCfX091q9fj/z8fOllzar2rNPAOYmkjZJqDoTYRWytpWkhKmVDQ4NEDqnH2Mt6w4YNMqeLFi3C/PnzAQC7d+/GkSNHZK+rkWrqZ6Mx1MuazBWmE/X3rnlOcm4J6MXGxgozTAUv1GuxxyTzzQ2GE72sqR+MRqPkiHs8HmHwcG0wPUut4sxK+WwpSP1/2WWX4aqrrpKz40xl5MiR6OrqkmJrFosFXV1daGlpQX19PTweD8aMGQODwSB2LZ19lVnCaq8EoxgcUJ1xNSp4ts6K6pypcib67FTXBk6AJFFRUSguLkZhYSGCwSDee+89XHfddWhoaMB3vvMdvPzyy/0yA1RRx8uUP7JuIoEw2hbqHjcYDMLOVM//yEr1BoMBw4cPF/uF+uWtt95Cbm4uZs+eje7ubsmt3blzJ5YvX46vv/4aVqsVe/fulcJ+XV1dCAQCcLlcAtT+8Y9/FBbhHXfcgS1btuCZZ55BS0sLOjs7hUKfm5uLLVu2YOfOnViyZMkZg4j/KPmPd1BJQUxMTERPT4/0oaOjx99R4XBRAZByz3FxcWGVOrm4VYpPU1MT4uPjJfKiGgaR0RX1GjabTUpNDx8+HHv27MGIESPCaIOM9qjGMh1oNb8mOTkZaWlp2LVrF2bNmoWqqipcddVV2L17tyBEXq8XNpsNDocDDodDlBQpeoxMMnIEnKAvx8fHo7KyEmlpafB4PLJ5SXNQo0ajR4+WSNzixYsRFRWFl19+GTk5ObBaraiqqoKu66ioqMDYsWMlekZ0Kj8/HwcOHBCHt6WlReiZuq7jnXfewbJly/D73/8edrsdMTExKCwshMPhQFZWFtrb29Ha2or4+HhBkrgeWDTL6/Xi3XffRUFBASZPniwtNoiSAaEDQ9d1OcxViq/b7RaFw8puauSLOZbquzcajVLVlkoxck1wzkkPJaq6fv16ZGZmhn2XRjR7mHJtq8ix2+0WoIY5M7quo7KyEnV1dUhNTRWaHN8pjWsaFW1tbVJl+brrrhPj6ujRo/JOvvjiC7S0tCAxMREXX3wxBg8ejLKyMvj9ftTU1CAjIwNutxtbtmzBzJkz8fDDD+Pdd9/F0aNH8dZbb2H27NlYuHAh9u7di9TUVPzpT3/CFVdcgQMHDqC5uVmMS7Y5oKhGsbr31IPKYDCgvb0dTzzxhB
zO8+fPx7JlyyRaHhMTg6KiInz55ZeIiYmRtaNWAI4snKYauHSKaJxFRUWJAUXKNQ+zxsZGdHZ2Sishv9+PzMxMNDU1wW63n9TT8R8tgwYNwnvvvSe53BdccAE0LVQwadWqVXjooYdEJz3xxBNYvnw5Vq1aJdUlMzIysGbNGkyYMAE1NTXo7OzEiBEjBIxg7hhzs6lr2F5KTVHgXLLomdlslmJM3C9ms1mcVDq2mqbJgUxgAwiBlGqkxu12S24xcKLNANcJwcsxY8ZIu4/TOdBV0EnV+cy7TU5Olrzu4uJiZGdno7GxERMnTkRRUdGAhpXq+HKNq1EuCtenWsWWFMf4+HhZq6NHj0Z5eTl8Pl+YY8L1SzphU1MTUlNTMXbsWBw8eLDPKKQqAxm61C18v30xIiI/6wuwjZTW1lYMGTJErst3TMCUUQu+IzoxqiEKQNYiz8S+elnTqaWOVX/PnLivvvpKxpKamhpWgIXnNYFQo9GIkpISjB49OozmyvVvMpmQlJQEs9ksIKrFYsFXX30lrcjy8kK9rDVNw6ZNm6THp8vlkvzLtLS0MBBPfSeBQED6afv9fgwfPhyZmZkCZhBQcblcMg41Kvvhhx9i3bp16O3tRVZWFu644w7k5ubC7XZj37592LdvXxiYx/vTflKdJBVsUaOqKnvN6XTCbrcjNzcXJSUlwlLgviAIFh8fj7a2NtHfgcCJXtacH77TmJiYsPFQV6v3pR2mphGwBonZbIbNZpNe1mervzk3aq/enp4etLe3yxlkt9tRWlqKkpISjB8/Xuwjr9eLQYMGAUCYDurp6QkrqORwOGQ9n62ozrB6Jp6No8u90hf4q0pSUhKcTify8/Mltam8vBxAyC5jfupAEumAknnFs0cF2iLZI5E1YjjmrKyssM4U3C+ck8zMTOmJzXMpMTERtbW1KC8vR0FBAcaMGYP169fDZrPhj3/8I9LT06XCf11dnZxXHo9HrsGUO77j+++/HwkJCRg2bBhGjx4twQqm4yxYsAClpaUoLS3FsGHDzgpA+bblP95BJT3XYrFI2JtoBJFzAELHIvpGQ4fcbSp2bnIqcxYocrlcghzOmjUr7GClRG48XouK0+8P9eMivYcbVs3bMhgMEkGlM0cla7fb0dTUBF3XcejQIVx88cUoKytDcnIyKioqEBcXh2eeeQa7du1CampqWD/CrKwsUcYOhwO1tbWyUcl9z8/Px969ewGEjC4e8swhbGhoEAWZl5eHoqIiUV5UkIyAqDkFdKa8Xq+U1p4/fz727t0LTQv1Be3o6MCHH34oRsHhw4fFqI2OjsYnn3yCWbNmISEhQXp8Dh48GHa7XQ5Iu90Op9MpY+JhFhUVhS+++AIJCQnyucvlwqpVq3DrrbfC4XAIQsY5ovKiwlNzWVS0WD1sOV+qYqGoDhbfLe/HfJ3Ozk6pPg1ADH+CE7wXx8Y5JROgtbVVoihEhfle1q9fD4PBgMzMTEyfPl3yPqgI6bwGg0GUl5dj7NixiIqKwtixY1FYWIgdO3aI4o2OjsamTZtQX1+P7OxsAKFIdnZ2NoLBID799FPJXdq/fz/eeust9PT04PDhw9i4cSMuuugibNq0CWPHjsV3vvMd7NixA1OmTMGRI0eE1k0DQu1bF2ngqMi6Chjx540bN+LTTz8VA+WGG27A7NmzpShPV1cXVq9ejT179oQdbGqEk/fjIRFJ8+Whx/fNipKMTJBy39jYKNVz09PTMX369NNXct+CuN1uaUv17LPP4vnnn8eNN96IhIQE3HXXXfjxj38Mn8+Hn//85/jZz34mrYZqamqQlJSEo0ePorCwEAcOHIDH48Fjjz0m88+8x0AgcFKELCEhQSKmLpdL2iLxgG9oaBAqot/vlxL9BLRcLpcAFm63W3RaMBiUfHCCA6wtwMisujcZxS0oKJCccJX+FynUudTRNE76MpD4O9KQo6Oj8dFHH+HWW2+F0+nE4sWLsX///lO+I5VimpaWJqkE6tpUwROuSyDU/ojVSxmF4HgJIqhRKqMxVNDnySefxBNPPIGpU6fiwIEDYddWHdrTETWSdrYRlr6M197eXmzdulXGozocBERojPGdqy1EOLesvM7P+uplzd/x7DcYTu5lbTQaB+xlHZn7GNnLmnM0UC9rgir8TmQv67///e/SQuxse1mzzQ2dfTWSSnCHNhHtELWXtd/vxznnnIPly5f328v62LFjUqmYulkFQNT3zbONxYmo+00mE5xOZ7+9rHt7e+FyuQD03ctaPRu4lgksUy+x+B0j0JG9rHVdl7oQnJezWeM8F+Lj49Hd3S26i++ezB6v14v4+HiUlJTg0KFDiI6ORmZmpkTY3G43cnNzZZ3TCdN1XWxETdOEdaKeoYFAQHJ2ExMTTwI0VEeSc3U2ol4nkmrel6Snp0vqD1lVtO2ZVsKzuS+wj067CnjwvgUFBdi/f7/k6QMn0q/UYEAk64JghGoT0uZSK8/znbIVDlMOzWYzvvrqKwwbNkyYBK2trWhvb8eQIUMkRSo2Nhbd3d2SL0swnPuWoEpNTQ1KS0vFIR08eLDsqbVr1+Kqq67CkiVL0N3dLWzLf6X8c3sV/JtKUlISurq6xAhzu93Sl5QoBRdTdHR0WLGEgfoR8lBvbGxEcXGxOAXsV6QaLCoSp6LQ6gFPahzvu2nTJuzZs0fyaPn3bPfCz3lNUpYNBgPS09OxadMmVFdXo7S0VCq/jR49WkL+NPza29uRlZUl9KOMjIyTWnXk5+cjJSVFNmJubq5s4KSkJPj9fhQXF8tYNC1UlKq3txdlZaE05by8PFitVulFqWmaHOhEPVkpkAg1q9QmJiaKc5yamioH+gUXXIDm5mZ8+eWXiI2NxW233YalS5dizpw5WL58OW6++WakpKRItEZ9B6wK+NZbb4Xl7pAWw5yeH/zgB/K+7Ha7jJ1ovBo5jnSISGlV70mh0dSXglfzjmNiYrB27Vq5D4sjWSwWGQ8NdxV04Jy2t7fDbDYLmt7U1IRAINQfmOuF77KxsRHvv/++gAhc43yGQCAg9G8qX03TJEf38ssvR2FhISwWC5KTk2U+MjIyJOoTDAbxySefIBAI4He/+x1uvvlmOTy3bdsmaDejyCtWrEB6ejpiYmLwyCOPICkpSdgOpMoyKhxpOPPffFeq8Hl0PURvffHFF3H99ddjxYoV+PWvfw2TyYRbbrkFv/nNb7B+/Xo88MADyMjIgNPpDMt3Vh0ZdW8TZFLz+jguGr+kmQJAUVERGhoaYLVa5bN/ltTV1aGnpwcbNmzAypUrMWbMGNx999345JNP0NTUhJUrV8Jut+O3v/0t7rrrLjz77LO4/vrrJaKRmpqKOXPmYPHixcjMzMTjjz8u0RcalWQb0BjV9VD+nUo/VCP3PT09kgLByCoPdZvNJsYJ3wOrAhMQAEJVxBsbGwXM4jX4n8ViQUZGBi677DJceOGFYuSpzktfQj3Ie0fmLEUKC79VVlYiJiZGWi9xb0VSRinqulX/X1BQICAMDfe+IhCqMxgIBKTOQiAQEJBH/T6NPxpiZB0BwPjx4
wGE5y+drnOqjv1UxijnVP3dQHNLWj6/RyD3iy++wNq1a8PaiqnAhLrmgsGg0Co5byq7RTV4VZCS69jv96O+vl4c/tzcXMkDDARCvayZH6lpGpYvX4777rsPdrtdnIfm5mbs2bMH+/fvR11dHQCIg8EoIHNlhwwZgjlz5mDChAninLCX9erVq+Hz+TBq1ChceOGFmDx5sjBkDIZQPn1TUxNqa2vDzlrqJr53OrEEHdVe1upeiqz9wGsH3AVfAAAgAElEQVRpmoa9e/fipz/9KW666Sbcc889sNvtuO6667B8+XIsXboUhYWF8Pl8ogtVfamuFX7WVy9r0uTVdUnwQNU/PFcIWBDo5XfZVoxnLo1/vjeuK5PJhJtvvhn33nsvlixZgrS0NGEncP54rzMV/g3nms4ogLCiUvHx8Zg4cSImT56M9PR0WCwWAfK3bt0q5+yxY8dQX18vZ6UK1AChYj8MLvB3xcXFOHz4MA4fPoz169fjwIEDYWlgZyt8v2rw5XREPcNVfUbw4NixYzAaQzn948ePl/cQ+V8kG0IVq9UqlOr+9Lnq1PL3LpcLUVFRSEtLCwsO0FnWdV3o8ARp6bA2NTWho6NDcuUZ5DKZTJg0aRI++eQTYQgwUg8gLDqv2p1kVcbFxeH73/8+xo4di9deew0JCQnIzs7GlClTsHHjRixfvhwff/yxdPD4V8p/fASVqEt3d7eE1flzVFSUtCmIjo6W6r7BYChfkIUBuDCcTiesVis6OjqQlpYmaI1qcHZ1dSEmJgbd3d3YunUrkpKSMGLEiDBjmYpcRcSBE3RgRlmcTicaGhrQ3t4uuWk0plXqL4236OhoiRCWlJTAYrFI37vBgwdLnyQeOqSjNjc3Y8yYMSgvL4fdbkdaWhoAhBmLubm5cLlc4sjn5+ejoaFBKKexsbFyPSKhPp8P2dnZ2L9/P4YMGYKxY8di27ZtYaXleYjwAKqrqzspL8jtdmPUqFHwer1CYc7JycFXX32F+fPnIyUlBbW1tdixYwcmT56M2tpa2Gw2vPLKK1i5cqVUPiQaRQeDyNLBgwcRCAQEoeKB3dvbi6KiImmrQ6GyU6Onasl9NXoaGWnlvPI6/JtIZcgxAJAWJJw3RnQnT56M3bt3SzSJhn9RURHy8vKQlZUlgABz9XRdR3Z2thhtdrsdmqZJPgXXZVdXF2w2mxxqHKdqEKuSkZEheTs5OTnIyclBIBDAsWPHUFtbK9+n08z7dHd3Y9KkSVKNlXuxuLgYMTEx+PjjjzF37ly0trbCarXC5/NhxIgRqK+vR0VFBdrb22Gz2WS+SK/heCPR3r5E3Zc0XPbt24fbb79dkM/58+djyZIlWLFihSCXe/fuxfvvvw+j0SiRET4T+9SpjlVkYQ4ae0Rc6dAdPXoUO3bswOWXX97vmL9tsVgsGDJkCFwuF5566ins3r0bP/rRj/Dee+9hxowZaG5uxsSJEyXn6sMPP8SvfvUrlJaWQtM0pKenY968efD5fDjvvPPQ3t6OVatWITU1VSo0M7eK+4OGLSOLNBSBE7nenDdWxmaEQEXJGflSKdZtbW1h6LKqUwjsTZkyBSkpKWERoIHWiZprNBC41JekpaVJPn1OTo6ANT6fDx0dHZKDqOol4IRujxRVf3LsfTlNHGtiYiIaGhqkF7BaYTKy3yPBOuqsRx99FL/61a8wceJEYaPwuwM5jqpERmjUcaq6UXWsI/8dqU/VtRIbGyuRtLq6OgwePFiiFZy/3t5exMfHix7tq5c187xo7KkF5OjU8p6MfrKXNaN0pLyzl3VSUhK+/vpr6bcZDIZab7388st47LHHpN0aQT/aFgCkbzvBVbVCPCPFuh7qZd3a2orq6mrRfwRfR40ahYULF6Knpwf79+/H9u3bMWjQIEm5YIEssnD4fKR++3w+AfGZ1jF06FBx8FUQP/J9qdFQ9rLm9SdPnoyLLroIK1euhM/nQ3V1Nf785z+jurr6pHYxLMKWlZWFrq4ubN++XdpXtbS0oK2tDUOGDBGbjPNjMpkE0Kejx3erjpfrnYEJFbhR2RIGgwFzjveyJlXW6/WKXcHq/FwvZyq08ajPeB4xpYrRf7WYE3OsSXXOzc2Fx+NBZWUlsrOzxVlqbGyUVJLIfRYMhjo3ACFnjXVRYmNj0dnZiY8//hherxe5ubmYNGlSv8Ud+5PT1ROqLqAupK1AHcWznu/o3XffxQ9+8AN0dHTgvPPOw8GDB89obADE7mZhLTJ4VIee61m1F7hmEhISpBYCi2vSFuF7Y1CBujoYDKK1tRUGQ6jPMFu6dXZ24oMPPhBGUHNzs/T3pp1pMpmklgNwcpHWF154Ab29vVi4cCESExNx5MgRjBo1CjfeeKPsqZ07d57xPH3b8h8fQa2qqkJHRwcSEhKk5x2VPYsTEB3j4cRcDxpRfPks+80DPBJVpVNDOhmLYHzxxRdhlXfV6KkaAVUNk2Aw1N6Dhm5DQwOiokJ97Yik8TlU9LilpUVQYo4nGAzi3nvvlRxEIEStbWxsRCAQQGtrK0wmkzgw7GHJw1nTQkWdysvL5SC12+2CwNLpoTKlA2O32zFu3DhBN1NTU6HrOnJycmQ+GFWgEqLx9PDDD8vhrGkaqqurMXfuXFitVqSkpODcc8/Fxx9/LMZKfn6+5MFQifX09CA6OhqLFy8WZcM2QxdccIEgpjSEVSOWfP81a9YgEAhgxowZAE44F5FRskhjS414q++cf6MejKoC5u/VaMqePXug67pUmSwsLMS8efNQXV2NnJycsJyzo0ePSlEvfs4chJ/+9KcwGAy45ZZbxOkiYJGeni65LG1tbaIISZtSDQ1eUzXmOeeqcRsVFYVRo0ZJNIY5VyzHHgwGcf/990u+G993VVUVjhw5gnPOOQe7du1CIBBqMTB16lT4/X5MnDhRHGBd1/HLX/4SNptNcvxUo0qlD/UXrVHflRpVpbHS0dGBt99+GzfccAOuv/56PPzww6irq8O4ceNwzz334J577sGDDz6Iiy66CD09PeJ8MBLPCtmcM6vViujoaMTHx0s+NA2rlpYWHDhwQKjy/yzp7OyE0WhEUlIS3nnnHaxduxYtLS2455578PrrryM3NxeXXXYZJkyYgE2bNuF73/seLBYLhg0bhmnTpiE5ORlLly7Fj3/8Y6xbtw6JiYm4++67cd1110kRNzViwTln9ITzr+aZWyyWMIPc5XLJfFKnsEgFdZ6maVJ5WM0XJ41y6tSpWLRoERYuXBgW4T8dY5JrXP2vP1EBEoJKmqYJkNPd3Y2dO3fCYAgV21i0aJHcAzgRoR3oHgQL+srf4t9zjmpqaqRNVE5ODqKjo6WGASNenHc1ishrP/744/D7/SgoKDjJeexP+oqAqg4jP1fH3NecR/5O1Z9AyGgrLCyE1WpFa2urrDPSIulAMf+Twn3O5+Ya4jnQVy9rjic6uv9e1gsWLEBe3sC9rGtra/vtZe12u9HY2AiDof9e1pFgRGQv68LCQslh7auX9ZIlSzBjxowBe1nTuKZDTd2dlxfqZf3AAw9Iu5a+eln3BcKr0a09
e/bgsf+Xve8Oj7JM17+/KZmZTDLpvTcIgZDQCSBFFxV7WdTVVSkurK7KWtZ+1LOuume7uB737FrWhitrpylF2oKCtCTUAKkQ0ntmJsmU3x9z7od3PhLELe7vujzvdXGRTGa++b63PPV+nvvZZ3HbbbcNyWU9bNgw+P3+s3JZZ/5vsy+eF64xuaxpC52Ny1rNtA3GZc3zNRSXdWNjo3BN/iNc1lxPdb5U+DXtS4PBIE0IVUfa7XbDbrdLoN5qtaKgoADx8fFwOp2C5Ghra0N9fX2QHue1DIYA9dKIESMwbNgw9Pf348CBA6itrcWmTZvwq1/9CmvXrj3n5+FzfNUYKrOqylJmOGkfmc1mYQ7gmR0M3nsuo7CwUPY754IyVN3PQDD3Oe1mnhcGY9j7hIESIhNUDl32V/B4PCgrK4PJZEJlZSWMRqMwB9jtdoGm89pqcEpNjlB+Uad++umnePDBB7F3717U1dXhk08+wb59+1BZWYkNGzb8XfP0zxzfegeVmY26ujqBrbAegnQCbAAAACdOnBDiduLwgeDucHQGuAmYyaTSt9lswoPEz+3YsUMMdQ5VIADBxOSqEqeh5na7UVpaisTERBgMhiAjGAg4QfyZ0c7e3l5ce+21cDgcMBqNQg9jMBiE55IRRkK5eEjVyC7/3t/fL7URXV1dcngTExPPcFpSU1Nljkl6zvcTEsrDBECgOM8++6zA+jhfTqcTTqcTlZWVOHXqlBipvF+PxyPGKoUY4cw33HCDQF8BYPr06Zg+fbpkv2lAAAhyJrjWZrNZYMoULGazGaNGjZI1U+sdVSNH77By3VUjkEP9u1rzwznjPunu7sbKlStx5MgRlJeXS+a0paVFMqIq7Jgdkbu6ukSZeTwe5Ofn495775U5ZrbQbDbj5MmTsj6sc6VRBEDqiFTlRsJydahZKV6DwvX8888XgU2H7OTJk3jxxRcl6puWlhYE7dm6daugH4qLi3HZZZeJUgZOOz+ExfT19aG7uzuo9lEdahZosIyQ+nyEYdbV1eGBBx7A4sWLcejQITEei4uL8bOf/QxLliwRhRIWFhZEjULYMqPdPHvq2VVrgr6pMWfOHFxzzTUwmUzIyMjAww8/jEceeQQTJkzA3LlzJdtpMBhw7733CmrB6XSiuroaAwMD4iSsXr0aixcvxrp162A0GgWuyD3N+aIxqco9zj+DR3wvZQHlKSmVTCaTQBDp8HZ1dcletdlscDgcSEtLw5VXXikdUc81og/gKx3FwYZauqEGP1R42t69ewEEzlh+fv4Z0EY+71CDfMYMCjHTw+/q7+9HVFQUqqurJevc0tISZHRR5lI38Z8qn/r7+9HW1oaQkBBMmjRpUCjyYEPvjKrrqzox6jOq+oZzoe4R9TUOt9uN2NhYFBcXY/HixcjNzYXFYkF9fb3AbjnPlH2cJ4vFgvT0dOTl5cn3xsbGYvTo0UKRxt4EHPX19Th27JgYn4WFhUhOTg7SR1wXyj99F9Xo6GhpxsX78nq9aGtrQ2dnJ9LT08XxPH78OJqbm+FwOIIyXZxT6ld1UN/zO8kdeurUKQBAeXk5DAYDvvOd7+DSSy+VLHp0dDRsNhvq6+tx6NChoH3CNbBYLLjnnnugaRp++ctfSj8B9kpQdaF+PYHgAD11odFoxKZNm6TLelNTE6ZOnYqHHnoIL730Ei644ALs27cPaWlpiIiICKrns1gsQqWkNlqj40yUFm0l2inqOeBeVIO6XEsVyt/b2ysNHjs6OoLo9yIjI8U+MJlM2Lhx49kPyCBDnRM6PJS9nFc+U2Zm5hnBKDa43Lx5s+ig9evXo7OzEzNmzMDw4cOFoi4hIQG1tbVCx8fnBU5n6gYGBpCTk4O5c+dixIgRQR2gz0UOfJ0sMoNk6r7Vy7/Y2FiZ4+7ubulDwaZSTFB8ncHv4xllgka9B/29qPPOvyUkJIhNqd4LfQQ1EM655V7t7OzEZ599Js0IWeLERq0shdHbI6rtTOiwen983759+/Dpp58Kg8Px48fFb/l3jm89xJdCnz8nJCSgubkZ0dHRwodF5U2DWN2UjHxzkzGipUZKVMGs1lOYzWZMnjxZFBU3FI0zdbPpHRgOfg8NMdZSrVmzRqDDrK31er3SLODaa6/FqlWr8L3vfQ/XXnst1q9fD5/Ph6VLl2LChAlyKNhEwOsN8EupUeL6+nrk5uaKM06Ib21tLfLz84PgT4mJiXItHvjc3FyJIpaWlmLSpEno7+9HfX29dOA0mUxISkoKMsqYyQaCmwSx62RkZCRsNhsiIyNFUFdXV2Pq1KlwOp1IS0tDRUUF8vLysGLFCsyZMycIEkJByEw5AIlkud1uiWhXVlYiKysLW7dulTVhNJ6wG9bQEIKkCjK9E6pCfVVnj+sMnC7kp5CjQeX1Bjobc36ysrJQVFSEXbt2Sdt9dqQmxEvTNBQVFeGpp55CeHg4Fi9eLNclB25kZCSSkpLgdruRkZGB3bt3Izw8HAkJCZKZb2pqkkg5HYdDhw4JjcKYMWOQkJCAsLAwHD58GIWFhUHnj5A5PiPray+//HLJ2s6bNw+PPfYY4uLigiCd2dnZ0kCpqKgI27Ztw3nnnYeBgQE0Nzejvr4eF198MYYNG4bt27cLFdLSpUvx61//Gh0dHVJ3y/putbZMdZ7VOdefwcEcVafTid/85jcC38nIyMCPf/xjJCcnY/78+WKkbt++HZ9//rmgAeiUc+/RgGSGh43Cvskxbtw47Nu3D3/6059w+PBhzJ8/H6+99hpef/11rF27FhdffDF8vkAjNkZ31SBdamoq7r//fjG43333XWzZsgW7d+8WCC8zxZGRkeju7hbniGdBhejSAKTS1bQABFPlP+zq6hK5CwTqwwcGBlBeXi5O69SpUwVWzH14tqHCuoCvT78wmDEDnDZ0vF4vGhoaEB4ejubm5qDMJw1P9V7OZmwxQKm+RzW47XY76urqEBYWJvqBtenh4eHScMTnC9Sd0mBRnQbVEFq7di1mzpwpkOqhnp9BKNUYGww+rZ4p/eA6qQ73UMZuZGSk1Fj29fXBZrMhKSkJ5513Hk6dOoWDBw+KrladbyAQEOvt7YWmaZg2bRqcTqc0CDrvvPMkO6L2KLDb7UEZez23KYfKw8p9O336dAmaNTU1SVdWINBbgXPHoGNjY6PoJGboCMtl3WNERARaW1sRExMj382Ag567mbX7hw4dQnl5Ofx+P2bNmoULLrgAn3zyCaZNm4aPP/4Y0dHRiImJQVhYGFpaWgSazPNdUFAAg8GA+vp6zJ07F5MmTcL27duxcuVKQTy43W75PhWyr673YA7s8ePH8eCDDwIIOEpTpkzB/Pnzcfnll8NqtaKurg7PP/88Dh06JPqRslN1NMPDw6V5jslkQldXl9hnKqWUKve5T1SHUNM0oZIhmm1gYAAZGRkYPXo0QkJCUF9fjxEjRkjvAE3T0NDQMOh+/apBuUp0hBr4puMTHh4eVDNNZ1EtJ1EdXYvFgi+//BIejwcjRozAqFGjpM+Gw+GA1+tFU1MTPB4PkpKSZA+ptkxubi5ycnK
gaRra29vx29/+FnfeeWeQTNUjxfg8XzX0cnkoOR0VFYUTJ07A6/WiqqoKBQUF6OrqQmNjozQ/HT16NI4cOSLzONRQ7UG9c6e3F1VZSFmmBjoI5+W9c53IuKHa8QCE0oilVHa7HV1dXWhvb8fRo0exe/duJCQkCIKClEpq3w86t6q85Xfzf849Id4tLS3QtAAiMj8//yvX5V89vvUOKgW1pmnSpprGi9FolMZAjY2NQVAxHq6wsDCB1FBoqYqSG5lDzQaQZqW/vx+lpaUCCfJ6vUhJSUFGRobc41CHWBXgNK7NZjPa29ulKRG7pTLCR6dV0zSsWrUK06ZNw+TJk7Fu3Tr87ne/w44dO1BVVYWenh5pIAWchkARrqxmVHkfiYmJOH78OIYNGxYE9eQcVFdXY9iwYQACBtSRI0ekUVJJSQmysrJQX1+P8PBwiWqqyoKGkhokYKZp//79uOSSS+B2u9Hc3IySkhKsXLkSl19+ucz7smXLcNttt8m679mzBxdffHGQwNm5cycuuugilJSUYPfu3SKEuPZ2u12cN78/QJ+iri+flxnz/v5+yd4SNq1mP1QBwrVW98xghigjlUajUTKBBw8eREFBAcaPH4/Ro0fDbrfjqquugtvtxgsvvCCO6csvv4yFCxfCaDTioYcewn/913/hrrvuQnt7uzRQSk1NRUNDA6KiopCQkIATJ05gypQpmDhxIjo7O/HBBx/AZrOJkeX1eqVDYUpKSlB2l3VpBoMBdXV10uGX5yMuLg6dnZ1BGTA18zwwMICSkhJomiZwfI/Hg2PHjiE7OxvPPPMMOjo6UFhYKNA1n8+H4cOHw+fzYdu2bZg1axaqqqrQ0NAg0XXCgBkA4Z6gQc5sgJrp0TsoPHeqolYdM/W16upq3HffffKM3//+9zFr1iyMGzcO48aNExmydetW6TrKgFdpaSnq6+ulHb2maXjmmWcGlQn/ivH73/8eM2bMQEtLixgh9957L4qLi3HixAnMnDkTV111FSorKzFt2jR0dHQgLS0N1dXV8Hg8aG5uxq5duzBx4kTYbDbccMMNuPrqq9Hb24unn35akA1tbW0SDKSz6/F4UFlZKQERGtY8a0RnAAEkBeun+Y/nhNmPpKQkFBUVnWHcnW2oATLgdL3p18kAqNcazFGNiopCe3s7Tp06hejoaAwMDGDDhg2YOXOmyBuVs/OrvsPv9yMmJkb47lheQUfc5zvdhZOOP+GCSUlJgoxQa6ZUQ01FvPh8PqxduxYzZszA3Llz8frrr5/hkHEOVf2p//tQzzHYe87mlKrDarVi165dsNvtyM3NlZowooR4/3SuVCdVz2VNFAONfA5mXVWnT9+0RZXvwNBc1jt37pRyDXWfcD+TH9nrHZrLGgA++ugj5OfnD8plrRqm3A9qNtTtdiMpKQnd3d1Dcln7/YEGaM3NzWdk9+mEer3eIbmsq6qqZN7OxmWt6no1WE9ba8eOHdi5c6cEyPRc1kePHsWbb745JJe11WqFy+U6K5f1YPtU3f9EVg3FZd3f34/09PR/mMta0zS0traKDZKUlCS6hs6W2gma98ryqsFs0WnTpgUlINiEb+vWraKr7XY7qqqqkJCQgN7eXvT398PlcomuVweRVnPmzEFFRQXMZjMyMzPlu/VnVo8koY7lM3wdGaueXWYlPR4P3nvvPSxYsABdXV04//zzsX///rPChVW5o76P3XzZOVn/d/28U9a3trYiKSlJgkW8PtEpDHp0d3cLAoxBj97eXni9XoSGhqK+vh45OTlYsWKFZOrDw8ORl5cH4MymSFwP1WlmwJyD9gjPFhBA4R06dOic5/1fNb71EF/C6cgD6XA4pA6R0WTWYKqcXFRY+s6vQHDkmkY7u0rSeCUE4eOPP8ann36KpqYm6TZqNpvR0NCALVu2SFMgNXpztsEDEhkZCbvdDqfTiaamJqxcuRL19fXC59rd3Y2CggKBxmZmZopDzmYIVFJUxnQcaDSrRdiaFmislJOTI7DEpKQkMSKYRSQUlsq6t7cX8fHxYjwVFxcjLy8P11xzjShhKjAqfX6WkWQaiuRwdbvd2LRpE+x2O3bv3h1kVHIOuJ7Z2dmwWCx44oknzogiX3HFFUGKNyoqCgMDA/jxj38Mi8USFNHWrw8jnFwLOpLTp0/HmDFjzogM83c1YzdY1lyFFlO5857ZlKG0tBTPP/88fvGLX+Czzz4T2Da7ES9cuBBPPvmkRLhLS0vx4YcfoqWlBQZDoOYtIyNDoD0RERFijBmNRnHMzz//fMl2kq4gLCxMgh/t7e1BzRwYBFi3bh1qamrEGCLHH3C6yRIbdnAOGhsbpSmJ3++H3W7H9ddfL0aa0Riob01OTsaBAwewYsUKdHR0oKCgQGqPR48eLXMVFhaGyZMno7OzE83NzUhOTobb7caxY8dgMATqjDjHHR0dUq+kKi29sax3dtQ1Vc8+jdiXXnoJt956K+bNm4eHH34YVVVVMBgMmDx5Mh544AE8+uijuPPOO8VAZI2gCmf8pgZhhcw4v/TSSzCbzSgvL8eTTz6JuXPnYtWqVXj11VfR1dWF++67D4899hheeOEFNDQ0IDMzE7NmzYLJZMKJEyfwzjvv4P7778cdd9whdaU8w1z/AwcOYNOmTThy5Ig4T+pzh4eHS+OuhoYGnDhxAna7PYgEPSQkBOHh4cjMzMRll12GOXPmSPdUda3ONlSoOvesagAMNVSIMhCcvRzsu5OTk2EymYSn0u12Y+fOnWLgXH311ee87ry/UaNGSXBNfR5m9gwGg8wVZV1TU5M49XQ2mB3iXlZlHgBxYLdu3QqXyyWQM15z5MiRiImJwYwZM5CSkiKOn1rDOdRz6OfzXNZMfw3Synz55ZdwuVySlSTcGzgd9FNrk1V6Cjoh6hpQ/6hBK8IK1XICDvXeB+OyXrVqVVC2DgjmsjabzYiKihKHhGgSIrVIX7N161YAQE1NDbZs2QJN0wTyyj1JvUTDlLREfn+gbvOiiy7CVVddhfj4eGnus2XLFrz22mv4+OOP0dDQgLi4OAABfXfixAkYjUbhsvb7/fjtb38rgeh3330XZrMZc+bMwYgRIzB58mS43W6MGjUKVqsVra2tklnV6z3et5oF4utqwJdc1vPnz8eiRYvwxhtvICoqCo8//jgWLFiAG2+8EWPGjJF+Ew6HQ9Zb5bJW14p/V+eK96E2E+N+4GcaGxtx6NAhHDlyBAcPHsQf/vAHhIaGoqmpSWpd/57B+2CgXkUR8JzX1tZi27Zt8roa5B5MHpnNZsyaNQsTJkxAf38/ysrKEBsbK82pysrKcN5552HUqFGw2+04evSocPg2NjYK64X+LDOQcPz4cRw6dAhVVVXo6uoaVHaqGUTO/dfRc3wWfiY5OVls1dra2iAbLTo6WuQT15I/n02+MLBlt9vR399/xvv18p2BUb5GW4WylWVP6pxwP7MZan9/P3p6etDd3Y1XXnkFBoMBsbGxgrro6elBaWmp3ANlM21nOsJqp3zOrT5YqgbNmJj6d45vvYNqtVolY2q322G1WmG324Xc/d
SpU+IIcBPTgVXbwlNA8nCobdGZCaSDpa+10Sv67u5u4fxraGgQYu+hxmCCgYKbNSPM0o4aNQpAwOCcNGkSjEYjli9fjqqqKrhcLmlUAwQUKBWQwWAQGhlmIQBIppmde6OiosRZyM7OljpVCnNCXHi/drsdxcXFEjGy2Ww4fvw4fD4f5syZIwKXER86DZw/FpsTs+92u7Fr1y5kZWXJ/VmtVoGs1tTUwGAwID09HZqmYfz48fjoo49gMpkwadIkiYa++eab6OvrC+JYNBqNktUmBQOjsexsTEObwk5dI7UO0mw2yxwyq6EGOWgEcq4oSFTjhXvq2muvlaw8habVasXEiRNhMgXIqgn7eemll+BwOPDoo4/C6XQiMTER8fHxACDdZdvb25GSkiK8kFTCFFgGQ4CmiMGXa6+9FgkJCXC73UhJSRFHwmKxoLe3V84Q94HRaERFRQXWrl2LTz/9NCjQUV1dDaMx0GxjxYoVorDKy8vFgOdcMmgwduxYAAFjzOPx4O2338HicPwAACAASURBVIbX60VycjJ8Ph+ysrJgtVrR2NgoztC7774r5PTt7e2ora2F1WrFokWLcNttt2HYsGFSy00jnU3N9MEEdZ25DnpFpSpH9awS8t7Q0ICnnnoK8+bNw+23347Vq1fD5/MhPj4er7/+OiorK3H8+HFs2LABhYWFZ9Ty/qsHo7ehoaFoaWlBZWUlXnjhBfj9funCfNVVV+Hee+/F4sWL8atf/Qpeb4CgfuXKleju7saNN96IyZMnY9GiRSguLsbzzz+PV199Fa2traJQ1WYPdrtdSi3UM0Jjy+Vyob6+Pghmyb1EGohZs2bh0ksvxfjx48+I9A829HJWjTzz93MZainEuWb6CGlm0JP7iDXHrMs/27XUTBu/NyUlRRwhyk6DwRBEwUMnVc00p6Wlyc8MEqowWJ4BrovX68Xq1asREhKC7373u0EG38GDB9Hb24sTJ04gKioK0dHRKCgokJp41RHhfav/hjJoB/v5bIOZSs4hex+o8ll1QFSki4qO4M80BlV5QP2n1iqq98hrD8VlzS6sQEA+sBeEymXN80C5xD4ZzK6ZTCbJfDudTvj9AS7rDz/8EF9++SUGBgaQnJwsXcIZ9NMHH0ymAJf1hRdeKEE72ksHDhzAG2+8Ie9Vu99v2LBBmlINxmU9btw4+Hw+pKSk4Oqrr0ZPTw8mTpwomWy/P9AUit1ph5pDNWBCHcOffT4f1q1bh/vvvx/z58/HXXfdhe3bt2Ps2LFYsGAB7rrrLixatAjTp0+Xa6qZZXUvMqNHea3uCfVMMRBN58LnC3BZt7S0oLW1Ffv27UNjYyNsNtvfzWXNhjpWq1UcDlUXcf1ow6pNcVTdNBgyjzbNtGnTUFJSItnU1NRU7NixQ2Dfzc3NWLFihVCkxMbGwuVyoaOjA6dOnTqjTp3/GMjYv38/Dhw4II4ZHTaOrxuAVZ0tOoEMyvCZ2Om2r68PF1xwgdybak991aB9R9mgz7arckx93WAwiC2s2oZEqKjzoO4nfhflLkvp8vLyMGXKFFxxxRWYNm0axo0bJzB7fo7fw8AFbWnKNwbW1P9VKjbVmf13jW+9g0rDCIA0MCK0qa2tTcixw8LCYLFYBA7S0NCA+vp6WCwWDBs2TIS8WmzP67NzJ+GC3BCqIqZjRKUyMDCAzs5OUU5bt26Vbq366DWvM5iS5uGLjIxESEgIenp6pDMj4cUff/wx7r77bhiNRrzxxhuorKyEyWRCdHR0UNE5uVGBAIxTL0SYdaOxk5SUhOrqagABAZKRkREUsTcYDMjNzUV0dLR0CPR4PNJkSYU+qAaqSrtBB4bUEPv370d+fj4uv/xyAMCYMWOwa9cupKamwuv1or6+Hh9//LEI9/3792PPnj3w+XwCXdQ0DcePH4fBYMDll18ur/HeNE3DjBkz5Ln1XdJUhak6lUBAAbDrYlxcHAoLC8+gbVCVMAWUCo1TnVHCivgZRoZdLhe++OILLFu2TJoFVFdXw2q14uc//znuvvtuhIaGYseOHRg5cqTcJ+kQSG6tKjw23/D7/cjJyZH6YbPZjJycHEyZMgXnnXcegICR5nA4YLVaERkZif7+fpw4cULOA6EtmqbJ/KvfZTKZ0NvbK+coLS1NMg6EYRmNRuTm5kpwiQY1u19WV1fDZrOhsLAQn3/+OdasWYO+vj54vV7s3r0bmqYhJydHYHNerxe5ublISkrCqVOnYDQakZqaKpQJRFpwD9Lw02fC+fxcQ73yUmWPPlJMQ//999/H/Pnzccstt2D+/Pm488478aMf/QhLly7F2LFjcf/9959x1v+Vo6qqCmvWrMFf/vIX3HbbbXjiiSdw66234rHHHsPy5cuFC48wprvuugu/+MUvpCX/kiVLkJqaiu985zsoKirCK6+8ggsvvBCTJk2SzI6a0ePcGY0B2hev1ytZbBUWpcoBBjYmT56MK6+8EnPmzEFYWNg5GR5AMLyL/5+r48OhfmawAMbZhmrQMVvW2dkpnKhE2Kjfo5cVg41hw4ZJEJWGM/ekCrtTdQjh/mp2lfNOHaQPqtE4ogM0YsQINDU14YMPPsDmzZuxfPly/PGPfxTYX2hoKEpKSjB79uwz5kh1/M42V193cE9t3rwZmqZJQI9GnKZpZ+Wy5vlU74EBLM6Lmq3mewdzsIfisiZEk7QTfw+XNelkhuKyXrduHb744oshuaz5HWpAXc9lzc65XCM9l/WSJUvg8/mG5LJmnwSTaWgua+7bobisVUilPigMnBuX9WWXXYZf//rXQ3JZ6+W3GnDkueHvPCNEb9A24Gv/KJe1Gkhg8kOFazJpoj+rPOeqs6jOjTrURj42mw0pKSnIz89HbGysNEecN28eioqKpAP44cOHsXXrVtTX18ueaGtrk6Zb/A693VpTU4PS0lLs2bMHp06d+kqkoN4OGgxNREeNNoDRGGgSVlFRASDA7ZqcnBwk74dyxvTXNhgMgsRiIF4fgFSDJyriibZsamqqOKPUWexzw89xbfkcXA+fzyddfdltmueVpUr6wBnPkIo0YOCT9cf6pBv/9u8e33oHlULIYDAIPNTrDVCreDweieJ1d3cHOYwU4Dk5OUEck2rEDYBEMFWnis4OM3Q0dhmRocKjoABOCxbi19VaOH0EVz9Uo5n49p6eHqxcuRJZWVkICwsTKpe9e/finXfeQWxsLFJTU+XzHR0dyMjIEPJe1n0xiwYE6nGrqqrECTEaAx06+czDhw8PinpS6VVUVCA8PBy7d+8GALlHNQqlOqvMlvBvfJ2O5ciRI2G1WrFz504MGzYMa9asgcfjQVRUFCIiIlBeXo6BgQFUVVXhz3/+s8AWU1NTMXnyZDFWSF3CQUFvsVhw0UUXBQlKKgnes0rhw8/q9x2bs2RmZp4B/1PXVXVk1L+rHek0TcN1110XRKNDqpK1a9fC6/Vi6dKlKC0thc/nw7Rp02AwBOC8rKUgFQsdAVX5+nw+nDp1SozV9PR0qad1uVwIDQ1FTU0NfD6fdH1kICY+Ph52u12glX19fWhsbJT3qM6pS
oGjaZq0OlfrwBITE3HJJZcEGYYOhwOTJk1Ce3u7QMa/+OILrF69Gh9++CHWrVsnz8QoptFoxIIFC+ByucSYePnll+Hz+ZCRkYGQkBA0NDTI2hw8eBB33XUXNE0TwU5ngs68er4Hiwrzd7W+TIXjqD/zfPv9fjidTnR0dKChoQEVFRXS4fWbGoTFP/3006itrRUuQnat9Hg82L17N3p6evD000/jscceg8PhwNy5c/HYY4/hd7/7HVJTU4MMywkTJqCoqAg+n0/qweiA8WwAgXPABlZUrIQ26lEtOTk5QXVZXzVUJ00NFqiy96uG3jj5uo4pcNqBiY6OliAmm6CsWLECfn+gC/e4cePk2vrs7lDfyb3E+VIzkupZUH9mv4HY2FjZ/8xqqevCa6tOwAcffAAA+PLLL7F27VopWWlsbERtbS1++ctf4qmnnsKKFSvw8ccfo6WlBSNHjgwyPtWgnPpM/P1cM9K8T9UoJ+pl48aNgr5Rs87A0FzWKvx3sEyrmk3mdfSBD/39AcFc1syMWiwWaVjo9XolMwoEmjd1dHTAZDIJtVpISIjUKI8bN04cIJXLmllwZkqISFINVzVrrA49l/WECRMwZ84cCQyqTWBYr+z1es/gsqbcrq2txYkTJ/DJJ58gNzdXEgHkss7KykJycrLQatBp7+3tDXLKuFfOFohSZTPnl1zWN998MxYuXIiPPvoI0dHRWLhwIZ599lk89NBDuPTSSwU5xmcyGAxBvTC4L6hz1XXnugGQOntyWW/evPkr9/Bgz0EUgJoF455jMI82Kp+V+1h1utXgvxog1wcIad+MGDECubm5MJlMWL9+PcLDw5GdnY3p06cjJiYG4eHhOHbsGNasWSPNu+Lj4+HxeNDS0oKWlhZ5DjU5w5/b2tpw+PBhlJWV4ejRo2fYPYPZQIMNIm6op6lzVq5cCQDi+Kk6YrB6YM6HKpcASLLKbrdLTxeuubrPeL5VWabWHlPmer2nubv5nV5voKEq9w9ZOnhus7KyJGDOhmWRkZEYO3YsRo8eDYfDIcELvfzj/uC+pZ1K2Waz2RAeHi7MFf/O8a13UOkIqhFiQhvVjUllrCopACgoKEBzc3OQsGeGhcNoDHS1pBBTnUkKBEJRaSCokcvp06fjlltuwWWXXYbhw4fDarUGGbGqwuShUyEWfJ9639x8vb29yM/PR2FhIVwuF1paWtDb2yvdbtm85uTJk8IVq2maROLptPCQNjU1SdMjOi8UAux4TFoJRgOrqqowZswYNDc3w2g0YvTo0WhqaoLRaEReXp5cu7m5GcBpqhA14+ByuaRZkt/vxyuvvILy8nJRyGazGSNGjIDb7ZZamWnTpsFkMuH48eP4+OOPYTAYxEHy+Xx45ZVXJMIOBHjTCD2lIUFlzrpb3o/aQY1jMAFLWHN/fz9ycnIkI6hmD1TFqhqkDGhomiaUBtOnTxfDoKenBy6XS+aptbUVzz77LD777DPU1dXBaDRi+/btqKurk2cPCwuT/R4VFSXGIo0LDrXT6smTJ0UJ8X6pGG644Qacf/75sFqtQisUGhoqZN90ujhvVOoMUPztb38LcmBHjBghEXhmHI4cOYL4+HgYjUacOnUKO3fuFOgar0XHXc1qs56Dz0uj3GKx4Nprr8WePXvQ09ODY8eOCWQ4KioKsbGxGDNmjBgKbrcbFosFbrcbPT09ErTR16ZxX1EmqIOKSTWK9XJCDUh90/CbqKgoXHbZZWhtbcXDDz+MN954A7/5zW9QXFyMsrIyjBo1ShqSPPzww0hISEBNTQ1effVVaJqGe+65BzfddBOWLFkSlAFRO5n6fL4gNIBqhKoGhBqAIeqFvIxq9upchhpcUse5Zl35Xr2xda6Dhjd/ZqkA963FYsGxY8fg9wfqIefMmRMUtFQdqbONzMxMgYOqQTR1H6p15szCREZGShkHDXMGlnifqiHJPbpnzx6B/7HJB4CgDuxhYWFITExEYmIiJkyYAOBMDuiv4+Trh3qPquPJZwQQxGWqBiioT/k7X1P/8blVh139HvU5VNnGn9U9rXJZs1Z/KC7ruro6qV9UbQWVy3r69OkYPXr0kFzWjY2Nch+DcVkzIKmeA547lbbI7/cPyWXNPTMUl/U777yD9PT0s3JZJycnIy0tbUgu656eniCo5VD7RV0rNXNIA13TtCG5rBcsWICHH374rFzWdrtddCXXkQFiOjN0Tv4RLmsViUeHp6WlRfhpVVuFwYrKykpUVFRIEIRrpTpTnDt9sE3d3wyU8pqbNm3Ctm3bpP5x9uzZmDFjBgoLC9HW1obw8HAkJiaKLmZzqM7OTtTX1wedF8oeBoz6+/tx+PBhlJeX49ixY7JG5zpHnNuEhATR6U1NTZLccDqdGDZs2KCyWu9oqnPBedCfgcHmUP0Mr8lyFpYP8bNEuZEhgoFYg8EgNjjtuqSkJIHyh4SEICIiAjabDVFRUUhMTERycjKKioowffp0jB07FhkZGUFnnGeNe9/j8Ygz63a7hQlk1apV5zTf/8rxrXdQ2dXN7/ejp6cnqEZSPZgcfJ1KPCEhQSDB+ggrcGYhstoFWDVO9FErflZVWsw+HT16FDt37kRZWZkcOh52NXtLQawXOsDpOhl2Iea9WSwWHDp0CF988QXsdjs6OjqkhT2jXOq1Wf/Fe2DLezqh6rPxcyyoVyOiaWlp4ngmJiYKfHT48OFBhjufRb0mHUW13qK9vV2eke3W6YScOnUKtbW1SEhIQF5eHhISEqRbb2ZmpigzPtuUKVMAQGoYdu3aBQC44IILBnU6SVugGimqAaAfmhaAYHNkZGRIBklVEOr3MAvAvaXWmHi9XlxwwQUCiaWhRrjXqVOnsHLlSuFEYyOtyMhIyRxpmib1x8BpCLXqNFssFkRHRwv0l1AUTQvA5LhWPp8P559/vrRK17TTTQqMRiMiIiLEufX5fPIzn4/XIVKBdcH79++XOvFTp06hu7sb06dPlwxdamqq1ESpmRPCeaurq1FdXY2uri74fD6kp6fDYrFg3759QVA1AEJTMTAwgIkTJ+LkyZOIj49HQkIC4uLiJCvAOSdP71DOiqrUzsUZ+nshjf+s4XK5MHXqVLS0tKCkpEQyNoQIGgwG5OXlCYn9okWLEB0djSuvvBIPP/ywtNOPiIjAkiVLAARkUGFhYdA5YedFOo4MLKhZPBq+dEoJtRoM7qUOVfbpHcPBIJjnMtR7V7/jXAYDWOr66xunqAFU1Wj5uiM9PT3oXrlfgWDHiuff7/eLLoyLiwtykNSg6GAGmcfjwcqVK+U67e3tcDqdEiwzmUxITExEZGQk0tPTceLECQk+/jOG+ix8Vj3KSJ131YFk8IcZ7MG4rPncqpGv6mD1f/V++F18bSguaxq/Q3FZU8+p66bnsu7t7UViYuKQXNYOh0PQUMxyqhlhv9+Po0ePnoFk+jpc1gDk/YNxWdPROhuXNQ35s3FZ+/3+s3JZ6+d9sL+rjqvJFMxl/d5774nTNBSXtZr1H4rLms2s+PPXCYJxENKsJh9U2DflB21Rr9cLh8OBpKQkgWATKq2W
oQyli/RykfuEz+f3+3Ho0CGYTCYcPHhQnPXx48cL1DwxMRGzZ8+GzWZDZ2enZPy7u7uFh1idOzUAp2maOKsHDhwI4qk92yBtlhqQoi3IxofFxcUAgm10vSzT23d8jehCBibUoQ92qf9rWgCxQ0QQZZTL5QryCdRgEe0W9awT5q3emx4FQ7kVGRmJlJQUZGdnIykpCQ6HAzExMYiJiZF9QwSS3W5HVFQU7HY7XnjhhbPO8TcxvvUO6u23346f/OQneOKJJ/Dkk0/i5z//OZ544gn88Ic/xDXXXIPi4mIkJycjPDwcNpstiN/MYrFIvUJ8fHyQclEjpfyftVaDRVxUI4wHVtMCDWo+++wzJCQkoLi4GCUlJfjhD3+ISy+9VDrflZWVSTRQX1Ogj4xyqAcwKioKoaGh6OjogN1ux7Bhw7B+/XqpG4yMjERbWxv8fr90dmUURiUFp+DKyspCY2MjDAaDFKxTwQ8MDIjzqSpmZuNocLJ2lREwAAK1ocJiswDV2PB6vSgrKxMoXG9vL6ZMmYLVq1fD7XZLxPyvf/0renp6cODAAWkyZDabERYWhvT09CD+RRbUczB7M3PmzKB55jP19fVhx44dZ8CO9NFbdRCeQ65BOn80/lWDiEPNiNOopxNtsVhw/fXX49ZbbxXnl3P82WefSUTOYDCgvLwca9euRUpKitT/Go1GpKenS0dF1gepaxkbG4u0tDTpEhwWFiaGdG5uLgwGg+wXTdOQn58vDY00TUNGRgays7PF0eCz8DP6SOKIESNk/pmd6ezsFOqI5cuXIzc3V+bo3nvvFYqD9vZ2odSIiYkRSM3vf/97OXulpaVobW3Fu+++i4GBARQUFMja+P1+yf5OmjRJOFYJmwcgjjUNJxo1VOp6RaYPYqhOGP/OtVX//nWcoH/WKC4uxoYNG/Dhhx/igw8+wIsvvoimpib09PTInrFarSgpKUFISAji4uLw6KOPYvbs2Thw4ACSk5Mle9Ld3Y3a2lpomoYjR46IkUX5wX1OQ0w1nKl0bTab9ASgIUijiUOdJ86danB9XadfdYK/yhk+21DvQ28wq7DZzs5OmM1mOJ1OOYfsH/BVe0DdVyrU1WKxBM2BatDonVfuWYfDERRsIkRO1Tl07Bik9Pl8skYTJkyQCD9lyMmTJ6Vkprq6GgcPHjyDq1J1KM918Czr14bOpxq85T6iEcxzTh3OWv7Ro0ejsLBQ4HLqvtE7oaojrN9v6nnm2efPdCjS0tLQ1dUFs9mMrKwsfPe73xUkk8FgQGtrq6BuKGuLioqwfPlyrFy5EidPnpT3paWlobu7Gz6fT2rDycXp8XiQmJgYxGVN/lQ1m/Lpp59i/fr1aGlpkSzt4cOHg+aWAWL1GSmzH3roIRiNRuGyDgkJQUpKiqDA9u7dG8Rl7ff7sW3btiBqo7S0NFitVgwbNgxdXV0S6PzP//xPCZ6EhoaitbVVUE6qfaOf98ECv+oacl9rWgAZtXr1aixcuBDz58/HT3/6U+n6Pn/+fDz++ON4+umnMW/evCD9R6ec+4O2BfdQTU1NEE/8uQ5eT836E+bNfim8f0J8Q0NDZZ/zbFNO6mWg3iHVI/4YZCCc2+/3Y8KECbBaraiurkZ9fT26urpEvtvtdmzevBkfffSRILImTJiA1NRUdHV1ITExER6PR3q60EHjs6mywOv14vDhw9i7dy/27t17Br0RcLrJE1EfPK8GgwFOpxMPPvggli1bhsbGRoSHh0tXdzW4rtex6s+8LzYsY4dzdf/onX29Q83MdmJioryuUh4Rpk0qH+4dNgF1u92IjIwMcqpZDsLu6WyOSkc0ISFBqCtHjhyJ7OxsFBQUYPjw4Rg+fDjGjh2LwsJCjBo1CmPHjsWcOXOwdOnSr70//9njW8+Dqm4eKher1YqMjAxkZGQI9Eiv+DZs2IA9e/YIXxk/rx581QlVoyLq4DVpIOhhoVSifJ1Oz8SJE6Xr3auvvoqjR49iYGAAI0eODHKMh4oc8nceTL/fL/AyUkps3boVAwMDGDduHOrr6yXbqQqOnp6eoMhTREQEcnNzceDAAWiahry8PLS1tUmjB2aXVGM8KysLe/fuleZTsbGxQVAPNWrW2dmJ8PBwOJ1OhIaGSmSZ3c0A4MCBA1iwYAE2btyIrVu34tJLL8WaNWtwxRVX4LnnnsOSJUvQ1dWFkJAQ3HnnnUhNTUVZWRmOHDmCzMxMTJs2DW+//TZ8Ph+WLVuG6667LkjAMENw0UUXCbRTFZSMSFVWViIqKkoEs2q4MMunrgX/pzLp6OiA1WrF5s2bg5xhfo51MDTqDQZDkILknDOrzIjvlClThHMSCGRWExMTERsbi8bGRtm78fHxaG5uRmpqKuLi4nD8+HHZx5qmITMzM6g2MC0tTYQ1a0+qq6uF/iY0NBQVFRXweDwoKSlBVFQU3nrrLVgsFowaNQpVVVUATgcl2Ajkyy+/RGFhYRBtEUne3333XYkMV1ZWwuVyoba2FnFxcVi7di2uuOIKREdHBzU+iY2NRVdXlzRB07QAP3BERITA62w2GxYuXIgHH3xQzp7T6cTbb7+Na665BuHh4Th16hTKysrg8wVqVuPi4nDkyBG89dZbePHFF9HQ0BCUkXa5XMJ5qModGqqDyQfKFu4vvSH4TQ2/34/rr78ep06dQk9PD7Zt24b33nsP4eHhCAkJQUtLC+Lj49Ha2irddP3+QLfHiooKLFmyBNu3b8dHH32ElpYWeDweHDx4EFFRUeL8c18RKaIajAaD4YymSNzrzO4NZlwMlqXkOBcH858xz2qg6qu+kw7SwMAAqqurpfRiw4YNmDNnDtra2jBhwgRpLPZVDhyfX9MCiIjW1lbph8AstX6oc9jV1SVE8KxnUztjqgasPoDicrnQ2tqKYcOGISwsTNbU7XajoqIC/f39wm8bGxuLoqIibNy4Ebt27ZIgRnt7OxYsWBAEb01PTw9y0nl+iB5hAI1yksGpwYxvnieXyxXEa0jHXc9lHRkZiZaWlqDyDs6Zen7V7Ai/S93PwNBc1uHh4RgYGMC8efMG5bIeGBjAgw8+iOeeew5G4+Bc1uRz379//6Bc1gaDAcuXL0dfX9+gXNYxMTGw2WySlRyKy5r7ZzAua6fTicOHDyMxMXFQLmubzYZDhw6huLh4SC5ro9GI/Pz8Ibms58+fjz/+8Y/iMDAIQbQUA2dqxlBdd/2+B87kslaz4kNxWaempuLOO++U/a/nsjaZTKipqUF5efk/xGXN+yL6TIXYq41tGPTjmVWD3HwevRPGc8x5P3nyJKqqqmC1WrFlyxb4/QHe27y8PMkMW61WNDQ0YMSIEVLCpe75/v5+REdHo6enBw0NDbDb7Xjvvfcwfvx4XHrppQgJCcHmzZvR2dmJrKwsdHd3w+VyoaurC2lpabLneCZ532azWZBThPSmpKTI3KiIQiYm2Edh//790jDJ5/Nh5MiR+P73vy89VHbt2iUyTrWR1cApAOTk5ODo0aMia9Q5VOeUP3P/MYvscDhkL9LuoA/BYCQAOcuapklHZTrGXHc+mxoYYyN
QOrjk0iba02azITo6Gk6nU+RKSEgIXnvtNUE+MOHx7xrf+gwqEJxNVCM36j81gkxIDAuRCQtRnQ6+V1XYjA7pBYM+O6B+J51Xvlc/DAYDFi5ciJ/85CfIyMjA4cOH0dLSElSjpjqsqkGjKljVgWXdETMje/bsESeRz6ppmnTIVI2gvLw8qc0wmUzIzMzEsWPH5P4z/xdCy0w0FVB9fT1sNhs+//xzMab4/FQwXq9XmjmpMBDONQ1dp9MpgorwMaPRiL/97W/y/QcPHkRbWxsuvPBCWCwWlJSUYPny5dLFjIO8mNdff718l8fjwZ49e6BpGhYvXnxGpjQkJAShoaEwmUyoqKjAF198IVlGVWjplQSfh8/KLrhOpxMbN24Meo+maUH0RZxLziu7Dft8PjE6fD4f8vPz5bNshjRjxgxcccUVsNls0iLe7XYLZ5vf7xdHQm2Skp6eLuTeISEhyMnJkUg7DW3WEvN80BAkXKe4uBhutxtlZWUyjypkyuv1Ys2aNQACnXyBQP10b28v6urqBIbW09MjQvfmm29GaGgoPv/8c/T29mL+/PlSI2swGLBgwQJxEpmFVw1eNhBbvXp10Pl0u904fPgwjEYj7rjjDmkI5vP5xLkmN5zX68WoUaPOoEnhXu7t7ZVAzVBOBve5aoxwHQczsP6VY/HixdA0DQ6HA/Hx8Zg8eTKysrLkfDQ1NcHr9eLee+/F448/jrvvvhurVq3CuHHjUF1djTlz5qCkpAR//etf8eabb0rHXmad7Xa7PJPRGOgGHBMTEwTbo9yh1lKzUAAAIABJREFU08/IMaGqlEX6DOfXzcKpJRaDOb1f91qqLD+XkZiYKHA9TQsgdXbt2gWfL0D19J3vfCfIidIPzpP6/AaDAQUFBWc47HQyeWbVwKYqR1JTU4MMRaJhgNNZXw4GLhMSEhAVFSXdZx9//HFs2rQJ69evR11dHU6cOIGwsDAUFRUhOjoaJ06cQFNTE4YPH46bbroJiYmJKC4uxssvv4z/+Z//wdtvv42Ojg7U1NSgpaUlCM2kOh9cOw7WhgOnAzx8Bt676qhzrQnz9flOc1kPDAxgypQpQ3JZUyar86LPqHCu6ODxdzY8IRpgKC7rrKws/OY3v8GTZ+GyJgJoKC5rTdMwZsyYIbmsGUw4G5f17t27JVM1GJe13+/Hpk2bZA70XNYOhwOjRo2C1zs0l7XJFGgmOBSXNWs6z8ZlTaqes3FZD+Zc8L2qXUgHkUG0c+WyDgsLw/r16/9hLmt+vz5zqN6v6sgNVrvJfUnoMWWFx+NBXV0dBgYGsGXLFlRUVAQlMCgf2EHW5/Nh+PDhOH78uOx92mGqMxcTE4OCggIJ1CcmJqKlpQUrV67E+++/D7vdjuuuuw4TJ04ULt/4+Hj09/ejra3tDA5TNWDA9aqvr8fu3buxa9cu1NXVwePxiH2qBjSZgCLEmtDkxx57DEuWLMELL7yA4cOHY9GiRfjBD36Aiy66SK6jykqWq2iaJrKB86i33/mz+h6uQ1hYGIxGI4qKis5YJ/ZUoGPK9YyIiBC7hfJODdiqaDR25u3v75eaVRXOy27qFosFYWFheOCBB9Dd3R10jX/n+NY7qKqwUFPyfB04LaTUaLHT6UR4ePgZvIiqQlKzWryWel1eT4U0qN/NfypkQf9PdWC/973voaurC0ePHsXnn38u36M+Gw+PCnfh6+qGNJlMQs7d19cHs9mMZcuWSWtrIGBIES7C62ZkZKC2tla67FosFpw8eVLmYOTIkVKkzrllrWNxcTEaGhrg9/tRXFyMuro6AMDEiRPl+oxy0+jXw61Yw+Z0OpGcnAy/3y8Q7Q0bNsDlciE2NhZ+vx/Lli2TiGJsbCwcDgeOHj2KzMxM4QY1mQJ8cgUFBbKuFPpq4b866DBVVFRIu/qDBw9i8+bNZxg1HPrX1WguBZLL5cKmTZuwadMm+P3+oAZIqoFJ5cnB9Rs3bhwaGxtRVlaGmpoa1NbWwuv14tlnn5U9xLrb9vZ2+HynqWUoaFmLTFhRVVWV7KfIyEhx1AAIN6oqeO12O0aOHIkTJ04AAPLz8xEaGoqQkBCJmPp8PnGMmUWlkrnwwgslAELhHhoaKjD1TZs2YdasWWhtbRWo7fDhw3HttdfipptuQnJyskTWeUZVuAxf27p1Kz766CMxalSILu+dkH2O6upqhIeHQ9M0zJ07F2VlZXA4HIiKikJcXByio6PlHBDWzEYfdFbVtefP+gCTquS+qWE0GtHR0QGv14uOjg7cfvvtuOaaa1BaWgqPxyOwpcjISDzyyCP47W9/iyuuuAJXXnklNm3aBJPJhEsuuQR9fX2w2WwC81edbjUj0NfXh+zsbEybNg0zZ85EZGSkNF5RA32qDBwKpTLU0BsEqpH3dedXny3jv79nxMXFwev1IioqCvX19QKXZHADgHQUVfef6mgONgjD1de5qs4qn13VKy0tLUEZUgZMaOgSnk1dweH1enHXXXfhvvvuw3/8x3/gvPPOE7qz9PR0HDp0CNXV1eLo5Ofn42c/+xlWrVqF+++/Hy0tLZg6dSry8/OFq3j16tWora2VIKqaxVPLPdS1jYqKQl9fX1B/CTpcDCoTMsnXVDQKB/UPjbzBuKzVoWZbVKdfX7vq9wdzWXOthuKyfvrpp5GZmXlWLmuiaobisvb7A/0WhuKyZhPFs3FZt7a2Yt26dUNyWfOM8mzouaw1TcMdd9xxVi7rzs5OtLS0DMllrWka0tPTz8plTXk5FJe1uk5cSzXorp4frqt6xqj/z8ZlvWTJEhw5cuQf5rLmZ1paWgYNbtMmUp9LHyABEBQsXb9+PTZu3Iht27ZJMN/nCzTJ6ujokHNNRBfRS36/H2VlZYiIiJCAms1mg8PhkO9WmTAoq6Ojo5GUlCT7zOPx4IMPPsDq1atRUFAgvNUNDQ1oampCXFwcent70dTUhPr6+jOcbvW8GwwGNDc3Y/v27UHNONWu6GzQyHPPM8nyhZdffhm333475s+fj6VLl6K4uBgLFy7ELbfcgrFjxwYlAyhv1W7g6lwPlozq6+tDbW0tjEajNDDauXNnkAOsl8VMDMXExMi1VB3DPU5HlfJDvR7lgMFgkH4aycnJuPHGG/HUU0/hhRdeEKrFv4cC6V8xvvUOKnBm8wx1qMKK7zGbzejv74fNZpNW7yqsYCgDQY16qRFfCn3939XMASOavPZQP5vNZmRnZ2Py5MmwWq3o6uoK4s5To92qU83Pq93tGI2hg9Tf3w+Hw4E333xTak15qDkcDgcqKyvR39+PkydPwufziZCjIwhAeEaZJYuPj0dhYSHsdrtQvpSWlkqGlSM2NlYMCGbMVGXPtt8bN27E2LFj4fF48OGHH+Liiy+GxxPgZLzxxhul+x0AiSRdeOGFeOedd+Dz+YS8u6+vDy+//LI4eQAEOvPqq6/C6/UKfQ7nT42cAQFIh9PphM1mw44dO7B161ZomhZkkHM9BsvWcI8Qptrb24u1a9dKIwAAQZlfg8EgUGwAkrE+efIkGhsbpe6LsDd2dOS1rVYrmpub4fP5JH
LHe62trQ0SwqRjYpMTtdlJbm6uROe5/7KysqQhFoUlI+d1dXXy/OwgqQZqfL4ArxmN44yMDKSlpaG5uVmgpRs3boTL5cKNN96Iq666Cl988YU47NwzBoNBMv18Xk3TxIHs6+sTR5IGN5+5s7MTL730EgDg7rvvDoqGkoOQ1zp69Cg++eQTlJWVobu7Gw6HQyLN7e3t8uzkIWOmR22upAaUBsuWfVPjyy+/xGuvvSawadbGUC55PB6kpKSIEaxpGtra2vDjH/8YZrMZ27dvF/manJwMi8WCI0eOBGWu6IQ3NTVJV18gMLdJSUm47rrrcNlll2HcuHFB+0LvtH7VULOi+kj3uXxeHer6/yNOKRAMx6UsaGxsREhICHp7ewWS39fXF0QBoAY9v2pwjdR5V40pNRJPA4yRetKJABDkEI1zGnucR5/Ph8svv1xQEv39/dixYwc2bdqE9vZ29PT0ID09HXa7HU1NTdi2bZtkzEmF1dnZiRdffBF79uzBn/70J8ybNw82mw3r16/HM888g0ceeQSapkmTNfYlOHbsWJDx39HRgdbWVlRXV0udLOUu52Ew55F/Vw19/huKy1ovw9XsCeeG82Q0Ds5lzc8PxWVtt9vx6KOPnpXLmnt6KC5rIKD3huKyzszMhNF4di5rnlFNG5rL2u8/zQmv57JmydDZuKw/++yzs3JZm0wmLFiwAAbD0FzWCQkJZ+WyZp8H/XnQn2W9YwqcO5f1rbfeijvuuOMf5rJmrSLPP+1O7lvaA9xjql3HDLhaSqHaIN3d3QJfT05ORkZGhgQ0HA6HdIF2Op2IiYnB4cOHpWZS0zRBq+3atUtgzMz2EpVFPZmQkICxY8di1qxZEmy22+1Yu3Yt1qxZg08++QSxsbG44YYbMHXqVMTExMDlciE6OhpdXV3o6OhAbW2tnB+VppG1mMxwAhB6RtZwqkE5VdfSHua12tra8Otf/xo/+MEPsGDBApSXl+Pqq6/G97//fdx000244YYbxPZSg8n631U9QZQAzwlhuPQjeF+0izwej5w1lpTxZ1VmM6BP55R7nfPB91gsFkyaNAn33HMPbr75ZkyaNAkRERHo7u6Wrthqg8h/5/g/B/V/h6p8BntNrzycTifMZrN0wlOzovxd5e1Uoxfq9fmZoYwlKrbQ0FAhq1a/UzXSLBYLrrzySowfPx4pKSkwm82IjIyEw+EIckLUiJM+EkWDhL/zf2bM2JJ62bJl+PzzzwGcdkp4LWZMDx8+LNdgZI6OBmsA+JxFRUVBdCOadhqeRKGjRvH5mtoNj1lF3gMx9dXV1Xj//fcRFxeHxsZGXHDBBcjOzkZ1dbVAULiedFYzMzPFMKMzsXjx4iAYE+t95s2bFxT55lrwXgcGBgRm1NHRgYiICGzZsgU1NTVBTiSvq/6uwjn53AwYkEd3165donSplFTaG6/XKw6c3+8XeOtrr70WtAdZgxQVFSV7jAYa143BBl63t7cXoaGhqKysDGrZD0CaMxEqbjAYkJ2dLbyZvFefzyf8cFx7RgppSGzfvh3r16/H559/LrWIx44dw8iRI1FeXg6v1ytBBTZk8fl8WLNmDaxWq5yHkpISdHZ2Yu7cuTAaT9d+GQwGPPnkk4Ic8Hq9iIuLE2NMVSCMxp933nlBZ4U8hL///e8BAD/60Y8wMDCAtrY27N27F6tXr5YGKG1tbUhOTsbo0aPlmhaLRdrB9/b2irJg9FSfGfomx6JFi7BhwwY888wzso/Ky8ulFKCvrw9lZWV46KGH8OKLL+Lhhx/GbbfdBrfbjalTp6Kvrw9btmxBf38/MjMzMWHCBOTl5QWR2TObc/z4cekASYip+uwOhwNjxoxBQUGBKHoAZ7xvqKEP7qnj686vKqP/GbBr1cBn9oWvvf/++wACsvSyyy47Qy+dy8jLywMQfK65t1SDRNVHfv9pKiXgtK4jBRcp1ngPXq8XhYWF6OzsRGhoKA4fPozZs2dj5syZKC0txbFjx1BTU4PRo0ejvb0d0dHRqKmpwdatW/Hcc88hLi4Ozc3NqKurQ0dHBwwGA2699Va0tbWhubkZFosF48aNw2uvvYaHHnoIZrMZEydORF5eHtLT0xEZGSlQOABieIWHh+PkyZNwOp3o7+9HV1eX6BVC2vg750jPZa1HGem5rPXZaX5Gj6ziGIzLmnX9DIbruaw3bNgAn+/sXNb8PjW4x6yYuuZDcVmPGDFCGtgwgKznsmZjJdXp8flOc1lzbwzFZX3RRRfh/fffPyuXdV9fHz777LMhuayXLVsmzvxQXNYMaGva4FzWdG6H4rJWA1+qs6Fm7NXsuxoAUXV3f3//P8xlTTmjIuBUWaYmSRiQVcdgZSI9PT3CY8q57ezsxMyZM3H++efD7/dj0qRJiIyMFF0OALfeeisGBgYErUSHNy4uTjphM3jNLst6Peb3B6gGU1JSMHnyZBQWFiI6OlqC58uXL8fq1asxYsQIXH755Zg0aRLCwsJQU1ODhIQEsYebm5vlWdUEBueGzcM8Hg9cLheqq6slQKGio1SZqp5dBjc2bdqEBx54AIsWLcLtt9+Ovr4+LF26FK+88gqee+45lJSUyLlW+0/QEY2Li0NKSooE63w+n7ANqM4pEVXcY0ajUfQsqRtVpA/tDzWowrnWJ8HGjBmDnJwc6S3A5mg9PT1B9sdQibZvcnzrHdTBoulqpExv8HBT9PX1ISQkBD09PUHCQm3drxpOKnxH/e7BMrZqxJWvaVqgI5rVakV3d3eQU6gedjqxfr8fGRkZ2L9/P7788kvU1dVh3759g0YK9fc02OBzs519b2+vZObIv6U6HJGRkWhubpbPqBE1j8dzRge7pKQkHD9+HG63G01NTWK0EsKlNixgh1+uDY0K/kxFtW7dOlEw48ePR1JSErZt2yZR5q6uLuzcuVOCB52dnSgqKpLi/5SUlCDYDBW/GmFlJoERewppteEGHQ+bzRbUlpzcZfrsB6N3+n2hro/f7xcnrLOzU2CqvDc180eYstlsFmfZ7/fjz3/+M9xuN37+858HQQMjIiLgdDqlBTkzdwy6qHsnJCQE8fHxQuujr9OgAcVhs9mEj41Qq4iICERHRyMjIwPAaSoEXq+/vx/r1q3D2rVrxWBihPDQoUMCnyV8qKenR+gJYmJi0NraKg5vTEyMNP8ipy8DCrwffeCHCAFC7d1uN/bt24ddu3aJA8ksYm1tLdra2uDz+TBjxoygdbTZbLJ3U1NTMWXKFEyfPh0jR44UiJ66Z4haYGbg3+WcAoEMS2Njo8yBzWZDRkZG0BypVEGEVXLvTJkyBU6nE3/+85/h8/nEsKdhRyOXv+/cuRPx8fGSWWdnTlXRalqgoVhOTo5ALYGha071WS3g3BolDXYdRq5VPfGPZE/Vs6tpmkBYuW/ZZIXB0ezs7L9rP9AwDQsLC5Ilqr7Rz5/JZJLsLbOqRBrQCGQWQNMCtY1GY6Be0+VyYffu3ejo6EBvby/Cw8Nx4MABVFRUCB2Xy+VCRkYG+vv7ccstt4gsIycfjbWPPvoI48aNg9cb6P66cuVKXHLJJ
Th58iTa2trgcrngdDoxevRoZGZmwuFwAID0i7BarXC5XHLO2XiEGQMguEaV+nswnak6B2azWXROTEwMkpOTgzq4qtfgPud8q4EC6pJLLrlEHBpmhrhWzc3N+Mtf/oIf/vCH4ryy6y0AYRugHGOTE9oQDAJxL7DJIUs6VGgfz9QNN9wg8pVdXE0mk8Du2UCRc6b2lwCA0tLSoLM+e/Zs7N+/HwCk22tLSwvi4uLg9wc4Nnfu3BnUTI4OkOro+3w+aVLDZpVqRv/888/H3r17cfDgQRw/fhyaFsi2x8TEYMSIEYiPjw/SlzTWuT9Ue0U9E1xXPp/qEA72s7pn1I6vfw+XNeUpm+SocF7eGx0ulRNavR8+D/eGy+XC9u3b4fV6UVRUhJCQEIwfP17uk8+alJSEW265Bf39/QgJCUFxcbFknz0ej2RTrVYrYmJioGma1DtSXqhnimfA4XAIQiAzMxMlJSVISUkJChIsX74c27Ztk3N22223ITo6WoIzMTExaGpqQl1dnZwXg8EgdhnRS/xev98vcPPu7u6gQCiHPimltzf9fj+WL1+Om2++GS+++CIMBgPmz5+PV199Fffcc48gAxi0io6Ohs/nw5YtWyS7aTAYxCZUgzxE6aklUrTd1DVWB3+n3cZ7V9EbvDYAeW5VDng8HkEwscTu3zn+z0HVBs+SqrVNaiRSzQLyUKiNEYDTzpyqfIBgCLF6UNTv1N8bDQLe25EjR7B79260tbVhYGBAmiIBEGjq1KlTMXXqVGRnZ+PRRx/Fk08+ibFjx8Jms2HPnj0Cn6SxokaJ9Iaw3jnn/ZKTsrOzEwcOHMBbb70lVDRerxf5+fmi7NRCenVu1GYnoaGh0iX2iy++EGgOlXlycrLci+pI6KNxAAQudeONN0ox/+rVq9HX14fS0lLB3hsMBmzatElqdaKjozFs2DC8/vrrGBgYwMUXXwwgkCl4++234Xa7cdNNN8lcE/7r8/mEkkTtsMd17+vrk2yny+VCeno6Zs+ejQsvvBAZGRkSKdaPwYwh/VoQztXX14dVq1Zhx44dYvDz+202G6ZPnw6/P9AxOTQ0FC+++CK++93vIiQkBFdeeSUSExNhNAbqOdmB12AIcGhxriMjIyWbx3OQkJCAjIwMibip60Rn8ujRo0EGSnNzM6xWq3SGHDZsGFpaWlBTUyMGIp0engs1Eq12da6oqMDEiRNRUlKCxsZGTJkyBR9++KF0ppw6dSreeecduYbH48GuXbukrpjPwaxgUVGRzLH6nVyD3t5eHDlyBMuXL8df/vIXqadhfYvT6RSD3u12w+FwCFSQyvnYsWNC52S326UBCjsRss28pgVqQdhhj7Wq/47xySefyFoYjQH+0nvuueeMjBONHp/Ph0mTJkldscViwQUXXACz2Yy//e1vsmeZoVaDMnFxcTCZTNi7dy92796NI0eOSFlFRUWFQIiB0/BWu92O9PR06U6tNr0CzkSr/D2DjpmapfhnBQ3018nMzBRHg3XsNPgYWVfP+FcNVZeRVkEP81XlqRp1B043XsrIyAiCr7nd7iBDm3XhNIb6+/vR29uLjRs3ory8HLfddhvS0tJw9OhRbNmyBceOHUN8fLzQan3wwQfSxER1cnmPhw8flrOxceNGREdHo6+vT2pb1b1BhI/FYoHNZkNTU5PUiHu9XoEHapp2Bs/zwYMHgwLO/H51rVSbwWQyweFwSH1qQUGB8G9zLvh+1UjnPQAQY59nivX21DVAoMSgpqYGOTk5WLp0qVCxVFVVoaWlBSkpKZINdrvdSEtLk2ckMkTVIyEhIYiNjZWGb0STGI1GoXABgNGjR+Piiy+WDsMWiwWxsbGw2+3yGmGUAwMDaGhokOdUO/2HhYXh2LFjaGpqgtlsRl5eHjRNw/79+9HQ0ACXy4WLLrpIIIw+nw8LFiwQ28RsNqOnp0dkBteOvStSU1NhtVqxf//+oNpIvz/AlRofH4++vj5kZWUBALKyspCRkSEOldobgM0WOX/6wbOi6gf9XlGHalv9vdkpygTqG6MxwPedmZmJ3NxcDAwMIDIyEqNHj0Z+fj683kADKgYNiPyqqanB0aNHAQTQMVdeeSUuvPBCJCQkyLnnWtLZJaKApVLV1dVBNeChoaFob2/HyZMnZS01LVCuwyaYdMpUFAGh/eq8ZGVlBZ2blJQUGAwGrF27FpqmYdWqVRg+fDiuvvpqzJw5Ew6HQ2wXt9uNjo4OVFZWim6lHmJGPT4+HnfeeSd++tOf4kc/+pGUirW3t59hzwPBFEVcdzr7JpMJ+/btw5133olFixZJhv6BBx7ACy+8gP/+7//Gfffdh7q6Opw8eRIpKSmSJSakmk01qedU5APloMfjQXt7e1BZG8+qmkRQ9yZfUx3s0tJS7NmzB9u2bRM2g+7ubsTGxgqK0efz/Z+D+v/DUBUzf+f/+giYamSbzWZkZGSgsbExqN5FvRYFtF6xqd/Nz9AgUAWeei1mIHNychAXF4fs7Gx0dXXB4/EITx03IjesChuYO3cuHnnkEdx8883o7OxEeXm5ZNL0wpX3qm7qwf4eEhICh8MhnF+ffPIJ3nrrLcTExCAnJwc2m02UdVNTk2TzcnJyzsgoDwwMoK+vD4WFhdKEpqSkRKDAJSUlcg9qYwo68eq80YFdv369tKtncwsgIORnz56NgoICgW+mp6ejtbUVXq9XYFCMaPt8AQoTg8GA4cOHB2V9mDm85ZZbAATDt+gk0bijcc/syOHDh7Fu3Tps374d+/btw+7du4OMu8EcUnX++RqvaTQa0dzcDLfbHZTBDAsLE7htYWEhTCYTNm/eLFksKmmj0ShUCmxOxJo1TdOQlJQUBOXTNA1ZWVlS32A0GpGdnS0NHLiH2aiDz2YymTBy5EhUVlZKJo4RXcLgyZepj5hzjsnVBwQM0I6ODmzZsgVms1lqGxl5Z9agpqYGJpMJRUVFsFgsoki4l0NCQnDdddeJE6Jpp7tJM5hiMpmQlpYGm80Gr9cLi8WC5ORkjBo1SjIpJ06cwB/+8AcAwPPPPy/RfyqZ+vp6+P1+gWxOnz4dK1askABNaGgocnNzkZCQIPzKpLtgS/lv2lH961//GhR4GBgYkC6PdHZ4FjkiIiIwceJEABCjavHixXA6nQL7o1Hs9/slOq5pGhr+H3lfHh5lea5/fzOTWbJvZCOBJCQQ9k1kDVqKglURt1atokJdOJ6Kpz89rbVVz2mr1lo9trZHe07VKloXtLIVQUABZZUtIRsJSSAhGwlJJpNkJrN8vz+G+8k7HzOpW+u5rr7XNVcy38x8y7s877Pcz3O3tMDj8SApKQnx8fE4evQo3n//fZnnAwMDaGhoEINERVjoui7FVSifjciYz9oop9RoiSpXP8vvjX9VhydfxkblXH1PB5emBaOOjBAOdW31/Dwf8xVVWaUqM+pz8vndbregXihzOA5q9GzOnDlCY9Pf3y95bV1dXTh79ixee+01gebRENizZ49Auuvq6tDT0yMQOV6P988aA2ZzMHf+lVdewdNPP40XXngB27ZtQ1tbG7q7uxEf
Hy+8yaQlS0xMlMqYPT09IruJWGA/aFpkLmvej9HQAyDOO6JnInFZq+OqRufUqAgNtHBc1l6vF52dnWKgmEznc1kDQXqdSFzWnIORuKyBQedPJC5rOqQicVmrTpFwXNbvvPMOKisrh+SyZjQ8Epe11+vFU089JXpcOC5r1egPx2Xd1dUlaykSlzURY8a1qgYguH5UOcPn4Frj51/UsUWnhzpPKisrUVlZiaNHj8Lv90sQo7a2Frquo6WlBT09PeLkYH4m9SHVeaI+i6ZpEpk3mYIpGJy/jHxmZGQgNzdX6GTS0tIwfPhwxMbGCsqvo6MDfX19aGxshNPphMlkEs5TOhiA8412wnxVw16F4q9evRqvvvoqPv74Y1x44YWw2WxYtGgRTKZgoaTMzExMmDBBnFyUHX6/HyNGjBDZtWDBAsyZMwff/va3pT4Gn10NHISzEdh/qr7mcrnw3HPP4a677sJdd92FH/3oR0hNTcXGjRuxdu1abNy4ETfeeGMI73xeXp7sXdQXPB6PFHQymUzo7OxEb2+vOCnUOaQ6USjzVRlOvYqRWUasGxsb0d7ejv7+fjQ1NUmRNVZq/rrbP72BSkVDjZiqSokaLeJ7p9MJu92OwsJCtLe3hxQKAQYT0lVBwt8bDY9wximPq42RMrvdjo6ODpSVlaGtrS0knK82o4FN4TNmzBjcdq7KXENDA2pqakI2TNUIVyF3kZQ7s9ksnjEWTvD5fHj99dcl0kUvNxfN+PHjYTKZpCIs7y0mJgazZs0ST1JmZiaOHj0qC5ie+piYGPmNy+USIxSAGKiapuHUqVPIy8uDxWIRr67X68WBAweQnp6Ob33rW/jWt74lXG2lpaUCzRwYGMDIkSMxfPhwGQ96q7nZAkHhVFdXJ4KM0C561iiEOScAYMuWLVi7di2qq6tl82PffvLJJzhw4IDMo8+iVPNzlgq3Wq3weDxYv349du/ejZSUFClLPm3atBCIktlsxsGDByWCQGiN2WxGd3e3kMQDg44B9X6GDx+OlpYWMd7S09Olsq/ZbMaIESNEceP8zszMlCJFhNZ4vV5njUa4AAAgAElEQVRcffXVmDJlimyYzA+iB1GFeVGpDAQCcDqdKC8vl807Ojpa5o/FYhFnDDevvLw8oTA4cuSIPNPrr78u0W7VI2mz2fCDH/wAuq4LxJ0FGzRNEwOI0L7GxkYZd0KCPR4PnE6nFJ04ceIEysvLBdZUXFyMw4cP480330RZWZkU72Bu7tmzZ2XO0Vj9R7ZPPvkEwGAUUaW90bQg1Pbqq68OkSXkOWSjwXnZZZehr68vJEqn68FcJI/Hg4GBAaSlpYnDwuVyIS8vTyBShw4dwpEjR9DZ2SkVwdWCGVQYOI+cTidOnz4t+YfqZu73+1FbW4u9e/dK4Ta2SJGTz9OMnmz211CNDio1p5tG04YNG2AyBWHzl19+ecQornosnIOU6ySSx11VpIlwoXFMmCflGqNNgUAAp0+flsqdHFu3242Ojg50dXXB4XBI1XY6anJzc/GXv/wFNTU1iIqKktxRlUKIhXHU4h0WiwWFhYWIj4/Hzp07Qwwh0q4AkNwqu92OqKgodHV1SUSRfc1z8pjFMshlTWQQZbEaFQ03zlwPcXFxogiygjswiLLhczG/jSgCfqbC++gQTUhIwJIlS2T86Hzr7OyU/FPWGzhz5oxwE5tMJnG28fyapiE3NzyXteqEra+vF71A5bIeGBjAwoULBVY5YcIEud/09HR5TnJZBwKBEC5rwpfXrFkj+4/KZd3X14f6+nqRtYzoalqQy5rKOwv5ReKyZsVrs9ksXNYDAwMyPnv27MH27dvhcrkwbNgw1NTU4JFHHpEKwFFRUYLKIkQ8XESN6yqc40LVwcKt0c/aVNSZmu5CDlRG3DgH1SAJ6c/cbvd5xcGMxo5671wPDMRMnToVALBv3z4JDhDiT4OS8Gw6XL1eL06cOCF0cm63G6+//jo2bNgQ4sBXo4IZGRmIj48PSfHiPcXExAg7Q1dXl1DC7d27F4sWLcJ3vvMdLFy4MKQo3MmTJ2U+ku6HTraYmBgkJydj2rRpqKurwxNPPIGsrCzJj1bTGdTAjbGpjj/CcVtbW/Hoo49ixYoVuPvuu7Fx40asWLECr7zyCtauXYtf//rXmD59Ot544w0AgwXn6NSlUamOAcdUlSmcH7y+Ou9orNLwbmhowLFjx1BeXi5BBDqATp48KTn7X3f7pzdQjQtUfR/umK4HyedjY2ORk5MjpPOqMaguJuNEVr0v6nEuTAoDHlPLYDNCRm+Hy+VCYmIiGhoasGbNGlRVVZ2H8eeLinRraytyc3ORkZGByy+/HD09PTh06BAOHToUEh2jMRkJ6mAUapqmSQ4CFauoqCi88sor0HVdIh8Wi0WMviNHjohgMplMmDx5skQ0KehYJMPr9SI7O1v6pLm5GUAobFo18lVieVaZ27dvH5YuXYpNmzZB13U0NTWJt7W/vx+1tbX48MMPcfbsWaxevRqBQACzZs0CEFQkXnzxRZjNZtxxxx0yF/r6+vDqq6+GjG8gEOT7VOeBcTNTnRcsUtHT0yPC5dNPP0VHR0dEQchzqtflKzY2VhQt5mZVVFSERET27duH9957D5mZmTh+/LhcJysrSyIlLS0tSE1NlYhyUlKSQITY78yr5DNwjvKe8vLyxNDkXGJE3WKxoKSkRPKWzGYzCgsLQ9YTIVaMynAtuN1uxMTESNXPGTNm4Lvf/S50Xce0adOwe/duDBs2DD09PWhqasI777wjxovD4cCBAwfg9/vxyCOPyHhUVVXBbDZj0aJFIf3L3O/JkyfLXKXB7PP5BNrGqoEmkwk9PT343e9+B6/Xi9jY2BAlyWw2C3zm448/BgD8+Mc/lmdeuXIl7rjjDlx11VUSpU1OTkZ5eTmqqqqQlpb2mSN4X1VTnU8cRyruFosF06dPD5EfALBt27YQZcflcgkFweWXXy5yiYqVWhyCChiNCofDgRkzZsDv92Pq1Kni5a6rq0NNTY0o993d3aitrQ2h0wAGjU2n04nGxkY0NDSgo6MDVVVVkuNaXV2Njz76CNXV1V/aOOU8+SLREsoKKsWUKTabDUePHgUQdIRlZ2efF/1QjUv1fMZGA4KReJ5DjVqq+5Ga2kJ5yntjbpTf70d5eTni4uKkRgPXts/nQ2trK5xOJ0pKSrBixQoAwTmxefNmqXrd3t4uuaE0NMIZzry/pqYmOJ1OXHXVVbjkkkuwdu1aiZ4UFBQAGKzSTmcH5xmfg+fiXAYic1mrzw2E57Jm//n94bmsVcUxHJc1zxuJy7qrqwuzZs0akss6KSkJnZ2dEbmsec9DcVnzWCQua8rASFzWDQ0NAs0Nx2UNBGXIUFzWDocDDocjIpc1jQca03wulcuacobGiZHLmvM/Epd1QUGB7J00CHU9lMs60hrn3OK4hitS9Hka9SIaXR6PRxznXIvsF8KA1RcwWEuB69J476rMVA3x3t5eWCwWfPzxx6KP0gnBucD9jn0KQKj/aGjpuo5169YhMzMTLpcLb7zxBkwmk6Ry8Ds
JCQmSO6ppmtQmoSPqkksuwXXXXYfRo0eLMV5XV4fVq1fj5Zdfxp49exAXFyd7CyPGdKbb7XZ0dXVh165dKCsrw0svvYSjR4/isssuQ1dXFwoLC5GRkYHy8nIkJyeLI5X0cpHWPb/H9xwvjtm7776LFStWYPny5bjnnntQVlaGO++8E6tXr8YNN9yAb37zm4iJiUFMTExITRmLxYIxY8acl3qk9rWqV1qt1vOKNTEyyrQh6ry7du3CRx99hPr6+pD1/3W3z3wHmqaZNU07rGnahnPv8zRN26dpWo2maW9qmmY9d9x27n3Nuc9z/z63/tU01bPFQQkXLuf/hBaxQi43EWBQ+KgE5uqmY4yUGs+t/uX36RE+fvw4/vKXv6Cnpwfjx49Hamqq5AukpaVh3LhxqKqqClnERihCdXW14OBZmILkyQCwd+9egdcaF59q+KrGEM+vGrIs8OJ2u9HT04M//OEPEgVSN2MaRhTiEyZMwO7du2UT0XVd6Eg0TcNFF10k11CLdlAIcAwoNHVdx9mzZ5Gfnw+/34+MjAzk5+fLYs/MzBQOObvdjsWLF2Pz5s3C3ao6DYCgl7q7u1tgWxxftZAQhRCfizAh1UBVx50RCMJJ+vr60NXVBZfLhcrKSuzatSukf1WFVD1mnNM0HukwYGGiEydOoK+vDzt27MBjjz2G+vp6lJWVobGxEXv27EFycrJUQG5vb4fNZhNqAs5lOgd47fb2dvF+Ez7C7xOiSsWCEfXy8nJ4PB5UVVVB0zTknquaDAShoWoUgediH6sRPLPZjNOnTyM/Px+JiYk4cuQIhg8fju3bt0ueUlRUlOTBNjU1CfzTarVizJgxWLBgQch5i4uLRTHk76OionDNNdfIc7P/qeTSkcIoRWlpqUQ1EhISZJ6q8588gnzOsWPHwuFwYN26dQCChZQKCgpw1VVXQdM05OfnY8qUKVi+fLkQ1v+jGvNeqKQ++OCDyMjIEGeZ3+8XyC+bsbgIq7rSg851q3Ibe73ekDwlbqZsNCSbm5vF+GEBpZ6eHuHUJDl5RUVFCF+tCrXzeDxISEjAqFGjkJWVJY6vrVu34plnnsFTTz2Fd9555ws5A4yOvXBNPa9qCHKdZWRkiHJBWh8qlzTgZ8yY8YWM4KKiIhlL1ZjmfVE+UY7yeozGMdfUbDZL1JspEXQseb1eoURgf9DI27dvH5xOp3AHV1RUSAFA5mADg7QM3HdUZZ/ntdls2Lx5M1auXInGxkbExMSgrKwMI0aMADBobDJ6zuruKrSQc5H3SYPJyGX98ccfw+l0hh1fozxWlUaVy7q3t3dILmueZygu608//XRILuuMjAyJaofjsuacicRlzc+H4rJmdCcSlzXXOCM/nOPMrQUGi25F4rJua2vDjh07InJZM1o/FJc1Ic+q8q5yWastHJd1X1/fV8Jlrc416haft9FZxfMRsUUHghpRo4Gioo44Hhw79hnHhr/nXFIr8Hu9Xmzfvl0MPh5n3ib7gzKM99vR0SG5lTxusVikWCHvr6KiAu+99x7Wr1+PxsZGSbthkUDCu433m5+fj/nz56O4uFjSmVJSUuB0OrFhwwa5Rnp6uhi3/f39KC0txaOPPooXX3wR3/ve93DixAl0dnairKwMNpsNRUVFKCoqwrPPPovJkycjIyMDAwMDIvvoOIpkrNIQVp1+fPGeent78fvf/x633nor7r77bvzxj39ERkYGvvOd7+D222/Hj370I6xYsQLx8fHipOf8VvOjGWGlIUxHBh18tEko4zj2FotFqiCXl5dj7dq14sz4Mg7ar6p9nhWyCkCF8v6XAJ7Rdb0AQCeAFeeOrwDQee74M+e+93+2qcaD2oybvrrgOjs7BWrEScJJx7/qb3gdVbioi9h4L1z46iY9atQoFBcXIy0tDaNHj4bX68WhQ4fEyAsEAigoKEBLS4soeka48tSpU9HR0QGPx4PVq1ejuroaRUVFmDdvHhYtWoSLLroImZmZsniMz8CFoPaPKmiN0VUql9xI3nrrLfzpT38SBZUwPwpD0pVYrVbhTZw4caLkhRAeBCCkoq7P5wuBfhGeCgSN8okTJ0p/P/zww+jr60NHRwcuvPBCtLe3Y/z48Th9+jSKi4vh8XjQ0tKCCRMmwO12o6ioCKmpqdLHL7/8Mvx+v3iCA4GA5N+q7z0eT0jxKtXj2d/fL3/VQkCE6ahzw+FwoLGxMaRQiNFBYIQNqXMVgBQQ8Pl8KCkpQXZ2NqZMmSICkjCrpUuXIjo6WqB33d3dCAQCkoPE5zh9+nSIMub3+5Gamora2loAkM2P8zkQCKCmpkbu0e8PFqViWXsgmBdXU1MDTdMwY8aM8+BzKjyefdnf34+YmBiBiv35z38WL6HX6xW6GYslSHRvs9kwbdo07Ny5Ew0NDQLzTU5Olmd56aWXRNFm8/l82L59uyjhnGNUXDRtEOabnp4eArf605/+BAACnVbHqby8HFarVdbwf/3XfwlMlceuueYaVFZWSuRq6tSpCAQCYtT9oxpzljRNw4UXXiiFjliYhhFh1cnHseI8cLlccryvr0/mkN1uF4cDx5rOB4fDIQ6jQCAgxaIyMjIEtk4Hk9VqFbqCESNGICoqCunp6WIY1NXVhVUUaeRMnjwZxcXFmD17tlTPbWhowNNPP/2Z+sjo0PpbTe0bRiuMfc69pK6uDlFRUejv7xeEQk9PD+bNm/eZrsVG2cJ1y/xMYFD5Nd4HMAh153OyaAmfg/nVuq5LXjAVJDoDVCO3paUFTz75pETkTCYTmpubJZ9dPTcdjmrUifPIGCHq6OjAj3/8Y+zYsQOlpaUAggYq9wOVWobnopKpHjc6qsllbTabUVVVNSSXNfvI2O+MCtlstiG5rDkfInFZ19fX49SpU0NyWXP+BgLhuazVPgzHZc37GIrLmnUCqGMYuaxZeE+N7AcCg1zWDocDJpNpSC7rZcuWYdu2bZKTxzlkMgW5rKk/DMVlrc5hjqXKZU0ZEAiE57LesWMHDhw4MCSXNeVPJC5rY4T+izY6adhUDlc6QdUUGHUuMzppPB/lQTinixrsaG9vR3Nzs6wTcoizYjuNP77UudzZ2RmSBpadnY2UlBTRk2j0MqWhpKQE69evl+gsn1t9RjIgqHPAbDZj6dKlWLp0KbKyskJQEpqmyf7qdrvx1ltvibE6evRomEwmdHV1wWw2Y//+/cjJyYHdbsfu3btx6aWXIi0tDTk5OcjIyEBGRoYEd6jz0xmnrllg0ChkfxubKj927dqF733ve1ixYgUeeughbNmyBUVFRfjpT3+K//3f/8Xzzz+POXPmyJpW5Q1tBhqlKnpFrRXA/uM9qu89Hg+OHz+OhoYGbN269YtP1K+ofSYDVdO0bACXA/jfc+81AAsArDn3lT8BWHru/6vOvce5z7+p/V8wxSM0bkJcTEbDi/+rn3s8HlGMGTJXB5mYedVDrk4IACGCxOhJUw0PXpMl3aOjo7Fjxw7YbDaMGzcOiYmJOH36NHJzc4Vjkd1tfB5N0zBr1iyMGjUKd911F+bMmSOeT10PwnKY+9Xc3C
-[... base64 PNG data omitted: matplotlib figure output showing a grid of five randomly translated copies of the test image ...]",
-      "text/plain": [
-       ""
-      ]
-     },
-     "metadata": {},
-     "output_type": "display_data"
-    }
-   ],
-   "source": [
-    "transformed_images = [None]*5\n",
-    "to_tensor = transforms.ToTensor()\n",
-    "for i in range(5):\n",
-    "    t = transforms.RandomAffine(degrees=0, translate=(0.2, 0.2), fillcolor=255)\n",
-    "    transformed_images[i] = to_tensor(t(pil_img))\n",
-    "plt.figure(figsize=(16, 16))\n",
-    "show(tutils.make_grid(transformed_images))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 32,
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
hH/zm9/g+zZWFgF/4rbw8HBEREQgKSkJkZGRUlW/ePEimpub5X3wer2or6/H7t27JYnncDj8+k9ZoVEdDeBSwobOu/qlwpa4Tq1WK6qqqoTAJy0tDW63WwiJEhISYLfbkZWVJet4//79EmAx6XThwgVUVFSgsLAQlZWV4oSyD6ijo0PWclRUFMaNG4exY8fCYPDBXZOSklBdXS1katXV1ViwYIEwbre1tSEmJkZgz1VVVejp6REUCAMfr9cLq9UqQXFMTIy8Z16vF8eOHZMxCqzs0ThnBoMBBQUFWL58ORYvXoyXX34ZjY2NuO+++7B06VL8+te/xsSJE6FpmsBseS11b+fYU3+R52BGRoY4eWoAxb2BlRKS+KlyCipaiWchP5MMoh6Px6/vMygoCBMnTsTChQsxZcoUtLa2or29Xfq7gEvBFNeIy+VCamqqJFU4PuqYqePI4FmtoqpwX/X6gf6D+u/eErmadrmWNfkCektmqgnEQJQVf47vYX9a1tx3OSdMMlDL2mj0Ebr1p2Wtaf4SRYFa1irLeW9a1mog1ZeWNatjfWlZMznfn5b1vffeK5/bl5Y1rTcta6J11J8N1LIG0K+WNcmevq2WtbquQkNDYbFYUFdXh3379mH//v1+awXwR2eoxj2TMHGHw4GSkhIcPHjQLzFDH9fhcPhVUFVdUc4vWetLS0tx9dVXo7y8XHzYzs5OvPLKK+KvMfkEXGJ15/6r+n8k52KgqOu6X5tXVlbWZQWTrKwslJWViR+ekJCA4OBgnDhxQkjAuKbYRqFqJFOtQdN8aBvC09va2mC32/Hpp5/iBz/4gfg8X3zxBUaNGoWwsDCcOnUKMTExePzxx5GRkQGbzSY+hbqHEs5rMpnw9ddfy37PMXe73YiOjhYuD8CHVCM/TFxcnLTq5eXlyVnJpAX9EErWqO0wvfmgges80L/7V5np7//I/22bM2fOP/Tzgeyb36epcMJFASyeFKbuz2666aZev798+fJvdV//qH3f8MT+7PTp037/Pnv2rN+/+xqz2267rd/rqo4OD2I6OerBx58LrE7QQSSjoNfrRWpqKnbt2oXu7m5cc801SEhIgMPhQGlpKYYNGwbgUr8fnRmj0dfTEB8fj66uLrS3t+PixYuYMmWK9C+puqmBmW1eR0268J6ZVaSzFhoa6udAMpP3pz/9CXfddReysrJw6tQpCVDUz5gyZQrOnj0runeff/65ELeQQTAoKEgy5E6nU+QJeC0eogzuWPHasGEDoqKisH79ejz66KPYs2cPJk2ahG+++QaPP/44li1bhq+//hphYWFYt24d7r//fixbtgzvvPOO9GIFBQVh2bJl+O1vfyt9U7t378bEiROxfPlyvPrqq3IAdHd3Y9++fbj66quxYMECvPTSS4iKivKDNHHOOYckYmAQVlxcjIkTJ0rw99Zbb+Hhhx+Gx+PB4cOHxdn8voxJARXCCFyqooeFhUlgx4CCuqWsnLe3t0uVB7hUiWZQqga+BoMB4eHh6OrqEocl0Mni+8M1QoeLFbfKykpkZ2djypQpmDZtGjZs2ABd1zFt2jTU1NQgPz8fRUVFMBgM2LNnjyQGqB0ZGRkpyS/2V5IVMigoSJJZ/Dtwqd9x6tSpMBh8xG6nT5+G1+tFYWGhkMrceOONmD9/PoqLi9HQ0IDm5mbp7aY2X2JiorA/dnZ2SsLGYrFIIstmsyE+Pl72FGoOBgY43BPUoMBkMqG9vR2bNm3C5s2b4XQ6MXToUDz88MN45JFH4PV6UVZWhh07duDo0aOCLNB1Hyyda7egoAAhISHIz8+X8VAr5EajUYIDrhvCUXt6eoSJXTXuQ4T/8lpMmvHvaWlpUvElggOAsIgHrhfuq1FRUVJFJ7yU90VTA2fV4Vf3WP4c71n9fm9a1oGJWa5pyoKQpZitNUSLqBVemho4q5/Be7BYLLBYLGhraxNSpoSEBCGjIVSQ17Lb7Rg6dCguXLgghGFHjhxBfn7+ZVrWfMbc3FwcOXIEbrdPy5o9nLzvrKwsVFRUSLWMlX81YcHEVHBw8GVa1nPmzEFaWppoWa9cuVLIxxISElBZWSkoHmpZ79ixAwaDQSCXWVlZ8Hp717J+6aWX/AKAjRs3YsWKFVi2bBlee+016fdjciQhIQHNzc147733cOedd/a6JlUt6/b2dmHSnzBhAgCfrzZu3Di89NJLl637/65xLVy8eBEulwsWiwVBQUE4cOAALBYLRo4cedmaUfcA/sl9lYy05FjZtWsXxowZI+vDZrMhNjZW2glIdMX3MioqSnpiBw0ahF27diE+Ph4NDQ3IyMjAli1bJMBkoj0wacNkJVFn6l4VERGB8vJyJCQkICMjQyr2lDnjnqxpGrKzs7F//364XC6Ul5cjMzNTzqVhw4ahrKwMmnZJWQDwvfddXV0IDg7GFVdcgY0bN8JsNsPhcAgxHoPdw4cP4/rrr8fMmTOxfft2FBQUYPr06ZgzZw4cDge+/PJLhIWFYenSpXjxxRfR2toKs9ksKBM+t9VqxcaNG8Vf4jx1dHTgtddew1NPPYXly5eLMgAAVFZWypmUlZWF2tpaOBwOJCQkCFqOewEDdk3TBALMeQ409XzlXPxvqKb+/76COmAD9l1aoE4pcDkBV6BjQVMDVG7+1M8bMmQILly4gMGDB2PYsGFobW1FUVERiouLMX36dGF76w3G0dPTI8721Vdfja6uLsTGxoo4dFxcHCwWC2w2m9CiBx5uajWEf2fAQviNWoVghZOEJh9++CFOnz4Nl8sllVJed/jw4aK3Sue2ra0NAKR3hwFMXV2djJ+a2ef1Ah11AJgxYwZsNps4+Xfeeado2plMJlx11VW4cOEC9uzZIz098fHxsNvtAkOvq6sTWC6NB2tkZCQyMzP9nt1oNMJut8vPq/fIjDHZih999FG5HueeXxy/qKgo3H777SKb9OSTT/73F+V3YCSFUiHJgD8igAkLq9WKBx54ADfeeKNf38ygQYOQkpIiDiidHWar6fircCNd14VRkzBt9kVyLbrdbmENZ8D7wAMP4LHHHhOHhw44K1BEYAC+9/S1116T3k4SnzU1NeHs2bO4ePGiMDoTZh8aGirBUHt7ux8ZEJEnhKRNnjwZOTk5CA8PR2hoKJKSknDs2DFs3rwZ5eXlMBgMuOeee/CDH/wAY8aMQVtbG5KTkwH4+rnq6upgtVoFpUPnhnC8lJQUREZG4oUXXsCSJUsEgcHnVedJrWxyHtXg6cKFC1ixYoWQ3l28eBEPPvgg/vznP2PlypV4+umnkZSUhLi4OBw+fBgXLlxAbGwswsPDhTCHciqcTwYmhOAzmcBqsRrIqugiQnn5bwBCuka4aENDA+rq6nDw4EF0d3fj4sWL0r9IOKpaUXI4HNIe4vF4pJIXuK75eazM9FZ1CKyUquOrBnGBWtaBQbDB4K9lHRERITqvvWlZ8/NVp1JNbgH+FbPetKzV6hivp2pZ67qvxaA/LWtNuwQNBi7XsgZ8TOy8H84/1wU/W/08QvKJTCgvL0dSUlKfWtYMIPvTsmYAx3EyGv2
1rDmWXKdMhhKBxWv1p2XN7/WlZX369Gnp9fw2Wtbt7e2yN6prj9I2ZFT+5ptvJLmnIk44F+qapXFfY2BWVFSEjIwMuFwuQTyQjFBFQQGQZKTH48G5c+fQ09OD2tpaIfu7ePGitE3ExMRICw7gg1BzDjhPKmdAZGQksrKyBJKflZWF+vp6+X2DwYDq6mr5d0JCApxOJ9LS0nDixAlJmAYFBeH06dN+iSW+X3a7Hdu3b4eu64KWYUIuOztbUAV1dXWiV3r99dfj/vvvx7hx4xAWFiZs5JMmTZLWrCNHjkjik2Ou7ov0twi/55lHcj61DUhFgKxevRq6ruPFF19EcXExvvzyS5SXl0tiXtd1eS/5/GQUt9vt8nlqK2JgdTVwz/lX2ECAOmAD9h1aYFAaGJyqQahqqhOiyhqkpaUhLCwMV155JRITE9HZ2YmOjg5x/Jm1pOMQeC232y0scXTGVO1BEpakpKQI9Or48eMSIHLTUg9hldCnt0BW/Xt3d7dfZSE4OFhgYjxohw8fLrCwjo4Ogd3Q0c/Ly5PrUWKEmyufhf/PzfzAgQMAfAcdgytWd5ltHj16NHRdF1ZdBjKE+vAAd7lc2LlzpwQcNJvNJr0rakWd9/H5559D0zQh0KJjDPiC1F27dgEA5s6dC5vNdtkaYY8Mx2nhwoWIiIiQSsD3aT09PeKc0NSDk/dI8gYGPoT9Pfjggxg/fjzOnz9/mXOtzh/1K0kqpJKOcX1QrsdkMqGyshI1NTWSoElMTBTmcbXvn/fC8WX/FRNKeXl5Ao3k2ktLS0NSUpL0P7rdbiQmJsLj8eDUqVNobW0VSDkra/ysQMi9xWJBRESEXzAeExODqKgomEwmrFu3Dh0dHYiPj8f8+fMFTskMfkJCAoqLi2WN1NXVyfvBHiiXy4WZM2fi1KlTGDRokKxDq9Xql3QCLifNUP+uVrI3bdqERx99FD/72c9QUVGB3Nxc/PrXv8arr76KO+64A2FhYYiNjZUgiL173D/Uint4eDjsdjtCQkJEIoOmoj7UCiV/V3X6VVgtKxIk/6utrUVnZyeqq6t75X3o6uoSx48Vd65dPjvXc29Ve9W55b/V79HJVNd0oJa1qiGtJtaYAGLl9X9Sy5r91WpyQNWy7u7uFjQI5yJQy5r30peWtaZd0hsHLteyZpBDp5zjoWpZHzlyBBaLpU8t65iYGNTX1/erZQ2gXy1rzjfPlEAta84HK1//jJZ1R0cHtm3bJp/1z2pZq3sMicGYYAJ8vgMRPyUlJUJIx3lW+1TVtR14fhO5wTXJoIzJFfaYq4mKgwcPSuKQSagHH3xQ2iY4p2azGa2trXLtuLg4uUf6KkziAr4ANiEhQfZqogJoTCCr50pwcDCGDBkiv0N2aM43x4CcLF6vV9B0r7/+Ojo6OmTPLC0tFQLNu+66CxkZGZIoAIDU1FRh3OX9Um5u1KhRsk9ZLBZcddVVcn8qgz0Dcibl3G636LNec801fmugqqoKp0+fliCX511FRQV27dolfbYxMTEoLS3FHXfcIYgZnmXBwcHo6ury4zvh/P9vgPbSBgLUARuw79ACX24VqgL494QEOjM0OtLMwBUUFMDlciE6Ohp2ux0RERE4cuQIZsyYgWuvvdbvOgaDASUlJRIscONW+6Bmz56NWbNmYcaMGZg3b55UMJ955hn8/Oc/x/Dhw6FpPkkYZmHVe+YhoDqyPHgDD75Ap7OrqwuVlZV488030dnZCV3XkZOTg3379sFms2HPnj0wmUxCMOTxeJCVlSUHs9pX5vV6pYrFg5J9VU1NTX7ZRIfDAbvdjm+++UakCwYNGoTPP/8cXq8Xubm5GDduHFpaWvDVV19B0zSBd2uaJllV6uQR5vvVV18BAJKSksSJ9HqIL7prAAAgAElEQVS9IioP+CRsmE3n+FRXV8uh4XA48Nprr4kGG+eK1bo333xTKiSU4qED9H1ZaGgoGhsbhcWQ46KuY1ZkzGaz6LTW1NSIs5KRkSHEI6y4BxJ51NXVoaqqSgKw7u5udHV1ieY0r094emRkpFR2AJ+zq0pIqNdWExtM8LASfebMGURFRUkWnNW18PBwxMXFSdB6/vx5lJSUwGazoaGhAbquo66uTmCRRBOo9wD4khNhYWGXMUDynezu7sbnn3+OtWvXYtu2bUhISMCYMWMwe/ZsTJkyBc3NzUhPT0d3d7ckVOjosGKvaT6IXl5eHhYtWoQ77rgDbW1tUhVqbGwUCBgr9IFjpFb+iB5gVevFF18USbEvvvgCzz33HD7++GNs374d69evx/XXXy/3tWPHDr9+be4TZrNZpH1cLhfi4uLEEextzjg+avIpMNCurq4WGB0J2xobG6V6o/bUUfe6s7MTmqYJ1JT7Cdn41QCUa7uv4FStfPBnVOgi751a1k1NTf1qWU+ePBnXXnttv1rWXEcqvFQ1/lvdl73eS1rWTKr0pWVNWLq6tnrTsuaz9qZlzXetLy1rjhmfoTct69DQUGzYsEESrIFa1moSsS8tawD9almrqKfetKzJ4Ar0rWXNM7EvLWsmwzjf/6yWNYNltcKonu/sBWbV/uTJkzh48KCsQXVd8P1U93C+KzyvwsLCYLVa0dPTgy+//FLgpSpRIaGvHFMmgDo6OlBbW4vk5GS88847frq4TU1N8Hq96OzsRHp6uqBXKGfEvRWA9JlyDzaZTFLN5jpU5cG8Xi9ycnLkXo1GI0aPHi0BIZPMHo9HEh+scv785z+XNg61r3/UqFEYM2YMTp06hfHjx2Pjxo1+8P8PPvhA5kjXdRw5cgTBwcFYsWKFIH7Cw8Nxyy23IDc3V+C3ajJD3TMqKyvxl7/8BSaTSeT7uF9Tj5htWYsWLZJ3iCzAra2tSEhIQEREBEaMGIH8/Hxk/g3qzMQmtatbWlpEx/h/mw0EqAM2YN+hqYdGb1ltwqNUBlL151SYW1BQEB577DEEBwfj+eefx9GjR5GUlISMjAxMnTpVtLzolOu6DpvNhuHDh+Pw4cM4fvw4WlpapJqoZooBICIiAna7XRr2mQldvnw5nn32WTz33HNwOByoqqqSDZIOBU3NvKrPoTrAdERMJhOio6OFaGbNmjV48803cfLkSTQ0NCAzM1OqRDNmzMD+/ftRVVWF4uJiP/kROgSsOqpBjs1mk8NGlQgIDg4W2niOeVxcHPbv34+QkBA8/fTTGDt2LCZMmIBdu3bB6/Vi6NChMq5MGMTGxvplHemkeTwe6QNWCV8aGxvhdDrx8MMPy0HInlpd1/H+++/D4/EgOzsbM2fOxA033IDZs2fjhhtuwMiRI9HY2Cg9WwaDAb/97W/lwPs+jaQzTU1NaGtr83N8uR5UaK7T6cSBAwf8oFwcGxoTMSTcUIMQda1xDnRdFyZPtW8VgPQf8Xq9mXrPrGaTGI59TgzKuG67urokIAZ8iAX2OXNuGbSvXbsWGzZsgNFolDFSK2UGg0EYtwMrc7GxscjOzobFYoHRaBSY6vr166FpGubPn4/bbrtNtD0JAdY0X18sA8+VK1fihhtukIArPj4eTz31FK6//nqMGDFCep
DIpBs4Pr0R79BIygQAW7ZswZIlS7BkyRL87Gc/Q2lpKX76059i5cqVWLlyJV5++WW/irXX6xX9SU3zwVwZPPf2eXSW1fWiBouE+PJnzp8/j8LCQhQWFuLo0aM4ffq0PJ+K8GCA2t7eDqPRKFVEBjuBwTI/J3DtBibr+qqgq0SH1LJmb15fWtYqTLw3LevS0lK/IFmF5qnrra8zyOv1IiYmBgaDoU8ta+pV/z0tawB9allzbtWKuMrcTOMz96VlTS3M3rSsSaID9K1lvXr16n61rFlxVZEgqpY13wtWQvkeq1rWXLN9aVnruk+r+NtqWavoJ/Zdss2G7OJM1l133XW44YYb8IO/EfqoyBZ1HQcmMdR1zoSB2WwWlNann36K1tZWOeMAIDIyEjNnzhTpmaioKCxbtgwLFy6EzWZDSkoKNmzYINflODY1NSEhIUGkZhISEqBpmhATGQwGP7IsJgFVDd/k5GRJpKhr8uLFiwAuwdcBH9SYlUmTySRSYXwOViP5nrC/3WKxoLq6GseOHQNwCd2U+bfWHvW5jEYjJk6ciK1bt0o1VB2rRYsW+a1/VjyZ3DMYfNwFRqNPWlLXfS1HRP243W60tLTgj3/8o5A68Z3i3BLynJKSAqfTieuvv16q1YMHDxZYtclkEmkuPj/Xfm97x/dtAwHqgA3Yd2yBFQD1EFCz9Wqgqv6dvxcRESGH3ZQpU1BYWIjc3FxUVFQIJFbXdRE0NxgMqK2tRXd3N6KiojB27FhEREQgKytLghw6Puxb2blzJ1paWtDc3IxDhw5B0zRs2LABDQ0NiImJwU9/+lP87ne/Q0xMDFpaWuRee3tmfgUGsCopAwDRlnU6nXA4HNi5cyfCw8MxdOhQyfCZTCYUFxfj3//93zFy5Ei/yrOqxQf4w1OYdQd8TnRqaiqMRh9RFGFD1A9TCSGCg4Nhs9kkIAkKChICBo/Hpz25evVquN1uDB48WAL27u5uvP/++9A0DUuXLkVMTAxsNhs6OjpQU1OD9957T+A2x48fR3NzM2666SbcdNNNcuhx3lNTU3HFFVcgMzMTKSkpsFqtCA0NhdfrxV/+8hc5gMiS+n2a2WyG1WqF2WxGS0uLsCHSgQtkueRBSueMjqrq8DMwVQlTGJCyX409XCr0kmuJa4IU+2o/TaCpCAM6Hi6XC1u2bJHvs6LGf4eGhiIuLg66fqlfLigoSGQOmH02m82is0vIVXFxMT7//HM/RkpKY3g8HrS2tkr2nhW0CRMm4NZbb8WECRMkgI6IiMD27dvxwQcfYOvWrRg6dCgMBoM4qgaDT+pqy5YtCA4OxsGDB3H77bfj/Pnz2Lp1qzhGycnJGDVqFKqrq/H4448jNTVVxoxj3dt7rVYf1QoFnXdN01BVVYU//elPuP/++7F48WL84Q9/wLBhw3DrrbfiRz/6ERYvXoxrr70WCQkJklTjswH+1T5eXyXGUhNdXCNEMHD8mJyrrKxEfX09qqqqsGnTJpl7OrsMignR4/X4PqkVP94Tx1ldVyrUVWW7VytQRmPvWta1tbX9alkHfplM/lrWCxYsQFlZWb9a1mqFrDfjM/WlZU323v60rDkffWlZ01nvS8s68FzQtMu1rL1eL0pLS/vUsg4NDcW4ceP61bKura2FydS3lvVtt93mt/YDtax1XRfkByGsgVrW6rttNF6uZc3f/7Za1up9MqHMvloVkUGZlaamJnz99dc4cOAA9u3bh4MHD4reLddeYOJF9V/U5G9kZCQiIyPhdDpRVlYmVW++y6yAsxr6ySefwGj0seq2trZKkMRzMyIiAvX19QgLC5Oe0tDQULjdbpFwYcLx4sWLIqHC4JLPPGzYMEnaMrkTEhIieveUduN5H6hty88hGZa6p/D9bWhoQENDgyhchIeHY9u2bUJYN23aNHg8HlxxxRXQNJ/iwNGjRyWpCvj2/40bN8LhcPjtt9wfVL6KxMRE2Gw2qeabzWYMHTpUmKsrKytht9tRUFAAp9MpagHs6bdarWhvb5fkktfrxaeffipav4AP7jx48GAMGjRI9jAilAjd/1fbQIA6YAP2HZq6uQc6D4GOs1ppVQ8eOmfBwcEICQlBYmIiTCYTmpubERQUhF27duHKK69EU1MTzpw5gy+//BIGgwEnT54UlsEDBw6gpaVF4EiBWWvAR4tPzdv4+Hjk5OSgubkZCxcuFNp1VmgWLFiARYsW4fDhwxKcBDq0vD4dF/UQ5DNxc2SvFYMUp9OJffv2CTHBM888g56eHvzhD38QfV4GD9yEDQaDsPkGVlEJDeZBQ9iRzWZDTU0NwsLCsHbtWsTGxuKrr74S/VGXy4URI0ZI5pKV2ubmZiHDefTRR3Hq1CmcOHECR48eFa1Uj8eDcePGoaOjA93d3aivr/fTa5w/fz7CwsIkoZCdnQ2j0YgjR45A13Xk5uaKw9zW1obc3FxERkaip6cHTU1NwsS5YsUKDBo06Fuv1X/EGHzpuo/c5uLFi+ju7haHjXOt9qVyHajwSPYd0RkFLjFPq9Vw9WDn9VWZGa4vVR+TjpoacAL+xEBcD2FhYTh+/Lhcn5Ap9kyrwa5KbMHeOiIPCLm32+3o7OyE2+1GV1eXVGE+/fRTbN26FaWlpeKEeDw+ySI6416v1y9YSUpKEnIZs9mMpKQkREdHIzQ0FEVFRTKOZHImrP+Xv/ylOG1vvPEGampqoOs6NmzYIL1XOTk5iI+PR0VFBeLi4nD27FmBfQZWOQMrLYE9nWqVm4kFk8mEgwcP4qGHHsLSpUvx7LPPory8HDk5ObjnnnuwfPlyvPTSS7juuusQEREhARXREGoFR63Ic21xHahBs1qFNRgMqKmpQX19vRDhqHssK2xNTU2ynlhBVYM7tRqhaln/vT2cv9OflvX58+e/lZb1yJEjMWvWrH61rAMDjcBKdOC8BmpZU8Pz72lZE97Zm5Y1Iax9aVmT80C9TyZLqGVtMplgsVj61LI+d+4csrOz+9Wy1nW9Xy1rnie0QC3rmJgYmQcG2V6vv5a1mqzjWcJ9RtN8vZV8T76NlrUaQFFWTd0/OX5dXV3461//ioMHD8JqtaKzs1MCsPr6enz99dcyBmoSvS9TE+vR0dHCoL9nzx5s374dBoMBra2tCA0NRVdXF0aOHAmr1Yrm5mZcc801sFqtSE9PF64Au92O5ORkqSIz+c3WCjU4YsCqaRoqKioAQDgKDAYfE77RaJSf4T7S09ODxMREqXomJSVh/vz5uPnmm0XjlwlFQrC5Hr1er2jvGgwGHD16FJMnT8aMGTPQ09OD6dOn48iRI7KeKioqsG7dOuGHCA0NxZAhQxASEoIXXnhB1teJEydgMBikqg343seYmBikp6eLz0KiJzUZaDKZkJKSIv6S0+nEZ599Bq/Xi5dfflnWOYnBjh49Ck3T8Pvf/x6apmHt2rWoqKjApk2bsHv3btTW1srZyusxic8z719tAwHqgA3Yd2iqQ65W/YDL+1EDf48HhXoIJSQkID8/H/PmzcPChQvx2GOPYdasWfjqq68QFBSE3NxcJCUlweVywWaz4d133
8VXX30Fp9OJ8+fPo6amBrt27RIYlK7rQsI0adIkNDU1YcqUKRgxYgQ+++wzYWDkxqs6+WlpaTAajSgrK8OhQ4cuk3EIrLTQVMeOmTr1Z+is2e12VFVV4fXXX8fIkSMREhKCgoICeDwejB8/3s9hVQNUwlO4kRNW2tPTg0OHDkkWNDs7GzU1Nfjoo4/gdDpRU1OD3bt3w+l0IjQ0FPfffz92796NlStXCuHG4MGDUVNTg9bWVump4QHB4NPtduPdd9+Frvv6LNU57urqwsaNGwEAd911F8LDw3H8+HGUlZXhyJEjomVMR9/hcGDPnj2oqKjAuXPncP78eam0bdu2TcaLkiTfl5H1jxns9PR00VlUHRw1wGDAGBoair1790rlW10LwKX3QoVp0eFWAwOVSEJ9lwKrMGpwoRp/h/N9/vx5P8fLYrGIgLyu68K6yzXLvh06acHBweIUkSnUbreLs8xKsK7rqKiokDXDcaHT63a7ZRzVQIcySTNmzMCtt94qTMB8dwiTY1DK6lVhYSGGDh0qzioh+nTmXC4Xxo0bh/z8fKSkpODZZ59FZGQk4uLiBOpKDb3A6grHWP0zsOKnfq+trQ2vvPIKHnjgATz44IN45ZVX0NnZiR/96EfYtGkTXn31VSxZsgSjR48G4C8FozLYUtJHhcMG9o9GREQIFF1NWKkV0J6eHqSnp6OxsVGSLuqaVJ12de9Tq7mBFph8ZNWHYxCoZZ2Tk/N3tazVsdQ0fy3r4uJigZb+s1rWavKURhIVjvexY8f+rpY1gD61rC9cuADAX35H1bJmhYpjDlyuZa2+m71pWX/00Ufo6enpV8va5XL1q2Xt9XqxbNkyGT+TyV/L+sc//rFUw7n2vF5/LWs18cqxUrWs4+Pj8eCDD8r4fxstazWxwnYF3ndgMlDdBwkr7u7uRkhICE6cOIHjx4/7/U6gqXup+m57PB5RAnC73di2bRs6OjoA+PalyZMnw2g04t/+7d8kCU5CIsCHnklISJC5b29vlzXK9gUmH5mUSkxMRHl5uRAgsY2Ca4TVeVp4eDjGjh0r5+fIkSPR3NwMs9mM2bNnS/WU48kkFd8JSisZDAZ0dXUhOTkZgwcPRkFBgUh7UYv2wIEDIqfE92jixInYsmULTCYTJk+eLPelaRpuuOEGgbvz3Fu0aBEeeOABv8Suw+FAUlKSBNwABIV0/PhxBAUFobu7W6TIgEuIkeDgYBw+fFi02tV3PSUlBc888wwWLVqEwYMHizZtR0cHSktLMWfOnIEAdcAG7P+aEe6jVhNVIiFWA/j/Xq+/zh7/j4eN0WjE008/DQC4cOECnn32WdTX1wuct7S0FHl5edi7dy9Gjx6N//f//h86OjqQnp6OiIgITJo0CSkpKaiurkZhYSHOnz+Pffv24eOPP8bq1asxZMgQHDp0CHV1dcjLy5P7i42NvUwiJTQ0VIKj1NRUv6qWGhj05rAGkkexF1c9UMxmM9LS0sSx1nUdR48eFWkIFYqnOvIMBgjdYZ+Zx+NBeXm5HMIktmlubgbg61Pp6enByZMnUVRUhOjoaIENUg7hkUceQVdXlwiZf/DBB9B1HT/84Q8BQDKV1AAdMWKEiHMT6nn27FlYrVYhyTh27BgKCwulYms0GrFy5Uq43W5ceeWVcujpuo6UlBQkJyfLwces5/z587+rJfvfsvDwcKkSU5fXarXi4sWLIs0D+K9rGrPUO3bskDlTg07gkhQHAL9gQU3uqIGRCmlUA9TAQEE1Htq6rmPnzp1StQJ80LLhw4djyJAhQqJBGK3X60VNTY3o7EZGRgo8ktp+ixcvFvp+OrIOhwP19fVwOp2SQCIpEB0h3j+rdOq9Z2Zm+jlp2dnZmDp1quwLiYmJsn6Y+dY0H9vq0qVLxSkxmXw6tZs3b8aJEyfg8XgwZcoUgVAbDAZ5981mM44fP4709HRJGvX09KCnp0fmqrcqdaCp7zUdWk3TcO7cOTz55JO47777sHz5cuzduxdpaWmYN28ennnmGfzkJz/BPffcg8jISKnIMrAn/I8QOa4bBpQM/MPCwmCxWIRQjfdgNPrIqDIyMtDQ0CBjpjq5gfswn7e3ijr/T12jKiSakLlALetDhw6htLQUKSkpuPLKK2GxWKTixntR+1CDgoKQlpYmvZ47d+7E2bNnMWXKFPzgBz/AVVddhQkTJvi9Q6qpAQv/zS9WztX3wGQyISIiQnoWqWXd2dkpFR7uZVyzmuZrQ+H9pqam+um4slpILWv1vlREBJNHLpcLUVFR0t6xYcMGtLW1Yf369XA6ndizZw+qq6sRFhaGkJAQLFu2DMeOHUNLSwvWrVsHl8slQafX6/XTsub3qGWtVqYACAR13759ACBICe4LmubrX2UfIpPIHAuy3hYXF0viZPTo0Zg9e7Z89j+rZU3/gmNnt9tl/nhuBSYlmJTjPsSEAvsc9+zZg8bGxssSjIHvQeC7ziCK0mc881jpdjqd2LFjBxb9ja24vb0dn332GUwmE2JjYxEfH+93n/xz8ODBl+2F0dHRyMnJQXd3t7QssJrKJF9FRYXck8FgEEgsWbQzMjJw6tQpeU5VYkh9Z9V3A4BofLvdbmzduhWjR4+WJN/hw4eRmpqKtrY2nD9/HgaDr1fb4/Hg5MmTOHr0qCTHOY7vvfcebDYb5syZI5/ldrtRWVnpV8XkWuJYkAAyJydHzs66ujq8+eabKCsrkx5x9Qwm+dGqVaugab72Le5DfO9uvfVWtLW1wePxIDk5GSaTCaNGjcLEiRP/4fX5XdtAgDpgA/YdGgNOWiBcTHV61D/pxAGXZEro/Pzyl7/Etm3b0NXVhYSEBHz55ZdoaWmRPhCLxYI5c+YIQ2R+fr5sqCQLCg4OxrZt21BcXIzOzk6MGTMGI0eOFIhicXExBg0aJNdkn4UaKK9evRrjx4/H0KFDkZ2dLSxxhKL0ZmpWWg0sVNkE/hzHKjg4WOBmbrcbK1euFMeahzEd70Anx+12IykpyS9rT4FsCtQ3NjYiJCQECxcuBOBzcr/55hs/Mp6qqiphoGVVVK2u3H333XLfhG12dnbC4XBIX2VYWBhiYmKgaRq2b98Oj8eDH//4x8jOzkZ2djbS09NFZ4+wUErsWK1WtLa2orKyEmfPnkV1dTW8Xi82b94MAH0SAf1PWVdXlwQFFL5PSkpCTEwMrFarENJwblSjw831HMgmCUDgdqqzqjpG/F1WstTgFPDvCVSrr4FOua7raG1tFSZEOuF5eXmIiIiQ/m3gEvS4vLxceoEDSXVWrFghDlNISAgyMzNFAoHBbGtrK1pbW8VxU00N1gPho6w2qf3GRqMRsbGxAoVjhp9rzuPxwGazYeTIkRLcOZ1OlJeXw+PxYO7cuTh06JBA5KKiomCz2TB+/HjZD9LS0nDbbbchLi4OkZGRoj3rdrulqqreU2+OK+eAc8x/q386nU68/fbb+NGPfoQXXngBbW1t0DQNKSkpWLZsGX71q18hNzdXWgoCZRmAS0kEMnOzwmcymfyCdq5Dj8eDQYMGiZwVcHn/psoWHEh8pDp/anCq
rmUVTeB2u6UCOXbsWBQWFiI2NhbDhw+H0+lEdXU1Ll68iLCwMKnG80tdH9QCTkpKwjPPPIPc3FzZn6k3bTabERwcLND73uYmcD54zjApGChnYzab5T1Ys2aNzAUROfyMpKQkqSoRJUBSF03zaaPymqySqsGtekZyXydCR9M0jB8/XnpcAWDhwoVob2+XYGDq1Kk4evQompqaBDEQFxfn9zxERaiJU6PRiBMnTsDr9Zcs4z7Mexk5cqTfHJPQhs/y1FNPyfX4nFwzERERyMjIgK7rIlul6zoee+yxy+bm75mKHPF4PKJ1rJ6FTCpYrVbR4uY88L7UfTMoKAhlZWVCrKN+Rl/7MY3vCpMwlC7ZsmULUlJSZE/yer3YsmULbrzxRgQHBwtJDxNw6v7N/1OJEePi4hAdHS0kSUaj0Q91AkAqkrzHlJQUgfnzuQglDkT6cD8hUoPPzs9i242u6xg1ahSCgoIwZMgQ6SkOCgpCY2MjTp06hYiICHR1dWHv3r2y56tkWlVVVdA0DZMmTZJ793q9+OKLLwAAw4cP90tUqXsfUTxMgl64cAEul0uqqYGJXMCXGODc8hxzOp3YtGmTrIeRI0di3rx5ACDM8FlZWf/AyvyfsYEAdcAG7Ds0FWZDZ51QNeByeBxwyaHm75GJll8qXIlaiQsXLsSUKVNEj9FutyMyMhLR0dHYsWMHYmNjYTabBQK1e/duPP7446isrERSUhKioqIwbtw4uN1ubN68Ge3t7Thx4gQcDoef6DVwKbPJA56EBHRGSKDD4JbPF1hV5bUCD9TAqgU3fGYuu7u7sWnTJgnKdF2X/jpCbKOiotDW1oaMjAw0NzdLTxuvb7FYcNddd4mG3qFDh5CVlSXj3tLSgqCgIDz88MPQNA319fVYt24d3G43fvvb3wpsx2azyVjHxsYiOTkZaWlpqKurwzvvvAODwYDf/e53GDJkCAYPHoyEhATY7XacO3cOBoMBubm5mDZtGm6//XZERUWJA6hpmrAOrlixAq2trRg0aBDS0tLEQezu7ha9VtUx/j6MlZmMjAxJfDgcDpFhqa6ulsq0WnGi0dkPrJzz5zgWY8eORU5OjjjPfDcI01Zha2qAqjqdgcEvv0dn6MyZMxIQUieQlYkzZ86gqKhIkholJSWi8UlnZdSoUZJEoQPE4IeVQAaznZ2diIqKQnh4uFRx2K/qcDikEqXrPmkWBmm6rksfHceVxl5lvlv8fbPZjJ/85CdSFQsKCkJbWxvOnTuHd999V4hcvvjiC6lOTpgwAatXr0Z0dDS6urqQnp6OiRMnIj4+Hl1dXUhMTITZbBY4NO/JZrOJRqYarPY27ur+EfgnkQFVVVV4/vnncd9992HZsmXYu3cvQkJCMGfOHLz66qt48cUXsWzZMoE5x8XFSZWPexB75js7O2EymcTBVYN8h8OB5ORkdHR0iEyDmiTpDc3CvSywwhoY/KnOMcfXaDTKfOXl5YkuIkmampub4Xa7MXHiRMTFxfkFxFzTTIwxoULnmIkDm80Gs9ks0OX6+nocPXpUiPT4zvB8UZMHgYkGzpeK+vF4PEK2wqpfYWGh7ENGoxGTJ08WaRoG//n5+aisrITJZMKMGTP85p1nm8PhkKSPmnzUdR379u2TxAvllbgGCd+Mj4/Hli1bJMA8e/YsDAaDjA3nr6enB++//76gcTh3NptNfp9M6/wdOvEAcOedd0LXdSEvI0Li7bffhsFgkHng//E5zp07B6PRiIsXL0LXdbz++utISEj4p4nu1H0RgMgicX/sLZDkOiTnA6VojEYf2+wtt9yCuXPnIi8vT5KjwCUEC6uyNDXo51rhmiKbM/t6p06dKv7Ezp078fHHH2Pfvn1CWshED0mkNO2SviulZAwGA7KysgQtQhQRIcWAj2CQSWveW1xcHE6dOgWn04mKigo/yC4ZfDmmnHOPxyNoLcJvyS59+PBhXH311bhw4QIaGhrg9XrlXrKyshAUFIRt27bB4XBg8+bNKC8vx5kzZ1BSUoLo6GiMGDFCzkEGgWq11GazISQkBPfff7/f+0+2aKPRiBtvvBFer1f0s/lVUFAAXdeFdVh9x4uLi2EwGHDx4kV4vV7cd999OHToEC5evCgB67x581BWViYJeV33afX+q20gQB2wAfuOTe2HY6z2OQYAACAASURBVJZdPZx7yxyrwZoqoM7DXNd15OXliQg64KMSj4mJQUNDAwwGA3bv3o19+/YhPz8fw4YNQ3d3N9LT09HW1obp06ejuLgYDz30EK666irExMRg7969qKysRGhoqBxOgwYNkuyiClnjoR8bG4tp06Zh0aJFWLx4MR544AE89dRTyM/PR2hoKC5cuCDOfaCp46Aecuqzq1lgOgQWiwUulwvNzc1y+KsU/VarVTS9urq6kJubi/LycgC+KpSmaWhtbcXQoUORmZkJg8GATz75BD09PcIAy2rpvHnzkJycjNTUVKHxt1gsiI+PR2pqKlpbW/Huu++KAxYeHo7g4GBkZ2cLsQadUFZ1AJ8jWFdXB7fbjRtvvBGjR4/GpEmT5PONRiPWrVsnhwt7ixjQTZgwAbm5udB1XRh9v09zuVwYPnw4LBYLmpqahCiDTMehoaFobW1FfX29BB10boBLGWuOB6vuaoWTznBISAimTp3qF2iqbIuBP69+BnAJysh1S0dQJaxisMmKpqZpmDFjBvbs2YNnn31WKktkUNU0DYMHD0Z4eDgaGxvx0Ucf4dNPPxWIr9PpREJCggSLDKqfeOIJxMTEoLu7W6Q9NE0TPUXC4njfR44cwY4dO1BYWAiPx4OEhAQRZecXnWTgUoBBxADF4q1WK4YMGSJkYXQEvV6f5u9vfvMb6UlrbW0V6HZ0dLT0Uo0dOxYtLS1wuVzIzMzEyy+/jPDwcCQmJkrfuK77tI1VopnAfS+wF06dR64B9cvlcuGtt97CokWLcN999+Gtt95CT08PIiMjsWDBArzwwgt4/vnncdttt8Fisci6stlskthi0kr9rJ6eHoSGhoq2I9cikRrcc3pDAPS1n6mBn/pZ/WlZZ2dn/10t64sXL8r6603LOigoqF8t66uuugqRkZF9almr74/6f9x/AueKSUXOd2VlJd544w1xjHvTsh4zZgz2798va5DjrWpZO53OfrWsm5qa5PN707Im+ofrujcta66/vrSsGfT3pWXN9gwmaOn8q1rWmqb1q2V96tQpqVR/Gy1rJtKYbGKyQ+1F5RpiIKYmg7lfcB0lJyfDYPAxgVPPubi4GMeOHfN7D7hWAquOKqpAfV8YqPb09CAnJwcejwcNDQ1wOp0IDg5GfHy8tCrpui5KA+q+QOI4TdOQmpoqTL5nzpyR++GzcdzVwNntdqO1tRVRUVEoKiqCweCTrKH/NWnSJLnnsrIyec95TZ5ZhKTX19cjOjoan3zyCb744gupGhsMBsybN0/6QIOCgjB+/Hjk5uYiKioK69evh9vtxpgxYwThsmnTJui6jmuvvdbvPTt8+LDMLb+nEllNmjRJYOVJSUnQNA2nTp1CdHQ0dF0X2RyOAdeC3W7
H2rVr4fV6cdddd8Fut8Nut2Pz5s0ICgqSM+jee++VavD/BhsIUAdswL5DU7OJKmSvtx45dTPtLVg1Go0oLy9HeXk5Dhw4gNjYWNTU1IizfsUVV8DjuaRZd8cdd2DUqFEYN24cYmJiMGLECMyZMwd5eXn45JNPUFVVhT/+8Y8oLCwU2ElOTg6io6PR09ODWbNmYfDgwUhJSZFsNU3TNMydOxc33XST9CHx4He5XLjuuuvwH//xH/jP//xPxMbGory8HF1dXX4BpwqlC4QkqQ54b2MaEREhkggApN+UmXjA5/gwmB09ejTMZjOioqIQFhYmDMivvPKK9EeFhIRgxYoV0n/04YcfSvY4KCgIXV1dqK+vh8vlQk5Ojjjd1FB8+OGHZe7YM7d//364XC4hm+I4DR06FB988IEc7LquY/To0eJIWCwWYS90uVwYO3YsIiMjYbfbMW7cOHHQDAafTuD3TWBgsVgEDpuZmQmbzYaenh50dHSgoaFB+hdbWlrEYeL8AJDeQR6YJHxQnWFmr5lBDwkJwbBhwyTjT0dBDXxUUz9PDRRphECFhIQgPDxcnFNWhAEfy+KkSZPw7LPPSvLA4XBg48aNqKurg91uxy9+8QscPHgQ+/fvR1NTEzwej5Dv8P2ltpyu65g9ezYefvhhgePFxcUhJiZGdFcbGhoEMkhHuLu7G7t370ZbW5uwmTJgUaHudKgZfHI8W1pasGTJEphMJmGsBYDTp08jLS0NVqsVX3zxBSIjI5Gfnw9N03DixAlERkZi2LBhMBgMuOaaa6TCS5hbfn4+wsLCkJiYKFBtoiq8Xh+RB7/fF4STc8h5UudKnTMmiA4dOoSHHnoIixYtwtKlS7F9+3Zomk8C5amnnsLPf/5zLF++HHfccQdMJhMqKyuxd+9erFq1Sj6LwQJJrtxut8BOOWcqiRe/FxhYq2uN76AagKvVQa65QC3rrVu39qtl7fV6ERcX9620rO+44w48//zzfWpZq9dQE4WBz6u+xwxSqWXd1dWFoqKiPrWsg4KChACqNy1rzgvfZd6HqmU9YcIEkXrqTcs6KSkJ06dP71fLWiUd7E3Luru7G+vWrYPH07uW9Z///Gfout6vlrWu6/1qWWuahr1798Jg+HZa1jxP1B5g1X9Q55XBKeCvz64mQrZv3478/HxMnjwZt9xyC5YtW4abb74ZoaGhOHnypEgK8bPV/VTdgwN9BX5FREQgKCgIHR0dmDBhAoxGI7744guBt5NxOC0tTWDjfD6iUzjmlZWVcLvdkpCPi4uTRABZ7evr68W34DNPnDhRiBPHjBkjKAAGeMClXnLC01VtbiYFeUYEBwdLX/60adOwfft2DB06FEajESdPnkRNTQ3uvvtulJSUYN68eTAYDCJbx3kpLi6G0WjEddddJ+Pmdrvx17/+Fbqu+3FMcJwtFgscDgfS09NlXgcNGiR9udxvOSbqXnrs2DE5d10uF6ZPn44DBw7g+PHjkiiYO3cujh8/DpvNhg8++KDPve/7tIEAdcAG7Ds2bpCqA6A6z4E/q/4MN2PAt9EcO3YMRUVFuPHGG1FeXi7Zc/a2ZGVlYdSoUQKVslgssFqtKCgoQHd3N1JSUlBYWIi0tDQUFRVh2rRposm5atUqFBYWYsmSJejs7BQHhoGd2n9K5zjwe+r/8d6eeOIJrFy5UmQvArOuKmxI7YnpL0jt6emRygidMF6XAbvRaBQILB0It9snTk1ijdTUVBgMPj3EhoYGXHnllfB6fZIBhNjOmTNHCFU+/PBDGAwGPPLII/KsPDwZTGqahmPHjqGjowM7duyA2+3GY489JnMeGhqKadOmSTUO8B3C4eHhGDJkiBwsbrcbq1atgsfjwT333IP6+nokJSUJFLS+vh4lJSXweDzCDPx9GYNS9r0Q8kctxujoaBiNRlgsFvT09AijoDrHrETSAeEz91ZpUythkyZNEpIJ9T1RLbC3Sj1c+T4yEEhOTobL5ZIePma+33rrLXzyySfSJ+j1erF48WJcc801uO222zB37lwAPs3I2tpanDhxAq2trQgJCUFzczMyMjJgtVqlT5RBHWFwhETHx8fDZrMhPT0dcXFxom1HWBt7mvmOBAUFYefOndi5c6fIaKjJGiYuHA6H9DHxPaF8jq7riIyMxCOPPCLPzn6skJAQbN68GadPn0ZYWBiysrKwbt06mM1mjB49Whw3p9OJq666CpWVlairq8PgwYPR0NCApUuXSt8Uq7s9PT1+Oq+ck97mR527wMBQrfBxrWzcuBEPPPAAFi9ejFdffVWg0zk5OXj66afx9ttvY8eOHdInzv20rq5OZDAYgLBSrsJagb61rNXqFO87sE9V/b3etKzvuOOOfrWsm5qaEBIS0qeWtVpJ7UvLevfu3aivr+9Ty7q3Sqm6/6rVMv4sk4uAjzQtIiICmqb1qWXN3+1Ly5pBJ9eAuka4T+7cuVNIeHrTso6MjMSaNWv61bKmDFlfWtbUp9W03rWsWfXrT8uawVRfWtYej0eqzZr27bSs1XOU60wN/mkcS7WaqrYfMchjhY4Bd35+Pp555hncfffdCAkJQXFxscjVBb7PKpRUXUe8T66FpKQkAL73OT09HXv37kVdXR1cLhdqa2sRGxuL2tpauT8m6dTkCNmHeQ85OTkSeLKtgW1GvMesrCykpaVJS0ZKSgpKSkqkes/zm3BfngdEQNBX4fOePn0ao0aNAgDs3bsXiYmJOHz4MKxWK1JSUqBpGtavX4+enh4sX74cqampMJvNOHPmDGJjY5GTkyOfs3btWklactzYSjR27Fi/RAM5KgwGA+69914J/jXNRyDV2NiIhIQEAPALYNWKtMfjweuvvw6PxyMJ0Y6ODrz99tuSqHW73Zg3bx40Tbus1etfYQMB6oAN2HdodHS42QCXU7VzE+4N/ssD32Dw6W/m5eVhwoQJGDlyJJKSknDy5EnpNfr0009ht9vl0Ozp6YHZbMakSZMwZ84cJCYmYuvWrejq6kJGRgaCgoIwYsQIREVFYdWqVcjNzRUnat68eX49Dd/GeHAZjUY88cQTeO6553D//ff7VU05ViohQWAGn2PFCiMAP71EZlwZ9PCg5hzYbDYkJiZKH+CePXtgtVolQNq0aRNsNhtuuukmcfBNJhNuueUWua7D4UBTU5NU8DTNB0FjhvGmm24CcClIJlxX13XYbDa43W6kpKQgKioKcXFxWLNmjTwryXno5IaGhsJutwshDR2yzs5OHDlyBKdOnUJ4eDjsdjvOnz//reboHzWuk+bmZkRFRUHTNOnDyc7OhtVqFU1A0tWTiIi/39nZKc+qJi0Af+gZTdN8WqmhoaGw2WyIiYmRNas6y/x7YJJDrWSxIsuD+J577vFzkLnujhw5gv/6r//Cr3/9a7jdbsydOxdPPfUUxo4di9WrV8NoNKKjo0OqbWT3bG1tRVxcnDhMZF5WBefT0tKQmpqKlpYWLFy4EOXl5UhISEBQUJCQZbHfvKGhAZ2dnbLm6AgWFRVhx44dfkyoZWVl8uzs2wOANWvWIDIyUgIfOvCapiEjI0
PWKh0vVn6dTifOnj0Ll8uFd955B4AvOHz77belp95gMKChoQGJiYkYNGgQFixYgLi4OIwdOxZms1mgtAwM+N5xzlWnVg24AudQrQyoeyiTGWfOnMGTTz6JJUuWYOnSpXj//fdRVVUFh8OBn/3sZ/J7JpNJkAdskVADNDKv8vo0/l2tpqjWW1DN++OYB2pZx8TE9KtlHRwcjIKCgn61rPlZfWlZX3vttdKjq2mXa1mzVzOwSsLrq4FM4DOp7y7XDNeFqmX93HPPISgoqE8ta+6T/WlZ22w2CWZ707J+/fXXkZSU1K+W9dChQ/vVsj516hQA9Kllzb5tVrZ607JevXo1dL1vLeu6ujoYDIbvTMuaPcmcQ+51vVW91IqpGpyqCZXefi8zMxOPP/44nnzySbS3t6OoqAjNzc2X6bdyj1Wv0RtXAJEr7HNlZbGiogLBwcESEDH4Un0lrj8mKk0mEzIzM4Xbge9vTU2N35jk5ubi7NmzkvDzer1obGz0a1vi9XjPgb3pRIZ4vT72ZSKfqqur0dTUJGfLD3/4QxgMBkG83HDDDdKu8tFHH8HtdgszNPuTDQaD9DdzXjds2CDQd77zDFybmpqkdzcqKgoGg480iecO/S7+DvdVg8EgySsAaGtrQ0REBIqKiqDruvAczJ8/H6WlpXA6nVi7du0/vC6/a/vXC90M2ID9H7LvM+u0dOnSv/szZGYLNGqyfZ+2ePHi7/0ze7PA4G7WrFmX/UxJScll32MwqtrNN9/c5+fceuutfv/uay7YExVot99+e5/X/r6tra0NVqsVERERMJvNEjylpKSgu7tbqqsxMTHi3FEbkUEqYdB0PFiZowMNXCKqCSRAioqKksREbm6uX2UNuAQ/43XVxAcdX2bRVeZJkowwULVarYiKisLdd9+Njz/+GAcPHsSoUaOwYsUKFBQUYOfOndi1axdGjRolTlBiYiJOnTqFkJAQqSzGxMTA5XKhrq5OqqmE07MqmpqaKkRABoMBO3bskL40OrmEIVPGSHUsWQWMjY2VYI7P4nK5MGHCBLS2tqKtrQ233XYbtm7diuDgYGGzPnnyJIYNG4Zz585h1KhRqK+vx+bNmxEcHAyLxSJVyY6ODoSHhwu0/Yc//CE+/fRTgcKRPOz8+fOIiYlBbGws2tvbpTJCIg72c7GCqzpTvVlgxTwweUVTe4537dqF3bt3S38av09EhMHg61NXgzObzeYnY8V5VQMmtVrV1/2qDj+fS9N8WtaUIhk+fDh+9atf4cEHH8RXX32FcePGITc3Vxxsm82Gc+fOobW11U/LurS0FLNmzRL9YRL9TJo0Cbt27cL06dPh9XqxYcMG3H333TAajZf14LIKybEoLCwUNmBaYLVYfX4+GyttaoDPBJ3dbkdzczNef/11JCYmYsaMGSgoKMD06dMxfvx4qSKqQQ21axkEeDw+LWv2FxcXFyMxMRF2u91Py/rRRx/FFVdcgbVr1yItLU20rJcsWYIdO3YgPz8f+fn5mDt3LtasWQMAiI6ORm1tLeLj40W+iImBd999F4888shlWtbFxcXQNA1XXXUV7rrrLuzbtw/Hjx+H2WxGR0cHurq65B3kO7tnzx6Ehoaiu7sbra2tIrezbds2YYH9Z7SsVWSBuibVL65jdV64pjmXXKtqEq+3IBXwVcufe+45PPfccxLsXX311X5JQbWqqiZzAvtU1ZYWh8MhTM2ffPKJoHE0zaeXa7PZJJlJlEFOTg7OnTsHwIf8qKmpwcSJEyVQY+DLNZyQkIDdu3fD5XKhpKQEeXl5CAkJQVVVFQYPHoz8/HwUFBRIz2t6erofQkJFN5Ehm2PJ5LPFYsGJEydw8803Y+XKlTh//rzsmbquIz4+HpGRkQgODsaLL76I3//+94I6czqdGDlypN94lZaW4tZbb8Wjjz6KV155Reahp6cHq1atwnPPPYfFixfjnXfeQWxsrLQu6LqvfaSjowPjx4/H8ePHZT7UOXrjjTdgMBiQkpICm82GoqIivPHGG3j++eelDWfJkiVYvXr1P7w+v2sbqKAO2IAN2IANWL8WEhKCyMhIoctvaWlBeHg4NE2TvsOQkBB0d3eLbiwJd0gEpB6SZrPZD2VAp1eV+gmsZGmar9+RvxsUFOTnTKrOtPp3Bh2EDapkSgsWLBBoFqvE3d3dePPNN+F2u7FmzRrs3LkTTz/9NBYuXAi32w2LxSJBUXR0NGJiYgSWS6gfkQjs79E0H8lSd3e3wDRzc3NRXV0tThBh6REREYiMjERCQoJUozweD7q6utDY2CiBHp3R+Ph4PzIU9koNGzZMRO337t2L2bNno6WlBWlpafB6vcjLy0NsbCxaW1tRXFwMs9ks8NeOjg789Kc/hd1uFzg9qw/Tp0+XsXU6nVi/fj0MBgMWL16MlpYWHD9+HE6nE3FxcSgrK8MTTzyBsWPHCozQbDajrq5O1oUKpVODPLW/ToVD9jXP/KJjqa4LXdcFYk0GXRXpAcDPmetPy1pNnHA9qQEvHWq1HULVsn7rrbf61bJOT0+H1WrtU8u6pqYGW7du/bta1sAl9m2OAbWsExMTkZCQAE3rXctafQ61iqxa4DjwvaaWtc1mw1dffdWnlnVLS4tfRa83LWtd11FaWtqnlvWsWbPQ1tbWr5Y1Yb99aVlTPqkvLWvCwPvTsvZ6vf1qWWdkZMg7+m20rHtLGKh/V98VFeaprnEVsaAiVgLXSiCSISwsDEOGDMGUKVOEoZzvG4OrwHdO/Vz+HN/zkJAQxMXFweVyoaWlBbGxsSgoKIDJZBJiN1XXNzMzExaLBU6nEzU1NYJQ4TpkNVt9Po/Hg/b2diQnJwubbU5Ojqyn3NxcuT55I5jENJvNsj513cceTZ3SyMhIUUCYNWsWtm3bBrvdjqlTp8JqtWL9+vUAIBXjG264AWVlZX7avg6HA6tWrYLD4ZA+aY41993hw4fLGBJNQzJL9uhzr2loaBC0zFNPPdWrrnhJSYloo3ItEzFD5N6dd96Jw4cP9wob/75tIEAdsAEbsAEbsH6NhzIAIUVir5bdbkdKSgp0XYfdbofVakVSUhLa29thNptht9v9elHViiUrTYQt8fu9mRoQhIeHywE9bNgwcX7UKoDqFNHxYCBMx9xgMCA1NVUcNzqqdMZtNhs2btyI6dOn4/Tp0wJZZWUgNjZWnAJN0wRCy6BYhcOFhoZKYML+L1XrkRWXqVOnYvr06ejo6EBiYiLCw8OF+ZMswiTzAi7pHnIsKUbPgIvkMm63GwUFBTAYDOju7kZlZaVUeQFfcKFC4agLSXZNXdexdetWmQsS8xw/fhwhISEYO3YsysvLERUVJf2no0ePRlxcHGprazFhwgS/xAQDkM7OTtHRDXRy1bnvD47Y21pRieko6RAVFSWONSstKjSSPfOq465WFPkzvf2pBsdcX5wTVct648aN/WpZnzx5EjNnzuxTy5rOdX9a1qy296VlnZubi5EjR34rLevAOVLh2dRtZYDXm5Y1GXrVa3HcVC1rtgx4vZdrWfPn+9OyBtCvljVwqUevNy1rBr39a
VkzgcTKWqCW9ZkzZ3Dy5El4vd9Oy1pN4DC5wu+pAWtgBTMwGFWr42xXYGuJ+jlcN5WVlZg5cybGjRuHzMxMCcC5BwQiDQI/Q+3FD/w57gOUuHvvvfcQGhoqaAdaamqqBKQMUHldAMJgS4guzWAwYNiwYdJ3mZ2dLVrVgYgIjqnX65XqM417RW1tLdra2mAymdDa2oqmpiYh9mKCq7GxEcHBwQLDjYyMxJ49e+D1egX9wHfA6/Xi+uuv9wskP/vsM+i6jjlz5viNp8fjwYkTJ+B2uxEdHQ3AX3uXzxQVFSVwaLY4cK/lfgMACQkJ8Hq9aG5uxsaNGyUpcP78eQwfPryvZfi92UCAOmADNmADNmD9GuGC7e3tArVzOBzo6uqSYLSzsxPt7e1ISUkRiBOdyJ6eHly4cEGuR6eEsNsTJ04IS3Bg3xtNdWY1zadty77dESNGSGY+sPrKg5/Mmfw5/kkI6vz584VZl86M1WqFzWbDoUOHsHnzZui6jpKSEpSVlaGpqUm04ugAOhwOcVRI/a/ec21trUA5Ca2kc6nCy8xmM2699VYMHjxYiL3S09Olp5TBL4Miji0daeAS4Qd7g0+dOoVjx45B0zTs379feqcWLFggjtns2bMxaNAgqUDSYaFcRmVlJb788ku4XC6kpqYiNDQURqMRH330kTirDHDtdjtSU1PhdDqxYMECVFZWIjo6GsnJycjMzJR7pGal1+sVSZTeglAGlYFJCHWuVVMDrra2NtjtdmRkZKClpUX6GlmR4PV5T2oPJhMAKrySn61+lhpE96dlHR0d3a+W9aBBgwS+15uWdWhoqPSC96Vlfe7cOT8uATV4YcKD8OzetKzVoFz9k9dSnzVwLNTftVgsfWpZs/rTn5a1pvl6t3mtQC1rsrEbjX1rWQcFBf1dLeuUlJQ+taxJatafljXfyb60rOPj4xEdHf2ttazVimVv88C5UANMzo2apFODVo6fruvSV8n55meRnIjvQmpqKk6dOoXS0lIcPHgQtbW1fp8fGKj2l1RiLyiTjk6nE19++SWMRiOqqqqEbCklJQVnzpxBXFycEEaqCZiMjAwAl1p4+LlJSUkYMWKEoAeSkpKkBUBlcCfcG7iEilBJ/IguOXz4sPRT9/T0SHuL0WjEgw8+CJPJhPb2dnz99dci4VJfX4/a2loYDAaMGTNG9hOSDU6ePNkP1cG+6OjoaL/e+M7OTpGoeeihhwRBw3F0OBzSDvPGG2/4ySYxcdrZ2SkBKsnGiObgubxs2TJcffXV//D6/K7tvxWgappWoWnaCU3TijRNO/y378VqmrZT07Szf/sz5m/f1zRN+4Omaec0TSvWNG38/+QDDNiADdiADdj/rFGupK2tDZmZmbBareju7kZYWBhcLhesVis6OzulKsgeRpK02O12DBs2zM9xonPFrP2RI0ekktab868aD3NWcoODg9HV1YW6ujokJydLb6sKU+Lhq17XYDBI1dZkMmH+/PmYOXMmkpOToes6Ro0aBZfLheLiYlgsFvnMwsJCFBQUIDExUeSUjEajEA+x2kPnic/JHqa6ujo4nU7ExMRIIJGeng6j0Sj3rmkaMjMzJRCdPn06PB4PcnJyhBWT46Q+J3ss6cB5vT5ZlcrKSrkfVkBramqkqtnZ2YmpU6fi3nvvhcPhEKIOXff1ABNevWnTJnR2dqKoqEigusePH4eu61iwYIFUShwOh+gRx8XFCXP22bNnERkZCa/Xi9OnT6Ojo0OcZ5UZtru7W+Y5cD2oAZdqajKAZjAYpOqcnZ2NxsZGP6ePlTo6h/wMXkcls1Mhkvz/wHukw63CA3mNvLw8zJ49u18t67Vr1yIhIaFPLeusrCzcf//9/WpZNzQ0+FXW+EUt67y8PMydO7dPLevy8vLLAh+Ol1oV623c+Xe+t31pWScmJkr1py8t6/DwcGkl6E3LOjQ0FL/4xS/61bKur6/vV8uaffX9aVkz0fDPalmzcvdttazVJI061movKd99dW7Uf/N7qtqAruv4+OOPceTIEbS2tsLlcvn1hMbFxeGqq67ClVdeiSuvvBJDhgzBz372M6xYsQKJiYno6OgQmDmfP7BayoRg4JphMkzXdQlU7XY7Ojo6UFJSgg0bNkgyraKiAv9fe+ceG/V55vvPOzM2tsfYGNvYxhgbB2wIFEgKAZKFJC3JZkNVB6XaLJUa0vSak626jSo1e1R1N3ukqjrdrbpRRSqag9S0hc02YCXpCWzS9ORCqniDw60mYBNsLr7E9wvD+DKe9/wx87z8ZsAuJASPm+cjIca/GY/f+f2e3zvv8z6X7+LFi939U11d7eZfcdiPHz/unHCfz8fKlSvduMRui4qK3Kbc3Llz3ZwtaefyXt7xSjfn3t5eF12PRqO89NJLVFdX88orr1BYWEhNTQ3vvfcer732mqunnj17Nnfc3eTHcgAAGQhJREFUcQfDw8OUlJQwf/589567du1K6OYr50lScW+88cZLrpvMlytWrEgYY2dnp7O3cDjM448/njCvjY+Pu0wg+W4uKirCWsvhw4f5zW9+4xznX/3qV1dtn9eaq4mg3mmtXWmtXRX/+XHgVWvtIuDV+M8AfwMsiv/7OvDUtRqsoiiKcv0ZGhpyDqnU9Uga6MDAAN3d3RQXFzshenH8otGo6+AqjoDXGZAaGEmHkihasg6vN2UTEusF5WdZdGRlZfH66687SR6ILeBkXFIHJ+/ndQiNMS5FVXbLfT4fX/7ylzl79qxb6AWDQZYvX05ubq6T7ohGo67VvzRPSnaUpEmR7PIvXLjQpdDOmjXL1R950+uam5sZGxvj3LlzfO5zn+PUqVOcO3fORQyMidW3yu/IotqYmAYgxJzzUCjE6OgobW1tjI6O0tnZye7du12EIhAI8Oyzz5KVlUVBQQFLliwhOzsbny8mbXD69GlOnjzpOg7n5eXh9/tdV99oNMqKFSvw+XyuzlYiWfKc6GLu27eP7u5uFi9ezJIlS/je977HunXryMzMBC7u7ItGrGxcJKf6ThSZ8TbZMibWcToSiVBeXu5S8mRh7l3wy8I6OSIq75nstHmjil7HzOs4JGtZSwQNLq9l/f3vf58vfelLE2pZHzx4kA8++GBSLesNGza4jufee0a0rD/1qU+5e1mi8V4t682bN9PU1DSplrX33Mu96HVgvddnfPxSLWvApabLfZWsZS3p9hNpWZ86dYo1a9ZMqmW9a9euSbWsJZo1kZa1OAMfRcu6qqrKpTz7fB9eyzo5ii3n93IbeGJ/yZsH4oDJNZTNmk2bNlFYWEhlZSWDg4MuW0Oujzh0Xrm5YDDId77zHZ544gnWrFnDwMAAR48edZtqXltJHoc8f7na5vT0dHfuhoeH+fWvf01tbS29vb3ccMMNZGZmkp6ezo033ug6kMtGXE9PT4JDvmDBAurr6xkdHaWhoQFrLevWraOxsRGAdevWue+Cjo4Od35MPLvCe46zs7OB2Gai1ObLHPn2228zOjpKQUEBDzzwgJOFmj9/Pj09PVRXVzv7yc3NdfOMbAZt3boVwEX4n376afx+
Pw8++GDCORsZGeHpp5/GGENNTQ3RaJScnBxnh9FolB07dmCMYf369e76eq+dpAjLeZOU9L1797r3efTRR6/aPq81HyXFtwb4ZfzxL4H7PMefsTHeBmYZY0o+wt9RFEVRppDu7m5KSkrIzc1lYGDANUEaHBx0GqCyMAgEAi51NxwOu5pRSaXyLlgkbUwWdkeOHOHo0aMcOHCAt99++5LmON50Q28UTRaNIm8CsehUNBrl9OnTLpVQ/j5cTK31dlwFyM7Odg0pCgoKuOeee6ivrycQCDA8PMztt9/O3r17qaysJCMjw0VDQ6EQZWVlbkEwc+ZMp2En719YWEh5eTmhUMilL4oExdjYGJFIhNOnT7vzI05BaWmp6yYq5/bYsWNu7JJWLB1aRc5g1apVzJw50zV2amlp4bnnnnORra6uLkZGRti6davT05NNhePHjztJocWLF7N06VIXGYeLUhLGGIaGhti+fTs///nPE5yX/v5+tm/fTkZGBjU1NQlOU2trK36/ny1btjj5l9zcXBYvXuxSCiWNW+SBRM5IrnsyXhtJtt9AIEBubi79/f0uDTnZ6RTbmEjL+nJ/0ysnBokRK7//Ui3r3NzcSbWsRRd3Ii3r++67j5kzZ06qZS0O4YfVsr7rrrvYtm3bpFrWXtkg7/3s/efdVErWsgZcSuxEWtYzZsxgbGxsQi3rPXv2uDlmIi1raQYzkZb1xo0b3XW6nJa1OCsfRctaUmM/qpa1t3t5cuOj5EwTb8Mg7z2RbO/S+Vacu4aGBjo7O3n//ffdNfVex+RrLH/7/vvv56GHHuKRRx7h7Nmzrtuu/B2v/chYZZ6Vz+HF7/eTmZnp0lI7OzvJy8tj586drpxg9uzZNDU1ud8RuZienh533Xw+Hx0dHcyaNYu6ujp8Ph8lJSUcPnwYn8/HggUL3GeRztFSp+/VWzfmojxSXV0dCxYsIBAIMDY2xqFDhzAmVi5y0003UVhYyL333svx48cpLi52Tvu5c+dcKUVpaam7HmNjY1RUVCTUqI6OjtLc3MzIyEhCXe3g4KDbFJVOx7KxF41GaWtro6Ojw91DGzZscM672I58l8g8J2Pp6+vjpz/9acL35VRypQ6qBV42xtQbY0QTocha2x5/3AEUxR+XAmc9v3sufiwBY8zXjTEHTDxlWFEURUlNgsEg4XCYUChEKBRy9WIiEN7d3e20AdPT0500yvj4OF1dXbS1tbk0VK+DYYxJOD40NER/f79bVO7fv5/Tp09fsgvvXQAn12TJ62S3u7GxkVdffdWNx+vgSrqU11mRsUsUcP/+/fT39/Paa68xb948Wlpa3HistfT29uLzxfTvSktLnSadtbGOlV1dXW7xv2jRIlc3FQgEyMrKoqWlJSHa0NzcnOCA5+fns3TpUtdQafHixWRnZ5OdnU1BQYFzZMUhj0Qi/P73v3f1R/fff79zYKU+ODMzk8rKSoLBILW1tZSVlREMBlm/fj0Ajz32GJ///OdZvXo1WVlZDA8POw3HcDjs6oW9i2Bp+uNt4GRMrPmKaDZ7G6VIfdbOnTtddO/AgQM0NzdTUFBAYWEhJSUlrhGK1Ob6/X6XApwcyZPznhxdl5pfcYDkObnu3hQ4GaO1l2pZyzWRyJQ3/df79yfTst6/f/+kWtbt7e0UFhZOqGXd0NDA+++//7FqWcPFVNHJtKzlXHjP0eWelwgjXNSy9kbP5LE4SnINQqHQpFrWorU8mZb1wMDApFrWmzdvTliQJ2tZy7wwmZa1NDGbSMu6vb2dhoaGj6xl7XXuvRFRrw16nVF5XuzV29lXHDjR637zzTcJh8O0tbVx/vx5FixYQH9/PydOnODw4cMJck7ejQ253yKRCBUVFRQXF7Np0yaGhoaoq6vj7NmzCfeizNPi3MqYLxd5B1zqv9RJS3M80Z7t7u527ytRcnEYvTXxt956q8uckCwgcQ6lm7g37dwb2RcGBgaIRCIcO3bMpfnKBl1xcTHHjh2jsLCQvr4+enp6eO655wiHw5w6dYru7m4nyZaVlcXatWuBWN+AHTt2MDo66vRfo9Eo4XDYpdlKdFWw1nL06FEAvvnNb7rxeiPWogP8+OOPu+84rw0ZY5g9e7az7+rqaowx/PGPf0zYOJhKrnQW+ytrbasxZg7wijHmuPdJa601xlxVQr21djuwHeBqf1dRFEW5foyMjDgNy/z8fFpbW7lw4QIFBQWEQiGXciUO3tDQEGNjY65zqpfh4WGXxiet+6UTrjg+Uq+ZlpZGT08PbW1trF69OmHXN3lBI1/MkrIq6VcQ2xmW2htxkGXXWd5TZCm8TpQ0vLhw4YKrgZo1a5Zzwqy1hEIh8vLy6OjooLy8nNbWVkpKStzCqK2tjYULFwKxTpOyiy67/dJREmJNMWR3XBZwVVVVZGZmumYsS5cu5cyZM8yYMYNQKOSc2cHBQebOnUsgEHDNmjo6OnjyySddRDI7O5uuri7uvvtuent7WbNmDS+++CKjo6OsW7eOQCBAbW0tX/jCF9zvlJWVkZ6ezre+9S2eeeYZJ7UjtVgA58+fJz093TW88aa8hkIh9u/fzx133MGTTz7JI4884qIojY2NTs9QUrQPHjzompksX77cafCePHmSqqoq15AmIyPDnWORhfDWh8o1rKmpSdAkrquruyb3xLXgo2hZ/+AHP7jWw/mzpKqW9Wc/+9lLXnOlWtaAW+wnk2pa1pKdIfOcIJE8bwqvN8IKiZFTcSzlPWWDsbi4mOHhYYqKiujt7SUUClFaWkpTU5OrpfTWbMvc09DQwJkzZ1xac3l5udPE7u3tpb29nZtvvtltNAnJc/jlNnvEoYXYRqnP53PSZtu3b3cbfSMjIyxbtozGxkYaGxu588473WctLi5m4cKFzJgxg4yMDLfJJvNVTk6OkwoLhUKuoZI43+L8jY2NOQ3X3t5eKisrOXLkCF1dXQC88MILfPvb33bR1Tlz5pCRkcE999zDyy+/zObNm9m1axcPPfRQwhzZ19eHz+fjG9/4Bj/5yU+cLrJsZi5dutSd87S0NPr7+3n++edZvny529TJycmhu7vbdeV94403WLNmDSMjI+Tn59PV1eVsJBAIcOjQIVasWEF+fj79/f38+Mc/pqamhrGxMb74xS+ye/fua2i5H44riqBaa1vj/3cCtcAtwAeSuhv/vzP+8lagzPPr8+LHFEVRlGnI8PAwoVCIQCBAS0uLi2JI91VJWZRUTWut0xWUtFRZUEkHVW/dmjcSKulNojnX1dWF3+/nzTffTEgr9EYMvAsd73vLQm7WrFkuopiRkUF9fX2CpI3XcZVFw8DAQEIqZiQS4YknnnA1QpLaNjIyQnZ2NhcuXEj4/LIIlGY/EOsk3NHRkZCW7I14SA2rt/apqKiIjo4ONz5JMZSFhYx59uzZ7vPIQvCpp55yqV3p6el0dHTw8MMPMz4+ztmzZ90iKSMjwzUuOnbsmFvI+HwxiYaenh5XbycRpszMTD7zmc+482OtJS8vz30msQepUU1LS3NdkuW6yVj37NmDMYaHH37Ypf61trayd+9eALK
yslzUd/ny5e4ciaMv0VipHZYunYryl4TXKRWSHbvkdN/k6KXXOYWYNm84HKaoqIjMzExKS0s5fPgwfX19RKNRzpw542qFxUnzRmSNMSxfvpxAIMCpU6c4ePAgXV1d5OTkUF1dzcqVK1m1apWL4l2uC7Z8huSondc5hYuddmU+GB4eJj09nZ/97Gf4/X7X0VZq1sUZrqysZGBgwGXs+P1+SktLXepsRbyr+Pj4uCtzkHMnG4lyTPRROzo6qKiocHNvSUmJm+ulC29FRQUjIyPceuutCdq51lqWLFlySXpz8mZuJBKhubnZbcTJGEXK6MiRI1hrKSwsTNiQkHH87ne/w1rLY489lhC9ls1mmc9lQ9i7AfLuu+9ezgSvK3/WQTXGBI0xM+UxcDfwJ+AFQOLOW4Hn449fAB40MdYCA55UYEVRFGWaEQqFmDlzJl1dXUQiEcLhMCUlJW7nPRAIMGPGDKdbd/78eTIyMlxXRunOKmmvySlHUhMpX6LyfmlpacyYMcO10xfNzOQIgjhEkq7k3amXL/+srCzXqKSnp4f8/HwaGxvdYk7SMgHnTKWnp7tI3RtvvEFLSwvp6enU1tY6J9Ln87luvtbGNPCEtLQ0hoeH3TistbS2thIOh11H08zMTJcCvHDhwkscq4yMDE6cOEEkEnH6f7LQqqysdJ8/NzfXvU84HOa73/2uS4nzRhhFjqS9vZ2xsTGqqqpobm6mIt4dVSQZZLEnaY0XLlxwnYCttXz1q19l48aNbpEqzq7fH9OTlcVwOBymq6uLnTt38uyzz7prJ2nH7e3t9PX1YYzhtttuA3A1hZLKZq1l0aJFbN68mU2bNlFVVcX8+fOdsy51z8YYBgcHnbSJovwlIXq13jRUiT56N/xkzrtcKq73eWstc+fOJT8/n4ULF1JVVeXqnaPRKOfPnyc/P9+Vccj9BhedP/l/xYoVrF69mltuuYWKigrmz59PWVkZc+bMoaCgwN2fsgGZXM/tnc+9G5Hi1Mr3hDw3Pj5OZmYm2dnZjIyMsG3bNnbv3u2+XyTTwhjDihUrqKurY3h42DVKWrt2LfX19QDcdtttbg4TiSiIZQ5JF2mZv0OhEABtbW0cOHDANYuqra11f3fLli2UlpZSVFTE7t27GR4edg5/Tk6Oi9w+8MADjI+PEw6H+e1vf+tSjSW9PBwOs3PnTgC+8pWvuGsm36Mvvvgi4+PjfO1rXwMStXWtjUmiySajNAOTeVc6DEMsc8fv97Nt2zZXo/3DH/7ww5rpNeNKIqhFwH5jzGHgv4H/a63dB/wIuMsY0wRsjP8M8BJwCjgJ/AL4H9d81IqiKMp1IyMjg7Nnz7p6nTlz5tDd3U1OTo6rebtw4QJ+v991/pQ6wYGBAffFKSmYkpYJF9PVJCIGF5uBSKrTnXfeydq1a7nhhhsoLS0lEolw9OjRhC6VydFYb22hd/d41qxZznFsampi3759jI2NuYWHpFbddNNNWGuZP3++e73URM6bN4958+ZhjCEYDLrIp6SIiXOWn58P4BZn0WjUpYmdOnUKYwzl5eXuXMycOZP09HQXGQXcQrGwsJD6+nq3QO3v76e9vd19dog10PD5fC5tWur6JCJqbUxOYN68eSxYsICsrCyWLVvGnj173N9aunQpx48fZ968eaSlpbnPFAgE2Lhxozvf+/btIxwOJ9QrWWvd35Vr2draypkzZ3jvvfd46623KCws5NOf/jTBYBBrLadPn8bn8/Hqq68SjUa5/fbbnWMr793f3+/OtdTpRiIRgsEg5eXlFBYWus0M2dxIrh9TlOmON/NCfk7WVPVGUr2IcyLIRtOyZcvo7Ox0WQezZ88mNzeX4uJi3nnnHerq6khLS6O6uhqIRRBlM0ocKUkFDgaDrrGZlEtIiv769evp7u6mo6OD5uZmDh06BCRqCCc7rUDCRuPlas4lXTcnJ4f29nZXCyydhGXMJ06cIBgM8tZbbxEIBCgqKnLNgvLy8tzr586d60okJFLpbaw3ODjoPntZWZlzhIeGhujr6+MPf/iD+91gMEhTUxPWWjZs2EBTUxMNDQ28/vrrABQXF7vXSidvSRcXWTS/PyZftmjRooTvTEkL9m78Sp23tdbprra0tODz+diyZcslmwrSyVi0wOfMmeN0mlOhSZK5nEFc90FoDaqiKIqiKIqiKMpfMvX2omTphKRKkcZ54MRUD0JRrpICoHuqB6EoV4HarDIdUbtVphtqs8p05HrYbfmVvChVHNQTV+JNK0oqYYw5oHarTCfUZpXpiNqtMt1Qm1WmI6lkt1eqg6ooiqIoiqIoiqIoHyvqoCqKoiiKoiiKoigpQao4qNunegCK8iFQu1WmG2qzynRE7VaZbqjNKtORlLHblOjiqyiKoiiKoiiKoiipEkFVFEVRFEVRFEVRPuFMuYNqjLnHGHPCGHPSGPP4VI9HUQRjTIsx5qgx5pAx5kD82GxjzCvGmKb4/3nx48YY82Tcjo8YY26e2tErnxSMMTuMMZ3GmD95jl21nRpjtsZf32SM2ToVn0X5ZDCBzf6zMaY1Pt8eMsbc63nuH+M2e8IY89ee47p+UK4bxpgyY8z/M8YcM8Y0GGO+HT+u862Skkxisyk/305piq8xxg80AncB54B3gC3W2mNTNihFiWOMaQFWWWu7Pcf+N9Brrf1R/AbNs9Z+L35zfwu4F1gD/Lu1ds1UjFv5ZGGM2UBMS/oZa+2y+LGrslNjzGzgALAKsEA98Glrbd8UfCTlL5wJbPafgfPW2n9Neu2NwC7gFmAu8HugKv60rh+U64YxpgQosda+a4yZSWyevA94CJ1vlRRkEpv9W1J8vp3qCOotwElr7Slr7SjwH0DNFI9JUSajBvhl/PEvid3ocvwZG+NtYFZ8YlCUjxVr7RtAb9Lhq7XTvwZesdb2xhdJrwD3fPyjVz6JTGCzE1ED/Ie1dsRa2wycJLZ20PWDcl2x1rZba9+NPx4C3gNK0flWSVEmsdmJSJn5dqod1FLgrOfnc0x+4hTlemKBl40x9caYr8ePFVlr2+OPO4Ci+GO1ZSWVuFo7VftVUoG/j6dC7pA0SdRmlRTEGFMB3ATUofOtMg1IsllI8fl2qh1URUll/spaezPwN8Cj8bQ0h43lx2sbbCWlUTtVpglPATcAK4F24N+mdjiKcnmMMdnAbuAfrLWD3ud0vlVSkcvYbMrPt1PtoLYCZZ6f58WPKcqUY61tjf/fCdQSS3H4QFJ34/93xl+utqykEldrp2q/ypRirf3AWjturY0CvyA234LarJJCGGPSiC30f2Ot3RM/rPOtkrJczmanw3w71Q7qO8AiY8wCY0w68HfAC1M8JkXBGBOMF5RjjAkCdwN/Imaf0nFvK/B8/PELwIPxrn1rgQFPyo+iXG+u1k7/C7jbGJMXT/W5O35MUa4LSTX7m4nNtxCz2b8zxswwxiwAFgH/ja4flOuMMcYA/wd4z1r7E89TOt8qKclENjsd5tvAx/nmfw5rbcQY8/fEbkw/sMNa2zCVY1KUOEVAbezeJgDstNbuM8a8A/ynMeYrwGlind
AAXiLWqe8kcAH48vUfsvJJxBizC7gDKDDGnAP+CfgRV2Gn1tpeY8z/IvYlBPAv1torbWKjKFfFBDZ7hzFmJbH0yBbgGwDW2gZjzH8Cx4AI8Ki1djz+Prp+UK4ntwFfAo4aYw7Fj/1PdL5VUpeJbHZLqs+3UyozoyiKoiiKoiiKoijCVKf4KoqiKIqiKIqiKAqgDqqiKIqiKIqiKIqSIqiDqiiKoiiKoiiKoqQE6qAqiqIoiqIoiqIoKYE6qIqiKIqiKIqiKEpKoA6qoiiKoiiKoiiKkhKog6ooiqIoiqIoiqKkBOqgKoqiKIqiKIqiKCnB/wdcMui/+8sqcgAAAABJRU5ErkJggg==\n", - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "transformed_images = [None]*5\n", - "to_tensor = transforms.ToTensor()\n", - "for i in range(5):\n", - " t = transforms.RandomAffine(degrees=0, translate=(0.2, 0.2), fillcolor=255)\n", - " transformed_images[i] = to_tensor(t(pil_img))\n", - "plt.figure(figsize=(16, 16))\n", - "show(tutils.make_grid(transformed_images))" - ] - }, - { - "cell_type": "code", - "execution_count": 32, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAA6gAAADWCAYAAADcga8EAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvNQv5yAAAIABJREFUeJzsvWmMHdd5NvhU3X3feiXZbC5NURQpiTK1W94Vz0wQ4LNjx/AMjMBJDAMBRsmPBBk7+eMltmwr/pFJ8A0QBMnYMBAFiYEAcQbwIsuWNyW2doqLuInsZnezl7vvW82Pq+ftt6rr9samTQf1AESzb986derUOefdnvc9hmVZ8ODBgwcPHjx48ODBgwcPHn7VMH/VHfDgwYMHDx48ePDgwYMHDx4Az0D14MGDBw8ePHjw4MGDBw+3CTwD1YMHDx48ePDgwYMHDx483BbwDFQPHjx48ODBgwcPHjx48HBbwDNQPXjw4MGDBw8ePHjw4MHDbQHPQPXgwYMHDx48ePDgwYMHD7cFbomBahjG/2oYxnnDMC4ahvGpW3EPDx48ePDgwYMHDx48ePDw3wvGbp+DahiGD8AbAH4DwByAnwP43y3LOrOrN/LgwYMHDx48ePDgwYMHD/+tcCsiqA8CuGhZ1mXLstoAngbwP27BfTx48ODBgwcPHjx48ODBw38j3AoDdS+AWfX73FufefDgwYMHDx48ePDgwYMHD0Ph/1Xd2DCMTwL45Fu/nspms7vdPizLgmEYu9ruTvoBAJpK3e/30e124ff70Ww20e/3EY/H5W/sOwD0ej30ej2Ew2H5W6fTQSgUgmVZsCwLgUAAnU4H3W4X7XZbnrvX68n/fT6ftG+apq1fpmnCNE30ej30+31YlgW/3y/95++dTkfaAACfzwfTNG3Px598Pj0G/Hu/35dnMQxD7k90Oh1bP3X/2R/2hchkMrgZunqz2Vz3mX6mWq2GeDwu48Xn0KhWqzh16tSO++DBw+2CF154wZvLHn7t8cILL2Az3cJNRv+q9QY3dLtdAIN+NptNRCIRBAKBdd9zymLKTMpcgvKr3+/D7/fDNE2R/6Zpwu/3yz1rtZqtXf6fMpqfUU7zd8pv/Znf75e/sQ/8Tq/XW6cPONvpdDqiezj7wt/ZDtvmtdSL9N8BiK7iHGuNm9Ux3NDr9dbpMk5Uq1XEYjGUy2VvT/bwa48XXngBAFYsyxrd7Lu3Igf1EQCfsSzrf3nr908DgGVZT25wjfWxj33spg1KGjtv3W9LbWlDVhuGvJaG0lbAzZnXsb12u41AIIClpSUAwBtvvAEAePvb3w5gsEkRzWbTZkzys0QigWq1ilAoBJ/Ph3a7DZ/Ph2g0ikgkgnK5jPPnz6NarYrwWllZgWmaSCQSCAaDAAabXSaTkbZbrRZCoRACgQD6/T5KpRIMw8DIyIgYmT6fD8FgEMViEaZpolQqIZvNihADBhu6aZrw+XxYXl5Gt9tFLpcTw1cb0u12G5VKBbFYDP1+HyMjIyIwSqUSer0e6vU6IpEI+v0+xsbG5J00Gg3U63UxKE3TRKvVwu/8zu/IGFLQbvW98ZqzZ8/KuFDQUej1ej38/Oc/x/HjxxEOh2EYBgKBAMrlsrTx3HPP7boA8+DhVwG9F3rw8OsKwzDwsY99zPVzYE1P0LKi3+/D5/NtOP/d1of+TF+/VcNX/63T6Yg8pRwvFAq4cuUK7rvvPuRyOZsDOhwOIxQK2ZzC7XYb3W5X2srn8/D7/WKEGoaBSqWCSCSCWCyGRqMBn8+HQCAAy7Kwd+9ezM3Nod1u47XXXkOn0xFnLeVjKpWCYRjodrvo9XrixKWT2e/3IxQKAQAajQZ6vR7S6TR8Ph9arRaazSb8fj+CwSAMw0CpVEImkxEDtNPpiIOezzU/P4/JyUmR1e12GwAQjUbR6XRQq9XQbDYRi8Xg9/sRDodlDHq9HhqNBqrVKqLRKCzLQigUQjweh2EYaDabaLfbaLVa8i58Ph/uvvtu7N+/3/aOtOG7UxiGgdnZWdEjnM4SwzDw5ptvolgsYm5uDpZl4cMf/rBcP2xu6c99Pp+rE0Z/l3/fbN4DEAcCnR6b6VlbGSefz7ctnW2j7/l8PtEnee+lpSUEAgGYpolGo4Fut4tUKoVer4dgMIh2u42XXnoJpmkiGo3ijTfeQDAYxMMPP4xut4tXXnkFx44dg8/nw8WLF2Wec30eP34cnU4Hs7OzSCQSCIVCME0T169fx+TkJACgUqlgdHQUlUoFlUoFzWYTY2Nj8Pl8iMfj6Ha7KBaLaLVaOHTokO2ZisUi4vE4wuEwCoWCzaGiHTSFQgHRaFQcH7VaDalUCsB6W+bee+9FNpt1fd/cy/RY7kbA7ytf+QrbeMGyrPs3+/6tiKD+HMARwzAOArgO4KMA/o+tXLjVh+eg6es48M4I4WagEcL/a6+e/tuw/mqBxI2a19JILRQK6PV6iMVisCwLjz/+uK1tGln8qZ+J7Vy/fh25XE6EkM/nw40bN5DJZNBqtWBZFiqVihiKvV7PFn1k5JXPp6OljGhS4IXDYdtYUJjQ+wiseRwZsaXxye9QyNFA1++s3W4jGAyKN5XPToHE9tlur9eD3++HYRiIxWIoFoty
H7/fj7177Qxy9pn328qm1+/3cfToUZw+fXpdtJZt3HfffXjxxRdx8uRJ+P1+9Ho9RCIRNBqN29Lj7sGDBw8e7HBzRGvli3J3I2yk1DmN1+04ytm23+/H8vIyer0eEokE6vU6Dhw4gOPHj8Pn8yESiYjspkHVbDZtcp96EI2OZrOJaDQq96QBSflvWRbS6TS63S6azaYYmfl8Hj6fT+Rct9tdZ3RQ7+DzM8pJHULrEnxGbcQCEP2D7XS7XdGnTNMUhhify+/3o9FoyPjxmXWElbqLaZoIBAJivFInCAQCou/QmK1UKragRbvdxuHDh21R1d0wTjkO09PTOH/+vBjazr8fOHAAr7/+uu0zt3vrOefUefTfnNe7Rbmd7WuDBVgLwAwbA7c1ttH3er2eLcAz7LtbGXPdT/ZVG2faGL527Rr27Nkjc2R1dRUrKyuytkqlkjgt3njjDZmX0WgUPp8PzWYTrVYLZ8+elXGjjk5j+MaNGwgGgzLvGIgJBoOo1+vo9/sol8vidCmVSlhaWpLADACk02nMzs5iamoKiUQCtVrNxgrQDqPV1VVEo1EEg0FZN5qFSLz00ku46667MD4+7jqubiyGXzZ23UC1LKtrGMb/CeDbAHwA/sGyrNc3uWy797D9vt2BcwqQYQtyO/3QC5ubNQ3OXq+H8fFx2Qy5OLSRSGhjmZ/3+300Gg2Z9ACwsrKCZDK5TgDw79o4c3rF2E8KM0JTd7WwbLfbto1Ej6GOnGqDlxRkvZHxWgoDCg16aNgfTUXieOn+OSk909PTQ9/NduaGaZrIZrMoFovr/saNJ5lMYn5+HlNTU7AsSzYeDx48ePBw+8OpPO9GFAzY3KG92bW8fmVlBYFAAJFIBIlEQiJ9nU5HGEj1et2VMUbjUBt2ABAKhaQdyljtWK7X64jFYgiFQqjX6wgEAuh2u/D5fCiVSiL76TjWfeZ4ameyU+5TvgcCAXFqOw1SYM14MAwDrVZLdJpWqwW/3y8RX6euRaoz06dCoZBNt6IuAqwx0gAgGAxK/7QBzn74/X5MTEy4Un63y9Iahn6/jzvuuANnzpxZR8Nm30+cOIErV67Y5oz+v57Pei65wZmW5VwDbuuBY+Q0TIetnc3Wgg4gbGaY6j5vBrfgAvuiHVCGYaBer2NychLdbldYAUy543y5evWqOEo4H3kt52iv1xMDF1hLF+N7aLVacm04HBYmgWVZKJfL6Pf7CAQCWF5eBjCYD8vLyxgfH5d2AGB6ehqvvPIKTp48iXA4LA4NrTMbxoD9WC6XEQwGhabvdJhxTM6ePYtms4mDBw+6OtV2sqftJm7JnS3L+v8sy7rDsqzDlmV94VbcA3CftDsxVoH1HtTttKU3Yi7kpaUlzM7Oot/v49KlS6hWq7IAKGg0DZjUEz2Z9Abr9/sRiURgGANaTq/Xw6FDhxAOhyW6yWu4oDjxGR2ll4rf5SLhPRgBDQaDcu9GoyF9BiBeIG4ubJeCgs/FvnJjJ4XHmf8RCARkgVUqFQAQ7xCFHhdJrVZDoVCwGaitVgsHDhxY906cuS1bQb/fx759+4SS5Hbt8ePHUSqVsLy8LOOaSCQ29bh78ODBg4dfPXS0bbfb1PJ0o+9qUL4Vi0UUi0U0Gg2cPn1adBKt4LJ9Oo6bzaYoy8FgEMlkEiMjIxgfH0c2m0UsFkM0GoVhGGJA8p7AmkG4uLiIWCwm1MNQKCR6RbVaFXlKmi/7TH1DO49J9yWtlnoBx6jf79t0IBoDTCmio5oRYR0BY/Sq3+/LmHS7XTFQ2SZ/pwFBJz+wZqCwLzpQUCqV5B2xX/ffv56JeCsU95mZmaF/c9a7cILzmWlXG/VNz9GtOmioN26Fnehs0209aAakfraNnnMrOpbTYOf/GaXnmmL0kyl1Kysroq9qvbtSqaBerwNYm0u9Xg/lchnVahX1eh2WZaFer6Ner6PT6aBcLmN5eRkLCwui1/PahYUFFAoFMXg1pbxYLGJlZQXtdhu1Wg3PP/88ms2mTbc/ceIEarUaAoGA6MbAepsllUoJG5PfI9PSaYi++eabOH36tLSz2bv7ZeJXZxrvAPpFccEw2rediJnT2+RGcdhM0PAavcl1Oh3cuHEDV65cwenTp4U3/uijjyIWi4nxGYlEEI/HEY/HJYc0FAohGAwKJ1wbzNFoFNlsFp1OB9VqFT6fD+Pj45ifnxfDj4Yj+0xvID0oXFgsdsTf+SztdlsMVm767Xbb9jkAMcgoIJzR0WKxiFAoJL87I8RcqKRhsA16bLXXS9OES6USotGoeF+BwWJ64IEHXKkxzkILW0W/38fMzAySyaSrRwkATp06hW63izNnzsg40SPrwYMHDx5uP2gqKbC7NE1iszb133w+H3w+H1ZXV1EsFlEul/GLX/wCuVwO73znO5FIJCSPlEZXNBpFOp1GLpfD5OQkcrkckskkgIFsLZfLqFQqqFaropACkCij7mun07EVWWIkidRAymbtwKb81alU2gHO72hjnQ54nf6kZTbfR61WE+c37016IiOvmkpMx7nW4SqViug9dLTr1CIAUo+DBi/7Fo1G5X40epnG43ynOqXsZsCINNlYBw8elLlBuM0nrZ/qPuiggP7MqQtx7m3FocL7bSXSSWhDZ6P1QPadMzjkxHbWqjZ+ea0OxtCI53oyTVOcPLQptH0BDIy8YDAo1PF+vy/GLZ1D2vnCdaIDTJY1SMPj3KaDBYCwIuiUabfb6Pf7ePnll9dFfpeWloSlyLRBJzPCsgZ032q1CmAw5wOBAC5dugQA69770tISfvGLX6yL4N/M/ugsPLajNm66hV8C9Gano3986TsxRIZN+K28ECd9oNVqoVQqIRKJwDRNHHgrVwSAfMZckV6vZ8sV0ZQEbhgUHPoe9NoEg8Ft5Ynwen7G6CDH0Glo6/7Q68mJRoGnc0QA2BZzp9MRgcmoq/ZYO/NDKNiYp0ohZRiG5IYAA6pvtVoVAUWj2i035GZhWe55IXoeHngrL4Sb32aV+Dx48ODBw68GWrnW0RRCG3JbjRpQpmnF1+1aZ0SCBteBAwfw3e9+F6OjoyiVStizZw+OHj0qOabUb1qtFtrttlB3a7Wa7f76PjT+KJ9p2NJhq53zwWBQqIITExO2okaMJFJ3YWSTBh2/R/lNvYXRTl5Lec3+a3ov+6Cd4nxGyniteBcKBWFS6dxSbSw2m02Ew2FhkbHtSCQi+YJ8/z6fT/Qk0zRRqVRsrDPDMHDfffetM3g0bjaKyqgx30ssFsPIyAiWl5dd9V7nu2Z0ULPxnDRXNyOUhoyzdosT2ojdbF1oCvBm39W6n35+N2ylPWKYIa3zoVnDhVHIZrMp62V8fNxm+HOu6nmoczp1VFnbFHrusl+maQr1XDsIOHb8Ph0o/P3HP/4xHnvsMVlX+/fvx7lz53D06FFZw41Gw9UWyuVyyOfziEaj8Pv9OHjwIBYXFzE5OSnPw/7WajX89Kc/xaOPPror7AA3Wvx28WthoGpwEmzV8+MGLiS2t517A+vzReLxOGKxmExuTV+ht8S5EdAA1LQTYJAr4rwPMHj
ZKysruOOOO7adJ8JFyfb05sSx5GZBTyUFEvvG59I5Itqbqb0+9FxuNT+E+R+APT9EV2LrdDoiQPjZsNwQDbfNfSvQeSFu88yyBnkhP/nJT/DAAw/seC568ODBg4dbC6a3DFOG3aiBw6Dz5/T3eQ+3NigzaRSxsMq+fftw4cIFHD9+XBy4pO0Oo1NqQ5QyktEgv98vz0jqqmYdOSNs9XpdqtyShcWooy58qNvTTDYAohzr4IHTeNcFGvVxdcFgUBR+Rk7ZT+og7BOd14wKURdh4Sfm9vl8PluOrr7/ysoKRkcHp1uQPsmobalUku9TDzpy5IirjrEdo2kY3BwlpmliYmICtVoNjUbDNob63gTnG39upV9uKWzDDBKto23WpjZydVR7WH+0/rkR9Nwd1o6+p7O9breLhYUF7Nu3T9phX+mg4Zpm7ZSNHFXOd6Hvy2udUW19rWZwaOcOr3em3vn9fvznf/4nTp06Jcb3XXfdBQDSdzpgnDCMQU5qPp+XqtbZbBbz8/PYs2ePzanA9n74wx/iPe95j+uzu7Wv9wPt0HDuEzvBr4WBypfv9BbuFLqtzQwY54TntazM1Wg0sLKygjvvvNNGqeG1zg2ZVF5SAjgx+dNZCIj/D4VCyGQytjyRbDZryxMh2BbvCUC8qbpQgF6Izkgq+6arBdK4pPDSQtowBvQaChxdHTAWi0m/nPkh9BbR28lNi+3TS1UqlaRfvO9WckNuNqI6MzODCxcu2AxjPbb79+/HSy+9hLe97W03dR8PHjx48HDrsZWozUbQ12m54NQVtNHr9/ulImgoFEI2m0Wv10OtVpPoJQ0NLTtp7NHwDIVCcswZ/84cNuak6iiO818wGLSxfaiH8Ng3RpfuuecevPrqqxJJpQObBqOuQ6HlMp+DVffpeO50OlJ1l89JY5mGNp+ZOgod66x0yvuwYCT/zkDAysoKer0eisWiKO3AgJ7J1KJIJCKOdu2wYHEcbdSOjY25KtfbibJr6PfAsdMFmbQuNjMzI5V7nX3QRqubsUQ4/8Y5MyxaOqyfbvq3E2xTrwdnH9zusZVx1EbQsHvrc3Gd6PV62Ldvn6wjHWFtNBry3hlhfPHFF2FZFg4fPgzLsrCysoKpqSn0+4M6Kcyt5jzURy26Ff7SP3XEFVgzrlmLxQ28nkaljvBrw5rPo8eM/9LptLA8g8EgRkZGbEYqx5H3e+aZZ/De9753U/1Zz0GnocrPnHNiO7itDVQ3b4zGdqNjw7yRW+nH/v378fOf/1y8cK1WC9PT01JBNhgMIhQKSQRU52PQ+OKLpEDRORwUNq1Wy1bGndeNjY1hdXXVVnHPsiyhu9AoJK/duQmRnkMKsBZSmgeviypxfLkgO52OFE9glJjGps/nQ7lcRjabtVF1uCFYloV8Pm+jGlG4au+mzuXk9fw/72NZFhKJhCxqvSm5eXC2CwoMYEAtnpycxPz8vPxd33NqagqZTAY/+9nPdnQvDx48ePBw6+BUFreTS+cGraxvFOHkvfv9vlTrjMViCIfDtkJA/J15YtqBzQgoKav1eh3VatXGytL35bPSoUw9hFFBYM1AB+zntXc6HcTjcRSLRZw9exaBQEAq2jcajXUpO7pmhTamaSzQKc7nYHRUK68sMkN9wjm+pPJSn+C48plMc3C+K3UJ3l9Hqkj9Zds6qkvnP6Ouekzvv/9+G9PuZp3dOsJGcCz0Z3zGo0eP4uzZs66GHJ/TeY2O6Dk/5/22ErGkk2GzteKMFm4WaWX/aJg776vXKtt2G7dh/Xa7X7vdRigUkgAJ22WhJBYwTSQSWFpaAgBZk61WCz6fD6FQCN1uV+jj7M/IyAg6nQ4ymQxCoRCuXbsGv9+PpaUl5HI5GIYhFXIZ6OAzHTp0CPF4HFeuXJGgE9kMU1NTaLfbWFxclPHVLAe9j5HpEAwG1xVM1c6MVColVbB9Ph+y2SyuXbuG6enpdWNuGAaeffZZvOc979nUIcM5xTXvdkbrTnHbG6jDvD3cILfjCdWbjK4k6/wOwXbJV6/VahgdHUUymcT09LR44eLxuLTZbDbFS6g3N6cHwbIsW4U5RhZZiY+GJyeWXqgUAIyackJx8bNNvXlo74g2YGkcsv9OocP+az6+c8PU3kBt3HKj1NfSANaLXFOO9TlpvIfm7fN7Bw8edN24nRvyTqDnlWUNzoZbWFiQ9um14piT4k1vswcPHjx4uD0wzLm92/dgu4xUttttrKysIJVKYWlpCel0GvF43JZjGQgEcPbsWTz44IMAIJU92Wdn3qhWBOkIBgY6Co+VoCHKPjnlMvUT3o8GmtYbKN8o01gQhrpBo9FYRy/VkRhn4UJGS7Xy7JZ21Gg0bOwp0pM1C03fi453HWHVuY2GMcito7FOnafb7Ur+K5+F4PPHYrGh+ac7dYAPixq6tefz+TA9PY3Z2VlbP7RxNyxqSr3L2f5W5r2msG8GfY/NIrNsWzsPNIYZ4jpQ5YaNbABnlFn3gVF8FtaKxWKYnZ0FMDBQA4EAGo0GYrGYjZUYiURsVPRwOCxMQF20C4DN+UE2APvL+cfPqOPyaKlIJIL5+XmxDfx+P3K5nKwtHZF9/fXXcfz4cSSTSZTLZdt4MjpMW6JWq0kRMRrVU1NTtnHm/vSjH/0Ijz766IYFj7QjyJkaqO2SnVB9b2sDVecgap40sEab2e6Ccy7mjcLq2vPnzBfhpkmD1E0I0vjSuSI0aEkN0IJDV/7SdJNqtSoeoO3miVDQOPNEtFDQ15CS7GaUai+oFqDae1koFEQwmKa5aX4IhS3LbWtvEdvm/7WH7Vblhrjlhfh8Ptxxxx04f/68zTjVY3PvvffaoqwePHjw4OH2wE7lgtNA1NDySCu8ALC8vIxkMonFxUWhCDL6op3GrVYLIyMjtnvQyAJgq+tA2ipZWvweabI00rSM1s9MeatlF7DmmGZxJCrd+XwezWbTdtQb29E/AUjunps+xc910UPe1xl9o/Odzn4dzdNt0cGv+6B1EX0+OcdPR02ZisTqvYVCQQwNwzBw4sSJDY20m3GAD4v0uYFH2LlFXZ1tDZvj26Ek67zfnQR+NgP1zq2wGHR03w2b5bBST9aBFh04iUajaDQaWF1dlb6RnZBMJsWhok+tCIVCKJfLNjYAdflSqYRgMIhqtSoMx9HRUZimiUKhILaA3+/HyMiILUDE9nq9HkZGRmAYg2q91N1jsRhM05R1ptkGvV4Pd9xxB27cuIHx8XFEIhEJcPE7zndAB83o6Ch6vR7m5uawd+9e25jSqfTcc8/hXe96l60oLcfUGXjTzhPq+5s5LTbCbWOg8jgUPpyb98eJrRqbOtHebXOl8cXB7na7kk9JoWBZluSL0JjkwuQEDYfDQotgPiqTmLUAIq1Vg33n0S7tdltyKLhpptNpyaOwLEsm640bN2CaplBhCH3cizY6ObahUEgqAuqy7jrhvlwuiyCkUUzDNRwOC3WJ7WgDLhwOS8n7YrEo7VmWJdWNabw3Gg2Ew2E0m02hOrEtVu9zCmVCL6qdGqfDnBhEMBjE3XffLedFaUHpFCAePH
jw4OH2wU7kgpMpo6EdvUwJsSwLhUJB9IYLFy7gxIkTItMMY1ArIh6P24zQdDotyq+ObGgHLWUeK9E69R39fy3LGC3UhmylUkE8HrcVbGJOXaVSwb59+yS/joaKjn4C6/UnwzDWyVAdIXVGweig1kaIjgo6v6v1s430QZ0TqCNM1Nd0/qyOymoHeK/Xw5133ik6xk5yTp2Gk7Niq36+zXDkyBGcO3du0/s5gzjOd7QZhjEWhxmBel1sJaLM97cVXWmrbQ/7m44gO5mMBKOeNA51e1rf1PuANt6Z851KpRAIBLC8vCzOHM597g08HpJzTh8NxbZ0fZdOpyMpceFwGP1+HwcOHIDf78eVK1cwNjYmThfOW0ZoeYQldXLn/I3H41IQjGlyExMTmJubw/79+9elVgLAD3/4Qzz00EOIxWI2NqNb1J/X3mwqBXAbGajOiOhm3hEnNlocnGDDFp5hrJ3DyZLM9HpMTk6KJ6DVaqFQKCCbzSKRSMhL0ofusl2n8HAKGx19IzWHwklX5CIdlqWkucFSyDQaDZTLZeHL641Y53Xwdz22gP1MOHoXKZh0uJ78/Xa7LeOoqwXqSnyMoJK+wDHSiezcTEmvaDabiEaj8l1uKsFg0PbuTNPE1NTUuveuhdlOlBE3b6nbZn3HHXfgwoULNgXDgwcPHjz8eoNyUUcWnc5lggoY5VmhUEAsFhPnaiQSwZEjR8Q4tazBGYh0MutIxMWLF/Hggw+K7Ot0OqjVanI/6hNOI1CnB9EIdSrhdKD7fD7RH1KplBQKWl1dxejoKAxjkA8KDGRusVi0UYjD4fBQWinHA1if4+hMCeL/NVWXjnHdlvM5SB90QhvOWrGm7uKMTPN3fTSLzkU1DAOpVOqmZbtzfNzezVYRDAalAi2fZaMorKZibyXnlNdqvVUbu87rtWPBee9hYABqo+fnc7np6U5sZvzoAAJ1UuZHar2fuj/Hynn8i2Yw8Gxcrp1MJiOsCRqiOgLN56hWq7JGaWdwDBcWFkTfJbuSTAJ+Hg6HUa/XRT/u9Xq4cuUKjh49ahujbDaLq1evyh4Uj8clv9Y5Z9LpNMrlsqQvmqaJ8fFxvPnmm5iampLn4joFgF/84he4++67kc1m160nt3e5G7htDNSNNr2bhVPQ8H7ao7a0tIRkMon5+Xlks1mkUins27fPVmmW1zFa6ew/Xyi9ljqJ2e/3y+QDIN/TxYholNGLQjqvbo8bKu/ZbDZtRl44HJa/aW8Pc1T0OGteva5oxz44NyEKEZZgJyVZCxldYIiV8YC1TU0fu0OPEjcFGtfk3LdaLVQqFRFM3GTuvfde23NoT86w3OKNoGlaGm6beyAQwOTkJBYXF2Wc2QcPHjzGYW+EAAAgAElEQVR48PDrBS27NZwRVG3c8LN8Po9EIoF8Pi9FEin3qZACg0hJPB6Xgj2sW+Hz+cRA1P2hPNJGKOUs8+PYDzqO9TXaKV4ul0W2UncoFotC52UEqNfrIZlMiqymkaOLE2kjhvcH7AEGJ+1vs3F3ym9+rnPbdMqPfldadjsZdU4D1xnV1c500zTl/dx///22tnQEbqfQetFOkMvl5P9uBqN+P9pA2opeolPGOD7DntU5pvx9mLGiHSqbGcs66rZRjRg3g8sN/E65XEY6nRY9kmtKV7ylkQoMaNW64CiLJLXbbXHidDodZLNZcfiw79TF+/0+EokEQqEQVldX4fP5UK1WEY1GEY1GJXLZbDZtqYx79+6FYRi4du0afD4farUaotEoRkZGZK7SKXP+/HnMzMzYbJn9+/dLPirzWcvl8rqxtCwLyWQSlUpF2BuhUAh79uzB9evXMTU1ZVufNJhfffVVPPbYY/I+bzVuGwN1NxR8pyDRi0wbW3qDrVQq6Ha7mJ2dRSwWwz333INKpYJoNLoul7PX64k3lInK9FBGIhE5+JcLnjkOnIS1Ws22cToXGz+nQU2D07kxl8tlSYamcGJ+h26TkVC3cdpoDIeBhjo9Rdz0Od6k5ernB+zV62h8szoa29NRVV6TSqWkxDxL64dCIRGmzvcJbFxFzrmpuXkLNYZ5KXO5HGq1mpzL6ub18+DBgwcPv3pQ+XOmDfFvw9g3zn1dV6SvVCoIh8MoFApYXl7GkSNHbPKQ8heAVL83TRPFYhHxeNwm7yKRiBTqoTLMiKU2VkkptCxL2FLamNB6DftYrVZtkeHV1VUpyhIIBLBv3z4xThg55DEtPNau0+lgdXVVFN14PC66ACv/0/ilfKdhznF0Rjn5udPJrKNRfH7DMETv4nuk/gGsN3x0EMDpyHbW2+B7IPr9vkS5ia1GId2g3+HNYCuRKrcx3gyadrtZP7Vu6WbwuH2X75HRy620v9Ez0FjaqtPAsizMzs5i//79tjlGA5U6Km0BAOKoYKCEhqQ2QgnmLVerVQkqRaNROcrFMAzJO+V+wmOlSK1nBV7NmqRuyXS+w4cPA4DtyMN+v4833ngDx44ds+0Bd911F1544QW87W1vg2EM8mn1MZQaLNrGVMRgMIjx8XEZO70eOW7MSXWbB7uN28ZA3SmcBqmbccrfdcJupVKRjdrn8+G+++6T419YvS2fzyOdTksblmWJsckcDmAggFqtls0j4/Qu6N9p7NKAdeZTVioVBAIBEQ68xu/3o1qtol6vY2JiAqVSSZ5BV/NjX90mjzZ29WLVRjOv12PqTGynwNPGqd7wnYagpgU726JBru/d7/exurpq65dhGDh58qSNlrydjd/t2Zwe3K20Z1kW9u/fj3PnztkMbw8ePHjwcHtBM3Z0XuEwJZcGiWVZUgeCKT+kz1FxPXjwoC0lRRuUvI7y2TAMObuc4OkA8XhcZLhbjQq2zQgKjUqtR1Duaopnu92WY1gA2KjH+j6GYaBYLKLZbCKdTiOdTmN1dRXT09O4du0acrkcrl+/Loo15R4VeeomjNzxXEaer0p5r4vQAJDijTz6wrIsYWhRx6hWq0gkEpIzB9hrlLRaLSnGaBiDojKhUAiJRAKFQgH1eh3JZFLGRzvO+ZMU6fe+9702thrHeLuKOHVAHsXnli60U+j5qY0uDT2/h/VP63VbibbynQ+LsLp97kwH2w3oKrEbgXPzIx/5CK5fv27Td3ke8OLiIixrUE9ldnbW9nztdhujo6NyXGQqlRKde2RkBADEDrh+/TpMc5DrnEgk5KgozgHtKGKxtCtXrsiYaMNQp+yRacH9i5RjvvsPfvCDME0TZ86cse1zp06dwuuvv46xsTE5eYT1ZJxOIbIxmKbAo25YSEnPD177gx/8ALFYDA8//PCuvlsnbhsDVYf2t/p9bSAB7oniACSSyDxR0zRx9epV4XAzoscQOr2Co6Oj4hnhy+fE0/mdFBb6BdKDyoIH7BsFFb2l0WhUhA6rgwWDQfHwLCwsyCbC84qy2axEWAHI2ama9sOJ6PRkcpy3Ep7nM2tPJP/PzZtCUL8/p4dSj4sbNF2H99W0B46/z+fD3r17pb3tUm+cGzbvsxOqgmVZuPPOO/Haa69t+f4ePHjw4OGXi7/+67/GT37yE3zzm9+00fyGK
dNk7FCWFgoFRCIRrK6uYmpqyka3dYvKujGjKId1pXoAkvrDzygzteGoHbpsi4ouv8vzSgnW0+DZnslkEiMjI2g0GhI50jLdsgaVbXneKfuZTqcxOzsrUSH2kY5x7dil7sZn1Y53MqpowOqxdzq+dSSQupibceQ8ZzUYDEpEivl8S0tLsCzL9uzUHavVqvSZR9llMhlbPQ32b7sKOI1e9v1morBOOAMMbhhmnOp6LFqvc9ODnMavNojdoN+b83vDWGpuQYKNoNfdsO/reiXFYhGVSkUYDLxXPB5HoVDA6uqqPBdp5BwjnXIGQN4lYD8nlW1z3pimiXQ6jVAoJAxNBr44j8kM5Lvo9XrIZDKwLAtXr16Fz+eTQqJkOayurtrOQT1w4IA864kTJ3D69Gmb0X7PPfdgcXER9XodsVgMsVjMdsSUM6AXi8WEhqwdQRxr57qt1+v40Y9+hHe+8523LJJ62xiow0oSuxkVnFBOL5GOGlKIsE16BPx+P15//XWcOHFCvqM3Td02AFulLL4kTmZu7ozCBgIB4XJzstIo5YvWRhXvy+JL/JwCIJ/Py+YWj8dRr9eF+sNiRfTWcNLr/AM9mTQFwG1DcEYV9Vg7r9V5ChRs+vva40jjm9/n5krhpTdG3QeOkxZMo6Ojtu/tdFHs1mKyLEsWvUfx9eDBg4fbD41GA29/+9sRjUbx9NNPi+x0Ok61Ikqn9urqKiKRCK5fvy4pLLxeG6CAPSKn5az+TrlcRiwWA2BPMWE0k3oBc1lZd0EznbRR1ul0JNUHWIv0NhoNcRyn02nkcjkpmOJmVFuWJQ58Gr46Rchp1GgnMp3STjjvo9Nh2A7TpnS72qmtiyw5jWHdB+pW1CcYAWV0VRseNHy1keP3+22nHujxvBnDUkf7tmqEucGpXzj1ODejww3D9GU3J71TP9yqjuP23Y2eezvjovVtt+fkXOB46xxQXUuGBhj1eK5nTSXX9gWLG5E9wIDU1NTUugKhZFgCEAOYfeO1/JzPo6tfkyrPdUhG5/Lysm3OHj9+3DYue/bssdVG6ff7GBsbw3/913/hvvvuE9tBr3n9DoLBoER3WbS10+mgXq8jFAqJnaHXcK/Xw+zsrO0c1d3EbaNVP/XUUyIA9GKhsQWsr45LaOueL10XEqhUKvD5fDh9+jS63S7uv/9+meR64+emGAqF0Gw2sby8bBM+jOLVajXEYjFkMhnkcjlEo1HxzNbrdRSLRZTLZZTLZdTrdVsxJC28Wq0Wbty4IcYpvTbXrl2TMtCjo6M4cuSIUAlYCXhubk7ORqWRSlry6uqqGIX1eh21Wk02dP6dY9vpdMSryAnJhUZKFD1JfDdOOpPznVGw8XNGkYPBoBxAHIvFkEgkkEqlkM1mkU6n5WcqlcLIyAiy2SxGRkYQCoUQj8dx7733bkhb2Qx6jjg90zvF4cOHNxUKHjx48ODhV4Onn34azWYTd911F77yla/giSeewPT0NIC1HEHTNHHPPffg0KFDiEQiqFQqUiH//PnzmJ6ext69e8U4pb6g6yhQ3mmHK5W5Xq+Her2ObDYrugLrU/h8PpTLZUQiEYlUUs7qQoZsh7RB5oVSJubzeVy9elWqhoZCIRw4cAChUEhy0LTs1qk4CwsLuHbtGrLZLAKBAFZWVuQ7rP/AKIuO3hmGIalRupo/I4iMLOlnYV0NfX/NiqKyzja1bqblLHUojhWd9BwrGhO6wAzfNyPLWqnWxZGc8nwzFpgGdSdep/XXnULrPRsZAhv10Rnh3I7usxUdh/qVk1mwETYaVz1efFeavr5RP4CBLh6JRJDNZtfdg9FQ5z1oxHLOku7NaKhmBxiGgTfeeENowpVKBYlEAoZhCDOS1bg53uPj41K4jI4dBqDYl0gkYmNa8ihGrg32S7MjgUFlXjp7NB5++GG8/PLLtlo1bk4By7IQjUZF72fQLRqN4sqVK7Z2NePj/Pnz+N73vndL0txumwjqCy+8gC9+8YswjAGl87vf/S6+9a1v2TYnvcAIbbQyN8SyLJRKJfj9fjQaDaTTaXQ6HTzwwAPyct1eDhdYt9uVQgXlclm8ISwxnUwmJe9Ue0H5k/kMbJdRXG6ourhBvz84Q5RGKgDs27fPdkapLhXd7XZRr9elQtjKygoOHTqEc+fOYXx8XIzJWq0mVCDDMORcM3psOIatVktyJOgF6vV64rUhxUYbnBSQLEPP6ynIgLVzV7vdruSXMME8Go1ifHwc165dExoRn5OCi+emAgP6zeOPP+4aYd0O9PX6583irrvuwoULF3alLQ8ePHjwsHu4cOECrl69ipmZGXQ6HaTTaXziE59AOBzGv//7v2N1dRUrKytIpVJSjXd2dhbXr1/HO97xDqH1ttttW3EgylNtsOooH7Cmq/R6PdvZ3zQOyEjShZN4vZZPdHDrNCV+p1ariTOdqUnRaBTNZhP1el3a0Qokld2VlRVRgKlIdzodFAoFJJNJlEolZLNZOeORzuZarSYynnqDjlqyCipPDqBjnX3R0SiOU6vVQiQSEXYVC9Mwn1XTsumA10wxTSmm0ZxOp0XhZuEZjjvHme8il8tJGwwGuLHNNoNu41ZAB1SArRcYZVRcp09tdq0ukOMWsXX2CxgedXYbR62/D2P1ubW5UVucz36/H7VaDaFQCIcOHcLly5fFPuAYkqHAtcHxYdCG90yn06jVaqLPUoc2TROLi4tSuJORR87PTqeDZrMp0floNCqOGq3HUnefm5sDAMRiMTSbTczMzMDv96NcLot+bFkWTpw44fq+Dh06hDNnzth0XcMwcOrUKbE7YrEYKpXK0Ch3JBJBrVYT+jIwCMRcuXIFBw8etFH79Xt/5pln8Pjjj+9qJPW2MVADgQC+8Y1v4H3vex8OHDiAxx57DO94xzvw9a9/Ha+99prQZp00Gi46RktJyeEht/R6kpZKj4XefAB7ngE3WhpfwFrSu883OA9V03GYa0oPpz7gmteSiqON7WKxKBXzDMMQKk6xWLQtRk4AenVYBID94ySisOOGQk49sFaciJu67gfvz8rEuuQ2n40UGn2OFL0sFE7aiAYgVcroSWW1Mt6/Uqng2LFjaLVasvgASG4tPTrO/F5gZzRd5wan6SA7gR7HO+64Y0dtePDgwYOHWwcdlWy328hkMlLQ8NixY/inf/onxGIxqZJLGfbOd74TwWAQ9XrdRn8jLZVySTupNWWS7ZCy2+8Pzj3UBRapRFO28nv6uJpSqSSym3J5ZWVFWFamaYrxTec7zx8H1gwmwzBQrVaxsrIibVEO07GdTqcRj8cRi8WESUYDdWRkBOVyWc5Y5HPr/FcazKzToSOovV5PFHU6t0OhkPRF56nqUxO0/qOd6aQeMv+U9GTqMaVSSQx16kj9fh+lUkn0FU39dJ7Fyn5sVz/Q9NDdyD11prJt1iYNJCd0HjOwuWHrfP7NDGLNJnAbs2HGvjZstwo344rFwnq9HprNJhYWFjAxMQHDMPDaa6/JOaKsuqsLi5ZKJQCw6boMTPF3yxoUSU0mk+JkymQyKBQKtkJp8XgcvV5P1men
00EsFpNgFKOqDIRZ1hplvdVqoVar2dgU3W5X9gDq68ePH1/3jjnvjh8/jnPnztkcOjSgudZ4DqrzemAtdY3MS15/8OBBXLp0CUePHrXlZ2tb55lnnsH73//+XWMU3jYU31deeQXPPvssms0mXn31VTz99NMIhUL4+Mc/jqeeegq/9Vu/BcBOyQkEAkLJMU1zQ0oOsOYt44tjVJALh5sSqSLkYgMb03G4kOnx4++cDJqK0+/3bVQc0zQxNjZmo+Kwj4D9CJZWq7UjGg4FM8910uen0ZPGok+k4PDeBCecpuBo+o0W0hzLrdBvKCB0Xg/pN4ROTtd94f+3shg0xWdYNH670BQlDx48ePBw++G3f/u3USqVxDCiE7lQKEh+IivrMtK3Z88e+P1+MYLomOYxKlQWnTmajBYCazRAytdGo4HJyUmpDExQGYzFYggEAkLDK5fLKBQKNh0gn89jdnZWIkN79+7F/v370Wg0RK9xyiWfz4disYjLly9jeXkZwCDCWCgURLlOJpOIx+NYWVkRyiGNUTr4acjRKKQ8zeVyUmUXWMtnZbGlVqsl46vTlDRri++FRg6NW124kjoVDRGy7fi87XbbZihQv3Mad+wjdQ+fzycFM53Yin6gI0q6jsrN5q8Sbmy/jUAj34nNdCUnM1HTkze7Rn9vo2uclFr+3ChCPSxiq6OvzLnmuw4Gg8hkMrZKujrHulariVHa7XalujSdASy0xXnJ6CnrvHC+ca2zUjTtCbIiaXz6fD4kk0m0Wi0xDPmedBBG6+W5XE4CYfwb9x/nyR/OVMS77rrLFvUGIExIRnZpsGvHgx7nWCxmYyr4fD4cOnQIFy9etK0trj++i29/+9uuLNWd4LYxUH0+HyYnJ/HlL38Zf/RHf4QLFy7g3/7t34Rm+uijj+Kv/uqv8I53vAO/93u/h2w2i1wuh3a7jVAohFKphOeeew7j4+O4++67ZQIFg8F1B0jr/FPtNXDScXTSPLAWoUylUgDcE87pBWw2myiXy7ZoaK1Ww7Vr14R+Oz4+joMHD4pRRmjjlF7Pubk5LCwsrKPh0PvD3BbScDKZjOR6ALBFPmns0tNIweasTEyDHLAnn+sCR3ph6Kj0ZvQbHuFDqpDm1+vNHhhQaPU70O3qd7gRnInsu0Xv1V5DDx48ePBwe6HRaIh80UV0isWiyHkAkrsJDPb1RqMhjl/SahmJ0LRHfl/LFK1nAJC0GcpZgpRCXYyxWq2iWq2KzOx2u1hcXMTs7CyazSaCwSCmp6cxOTkpBhsVW+0gBoDr16/j4sWLKJVKomfk83lxhCeTSYmUUlEHBvJ8ZWVFjPRUKiUGIaPM1HXoIGe9CBquOlqqHdWMHmldgQ5qKuqavcb+OGmYWvlnBJifMTVJH8vBdvRJB9RrNANKG1hbketU4m8Vpdetwq5zrmlnyTCdyO1Z3Jh6WrfaDNoZwPm3EbQxpXW9ze7lzG+kLkca+/z8vOizfN+hUEiCV6Ojo6IfRyIR9Pt93LhxA8BaxFkzA/U6YjSU655tA5B11W63hVVA3Zlzj6mG/B7p72yfObI8xkkfmQQAy8vLUnHaMAy8973vtY2DHks9zw8fPmxj+TGnlJ+ZpinnrLrBsiwkEglbKoPf78fU1BQuX7687l1rxugzzzxjs2l2ituG4vv6668jHA7jq1/9Kr75zW9icXER99xzj2xGjHwdO3YMS0tLiEajQnfli9mIkqMruOnFxN81HYcvhBFONzpOKBSS77EgU61Wk5fCttvttpyRZJomxsfHbVV+dY6INuIWFhZQq9XEuK7X60Lb2S4Np9fryeHCXBz0ENG7RMGrjfJOp2OjRlPYaCozFyCdAbogAv++Ef1G38M0TSwvL4uw5yZw6NChdXSG7VJvtCKhN8adgnNoK15GDx48ePDwq8HBgwextLQEwB4loLxkURIaXsxBS6fTknvKeg5UEilHtByiDqHPBaSySqVzfn4ePp9PIo40zqjLaIW/3W5LVc5+v49EIoGRkRExwJx6g06xKZfLkubEaCwN42AwKIWgmE7D61kokWlKwWAQtVoN6XQaxWIR0WgUjUZD6n3QuczrE4mERIR1ni51Ao4t9QiOLQ1ORmedBp82aqiP6EI3TB+i/qHzX3UaEvUz7TxIJBLSjlMv2I5sdzMkbwV0Gpcbg82NAkx9xdlH/byaXbhdHUkfsbIRtFGjjaeNrqO+yf6y9kur1UKxWMT8/DzGxsYQi8Vkjvd6PTkdg/r5zMwMLl++bHME0ZHEn7xO35NtUmfmmo1Go0LjpZ1hmqYYk/1+XyKrvJ7rk3YDALFhGo2GnH2qHWHhcBj5fF4cNk7HjR4//c7C4TCOHj2K119/fR3D8cc//jEeeeQRyafVRdS0M4lrkmuM73hychIXL17EHXfc4RqZN00TP/7xj/HQQw8hlUrtmElw22jWrBb7gx/8AB/96EeRTCalHDu9GLVaDeFwGMvLy7aNhx43Gls0BrXngO3oTUjTYPld/k6Kr6bjGMaAPsuS86xKa5omms2mCDh+t9FoYHFxUYzZ0dFRJBIJm7eG/SJMc5B0zSRtYHDYNQsmJRIJESrMQ9Ubkj4ChpOJUV/dN83XZz4I7++cTNoD6YyQ0uAktGAmPZrvh/ekQ4AC3w18h8lkcsfVwfRiHUYR3il2KwLrwYMHDx5uHZLJpBiIWjYyj4xOaJ0zSQc45SsAicBqJ7d2cAPro0J0ZuvoCfUafpfOW02Vq1arcgY6MMhrm5iYEBqrlsNsCxjUfVhYWEA+nxe9gCcJABAaMZVz6lY0+nSEN5lMyvMx8kLDlU5pGsdOWnE8HrcZT9RFqG/wXdABwCik1jMIbTDR2B3G3tJjw/vqysr6yD8aehMTE7b5sl39wBk93204dTIaOHp8OZba+PT5fKKD0QDj7xpal9b33Ap4D+2UGfY95//Z942MevaX74pBmuXlZbRaLbzyyiu49957MT09LQYdAKHKM/Wv3W7j7Nmz4oBxGlXshzbE6DTSzEKuddoVbnqxM9LP/UUb2fw7ny8YDNpqv9CoBSA5q0zR033WATQ3mKZps6MYRT127BiazaYckch+uLVFxxXvHwqFEAgEkMlksLCwMPTeAPD888/bcmG3i9smgvrpT38aTz75JH7605/iRz/6Ef7gD/4AyWTSNnFKpRKCwaDkUUSjURSLRXS7XYTDYVsOJb2MzJvQ1E5OZB1N09SPRqMhB9tqOg4w2IgnJydtHtVKpWITSktLS6jVatLv8fFxieyyOINTuJimKbmq2jBl5S16Ohj9nJycBDDwAudyOTQaDcTjcTFsueHTS2OaJkZGRlAoFGxeNHqjnIuOxqT2UGrjnp7PcDhsOxuKf9cCT+fi0PtE7yujpACERsX+WpaFkydPum58W9lAKRCdnr3diHhqr6UHDx52jg9/+MMA1lfZ5u9OeiDXnjYQ3PYIve43im4M2w82Evxu0Ra3+5IRAgz2N0aeXnjhBVGUy+UyTp06hZdfflkYNktLSxLZyWQy6PV6WFpaQiqVwtLSkuRIlstlZDIZzM3NIZf
LSb5kuVxGs9nE4cOHpV8vv/wyTp06hUqlIg5GjtvKyorkSLkpsbFYDA888IDtGTl23GO3sx9+5Stf2fJ3bxbNZhPpdNqmiPv9fuTzedEDKFv5PGT70LihzkC5r4+B0FRTfY4iKcLAGs14dHRUZCswkL+JREJ0hUqlgtXVVZnnmUwGmUxGiqc45SvfX61Ww/Xr10WHMU1TjphjTimjl9qgptLMuUDmGOcRWVXsr9aRdASX/adRzzSjXq8n9TcCgYBcp1OHnEYs81X5nKztQSOZkVi9tvldvc61g557Rb1el+I3wWAQ6XQaR44ckb/zuu2A39/N6Kl2gDihDTz907lv8v/O7zvBubYVvUgbWtpxs5mhxO/o6LhbgMZ5L44BdXsWGQuHwxgfH8cHPvABmTssksp+BAIBLC4uCvW8VqshmUwKM5IBFo4bo/ecq8yhphOL6YTAGjNC7xd0NlHvZUSf7dOWoF1CxkGn00GtVhM9n+uQ0ViyH4PBIHK53NCxdYNlWTh48CBOnz5t2+/Hx8elVk+73ZbKvTr45DTgyaIwTRPhcFh+X1hYkHQDJwzDwHe+8x3cfffd2Lt379D5MQy3jYH61FNP4Ytf/CI+9alPIRaLIRqNSvIwvXb5fB5TU1Pi8dMvM5/PY2JiQjx73Fi5YTIszs1QR1iBtcVNY0t7WbUXh38nTYVtkqdOTnokEkE8Hkc6nUa9Xhfhp+/J627cuCFUXxqq2nNqWYOiA7oaLvtVrVYxOTmJUqmETCYjZz/RsKX3ltSWdDotRjWfk8KXPykAtAFP6pLOP+Ui07kLTiGlNxluTlSCuKlRSSiXyyI8qQik02nbIteb8GaCxOmx2k0MEx4ePHjYGZzsDH4GDC/goZkohNvadFOe3Navc59x+/tm+4lTUdQROjr/AoGA5ByZpolz586h1WoJpZMKVKfTwcrKijxrLBZDo9HA9evXbYV5mLNIKmgwGESpVMLy8jLGxsbE2VcqlZBKpVAul21G/sjICFZXVxGPx20ORaJWq+HNN9/EgQMH5Nkoz273fbDVaiGbzUrUjO+DeaaBQAATExO4fPmyrTYFK3qS8sr8Sx2R41jRCALWoonUTxqNhhQkoTLKVJpGo4H5+XkAa9GoyclJhMNhuZem4QJrso+VSqn4ston11AymZRjW3RlfB19owFH53av10OhUMCePXvQ6/Vw8eJFHDp0SCKuhjFIveGJBDQaaWSTzkxWlmmactRLpVIR45OGAI+W0UYB82w5tvyd/Sd9l/1nlV46DRj1brfbSKfTYhDl83lJe8pms+j1enjooYduKsKzWylDhN4Dh7Wp6c38HVhP99zsPlu5lxM6Yqvb2gxOg3uzKLV2oPT7fXHYNZtN7N+/X3RRtsf5RnuB7zSTySCRSACAOIdodGpKrw5UkdrOOcpnbrfbki/KYxtN00StVpP5xGMbmf9MZgSDPfw/mZksJMrqvjyHlPp1LBaTIxcNw8C73vUuWe/bed/33HMPXn/9dds7vPPOO/Hiiy9ifHwcY2NjUhhN20SaMm5Zg/RCHtfk9/uRSCRQrVZx6dIlHDp0aN084fVnzpyxOZ62itvGQO33+/jsZz+Lw4cP4yMf+QgymQwA+3lXc3NzmJ6exurqqlSdo8u8Wn4AACAASURBVJFGGi03K1aG1YKGUVJNA9ELhuFrYFA6vd/vI5VK2YxWloymoVar1bC4uCjVfPft2yfPQ08jsFaIiFTl1dVVG7WmUqnIBptIJIT2oqsL8vpAIIB8Po/R0VGMjIwIBScWiwmFiIKV19PIpEDKZrNot9vCPadXk2NJLw+fvd1uS3Eo58bIdjmJ9TltAMRrqXN8dFEIeja1N9cwDDz66KM245f33qq3Xp9HtZtG6u2ukHnw8OsKrci4KTE6qjpMyaFza9jfN1q/Wi64eaY320s01Y2GC/vK48+oNGshzroBVD6oEOnvBQIBLCwsiIJExXtpaQm9Xg/ValXkYa83qJR65coVVKtVKcbHFJNEIiFOUbbDI85orKyurmJ8fFy+w7ZOnDjxa7UH5vN5USDpbO73+4hEIkLbZeVc5n6xfgWjg87on87H0lF9zdqyrEGeG6OxWhZRP0ilUjh37hwymYycf66NO218UNlj0USdB0vDNBaLyf1olGpaoL4mHA5L5IQObOpIdKYAEFkdj8flCJlQKIRWq4VwOCyOe224U96zD91uV1hpjKgyMgUMoko0HEgD1hV69VhQT9N0X96DPwOBgKw7vrNKpSJH/ITDYXHGANt3Ym9UtOhmsBXd5ktf+hIsy8JnP/tZG/XZ2Q89L90Myu3oRWxD04gZxHC24WxXO8KANd3Y7d462kiWg8/nw+nTp/Ebv/EbmJmZkT0xGo3KfksmINeeDjhRD2aqm869Jo2XjkPNCtC6JuchDUyyABlk4rhwzwcGNkQul5PruI6o97PvdPJw7BKJBPr9wXFIlAGGYUhhUz12W31//X4fx44dw5kzZ2zv/9SpU3jzzTdlPBKJhC14xZQHvc/xfGTmsvt8g2JrFy9elPfj5li+dOnSlvqqcdtImU9/+tNoNptYXFzEl770JSmgw82GngpnXiMHkB6LaDQqL5NC3ckJp5EE2D01erInk0lkMhmbB4XnjwLAtWvXcPHiRSwuLiIajWLfvn0YHx+XTZN95ISkgTg/P48bN27IJF9dXUWxWESv10MmkxEhoL16ekGbpikGJEEhqo9u0QudeRcUCIlEQp4ll8shl8tJQQFy9und1IWROEm1N9aZu0rFjga7s9CSzlnRiiQrKHKBG4Yhi9sZPdkK9Pd2g3qjx9+DBw+7B6fypKNRbtBr0O17WmFzfu6Mbg7rz1ajsISWSxrc0+gYzGQyIk9YYZ50SnqladTSy88zvrVxTjqp9lKzDyy2wedYWFgQJQxYU+Tpxdd7Nx2XoVAIIyMjWFxclP77fD6srq7i+eef3zQCcjuhXC6LIQqs5ZLSYaBlNseTBhNrTTCCSCeCVhAp46mHAIN5UCgUxMiiEtvtdpHP54UlVa/XceTIEczMzIguQ1B/4HwpFAo4c+YM5ufnxSgkaysejyOTych56852KL+BQWoUa1IwEgRAqHvtdluiNqlUSthTNO5YCZUpULqKLYMEhULBZgRoeZxMJmXu0unP56HuwUgqDWcq91ph57XUS/Q4A5BUJ+o0XDdkayUSiS07u53Q+tluOr832veI06dPI5/P4wtf+AK++tWvYnx8XMaee5STQqtp3W5O/o2ewa3GCT93g7MtOggI3RfnNRxXFvXas2cPpqen8aEPfUiOdaQR2mg0UKlUUK1WZb1SZwUg54xevHhx3T5648YN2R9ZpIyOIZ0ipx0A/N3JRuAeoI9CIiVY6+KkAWvD1LIsoe7zuajbh0Ih1Ot1idS++93v3tCo3woOvMWA0ddMT0/jypUrAAb2AgNRmsqt506/37cdjUOZNT09jcuXL+/qerhtIqhPPvkknnzySfz5n/854vE4VldXxZPACRIOh1GtVmWicqH5/X7Mzc3h7rvvtgkKUk64qepIqF4wpK
PSM629BYwoMnxPOg5fCmnF2uMIwDapWbiAxhzzSfhySV1gNBOw53VQgSAVhkYnje/Lly/baDgApHKuTpLneDIayoVpGGtnp+ocJL3ZO4Usx4vRUhah0BEAAKLs6ONruAGQbkNDl0IrlUph7969N3WW0maRmO1gNxecBw8e3KHzSYetOSdFzM1hNOx6/fmw77gpT8RGxdp06oOWLfV6XZT+SCSC8fFxKfI3MjIihqmGvr+TrcL9UivlTjijKt1uF5cuXcLBgwclwsZ+suIqsKaIsjplOBzGxMQE5ubmRM7Rcfnss8/ajjtwgrKLxg3v56zG/ssAHdI0RPv9wdnksVgMly9flu8YxiA1h4oiabnOs1D5fRqUfCc6srS6uipVgCnflpaWxGBipJRVPnUeJfvLdq9fv45KpWLLZaZyGI1GEYlEpLYE5Z52YFOeJ5NJWySXz0GHt66lwahzJBLBysoKUqmUyHznuJLGqPUPrf9oAycej6NSqSASiUj1ZOpCAKTvdN5wDvPZnDnS1JP0cTKcZ3QO9PuDFDBSrn0+H/7wD/8QDz/8ML72ta9JNHY72Kwo0FbAaDHf2VaN5a997WvI5/N46qmncOnSJfzpn/4pAoEA3njjDfzP//k/bfm9Ghu1v5nDjnNxO1FX3YfNaNDc05aXlxGNRnH27Fm8/e1vF2YI29Dn44ZCIdlbub9EIhEsLy9LkCMQCCCdTiOVSsHv90va2dLSkhhbup/cp2k4cv5wPbXbbXQ6HRv7gueSkl3AtURnFtcF22GQq16vi11SLpelei/HeXV1VfYOn8+HkZGRoZHwrYIpg9qwBoCTJ0/i+eefx6lTp8Q+YCRVyx6dw68dXBzr/fv34/Llyzh8+PCuODE3DQcZhvEPhmEsGYZxWn2WNQzju4ZhXHjrZ+atzw3DMP5vwzAuGobxqmEYb9tqR/x+Pz796U/jL//yL/GRj3wE+XweAGyLgpOCeRXMH+n1epiamhLjSRukHFR9UDSVIHpMuPFWq1XhjzcaDaH0aDoOixgdPnwY09PTomDoDV8bdQsLC5ibmxN+fKlUEiM7l8sJ/5z5Q7yX3tRjsZg8M8eDNCMKNk3DoTeGC5gVkbWBHgqF1lX2jcVicp4qacNsn5QhbgasAOzcvOg1AiDf4f81jYIbvHPcKfB2OsE1zYjPdbPgxu4Zqh487D64ZvXe6fTyu3n83bzwmgrovIf+/kZFJdzgzIl1679z7wEgKRihUEj2u3q9jna7jdHRURQKBWSzWSSTSRw+fBgHDx7E6Oio9DeXy+HIkSO45557kM1m4fP5kMlk8La3vQ3T09OIRCKyn09NTUkuICOe/FsoFJJoqB5bwzCkGKF+VqaZ+P1+7NmzR67VcumZZ55xHSsAIhu0Ek6jy60I061EMpmUfnCOLC8vIxaLidFI2ct+UrlkThkjfMCaIc/2tNHEfDbWegCAQqGAxcVFWJaFVCqFyclJKVpEGawVfzoBLl26hLNnz0pqTK1Wk9oXNPCoGDOdhY5yjjPlPHUaMpdIc6XxxsIxfA4aq1SUKa+pGzClKhAIyHymfKfcpy4TDAYxMjKCTCYjv5M9xUJF+sx2Kvm8F+eTLlTF9CeuKZ2XR+gCl3SkU5l+8MEHYRgGZmZm1hXCHAZevxvGKZ8JWFP8t8rO+uhHP4q//du/RTKZRKPRQLFYxDe+8Q1MTEzgySefxOc//3kZ440cbluBPhViO0w0t/3a7TumaeLAgQOYn58XdkAsFsOjjz6KcDiMTCaDsbExZDIZhMNhoZQbxoBOTxos/+kcSgZ16AhhRV46ljS7kPOe+drNZtNW8ZnUWh3BpxELrB0hQ725XC5LLrZ2PvHoJ21XOMdGn65Bpy0L5jnHczvzkPPrxIkTNrnKdh555BH87Gc/k3XM6r/aZtKOMzoEdKDLNE1MTU3h3Llz2+6fG7YiKf5fAH8L4Ovqs08BeMayrC8ZhvGpt37/vwD8bwCOvPXvIQD/z1s/N8XnPvc5/MVf/AU+85nP4MMf/rAt+sXBSKVSuHLliljrKysrshBpjDI3QofGNQ2VBizbp0LDSCk3O07ofD6PVCqFUCgE0zRx4sQJiZgC9hLnpmmiUqngypUrQvVptVpShY6bMSk4nNw62ZvPxmIYbIMKByd/u93G3Nwc9u3bh5GREVQqFYRCIckf4jmwPICblDB6balMaaOYfaDC0ul0UCqVxBtZr9dtEUl6lXTUo91ui0eYEVyOEd+l9nLG43FcvXoVyWRSxp4HhO9kcmtq3k42Zq0sOH968ODh1kCvdV38h39zMiH0/7mH65wzJ7hHb7SON7qe93GCUSS3/YpKDfsPANVqFYuLi8hkMpidnRUZRM+8PoqkVCqJ047VIbUsK5fLIovi8TjGxsawtLQkRT72798PADh//jzq9TpGRkYkhYT9+vnPf44HHnhAzrfUxk0qlUKxWEQoFMLevXtx9uxZHD161KagfP/738fjjz9ucwro96Y99Tqa8stELpezRd5JVT506BBmZ2clX5L6AnUJy1rLDyMTSz8fMJDNzrM7qZAuLy+LrE0mk0in06LgUpGlosp/zWYTc3NzNpmsqy5TvutTAtimdvKQ1UQ9xmlgFgqFdc5iGoHaAAcgjLVisYhYLCa5zgAkNUdTG7lW6RinzOdpC9FoVFgEnG/RaBShUEgMY64JvjMy2ADYjAyuaX633+9LdFob29TfgIHR9S//8i/40Ic+hPe85z0IhUK4cOHCUH1D7xvsy24ZqYC9wu1WjNR//ud/lmigYRh49tln8Wd/9mcyN1ZWVvD5z39eHB1f+MIXbAy47egy2jjd6rodpjM5nWMsWnXt2jU0m03ceeedANaow51OB/l8Xow1HutIqiwdSJwHDHLQwKPzyDAMW0STZ5dWq1Vb1V22x/lK1iarYVOn53wjdVzLFjpXmL5HvZ3roNlsIp/PSwqc3+9HoVCQY624/9Npxj3i/e9/v7BP9F620bvk3zQjlNfcfffdeO2112yf9/t9PPbYY/jpT3+Khx56SGoVsKCejvSzHdMcHAVZKBSEZWKaJmZmZuT4LD2XtotNV4NlWc8ByDs+/h8AvvbW/78G4APq869bAzwPIG0YxuRWOvK1r30NsVgMwWAQY2NjrhtCIBDA6uqqGKTchLg5c2JTsGhDkgtM04x4D9KZdDh7eXkZhULBlp/BSe6cIFzAxWIRs7OzUpSJC8Xn8yEajUphCnqAtNeWz2Capo12pT0dwWAQhUJB+shF4vf7RWDpXCgqT/QQ0TvE+2sOOYUbPTXsDxcSxxCA5J3o/mtBqTdBCia2SQ81v6eFkGEMorh//Md/jPHx8R3le95svqmbcesZpx48/PKg172bcarBPZNCcxg2M06BzWlow/JadVRRf67prPxOs9lEqVQSw8Hp4GQOFQ0Mv9+PaDQqlUwNw5BcQDo2E4mElPmnEqb3WgCSHsK8JmAwtg8//DBeeukliYrpyCoAqQxP1tC1a9fkmfjM3/nOd9bJCL4TLVs0VeyXCZ4pSPlI5TgcDtvei883qDZJ2UaFi
zKTxif/rhlcfI/9/qDiKFlT8Xgce/bssRVWBNYfEVIsFnH+/HkZ306nI+efUwk0jMFRe6zxoIu/8LkikQgSiYQcu0eDkVHTfD6PQqEg+gTnqa5TkUqlkM1mxTBm33VUBRi8Y0ZxtVMplUphdHRUDGQ66OkYmJ+fF/1IG5lU5NlnYK0iMtlb7C+ZXBxDrXxrWjnzYfme+Z7+9V//VajNjzzyyLr1u5EesdlesxG4zjXDwenA2QxPPPEEPve5z+FP/uRP8Ju/+ZuYmZmRop7BYBDT09O4evUqnnvuOaTTaTz11FO4//77AUD0Yjf6L7BGB3ey0Nyed9ieymd0/o1tkMFAZgcAYRJwXljW4ASKTCaDPXv2YGxsDJFIBK1WC8ViEcVi0VarRTv6dMFNFgLj37vdrpwWwcrn1EXpoCEjgGuCc42/93qD6ts0bi1r7ZQNFkDV+j+NYha4Y3u9Xk/o7fpUELIqqaNzz+HYOd/ZMHC+u71nYFDVWMs0fvf+++/HmTNnAAzWH9P/2DftDOM/1i7QRjHH+2b2/Z1ybcYty+IJrYsAxt/6/14As+p7c299tvFprgAuX76M3//938fLL7+MPXv2AFjbAPlSSMkhzZcTrdfrIZfLCeWVi5DJzJreoytScbLxLCLDGBQiYHEDGss6bxNYf4ZpPp/H0tKSCEBu7PoZmHcKwLahc1NlSel6vW6LPOqIMGkqVE6cNBwWFeLmzGsty5LoJ41K9o3jQI4+PVV6bLiAuLDoJaLSQ4Ggj9LR46SjzZyk5NUvLy/LGa+c7A8++CB8Ph++/vWvb8ng1ALtZr2aTtrNbrTpwYOH4XBTgLYqzNyEsBObKX4bKYi6b86/D6PP8bNmsylUTi0v+v2+GJkElW6mbjQaDYyMjKBYLGJiYkLO90smk8KYoZHEYiBUrOgUDQaDOHfuHPr9QcXaqakpmKaJ+fl5HDlyRJT1kydP4o033sCdd95pcxjyZy6XEyfu5OSk7bgZOiifeeYZvO997xO5pt/PVt/DrQKZQ3RSM8pCo40UQEac6vW6UHQZ0WCERTti6dQGBvJsdXVVxi8Wi8k5oNqAAuznVi4sLGBlZUUc7OVyWYoe0UFRq9VQKpXkO7pfnEM0tvn+KUv9fr9EcyjTtPOECnYsFkMqlRJqIzBw3NMwzufzCAQCEi3m3GdVXlbF5fNRjlIP4/F7OqeU/eY7YQXqZDKJfr9vy4mlok+9Rutyelz5nvg5o1k0rNmHUCiEf/iHf8Dv/u7votvt4uTJk3jllVekLZ1v7lz/O53Heq/Qc2C7+Pu//3ubU+6JJ54QnRkY6Lt/93d/h8ceewxvvvkmvv/97+OTn/wkPvjBD+LVV1/F008/bdM99R5H44PtOPurMWzf1Xqe/skgTrPZlGCUjho7GQqsWqvvw+CIYRhC+QXW2JHBYBCTk5NYWlqCYQyOV+R8YDT0xo0bNj0YGOSDagelTm3j93Q9G35GqjkNba4frnvuzbVaTfYGp43C42joyAHW9lW/349HHnlkyEzYHDRynYErAJiYmBDHBvcB2iITExNyvikwYFHwPFa26dSPyQ5lEE9Hvjeq37ARbjoZxLIsyzCMbWvwhmF8EsAn+XsoFMI//uM/4vHHHxe6J4UIjZxEIoG5uTlkMhlks1kAkLPktNeZYXeGxzV/nPQPvhDLGuRALi0tAYB4oJx5pQzvc6B1aWbtVbEsSw4DpvH21jjJQe30YNHzSWHDiChLv9dqNaEocFLp/FYKlGAwiHg8jlAohJWVFTlAl+BxAhRYvV4P0WhUDi/n+NIoJy2YHlWOKycpHQTac1yr1dDpdBCNRm3nSnHcCR09ZsXBVColCqJhGHjiiSfw13/91/jABz6A//iP/1hHH+P1elx0pH1HVAIldPg+9f08ePBwa6DXK/cjp4LjhNOT7Pye3ifcqHNOo3QjpXMzhVT3V6PRaCAejwuDhXumzg1jjiqVdMoM5kFlMhn5LttjJCwYDCKfzyOTySAQCODixYuyt8/MzNgifaFQCOl0Gq+++iosy8K5c+dw1113yZ45MzODF198Effdd584UjVSqRSWl5eRSqUwNTUlslQXsvne976Hd7/73TfNYtltOB2lrF7s8/nE2OHZmvF4XIoksbosDT++Y32MDGUYZXcymUQikZBoKr/Hn3yXhUIB165dk7lFaqZmg+ljJYA1yrhWNpnKQwNbK/udTgfFYlHSoLTCSD0gmUxKhJ5yu9fr2WpWABDaOJVxPi/HUxu9pNPm83mbsQisFWWi7hAKhWRd0NFcr9dhWZZU2WU0iUqvNgg05ZCGObB21EyxWBRHgt4ner0evv3tb+MTn/gEOp0O7r33Xrz66qvSFvurKdvbBdc1n1vrKjejU3zmM59BrVbD3NwcLly4gD179tj2S77Hn/3sZ3jf+96Hb33rW5ienkYwGMRjjz2GkydPolAo4Mtf/rIwLtwc/FvtpxuDhddyjubzeZm7DIZoZ4VmHdKYjUQiCIVCsi82m01hLfC+1C05vpxrjUbDts7plOh0OlK4DoDo4lrHd3s+ADLn9PygDg9A1iD7w+uow2umpKboc2/nc9LJxHk7MTGxpTmo38GwueZ8n8eOHcO1a9fEgcQ2RkZGcPHiRTSbTQlEkdnJea2dkVyHtH1IQQdgs+O2i50aqDcMw5i0LGvBGFB4l976/DqAKfW9fW99tg6WZf0dgL8DAMMwrI9//OP4m7/5Gxw7dkwqWpFSSyOSwoRGnGEYWF1dRbfbRS6XQyAQkAHkpKHxVy6XpSw6B7ZcLks59Xg8jlwuZ8vzoadAKzFLS0sSLaXHk5MgmUyiXq8LZ5t9pYLABaALHlExsSxLjGtdkY8hfrbFBc7nKBaLGB0dRSKRQKFQAACpguykCwMQQ/P/Z+/Lo+Sqq/w/r6qrt+qq6qrqPb2l09nTCcGwKmsAlQMqI6IyooiOos4w+ENcCBqWJDCsATnAqAiCICNHVHA4sio7EYaQkIR0J73vW1VXd3X1Usv7/VF+bt/30t3pDoiIfM/J6aRTy3vfd793+dzPvZdZVdJ8MjIy0NnZeYCzR2XBA8KMKqkJVCo8mESZ7eicXekDEEqKHXEfHByUGUu5ubmWLn92JFo7mdNR7eay9CHT2dgP1gfrg/XurrkAQjSQM2UwgYMPMad+Igg32/Uc7P/sr9GIsnbwqcsYsAKQRhqc6U2gkHqTHXddLpdklDj2JBKJiHOvHXjqdGbNPB6PpTSFr122bBnq6+tln9auXYvm5mYsXLjQ0lGeq7CwULJl7Aar6YKmaeJPf/oTjjnmGHg8nhn37d1ezDgyi5ZIJJCfny/NhjIzMzE6Oor8/HzEYjE0NDRIt08CrJQR2mWOtOBneDweeX7aBuogNZFIoKWlRUaeOBzp2tbc3FzxaZgxGhkZkXnmzBrqZiWk8eraTwByXZR/+4QBBjBkhjHgo4OZnZ2N0dFRDAwMWM4HP4szzUOhkHyv9mHYG0QvfjbP7OTkJEpKSuBwOCy+Fm2vlmXAOlOV90/gSScA+IfXyyBa6wn7Nf/0pz/F
BRdcIAkJvtYOUB+KT6DPPq/Nnmw4lM+9+uqr5e9ZWVkWRoNpmvjBD36Ar371q7jzzjuxefNm3HXXXdi8eTNM08Tq1auxbds2rF27Ftdddx0AYOPGjeLD6UBoNt2nfcLpMmrUC6w1Zoda6ipdn0j/kyAI942giX4mmjqre7fQN56YmMDExIR0q+3r60NhYaEAQPR9x8bGhHWp/UZ7kM3npH+vy/LsvjIzufZgV58/+tRaJrTfyb1xOBzCwJnLmg4gOBgL0DRNVFRUCCNHB9iLFy/G9u3bUVRUhKKiIksfm+nAF/6Odk6XXR7qRI5DDVAfAfAlANf+9efv1e//3TCMB5FujhQxp6jAs67//u//xnXXXQfTNNHd3Y3a2loJFonG5efnSyE0hz3Ljfw1Xa4fNIVvYmJCHAGHI11Lym68bFxElMU+JoWf1dXVJd3sDMNAb2+vBM0lJSUYGxsTFILfzffzPZzhxHvSyNHY2Bj6+/st1F/7dXDYuEZqdCMO7oMOqjnGJh6PS3E561ZIPaCh1U2TaHCIEDEoZvDNgnwaNxoeUhoASK2wVsL8Hg062A+W0+lEU1MTqqqq8LGPfQwPP/zwjAdsNid1rksriQ/WB+uD9d5e1BEzLTuaP9M62HnXgfB0a6b/M4x0U46xsTEJ1Jg5ou4mmEg9WFRUhNHRUfh8PnR3d4sO9/v9aGpqklpUTTFjE4uysjJxyFl3Z5omurq6EI1GEQgEUFNTA6fTKT0Q2HBm8eLF0iDG6XRi4cKFSCaTB7BwuLxerwSp7ArLveKev/LKK6irq5O62L/3IhisAy6Xy4Xu7m64XC5kZ2eLvZucnBT2k+40qxlXpPoxsGQAPJ0TRod19+7dFjvDIFV/diAQQG9vL4B0IDwyMgKHwyEyxL+TFqtt1vDwsGRq6AOwxhOAOI5+v1/KdbgfzHiOjo5ieHgYAOS6RkZGZFwOS4s8Ho+AHzow1pk43R+EsuF2u1FcXCxdifUekZ7O9+rMKANn+iv8Hs0m099HmWNWXAc4djD7mWeeQSqVwvnnn4+jjjoKr732moWerANePpe5MgT0ezVQb/ct57s+85nPoLGxEe3t7fjCF74gpV3cO4/Hg3vuuQdr165FfX09Nm/ejE996lPo7OzENddcg02bNuGb3/wmvv3tbyMajeLqq69GVlYWrr32WnR2dlruV/vY+nlxj3RWkD9dLpc0IgqFQqiurhY/PCsrSwIw7qV+Zgw+dbdoZhJN05TPsNf3kxpv/jWTHg6HhW46OjoqXcwJJLFZKLN8OpjSQa8GNqb7Hc8QM8L2xIyuzdZnkX9n0M330T+m7J100kmWfZ/L0gmgua5Vq1ZJ0yTeWyqVwtq1axEOh2GaU6Mg7TGS3gfqBLfbbWlOpmOr+ayDBqiGYfwKwIkACgzD6ACwEenA9NeGYXwFQCuAc/768scAnA5gP4AYgC/P52Iuuugi/OQnP5HOXUQAGMD5fD6pT2Dmc8GCBZaOdH+9ZkuUT+FhATVRZd1hjoGZXZGEQiHJLJLqwwPKLC47HWq0lUKZSqVkMLVuzsQHySHiDGS1IdTBIY0g35+Tk4Px8XH09/djwYIFIkD8PbtvAWkEcmBgQIwuEWPWpQLWYe0UcI/HIzUnfG9xcbEE4+zINzIyItQh3hcNIJ+FRjd1pzLuIWBt33/jjTfizjvvRGZmJjwej2RRtfHQSmo+azrH046GfbA+WB+sd2fZa5208zndms1Q29kbM/3/XJ3M2b5rpv+jDgwGgxLMAWkaZ09PDwDIrD5gykkhmEcnTGcdOEDd5/NJcw+CsT6fD2+99Zbo05KSEkHx/X6/OAddXV0ShNXV1ck9LF++HG+++SaAtFPT1NSE6upqeDweqTvSKz8/X8DFiYkJ7N+/H8uWLRNbbRgGdu3ahaGhbZOlzwAAIABJREFUIenM+fdchmEIQMvAPS8vT+rUdIPEZDIpDVmAqewfgz12EmXtma6Js2c6UqkUGhoaLCVGzK6y6SPfzwDZ7/fL/EP+icfj0jRQgy+GYQi9mP6JbkhEW+vxeARQ5jgZAiZjY2PCCKMPwu8kEJ+fny+Atu4+yjOanZ0tgAt/p3tKsINxMpkUv4ElTUNDQ5aZ7rx26gDDSDdgicViGBsbg9vtluCdPgD3RGfU+Hw4n12D7nzWDAT+/Oc/4ytf+QpWrFiBbdu2iW7Q16UB/7mCXzojaX92b2c9+OCDmJycxOjoqIwqpM7bs2cPxsbGcOSRR2Lbtm3Izc3FqlWr8Pvf/14SCxs2bMDFF1+MO++8E+3t7bjiiiuwf/9+/OAHP8DExARaWlpw2223yTPU/rC+fs2c4Jlg46Kuri4sW7YMNTU18j7uJSniuvs1P4fngs+eNcSAdYyYbj5EQCYej6Ozs1NkddmyZYhEIgLCEGgpKSlBKBSS0U9er9cSOFPnBQIBSWgxyUM9TDkmM4WUejIfKCc6DtEUYb2XOvbgfZqmicWLF1v+PVeZ42fOBwBJpVJYtWqVhebO97MEj6/zer0H2AX9el6z2+22JA1p7+az5tLF9/OmaZaapukyTbPcNM27TNMcNE1zvWmai03TPMU0zdBfX2uapvkt0zQXmaZZZ5rma3O9kKqqKng8HkFQtLLnDXKwLQWZh4KNILTR1gXXiURCEEuv1wufzyep/un+8KC/8cYb6O5OJ4BJMeGh1bQNzmMij5xKltlVBq9El4jS6gHpVKgMfr1eL4LB4AEF1jQ03d3dFjScws2DRsXMA09KxsjICPr6+kSR8A+/1+VyobCwUEYSaGePWV6OsaEgezwe+Hw+2Xfev6ZjUBFpxaNreXjAua/Dw8PYu3cv4vE4Tj/9dIss6IN8KEGlpmho9IrX8sH6YH2w3t2lUfSZglN93meiPdmZEHb9oHWT/X0zfYb9dRptn+67h4aGxFlbtmyZgKNDQ0NiT1jnR/2bSCRQUFAgFE2/349wOCwOF0eT+f1+tLa2YmJiQkpW9CgzgpctLS3SIKS8vBwApHur0+lEbW2tXHcqlcLq1asF8Fu6dKnobq/Xe8B+MhvHRnmLFi2SUhvqZ8Mw0N7ejldeeeXvDvzpjAYA9PX1yZgfAOIQcx/ZGIS+A/s8ELBlZ1o7LZQ/x8fHsXv3buzYsUOy2pzX6HK5pPEjwWwGmg6HQ0bM6UBIM5xoX8PhMPr7+8X281kT1DZNU5o8EvSg3aMPw/eTPgxMOct0qBmo03fguDldw5eXl2eZD8nr9Hg8KCgokIyvw5Gu0R0fH0dbWxt6enoszWgACKPK5/Nh+fLlWLJkCQKBAILBIAoLCwFA/DhmsBlsa8BHg+30CbVt1+C50+nEHXfcgVQqPWYDmJoEoQNa/v5gy65ftK54J4JUUvaXLFmC5uZm+S4G3E6nE9u2bcPFF1+MsbEx7Ny5E9/4xjeE/nryySfj7rvvxpo1a7B8+XLcf//9OP744/Haa6/hiSeeQCAQwLXXXouNGzeKz0odYE8muN1ueDweRCIRtLS0iFzU1tZa+qforDFlS/cm0cw6+sLj4+OIxWICBOmga3h4GCMjI3A
6nRgZGUFrayt6e3uxcOFClJSUwO/3W8ajcFRMdnY2ysvLYRiGzN+dmJhANBqV+lTdIZqgFP186j/GIHyeTFxpsIB1mHz+ubm5MnqpuroapmmivLwcwWAQPp9PSicIDhFEnKv+1JTbQ+kDYBgGampq5N+UfT5/O4tCP1ueL90QkEABP+dQGAPvGU7j4sWLEQwG0dvbi5ycHFGwAITy2dzcLIEWm0/QKLIzmEZ8aEiIMutiZq10uEzTRFtbG9544w3Z0JGREYyPj4uyZhAVDAZFeTNDSANfUFCAYDAIj8djoUCxZlSjsrrGFEhTa4qL002RWUuiRw10d3dLMyK+p7+/33IoHA6HBJC8LwAHtNWm4qaQVVZWCo2Yr5ucnMTIyAgikYgYOd3pj/cGpEcS6DphfreuE+HPsbExy0HiHmgldNNNN1lmW9kRJn1w56r0tbMyXYD793amPlgfrH+2NRONbLrf6UyG/f/sQSZwYLdM6hi7Abc7rzOtg+kZBqfUw+y+S6onnXc61fZrpF3g70lns9d48bPYlI7vo/PE7r66bojUSp1p4tKMJdpefo9muUx3rdnZ2cjPz0dLS4uFAQRMzcn8ey6d5WRgRlogbQGdVGYLmC3hvemgw95kkU5ZNBrFzp07UV9fLxnXSCSC0dFR5OTkSKbWPq4CgDDD+HcGq/x83kNXV5ewoUxzavwKgzWn04lgMIiioiIJAmiPCXb39fUhHA5bGtPwj2mayMvLQ2FhoWS4uPr7+4We7PV6LZRInYGlE24Panp6etDU1GSht5PhxvKjkpISLFmyBMFgEBMTE1JGxO6rPAuGkW5qEwgERD7pSHOP6SvqLJ3d5vP1f/nLX5BKpbB8+XIJTvge3tt8FnWVPTC2+y1zWQyEuCoqKvDpT38aX/rSlxAMBuVzk8kkfv3rX0tTnTvuuAOHHXYYgPQYx2uuuQbxeBzNzc3wer3Yu3cvPv/5z2NiYgI33ngjysvLcdtttyEajeKRRx5Bfn4+brrpJhx++OGWgMwwDAFsksmk1G3X1NSI/6b3WQcs/H8CQlwTExPo7e2Fw5Guxfb7/cjPz4fb7UZ2drZk7oeHh9Hf3y9y2dnZiUgkgtzcXCxatEh8Zk2nTaVSaG9vl4CR8hoIBCzZTDJN7GCpYUyNZNSdh7nnlC8GqVreEomEAEf2zD8TWgSgyHjUGWFew1wDTurk6eKbg70vlUrJubWDwRqs07/TiT3AOmYMgGXs2aH41u+ZAPXUU0/Fueeei6GhIRQUFFiCHKfTiXA4jN7eXrnRWCwmrfXr6urg8/lEcNj9LjMzE36/Hzk5OQcM1AWmqD8A0NDQgB07dkhzCNYQuVwumefF9/MnZzdRGIgMUVESzRofH8fAwADGxsbk+jTXHEgbpaKiIumUB0DQ1tHRUfT09AgVSFODGDDq+6Jh4vezDpWHRaO+NEZEJ1OpqcZFPT09GBkZkQHyRIAZhJaXl2Pp0qWorKyUWXOZmZlivPg92tACsFyDXvqZ03jv2rULpmmiuLhYkCXep93wzLa0Azsdkvp2Uc1DQaw+WB+sD9b0Z/dgRtl+XrUumG7pTIrdadTXoR2M6V4zHU2JztZbb70lASEDGtYUUt85HOm6LE1FJBXK5XJJZq6xsVHqPCcnJ1FQUAAA0kWddiszM1NGJOTm5qKgoEAC1ry8PHi9XkxOTkpHVdM0cfbZZx+QYXI6nVi1apUlC8bMYjKZtICtvB+v1ytBl8vlQlVVFRoaGiz61DRNPPXUU3jiiSdmfDZ/68W9pv3lUPlIJIJQKGSZr8lnycZHvBcCC7o7LzDlm7z++utobGyU93NOI51i1qgCEEePQDJlhVRa0zSRn58vzyUjIwODg4MSIDLDxH4PlNuioiIEAgHxUVKplID9g4OD6OnpEfvO76Qzn5WVhcLCQukIDUydB87NZUdgXrdmaZEmSeeaPlI8HkdfXx/a2tosdFwyu7iKi4tRW1sr10OHPxaLobW1FQMDA5byHg3AZ2dnw+fzCfCjHWdg+sY3vD8GIHy28Xgc69evf1vyNt336u+cz+K9aF+pvr4ef/jDH1BeXi7TLEjnZpLlc5/7HCYmJvDGG2/gm9/8JiYmJnDVVVfhK1/5Ctra2hCLxfCRj3wE9913H9asWQOHw4EbbrgB55xzDu644w785Cc/QW9vL5LJJM4880xs2bIFn/3sZ3HBBRegtLQUBQUFaG9vxyOPPALDMLBu3TrRYTrLqv/Qf6VOoc/Ifee9MLHEYDOVSiEWiyEUCgmg09LSgvb2drjdblRWViI/P9/SYZbX0Nvbi+bmZvmseDyOcDiM4uJi6YBNKq+9Xpg0al4vwZSxsTGxA9S1pmnKvGhgyh7pngCUt3g8joGBARlDZa/XdjgcOPzwwy1Z57ks2jV7kDiXpWWMdkA/Q8NIM3M0/V43udK+tWaUMDnIBN1819seM/NOrUQigfLycjQ1Nck8Le2guN1u4YyTZjI5OQm/3y+GlweUczWJCurUN5EQCktvby96enpEWbIrL4t7ia7y8xj4UkALCgowMDAgB2B8fByBQABjY2NCzeUD4/Ux7W0YhmR9GQDy+kmnpSEhcqPbzrvdbmkHTwEYGBgQh0fXoebm5kodhtPplDlU2uEiOsr90CiJLlQvLCwU+i/rWIl4soGEx+MR9FM7Yzpwni6DzedExfTjH/8Yd911Fz760Y/iqaeeQl9f3wFozVzWbFnTQ0F3aND+UQPTs88+G4C1HsJOReL/2xWdDh7s/2f/92z7NJ0C1Uaiv79f5JWgzZ49e9DR0QHDMKRhC2ltnHMGpOtHOLsvlUphwYIFaG1tlXmBXq8X4XAYa9assdwjaUOUTYI/eh6iaZo44YQT5m1AuNg98YOVXnRq9Tmc63mczoGbbmnneKbXUr/N9ll2HTUxMYGOjg6Ew2GsXLlSvmvv3r2orq6WjuqVlZXSRC8QCGBoaEicE3ZG5xxN1o5mZ2djaGhIaunYHBCAzKekE8WgStus7Oxs6bPAa6bdA6x1SnzNihUrsGfPHstzaGxsRFVVlaXLLBcDqHg8jqysLNTU1KChocFCIaZD88QTT+C0006b9Tn9LRbnkFMPsWnK+Pg4ysrKEI1GEQqFxC7RtmlmFO9Zz0vs7OxET0+POGJ6Vrkep0aZop+ig39mAfkaj8cjAAYBCtph6rbc3FxLs8XS0tIDbClptf39/UJTZHDLn8lkEoFAQJzjmdgFkUgEbrcbXq9XgkNS1Kkv8/Pz0d/fD2Aqi8NxMVr+dJdWt9uNwsJCkddoNCo9Lfr7++UsMIBmppTZUdZAE6zRTRl5L3xW9mDR7swnEgncdtttuPjii1FTU4Pnn3/+kHtccL0TbKzpdNHVV1+NUCiE+vp6VFZWil9733334ZxzzsEDDzyAhx56CFdeeSU2btyI22+/HZs2bcJ3v/td/PznP8fmzZvx/e9/Hy+++CIuvvhi3HrrrXA4HFKDmJ2djX/913+VkVaUb7/fL/LOLPqpp54qiaLs7GyLnuUesISMZVzUGd3d3ZIBTiaTGB4elgQP5S
wUCllqTtnEqbi4WNgl7CvD58Uu1ED6uQ4NDcE0TVRVVaGtrQ3BYBCBQAADAwMSUJM5yN42hpFudKf9IwIvzIjSR+WZ0uAVf29ntnDeKbO+jGmoNwwjzYKsqamxyJ+ug55tMZicT8JFfy7/vnr1auzcuVO+l7aCXb69Xq/UhHMM1HRMAf6Oumy+6z2TQb3//vuRkZEh85ooCGNjYxJsDQwMSEMi/h9rcFKplNSWEl2js6tRjkQigYaGBmzfvh1vvvkm+vr6EIvFMDg4iImJCRFYChjRHGbv2EmRlGEaIyKZo6Oj6OzsxNDQkPDJ7W3eWePp9/sBQJobpVIpDA8PS4dgUpxZt8o6pUAgINQWGilmfru6uqRDGrv/URHT6WGwyfuLRqNob29He3u7ZR4SjSybI1VVVaG0tFSC3aGhIbS1taGlpQX79+8XBIt7kpubK42t+FlUUlzT0RA0CjMxMYEbbrgBTqcTZ5555iFnOmcLQudrSHRm/x998VnPRHmcTilS7qfLgNufj70r9VyvB5hC7wg+6fpuh8MhGQoAovQ5WLu/vx/xeFxAotbWViSTSUSjUaHKj46OYtu2bXK/HR0dwsTg93PGYGZmJurr6wUFfPbZZ+dF/ZpvEPvPtOzZPDtdF5j+rM01OOVrp5NZLo36zrQoiwQKu7u78corr6Cqqgp1dXXipJimidLSUuksz2ZyvFcNwJGiGYvFEIvFpHyFAQhrUpPJpDCD2B2RWQm9V/F4XAIFfgftimmaUmM0014YhoHq6mqLDVi8eDHeeust0etcDD7I9CHdd+HChWhqapIzrwHixx9//KDP6p1eX/rSl+D3++V6i4uLhTYYj8fxyiuvIJlMYsmSJVi4cKGl2R+vn/6GYRhobW3Fa6+9JqU1kUhEejPwuejSHmZudDbCNE0UFRUJnZdZSYfDIdTvvLw8kUvWmNJJzs7ORmVlJRYsWCD3STBwaGgIPT09MnuS2TX6EJRPyqjW9QQbqOcmJiYwODgo18PpB7rTKgApQZqcnEQ4HBbfzJ4tNU0TZWVlqK2tRUlJCVKplDDVkskkGhsbJTjlfng8Htk3MrRIP+Y4OgaxeXl5Ft9FZzOnA8D4nJ1OJ3bs2CH+2sknnzwtmD3T0vrl7VDap9NzugQMADZt2oS+vj6ZYw+kfcSGhgb88pe/xJYtW2AYBq688kps2rQJ8XgcGzdulPnFl19+OS655BKMj4/jlltuwRVXXIGJiQm8/vrruPTSSxGNRrFnzx6Re7Lguru7sWfPHoyPjwvA4XK5ZOaoHgOk2SiaeUA5GB8fR0lJidwTae8E4kZHR2WU0cjICNra2tDb24tFixZhwYIF0zbJHBwcRGtrKwYHB4W1GIlERJ8ykNJANpt3UY9xZCMwJTOUa2Aq8KT/rP0bPv/x8XEBH6kTU6mU9H0hYKSvPxKJAJhiB+iGqsDsDfkoNzqp8HaXw+FA9V+7L9u/3+Fw4I9//CNyc3MFqJrufNmBUZZEzus63vadvEOrra0N3/ve93DPPfdg586dggZnZGQgHA6LAvT5fMjKysLQ0JDQnfLy8gRNoeAQtYjFYkKp2b17N3bu3Ckp98HBQREMKjxSbxmYUnB50BgcAlMzQdkdjAcyFoshGo1KpzIe1gULFliGKgOQILO/vx+RSEQCZAo/He6ioiKhPusAjottve0op1ZuvE4KUywWw9DQkIWCwKxqPB6H2+3GokWLUFlZKYBBZmYmRkZG0NTUhIGBAbnOnJwc6VZMhFjXLOjhyhrdnC7trw9DIpHAjh07hDq1fv36v6uz/34JNGiUZwqytHxphTdd8MA1nYNPJ2Yuht5OnyQKSrnm30tLS1FSUoKSkhIUFxejuLhYzlZ5eTkKCwvlvAQCAbjdbuleTUSe12sYBv7yl78AAMrLyxGPx4XaB0w15JiYmMDy5cvR3Nws9/j000+jubl5xvvRDv3fuw7vvbzsTpnWCcxqcdn3cS4O4Wwyrj9Xy6jdyGud1dLSgr6+PlRXV+PEE08UuiPnbPIeSLGkg8+mMBx/QfljOQP1Pe0Px4zweqinGbCyHoiOHedm0lnPz8+Xa6EjtXr1ass+Trd3rPvS+7R27Vph/2jHXesIgqwZGRkoLy9HY2Oj5XNpY5555pmZH9bfYDFAI3AbCAQQjUblnLe2tqKwsBAdHR0YGBiQJin0JQwj3SBwz549ePXVVyUjEwqFJEPJxka0h+xSS12jaXF+v18o29QRiURCakMpi6lUCn6/X5xq+hg+n0+yvNom9/X1SddSTd+lTLjdbpSWlsq8d01dpFzyvkdHR8U30Fngvr4+oaIz+NTsJAYaug6bgeTChQuxatUqyb7QpxgYGMD+/fulvwYzsmTPGIYhnYgJADDTFolE0NHRIZ2POT+Si/fGP5RDPie+hnJyyy23wOl0ory8fE4AtK4L159zqMvuDzHY1vookUjgQx/6kMiQYRh49dVXcemllyIzMxM//OEPcdVVV2FychIbN27Eueeei/HxcfT29mL9+vVIpVK45ZZbsH79eiQSCVxxxRX4+te/jng8jptuugmnnXYaUqmUjG0hm9Dj8Ui2EkiDEkzOkEGhM/A6K09ZYHCbk5Mj+pAgysTEBEZGRqRDbH9/P9ra2hCJRFBTU4PS0lIpbeNzMwwDbW1t2L9/PyKRCMLhMAYGBiTpw3NIxgP7pJBhQlo6gyyetYKCAqnHpDxRDwCwAIfJZFIaMBFApy/EAH+6SSJkMvD+yQI78cQTAcxthOI7EYzOlCTy+XzC8NRBeE5ODk444QS89NJLolPJbqBeoa7S2edDWe8Ziu8xxxyDxx9/HPX19di1axfuuOMO3H777XA4HBgYGEBpaallBk9TUxNWrlyJ0tJS6SAITDkGDFJpgEKhEIA0UkOHgTPQSB+kMmW2Rh+04uJieUA8tDyUpDswYKYzwNdWVlZaaDXMiLLxEAWahpSUAR4UKlkKimEYQh0G0ijn8PAwysrKDkA5tbJmLa3ursbrJBpChJMF8Gy2kZWVJSgxvz8rK0tqk2is6HjxHtiBkqhwNBqVANiuwLg0aqSzrpmZmRZ0fy6HU6Pghxpcci95bby/ubacfy8u7VgeDCnWwRoNsd7LmRz72TKn02XI7K+tqKiwXCOzoSMjI1i0aBFGRkZQXl4uYNXk5CQqKirg8XjQ1tYGl8uFyspKdHZ2IhwOw+VyoaCgAN3d3XKWeV4bGhqwdOlSkTm2kud18nzX1taio6NDRjs1Nzejr68PRx999LT3SoPNMzLfbP0/w9qyZQuam5tx1113yTnTtCLuHWAFMWbL8nPN5izOphN0AEY92t/fj+7ubtTV1SE3NxdZWVnSeIagH/Uemxclk0lUV1djcHAQbW1twljJz88XuqymgFIeo9GodOll9o/oe15engQ1LpdLagO5H9TjQNo22JvS6Pub6f4XLlyItrY2Yebw/aQS0w7ozyLjgHS/qqoqNDY2YtGiRfK6mUDJv+XaunWrhXrIjCRt8mmnnSYZO+4/ZS4ej2P79u3CDiJlkNkgPX6CelSPfaGtY
MdQ7iPB4HA4jMHBQWRlZUnAxqZH3C/uN/dufHxc6sHGxsbQ1dUlcq6vg845mVMMKoEp555yRRsbjUYPyNix8y7ZJKR0spM0G2EVFxeL3qWsFRQUCB2dvldhYSHa29sxOjoqWVJm6fSMSwLyrB2mvmYgwppHlniQcpiXlyf0TCYtdG0kgwjqEtqpVCqFvXv3yqiQj3/843j00UfnLW96jw/V5+C1MWGi14YNG9Df3y+lcADw0EMPIZlM4uabb8Z3vvMdbNy4Eeeddx7uu+8+/M///A+WLFmCj3/847jzzjvh8/kQDAbx0ksvIRAIwOPx4K677sK6detQV1eHhx56CH6/HwMDAyguLrYEW9FoVMCSsbExYYZQfhiIcY8ZoAGQzD+QLqUpLS0VmSwqKpLAhkCFy+VCdXW1ZDCpi/lZDQ0N0o+FCRqOxtIADPUrkz0E09xuN9rb2+UcAVOjhVKpqS7Wo6Ojcp2jo6MiO2yeynMKQNicPOP0k/kcWQqomUIs2+A162zowdZc/eCZ3ncw+aypqUF/f7+MR+O9uVwu1NbWYvfu3Vi5cqWUoTB7zNdSF83nnvR6z6SDTjjhBFx//fU47bTTsGjRIkG9DcNAQUGBdA2jkFJgiXhqxHA6xJO89JGREeTm5krEz5oFjXbSoTQMY0a0c2BgQNBOAILoUbiZoWJ9CQMsABakk0Kskc7CwkIUFxdLgwMK+6GgnDqLSRSejg7viVQOjXAy2CYitX//fmnFTYSTQpebmysGmBlYNqE4GLrJ56Uzu3TyuLeJRAK33HKLNIY49dRTDypP9mL16QKguS67Q/WPHpwCB9bSaSrUdIqEQfp0WdKZAlx7Vkp/lv69Njz2zwXSMup2u9Ha2oqCggLRDQShOOqAAE1HRwfGxsbEEBEY8vl8KCwshNOZbmSzYsUKOROkilEW29vbpVM478/n8wEAFixYgH379sn1jY2N4ZlnnrHcExFjO33u/UALf6fXo48+iu3bt2Pz5s04//zzAUxl43WwZZezgxm8g+31wbKv/L5IJIJt27ahsLAQa9euFaeIupHBDUE4OkaJRAI+n08afjBY0YGGHnHCc6gb4DGAoAPG4FcHjqQGJxLprrHRaFRsGBHuVColHT15bwfTX5WVlZbXUKcDU7PxdGYKmMqk8vUMUvWev9uLQVsqlUJzczN8Ph+6urpk33UDFfoWzz//PJ599ln85S9/QSgUwtDQkMxn16MjKD+6+yfpqm63G36/H4WFhVIaRH3T19eHnp4e6VDLcgVmwvPy8lBeXi51xKQ+Op1ORCIRdHd3o7e3V7pGA1OjUdgsq6amBhUVFXC73WJT9Vgi0n6Hh4cRCoUwOjoqDBIyxfx+P5zOdCMoh8MhtXocieH1esVmMGPvdrtRVlaGyspKAJAyre7ubrS1tWH79u0AILPoc3JypCmV3++XPaDcplIp7N+/H7t27UJTU5OM5CEzhnqVuoJMGR2U6jPF56YBHX0eLr74YmRmZqKgoACrVq2yZNHsSwPX9te8HVnneZ5Ox3V3dwtbY2RkRHRLZmYmvv/97+Piiy/G5OQkfv3rX2PFihUoLCxES0sL7rzzThx//PEYHh5GU1MTzj33XESjUbS1teHTn/409uzZgwceeAAbN26UXiK8B6fTiYKCAuTm5qKkpER0p84EsgaVvi9tM+07fzc8PIy8vDw5M/Sn9+3bh+bmZmRkZKC6uhqFhYXCEuGe9Pb2Yt++fWhsbEQikcDg4KAAfg6HQ8BCTbHPy8sT3xQA9u/fL4wqZuN1fxifzycyaZqm1EpTDthLhSAV9QaZmsAUmMfeMdxPnYhiPwyyLQOBgIB5c1k6wTSfZQcWZ1umacqoKC5+H6n23d3dYmdmapx0qFnU90wGlc7IySefjJNOOgnRaFTQ2ubmZixevFgQCLfbjaOOOkoCK/6ZnJyUOgIAQhkwTVOElM6qRg/ZdZfpfVIDgKkMCGvXtJEgNYBICrMuVKpUHOFw2NJhjCgkr4GNEog4acXJII7vtRduZ2dnS7YzEonA7/fD7XYLyhkIBDA4OGgJFnSqvqqqStChcDgsBmxgYECQcL6WAkj0SVNa2KxjYGAA0WhUDAQNyPDwMAKBAPLz82UeH40GUU5NEbEfnn379uGiiy7CPffcI00hZjtgdsfz7WavdJA/W2bwH2nZ92SmQFPk4weGAAAgAElEQVQbqZn+z76my3Jz3+x7ZzfudCapE2hkYrGYUHGSySRKS0vl/HBGHxH5VCoFj8eD7u5uOBwOGd/U3NyMsbExqTU1zTRFf+XKlWhsbERJSYmwCBoaGrBs2TJL46Tc3FxEIhEsX74ce/fuxaJFi8QgPv300zjppJMsYIi+r/eDzPwt1lFHHQWfz4fe3l7s3r0bmzZtQn5+Pi699NID2v4fbA81FWm2NRuCTPnp7+/Hzp07ccopp+DYY4+Vhh7sbM5SBoKMtEHMZvJa6MyziQsNtb0Wn/dmz6RqytTw8LAwiTwej4V+xs8jDTKZTKKnpweGYQibgK/VP2faHwCoq6vD3r17RUcXFxdj9+7dqK2tlSZl1LXaAdEAaWVlJRoaGrBkyZJZn8nfatHRpy0uLCzEa6+9JhRRgsednZ3o6urC0NAQgHSzFc5Pp6M5OjpqGWdCJ5TObyqVki722oFkjTH9DDZ6JIuLgHNJSYkAY8xAdnV1SSkQKbQEPZjFTyQSMi+UgRqDbWa7qe8TiYRkeoCpzFYwGJTnTPllPeDQ0BDKysrgdDrluRKs1/Y6GAxifHxcalBTqZQA+bx3Tf3Nysqy0AgJfnNqAwF1Bujcd9oi+nL8fpaBeb1eDA4OSgM27idZB3wvs2HaBhmGgSeeeAInnHACPvzhD+ONN96Ytnu3Xu80O2Y2PffAAw/ghz/8IVKpdN+VyclJXHXVVcjJycEzzzyDW265Bccddxx27tyJxsZGOJ1OnHLKKXjyySfx9NNPY8OGDbj++utx3333YfPmzfjhD38oTZU2bNiAq666ChdccIHIA+0ZAYWsrCwBAbQPyx4umlGnS8Mol6xXTaVS6OnpQXFxMVwuF8rKyiTA0ewS+rYDAwMwzfQIrbGxMeTl5QkAzcw/wQpeL6nkDodDqMqk9o6OjgKYKrPTDAgt+5RzAtSRSMTSAIn1qfxJ/c8MM/0ZZml5/skcZKCck5MzZx2pfbH5yt2hyOnixYvx5ptvyjPktVdWVuLNN99Ef38/Vq9eLewOJghmYtrNdb1nMqh68UaYjWPnLgCSbdRZNyKIO3bsELouu8IRUaNjQCVI5akD0aysLKEscDGVr5X86Ogo8vLypHifRgJIC7tuCDA2Nibtp4mc8754baWlpRbkAYAg7DoYZctr/o5NASg0RG/4ejYnIOUGgCBvbrcbFRUVGB0dFWQxMzMTg4ODMkyewTidfpfLJbOpKHw8pKFQCD09PRgbG0N2dralVb1uDMHnyH/TcGjhnU7ZM5Blsw8atZnW3yoYmA4p/UdffEYzremyTJo6Pd2aDmSYqdbBXmuhZZ7vicVilqYclG86LjzbnKfn8/lQ
UFAghovngI0cGFgyE8XaVGZGHQ4Hli1bJpRKLU9srLBw4ULpKMz7/dOf/vRBIDrPddttt2Hz5s3YunUrzjnnHAnErrrqKnzve9+bV9Z5JpDF/hr+nE4m2Tyuv79f6rEAIBQKIRQKWeYzktmTk5ODrKws+P1+AdlYa8fv0mAcg1n2OiDrJ5FISBaAgQSba7DxB0fG8LUcNeNwOCwlGDoLo7u/z2VpBtPy5cst+7pixQoBM0k11fvK9+sGK9XV1eLAvdvnQ8/zZCaDDafIbGhra8OuXbsEPB0ZGRE6H8fEcA8pN/QZ9P0wq0c/A4A0a9OLVELaZtM04ff7LY48ATd+PnUbAy1girZdXFyM/Px8y7WwNo6vowwy6KMvkZeXh0AgYDkL1GmsCdS0X2ZTNaDO72UZEWWTwSm/n/fgcrmQl5cn42EY8CaTSXR3d6OlpUX8JvYj0QGkvk77vvLzeQ92/WEHvDRLhuvFF1+U62Ed8FwAnXdiHSwzVlRUJP7s888/b2EBHnPMMbj22mvR0dGBCy+8UF7X1NSEb37zm3C5XLjhhhvwkY98BBkZGbjssstw9tlnw+l04kc/+hGuu+46xONxhEIh+Hw+AZ1CoZAEfpRb6iH6yNrPA3CAj80JDJRdNvRkXarO0gFTemL//v3o7++3NELk1AjdaIjf7Xa74fF4JEFEHUT2B8vgCPDY2RBOZ7rTdyQSsVDOKcOkVhuGIZ279ZnSlGbuA7PI1P/8dzgcFjCxv79/znI0Xx1qz2jOd5mmaSnV4J5lZGRgzZo10uuHcmvf00Nd7xlP255d0ZRCohBU3MPDwxJUxeNx1NfX44UXXpCaSWYsORybhpEZGV24zN8XFRUJIk2HNxaLYWBgQCglnEnHg0QDRToCADEwPKzd3d1IJBJSR0GDo7vw8TpI4SWCTgMxPDyMSCQiCFEwGEQwGJRuY1TIvE/WeBIZ9Hg88veysjIAaYecYw24p+xsSqXOIninM921mPOiSOEdHh5GQ0MDdu3ahYGBAaFJMrDVxsEwDAEPWHyu75tIGRUKZYH/5qG4+eab4XQ6ccYZZ8wpU2IPfg510ZBpyuH7YU2X6eTvAUxr4IGp+hr7/tu7J3Lpmgv776fbUz3E2+l0CjCRkZEhtHkiq3TsganAlWdC16dQlujw7Nq1S5pxZGRkSGCwYsUKucempiYkEgkLcEUjmJWVhfLyckvmiEHqO+mwvN/X5OQk1q1bh8suuwy9vb1CrxodHcXjjz+OzZs34zvf+Y4ETdSt/EMdNVNmcLpnwd9pp3d0dFTGFjGoMk1TarYIQhqGIdREytfg4KAEbcBUMJFIJKRjJEFEsmsSiYTUTieT6TELIyMjQtnVwfDY2Jh0hmf9X0dHB9ra2jA8PIx4PG6pxaYNoYNw+umny/0eTDanYzosX77csnecq03QUX8ufzK7Q5sKzEz7/1uu7du3i75iI0I2+Ovt7cXjjz+OPXv2wDTTjQPb2tokG8TgUmcOSAlmsOhwpBsX+f1+S0lNIpFAOBzG0NCQJeOqM7pAOuCoqalBXl6eRe+RBkzdwxIEnRV0ONLNF9m9loHz4OAg+vr65HfsU0HQngFxSUmJ1IgCU/p7dHQUfX19GBkZsWQ9TTPdXJIgO30OIG3H2YgxHA4jGo2KziWdUfsvpAh6PB5kZmaiq6sLb7zxhny+DvYBWM68Bjb1XtKeUCa1n8FgTbMV9N/5M5FIoLW1VQJz+hqzgdNvx/nXn6HP0ExB8fnnn49kMonm5ma4XC7813/9F7Zu3YqbbroJjzzyCBwOBy688EIEAgFcf/31+PznP4+mpibcdddduOaaa5BIJPDiiy/iggsuQCqVwu9+9zuceeaZSCaT2LhxIzIzM3HYYYdJzTGQZrAxG0lmHW1nPB6XyQ66ZpYUX4Jwfr8fra2t6OjoQF9fHxYsWCD+IkEAyubevXtRX1+P+vp6ocCT2ZidnS11pvQDvF6vlO4xNnA40k2ZYrGYMA9bWloApLOg2dnZwgrg2WPpBn29eDw9n7W4uNgyaikYDFoaebH3CoFEjqshi4GyodmeyWRSdENubi5uuukmnHfeeTMCslrPzleHakbXfN6rv8vtdmPlypWy59pH/9CHPoRdu3YJu4JZ9rer698zFN+zzz4bhx12GC688EIUFxfD6/XKwyWViALf09OD5557TgwFBVA7x6Ty6gdCY2qaphRAc4NJU2XBsg6eKOxEWwoKCsQpJb2HmcOJiQkUFBRIi3dmUDMyMpCTkyOF/7yuaDSKSCRiCeSi0ailU1lmZqZ0LyYaA0w57qQPG4aBqqoqmKaJ+vp6rFixwkKL5EHgfD0689wPUiAASAcvZgNI4WhsbBSjpWdKcnF/tZPDzxwfH0deXp5ksghCkP5Gw8tDoTtdAmmF197ejqeeegonnXQSjj76aLz66qvTBkRcs/3fXJdGjA6FR/9eXHbqxXR7dDD6rn2xxsSuYPmcZ6JT2r/f4XCIg2j/rNbWVhQVFYmBMc2p+ccLFiwQOprH45HsBgGWvr4+mWlMsIfZ1P3791uo2wsWLEBXVxcWL14s38FMCjBF/ad+YGMNIK1nnnjiiTnVSn+wgCuvvBL79+9HLBZDeXk5gDTqvHnzZuTn5+Pcc8/F1q1bceGFF6K6uhrhcBg33nijBZQAZs5iaEaGXS+xXGPv3r1Ys2aN6EPTTHdpzcrKQjgcRmZmptTHkVHD5XA4hNGRTKZnUdNmMOPQ3t4uVDcGDaSFAlM0VO1MA1P6lAEnbYfu4Eu6J19LGirnSurM1XRLnz/+3X7GeYboULFGMBaLiS2lHdMOv8fjwdDQkIw308393q113XXX4de//rWlRIiU26effloy0JxxSsYVnx0DPPoAfM5kMOkJACyt4QxQ/TzZp4L7GwwGxWfh83M40s24qBcZ0LL5jM/nQygUkpm01H2Ug76+PnmODodDAGc+SwaElEE+JzZs4igOyq9+nvwMBhPUozwTBAW9Xi96e3tF/zJA5dgc+hMOhwO9vb3o6uqSIIf0Zl2/qP0JLZPaLutMTTQaFbCAIIrusKzvm5+vKch8/U033YSNGzciLy8PwWBQmmMB1vr1mWiMOpt2sGUPfmmfZnot6zQXLVqEdevWwTAMPPbYY3j99dfR2toKAPjCF76AwsJCrFixAlu2bEFmZiYuu+wyfP3rX8ftt9+Ou+66C2vWrEFzczMeeeQRLFy4EOeccw5uu+02yRzyGbS3t0uJ3MjICPbs2YO2tjasWLFCzguTJKSzclyK0+mUZxwIBJCXl2fpH6EZKmz4xVIzMhT1ZAz6BQ5HuvSHiSPDSI8y6e3tFRmjLifIrNmC9qSOBtwMw0BxcTEASDAJQBrOaVm3g1ia3kpwkhluYMofYgkj10UXXSQNrd566y35vZa1+S4NdGgmw1zex6XjJtqR6eR65cqVePXVV7Fu3ToAU/PN5/O99vWeCVAXLFiA7du34+tf/zqAdAHu1q1bxRiSg/6///u/YtjZoZAGmAqIAqY7yfLvROuAqaYCLNIGppQNDRKVFemtzCixQNo
0TbS1tVkoyJwLx4HgfC0bCvBgcOgxHXsebl5Ddna2vIeOE2tXGCTqmk0eCn4fBx3zMFJQAoGAKDF2zuPfWWxOo8SW3Pv27RMaGccJ2IV0OsdGHw6n04lYLCYGksqPiL/O1k2nrInY/upXv8LJJ5+M1atX45VXXpHDMxOF9O0s7icDrPcbvReYeY+mUyj6/qdTOnYKN2CdhWp3egFrp0I6VZmZmVKLRMectDE6bclkEiUlJWhvb0dBQQH27dsn8lpbWysZMdJ99+zZg4mJCSxatEgyT9V/pR6Gw2GYpomzzjpLHOuysjK0t7cDgJwfXiPlOTc3V2pae3t7BYACgCeffPKDIHUOa9OmTXA6nbjkkktEr9x9993Izs7GwMAAjjvuONxwww0oKyvD0UcfjaamJlxzzTVob2/Hj3/8Y9EvM2VKtSMKTGWJhoaGMDg4iMrKStTV1YmjxZIGPRebr2cgQF3A2kQdnLrdbrS0tKCiogLxeBzBYBDPPvuszPOmY83P5ZmifNFB57mhU2V32O3v1yg5/56bm4va2tpZ9386tsN0q6KiQuozuY/U5/F4HF6vV1gIvEfTTNd4RyIRAXN1lvDdWBUVFfD5fHA40tRxZqidTifcbrdkY2jPqW9SKWtHf4LYLAXKzc21ZBHGx8cFKGcmj446bX5BQYE8R+4P2RsalGPtMb+H2Rwg3VyIgSflkT4IHWHaV36Hz+eTztF0uJnVJpOKARHPEf2FzMxMBAIB6cHhdrsxODgoDZQIzPO+9PgOj8cjvgjr7SKRCN58802RIV6nzpRmZGTIZ9Jfm+l8A1O2hA4xs7lMFBjGVM8L7Y9QDvhe7ce1trZiaGgI+fn5+OQnP4mf//zncu752tn8i7n4CrqOV3/WbJ/Na7J/zxlnnIEzzzwTkUgEt956K+69916YpomlS5fiX/7lXzA+Po4rrrgCOTk5+OxnP4s///nPqK+vh9PpxLp16/D666/j1ltvxaZNm9DW1ibTJ3gmGhsbpXZzcnISCxcuBABLcMr9I3jMjvnZ2dkoLi6WwJPPzjRNdHV1SeA3ODiIWCwGn89nAYeoGwnwsckSqcEMEknr1ay8oqKiA/q5UG+yxCcjI0N8bbIcGVDqvjJ8LdmCDocDQ0ND0tOCWXdmSZkE031veK6Gh4eFBcl7a2trw4c+9CHU19db7AEAS2wzl6Wz8fMNELWtJBDMzznssMOwfft2i01lDHPUUUfhhRdewPHHHy+dvwl6HYov/p4JULOysrBkyRJpbNTW1gav14vu7m4kk0n09fXB6/UKl9yenWNAySBIoyyTk5NSM0rB5O+HhoZEmJnx1PWTGRkZB9APAFiKsoE0ssKGRRxcnJGRIdRcIsepVAoNDQ1i8HRbal3zkJmZKY4QkaaRkRHhrHNptFqjon6/31KX5Ha7BeXx+XzSjpv7QUNI45pIJNDR0SFolG4EoTPARFcAq4LX10enk8qGdAxmfnUQqA8jf08Dw3ukY5hIJHDCCSfghRdeOCA41qirfR3ssNqdQN7ToaJA/2hrJhqI/Xf2YFPvj33fZ9s7jYLz/GdmZqKpqUnYCnQUNSVYjxegMzQ2NoaioiJkZGRI91DDMIQiTCrS2NiYzNajvGt5Ycaoo6PDsiccq6Dvk8BJcXEx9u7di9raWpHFJ598Eqeccso/hdwc6hocHMSSJUskMDRNU8ZQ5OTkoLGxER6PB8PDw3jsscfwne98B9deey1KSkpw8803Ix6P47LLLrM4n9og8tkROR8aGkI4HEZVVZXUF+sRIPzD7KPb7ZYxH/y3XqRPJpNJGVtQW1srzk9jYyNOPPFEvPrqq2hraxOKem5urgQ0rMUD0oANHRvKuz3LadeJ+p61LjRNE8uWLZv1/GnA5WCrrq4OO3fuFLtbXFwsoycYQOlzRPvAIJXByru5mAlJJBLw+/1wuVwoKiqyBHE6a0jH1TAMASCAKcoymwSy662eDck6M+oUwzCkEZDuTWEYhvgxdKhp86jL4vE4ioqKpOYdSM+L5yJdl7+jvDJQLCgokG7LdLAZpNP2cjHY5b0Gg0HpPMxrGxsbk6xoZ2enUHQ1/feUU07BH//4R0xOTkpTm2QyiY6ODstYDntGn2eUXYiHh4elQeNM2UR9JrTeBtKNzEjLp2/I8iuWeGkA1U4l5pn47ne/i/vuuw8TExM45ZRTpMfATL7FfBaf+3w/6/LLL0cymcRRRx2Fb3zjG+InUpZ8Ph82bNiAnJwc/Pa3v8Wrr76KpqYmTExM4N/+7d/g9/txxBFHYN26dejs7MQdd9yBl19+GZdccgm2bt2K7373uzj33HNRVVUlz9vn86G1tVXK6kpLSzEwMCD+kQ74DcOQfinBYFCAHe3fAUBDQ4MwAMgu0aMbHY5012n2NKFN5uvJxKMfy99T3gsLCyX5AUCaddJPJRhJloxu+sYyAB3kUbZKS0sl2+pwpOn91AV+v9/CsJmOTq51BTDlOyUSCVx22WW4//77ccQRR+C1116T1zFBNR/Z4k/tg8/1vdp+2O2CYRhYvXo1duzYYQFXGb8ce+yxeP7553HiiSciFosJ7VrXC891vWcCVG4oqaacv0Ql19raKhlEXfvAzSMyowNTIL1xpHvoLB4pUtopYICbTCZRUFAgBoWvo1FJJqcaHTFLyuYYDocDVVVVaG1tRWZmpsyMMk0TjY2Nogg1JYEPrqCgwNJ5jPcxOjoqtB8Ks/6Tk5ODwsJCmKYpApGZmYm+vj7k5+djfHxckGLyw3kdpOly3/v6+tDf3y8CRwqCHenXjRz0HgHWYEQfbP5uYmJCDjU/l4ZDN7SYKSOaSqXwhz/8AR/96EexfPlyPPfcc5aAQR+u2WRtpv8jGjRbQPaPvua7PwdTbrMpwOl+b8/Ejo2NYceOHTjssMPEmXO73QiFQiguLsYbb7xhQcv9fj9GRkbEGJBuNzw8jJycHKHW8BxzsDapcURxU6mUnC2daeLerFy5Env37hXdMTw8LJQ2jTJSryxZskQyqbzHp556CsXFxVi9evWse/jPuph19ng84mRQF3AGKAGt9evX4/rrr8f555+Pe++9Fy+88AJGRkZw/fXXIxqN4vLLL7eAJQw83G43hoaG0NnZiYKCAkHggalmSUTPuZgFYKdXOtB83cTEBEKhEBwOBzo6OmCaJhYuXIjx8XEBMEk1Y0MhyqYub/B6vaJvWKul7RHZQQQq+V4CNOPj4ygvLxfnin0YWEumA9vpzvx86WNr1qzBzp075d95eXkyqsnpdFqCH53dJUX43Q5Qi4qKJCOtQRA9+9ThcMhzIgjBjAzfk0wmpZs4a4l1tl3bMtNM16kHg0FLk0TN/qDcaSccgAT09B34eXSCafM5IofXyJmngUBA9p6ObzQaFbuuwVcNCOfk5KC4uFicYe1Qj4yMoKOjA0cccYQAOt3d3dJR3+v14otf/CJqa2vx5z//GRkZ6QkHnZ2dwkDTDZ/s/gHZKA6HAwsXLkRBQQGefPJJCVL0e+z+BX/qQID37/f7MTg4aKGTMrixs+3spSi8rocffhinn3
46li5dKuPE3m5wyvuYCyhkX7QtO3bswPl/Hct16aWXoq6uTmSNAMknPvEJnHHGGYhEIvjtb3+Ln//85zBNEytXrsRZZ52FsrIyXHXVVcjOzsaNN96Ij3/843j99dcF7OA+5ubmore3VwBdzurVzBXW+Obk5MjsXA0IMMPY1tYG0zTFhmZnZwsAUVxcjIGBASSTSSmboW3X4JuePa1ltbi4WD6X9p69ASjzXV1dKC8vR2NjI2prayVAdTgcMi+dz5hnwePxwOfzWSaEEJgiw0XTYNn8iX4I5U6XYRAU1eVGLpcLjY2NWLZsGbZt2yaA2VzlzX4udFJnrksnemZaTqcTFRUVYvfsgfCqVavw8ssv4+ijj5ZO6IdSHveeCVDttQ5E2ogorlq1Cr29vfI7zi7iw9PoF1P5gUBAEBoGelwMcrXQ0ADl5uZaqFvToZ3aKU4kEsjMzJTMDQBB/Gm8+fms2eDcIJfLhYqKCrkPojfRaBThcNjSGYv3xsNgRzl53/x3JBIRBIt7zHlOnK9GA9Hf349wOCx1ttx3jdKbpin0puHhYdkLjW4ysNVOj85kAJAgmkZWB7n8PLsR0MoymUziN7/5DT71qU9hYmIC69evF2TznQgiZ6qXfD8t+/3p+ja97FmZ6dZMRnamPdSgkGEYCIVC8Pv9OPzwwy1duEml8fl8cg4AiDNMx4yGZGhoSBgQQ0NDcq7ZCZWBDs+i3+9HMpkUA7Z8+fJpr3nZsmXYs2cPgPS53r59Ow4//HCpV+e+MNNWUlJiyaQCQG9vL1566SUce+yx0+7JP/vasGGD1KXffffdMAwDn/nMZ/DQQw/hW9/6Fm677TZcfvnluO6665CRkYF77rkHK1aswG9+8xscdthh+NrXvoZrrrkG119/veiv3bt3o76+Ho8++ihefvllrF+/XgIKZpqoc7m0oaVeYiAIpIFQZuOj0SgGBwdRWloqWTrWHpLtkkgkUFFRIRmB7OxsGVWkxxOQ5cMM3MjICDwej9R4RqNRaZLD2k82z3E6nejp6UF+fj4GBwfFXmRkZKCiokLubSYnZybGxEzLNE0sXrxY6raTyaTQPFlDqOcX8hqBNLMnFArN+bveicVGZqwDpa2ks0lKIZlCDLB5TwxidDZG0wAJ7jKY9Pl8YqvZOZQBrWaLMMClvSSVljI4Pj6O3NxcdHZ2SkaG98GaTgbZQLosKhgMit+SSqXQ1dVlyeboTAd1ut/vF6qhZnPFYjGhaAKQumPDMOD1eqXRTTQaRV1dHSoqKjA5OYnFixdjYGAADke6KZXOTtPfIPhCmXa5XFi6dCmqq6vF/tIH0wGDXrxOXhN/R/+H/89Re3o0iJ02SRtn910SiQQeeeQRfOpTn8Lk5CSOO+44vPDCC4ckhzozxeuYjnU2l5VKpeuRmVG89tprxW/+0Y9+JFk++lJerxfnn38+srOz8bvf/Q6vvPIK6uvrYZomvva1r8HhcOA///M/AQDHHHOMhZXhcrlQUlIinwWkRzCxTwzBGXa3JYWe/gQDsl27dknWkwAkAQiCIG63G0VFRejt7ZV5vaWlpQJ8Mcmia9kdDgeCwaB0Vuc1RyIR8b01U49yz3tiEKn7oFBG2cuC2fx4PI6+vj5L3EHacU1NjTREZY8XjqohCEVaMmCddMCfiUQCV111FX71q1/huOOOw8svv2zJ9M8mK/qM6M+dT/Z0rplawzBQVFQk4ygpk/xezsutr6/HsmXLhNU63/We8cKJcHAjScGh4u7r6xNlzECT9AMqPTqjFBwGnKxV1TQEKm0KrdvtRmVlpaA5NGDRaFRqj/g+Ih1AWsgWLFiAwsJCeTjRaNQS+BJ152GZnJyEx+NBTU2NIJBEOoeHh9Hc3IxQKCTKVqNEmZmZqKqqQnV1tVCWNcq5b98+ABAkR4/A8Xq9uPjii3HSSSfJvNTOzk40NzfLfD1+lz4IDBT4npqaGpxyyikHUHC1QrL/zp7VJLqpX8c9pyGlEgemakt42Ihsmma6xgJ4ZzKcGl1+Py+tHPm8tRLT9z/bfjAzMNuyv5dO+ODgIMbHx1FUVCRdG3VXyJycHAwODspnMCAlUssgA4BlbIdmOzDzpBF6NvAgiq7rsWa6l5UrV8rf165dKxmx6faFmbSenh7L70ix/2BZV35+vlCpTdPEl7/8ZTidTrz00kuYmJjAz372M5n1t3DhQiQSCXz5y19GfX09NmzYgL1792Lp0qWiG5hl5DP1+Xw49thjkUpZx4JoWizliHYDgGQ4x8bGZMQLbU9nZyfC4TCq/9rpl/obSDf7YHBKu0M2CmshGZgw8KHuozzSJujyCACSQeM18j2Dg4PSqVJ3kJyLHptPNoi6QKP/1PdZWVny3Qye9ffzvnRTvXdjkZLd398v7AcAwuABIIwm2k3AWk+vA1qC3Tp4on/Amks+H8oFnytHGNnfy5p7PnPa/a6uLrlOAhO60e5oeNMAACAASURBVBKz1sAUOEJ5CYVClg6iGmTk6woLC4W5wHtzOBwYHBxEZ2en6Fh2W2UAoDM+qVQKAwMDcoY+8pGP4IwzzpD/104zdbPb7RZAJi8vD0cddZScbe4Jg2aeE17bdEkJDWDzd7x33ayJvp1dD9uBWP35fA4AJIB+u/7B2wHTtZ2mDisrK0NpaSkmJydx2WWX4Uc/+pFkOnmvtKGnnnoqrrzySmEW3X777XjwwQclOCH9ms+a8kgbTQYc9WN2drb0JQGmdAJpsC0tLaivrxeKLem8lHWeC91NmnqG197V1SWslGg0KgxAds9mky72fuFIm1QqJaMpHQ6HdJkGIMxBZviAqew0a1J19rK1tVV6VejAtKSkBOXl5cJmiMViAnobhiFjyCi79FG4T5QHPiMyZpYvX26R07kAGTqG4ufOFpzqs2yve54Ls2blypUWG8rnx7iop6dHalEPZb1nMqhbt27FvffeizfffBOGYaC8vBwOhwNdXV1YuXIlWlpaxLBQ4TMLSKdhNsSTiom1bFS2zIJOTk4K2klUCLDWZmi00+/3S5aX38HGR3Qg6GRopJNNMzTSaZqmBSXVNBwqzaysLBQWFsr3zxXlpODFYjHU1dXB5XJhzZo18Hg8MttKK2JtRFwuFzwejzjwxx13nDRs4Hu0AbIvXZPAz7ejmzq7S3SJ+817JzVLK3U7snmoSCQwpQzfLqr5j7RuvPFG/P73v8fzzz9vUYJaUU6HWnNpUMiuAO3/1s5PKpUSZgEVMJuUOBwOCVIJDi1atEjmgw0NDYkxomzE43ELW4HsA+oCUtT5TOkA8boZRLLhw0xOg2ma0u0XmALUyCagvOTk5Mh+lpSUYP/+/ZZM6jPPPDOXx/NPtbKysoSWODIyguuuuw5Aut4uNzdXdNqGDRuwadMmeDwe3H333Vi5ciW2bNmCYDCIgYEBGfUFpGfnvfjii5b6Tf2TDi9gzejTyaZxZo0zx8UMDQ0hGAyipKQEhmFY2DHNzc1IJtPjYphRY41eTk6OZJjC4TByc3PFISPFXHcGzs7ORjQaFUSfWTI6NyzN4NxT2gI6CS6XC5/85
CcPygY5FD3H/aurq8Pu3btl7xgwdXZ2oqysTJxFfo+dQvlurdLSUhiGgY6ODmRnZ6O5uRkOh0Mor5wNmkgkZAQa6/qoj2gjRkZGAEDGuhCUiMfj8jsNMui6K2Zb6UTSKafzz+ySaZqSgWRgzNnrpIPzeefl5UnjKtZ9MnjmtZDhRefY6/VKbT/tLWWYTWqY3aQ8MWNCp5x+DfUwxzOlUimsWrUKp59+Ou677z4EAgHxM8geYMarpKQEVVVVAKbKhnSAuXbtWjz//POWM6uXHVDVJUYApBcInX5m7oCpWccakKIvo/0h/rzllluwZcsWZGdno6qqCl1dXfMGGzVLbr7UTb1KS0sFPLDvAWU9Go3ivPPOg2mauOCCC/Cxj30MsVhMfFgA+I//+A9kZmbizTffxP33349NmzYhkUhg3bp1OOuss4SNQRCB7DqWEbC2mrLOc8PntXv3btlnPXKI90yWFMFEBo+jo6MIBALSuLStrc0SSNFe0IdPpVLIzs5GT08PhoeHJRNK2Wb5G2WIIxYDgQB6enokQxqJROD1eqVWnvfF80Vfmv6/3++3NHzq6OiQUTUMSDVrIisrC4ODg7P6y/SDv/GNb+BnP/sZvvzlL+OXv/zlnGRtOt/lYCAIz+90gMlc9LRppnsc7Nq1S+5L03mPPfZYvPjiizj88MMtHYvnut4zAWoymcS5554rqfCXXnpJWkavWbMG/f390vWWqA4FPC8vT2g5Oh3OonxSYrRAOxwOGU5NpIdINoWEyDfRGhowZiV1oXZ/fz8ACK2FnXyZcWWTiGQyPfuorKwMExMT6O/vF+HQ9F0u3X0PmFLEAwMDiEQicn2kILD4m/Rd0mRisZjU8RqGgd///vc48cQTZSC5DhhZw0plxe7FwIGt2akIdPpep/rt72M2a3R0FB6PB4FAQBw0bZDp6NuDZ/vhfvjhh3HmmWfilFNOOWTnX3+H7lr2fl6f/exn8e1vfxs333wzHn30UTz77LNyDvSyKy2NcM7VuPK5dXV1YWhoCCtXrsTk5CQGBwdlthiDVI/Hg4GBAXi9Xvh8PhnmbR8RoLOp7MRNR4yGuLCwENFoFIFAAO3t7fD7/TLnrKioSALhzMxMyZDOptC1gs3JycHLL7+MtWvXyu90cM85xAsXLkRnZycWLFgwp736Z1z//u//Ls7Lrbfeis997nP45S9/CcMwsGjRIjQ1NeFzn/scNm3ahEsuuQRbtmxBdXW1NOthw6OGhgYZJl5fX4/u7m7R3cyiaKBCZ7qoc3SNJGe6xWIx9Pb2ory8XBxeBmTRaBR9fX1IJtNzSxlwMEgB0vMsSe+ivuZ3OhwOCRaY8Wf5BGWTfRU4f5A/Wcek6W5s5qSd7ZmyNfMNFO2vZ5C6c+dO+XyXy4VgMCiALO2vzuIcilP+dtbixYuRkZGBlpYWFBcX44EHHpDmTnpsFPecHffZ5Ir1ndxnp9OJ8fFxeda0g9x7Zms0nVBnG+kbMIPqcKQ7gXZ3dwvrKRqNwulMdxnOzc1FVlYWgsEgfD4fsrOz0dXVherqauzZs0corMlkEp2dneLo0idwu92oqKiQkRjMjsXjcXR3dyMajYr9ZZYfSAfhmq6sM/mGkaZ0RiIRqVd87bXXUFdXJ2UWHGHChl9Lly6VMXiUF83A0ssw0mN41q9fj2eeeUaCey4ti3YZ10w59t8oKipCe3u7+I98lnqUHX0a/W9+ZjgcxsMPP4yzzjoLZ5xxBn7yk5/M2VeYrpRAU57nuy666CIYhoHOzk7cfvvtImf8XNM0BRBzOp148MEHcffddyOZTOKuu+4S0Ix7tGLFClx99dXIyMjA008/jccffxxPPfUUvvSlL2Hx4sXo6OhAVVWV6K5QKISVK1eiv79fQA7TNEX+33jjDQBpXaQDRs0qINAGTGUtqafZT4bAMzOc2dnZqKiokEQTmQz0/Z1Op5xTNkLTyRyyT8ioDAQC6OvrQ01NDbKysqSMj2AV95TgTiqVQllZmXTuJ5WZ2VqHwyFxBs8ekLYjrIVlIyY9p1rLAHUJ4xE9e3smvTlfPa4DSMrw22EEuN1uHHHEEdi+fbvlPPD8HHnkkfi///s/HHnkkfP+7PdMgNrT04PCwkJBW1asWCF1OaOjo0K15awxjqJgmp/CzEwbmxHYO7axSRDrfPQDolOh6yopeKTBUAh7e3sli0vaAYfTejweuN1uFBYWIpVKN2EJhULSOTeZTGLv3r1i+HhNvPba2lqpXQ2Hw5Il5nBx1kQFg0EJmlm8rjOapN4EAgFxuuiUhUIh3Hzzzfja175mqWMtLy/HypUrD3BwuLQTvnr1ajQ0NAgCY89I6Nfb/49BdF5eHkZGRgQQYBaVwYbORtNw6MP02GOP4eijj0ZNTQ1qa2tx5513HkBVmG5poEO30aaie7+v0tJS/Pa3v8WRRx6Jk08+GdXV1VizZg3+3//7fwdQoLRC00CCfU3nZOgsU0lJiQRqdqeCQYOmu/f390tjAnbfoxGk48GZj0RFCcqEQiFhU3i9XukO7Ha7pVFNMBi0jBGxOyX2+0qlUqirqxOWB9HBI444AoZhSAbMNE3JrLLhG2ku77Zz/o+wGLyNjY1hdHQUDz74IAoKCtDT04PGxkb4/X786le/wjXXXIPvf//7cLlcMlaDjg+zl+yymkyma/2j0ajsPYMPDawBkN9zJBEdK87kKy8vR1lZmSUr1dfXh0gkgomJCZm7yHojygvpl+FwGPn5+UJZ5Fw/zqScmJjA2NgY/H6/NGRicJqZmWnpkksGC88JHTIg7Sj09fXB4/FIoA68M6UPB2MW7Nu3z5JZa25uRk1NjaV7vT0z9W4tUny7urrw3HPPCeVPA9LMSNMesMkPgzedwaZvQOCANpy1cdq5pDPOoNQOYrHhEr+bNEZmYNhrgiUJZDc5HA60tbUJTZP6i9/PrPvhhx8ujbT43T09Peju7hbQXWdKXS6XBMG0kdyP5uZmxGIxHH744fKs+T3xeByvvvoq1qxZI1mwr371q7jllluwePFiS/NDrWNnkyvTNKWDsd47yhL31f55Wt7IQqBfRjBelxLx+3SfA34P9zuVSuGJJ57AZz7zGSSTSZxxxhn4wx/+MGcZ1PL/dtdbb72FhoYGfOxjH8MVV1yBzMxMPPfcc3jiiSfEh+J+JJNJySA6nU589atfhWma+MQnPoHzzz9fOhxzz0466SSceOKJEsxlZWXB6/UiFApJ3WhhYSF6e3vFT0ilUujo6MD+/fuF1cGaeftz5yIIpPuNsOEcGYw6Y2sYhmRYHY50mQ5ZUrxPnS3l/ZAST7CCjBN+TmFhoaXsg8AL5z0TfKmoqJB4o7e3FwMDA2JLyJwA0rqGYCczzZ2dnTDNdKmf3+8XncLgnrojmUwKKzORSOBb3/oWfvGLX+C8887DL37xiwPkgGd+vrJFVpFOGL3d5XQ6UV1djebmZste8lkceeSR2LZt27w/9z0ToFJJu91uNDU1Yc2aNYhGo4jFYrjhhhukGy2VMTAVbHI+FxsnAZBmCAxoSaPhA6Ujy+BUz+HUSDs/
l4csEokILZC0LNK1SOmqqqqyfB4DRXayY3cvHYR5PB6Ul5cjMzNTKGWmma4namlpke8jRQZIFyIzC6yd656eHnFQaGSYZeIKhUJYt26doKaVlZVYvny5pcMgMLMzYZomlixZgt27d1v2nd9pD1rt6X8izmwM0dvbKwE2HTTuH5WNvWkTD+bWrVtx7bXXwuFId2Jtbm6ek8y9U0ZjLgHxe23R2enq6kJZWRnuvfde9PT04Pjjj8cXv/hFXHnlleLAAVbQYabaBJ1dBSBgE7t36s6ARJR57jX9hwaWlCGO8XC5XEI/49mgk5VIJMRYEYVNpVLIyclBU1OTULtYB5qTk4PW1lZ4PB584hOfmDHLxPuigTdNU6iNpmni2GOPxbZt27B27VoJiHjeAoGAGEjqmHciWHi/LR3EbNmyBaZp4q233sLdd9+NgoIChMNhnHPOOfjBD36A888/Hz/96U/FOUkmk/jwhz+M5557DtXV1fKsAoGAjHGhziVd026gSQsns2NwcBDBYBDFxcUis9Sx+/bts8zcy8/Pl8CUjA8GuTwHdDyysrJQUlKCvr4+AU8YKPFa8vPzYRjpxmGpVMoyQkF/FnUrA1TWPjLLv2LFilmzO/NxSg6mI3Nzc7FixQoZLp9KpVBdXS2oOUGqd6J271DWY489hu3bt0uZkGYraZohHW7+4dg4PgOCBJQFPkMGSZxNyr1lsEibTRn//+x9eXyU5bX/952ZbJNMZpLJvockhCWIuLBoqSBy2aRVqa0rdWmxP9e6oPSWTQQBBW9rvXqxLWq9V6/aIlqq1gWxanGJ7BCWmIUsk3UmySQzWWbm/f0xfk+eGRJI0Frb2/P58EmYmbzzvs9znrN+zzncVzUQqGma2Cmapkkmk1kr6lM+A51OZtEpYylrjEYjRo0aJeU0nZ2dMuqFslWdMkAbhogDXsPr9aKqqkqcE7PZLNknoL+hYG9vL5qamkK6W0+dOhWvvfZayKge4NT8pOp4n8+HnJwcOTM04GnwhwfQubdqs6POzk4pqyI6gXpDDS4A/XqJ1yLRjnzppZewYMEC5OTkyL4Mpg/5rCfTLadDXq8X7777Ll577TWsWLECe/fuxfz583HeeechOjoaO3fuxIsvvhiCEOG65Ofnw2AwYMeOHXj11VcRERGBjRs3ytQMfpb7GAgE8Mknn0iNP88wYfHl5eVwOBwA+ueFxsXFhTSvUrPviYmJcmZ4hni+eOZopxPl6HK5RM6Fl+ER9pueni7+AQOVPDtut1vs/PCRkczuNjc3S+AwMzMTLS0tsNvtyM7OFhvE7XYLUlJFUaoNxmjXHjlyRIKMahaU/4jmZFCejqLKJ36/Hw888AD+/d//XdCY4cmiL2N3qtnaL0u6rsNut6OpqSmkw7J61s4666xhX/cb46AyM2IymWRY7Z49e/Dpp5/KHE86T7oebOGu1jOyRoPKUNP6ZxYC/YKUEQs6lzQ81QHi7EbGBaahrdYzMTrOw2SxWGCz2UJw56wPpbNLZUVnzucLzmUrKiqSaEwgEGxu0NDQIEzLDJGmaaJI1Igvmb2hoQHt7e2SIaJCZRSxp6cH1dXVyMzMlGdnsyMa+cDQO3nRYVZrSrnWVHD8XY3G8r4Jk2JdjprNpoPOyOZAUGIKtY6ODolIzZkzB0888cTf1GFUM4p83n+0rCv5KjMzUxy59PR0VFZW4oknnsCIESOwaNEiHDx4EJs3bz5pDYQK446ICM4PY7QzvE6byoqGUW9vr5xR7qfBYIDD4UBcXBwyMjLw/vvvS802eYJBJJWYoaSCBSBOIZ0B8gXPIOuoTkZqdpVUUlKCgwcPwu8PzqOjk6pGrmnUckYyYUb/olDSNA3l5eV47rnnZP7j7bffjvXr1wMIZgxeeOEFTJkyBU899RSio6MF+vvAAw9g2bJlWLlypcypjIqKQkZGBvbs2SNyIbx2X0VlEO7Y2toqmU6/3y8/WR5BSDj1TExMDNxut+gB8jqvyWejfKReoCzWdV06IBqNRqSnpwsSh0YvHRkaeoRPMnvEjF5vby+qqqoQGxsLk8mEq666Cs8+++wJxvtwoPlD+SzPRXR0tDipfG38+PF47733MG3aNDFu1UDm10XPPfdcyAgzVX739vaKI0Y4NZ+ZASciNWh3UE93dHTI+CJCcQkRpBOm6j0V4khepFNKRBgzS/wOOgI0/ll+1N7ejujoaNhsNnG0ODKHDizr3dRZ7+RVPrfVapW6W74fCATQ0NAAp9MpclTNYHLGKGUt7Q9+J0fa5eXliXE+VJ4byJErKSnB9u3bxYDnv3DnLxytxedRHSXV0GdjPdWJUrO06v4BQXvtrbfewhVXXIGenh5897vfxR/+8IeTOqhqkP7LkHqN3//+98jNzYXP58Pjjz+OpKQkdHV1YfLkydi1axcuvfRSjB8/HmazGU8//TT2798v+0reS0pKQlJSEnRdxx133IFAIIBJkyZh0aJFsFgsYq9qWnCu6ttvvy2d8gOBAEpLSwWqypFW5AO1uRIDM3RaGQwiTxoMBinBow3IsZCapqG6ulqg5A0NDTAa+5sd2mw22O12kSucC0x7lsEIoH92Kfm1q6sLjY2NSE9PFzSLGkhh01M2clSD30CwsR+DgbQtKioq0NXVBbPZLIktlZcACHKrtbVVkkZEIVBX0J7SNA2VlZWIiorCtddeiyeffPJL8xGJfP5l7GT1bHGdx4wZg88++yykLlm1/4dL3xgHlUqtt7cXzc3NuPbaa9Hc3Iy0tDRxaij8gX4ceyAQkMNBQyQmJkYgLXRovF6vQD1oFLCzFCPxRqPxBMy32rhCvb5aWE+HlsZwbW2tZADpcDJ7ykNPZUn8PDs+0qFVnWBGO1morhZwd3Z2oqamRp6H9+90OpGRkSHRTTYJ+Pjjj7FgwQJxoPmeSqdiWrW+pqSkBPv37w9ppqBSuONGJ5bEqKjNZhPhR6OSxpdaexJ+b7wXh8OBrKwsMYTCM8YqhSu44VK4Y/SPmEH1er1YvXo1dF3Hvn37sGzZMvz85z9HX18frrrqKmzcuBGbNm2Cy+XCzTffjLy8PPzsZz8LqQPlXtMRZSaM3Si590QKqIaE6kjqui5zyCorK5Gamoq4uDjExcWhrq4O3/72t7FlyxZYLBY5CwxmcbQLDTUVusnzx3vkmVY7RU6fPn1AvlWfbyDSdR1JSUloamoCAEyaNEnGJ7Fmht/JcTZqreC/qJ98Ph/y8vKwbNkyAMEs95o1a5CcnIz8/HzMnz8fK1euhK7r+OijjzB9+nS8++67uOOOO7B06VL85Cc/gcfjEZlLSC15xOFwYPTo0aI7GMAE+hvbJSQkICMjQ852IBCQmdBsOkNjnRF5k8kkEHQGD1taWiSYSYgmHd2KigqkpaWJgeh0OgXmy+9sb28XQ9xoNIoz3NHRISgBBncYcKEzoxpCgUAgJEALDL1zqMqjJ0MVhMt2lruo5RxTpkzBBx98gKlTpwpq6GQG/d+CmPGm3qas0jQt5EzSQFQN+JSUFHH8iY6g/CIMkn/HoAadXF7L4/GIgcb9ohz
jdWjsMshmNBrhdrvFcaauZ7bTbrcL5Ds9PR379u2TOdC0DyorKyUAR+OfgZWkpCTppsygAbOsDDioHYJJDCqqWU4G/D0eD3bv3o3zzz8fuh4ssxo/fjz27t17QoBvMFKdTMpoJiN4DdUBpW2o7iHQb6sRttve3i4ODRsMMXDKz6nZQTUTzsAAELT3XnzxRSxYsAApKSkhz8R7VPUPUXxfNoCtnpmJEydK4ubSSy/FG2+8gerqalx22WVYu3YtSktLsXDhQrS0tOD666+XmspXX30Vu3btknXl/o0YMUKcoZtuugk+nw/z58/HtddeK/q8rq4OdrsdO3bsQHt7uzSMoyxiUIZ7wv3gGBzafezKzyAd7WKWxLGu02g0oqmpSc4FId3M1LGkTdM02U/6B+ooqMTERLEnIyIipIkpz1d6ejrKy8tRXFws8t3v9yMjIwMHDhwQn4Cdje12u9wvAFRUVKC+vh5ms1nsEZXXya8qAobJJTrnavAs3Lby+XxYunQpli9fjoSEBLGRVRpull4N8gyXVBtOlfXq++eeey4++ugjuS/VUR0ufWOspeTkZHR3d6O+vh719fVSO8p6Ef6fGHIagWpmLT8/H/n5+cIoACSqzbpLtUtdVFQUYmJiEBsbKxkOj8eDrq4uqQ1Rs4NqB7vY2FhYrVbExMQI9MDhcKCxsREulwsdHR0C+UlISEBKSgpGjx6NzMxMiRCRQT/66CPs378fNTU1YlRRCaWnp8s4GmZGDx48iLKyMlRWVqKpqQlms1miShQ+6rB0RmuMRiOOHj0qB5GDt9U220MhFRqbm5srSoqCmVEztW6RDqEaqdU0TeAdjNpxT9V9J9SUzicPqHroly9fjhdffBGBQACXX375CYpDJTWrcTqHRqXTdXL/3tTS0iK8/Nxzz+Hee+/FhRdeCF3XUVpaCrvdjgMHDuBb3/oWfv3rX+M73/kOzjnnHDz88MMoKCiQSHx8fLwoHUYSB4qaqYKK685GSTw7JpMJmZmZ8hkaNp2dnRg5cqQ0T3C5XKK4yHM0HjVNkwHaRqMxBHKbkZGBESNGCBqD52AgmBZRBCdzKFNSUgTqBkAGl1PZqEqASu1fDuqJdNddd4WcQ4vFgtWrV+Oee+5BW1sbHn30UTzxxBOIjo7GmjVrcNFFF2Hs2LF4/PHHsWbNGmzatAk5OTki++rq6sQ5tNlsKCoqEjimmkHlCJj09HQpCQGA/fv349ChQ3A4HNJEBghGvymD7Ha79CUgRNjlconSTktLg8VikQY3tbW1SElJQVRUlEA3TSaTNABrbm5GU1OTyMDk5GTYbDakpKTIvMPY2FhB2gD945ro9DEYq+s6brnlFixYsCCk9T/fG4jUjN9QHEi1jEP9d+aZZ8pneAbPPfdcvPfeezL/++9BRBNx/9W+E8x4U+cHAgGBWtfW1sp60Nmj7laDXb29vUhMTITdbhf+orHMz7Hej2goEhFElHUulwvNzc1S6uJyueD1emG1WhEXFydyt6CgAElJSQgEAsjJyYHf75c5tHw2OpNRUVFISEhAWloaMjIyBOro9Xpx/Phx7Nu3D3V1dSG9MFRkDAMwDBzyfv3+YNNHokP27t0rzq3L5cIFF1wgvDEUUm0Y6mjyOAP5tCOIAhpIB/Peyf/cJzpKavkV7Tw6bXxmEv+ewYS33npL7oVdcXl+1H4W4bbFVxWY2b59O+655x7ExMTglVdewdKlSxEXF4cVK1Zg0aJF8Hg8+N3vfofc3Fw8//zz+PTTT1FXV4drrrkGDz/8MG644QYAEP3H57XZbMjNzcXIkSPx4Ycf4plnnpGmcJ2dnaiursaxY8dQX18vTbnoUNIJ5lolJCSE9HhwOp0y9oioAPZRsdvtMJvN8Pl86OrqQkdHh9gF1MHs2mswGGSEV3l5OSoqKuTsMtCSlZWF9PR02O128RkaGxvR2Ngo+8KAo6YFx8SwBwEdR/oF1Oc2mw3JycmIjo6Gw+HAhx9+iM8++0ymB6gJGjXAyIyxmv03GAziyKrJJtoh4bZpXV0dIiMj8YMf/CDEuVTtq6HasTw7p+Ogqp/ndQb77rPOOivkPKm/D4e+MdaSGiUkRIBGqAo5VSNs9OIZleA8Jm4uhRSjLyT+TXJycoiXr2LsqRx4HX4vr8kMH6E+fJ+1R/Hx8XLoGJ0nPp/ZUV6Hgo+Hg9Cd+Ph4xMbGyvN4PB5UVFTI2lARqWQwGKRLG9eLsGI+C5+LQ7VV2OzJaCCmZtRYZVj1XtRIq3odvs81ByDCkO9TuQP9Q8bDiYxvNBpRWloKk8kUAucIv/+B/v7/ImVnZwPorzcpKSnB22+/jczMTLz11ltwOp0477zzsG3bNsTExOCpp56SLsk33ngj7r//fqSkpEhHap4F1RihE6tGuQOBgDiZHLxdW1uLrKwsmeOnaRqOHz8ug7oBiJLiXtNZYK0JURDkGaIx1LqOmpoagUYCoQo6nHfVqPnJKCMjQ+6ZCofNv9Rr857+L3SIHi7Fxsbi5ptvxuOPPx7S7bGvrw/XX389fvrTn6KxsREPP/ww/vu//xsGgwFXXnklHnzwQTz88MN48MEH4XK5QqLOHANBOUf+ozPCGmFCf2kc7d+/X2BqERERSE9Ph67r0oAvOjoaaWlp8rvX65XylN7eXiQkJEgWnbWvLS0toiOio6NDmoO4XC6ZHarrOpKTk5GRkQEgiC5pa2uTv42JiYHNZgPQr8Oouxh0peHidrulrMjsMwAAIABJREFUxglASJ3dQKQGj4YSRBlMbppMJowYMSIkeAgA48ePR319vei3r5NoJ/D3QCAgckKdx0geiYuLQ21tLY4cOQKv1ytZDgAhNgkzxtRhPT09aGlpkcwngwmUg5SFauCWtgWDDXyPARRCHqnTs7KykJ2djdjYWGmoVVdXJ/yu67rIHpYGWSwWZGRkIDk5WewGh8OBXbt24ejRo5LBURuIqbYTAyN5eXmYNWsWZsyYITNMgWCXav4tZ/H6fD60tbUhKyvrhIykSqq+Vw1erg3tioSEhBM+D/Q7muGBGPV6PBdqUoPXJxG+zz3gftDh4rUYgGIw/LLLLhvUDlMb4Kj3M1RS70/9PTY2FitWrMC0adMQFRWFJUuW4LLLLkNvby9efPFFrFmzBj09PXj00UexYMEC/Od//iccDgdKS0vhcrlQUFCARx55BP/2b/8GAOJEqnZaUlISxowZI/zJXi+qg09dTlkJBOWC1WoV+dDe3i4jiJjVpixLSkqSa/l8PjQ2NgIIym82jQsEAsjOzhbZSR+huroamhYs56OjScQN99dgMEjZG7PZhNNmZmZKcJnlGTzjaumcakv39fVh//79aGtrQ0xMTAh/0t5QeU4l2t5Go1HmyJInyGsDyVTey7Zt20Lq34frmA6UNT0dh3EgHyA8mMOgrVrywqDAcOkb46CmpqaipqYG7733nkBZ1FlyFPSqMGNnT3ZoO3z4MPbs2SMOLp07RkTUTAmZit1yGTkFIBlZNSJNgUbh3dvbi66uLinaZsaUkC1NC9afjRgxQtq7d3d3o6mpSZ6J0V
RusMlkQkpKCnJycmCz2UQB1tTUYPfu3aitrZVoDdAveFWoTWRkJFJSUjB9+vSQ9WtqapLuxQcOHBBnYty4cSgoKBClEE7hURr1c3x99OjR8jodCK5fuCJQr0ljpaOjAwBCBBYPrup0cC/U7yb5fD44nU7U1tbC7/dj/PjxIdAf9bvVn8Oh8MOpXuMfLYt67733AuiHpaelpcFoNMLhcOBHP/oRNE3D3Llzoeu6jE1i5NxsNuPFF1+ULAOFtLqv5E0a/4SLsW6lqqpKzkh6ejp6enpQV1eHyspKlJaWStY8IiJCukwT3sOuuGwxD0A6ewMQxwCAQL3ZeKyhoQENDQ2IjIyU+uuBeOFUgl997jPOOCOErzi3jUYb+UbXdYHV/Yv6yWQySWfIH/7wh7jxxhvxxhtvAOh3nNasWYO7774bSUlJWLZsGdauXYuWlhYsXrwYuq7jyJEjstaHDx/GZ599BiAYMIyLi4PRaJR2/6zVY1bE4XDgwIEDKCsrQ1dXlxg4KnSXxjib9bEzO52T5ORkWCwWKZlwu90yVkzTtBA4JQ09tRlefHw8MjIyEB0djaamJrS2toY09xgzZgwKCwuRnJwc0swGwAmBGQY8n3zySdxwww0hUPuBKFy+n8qQGOzMAEE5SGSRem2eyb9Hvb6u69I9n03ZiLKgjifCiGN76Hiy9gzobyqiZhjZ2dNgCM5VdblcYgwDEJuBskxtQqQ6y1wnr9crXafVkUWpqanS0dbvD44kYkkQnWieFZYhqd1LKXuPHTuGPXv2yPxHPj9tCnW/2IW1sLAQ8+fPx8iRI2Ud1OwYO6lTxnOcHR37tLQ0+buB9ob2i5pFAvqD9wAwbtw40RfhDWXUgOhgjrDBYJA9t1qtIZ9Vs1Zcg/CEharffT4f3n77bRllpgYj1e+nzjqtzFFYsEi9xqWXXor4+Hi89tpr+NnPfoa4uDj87//+rzipq1atwiWXXIK+vj4sX74ca9euxdatW/HSSy8hEAiguroaW7duxfTp07Fu3TqsW7cOo0aNAhDKByyLIC+yzIBwV9Xp48QNZgZdLpfUhKqBl8TERKSkpEigjTZ0W1ublN4RAZmTk4OCggLExMTIXGUmWBjMjoyMRG5uLgoKCmCxWBAIBOuna2pq4HA4QmwQm82GvLw8pKSkyPqy7KC5uVmQCYWFhVizZo2MpWtsbMSRI0dQUVEh51ntem2z2WT8JM8g7VfVNuJ+ElmZlJQk60h7R0WwMHAVCASwZcsW+P1+XHfddcPOfPJaXxa9dSrdEH5O2BRJPePDpW9MDeodd9whgpUNJCgk+GAU8hRQZBKTyYTa2toQgcAZqPHx8WJ0cMOZIa2srAzJfACQtL4aTVEFF19XFQpbs/MeOasJ6IeFtbW1CVSMEUur1Sr1qVR2rJNxuVyoq6tDb2+vQKMGy0IyspSSkoKJEyeKoGc3SV3X0draivT0dLjdbnz88ccYN26c1ONMnToVVVVVAwpSVcAOFClh5IqBAq6HmlXifZLUpgXhkSA1Uqd281Uj/KqCUiO9BkOwo+9DDz2E6dOnY//+/YNGtMLX8VSkPstA63Q6h+/vSawT/cMf/oAzzzwT7733Hh5++GHcd9990pBi2bJl0nTlsccew3e+8x2ZT+ZwOGS+YHimnDxDI5/OQW9vr3QN5hxgOhd+v18azphMJoGosyaqsLAQe/bskc6YKSkpAILwdbanp4FJudHZ2SnoAVX486wNtmdDEeThZ3HcuHHYu3cvgGBWtbS0FOPHj5cGK8O59v814p4EAsE5c5qm4f3338ebb76J5uZm3HDDDbjwwgthMBgwZ84czJkzB5qmoaysDI888ggiIiKwdOlSuN1uREdHIzU1Fdu3b0dubq50GTeZgvOyCRfUNE0QF62trRK8YwMZBjWtVqsY+R0dHYJEIc9nZGRIp2YGAcnXRNKovFJZWYmsrCzouo4RI0agpqZGApHUYTQSR44cKeO4CGWjzE5KSpIMKbtZq/VLJpMJpaWlMBgMWLhwIZ555pmTykBVzg/GozRIhyLrxowZg3379oWUfWRkZEhfia+bVD1OHUK9z31XA9LqzFPWpGVlZeHo0aOSHSNMkEY6GykZjcaQ2rukpKSQYAODEgyEE/JosVgESUY4MB1NznamM0oHkPKQ/B0dHS0dSQOBgGT3d+/eLf0c1GAi94Y/2ejJarVi/PjxiI+Pl2dV+YI9MdxuN+Lj4yVr4vV6UVpaiksuuQRA0AGZNm0aXn755RD9Gf77QFkh9fsSEhLkO9SgAfle5X/uN3/SQeG6Wq1WQVyo9adEWqiBf9VWof3B6z7//PO48sorMX/+fLzyyisho3y+LKk2Z/h53LZtG7797W9j+/btWLt2LeLj4xETE4Nt27ZB0zQsXboUa9euhdFoxJIlS7B27Vr4fD7ceeed2LBhAzweDy644ALceuutGDlyJC644AKMHTsWN910kyA/XnjhBWRkZODo0aOIj4+H3+9Ha2urNCzlLNC4uDipsWbHdBL3g6Pd1Nmh6nMy0BEfHw+73Q4g6KQSFgxAulXTUeYZyMvLEwgvZ4sy49vX1yewXK6jWj5oNpvR1dWFwsJCOBwOjBw5Erqu4+abb4bf78e9996L1atXo7q6OiTrzsxuUVERRo0aJdd75513BEEA9KNC1dISEu16yhx2KVabJfE98vbKlSuxevXqAWs/B6Ivm73kNVS+H+x7B9MJU6ZMwYcffhhyjeHQKZ9S07RsTdPe1TTtkKZpBzVNu+OL1xM1TXtL07RjX/xM+OJ1TdO0RzVNK9c0bZ+maUPqLUzG7u3tlYwaGVfF9avMxqgF6zvUGWBpaWkYN26cCFI1ZU/svMvlkppUOjvEw8fExIRAkdSGFGzsExERIffDgxYdHY3Ozk60trbC4XCgqakJtbW1Mpexp6dHalUCgYBAV7xeL5xOJ/bt24cjR46grq4OQD+kSHXIKJDS0tIwc+ZMzJs3D3PmzMHZZ58dEglMSEgIyRSTuVpaWiTS2tHRgbFjx4ryDDe8qUCo0AZSJD6fDyUlJQAgCg4IVXzq3zILrXZaZU0kGzsx+9nb2yuGmLoH/F2FRNHJoWC46qqrhsJ6Qyb1uf7RHQ0+w/e//31p9rBkyRKsX78edXV1SE5OhtlsRnFxsdQ+k7+AYM14a2urwKJokNMpJcIBgHT5MxqN0vRM13UcO3YMhw4dgtPpDIEoapomjQmYcaFhQSOMnS6tVitSUlKEFxmJpeFH+BFhWsxqzp49e0ChOhBUbKB1CyddD3Yt5ZmZMGECjh49eoJBryrnf1GQHn30UTz00EMnzKWNiYlBbm4u3n33XVx77bVYuHBhSIZlzJgxuP/++/HAAw9g8eLFaG1thdFoxIgRI6RpxTnnnCNGORCcAblr1y6BuzEowmAZI+3JyckiPwkH1jRNRlZkZ2cjLS1NOqyyWVZXVxcSExMlowpAMl7sGqnqDI/Hg46ODql/stvtKC4uRmZmJhwOB44fPy6dcYmOiY+Pl67z5FdmTdVgLgBcf/31g9b/UJ8MNbo+EJTxZ
DR+/Hj5Hn5/UlIS3n///SFf46sgooiAfhlOp0RFZ1E/M1NKuPL06dNRUFCAiIgI5ObmIhAISD256kCoZSVerxcWiwWpqakhmR/uEQOwzLgTFqnOtWVAnAgQjjbyeDxob2+XYInVakVubi4SExNhsVgkQ8TMXk9PD2bNmgUAIaVEvI/o6GipLS0uLsa8efMwZcoUqdUfjOhMWCwWaXLX29srfS50Pdj87swzzwzpah1uYwyFp2jT0W5hIEDlXdXxDs/m0PHkDEqWJakZbmZMmbVSM1lqBozO7ttvv43u7m4kJCTg4osv/tLlQuG2VfgzkHw+H95//33ExMSgqKgInZ2dKC8vx8qVK2E0GrFu3Trcc889AIB169Zh8eLFMJlMeOSRR7B8+XJER0fjgw8+wL//+7+jpqYGr7/+ujRsa2hoQE9PDy6++GL4/X4cOnQopHSAtZrd3d2CbCJSkdlQNqFTbXeO4eIZUEuCyP82mw19fX1obm5Gc3MzOjs7xWEm0or2L5t+VlVVoba2Fh6PR5zIlJQUTJgwQbKqmhYsGaqurhZ/IDY2FhkZGcjMzISmaUhKSpIAC3X2zJkzZYRXQkICkpOTkZOTg/nz52Pu3LkoKioKcRYvuuiikN4V5FWuncqfHGvGJk7kK/W8hSfoWltbYTAYMH/+/CHJaxVhMxx7VQ1eDcaD4RSevFLP9Pnnn39Ku2rQexnCZ3wA7tZ1fQyAyQBu0TRtDIAlAN7Rdb0IwDtf/B8A5gAo+uLfIgBPDOVGvvWtb+GMM86QxgLcGL/fL3UTVCg08miA0vk8mUJhN0QKZRVuxOgllQkbKlABDKRMvF5viDIhZIzQQyq7cEWSlZV1giJRuwnPmjVLrqk6pWw/P1wlAgQZRlUiAPD5559Lpravr0/gO1yjcOfyZGQwGJCTkwMgtLvXUJQHDVE1SkmhQsVBpaHWXfG7+D0UhoFAAM8//7wojpNlyoZCAzns/wzk8Xjw0Ucf4Re/+AWam5uxevVqrFmzBosXLxYobFRUFI4cOYJly5bB7/fD4XDAbDbj448/hs/nQ2trqygZ8gwz9t3d3Whra0NTU1MINK2pqQlHjx7Frl27RCBTsRDWHh8fL/WnHK9AA72jowMej0cgOampqYiNjUVCQoI0RmNzLUJn2FWVqImoqCgsXLgwZD1owJxsf08m5MknhDgZjUaMHj0ahw8flqYh/6KB6dJLL8Xx48dx3XXXYcWKFdiwYYPUhwLBc56bm4vs7GzceuutuPrqq/Hb3/5W5GNvby82bNgg2VcGGGNiYnDs2DEx6N977z3s2rULLpdLAhcqhC8+Ph5JSUkSfW9paUFNTY1kF1U4ZG1trTTFI19kZmYKXJ1BNafTKWNIGECNiYlBcnIyAKCwsBB+vx82mw0ZGRmIjY1FeXk5jh49KqUqHH1jt9vh8/lQVlaG2tpaeL1etLe3IzExUeobCU9VYb07d+7Ej370oxPWfTjQK9XIGg6xWQa/DwAmT548rGt8WaLsULPEhESrdobBYBB94fMF52+mpqYKZLu0tBQHDhwQ+8RsNsv1aYDxWZmZ6+vrQ319Pdxut6wBIYpEp/T19aGjo0MmBqj6V62RJoSYXX+tVqtAvvmZY8eO4ciRI+jo6BDHOyIiAk6nU2oAiTRhsC8zMxMzZszA7NmzkZeXF1KvOxjRseC6sQ4VCNpCzNy2t7eLE8xnUoPVw9HNo0aNgs/nC7EBVdRc+PVpG6hZUe4PG+gA/XBa1qGqAXC1WSP3jvBNBtk56zsc5TYcUh0C9YwNdL3Y2Fjk5eXB5/Phs88+w3XXXYeEhATcf//9WLBgAbq7u7FhwwZ8+9vfBgCsX78ekyZNwrRp0/DZZ59hw4YNePjhh9He3o7bb78dzc3NeOONN3DkyBHJ6peXl0vZzYoVK2Q9uJcM+NCuVveedjp5j/JVTYJwX1iPzxGI1dXV8Hq9cl4sFgvy8vIwevRopKSkSIkbUXptbW0COx4zZgwmTJiApKQkGAwGtLa2ory8HDU1NdJINDk5GVlZWcjKyhL7gIEK8sif//xnqdO++uqrkZSUhOnTp2Pu3LmYPHnyoKPiyB9qoH0g55RBEbfbLc/DgKOaBQ5HBvT29mLJkiUhCM2h8NNQSyp4r5SJw5H1qm0cjrrUNA2TJk06Ldv5lBBfXdcdABxf/O7WNK0MQCaA7wKY9sXHngGwA8B9X7z+Oz14Nx9pmmbTNC39i+sMSjNmzIDRaMRf/vIXWRg6O4xuqtFzMjzbsvv9fuTn54tCaWpqQllZWUj3NkZI6BTSYGCRNo0SlQHJMBwVwE6iatcuMh+FZHNzs1yX0Bt2x+N3VVRUoKOjAwkJCdJsw2g0wul0yv2ymy8PUElJiYxCGKoSoQIxGoNtu7Ozs+H1erFz507k5uaivb0dcXFxmDp1KiorKweE3Q6FjEajwLPVYeGqIlGvzYNABWw0Btt+sxU86xe4z9wTrnVvb68IKWbIWIj//vvv4/LLL0dnZydmz56NV1555bQUh3qv4a/9o5PJZMLEiRMxceJE6LqOtWvXwmw248EHH4Su63jsscfgcrnQ3d2NpUuXYv369Th69Cg0TRNoPACBjhsMBjQ2NiIxMRHV1dWIjo4OmV3a0NAgsHUO63a73QLDV2uraNylp6dL5JXQ3bS0NBkZFR0djYaGBnkem82GqKgoOJ1OuFwuUZJqBkmtF1JhayerjTuVIaUaRiNHjsRnn30mnx83bpzw6+nCXP7ZKSMjA88//zyam5sxceJElJSU4MYbb5QRLqtWrRJZwo62FRUV+N73vge/34/NmzcLfJfBLoPBgJaWFhw/fhyHDx+G2+0WqDcNIu6H1WqVMovm5mZ5n/WFGRkZ8Pl8qK2thc/nQ2VlJYzG4AgY1lMx+KLrugQvA4EAMjMzxWgh/Eut2QMgThHHhcXGxoocZNbi8OHDIluZmSXR+Gfmh4Y41+GZZ57BeeedJzLydGg4fKtCgdloqrGxMSRw+XUSobjceyC0vlHXdXR2doY0PYqKikJmZiZaWlpkZALvnc9mMBhCRtqxPIXX93g8IaNpAIhTqzZFCZ8NSzuBM8zpHLCZC3mc1Nvbi4qKCvkMUUx+v19ka2lpqWSOgCAya+zYsdJQkPd8MgrXh9TxTBRQJgcCAZSVlWHs2LHw+4PNo8aMGYODBw+e5g4GiTWGqmMJ9DvxJDXoEq67+azh9ac8P8ysc89Yvw70lyKQD3Rdx5YtWzBnzhzouo6LL74YL7/88in5W3U6VBspXMeEw6pJPT09OHjwIG6++WY8++yzeOqpp7Bq1SosX74cW7duxYQJE1BYWIhzzz0Xs2bNQnNzM/70pz9h//79yMzMxGuvvSY2Vm9vL+bPn49t27bhgw8+QElJCfx+Pw4cOIDS0lLs2rVLHHDeI/mcdrTFYkFnZ6dAvgGII28wGASeTj3KRqEsw+EeMHBCvmVzME0LdtPu7u6W88CAH4M1RPDV1NSgqalJeDM+Ph4GgwHJyckSUOjp6ZGSNiKqEhMTpZnS7t278d3vfhcejwd33XWX
BHu4/yfb05ycHEHTAP3lbORLBlSIlgAQYv9zjjdt23BHlH0NrrjiCrz00ksDfuZ0HctT/X34Z8OdUq4Bf3KteFamTJky5PsiDUtbaZqWB2ACgI8BpCpOZwOA1C9+zwRQo/xZ7RevhTiomqYtQjDDCiAoMJ1Opwh7FcrJB1bhOaxFUDNvJ1ModBbpRFFR8bqqMuE1WRN6KmWiaZo0QaJSpkOqRp17e3vhcDjkwJrNZng8HlgslhBFwoi4pmnIzMzEqFGjRMEOV4moNRh8brfbjePHjyM6OhoejwdOpxOFhYUnve6pyO/3o7i4GPv27RNhTyf7ZMqDTE5FxntWnX/CnHi4CfdUFQfXmgeFiiMtLQ3R0dEhdSsD0VCUxlCzDf8ItHjxYrS3t+N3v/sd+vr6sHTpUmiahtdffx2HDx/G1KlTpZ5548aNuO+++7BhwwboerDTqMPhQExMTMiIDiqe9PR0OVd79uwJgbyoxmB6ejo0TZPGSW63GwaDQYw4ZgsOHToEj8eDnJwc1NbWhmQjNE1Dbm6uoBWIVNA0DW1tbYJyoLxgvdQ111yDZ555Bs8995zw1UBO6lAM6nCemDBhAvbs2SN85PF44PF4YLVaQ+ZS/ouCdPPNN2Pz5s2499578ctf/hLHjh2D0+mE1+vF7bffjoULF2LMmDHQNA3Lli0T3ZCfnw8gCGNjbefPfvYzVFVVoaWlBS+++CLa2tpE2dO4BILlI3a7Hb29vSGdR1lTmJiYCLPZjMjISBkQ7/cHx8u0tLRA13WkpaXB7/dLR0lmX9ht2mAwoLOzU2q06CC1traKE0HHxuFwwG63Izk5WYymvXv3yugiGv9qsIQyqqOjQ6CqNNqYMQSCxuKaNWuwatUqbN68eUCo7mDGMM/DqdAjqnGiZrMASPb7k08++bvIUAYuaJBSvzB4HRMTI4FpBp6joqLw4YcfimxQuyHztbS0NNTV1UkJhGpg045hsIMNAFXHnXxMXUkdzzo9QgTZKI7rzOt0dHRIUyZ1xjr/dXd3C6KDQcTs7GwUFxdLMHe4JQeqfAwEArBYLGhvbxe5ajQa4fV68emnn0rzuPb2dlxwwQVf2kHt6elBfn4+6urq5IyEO6uqUUw+Ux0gIIgeiouLQ1JSknSWNpn6R9sxcZGZmSld5nNzc0NqU4EgL+zYsQOXXXYZ+vr6kJKSIjbVyUi1h3hfA52twRyF888/H/v27cPhw4exbNkyxMXFoaWlBfPmzYPb7cYHH3yAjz76CEajEVu3boXb7cYdd9yBTZs2oaysDLfddhueeuopdHV1Yf78+XjzzTdhs9kwdepUyRrX19dj3759MjNYzexx3jfRHV6vV+o54+Li0NfXJyNiLBaLIATVshsGbFl3yeZXzM4aDMERjJS9rO+2WCySzbZarXC73XC73di/f7/YHGxEFhsbi5SUFJF3ra2taGpqEtuaPkBLSwsSExNRV1eHESNGwGg0oqysTH4/Vd29GhCcOHEiXnvtNemBozZK5N7zdwZ30tLSUF1dLQ4q/Y1wf4cwYIfDgZycHLGJeQ/8/Mkc6XDi3wwXzTDQZ8OzteGfOR25P2RXW9O0OAB/APBTXdc71Pe+yJYOKyyq6/qTuq6fo+v6OQCwfPly/PKXvxSnU603VPHZfMgxY8YA6FfSJpMJH374IcrLy0WhqKNVoqOjpS6JAkGFH3R1dYUYyczMqvBhGjiMYrJ5AbHyUVFRiIqKCil+1rRgR7Py8nJUVlaKEaRmXnlYOSLFbrdjypQpmDNnDkpKSk5w8k5FqoOnRtvZFZCRe9bqscvflyGDwYDU1FQ5HCrsOBzawGdVo/lcX7UemJ8jPxA6Fxsbi76+PlRXV8vzqrCCQCCAHTt2CDScDRtORgMpja/igH1TiTUYixYtwi233CJnYdasWbj99tuxc+dObNy4Ef/zP/+DxYsX46GHHpJ95IgawsRoENPAAoCDBw9i3759wmMAJNjk8XiQkpICl8slIzj8fj+SkpIE5suGMJoWbJ5gsVikeRaNv5SUFGRnZ0sDBwYhRo0ahczMTOkyHBkZKXUvQH9tF5EFwIkZ1JMJWpUGOjcGgwHFxcUh0GS2w2dN+b+on37zm9+gr68P69evl6zR+vXrZV50fX09tm3bhoiICKxbtw6XX345gH45FwgEUFhYiFmzZqGjowNlZWUhMp6ZEbXZC2eYMqhI56S3t1eGsTOjajQaZd4dv5flHA0NDWLQ6rqO7OxscXBYW61pmtQy0WlQ5RtncBMtU1dXh3379klmlAgitXZQRdkEAsG5nREREVLjqCKNNE1DdXX1oJka4ERjmN+rRv9PRicziqjreJ2vO4OqwjLVsTKJiYkIBAJwOp1ITU2VZ46KihK4Le0QtctvUVERJk2aBJvNJiN16ADTOff7/ejs7ERXV5c4SByFR3gk0N/40WAwSFdyZmbZXEZtOtjX14eamhpUVVVJ4xjVnqA9o+u6zOBNTEzElClTMHv2bIwaNQqapg0JgUXidYH+UibeD2uzzWaz1HMT1ky539bWhvT09EGhkUMlg8GAgoICCUirWfDBdPVADZ7YuJLnkPZGIBAQOZCRkYHGxkaZX0zHjTKdAQAA2LJli0DsL7nkkmEb+2oWaih0wQUX4PrrrwcAPPDAAzh8+DDWr1+PN954Q4JkVqsV27ZtQ1paGuLi4vDrX/8aa9askd+Li4thNptRWloqcomjDX0+n4y2In/y2Xmf3Mu4uDgUFhaGnHHqaNq2OTk5wsNEIJIHo6OjRVbTQQsEAhJ8MZlMMvecAfHi4mKMGDFCgmK9vb0SFDAYDNIRPSMjQ5zbPXv2oLW19YTJEkSoAJCkR3t7Oz755BNoWv9M9aHsJ+UuA05Av0Oq+jJAv+5qa2sLeY9rxOvQZlLl8GOPPYZAIICSkpIQNMNw+S783k9F6lmjjFG/lwEv9T5Unh5OZpc0pAyqpmkRCDqn/6Pr+pYvXm7UvoDuapqWDoB57ToA2cqfZ33x2klp2bIr2yuBAAAgAElEQVRl+Otf/4oDBw6ERKO5EN3d3UhMTERtbS3MZjP27t0bEhVVow2MUAYCAURHRyM7O1tGAFRVVQGA1EjQKOns7BTm5d+R4ehs8bBqmob29nZhdEY/lfWSjCqhhgPVJzDCyiYKZrMZZ5xxhtRgDscpVTM9FNxGY7AImxkC1UHv7e3Fvn37cPbZZ4vTQCja6RC/22azieJUGTgcGgD075WasfR4PDCbzUhJSUF9fb0cZApE1rpWVFQgMzNTFIcKe2ITqy1btuA73/kO7Ha7CFA1MjWU5/lnypoORDQw7rnnHtTV1eH5559HIBDAHXfcASDohC5ZsgR2ux0//elP4fP5YLVaER8fjyNHjgi0m0Ghffv2SQ0JR3q0t7cjJiYGqalBkEVzczPq6upEYOXn50umiR23mXVQBR35msY8M1k8h/n5+YiMjERZWRn8fj/MZjOSkpLQ19cHp9MpsoTX+PGPf4ynn356wA6np+KRU50T1obTqcjIyMAnn3yCc84557T26Z+ZrFYrurq6cM4556C
0tBTnnHMOli5diu7ubvzmN7+RANX27dsRERGB2267DUuWLIHZbIbT6cSGDRsAANOnT0dZWRkSExNRWVkpY7/o9EZGRiInJ0cg4CzzCAQCwmsmk0mgXk1NTWLEjh49GiaTCXV1ddD1YPOXo0ePIi4uDqNHjxa5UlFRIXAwADJzjwgRylyOWbjyyiuxadMmtLe3o7q6Wgwmzp40m80oKCjA7t27RaarxNfYsMRkMsHj8UhGgkFQg8GA119/HTNmzMDbb7895L0ZqtE8WBZIlf2TJ0/Gzp07v3Z5St1Bg5h6h1lsOvbs5BteA6brwXrLrKws0f+ssW9sbJTOzYT7MtjF6/A7yBdq8JZlQzRo1eZw6tp1dHTA7XaLjaLWdKpEB1jXgzXVkyZNCkGSDYcGCtKF7zEzuzSo1XEzbW1t0hWbyBqn03nKnhknIyKleL7YlIb3pqIg+D7vj+tFGK/BYIDdbkdra6v8PyUlBUlJSaiurpZgEgMOanMz1UjfsWMHDAYD5s2bB6vVKsg4dc3CbSAVBjmYkz0YaZqGTZs2YebMmdA0DU8++STWrFmDlStXCnqwuLgYR44cwZEjR3DTTTfhhRdewP3334/bb78dv/rVr1BaWorFixdj06ZNcDqdWLhwIeLj4/HBBx/g9ddfl67T7PzMXi+6rkt5hM1mw+effy42NMdxsYES95lNldSgMO1bBhsY2GD2lWcnPj4eVqtV5o339fWFdO21Wq3i5MXGxiIzM1P2mnOMOSJPTQoRij5lyhQkJibiwIEDkmDSdR2VlZXSd2PatGl4//33RSYA/Si78Aw+EGxOymdQE1XAwA2LDAYD0tLS0NTUJOVsqi3ODCqvQd9j5syZwqcqdD2c1wYj1e5WeWsgWx0YOJBC3ladaHU91Mzx6TioQ+niqwH4LYAyXdcfUd56FcAPv/j9hwBeUV5fqAVpMoB2/RT1p0BQiRw7dkwcGioVHoq+vj5RKFTEzAaS0dVoRXR0NEaOHIkRI0YI7ry6uloizDExMSFwL9V4ZTQtEAhIJocCmPVFnBEWFxcX0oCBEc6WlhZ0dXWJ88mN4s/o6GjYbDbYbDaMHDkSM2fOxNSpU08YS3AqYi2FevjUCIbZbJYZfFSOjIiVlpZKhNHlciE9Pf20a5SAILMyc8TsgMq4NJhUJ0GNZKmKlIoB6J8PW1hYCK/Xi5qamhDFQeXPg0MIyI4dO8TZIVRO3YPwdQ6HCQ30mX8WOu+886QpEvchJycH9913H2655RZcddVVkiVatWoVbrvtNuzevRsGgwHd3d3S9ZEG3fbt2/HXv/4V7e3t6OrqCjnDqampSElJQVtbG7xeL6xWKzo6OpCamoq8vDw4HA60tbWhq6sLNptNuvUGAsHxTGzuxaYeI0eORGtrK3p7e5Gfny+QtaqqKjgcDthsNqSlpcFms6G+vh4dHR3S6ZeoCjo9JpMJBQUFA67RYHs/VEE7efLkEL6cMGECdu7ceRq79c9NbrcbV155JXbv3o1ly5ahtLQ0xGhnZLagoAAmkwlpaWlYu3at1Ept3LgR999/PxITE7F79258+umnqKqqCjGYLRYLjMbgnN+Ojg6BkJnNZoGptbe3o7m5Wb7TarWisLAQVqtVZvTy/YSEBEFodHd3o7a2FhUVFRJwKywsRHZ2toxWqaurQ09Pj9RC0UAqKirCo48+KvB4jixLTEzEZZddhtmzZ6OoqEggzmrGh/8IJSY8mA4BeY/8/uqrryIvL++k/Esnfaikyu2BrhVO559//pCv/VWRGsBWIXfhz0k4vuqY6rouXTwBCHJn//79UuvGrKfRaJRMK6G5/K7wWeuBQABdXV3S9CcmJiakRpR/09zcjJaWFmnQqOp6fg4IZrLYePFb3/oWZs+eLc2ohpuxVo3Kgd5T+Uc10NXkgN/vx2effSb2lMfjkb0fjk5V7Rk6NYRe8vypn2WiYLD7BILr0dbWBk0LonOItMnLy4PNZkNlZaUErng9tQGZatRTn3Auqtfrlfnaqh3G33leeE+ngybQNA1333033nnnHfz85z9HREQEVq5ciUAggDPOOAOapuHjjz/GT37yE9hsNjz99NN44IEH0NPTg0ceeQSrVq1CdHQ0fvWrX+Hqq6+G1WrFiy++iFtvvRWvvfYa/P7+5pkMrFGnM7DDkjw1wMIgmYo4BIDjx49LWQ8dRsJ6qZPpkNI+58xgziYFgoHtpqYm1NfXhyCs6Bd4PB58/vnnOHToEPbt2yelPdwD1nBPnjwZF198MWbPng2bzSbZdL/fj+PHj0vAnU72rFmzTuDZcMdORYZMmTLlBNQlZTHlBNdJ0zS43W4JhDKgw4AA/RmuETOVS5Ysgd/vx0033QQgdP7uUHmKznL42VDPm+oXnYonB3NsgdMfrzeUvzofwLUALtQ0bc8X/+YCWAdgpqZpxwBc9MX/AeA1ABUAygH8GsDNQ7mRjRs3oqamRpxBRjoZ2WbaPBAIhDiqLLLm4hB7npCQgJ6eHnz++ecoKytDWVkZOjs70dfXJ1FIRkzU2hFNC6b1VZx8Z2cnOjo6oGlByCyhN7yP1tZWNDQ0oKWlBZ2dnYI9V0cAAMEi/7i4OIwaNQqzZs3CzJkzMWPGDOTn5w9bWKnKNVyRhCuw9PR0WS8KiEAgIMYWEGx2s2DBAskqD4VU55NwM5vNJkKdEBgeTL6uHib+n9czGAxwOp0IBIIzESMiIpCSkoLu7m58/vnnqK+vl+HR5BVCO1UHlJCPFStWwGAw4IorrsD48eNPgEuosDnew+kq9H8kqqiowMqVK9Hd3Y2JEydi3bp1uOaaa8RAzsvLw/Lly3HnnXfie9/7HiIjI1FSUoLIyEhs2bIFra2t6OrqwjvvvIOXX34ZHo9HalUAyCglg8GAtrY21NXVobOzEyNHjpTB1p9//jnKy8vh8/mQn5+PlJQU+Hw+cVY5K9hutwuUMiEhAZoWHDeVkJCAiooKlJeXQ9M0pKWlSfSbbeUZQCLKgRkOCv8bbrgB559//oB7ONi+DhXZoGkaJk+eLPJL0zSce+65p79p/6Rkt9vxwgsvYM6cOVi1apVkRTRNk2HfkyZNQn19PRYtWoTdu3dj48aNOH78OIqKiqSTLoMfXV1daGlpkVEEhEBSX1A+mkwmeL1eCTry3MfExAjUfN++fWhtbRU9U1RUhIKCApjNZtjtdvj9fikriYmJwVlnnYWSkhJ4PB4cP34cNTU10LRgnXRKSorABV0ul8yM5KivtLQ0XHrppZg9ezZmzJgRskYcmaYG/gCIM6TO1dM0TZoKAv3GEx2v+fPnD8jbakDgZDIt3NAejAYyWHRdl/EkXxdxtjiNLjoVvB+up2pIqh38fT4fPB6PyBqOovB6vejr64PFYhFdz/1gsIH6hB062QyFGVt2F+U9EHXV1NQkoyh4HZVvgaCuslgssFgsOOOMMzB37lxMmjRJgh/qug9GqjHKNVD18amIgXt+nkav1+vF/v375VpOpxNjx44dFkprMPhi3hcdbOnQDPScAwVO+Jo6VsZgMCAxMVF6FLAMSx31o5
Z/qGcJCIWAb926FZoWnAsfXuetIrFOxylViVm5G2+8EWvXrpXO+5qmYefOnbjllltgs9nw2GOPYfXq1TAajTJeRtOCdfwcNfjss8/KGLjo6GhMnToV48ePl4AKM5O0h2mH8z1N01BQUIDzzjsPo0ePRnFxscgpBoaIzgOC5TcsBSMvMHHBpBHrMPm7x+OBw+GA0+mUMYJZWVkiixmoaGtrk32lXUd7xmq1ykjG1NTUkJFcAKRGm03yurq6cOTIEdmv8DFJJ5N/amKFa0RnNTzDyfIN+i28NrOoav8b8h27fvv9fmm4NxxSn2EgGsgpVddKtZVVeRF+jYF+Hy6dUgLpuv6Bruuarutn6Lp+5hf/XtN1vVXX9Rm6rhfpun6RruvOLz6v67p+i67rBbquj9N1vXQoN3LXXXehs7MzBOqiLiQFwUCCggcHgGwujQO18J0KRK0H4fVZe8LIJutKWbfC2aNqXUVTU5PMJgrH0pMxjUajNEHKz8/HrFmzUFJSEsJUQ3GKuC5UgKSTMRrXid9FbD+ju3QaeQCysrKGXSeiKkwKyaKiohMU9kD3pcJbVKXIZ2JNltvtlmgzDQgV/k3+UOdIUbF0dHSgoaEBfr8fEydODHmfCvXLEIXNl8k8/z2IXfc2bdqE888/H4sXL0ZcXBzWr1+P//iP/xDlHRMTg8LCQtx999247rrr0NjYiGPHjuFXv/oVnn/+eQkqdXR0SEDJbrejvb0dbW1tYpilpaUhKysLTU1NcLvdsFqtclbz8/NRXV2NpqYmeDwe2Gw2pKamorOzEy6XC06nE7W1tWhtbZXZqlFRUXC5XLBarZKhPXbsGMrKyuScqnxOqD3HOqgR2zVr1sjYGVXxDOSIDsbPQKjhrp7Jc845R6L//4jBjL81uVwuLFy4EH/+85/xwAMPSNONyy+/HAsXLsQdd9yB2tpa3HPPPdi8eTN27tyJ++67D42NjXjqqafQ0tKCiIgI/Pa3v8Wbb74p2SfuB4fLa5omNcAs16AMYWfI2NhYmWNdX18vCIycnBzk5eUhEAigrq5OarTo1IwaNQpjxozBrl27cPDgQalZzc/PR05ODuLj43Hs2DFUVlbC6XSit7cXJpMJf/rTn2AwBDvtzp07VxA3Kv+oBg0dLRpfauaOgV0GYlRkEfXRj3/8YxnHE058nlM5JsMJ5gD9sp3PVVxcfGqm+AqJ38915Nmnbiesj8gpBqbV7AWDphx/pcp8IqrUHg80TukMEeIIIKQLOXWQ2+1GY2OjBFaZiQ3PIAKQzufx8fEYN24c5syZI939VUdoqBSOvFLXYqh/T31LZ4eoLNoenZ2dIfbTYPcYHkDmZ8P/8TwzMK4GGEhcfzWLqRrfbrdbXjObzWhqagpxCmifqOeH/w/Povb09ODNN98UNB2hz+HPdLJnHyo9/vjjuOOOO7B06VIcP34c77zzDiZOnIj58+cjPj4ev/jFL7B69Wpomoaf/exnaGlpkXrGiooK+P1+pKSkSKNMu92OlJQU3HLLLZg5cybmz58vjiMTFuqYnY6ODkElxsfHY8SIETh69Cj++te/4uDBg/B6vSJnw52X6upq1NTUiA4GggEktVkdZRt7vLS2tsLtdiMhIQGpqalISEiArus4fPgw9uzZA4/Hg8TERAlEsTdGfHw8zjjjDMyZMwczZ86UsjOVyE/MmnJOdUREBP74xz8iEAigsbERaWlpIZ8/FXGMjdrISO0BQN6i49rX14fU1NQQSLWasVfrrenUrlixAgBwzTXXnBIVE/68Q+XBgfhWPZ8Dvf9lAzAqnV7e9W9AdJ5oxKl1I1wIKmAgdLFUR5ZZHEYjOVaGkQvWeKjNKdSmBSq0hwXdjMD39PRIJzCgv7GP+rdUKoyMxsbG4vzzz8ecOXNQVFR02uszkHM+EIZ8sLVlVItNoKhAqYzp5HE+30BGONc4nMHV+wEgDRG4V0A/Nl6NeIYzuBrBpiPJaDsVA6EdfC3cuWftMhCsRfV6vdi0aZNEwpmRUaNA6v9Pl77KQ/l1UE9PD2699VYEAgGsW7cOBoMBmzdvxkMPPYQ777wTP/nJT3D33Xdjw4YNMquroKAALpcLdrtd6nwDgYC0oWebedaGMvudkJCAtrY2OBxBpP/o0aNRUlKCpKQkgWnGxMRg1KhRyMvLk0HdhOamp6cjOztbYHBRUVHIzc2Fz+dDdXU1jh49iqNHj8p4DjVSqdaIE8Lu9XpD5EdVVdWAkcBwoc8zMZy95plhJvUfjU++DlqxYgUOHz6M//f//h9WrFgBk8mEiy++GH/84x9RXl6OJ554AjNmzMAjjzyCvr4+2O12dHV1YefOnfB6vcjJycHBgwfx7rvvoru7W4KRdEKMRiNGjhwZYtQ2NjaisbERSUlJ0ryqo6NDuk/qui4OhNVqRWNjI8rLy4WHR44cidGjRyMhIQEGgwEff/wx9u7dC4vFgsLCQhQUFCA3NxdlZWWoqKjA0aNHxflkt+mYmBjJ6LIJ0kBEOTd37lzRWdQzarlLS0sLUlNTJRtEWUlZSnnrcDhwyy23hMjzoeqSk0XdgYH1crhhc7pwr9MldsUltLu3t1fkA50mBuRUW4F6yO/3o7e3F16vN8T5ZPkMM5YqYigmJkacIzqpNDq5H16vF06nU+oyVaeAe841i4yMlACKOrc0LS1tWL0qSGoWVjV+VRrqPhFJBgANDQ2yhkajEY2NjbKOPT09KC4uHlTXhqOqwh07lYg6Y5dZIMhf4Q4w7cnwbCwTHlznpKQkgVcze0WnW7U/wxuN0aahQ/vHP/4Ruq5j7ty5JzSM+aroySefhMPhED5+/fXX8fnnn2P//v1SP7906VI0NDRA14PNBCsqKtDd3Y0RI0agsrJS1rC6uhppaWmora3Fa6+9hjVr1uCRRx4JQSSqwQo+L2tsu7u7sX37drS0tAjfGo1GpKamCuSX/NHS0iJnkU2PeJ6YLKFjyyZJLLVg8Ke2thZHjhxBZWWl7D/3JzY2FhaLBWPHjsW8efMwe/ZsmTM9GJFPyQs8F5GRkdLg0el04sorrxxWEoIjlhiwYOBKldt8jwF03g9lD4NhlE/hkyg6Ojrw6quvSoJsMFL3cri8qDrFqvwOL99TPxvO81+G/78xDurmzZtF0DBqo0a5+ZCxsbEh2TfVGKVwpxJgBoXKhAIoEAjA6/UKA+q6LhlbLjwVE+GALpcLXV1dAPrbZfM9FTpCyM3YsWNxwQUXYM6cOTJDdTgbpSp3Gg8DHbShKBFd12WOFZsYEcZw6NAhcdC9Xi8uvPBC+Rv1XvhzKMxuMBjEqWE3RL5Oxc3rhUdMVSdVvW/uswr1Vj/H+1LHK1DguN1u+fw555wTcti+LPG7TsdI+HuSpmlYtWqVtEN/8MEH4fP50NjYiEsuuQTPPPMMOjo6sHjxYlx++eV48MEHMWrUKHz88ccS2PB4PIIsaG9vR2trK7q7u8VoY5t4Nm0pLCxEREQEKioqcOTIEURFRcFut0PXg03QOFQ7KysLOTk5yM7ORn19vdSbEgZz0UUX4bHHHpPh6ElJSfjud
7+Lyy67DHPmzJHsg1oPxuY3nKVHY5Fn/tZbb8UNN9ww4DpRoZzMiDvZeeDrg0GJ/6+TyWTC7Nmz8dhjj2HEiBGwWCx4/fXXsXr1ajz55JO48847sWXLFmhaEJ555MgRjBkzBm63Gz/4wQ/w4IMPYuvWrSEt+tXsFwAcOHBAeNNgMEitP7s/8xzQGKMT4vf7ZdZuREQEioqKkJaWBrfbjaNHj6Knp0e6DZvNZmRmZqKsrAyHDh1CeXm5NOcgCmfBggWYM2dOiBPCQAswcDdSGsc05qijaOzwHxFBHHHW3d0ttUw0ogOBAO6//37Rj6rxfjLo5WB6ZqBgpfqT74VnMb9Oot5X95RryvfoiKjzaencU+eo+kedv97d3R1Su8rsEIPCah0aM7ZdXV0C7QMQ0jiIdg3lHW2Kc889F/PmzcOZZ54pzuxwSUU7fVXEIDIdOOrqQCCATz/9VGwYt9uNWbNmhdgxqiNKe+pU/KFpmjTNofOr2gDquSCPqzBJn8+HlJQUxMbGSrCINgUTGKTwgDlfC88WMfD+1ltvSVArfF6x+vkvQ/PmzUNdXR16e3txxRVXYOXKlbjpppswbdo0REdHw+12o6enB6NGjZJSB/ZiYS1/W1sb7rrrLqSlpaGqqgpLly7FLbfcgiVLlkgdqzrShevA0gZmGXt6emT9Y2NjUVhYiAkTJiA1NVUmZ/BcUD6T7xnYIT+wmR3tPovFIt2RHQ4HampqxClVg882mw1msxlz5swJ6VQ90HqTN9UyMyL/OC6PSE7VISS/DZX8fj8KCwtPsHX5uxocIf8HAgEZZ2Q0BhuZpqWlSTM2IJT3PB4PduzYgYiICFx//fV/ExSfmhRT7aCBkBp/C9vmG+OgLlq0SIwJRq9Zz6A6ASwiVrHr6hgLIHTMCrMnNIKpSOjIEq6jRiEI901MTITf75euWeywpTIJo3asLy0uLsasWbOQk5MjkAM+03AofLOHAr06GdlsNlF89fX1sj6lpaXweDzQdR0dHR0YNWqU7IP6fcP5bl3XUVRUJAGG8PfUSBVfI5lMwRbvhw8flnu0WCwC+aDQV4MZ4ddQjSDuL+FG3MPwezpdGopx902kVatWISYmBiNGjIDb7cby5ctx33334cILL8SUKVMwa9YsPPvss7j77rvx9NNPC1S9vLxc6vrMZrNEmgmNUhvHaJomdXGdnZ0hBn1xcTESEhIEiutyuXDWWWdh3LhxcLlcqK6uRkdHBwoKCjBixAhxZJ1OJz755BN0d3ejsLAQP/zhDzF37lzJjgHAggULxAhVI5YApJGMmu2gITrYGTtZIIayYyBSFRG/S+32/S8KEnno/vvvR1VVFdxuN77//e9j6dKlmD9/Pn7xi18gNTVVjBuj0Yiqqir4fD785je/CWleB/SjLQAIXIpQ8jFjxkiU3mAI1hy5XC7YbDYkJSXJe319fdKYhsZDVFQUKisrUVdXh/r6evT19aGoqEgMiurqahw8eDDEaYyNjcXIkSMxb948zJgxI6S7aFlZGaKjo/Hb3/4WJpMppBM574+GAHmMRlQ4X/P7WlpakJWVBYPBIM9N/qbe8/l8aGhowPjx44e1T+Gw43D+JqmQ3oHo6w7SUK/TYaRBSieda0xDmx3+uc5ms1kMcuqznp4edHZ2ylxj8ovP5xMj3mKxSIaWDlh3d7c0T1ShezRSo6KiYLFYYLVaMWnSJGnmMn36dCQlJQ3Yb2KoRH4KN9CHGrBVgxEqkop2kqYF+12wg7vH48Gnn34qZ6ilpUVKOwYK+oXf12DE882RTrGxseIE8X3uKX/SYWa2lJm46upqOBwOuQ6DmmazWZwv9ezwOuQP2oJEQRgMBqxbtw4+nw8/+MEPhKdOZ78G+3xSUhLOPvts7NmzB8nJyUhLS0NUVBRycnJw99134+6778ZDDz2ESy+9FGPHjsWKFStw5513IjY2VpAAdrsdGzdulMDczp078fjjj+PPf/4zDh06JCgA1ZGiI8gzQueV95STkwNN03DgwAHs379fGiuxRpuylbWTlE20yYlIYNDI5XKhubkZTqczpLTGaDQKjHfChAmYM2cOpk2bdspaTDVQofIS+S0zM1OywhEREfB6vfjLX/4CIAhRV8dcnuw7KK9Hjhwp/BKOHlGRK/y71tZW4dfIyEgUFxejpqYGbW1t0jSSdhYA4cOXX35ZalFV3cP7GI7voP5NeHAxXM6r//9bBR2/MQ4q0B8doOJQO7QCoUON1QUnbAAILjDhA2pkwuv1ymwmCh4guPjx8fGIi4uTrIuuB+sT2traAECYQ2U+OrGE3EybNg2zZs1Cbm6uCMLTTXNTYaiMNVSoyEBKxGAwSAvwQCAg7epNJhNqa2uxd+9eGAwGeV3thMefqqE0FGZn11RG7dSIlqpAeD3WDrB5jq7rcDgc0HUdycnJJxw0FX6lNizgPqlCwe/349e//rVAT8ePH3/aexO+D183XO2rouXLl2PcuHG46KKL8Mgjj2DUqFF46KGHYLfbsXTpUvzpT3+Cx+PBo48+ihtuuAGBQABvvPEGamtrxcFnwyHO29M0TSCS5BnWV9GoYNazvLwcVVVVMJlMsFqt0HUdx48fx759+wAEaziysrIQExODAwcOoKamBsePHw+JuJaUlAyYuaaRyKyHCokklEit0eP9vvzyyydEIU/mfKo/w99TzwwDZQaDQSDm/6J+2rp1K+655x6sXLkSDzzwAIxGI55++mlMnjwZW7duRUFBAerq6lBdXQ2r1SpBJk0LdrNct24d2trapM6PQSnur6ZpyM/PR1FREXbt2gUgKNNV+RgREQGHwyGOBjNdLImgccD6UbvdjszMTPT19aGpqUma5zEIER8fj4KCAlx88cUYO3bsCc/c19eH+Ph4qaX2+Xz4/ve/DyA0GBnOf5MmTQppkqRm6FQjjnpT7ZCpZoHWrVuHs88+e0iyb6AzoDqmqkxUjUi+x8//vYn7TUNTzQzxfTqsJHVWJvWUOgeUWSU6f+q8dc7NpU1hNBrFGAdC4XMmkwmxsbGSiT/33HORnZ19goNzOsT9+SoDA+o1w20OOu38HOU1+ZBdWfn+6VJiYqJcg2uqQiTV+yS/xsXFCdKHsoF2HtEVqvMd3qRG3Xe+xp/kjebmZrmO1Wr9ytd+0aJF2L59O84880y88cYb+PGPfyz3YLFY8Morr2DJkiV46aWXcNddd0nwj3rW6XRKYE3XdaxYsQLTp0/HbbfdhqlTp0rCgoEVFQ7Pc6DaWWzSxiZidGBZjsDPkHjm6OQy2M2z5+dA1+4AACAASURBVHa7pYyOdiplUGRkJOLi4pCRkYG5c+ciJyfnlEFiINQWHozoGPOcR0REYP/+/bJm+fn5p0SZqHtNfgiHnZM3VLnDNTUYgujD5ORkadilIoK4z/y9p6cHO3bsgKZpuPjii0PkLX8fKrrvq7BtB5NVp8v/3xjrmtAYZjrViCbhfWo0h5EJFR/PzeU8M03TpIBdrSUgNj4+Ph6JiYmIjo4W6GlERASioqIEkqBCsQyGIETVYrHAZrMhOzsbF110Ec4+
+2ypx/sygoiGxpchdY2AUAOBAoARZTqju3fvBtB/oDjOgffEgz1QtHwwCnccVcdedQpoRDEzeuzYMYFYOJ3OEAOMtcCcW8fOm4xEE4KlGh3kH6fTiSeffBKRkZGYNGnSsCHX6vqqe0TD45tggA2HoqOjceDAAZSXl2PlypW4+uqrYTKZ8Pvf/14giz//+c+haRo2b96M//qv/0J7e7vMKeM5ZSdTGlder1cCOtwL1TlsaWlBS0sLoqKiUFJSgvT0dBiNRlitVjQ3N2PUqFHIyspCTU0NKioqUFVVJQEnFRoZGRmJhx56KERhhAdQqNzUGhen04mUlBQAwagoIZAAsG3bNixatOiUEfyB3leFO8/JQBmBf7RmWl8HbdmyBS6XCw0NDdiwYYMgJj766CN0d3fj6NGjcDqdyMvLkzmjERERyMjIwJNPPom1a9cKT/b29kofAnZf9fv9qKqqwptvvgldD/YWIMSPELTGxka5LuuYfb7/z953h7dZnuvfn7Yl2ZJteW87w0mcRQYZZBEaMAkkQCilnAKFMltOGWW3cNikbDiEw0iZJYULDiUlEAgJhIRMspdHHMd2HFvykCVZy9b4/SGex68+yztAOL8+1+XLtmx936d3PO/93M8KwGg08nW8Xi+SkpJgNpvR0NCA8vJyHD16FEajER0dHTCZTMjLy8PSpUuxYMECTJgwoVtaBpEVZrOZW98Q+03tknoTOtuALt1MBg6tfY/Hw5FDVKWYWuWIQl6SnqS/ZKT8XJDvBXrtpxIRSANdRr0YmSUSn2IunULRledIHlLSayIpbjAYYDKZotqfNTc3MylOc0b3pnNMr9fDaDRCr9dj5MiROPfcczF37lyYzWaOIOsvMd2T9PcafYkIqMX5DgaDfH5LkgSr1cpjp1KpeM+Sd+q0007rRhCL499fsVgsUUYjgXgxsoCuGQqFkJCQAKfTidbWVsTFxSE5OZnbBZLhTPtTzE0UPaj0OcR9LZLt5FyhEM0FCxaclPEX379ixQqUlJSgoqICN9xwA4qLi3HppZfivffew/bt23HeeechKysLXq8X//mf/4nrrrsOv/nNb+ByubgAncfjwRVXXIGsrCw8/vjjjJW1Wi2am5t5PInsI11BhhThLiqg5HK5WEdSPrVSGWkFZDKZoop+kT6l+5Geamxs5GKL4n6he5tMJixevBhnn302Jk2axBiurzUjRlT2ZqyJe7+mpoZ7uBOJsXTp0pjzQmtX7sSSJAlZWVlRuaeiXhRJPPo5GAxyWG8wGIzqPysanKIXn3oyU/qSfL30R4eLhLv4nLHIlViE1w+lf04ZA/Xee+/tFu9OB4e4EGN9UDow6AByu90chy+yX+QhJUOW2B232436+nq4XC4AXRX0KKRHqVRyVbCEhAScdtppmD9/PiZMmBBVJa4/0hf4PRkHSU+MN+WKJCYmcgn7QCAAm83GbJfH48GMGTNibuSBMoGicSDfMOJGJeOkuroaCQkJSEtLQ3JyMvcnFI1UMrLFDUprgwgN8dmJ6ACA5uZmWK1W+P1+jBs3btBhuaK34KcEXkOR22+/HW1tbfj2228xbtw4PPjgg+y9IsX4xBNPoLq6Gu3t7VxYIRwOM+McHx8Pq9WKYDCIcePGcShcWVkZOjo6kJOTExUSRCHBdEhVVFSgqqoKCQkJ0Ov18Hq92LdvH44cOcLRCSNGjODcUtrjcXFxePHFF9HY2BgFLsW5mDt3blTYrgjgyQAibwYZMwqFAvX19Zg4cWKvoWaiHpIz9KL8XNfGjy2ZmZkwGAx45ZVXcNNNN+GOO+6AWq3Ggw8+iGXLluEXv/gFzGYzHA4HCgsLoVAosGTJElitVtxzzz34y1/+whWkiYHPzc3lVjEAGEyVlJSwR5byzRSKSGsrKp5Fhi7pGMqHamlpQUtLC4cEajQamEwmpKSkYOnSpTj//PMxc+bMmLpT9OKHw2Hk5uYiFAqhtbUVKpUK1dXVqK6u7lYIQy4EEMW1TV90fafTifz8fCiVSuTk5MBgMODQoUPcbiwcDqO9vR0PPvggfv3rX7MhJN6jN1AjAip5Tpe4D3o6r39soT1OlVVjeVxE4lR8jYq+iUYKXUOr1XJFUcIeHo+HQxN9Pl8UaUc5xGScxsfHIy0tDbNnz0ZpaSkKCwv52sDAw3d7e30o8yDqup6EvJlkuNF68Pv92LJlC+tJl8uFiRMn9nitgTwnGZ2xiH05BlCpVKivr4fH40FaWhqSkpI4nFqv13NfVIqsIIxCa0dM1RKLjtF3wiJEgK1du5YL/Ihtf3oS0bCJFT0hjsucOXM43/2BBx7AJZdcAovFgoyMDEyaNAlGoxGzZs1CSkoK/vKXv2DEiBH46KOPUF1djaqqKrS2tmLp0qXIy8vD3XffjT/96U946aWX8NZbb+Gdd96JSgugz0brlkKeyeii30mHkNODIvba29u5zQy9R6vV8u9OpxNOp5PbL4nX0mq13JbxvPPOw4IFC3os6CWKSPKR/SD+3pOQjRAIBLidDXmSyQYQ51HEoqJOFGXMmDGMf0TyRE6iBINBJCYmcr0DcpaRzhA7DxDpT+siHA7j4YcfRigUwsUXXxy1VvpL+IhEjxjKLL5O9xNfl99HJP5Ez3BfY9+TnDIG6pIlS7jSKikFWlx0aJOXVcw3I9ab/tbW1hZVLEOtViMhIQFZWVmsLIgRIW8OlRvXarX8vuTkZCQkJCA+Ph7p6elYtGgRx7pTL7v+Mswk8smSH/KDFdFw7I2JTEtL43tVV1fzAaxUKnHw4EEEg0HYbDYUFhZGAf/BPs/EiRM5BJRy/oAuw442dUdHBxISEhhUUsiUSqVCW1sblEolcnNz2SNGZezJiypW7RTZVJHhJsD517/+FX6/HxMnToxq4tzTZxBDREUQJm7EgRrup4KUlpaivb0dy5YtQ2VlJcLhMO69917U1NQwexcMBjF8+HDceuutuPHGGzlEjdovkXcqFArhu+++47mOi4tDZ2cnkwFqtZrbd6jVajQ1NSEQCCAxMREGg4F7pFLe1syZM7mYjBgeSYr9xIkTaGxs5IJOQHflR1W0ac5oPdH9KeyOjBLqc/zYY49h8uTJMcdMjCQgka85+eHzc/Os/xRy5MgRbN68GZIkcfXZ+++/H3fccQeefvpphEIhPPLIIwAi5OOVV16J1atXY8mSJVEFtKhlgt/vZ31OeoD2J/XEBiJhtmazGWq1mtes0+lkr5fZbEZnZyfnqZK+UKlU3MLg7LPPxpw5c2ICUFoHsbyiYtqKWq3GP/7xD7hcLsybNy+m3hXJvjFjxnBFYBLRa0+fW6vVoqamhgtsEBFD40EVhG+44YYoANIbmBZ1XazcRfHvci+qCFx+TBE/s6izRfBKZDWNoUhckbcoFApx+DbhCRKPx4PW1la43e4oI5bWAEWJpKSkICEhARaLBaWlpZg2bRp78WN5JvojsXCFPNxwsCL3kvTksSLvIxAhwDs6OjiPr6amhr2UTqeT/3co64D08IgRIxAKhTjXnETEGPTsFosF8fHxHCEnGqKEISwWC78mFmO
kyr5y8iWW4yQUCuHzzz+HQhFJJ5s/f36vkQoidqNxESPX5Pvsvvvu46r2FRUVSElJgclkwqZNm2AwGPDee++huLgYv//97xEIBHDzzTfjmmuuQTAYREFBAYqKivD888/j5ptvxhVXXIEtW7bgrrvuwmWXXYbLL7+cI4so7YqwOOFqEbcRSUCeaIVCwXnWZLQHg0FYLBakpaVFFckkTzYZ/ET4GY1GnHXWWewtHTt2bL+MHLl3UsTXPQn9jcaZ8pqzsrLYYfPRRx8xuULRNeL7e4t8pPVDKYSkW8Q5JxL0yJEjnO5RWFgIrVbLZw6FpIskCOl80k1ffPEFp5r0h1QS/y4a2OJ51dMaJxH3gTgmcsJzsHLKxJtR42Cfz8deHHFyAoEA9xwVF5WYMCyGoer1evaUAOBiRzqdjjeWyFoRI02VHBMTE5GZmYm8vLwo1mYwCp/uIzIfJIM1TGmMxHvIw6xivUcEbGJI2LZt21BQUMD5XUVFRTh27Nigno2Ech5EhpmekcI4xFBtsVgN5a6K3rf09HTU1tbyZhKrLNJGoc1L16aQm87OTgZuer2ew2+oLLx8DAm8iKSFqFh+bgapXLRaLUaNGoWOjg60tLRg0aJFOPPMM3HfffehsbERaWlpXJAmJSWFw0cIsBGJQGtwxIgR0Ov1KCsrY7KoqakJ2dnZCAS6+hATi1pfX8/eK8pDPeuss3pct0Bkjw8fPhxlZWU851OmTMEnn3wSs8K1aFCK+48M7wMHDiAjIwNHjhxBdnY2R1P861//gsFg4J6Eco+LHLSJ94r1+r+ld5EkCSaTCdu2bcO+fftQWVmJ2bNnIyMjA1arFevXr8fzzz8PINLCKhAI4Nprr8XKlSvR3NyMgoICPrzpcBRTMygfk0Jp4+LiOF+UWofQ+UEEKIWsieeEyWRCYmIipk+fHnO9iZ+H9kZv4f90T51Ox3pvwYIF2Lx5c5TBKXr2CDxVVlbyeibWWzwTOjs7UVhYiPLycv6MYk4U5YY/8MADuOeee/hafXk95eegqHd7ew346QrJkd7SaDScP79r1y7OY6ZQa3lEDgDWWQRM6SyhlkSdnZ244YYb8M4770Sdr0RqEiFOrYfoPsDQwp9F/RLLIzwQ41Q+b2LUWqzn60lHk4GlVEYKhuXn5zP+amhoQFJSEu858qABg6+AT3NHlV6psrK49mhsCMiL5wF5wMiQBMDkE7U8pDVBfYvF9dHZ2RlV3ZnWN5Hxzz77LP74xz/CYDBg6dKlWLlyZcyQfxq7/Px89vKSMROr2vIVV1zBzobVq1fjhRdewPXXX49XXnkFS5YswUsvvYRXXnkFN9xwA0fNPfroowAiOYv19fV44403IEkS/vGPf2DDhg1Yu3YtrFYrsrOzmSSkyCbKyScdRZ83GAxy2hXlrQJduJy8jnT2UtsbsS6JQqHgwlmTJk3iYnhi8bJYIse/dN+Bihg9Eg6HkZiYiOPHj7P3l/JQFy5ciKamJixevBhvvfVWr3pdLpMmTcL+/fvh8Xh4vEhoPzidTiQnJ0OhUDBuEsN+g8EgY1gxzJyevaOjA+vWrcPZZ5+Niy++GG+//Xa/KnXTGhMJdfn+AbqTjSImi6V/TpacMgbq7t27+QClSSQDiibHZrPB7Xaz4iePCDHBALg4gWiYtrW1cXPslJQUtLS0RLEExDoDkeI+JSUlXMHsZOSV0maUK7LBXEs0xIGBVdoVlbTJZGJDrrOzk0t4KxSRxvZnnHHGoA1U0SBIT09HS0sLK3uqpCvOj6joacPQeHV2dnLzbzI66TOIBwiNi2goiPcBwCHfX3/9NaZNmwaj0Yi4uDj2otFBOWbMGDQ0NMBgMGD48OGoqqpCY2MjUlNT0dDQwJ/hZDDUP5U4HA5MmTIF1157LZ577jn8/e9/x4cffoj7778fDz74IBwOBx544AG8+OKLaGlpwaFDhwBEquDS2JD3WqvV4siRI9z7NjU1FTabDYFAAM3NzTAajdDpdLDb7ezdoop9Y8eORUZGRrfni8Ww0+8UDqPVarnaM4kIsufPn4+1a9fyfNH+ob0fFxeHw4cPs4FCwPKLL77Ac889hzfeeKNPwC7erzfAKQKlf0u0+P1+pKenY9y4cRg9ejSef/55HDt2DH/4wx8gSRLefPNNKJWRXE2DwYAnnngCEydOxD333MN56Bs3bmQPPgDuYU2Em1IZqTlAOp3IylAoxPlzVAlYzGNXKpUwGAwYP348MjMzGRj0JHJ2XS7i4W+xWNjrRsYSpZn0FhpFn0EELLQOKRf28OHDGD9+PJOuNBZkzJJedDgccDgcuPrqq/G3v/2tz7kinScHMfI1LzcQfko9KRr65G3Pzs5mEoIAcSAQQGlpKT755BM2nohAIKLc5XIxeS6GVBOgJgBO3u78/HwUFxdHndtDHQs5EUZn12BAYn+9In09cygUQmJiIlpaWgBE1hXhM7/fjx07dqC0tBSSFPHcZ2RkoLGxMco72Jtu7In8CIfDKCoqQkVFBe8hWtu0T2hexIKb9Br9j1qtZv1isVjQ3t7era4F4RQiVcXxEseecIzVauViQUSSiASFSqXCueeei7179yInJwcWiwXZ2dnQ6/X46quvkJKSgpqaGvZikhw5cgT//d//jRdeeAFnnHEGVqxYga1bt+Luu+/Gp59+ivLychQXF+Oqq67C1KlTUVhYiN/+9rdYvnw542kyEn/5y18yibd9+3YEg0E89dRT/Lw6nY7bKwLgc5/IPopaJF2n1WqRkpLCn9HtdnNNGZpDquSvVCqRnJyMKVOmRJEH/TH+6PqDwdTydS3uSdHZUV1dzX17dTodfD4fhg8fHqWb+xKKxCFsAXRFa9DPVDSM9Detn9zcXNTW1ka1DCPdQtcT92ooFML69esxa9YsxjyxPre4VsXxlu95OelI75U7wno6A06GnDIhviRUhCgUCkVVadNqtewVJYCgVqtRWFgY5fYmYOvxeGC1WtHY2MjhJkqlktktYtDI0J04cSIWLVqEyZMnMxM3VKGFPNRcRRGYk5EZC8D3JfSZqKgNGepyFratrQ35+flR7Wb6en45MKP/LywsjPJiAtF9ZIFIwR7x/eIz0YFA10xOTuZwNwJbxDDR9ehnMWwL6EpiX716NeLj46FQKPCLX/wiatNZLBao1WrodDoMHz4cKSkpSE1NRVFREdLS0rid0LBhw6KMkp+blJSUYPny5UhISMBtt92Gq6++Gs8++ywOHDiAlJQU1NXV4e2338Z//dd/oaCgAJ9//jlaWlq43QJVTA2Hw8w+E6nT1tYGvV4Pk8mEYDCIkSNHoqWlhQ3T+fPn44ILLsCCBQuQnp7e7dn6Cp0nAPLXv/4V4XAYM2fOBNA9rAtAVM80+qICUcXFxexNEnNu/H4/1qxZgwsuuADAwMJ0aU2I+0kesvVviZY///nPuO6663Dw4EFuQG8ymfCnP/0JwWAQv/3tb3H8+HE0NTWhpqYGr7/+Om677Ta8++67uP/++3HkyJGoCAfa9yIrTI3XfT4fnwdEkrS1tbHnlJrHJycn47zzzsPSpU
tRWlrKLQh6k75AkggQJElCdnY2AKCqqorTVOrq6riIV29rZe73bRVobQMR/dbU1ASn04nMzEzY7XaMGDGC9yyF3YXDXYVfvF4vli1bBr/fz1Us5SLuR3m4oRjyKK57WuuxwoB/CiHdRJEcSqUyqjc6YYxvvvmGzyM6TzweD+rr6zn3ngAdjYvVauU1Rr2dZ8+ejYULF6K4uBjA4Lw7Pa2nWOG79NpgziO6lniNwcxZQkICry+j0chj5PP5cPjw4ShSZMaMGT16aHt6RqA7FpGkSGsbMRxVnGfK4yMHhEiwiGG+ANj5Qe+Tk91iRV9aG6IBLBrEhJ3+9re/8T2o7gVFDAUCAaxatQr79u3Dzp07cfDgQeh0OuzevRtmsxlmsxkqlYpbrJFs2rSJx/qss87C66+/DpVKhaeffhoOhwPp6en48MMPsWzZMrS2tmLevHkYPXo0nnjiCbS2tsJoNOL48eP4+9//jttvvx233XYbPvzwQzidTlRUVEQZ+WQc7dq1C9999x2ampo4vFc0akwmE9LT05GSksJ9SyVJ4igsoMsYM5vNOOOMM3DOOedg0qRJ/fbA0b6le/YnhFdcO+Ke6Y18IQKC9IDT6URDQwM7bwa6lyk1QCRO5M9Bz0Z5r83NzTxetGYoxFpcb0SeAODcZ4VCgaVLl0bhIdJvGo2Gw81pjffmaKH7xBqnwUY+DEROGbRkt9sBdIXUiJY50FWUgBLbgchCOnToEPc3I7aOyojTxJKCIAaVFktcXBwmTJiARYsWIS0tbdAD3tsGkTMOtMEGcpAQO0pfscIb+isE2EOhENra2qIUNLHGZMRT6EVfQqwcXUtc1NQHDgAbNoFAgDcLhXyKBwZdUwzlputR2XYSylEWi+8AXd5i0WAgxapWq/HVV19x/gRV67RYLKisrMTHH3+MLVu24M033+TcysrKSmRmZnJrE4/H0+vGPtXl0ksvhVarRX5+PnJzc3nsS0tLUVtbi7y8PLS1teH3v/89amtrYbfbo/oE0xoKh7vaBpDyNplMSE5O5v8pLi6GQhEpTrR48WIkJSXx3MrHrzdAS8/Y2dmJY8eOMRiZO3duj6xrKBRiXUBrwmq1QqFQsC4AuloU0TW++uqrqMqUsUT8DHIvqrh2/y29y0cffYSUlBRs2LABd911F0aNGoVAIIBFixbh+eefx5o1a7gYDXlC/H4/li5dinvvvRcOh4Pnj0g8hULBuX0UiUFzk5mZyfmDra2tAMAeMpPJBIvFgtmzZ0Ov1/e6x0Wjrb8MvrhGyKCmfsCVlZUAgFmzZvV5DcrFJ3KI9GdWVhbn+xEbT4CLvECkF+lzke6cP39+t3vJQzfpXuKzyMdDvhdOFSFCk34WjWgxxJv+TucFFTuSG4b0GWtraxnsZWRkYOrUqYiPj+/Wb/vnIEOZNyL2w+Ew53RLUlf/V1ofoVAImZmZHP45GJE/o2hQ0RkvzlcsIyXW77QnqX+3eD8iMeW4VPwfcW+Hw2E2bEKhEKfUECZqbW3FsWPHYLfbUV5ezhEgtOdycnKQnp7ejTBwu91ISEjA8ePHeY0+/vjjTNJVVVUhLS0NixYtwq233opQKASDwYCUlBScf/75KC8v557nL774IntitVott3IzmUxREWrZ2dlsFEuShHPOOYc/M2Fvn8+HhoYGNmJJTykUkZQtnU6H6dOnY+HChRzNMpD5Fx0p/cXqsea6L2IkMTERwWCQCYJQKIQdO3ZAoYi0UBJ1RF/3pM9YVFQUhUmBrtQDsX8pjRkRHBaLhXUROeFobdFZJzqeAHAfZ3mRyOHDh2Pq1Kk4/fTTMWbMGGRkZHDxNnpW8bs4ZuLn6mkv/RByyqAncdERUyCyZSLwE9ktuQdNPEhJ5KyYxWLBOeecwx6cwYbHAD3no/UGtPu7MeUbSlxAg33e5ORkXuypqamsXFwuF+x2Oy94v9+PyZMn97nBxbAl+cIGIptG7IlKnmwS2lTieInzTtenNhLEztJzA12Hihi+Qwej+Ez091AohE8++YTDusaPHw+9Xo9NmzZhw4YNqKmpwa5du+ByubBixQoOI21paUFnZydycnK4h+hPlVc1VHnooYfw8MMPY8+ePTwG7777Lu677z7Ovb3qqqvw2muvIS8vD3a7nYuDJSYmIicnBwCY6AgGI+XR1Wo12traYLPZ0NHRgYyMDDQ3N+OSSy5BaWlpj8QKhXjLRQ4GgUjFSOpnp9VqucAR0B24zJ07l70kZNxYLBYu5U5Av729nQ9kIKKD7HZ7tzxv8t6LURHi+hdZ91MNoJ+qUlFRgdraWlxxxRV48skn8emnn+Kaa67Bpk2bYLfbMWfOHCxbtgwrVqxAfX09LrvsMhgMBmg0GtTX1+PLL79k7wWFP1VVVcFqtTLTD0Q8PDqdDg0NDbDZbJgzZw5UKhXi4+ORkZGByy67DKWlpRxF0xdREuusIRH1pPj/9DeS5ORk6HQ6aLVavP/++1CpVBg1alSP54e45sj4pmtT6CmF9RLwKCwsZKAlb+EGRAjJu+++mwkZpVKJadOmYdy4cVi4cCFKS0uRm5vLhi/dP9Znpc97KhF3Yu6cfE8qlUr2Woh4gkK9e8ITNO55eXmw2WyYNGkSzj//fJx22mlcgG0omILCAWlsaT30Fv44EFwhfpdfY7DPLaZmxcXFsVFK2E7EFxaLpU98Qa/JSX4gmvjrC2OI4aAicSheg9Ys4QyqCC7HGeIZEQtnyMMnJUnCtm3b2FDrC2usXLmSK9rW1tbCYDAw1iAhHEJG6hNPPIHXX38d//M//4MLLrgA7777LlQqFc477zxceOGFkCSJDZvf/va3uO6661BYWIhnnnkGc+bMQWlpKerq6jB16lQm7HQ6HZ99tB6NRiN7xtetW8fj09HRwS1ixPXqcrn4faWlpViwYAEbvgORWAaR6Entz/sHsqZTU1N57xPZsXv3bl7DFH3Xm4EqOmuASO0EMUKB9A6R/uJ5Qo41kUCj56cUKaBrXdMaJCKWCgpecsklCAaDSE5Ohs1mwyeffIJ3330XSUlJOHz4MMaOHQsggqdi7TsgOtxfTszE+v+TLadMDqrb7YZCEcnnoNBA8sxpNBq43e5ugyAWSBIBrhibLXrnlEol8vPzMX78eHaND3RgxYkUDduhhsfQtUV3ek9G32CEDomkpCScOHEiKkSJQN2ePXswY8YMKBQKuN1uTJkyBRs3buz13v35vIWFhWhoaAAQYXfEmHtRcYjjFw6H2YAkReHxeJgJpCIGIrMUDnf1eKW5kSeUS1Ik7ESv12P37t0YMWIE2trasHbtWtTU1CAQCKCxsRF+vx/btm2DRqPBiy++CCCS7J6VlYW4uDhYrVZMmjSpG5Hyc5G0tDSkpqbizjvvxN69e/GrX/0Kzz33HIe4Pfnkk7j11lsxevRoPPTQQ/D5fFyl79ixY2hoaIDRaOT8k46ODjQ0NMBisXCZe2pU35OIIEIOQMT1Lz9ccnJy+KD461//iltvvZV1hfz9dG3KMxIP0M7OTgwbNgyVlZUc+klF0oiVfvTRR/H+++9zsSzKS
Tl6DO4LvwAAIABJREFU9Cjcbne38JcfQ2n/XxOPx4OKigq89NJLkCQJy5Ytw9q1a7k9BwEspVKJNWvWYNmyZbjrrruwbds2JCYmcq85cf4NBgOzyPRFeXEE/tesWYNx48ahqKgIQO8hS2Itgf7Mq5ys6EkyMzPR1NQEt9vNHpqmpib+vKKQPqNrzpgxA9u2bWMdRMYp0OUtbGxsRHp6OueqElAV0yHovPR6vbjyyivxxRdf4Mknn4Rer8ekSZOwePFiZGdnIy0tDTt37ozKjRVz/wl4iaDmVBCFIpLyQ55qqjkgB3q0LkKhEIeE0us94YlwOIxZs2YhKSmpX0VJYkksTCHHAfQ5Bnv9nnDEyZgjWgeBQICrtZN+pequkiRF4YtQKIQpU6bg66+/7vH87AkMx5L+YgwS8WwRI68oNJiMXTJIaa4DgQD8fj8bcLQWCIN6vV5otVqoVCquebFq1SpoNBpkZ2f3iTXefvtt6HQ6mEwmnHnmmdDr9WhpacGkSZP42ZVKJY4fP45//etfqK+vh91uh0qlwqWXXopzzz2X27WNHz8el19+OZYuXYoPP/wQd955J/7yl79g0aJF+PTTT3Hw4EFMmzYN4XAYH3zwAe655x6MHDmSSQTKmaX1SJ5Ql8vFBBftAzKoxIgNi8WCM844I0qv9oe0pTEVMVss6e1asWpD9FfojAiHw/B4PIwbKDVtyZIlWLlyJX9+ukes84PWHq0V0U4RCTCxpgk5iFpaWphMb2ho4D1GBbvoXBTHi9Yh2VMjR47EO++8g8rKSgSDQSQkJODGG29kr3piYiJGjhyJjIwM7NmzpxvhAkTbOQOxcYZiC/H4DendJ1EcDkdUwQrR20UubzHuPyEhgWPzxZwQOqRJMep0Ou7PSF9DHThJ6vLWyYHpYK4rhk7Fut5QDVOgy8soMu4UFkfFDDZs2MAHc0tLC/ewjHX/WGxnb5/PbDZDoVAgISEhipkTFaBcwYlMFIEMkVESFQlVBpTnogLdw3A6OjrQ2dmJ999/H5IkYfny5Th8+DAfKOKBS7mWPp+PG5BPmDABY8aMQVpaGhdO+blJZWUlfvWrX+GPf/wjampqUFZWhnPPPRc33XQTVqxYgQsvvBBjxoyBQqHAAw88AABc/ZrCJ+kgI/ZOo9EgIyMDpaWlMY1Tmru+QiJ78jaREGgQwWZSUlLMkC4gEjIp5oKLxIwkdYVz0fzTGiImOyMjA6+//jpeffVV/PGPf8SWLVswYsSIbsxibwDy57hGfiyxWq2YO3cu6urq0NnZiVWrVuHzzz9HbW0tPB4Pnn32Wbz88st48MEH8eijjyIzMxPhcBiTJ09GUVERXC4XXC4X71UxBIvybWh9qlQq7rE7ffp0Nk5jCQEKsbhFb3MskpX0/v4IkWa03tra2nDaaadFrVX5PQBw0RcCkvKQc1G3EktOxZKArpoGoVAIfr8fjzzyCGpqavDMM8/A4XBg165dWLFiBUpLS/HGG2/gyJEjGD16dNQZEmuv9vS5f6pok1AoxMWzCPRSGw2SxMREPkv6iycotUg+R4MROTktvj6Ua9J3+hLX5pDBo+AdpWulpaXxmhOjGhwORxS+cLlc3AqsJ3wh6vO+pDeMIZ8fWr+0Z0TCQaFQwOFwQJIkTn0RcQbpFxHE089i8TH6G+HE999/HxUVFX1iDaVSyS2LysvLYTKZGGuQJCQkwOVy4ZZbbkFNTQ0kKdLSLzU1Fbfccgsbszt37oRWq8XUqVMxb948JCQk4LvvvsPq1atRW1vLz0ljnJ2dDafTyXNLepT6l5PxIuJuimgUe3bSzxaLZUDeUnnUyEDy1+VYMVakR1/vJ6HoQppTrVaL9vZ22O12SJLU7cwQvee96YGSkpKozySm+4ltxCgChgxVsonoHCIyPZa9AEQcd08//TSsVitee+01lJeXc5ub1tZWHD16FDU1Ndi2bRsKCwvhdruRmZnJ76dnEMdUvE9/HTL9wUV9ySnjQfX5fFy8R6ykRR9QLGIiHiIkNFlKpTKqfYW4aAaikHs6MERjCjg5YXxirqR8owzmEJEzrvJwF6PRyOEYXq83qsw+5TSRMqYNKubnDUYKCgqwf/9+AGBFTEJtZESlL34GMqYlKRKCYzabmakVPcBifL487JJIBTqEfD4f9Ho9XnnlFf4fqvxJngagq3ACFS0gEqWpqQl1dXUYPXo09xn8OUlycjKefvpp6HQ6Lis/e/Zs2Gw2vPDCC7j44otx33334bXXXuNqzpRLRP1P09PT4XA4uNgMGa6i0MHeH+ktfE38O80rVRNWKBS48cYb8dBDD8XcLwqFAkajsVslX0mS0NTUhNzcXFRWVsLtdiM+Pj6qSNddd92F6upq2Gw2NsTF/Bn5/XryBPxcQ8F/DElKSsLUqVPxv//7vzAajairq8Of//xnTJ8+HYcPH8aNN96ITZs24fbbb0cgEOCWHw888ACCwSBaWlpgNpu5Wrjf7+fQXtKBKpUKcXFxHLorehtFket6eRuC3kR+ZvUkoqFABA8ZUe+88w5+85vf4KKLLsL+/ftjhg6Kctppp2H//v2s20hXUvEvpVKJ5uZm9tRSDQDaC3JC580330QgEEBrayuTNDQO6enpGDVqFA4fPjwoQlasvv5jihhZQyAbQJQXSD6+feEJKnzSX/0mxwuxMAW9fjIwBd1LPAMH4pHsSeTYAujCF+FwmI1DMnCoii2dv4QvyECdOHEitm/fPmR8AfSOMQBwhwjReCURI3AojFKSpG44g6qCE5FJZ4Xo+RbnmqJyJEnC+vXr+4U1AHDPUJPJxFhDnIPExETk5ubilltuwfTp06FUKuFwONDW1oaWlhaUlJTgyJEjuOyyy/Dll19CrVajpaUFCxYsYH2pUCiwatUq7NmzB0AEExw5cqRb0UkSilKLFfJOhdgIr/XWY54+gzhW8j0wGJGP/UCNW/E6aWlpaGxsjKpt8vHHH+M//uM/mKSgUP7+CpHoHo8nSh9RAVFRF9NaI3xFmJ3+l84MGjciiciLr1arsXLlSiZb7HY7RzVQ/29JktDW1ga/38+YTkxdOhmkOuGtwV7vlPGg0oDTwUpKgKqAieG8QNcBQoyVJElcREkswEOMDn31l5WRMzJyQ3corKm4EOVf9Hfxe38l1rPG+p/ExET+/cSJEzx2SqUSBw8e5EPb4/HAYrEMuiATSTgc5rBtURmQwUFebVJuYp4I0KXMqGCFQqHgSpcioyP2K6V10pPRSgdMa2srVCoV9Ho9M+IERAjgkWGWkZGBvLw8NDQ0IBwOc4/cn6NQj8nHH38c119/PbxeL/Lz8/HLX/4SZ5xxBm655Rb8/ve/R319PVwuV1R4iVhNWqmMVMOkKtvE8hG72pcCF/dSrP8Vr0f/DwBFRUVob2+HwWDAnj17WAHLRfS20/vFcCSgK4SFQnvF9eP3+6FQKFBSUoLU1FTExcXhmmuuwZQpU3DRRRfh8OHDqK+vh9fr7WaYDIRt/P9ZSkpK8Nxzz+Gf//wn9u/fD6fTycBg0aJFmD17Nu6++25YrVbceeedeOWV
V+DxeFBdXQ2XywW/389VF6nJPOkCYprJWxMXF8d7XZ7nJ/ewA72HVcr1rVyPk4j6XMwnotcyMjIgSRFv7/HjxxEOh7libCyhM4zyZ0VjSvSciqF34XAYZrMZGo0GbW1tURFKpAs9Hg9cLhf3UE1ISOBzdMqUKRg/fjxqa2tRUFDAIKi3MH0aI/F5xH33YwmFSxOII4AHdEUWEQglPEFejJ7wBJ0V4jrqS34qTDFUXAH0D1uIZzvp3BMnTkCpVHbDF16vFx6Ph1uMDFV6wxj0eUWsKK5BOR5RKBRcaEaOM2idxMpHjUUAKJVKdr5QKG5vWIOwitfrRXFxMaqqqhhrkIRCIeTm5qKqqgp1dXX44IMP4PP5UF1djQ8++ACHDh2CJEkYMWIEQqEQZs+ejbvuuguJiYnQaDScIhMMBjFz5kyO3CAMTmc8Ef/0fCL5RsaZWq2GwWDg6ESqmEye1Z6EdFOsPTAQw1KM5hDf159riF5L+XXF+evo6IBOp0N1dTXC4TBaW1tRUlLS731PQmNK16X1Jk9RFPUkPUdmZiZ/TipiKrYNo+sQ8RgIBGC326HX62E2m5Gfnx+Fganwm91uh1qt5urKQPRaHozI9x591sHotlPKQKXKVXRgEggWv4tsAf0uHsriopOHE9LGIYkFJuh9sf4+FOZXVHDiQRHrGQYr/WWNyBMIRFg7qubr9XqxdetWXkgOhyMq96G/IgdgpPyoaTQx0qSU6dnp/8XvNGZqtZrDg3w+HxMONH7ERJLnRH7o0fXFap4EVLRaLTo7O2E0GjFmzBgolUpOhKeQwGAwCJvNhsbGRp7DXbt2wev1wu/3nzTW+8eS5cuXIzs7G1999RX8fj/Ky8uRlZWFo0eP4pprrkFBQQF0Oh2+/fZbHD9+nL0u8fHxmDRpEjIyMti7Qsap2NIFiH1IyOe1t/UqAm65mM1mJCUlQaVSYdWqVWhubu4GPMTnoNwn2oekLzQaDbfjUCgUnAtIERuBQACpqam48sorsWPHDmzYsAE7duxAfX09N3TPzs7Gli1b8Mgjj+CVV15hljJWBMO/pbvcfffdWLhwIS699FIsW7YM8+bNYx1+2mmnoa6uDi6XC6tWrUJOTg5ef/11LrhVX1/PYCkuLg6JiYlRIYekYwgkUxEVtVrNoEAkSPrSn32B9J7+v7cDOicnh3WN0+mERqNh4lBOqMo9DvQ+8sLIUyaoCFhzczMXkgHAhArQVTxM/H+TyQSj0Yjx48fjhRdewKxZs5CVlYWCggIMGzYMS5YsgcViwZEjR3Dw4EF88skn2Lp1KxwOR0yANVjD62SICP7FPFzCDqQXRDxB89YTnqD3iLmqQOy9HuvMJxmqN1kE9T8UrgD6jy3C4TCHJAaDQc6FI88O4YtgMAiHwxFFlvdX5OdLXxijp88hjhHNr9gKh3BHrNoFVMgz1vUp+ozmlryuAPrEGhRK7vF4sGPHDhQUFDDWICEc29LSgtNPPx1utxvr16/HN998g5UrV0atd8rLVSgUeOyxx6BUKjFlyhTk5uaioqICNTU1sFqtaG1thdPpRDAYZIKP+rfKU6fIaBMjMETPKWGmWOtFNMwGi5lE7DCQa8idH709g2hr1NTUcF6xRqOBw+HAwoUL2V7pTURyDgDGjRsHvV7PRqVon9Az0vvo56amJtZJ9L/kfRejSmkuwuEwfD4fEhIS0NHRgWPHjkGlUsHn87HnPDs7G0899RR++ctfYs6cOZg8eTLmzp0Lk8mEsrIyVFRUcH6zqBsJa8s/o0iQnkw5ZUJ8idkEugxUGhh6nZhyUSGJrnZRMdNEEmNBLnWgezll2sxyw0ouJ/OQHerhQSEANA4DYZ7E2HeDwQCbzQaj0Qi/3w+r1crX9vl8mDBhAg4dOoSmpqZ+F4EQD336HYgUMtizZ0+U902cq1hFq2L97nA4YLFYYLFYYLVa2fPe0dHBB4c4LvL3k3Ki/6PN7vF4GLhRKCH9npmZiTvuuAPz5s3jfLdwOIzjx4/j17/+Nb788st+jc2pIl9++SXuuOMO5OTkoLm5GUqlEk899RTGjh2Lt99+GzU1NRgxYgTmz5+PN998E6mpqVywJj4+HsOGDUNhYSGPb1VVVbe5EkmH/rCb/V3DYqhvVVUV57pMmzYNW7ZsiQkSxWIwktRV9Y7mWsxppeI84XAkz5H6Mq9btw7t7e248MILsX//ftTV1cFut+Oxxx6DyWSCyWRCS0sLXn31VWRnZ+POO++ESqVCbW0t2tvbB3yg/v8iNpsNaWlpqKysRFZWFt599134/X74fD6MGzeO9f7dd9+N3/3ud1i0aBE2b96MqqoqZGVlIRyO5OqYTCa0t7djxowZfC58+OGHDJqIbRZ7IvYkseZqIPp/IKQEgYzjx49Dr9fjyJEjyM7ORnJyMux2ezcyTy7Tpk3D1q1b0dHREUWKkD4Vz1MaB2rNRb0jqfCG3+/Hbbfdhvj4eMydOxdpaWloaWlBMBjEtm3bMG3aNBgMBlx88cWwWq2YP38+7HY7Dh8+jHA4DL1ej5ycHPZAAYgCNz+FiLUIJEliEpPWFYFIEU/IPZByPCEaM+K6EEG8HFPEkpNtuJ+MMR4KtgiHI/l71dXVAMAV1FUqFfeml+fSZWRkDAhfiJFNcoI7FsYAusZFBNci2Bb3O32n4mMiziDvFFV3FZ9HfK+IM8jrmpyczMVuesIa5Nm65ZZbcOmllyIlJQVtbW0Ih8O4+eabAQCPPvooLBYLz8tll12Gq6++Gp999hlmzJjBRrparYbVauWw/uTkZDzzzDM488wzUVJSgpkzZ6KhoQFqtRpNTU38+WgNixXBKRKFPh8ZiUT4UYSBGCYqN74GI6LXdigiGrT9fSbSv+RhdjgcqKurQ0ZGBjQaDeLj43kO5esqFt5RKpUwGAxs4IseWBEr03qRJInHVKGIVAyvrq7mehk0P2KVaTGCgTy/iYmJMJlM3MHgzjvvRHFxMWpqapCTk4P4+Hi0tbWhpqYGZ555JmbNmoXRo0fjySef5Gr49fX1cLvdSElJQW5uLrKysvi5xfuK9RpOBtY5ZTyoVAZfVA70O3m9KPyXXqdDF4hmqumLJlAM5RL/Lm6gk31QkPHVH0/RQEUs5iAaev0VykNVKCLhvlR1j56zvb2dD3O9Xo/TTz+dP1N/xkn+eWncDQZDVB6g+Heg60Cg1+T3EqssSpLEPVFpPOg7MZsi6yQ22RYNaKoOTc+gVquRmZnJrR8A4Prrr8e6detQUFCALVu2YOfOnTh+/DgaGxthMpkYkP2cZPPmzZAkCbfeemtUxcGNGzdCqVRiwYIFqKurQzAYRG5uLs/ZoUOHoFQqUV9fH8VEFxQUcKgiyUDW5EAYfxEEkKdSkiTMnj2712uILLDcY9bY2MjeEsq9pnXi8XgARKo4l5WV4cCBA/D7/Thx4gT0ej18Ph9aWlrQ0dGBUaNGweVy4aabbsLevXvhdDrhdruRk5MTc03/W4AdO3bggw8+wOWXX45zzjkHL730EsLhMBfOAyL9Si+55BI0NDTg6NGjMBqN0Gq1aGl
pYeAhhqvSwbl48WIsWrSIvRTUfL03XSb3sAwm7Ky/IgInq9UKg8GAf/3rXwCACy64oF8eSPrsdA5QKxm5h4mMLwrjE3uC0neTyYTOzk5ccMEF2LZtGz744AM4HA7ExcXh6NGj+Pbbb9He3o69e/dCoVDg66+/xq5du/Dqq69i+PDh+Pjjj3HnnXdCkiTodDqMHz8e27dv5+IrP4XQXhZzI8V0IhpbAoVA33iCSHEycH8MTAF0ER/ieX0yZajYAgDXtAAixadcLlfUPmpvbwfQpb8Hii/EMSDpDWOIRINI4gDRhW1EEkKSJK7TIeIMCseU4wwRY4q92Okecp0CxMYamZmZWLVqFS666CK43W588803jDVIwuFIqzcaL4/Hw+GccXFxfM4NGzYMTqcTq1evxrvvvouHH34YCxYsAAAsXrwYn3/+ORve9HnC4TD3O6V6EvHx8Zg5cybS09MRCAS6hbbS2JNBJerjoXpJ6eehijj+/ZWUlBSEw2E2+EOhELZs2QIgoiumTJnC15Q/a097Ux4NKj6f+JnF0GOygaiLAukwcf3R/4n38fl8SE5ORnJyMiwWCz7//HN8/fXXmDFjBsaPHw+z2Yx9+/ahrKwMZrMZ06dPR1lZGR5//HHcdtttePnll/HWW29hzZo12LlzJw4dOoSamhokJCTAarUycRlLThYRf8qhJTHMBohsjuzsbAwbNowBiDhxorISgYf4u3jo/JAsrjx84Ie4l9yNPtA4eKBLwZHSJW8RHUpUnIMOEGowLQ/DlT+DWPGyp/umpKTEVBaSJEWFiNLhIrKQco9cMBhEYmJiVKgEKWd6XlKUsbx7dJhYLBaoVCokJiYiKSkJfr8fZ511Ft544w1s3LgRS5YsQUNDA4fjZGRkIDMzE8OGDYPJZMI333yD+vr6Ac/DTymrV6+GSqXioiji4XTRRRdBo9Fg5MiRWL16NVJTU5GWlgadTofU1FRs3boVDQ0N+PTTT2Gz2ThEXKvVIi4uDi0tLbBarTwvPe0D+Z6VizyiQR5aqdFokJuby4XVqJiF/BqiASvuUVprVD4+Ly+PS7SHw2EUFBSgs7MTbrcbtbW12LBhAyZPnoyvvvoKgUCAw9cMBgM6Ozvh8Xiwc+dOAMAVV1yBDz74AAcPHkRcXBza2to4n+nfEi0333wzVqxYgYSEBFxwwQVwOBzweDxobm7GLbfcgscffxy/+93vkJ2djQkTJqCiogLnnHMOzjrrLLjd7qhwTSqnb7PZ4PF4WI+MGzcOo0eP5pA5oHfgI4Ja+r0v6S/AjnUvtVqNnJwcKJVK3k+pqancLqOv+5NHEABX2CadSXuwtbUVBQUF0Gg0zOL7/X7ul2oymZCVlcW5Z6effjoOHTqE7du3o6mpCYWFhdi4cSOOHj2K/Px8HDt2DCdOnEAgEMDll1+O0tJSSJKEm266Cffffz8AoK6uDr/61a+Qk5MDi8XS77E8mUJjoVarYTQaMWzYMKSkpDCeIINEzKkVMUQsPEFnkQgQfwgR12Css/BkysnAFkBEL5Mho1Kp0NDQwGNI+AIAG6uEL3rbazQPfT1TLIwhfqa4uLiYJJJCEd3CiaKx5DiDCAlKARG98eSFlxuj9JrX6+0Ta6xYsYJfV6vVSElJYaxB8tVXX8FgMMDlcqGtrQ2XXnopFi5cyGsjFArB6/Vi+vTpmDlzJq677jrccccdKCwsxFtvvYWvvvoKnZ2duOiii+D1erntoEjKElkjhpEWFRXhzDPPxPDhwzFs2DAej54cEv0lnUWnhPh9IBha9FLSfeWEUX+FrkXef6qRotPpGBu3tbVh5syZMZ+9r/0pSZHiW6JTg9ZpLMwjSRKam5sRDAY5dxkAG6hEtompkQA4qujFF1/Eww8/jMrKSqxduxbZ2dlQKpXIzMzE0aNH0dbWhr179yIYDGLevHnYsGEDvv32W2g0GhiNRlx44YVYvnw5Ojo6UFdXhzfffJOjl/R6PZPwkiRxVMr/OQM1GAzCbDZzvHtiYiJPjt/vh9lsxowZMzBjxgxuektMqFwhiMy3eLgQkI0lQzEo6T69hWH9UDLYhZCYmMhjbTKZ4HA4oFar4fP5sGPHDj4IKM6eFLuofACwd7K/zzNs2DBexHKGjRQiEH04iocTKURiZZOTk7vNm8iE03WBrvAe8sjT/SUpUjq8uLgY1157LdasWYPHHnsM+fn5sFgsiIuLQ15eHqxWK2pqalBeXo7y8nIcP34cnZ2dGD9+PJYvXz6wCfiJhYyr48ePQ6PR4IwzzgAQWcvl5eUAgObmZixcuBA+nw8GgwEmk4kLifj9fsyfPx+dnZ2orKzE7t27uWgNrQebzYaGhgacOHECbrcbGzZswJo1a1BZWdnnPpF7MmKRIqNGjQIQWXOPPPIITpw4EVUkQ67s5dcV9UR8fDyvBUmKhJEfO3YMbrebvSXl5eVoaGhAa2srNm/ejM8++wxpaWkIBALMXFOOkU6nQyAQwI4dO+Dz+XDgwIEfXTf8XOT1118HEJnTq6++mnVNa2srGzUlJSXQ6XRQqVQ499xz8emnn2LXrl0oKipiVjoYDCIuLg5bt25FZWUl1q1bB7/fD5vNxi2iFAoF8vPzkZ+fD6fTiebmZl4L4proTcRzRfy5LxHvI1+f48ePZ0DU3t4OnU6HxsZGXHfddTGfR76O8/Pzo9JY6HXSlxTmTAarQqHgkELyrObn58Pn86GtrQ3vvPMOvvnmG9x+++1wu93YvHkzDh06hDlz5qCsrIwJKa/XyxE4jz76KJRKJbZv344bbrgBQATkSVKkDYZareZ2Nz+mUF6yCKjz8/MZT8TFxUWF+vcXTygUCm43FUuGginkHtIfMvJC9ACJMlhsQfuQ9iSd1SK+oGuTZ53mQI4vgK4exP19JjnGED+bGI4Zy0ilr7i4OCiVyl5xhmiMAuAoBhIyYAnX0M+9YQ2NRgOz2YzMzEzs3bsXjY2NjDVIli9fjmXLluHJJ5/E2WefzZ0NHnnkES5y5nQ6kZiYyERRa2srNm7ciIyMDCiVSqxduxZOpxNnnXUWjh49ykWTRINeoVBg3bp1qK+vZ5xHa2Xs2LFYtGgR5s6di/T0dABggmcgRqHoYBJ1MDAwIkseWj2U/UJrkEhPhUKBxsZGzkMNh8M8XiLW7C+hM2rUKITDkZoJ4rOKmFrc/0SiK5VK5OXlQalUMglELRPFqCHSdWeffTYmTpyInTt3QpIibYQaGxvxz3/+k6O/hg0bBrPZjGPHjqGsrAzt7e1IS0vDgQMH4HK5kJSUhM8++ww333wzXn/9dUybNg3t7e1Yu3Yt3n77bXzxxReYMmUKSkpKUFFRgeLi4m5tmIYip4yBKrImIiNA/S3pkFCr1Rg+fDhGjRqFqVOn4uKLL8bMmTO5hLNYqZcml16n3JJYA9fT66LIF5IcPA+F2ZQv7lhhQ0MFuOKGF9ssKJVK1NbWcj4SJWWHw2E0NzcjFArhF7/4RUzPtfis/fnsVIBDDCESlYv4szyUhwCGJEl8cACRXAHRiKWiBGKoJgBO+BcNVkmSsHDhQl
x//fW44oorMGfOHLz66qtYu3Ytvv76a3zxxRdQKpVob2/H8OHDOdz1+PHjKC8vR1NTE9rb27kVy89FkpKSEA6H4Xa74fP5UF9fz6CVDnAxJI7CpgjIGQwG7Nu3Dx6PB1qtFvHx8XA6nXC5XGhqakIwGIwKv2xra8Pw4cMxduxYrFmzBs8++yyefvrpmM/WU6QD7RFam7Sfjx49ykDk6quv7vVgMxqNUSSIqCNsNhsKCwuhVqvh8Xi4+BWx42q1Gm63G/feey+amprg9XoZdJD2B031AAAgAElEQVThLoLSY8eOYdOmTdi8eTN0Oh3sdvvPbp38GHLffffxuqqtrWWyj7wWAFBcXBxVFXLy5Mmoq6uLKuJBYU1U1Cw5ORmbN29GZ2cnKioq4Pf70dDQENUUPRgMorGxEVarlQugkYTDYa4STCIaLOLvfYn87JALGdnU63nz5s1wOBxRniX5M4i/k/dVTvjJjSS3242CggIolcqoascGgwE+nw9Go5H1gcPhwJtvvon8/HxUV1fDbrdj7969kKRIxWEiDKnaPoUEbt68GU8++STuvfde7NixA19//TXWr1+Pjz76CLm5uVxE58cSkQwVPT70+4gRIzBq1CjMmDEDw4YNizJUxLGWh9L1RUz0hSligXG6nxhJNhSDvidjm8iLkxkqLI5TUlIS/0w5/ERsiviCCPDRo0dH4Qu5DCTUmIgsMojl1xPnRbyunAygqBx6nSIPRG8wfZH3itaaeMbQHHZ0dMBiseD+++/HDTfcgNGjR2P16tVYu3Yttm7dCrvdjoKCArjdbmg0GsydOxdWqxVerxdHjx7l57/mmmtgsViwbds2xMfHw2Qy4cYbb8SBAwegUERa6Nntdlx55ZVQq9V49NFHcfPNN8Pr9UKj0WDWrFmM7fR6Pa699tqodoJ6vT7q86SkpMDr9aK+vp7bkdD6CYVCyMrKwsiRI1FQUID09PRuOK4nIW9vT3PeHzmZ+FgUeh5KA6AijF6vl3GDz+dDUlLSgD8DhQ7Te0RiXMSqZBzT3+x2O4AuO4Ny5oGI04XCqyVJQklJCZqamrjw1f79+5GXlwej0YjDhw9jx44dqKys5FY2FIpvMBiQnJwMrVaL5uZmNDY2wuVyQaFQ4He/+x2mTJmCcDgMrVaLP//5z9i4cSPcbjcaGhowduxYfv6MjAykpKTw8/b3nJTLKWOgAl3FkYCI587r9aKsrAw2m40ZA3EREKuTmJiIqVOnYurUqbBYLMyiiAtW9NYNZDOIAJbuKWcch1qJDwB7nfoTxjIUoc8SDoe5opd4YFPZ/IqKCoTDYXg8HjidTowZM6bbYT3Y8OLc3NwoBSc+k5z5kodckVGp0WjgdDqhUCiQlJQUxTrROhK/5J+Trjlr1ix0dHSgqqoK+/fvR3NzM/7whz/Abrejra0NwWAQX3/9NRcqoNyPoqIiNDQ0wGazwWw29+iZP5UlOTmZvX4EFI4cOcJjTXnKBCTIWJMkiSt80nwEAgEkJCSwwev1ern5s91u52IDwWAQixcvxpIlS7B48WLs2rUrJhgEogEDgW+lUhllqAaDQaSkpER5+uUHlagPTjvtNO7LJ96DWEoC3YFAAB6PB3FxcRwCTXmmjz32GF566SUcP36cjaFYrC+N3WeffYaXXnoJ99xzDw4fPnzS5/HnLhRGlpycjMrKStjtdmi1Wrz11ls8nh9//HHUviViKRAIMKlC80z51KFQCGeccQbC4UibhrKyMni9XtTW1jIRR+HcJA0NDaivr8eJEydw+PBh7N27F5s3b8Y333wzYFY+lqc0lhCIpeegPFSFQoGmpqZeCVGScLircqhIGNJ5Ra8R4Cb96/P52GtKxim9t76+HjqdDpWVlVw5PRAIYPfu3UhJSUEwGIzZT1atVrMRarFYYDaboVarkZGRgbq6OlRVVQ1oHIcqNF42my0qh1D8O3nJxo4diyVLlmDy5MkwGo3891i5pj2dWf19JhGYyr/kzz7Uz04/i2vyZIYJy5/fYrHw2CQnJzPIpXEkHU77lfL5SPq7d2JJMBhERkYGk5hEStPeoRxt8V6xjFSaYzICyPiSYz/SX0AXeU94QCxUlpaWhilTpmDdunWoqqpCcnIy9Ho9mpub0dHRgZ07d+Lzzz9nzNXR0YH4+HhUV1fD7XbzPa+66iq8+uqrACJGc0dHBz755BPs27ePn6O1tZXTuMhgSUxMZKNz0aJFeOihh7B+/XquSUL4mvBAOBxGeno6Dhw4gJ07d+LEiRPYsmUL65ry8nIm+4j4MRgMKCoqQl5eHvR6PVpaWrqNK9C9EvNApad0iqGuafk1R40ahfj4eC4IGQgEYLPZIEkSamtrcdVVVw3IW0z7jnCkvBIw6V7aK6S7acwlKVKkkrAKpSCRE0GlUmH69OkcwUXnY3l5OTZs2ICsrCxMnToVx44dw+7du7Fv3z7Ex8dz66PPPvssqoaBx+OB2+2Gy+WCSqXCE088wde95ZZbcPHFFzMxYjKZYLfbkZWVhVAohD179iA9PZ33wM/aQA2FQoiLiwPQxSoEAgGMHDkSWVlZOHHiBJqamgB0KXfaHHRIKhQKpKeno7i4GIWFhdxOhQ4CWrwDMawIBJPIwzroeYYi4uFH9zwZRm8sEUNl6BAhI582QzAYxJYtW3iztre386EuPtdglQExK6LnQwSYvX1+AlxUZVhknMRnIg8qeUkoLEJ89uzsbLS2tka1FrHZbAgGg7jooosAAIcOHYJarca3336LpqYmrsRntVoxfPhwuN1u7Nix42cXvul0OtnzPHPmTBQVFQEAH4QKhQJFRUU8P/LegaFQCGvXruX9QN6t5ORkTJ8+HeFwGEajEU1NTRymuXHjRng8HjQ0NMDhcACIeDTLyspQXl6OsrKyqHEkkBBrf9F+Hz16NBvTHR0dXLxJrLbZk4dADCmjA6G5uRmFhYWQJIlDLUl3GI1GLtt/5plnAgBWrlzJa5eenUJ/5P3yJCm6XcC/JSJUafCOO+5AVlYWlEolWltbcfHFF/N+Ji+mJHX1saRwNiITxLWq0Wi4IJLX60VlZSUcDgfGjRuH1tZWuFwuFBcXo7GxEe3t7aipqQEQrd9NJhPy8/NRVFSEffv24YUXXsALL7yAN998s9fPIwexchEjQ0jfiUalSMh4PJ6o4nG9yZgxY2JG9NDapIgDOicpYoIADuXu+/1+Nj7r6+uh1Wrx8MMPo6qqCuXl5TAYDOwtJaKSyEZ6jdb8I488gi+++AItLS3smaGiIz+W0DlRXl6OxsZGtLS0cOEz8QyjsQqFQoiPj0dJSQkmTZrEURV0lsivPViRG48nG1eI64Cu/0PhCqBr/Og8EAkl8kBRoReRJGhra0NaWlo3b+dgQS0A5OfnA0BUuxOKsqEcbVHEMZIb8XRWUesa8cwggod0PvXcFYUI8pycHHg8HhiNRjQ3N2PPnj0oLS3FRRddhLKyMtTU1EChUGDTpk1wuVzcFmvhwoU4dOhQ1LOSYUPn0muvvRZFcmk0GvbU0ddTTz3FeiQUCuHWW2+Fx+PBunXr+
zExoqks/7MxqNUtAd9ByQnJws8El1QyVOnvfs8/mEhMNoPN+7ihvcXXfdhZ6eHsyYMUPgZvycpmlYs2YNiouLkZycLHVkuu6vsxg9ejS2bt0Kn8+HnJwcgVpGRUWhpaUF1157LaxWq8CAVEODsB3qhjpB1OcaLNQzdXNRoVZ8flwcKyoq8Nhjj+Hmm2/GypUrsWHDhoCI3EW5KBflyws3eq5XNE6AQAi/1WpFY2OjHKeSyXGe8niKujGqUEvgfIsRTdMEbqgG49RIv8lkkv6gp0+fFmcrOJIcvNbwWpgdeO+99xAaGioMi6NGjZKspaZpspd4vd6APnasUbryyiuh67r0SVX74DFz+49//ENYfs+dO4ennnoKV199NRYuXCj9EU0mk9TB0tj96KOP8L3vfU8QJa+//jrGjBkjtVzs+ZqWliaso263W7LaTz31VADcnyR5XIv37NmD6upqud6vSmhUqvs7RUVaBQcbVIck2JbgnqA6B58VFFVtD+457e3tck7ukeyfyh7lAARexyw8dZI6w5/BwUHJLBEFQH1qb2+XVijd3d3o6OgIqEMk8STvs7q6Glu2bBF7RkUlsEyHc87pdAZAn9Wf2NhYGI1GREdH4/LLL8dll12GrKwsKY9SnV+fzycZUSLITCYTHn74YTHCm5ubpYUSn5fBYBACIrax4fF33XUXFixYIM5yWFgY7rjjDmE6ttlsUnun8n+oGXfqRnCgQ3W0VNtL/VFFLU04fPgwHn30UXzjG9/Aj370I2zfvl3sqejoaNx4441Yt24d/vjHP2Ly5MkBxr3b7UZ8fLwEI8LDw9Ha2oqGhgbJRDqdTimXsVgskk0/d+6c8JcAEGdOTQwxewpAStEMBgNqamqwa9euT40Fj1WTIcEBf4fDAaPR33Jty5YtMv79/f2yHg41rg6HQ9qElZeXo6mpCRUVFcjPz0dYWJisw4Df2WJQw+v1CjMvr7e+vl50IysrS3SK6ys7F9BpbGhoCEA9dHV1CWLSbrdD0/wEU6dOnZJrYFkcHcadO3fKekrkQnJysrAfG41GdHZ24t1334XZbMa8efOwcOFCNDQ0YPbs2UJ81t3dLQzoapBEraN3uVxYvXq1fB9tYa4drF1Xn8vp06dhNPpJWHVdx4kTJ7Bjxw5UVVUhIiICNpsNDocjgKyKcHHWdqtoE84B+nLqe/+MfBGXdhqAazVNqwbwF/ihvU8BcGiaxpU+FUDdJ6/rAKQBwCf/twNoCz6prusv6bo+Xtf18cB5shk+xIyMjCEzosGF7qqDQeVWI56qgl0o2sXPB09OHkvmUNUgAYDCwkIMDAzIhGBdAHA+EnvmzBmYTKaAQvhggyk+Ph45OTkA/PANq9WKgoIC3H333Rg7dix6enqgaZrU6EVHRwucgDUhhPEww8vJx/EzGPzsc6pwPKOiooQJWR0XbnSJiYnyHgu7+cxo0LG3ZV9fH6KjoxERESG1pm1tbVLHN3r0aMTExOCDDz6Az+eTmiiHw4HS0lKEhIQgJSUF8fHxkomdM2cOLrnkEhQUFMBqtSIrK0siSAaDQQh9BgcHhb04mNBpKOOB4x+ccea9q8aKqmuapqGzsxOrV6/GTTfdhB/+8IfYtm3bp85/US7KRfliojqlatRVrT+OiYnBmTNnpA8dN3k1S0G0C0UNXgYjZ8LDw1FbW4sjR47gzJkzCAkJwbFjx3Dy5Ekx6FRntampCb29vfjZz36G7OxslJWVoby8HMD59T54n1HvTyWiMBqNeOmll3D48GHs2LFDSjjWr18ve9CGDRuwePFi6Lqf2IjM9pqmITU1VYK0Kss411y2uFi2bJm0mmhtbcVjjz2GyZMn45133sHu3bvh8/kwb948aJq/REN1kGmg/eIXvwBw3qChM8T2EM3NzXj88ccFAsayHDrhAMRxuPfeez+Vefl3C9d1Br+ZeVf3BRW9RV3kePIczFQP9ZxVZv2hhHqnZm6pu3QOgfM9gQFI0JoGP8uC1GA9yxAiIiIQGhqK2NhY2O12WCwWgU7y2hnMJWyStWy8dxqV/CFSi1ldFZVGx5WoNtWZUyUuLg4+n0/gl+Hh4cjPz8fUqVMxbtw4pKenBzj9ra2tYoeRtRQ4n4k8cOAACgoKROc5hsXFxTCZTEhMTER3dzdGjBghtkR7ezvKy8sRFxeHiRMnIiYmRhBvw4YNQ2RkpCQSyHTrdrslEx28dqgOqErGeCHd4281eE4dMpn8bdHeeecd3HHHHVi2bBleeeUV1NfXS7nCFVdcgQcffFDOmZaWhqamJikHa2hoQEtLC7q6upCSkiLPmGsDs+ZtbW0IDQ2VDCsAIdakTqp1ouq1kjfFaDSioqJC4K68Lz4nnifY/qaecc0OCwuTNjpWq1Vq8YODHFFRUSgpKQEAybb39fXh2WefhaZpAW2fBgYGYLfb0dLSIplDXpvqsAIIYO41Go1ISEhAXV2dnIusyMFO3tixY3Hq1Cl4vV5kZmZC13WBtwOQelNe76ZNm6DrOoqKiqBp/vrc06dPIy0tDQcOHEB3dzduvvlmdHR0BCTiWNMfGxsLi8WCcePGwWw24/7775fAYHh4OHw+H4YNGybETrx+7l/cO7u7u/Hcc8/B6/UK+Sjt+6qqKqxZswa6ruOJJ56Qnu8bN25ERUUFdF0XfpxDhw6JU09ova774dMMrlHUROA/K5/roOq6/lNd11N1Xc8AcBOAzbqu3wxgC4DFn3zsWwDe++T1+5/8jU/+v1n/Au4zF38yVoWGhuLSSy8VvLM6CbjgBkdCgfOQYOA84VIwa6AazVQj5qrjyO9gdJPMiYwWA/7oU1JSEqqrq1FTU4Pq6mphv2pubpY0vhqhBfzFylzgNU2T2idGUMn4GRISghkzZuBb3/qWjAkXm7CwMERFRcFmswkBwODgoFCN87W6GJ4+fRonT54MIPahEmuaJhGkoOcvsC86aYSdcRExGo248847ZZFwuVzIz8+Hpmno6OgQ+Bzgr+UzGAxClb5161bZ6FjsTVIDFsBzwk+YMEHqPUNCQoQpjpFHQiMYSVZb1lzISVU3DlUPVYNEFXXh5QbT0dGBv/zlL5+n4hflolyUIUR1RC/0w57StbW1st5xbVfXdzXjcaHv4evt27fDarVi4sSJOHfuHBoaGqR2KbiVC53CU6dO4ec//znS09ORkpKCsWPHBuwjqrGnZlXU1wAEJrlz507U1tZK3VhtbS0A/zrEDNK6devg9XqxceNGCbhxDVe/U9d16fd3++23Q9M0/O53v0NtbS2MRj/xzIwZM+Dz+bBx40Z861vfQkhICMaNGydOpwpT7enpQW1trRg+XM8rKipw7733CrEhs14A8Pjjj8ueSSb3qKgoFBYWSl37Vw3xDYaBcwxV3VH3BtWZ5LNXM+VqDbLBYJBMvmqIB4tqU3Cf554dHAix2Wy49NJLxZlgH1nVQfL5fKisrERiYqL0P1QNR8CfCeK+e//996OwsFDqSOls0PFtb29Hc3OzlOWoQd7IyEhxWIOdWI4nIX+qMKhOB5VzmYYx695UUkoVlkliKz
r/Ho8HeXl54ogbjUaxx1paWmA2m/HMM89gx44d2LJlCwYHB2Gz2XDJJZdg/PjxSE5ORn19vXQToP7GxcVB0zT09vZKwJw673K5JItIW4K2m4qWu5CDrorq2KqQVtWmcrvd2L17Nx588EEsWbIEDz74INrb2wPOXVFRgb6+PiH56e3thdlsxrBhw9DS0oLw8HBxWGgHsSadNZrUZzrgfOZ8vqzbVAOEHBPypjDAFYweUFEJfJ/67fP5YLPZxCnt7e3Fpk2bxGlSjzca/TX6ZNpnwII9nvv7+/HrX/9adMzj8SAuLg5dXV3CEUOkgcFgCID4Go1G1NXVYXBwUHoJEwYLAMOHD5d7533k5OQgPj4eR44cga77YbsMlDCj6nK5BDZrNPrJRouKigLIGUNCQjBs2DCEh4cLcWh8fDwGBgaQkZEBg8EgveWHDRsGg8GAESNG4P3334fdbpe6aZvNhvnz5+OOO+6Q+/V6vZIl5TVQR5n8o49FveP9M2A1ODgoNnVnZye2bduG9vZ2xMXFITExEXfffTdGjRol7bC41thsNtEvZuZVHfhn5F/pjfFj+AmTTsFfY/rKJ++/AiDmk/d/COAnX+akLChnD6jhw4djypQpEulRM4RDbQR0KtSIEBDouAKBDq0aHeR7TJ2rC3V4eDhCQ0PR2dkpC2pubi5eeuklbNu2Dffff79sas3NzeI0cbKlpKRI1Ofb3/42Vq1ahfXr10vzahoY7FulwoF13d9v7p577sGkSZNkYqk9W0NCQtDX14empia0tbWhv78/oEGupvn7XG7ZsgUbNmzA5s2bhSk4Li4OlZWVAVANjpNaG8LFFYCQF8XFxSE+Pl4Mpr6+Ptx6660CDcrJyZEFkXDlvLw8/P3vf8f+/fvhdrvR1dWFvLw8rF69Gl6vV9gK4+PjUV1dLTCRwsJC2WQiIyPR29uL3/zmN7DZbEhNTRXGSt6zruuiS4yoUVQnlO/T8OP/VeHfXJAZmSOB00W5KBfly4saMb/QT319PcLDw1FXVyf1mLquy2YKfLrfobp2qpBhGk/Dhg2Dx+NBTU0NcnNzERkZiVOnTiEzM1P2C9Xp6OjowIwZM/Cb3/wGTU1NyMvLQ0xMDGJiYuDz+YRIS107gcAG6GoJAY0tm82GVatWoaGhQcYD8DNHbtiwAWfOnAHgh13SkdQ0DWfPnpX1TS1rYbaErWIGBwfxyiuv4Jvf/CYKCgrQ3t6OFStWYOXKlXjllVfg9XqFNOXJJ5/EAw88INf49ttvQ9M0pKWlSUaNpE9qtvHMmTN49NFHkZOTE5AJOHz4sARaFyxYgLCwMCGc+qok+FmqaCa+r0J5eV8qL4O6F6hBDv4/OKOqZpfUH763bds26LqO0NBQaS+n6zoKCgowceJEdHR0oKenR3pCAgiAtVZVVQWwjao9US0Wi7T8WLhwoTjCs2bNQkhICObNmyf6zD3b4XAgMjISLpcLLS0twt7Kc5MUSQ3uq4EZwpWDhcczQ6k+E2Z1iQjTdV3Ifuj4cz82Go2YN2+eOFxTpkwRwh8GEKqqqlBdXY0xY8bghhtuQElJCdxuN5KSkgSSzAzXuHHjMHr0aOTn5yMqKgpmsxl33303rrvuOkRGRiI2NlbGMCQkBKGhofB4PHA6nTLfVP1XxyVYVJuKOqIGuXn9KoqLwe+6ujr8+Mc/DiAtZD1nVVWVcJeEh4ejpaVF7o+tziiEOEdHR8Nms4ktxO/iGkX7lQ4zALEhBwYG0N/fj8mTJ2Pu3LmYP3++kEeqdlPwHFDXPfX5OxwOhIWFweVyoa6uDhs3bvxUsigyMlLYxElW+fzzz2Px4sWwWCxYtGgREhMTpWSOLa94fuon60pVSDtL0Gpra4XfQB2vkJAQmZeAHz1YU1MjPgpbfNXU1ODMmTMydnV1dQFJD/LGsO5b1/3IyokTJ2LKlCloamrC1KlT8be//U3OOW3aNCEsoiO5f/9+SbqoDj8zphSyCvP567qfS6Wrqws7duyAzWaTtns8d1tbG55//nkYDAY8+uijEvzlM2NJ3rx586R2m33lExMThV1f0zR0dXXJ2LHd3j8rX8pB1XV9q67r13zyukrX9Ym6rmfruv41XdcHP3l/4JO/sz/5f9UXOTcXL/bFNBgMskD09/cLnTgjCxwMNZIMfNoRDY5qqlkxdVFRJ49aD2UymaSeh4PNou3ExERERUWhs7MTVVVVmDRpEh588EGJNvEh3XfffSgsLERjYyOMRiN+8IMf4OWXX0Z7eztaWloC6O8J8WX2kz+Ea+i6nyjjv/7rv7BkyRJomh96Rpiv1WoVmFBPTw/a2toE5sMNTnXG9+7di02bNqGrqwsdHR0SzVTrcH0+H6Kjoz9VW8AIEZkaKU6nU6A7DCzw+jRNQ01NDZKSkrBr1y5YLBaUlpYiMjIS0dHROHnypNTPWCwWREdHIyEhQWo9CwoKkJCQIBN09uzZEnkiaVR+fr40IiYMgQYVNzgVvqLqxlBZUwqPUTP3wRvPRbkoF+XLiWqUBGeT+Lq2thYZGRlob28PMHzUujnVaA4+v+qYcg378MMPUVJSgqioKCQnJ6O4uBhRUVGIi4tDdnZ2QD9Jt9uNiIgIycLNnTsXCxcuDCCyGD16NE6fPi3rsZpVoVGhBk8pPT090j/QYDDglVdegclkwuzZs1FTUyN7XkxMDAwGg0TZo6KiAgJsvD+bzYYPP/wQcXFxaGxslNo9n89PvvPCCy9gzpw5GDNmjEAJhw0bJtfJMWTN3JkzZ7BixYqA62eQMSYmRjKKXD+XLl0qWRDe61/+8hdomoa5c+cKLO+rkuzsbBQWFiInJweapomzoRrm3HNUJxVAgOM6VMABCMyeDeWsqJlE6jPrP+n0BDsnx44dw/Hjx1FSUoKKigrZu5uamlBTUyMERjQyjUYj8vLyxPagftDgbW9vF7tpxIgRWLFiBe677z5BeJHrwmw2ixND2B4Dvj09PXA6nRIUUlEDhw8fxp49e8SGUPfE7OxsWCwWtLa2DrlP5uXliU3CvqtqplvT/NDI+fPnC7cF25JpmobW1lZUVlbitddeg8ViwYEDB+D1evHxxx9L2RON5EsvvVQYZnt6esQpiY6ORnJyMmJjY9He3i79fcPCwtDb24umpiboui4wa7IkswUHdWOopIm69gz1nqpvql4F26SUkJAQ6d8JQIg07XY7zGaz2I9utxvNzc3o7e2VvvLMdHOeE76tznkKdYdIP85vzp+SkhJs27YNu3fvxr59+1BTU/MpxEHwD0XNIJOHhQkVJjLoeHd3d0PTNCHc2rZtm3AGZGZmYtiwYTAajYiJiRH22oGBgYAWPSwxUOelrutITEzE6dOnJSBAB5afra6ulvsOCQnB8ePHERISImVlvb29sFgskhjx+XzCts2Mqgr/JyHa0aNHERISgs7OTmzfvh1msxnHjx+X/cxkMuHtt98G4HeATSYTxo4dK+sU4N+TiouLoev+Vn68L5YxuFwuGSc669u3b4emaYiJiZEMMPdX9nwnOaDX65VARl9fHzo7O3Hw4EEAwIwZM
7B7925BXVqtVmRnZyMhIQHx8fESYKLvROblLyv/Sgb1f1WowNxwVUfT4XCIU1hQUIAJEyZIFFRl3VPPpXrtjAaoDp/6WW5U/D5101cjqZGRkdLvlA3IV69ejbKyMlRVVaGjo0MW7zfeeENYuR5//HE89NBDcs6Ojg5kZ2ejpKQEGzZsgM1mE4gDFYLXzOsjQQJw3oF2OBzIzMzEmDFjZBNMSUkRiIfNZpO0O9vVkACARgaNCzWyy8WBjqqm+eFtKlRKhXEYjUbB/QMIIOIgXp6bDAAUFBQAON9wvrGxUTbPhIQENDU1YcuWLWK4ZWRkiCGVmpqKrKwsAH7Ds66uDhEREWJsEa5ks9lgMBhw+vRp+SxrgTnhgpt3A+eNjwttNOqPSrzxr8AYLspF+f+z0LkKzjapzmpbWxumTp2KwcFBJCQkBDCWA+cdgws5C1yrVCK4iRMn4uqrr8bGjRulbcOECRNgs9kCHIA9e/agvr4ex48fl+siPLCxsVFIPK655ho8+uijeOihh9DT0yP9n9X1nNfCNUTNLIWHhwuJzZNPPokXXngBUVFRQiRyyy234NChQwEtj2hYMQtlMBjEKM3NzZV1nH2IExMTsWjRIulDPW/ePLz++uvS7oKtaiZNmgSfz08MsmrVKsnacr3evXs3Fi5cCJ/Pzzw7depU6LqO06dP4/rrr8ctt9yCG2+8EXfffbcQ63V3d2Pp0qXCCvlVidPpRF9fH2w2GyZMmCD9JlWDXIWMq6IafcB5Z0Kt7VQNbz5XNXgOBNZ58rz9/f2IjIxEfn6+EGXV1dWhoqICKSkp+O1vf4ucnBwZe6PRiMbGRglYqL/Dw8MREhKC9evX44033sCaNWvw8ssvS1CY8Ea2tqCwRcaYMWMwYcIEDAwMCGyWnQG417W2tgp6gQQ9dFQ1zQ+RPXDgANavXy99fw0GA+Lj4wFAWqgEjy8JxDh+qk1HJ/SOO+4QpER3dzfsdjuio6MFYkldJ3qAdYVOpxNr1qwRIhtd1yWzGh8fL+dpa2tDSUkJ+vr6MHv2bJjNZjQ2NiI0NFR6FD/yyCOIjY2VuW+xWASBxmwRM7rB9kCwExqsH0ONy1DHcUxZIhAeHi4Qb9a3O51OeL1e9PT0BCQ2AL99w5IqZrf5fXQkCJdVodd0WAB/3/CPPvoIzc3NEvDneO3atQv19fVyfvVehhLV1o+KihLo77Zt2/CPf/xDsvNkalYDAUajEaWlpaI/RCgajX5C0LS0NHR1dQHw18AHj29UVBSGDx8umcakpCS5HpfLJW26+Hk6pCpcNyEhAV/72tdw9dVXy/iw1ROh0/RVOI4sQ+vu7kZ5eTlsNpsgFc1mszjTRArOmzcPBoMBmZmZWLduHR566CEZUyInrr322gAHPCoqCunp6bjnnnsk2KJm6puammAwGMQZJX/AqlWroGkabrrpJrnGrq4uuN1uHDhwQHyCjIwMNDQ0YNOmTXjvvfdw7NgxKQ+wWq1wuVxob28Xh58MwF9W/mMcVG4QTqdTlAw4H53kDbKX0bRp08QJVTeW4Ai66vh+3nerRhIj3h6PB6NHj5ZoMBcjnpMkG9XV1Th+/LhEtauqqjB69Gj8z//8D2w2G26//XbccMMN0HU/EzCVvLe3V6J0LDimMjOSyGOo4KqkpaWhra0NERER0HU/u+Pg4CBSU1PlWtjzj5H0jo4OWYhUkgaj0V+AXlxcLAXSzBhQwbmBqGOvaRr2798fEAFuamqSfqmsy+UYjx8/HrquIyMjQ+pIeO6cnBxs27YNnZ2dkrV1OBzYtGmTBBlUGLfH40F3dzdyc3Mxffp0tLa24uzZswgPD4fH44HNZkN9fT1ycnKkMb3JZJLIs9vtFoY06gJwvg7iQotrsM5dlItyUf45UYM8wT8UQgG57vp8PjGegrMTwedUf/O1wWDA5MmTsXXrVtTV1SEjI0PQG6yfNBj8xHLx8fHSlkCtESW09cCBAygvL0d8fLw4pT/+8Y9x4403oq2t7VPZg+CMgpqZI+yX6/zAwICskT09PdiyZQt+97vfyfGsFVLHjMe+/vrrMBr9DKpdXV3CP/Dkk0+KM1RZWYkzZ86Io6lpfogW2YKB8/tjWFiYZDn27t0LTdNw9OhRhIWFwWazYfTo0fjwww+haX721vT0dDidToF4sg+gWp/0VQgDk8yQci/k2Kt7ioO1DeQAACAASURBVJqp4r0Plfmh0cnjg/flYP1Vs898bszCnDp1SsipUlNTsWjRItx1112YOnUqHnjgAcycOTOgJInoIgZ+ua+FhITgb3/7m2TjDx8+LPdIm4WQSsCvh3Rg3W43pk2bhpkzZ8JsNovjwdo2QiidTqe0mlPHj0Lbpa6uDsXFxcImS4cpeDyDkwLq+PE393G+R5uFWSG17Iu2otvtRlRUFLZt24aTJ0+iqalJHPXq6moYDAbJsLndbowcORJnzpxBaGio1Nexfyp1SNd1pKSkIDo6GvHx8VLTp9ZpMrMarAvB4zNUJjlYVJ1SX1ssFkRFRSE+Ph5er1daEZEMh+sSuxsMDAyIjUeuD+onnQY6+nQsCUXl9QZne1V72efzCSOzxWLBuXPnpIWO+oyDRf2/6szGxsYKyc+6devEGeSazOBQZGQk6uvrkZSUBE3zE20yQ9zd3Q2HwyFOEu1jomKMRqPAuDn/ExIS5LvoWKk97nm81+uV8orU1FRpceRwOGSchgpGAufh2YTpZ2RkyJwbNmwYamtrERUVJSiL0tJSCTCEhoairKwMVqsV+fn5APwlkcXFxVKvrevnSxLnzp0rKEfa8SSw43NUs+OnT5/GoUOH4PP5MHbsWIEyBwd8mTUln4Gu65g5cyZuuukm9Pb2ShujsLAwlJWVfSZR6efJf4yDSoeAG4TqHDCqQdiv0egn0XE6nVK4y88yu0dRByV4E+H/h4qkAufppvPy8sRhZcsBRiIiIyORmpoqD3/nzp3wer247LLLcNNNN8FmsyExMRG9vb1obW3FwMAADAaDMOxpmoa4uDi0tbUhLCxMYAJ2u102csJ1uBCohkNMTAycTieio6NhMBgwb9483HTTTThz5gza2tqQlJQkjp4aperu7kZjY2PAvRNyxE2nvr4ef//737F582aBwnDCsQ8fN7eDBw8GQHtOnjwpvfLUqOLixYulhiMtLU3umRj38PBwHD58GJqmiSM9MDCAyspKHDx4UCDFzAQbDAZ89NFHcLvdmDJliixidXV1MJlMiI+Pxze/+U3MnDkTmqZh5MiRSEhIEIOLEG7Abzgx0syABMc6eJEdypi5KBflonx5ocHANV59rWZQ2WQcwKfIkYKdQL7H89OxDd5wFy1ahNGjR2PlypWYO3cumpubsXv3bgD+/eLAgQNoaWnBqVOnsHXrVqnVVA229PR0Ccax1yM3+fvvvx8HDhwIYJAEArPG/C7eO51FdT988cUX8etf/xoej0f4DXgcx4PGB99PSUkRw3lgYACvv/46bDYb8vPz8fvf/x4ZGRlYt24dzpw5A7PZjPb2dlRVVeGpp56CyWTCiRMncOLECezZ
swdr1qxBUVER2tvb0draCk3zwwNXr16NkSNHoqWlBcePHxfkzOWXX466ujrJApSXl0vbnJ///Of/C1rzxcVg8DO9k3CxtbUVdrtdSoZoqDFgO5RjSRnKJlH1iaIa3UBg67oLlQ15vV4pGwIgJIu/+MUvxDhmEMHj8QSUDrlcroDSIYvFgi1btgSUDmmaH9rHelFN02SPpkPyZcqHWltbJQBDvgn1nvv6+rB3715s2LBBal7p0HGMqO9DlRBxPrhcLpSWlgYEdFwuF2bNmiXZLhrmQ5UR0fbq7e2F3W6XQBQD1SwlYo9UwhRVBl+WEs2dO1dKiQhxp4HPbHZISEgAnFbVmy9aTvRZ9gVrDpmsYa9QJhm6uroQHx8vkNe4uDhERUVJAsjhcOBrX/saNE0TFBuTI8B5pmmPxxPAPxJsD+m6LtBNtkJpa2tDT08PGhoasGvXLoGEBpeWqet1cDBH13VBKppMJuEt2bVrF/r7+/HUU09h0qRJaGlpwb59+1BcXIw9e/YgNTVVWN5ra2ths9lQW1srcyc4SJmfn4+2tjbpi5udnY3jx4/L3CZST3XiRo0ahZEjR2Lv3r0IDQ1FQUEBiouLYTAYcMUVV4g/wRK94J6kageMyspKTJo0Cfn5+SguLsaYMWPwxhtvwOPxoL29HZWVldi4cSM8Hg/q6urQ3d2N+vp6DAwMYMyYMXKd69evh8/nwzXXXCPXyu/y+Xy47LLLZD65XC4hAuvs7ERERARyc3NFbywWC44fP46XX35Zrp33rml+hOXTTz8NAEL+dOmll+LUqVOIiIjAj370I9jtdixatAhWqxV2ux333HMPfvrTn6KwsHBIff4s+Y9xUKk8hPKqC52qzJy4LOofHBxEUlISCgoKAqIwapSaP2ptKXC+9okPk5sNHTUuOgAkCqhGkBipKykpwcaNG/HOO+/gz3/+s0QQuRg8//zzGD9+PN555x1YrVbU1dVJjyrAzwDW1dUltbfcSNSibULO1Pd4XW63W+qT1OzCVVddJcXyvBdCOqxWKxwOB9ra2tDY2Ii2tjaJTKrZSUJvDx48KO/TUWZQgVlMNRMdGxsLg8EgGyrHjpALLuTcqFmfC0CIEg4fPgyPx4OQkBDY7XZs27YNZrMZhw8fFgfS5/P3FBscHERERASsVqs8T25cU6ZMQVpaGi6//HIMDg7CZDIhLy9PSCY4+dgk2efzCQW6qp/BESDqQvDGelEuykX54jJUhjFYWEPF9UUlRlL3BtXoU0sXKGodoKZpuOeee3DJJZfgJz/5Cfr6+tDa2op58+bBYrHA5XIhLS0Nvb29GDNmDGJjY/HRRx+htrYWmzZtQllZGcaNG4ddu3ahpaVFnDbVeXY4HLDZbLKuqlkJtU6LEuy4MuPH2iaLxYLExERZ43kOsozGxcXJOl9bW4ujR4/CZDKhq6tLWuJs3rwZzc3NKCgoQG9vLxobG7Fz505hxuzo6IDZbMbs2bPFaauurg5gAN23bx9eeOEF6LqOLVu2YMeOHTh27Bi6urrwzDPPwO1249ixY+jo6EB9fT3GjBkDn8+Hp5566p+Cev0rou7zbD3C+ygoKEBycjKATxO7UGiIU7ifcf8JDmKqmexgXeTnhiob0jRNyoZ2796NyspK1NfXo7W1VTKmb7zxhjhXaumQrusBpUM1NTWS8WaWUdP8Le1Y0wf4A8KqTfRlyoeo2ywfUkkDAch76piwhEh1VJk5pv3GYIFqf5WUlAS0UxoYGMDkyZO/UBkR2Wc5Tiwjeu2116SPb2hoKBwOB+rr6+HxeKRXJZ8dS4nsdruUEjF7yQBad3e3BJpoSwwODko5UXBG8bPKidTPBTurdKy5VrBG2Ov1oq2tTZzGnp4edHV1obW1Vbo/EE2Xm5sr5QXq+en8Dw4OfqqPKGG2AwMDcl+8XxXGqQbvdF1HW1tbAEoBQMB8Giq7xrljNpslKdXe3o7U1FQUFhbC6/UKvHrNmjW47rrrYLVaUV9fL3amz+fvpqHeg4reCA8PF/QIkSr19fWiqyNGjAgo99M0DVlZWRg7dqxkclnyAEDK6dS1G8CnelVzH+P40WdgkJEdOux2u7Q/GzduHLZv346zZ88iJCQEqampcm30QyZNmiT35vP5e26TFI26omZ0mSyz2WyCtjx16hRWrVolLYuCdZY2/YkTJwAA3/3ud1FRUQGDwYATJ05A13XccMMNqKyshM/nQ2ZmpiTdSLT2ZeQ/xkGlAjEKFzwwQylwWFiYQDm52ZASXYWI8Lda76MqHUU1HlRSC2brJk2aJAqoQnt6enrQ29uLmJgY/OAHP4DJZMLx48dx6NAhVFdXo6mpCUVFRbjrrrtgMBhQWlqKzs5OOUdsbKxAEqjU0dHRAfTi7F1FBkQqGxcrOva8tsTERNTX12P8+PFYtGgR5s+fLwtHTEwM7HY7DAaDOKqq0cCJwvFQYQEcJ9KQc4xYh0qJjIyU6B4zpxEREThy5AgsFgtiYmKg6zri4uKg6zpuvfVWuYf09HTouo6Ghgbs3LkTGzduhKZpsrHW1tYGwIIBSIuaK664IsC4YFH9wMAARo8ejQMHDgjxFnH/7e3tEi2lfjC76nK5BKJMp1iV4MXoolyUi/LlhNmEz/qhEcjAIdc6rkFcy7mPqJmK4OCkWm7w9NNP4/DhwygoKIDD4cAll1yCjo4OCbr19fXBbrdLfVl3dzfee+89VFdXS6uK5uZmlJWVobm5WZwgRv9fffVVTJkyBdnZ2dIyhMHA4DFQA6Bcq7m+0Ojt6OjAQw89hNGjR8texGxrfHy8tEQzmUxITU2VqPh3vvMdadXT19eHAwcOBBhmW7ZsEWhieXk5NmzYgJUrV8JqtSIiIgINDQ3QNA1f//rXMWzYMGRlZQlh1W233YacnBzk5eXB4XCIMf7www9LX+xTp06hurr6nyLK+FclOGMH+PdTlgxlZGQgJydnyIyOusfwHMHPiBJsS6jBBpKj0LYYqmyIeltbW4vS0lKcOHEChw8fFp1i6dBDDz2EqVOnfmbpUGVlpbDcq9mx2NhYYe81Go0SUKH+qfJ55UN0HKhvg4ODaGxsFIIeOvy0z9QSou3bt6OiokIcHMJVeYzL5QpwbrnfAxBHrLm5+QuVEVksFphMJhw5cgQApIzo5MmTOH78uGRvc3JysGnTJgD+wFJWVlZAsL67uxs9PT1SSlRVVSX2p8PhQGNjI1asWCG6xLlJWCjXj88rJwq2S6kzFKL4aPd5PB50dnaKvajqpgopZ10podpqiRKdf5/PJ8g+6rraKpG6ze/ns2dtIu97xowZWLZsGa655hqMGDFCYK0M7HCtVmtV+ez5mv+n/lutVtHncePGiV6VlZXh5MmTcLvdaGlpEbgrmXrVMWxoaBDbGYC0KayqqpJnxLGPjo6GpvmRhLwm9nE1m80oLy8PCBKotjIAqb33eDwSUOA8o61bV1cHXddRWVmJkJAQZGdn4+zZsxg5ciQGBgbQ3NyM9evXw+FwYPPmzTAajXj//fdhMBgE5uvz+fDqq68KJBfwB3TNZjPWrVsn7Xm
4X3i9Xun9yxaSNpsNJpMJ7e3tAbXHwPn1k38PDg5izZo1cLvdWLRoEfr6+lBRUYG1a9dC0zRBMX7jG98AAOmBPHXqVHxZ+Y9xUNVMFBdyVXEv5BhomiZ9lTgwZrMZaWlpsNvtAYYMRd1kKGoGlQ9R/TxJCJYsWSLU7XwYNpsNERER6OnpwXPPPQePx4NVq1bhrbfewltvvYXXX38df/zjH7F06VLYbDY0NjZKtAWAOKiq0zds2DApZOYGx15NzEpyrGJiYiSCzgxlXl6eNCLmvROyMG3aNMnWpqSkIC4uDmFhYbKAsfdXXV2dRJpVWBuvmbh2Pod3331XvotESQ6HAw888AB27NiBiRMnIi8vD21tbUhLS5NMcUFBgWRUPR4PDh8+LI4tM7VdXV0wGAx44IEHoGmaRDBpkDY3NyM0NBTTp08PeG4ulwvbt2+XTO/y5cvR1taGsrIyHDlyBC6XCzExMWhoaMD48eNxzz33oLCwMAD6xSx1e3u7ZHLUIAo3+ItyUS7Kl5ehgonqDxEzbW1tsh4BgfuC6hCo8Hy+p0aA+Tc/FxoaCrfbjeHDh2PXrl3461//CqvVir1792L//v3IzMxEWVkZjEY/U+RVV12Fb3/724iJicH+/fsRExODGTNmQNO0AIIMRqczMzOxfPlyfPvb38b3vvc9xMbGoqamRq6bv9Xaehru6n3qui4Qy7ffflvWHLaUYe1gREQEKioqJOjmdDpx/fXXw2g04ujRo3jssceEKOj73/8+EhIShBzpkksuwfDhw1FaWoq+vj5ERUUhNjYWGRkZqKqqws033yyOtsfjQVNTEyZPnoyioiLcdtttsm7u3LlTnJTExEQ4HA5MnDjxU9nIr0LUTBxwXr8MBn/JECGZZrMZUVFRGDduXEBGMVjHVKQWP0ObQoWTc880Go1Ss8q/c3Nz5TssFktAr1minkpLS7Fp0yYUFRVJlnLmzJkYPXo0fvjDH+KXv/ylZFXp/DkcDrhcLrS2tiIsLAz19fXC7ur1ehETEyN7Kb+PzkVnZ2eAk56UlITOzk6kpqbCbDZj/vz5+PrXv46qqirExcWJk8Ja0MjISEF6ud1uNDU1obm5WZBMmqbJ676+Ppw7dw5bt27Fhg0bsGXLlgCn6sSJEwHPyWQy4ejRo2JkGwwGvPXWW3A4HGIncF7ffvvt4mhkZWUJQzXLkiIjI1FZWQmDwYCDBw8Ke2pycjJOnjwJTdMQHR2NV199FcD5TCdhjwsWLJD7YEsRwj8TExOxZMkSxMTEIC8vD7m5uQgLCxMSIzp7brcbvb29n2rDoWa4KGpgBQDuvfdefPOb38SMGTOQkpIitZ8k2VHtWLVcgnZteHg4Ghoa5HvVrJ9q+1osFqnBVEvgCCPlb3W943s2m01QaCNGjEBlZSUqKytRVlaGU6dOfQpRoP5WbXY1UaVp/jrT0NBQIT3TNH9NZEVFBdrb29HV1SWtdhjIpPNttVpRU1MTkMHu7+8XeDfHmZ/nZ3i9PN/Ro0cxMDCA8vJyCYIQ0UPINwCcOXMmYE1QUTRMCJWWluKyyy6D2+3GqVOnMGHCBKxdu1Yy1QaDASUlJXC5XLj88suRnJyMAwcOQNd1WadU+3jZsmVyry6XC/v374emaZIcA/y2udVqRVZWFpYvXw6Xy4Xo6Ghp10OIuKZpSE5OlvWOgZR9+/bJ5zweDxYuXIi2tjYhdgKAr33taygrK4Pb7cYf//hHaUX0ZeU/xkEFPt0nLjhCqU5a/q2+FxYWhujoaFGWpKQk5ObmBkQQgzOxwX+rUU06YKoRw6wjJxF7cfX09Eikguft7+/HyZMn4XQ6kZSUhMrKSgwMDKCmpiaghpK04GoWMzw8XArFVRjqUL081XYChGXQaeW1cMNhLciUKVOwePFi6UEaHx8vcBWf7zwzZE9PD5qbm4WlV53cjNhwAtbU1Eg/V75vNBrx5ptv4uzZs8JQtmvXLhgMBpSXlyMiIgK1tbUIDQ1FdnY2Nm7cKM8S8Edx1ZoIGkDqWOu6jnXr1gUw8hLCwCgxI1aFhYWoqqoSGIXT6RTDb9asWYiJiUF9fT2ys7NRUFAgPaVoqNAAZURVJZy4KBflonx54fqm/lZ/enp6EBERgerqavT29gYYR8D5ucf1mkaBmpXg9wRnwF588UV0d3cLs+dll12G5uZm1NfXY+nSpViwYAHa29sxfPhw5ObmorKyUoKh5A3Iz89Hbm4ukpOTA9YBTdNw4403Yvr06WJweTwerFy5Er///e/xox/9CFdccQUaGxsDsgmqwaZmhSi67m8Sz882NzcLzI9rbl5eHrxeLzIzM9HY2Aiz2Yy4uDisXr0aY8eORVJSEn73u99h/vz5Ah/985//jIcffhgGg0HqTB955BEYjUacO3cO7777rrDNmkwmpKWlSSsEn88nLR4iIiKEuCMjIwP9/f3S+L65uRnPPvvsv12nVFGzNswkUDeoD3a7XQK/brcbo0ePllot1VhWDU51P1TRN3xGquFPGCqAgOdJx5Xn4TlIuON0OqXW0Ww2Y+/evTAYDGJUPvXUU3KNx44dQ0JCgnw/4CeLYZZxcHBQSMDUucA6Q6KzeCwNdbL5cg8k8+28efNw/fXXw+l0BnBcMJtEh7Wzs1M4JgghVbOhHFv++Hy+gBIoZp1474CfMCc9PT3A6SP0fffu3TAajcK2m5GRIa8TEhLg8XiQlpYmxx09elSM8LS0NJSWlmLPnj0YM2ZMAOMxYfwDAwPIzs6GwWCQYLXX65XMpsFgEGRdd3c34uPjxYlRA2ZEWmianwG5t7c3YGyoK8HCbgaXXXYZli1bhnvvvRdFRUUoKirCr371KzzyyCMoKirCrbfeiunTpyMrKwt2u13Ku5KSkiSgz2cCQEqaVJ0nTDu4HlZ1aJkUUgM2O3fuRFZWFsaPH4/w8HDcdddd+MlPfiJtTw4ePCjdIgCI88bnH7yOqsEKg8Eg7SgHBgZkjfvoo4/Q39+P2NhYgcEzyw8A8fHxgh7gT2hoKHJyckSHCTXnM2SgRb3n2tpahISEyLMaO3Ysjh8/DgCYOHGizCGHwwEAAeSnXIuoC21tbZK9ZG9kZsLZU5rM3QsXLsTIkSMlgxsdHR3Qn7q5uVmuX90TWdseXDa5cOFCuFwuXHvttTLuCQkJOHnypHyGKFU18Mtzvvjii/B6vVi+fDkGBwexa9cu/OEPfxDn2OPxYOnSpTAajfj9738/pC5/nvxHOajl5eUBqfDPg00G3zAfisPhkGgy4G/ITipnHhcMI+ZmQmeEmz2jfiQBUI/1er2YPXu21BtwQgP+DaujowM1NTXo7OzEvn37sHr1aqlL6O/vR11dHZqbm2UyMBLrcrnEIaaQ3ZGLoSpJSUlobW1FdHS0ZF35WTVSygW5ublZ7mPWrFm48cYbJeLFCC4p3LnQ2+12MVoA/6LGvmbc/LjQUjjxWETPY48ePQqj0YiGhgYUFxfDZDLB6XQG1IDy+S9fvjwgUkfjdHBwUBg1WSO1adMmMWJ9Ph+SkpKQkpKCkJAQHDp0SJxUbv5qtoLX6n
K5MHHiRInGxcfHIyEhASEhIUJeQR2lM97f3/+VM1NelIvy/4oEO6TBP5znbKju8/kQEREhwUzgfCZINfaDg0bcuGmw9PT0YP78+Zg+fTq6urpw6NAhfPDBB7jyyiuxdetWMUQnTJiA7Oxs7Ny5E9HR0cKEvmfPHjQ2NqKwsBChoaHiuKmw5KGgymp0f8KECXjiiSewfPnygIwpf6tZJVVMJpM462zbMjAwIH0kPR6P9HSMiIhAaWkp7r77bum/yTozj8eDK6+8UsaXmSi3240nn3xSgqc+n08YLn/5y1/C4/Fg0qRJiIuLCzCI8vPzZR8rLS3F7bffDqvVKuUarLv6KoX7BRBofAcbbEajURiJBwYGEB4ejqSkJMkE83g1MKoimSjULzXYzj2Hx5F9efr06cKYSTuA2VQGiXfv3o2zZ8/C4/HgzTffxGOPPYZDhw7BaDQiIyMDb775JiwWCxobGwW6ze93Op0yV5xOpxiZHA+Wufh8PqkVJFpL087Dx9UMUHJystT26bqOa665BhEREcIiGxkZKQFvwG8HREdHy77e2toKt9stLflUx51jGhMTI8RNgH+PZz0eJS8vD4Dfgc7MzMS4ceMwd+5cQRSQPDMiIgKhoaGYM2cOYmNjJVhPx3xwcBC7d+/G22+/jTNnzmD37t1Yu3atkPPwJyQkBPv27YPX68W1114rbMq0H2kHMLBAuD0hxhaLBf39/Th69KgwrnKOEwrMecba4eC2R0DgeqmuK8FBh9zcXMyfPx/Lly/Hfffdh6KiIsTHxyMzM1PKEdQgjarXfB50uNUA2VCBxOBMKvVG/XG73fj+97+P++67DzNnzkRHRwcOHTok51Wfv7q2q8EgXic/FxUVFRB0GTZsGBITE2Vc09LShHckMzMTvb29AdebkJAgfVQJ866rq5PrsdvtgoTgcSwn5DMNDw/HiRMnYDAYkJ6eLp9Ts6kej0dQgT6fDz09PQgPD5dEFe19FX773//939B1HWfPnkVJSQlGjRolAcUPPvgAZrMZ48aNk3n8pz/9CW63W4jPAH+t70svvQSv14vhw4fLffX19UlLmcLCQui6n5jKaDQKoRaREUxsUb+8Xi9KSkokcHT48GHk5uaKz0TY8pIlS3Do0KEAhuEvK/9RNKR9fX3YvHkzwsPDpa/aUF63OrGHumlChO12uziDhPuq5+Tmom5AFDp36oQnbEStgQoNDcXixYtx+vRpHD9+XDJydO7Ua42Li5Mo5dmzZzE4OIjw8HCkp6dLY13Aj11nzanqOEdGRqKzs1M2NzVq2dvbi6ysLJw+fTpgc6ESqRnN1tbWgIJ1r9eL1NRUZGZm4uOPP5bJm5ycjM7OTmHOZRSS0tbWJoy4jKwcO3YM+fn5MBgMSEpKQldXlzDlrVmzRgIFjOpQmfft24dp06ahsLAQ+/fvB+DPAoeEhGDixIlYv369GGCsReAEYkPi4uJiYehLSkpCT08PDh06BE3TsGnTJiF5sFqtYpxR7+x2OxoaGpCSkoLx48fjvffeg8PhEPhVRESENCknDIQ1zRfhvRflovzzsnDhws/9/09+8hMA+Mqzb6osWLAg4O877rjjf/X8hGf9O4XjyCg58On7uuaaaz7zbwCYP38+AGDlypUB7w/1LBctWvTPXez/kqh7eTBk8EISGRkp5DZhYWEYNmwYWlpa0NnZKY4kcJ59meek0AFWYbx0OHgtoaGhcLlcGD58OIYNG4a9e/fCZDKhr69Psrl0IN1utxB3tbW14dSpUyguLkZiYiJWrFgBl8uFpqYmpKamBjge/f39QoTU09MjThN7Z9IQ7ejogNPpFCeJ9+ZwOISMpqenBzabDSkpKSgrK5O/AX92qqurC9HR0cjLy8P+/fthtVqRmJiIwcFBqbvWNA2RkZFoaWmR2k2W0KjPhrXd6enpQnjIjCUlJiZGssLf+MY3sG7dOowfPx6jRo1CS0sLoqOjUVNTIzWAc+bMEUKZPXv2BKAtyGjKcWLwhwRlRE3t2LEDU6ZMCSC9UbPfH3zwAa677josWrQIpaWlOH36NFpbW+Hz+ZCamir2y8yZMzF16lRs3rwZlZWVYlfQVmxpaUFMTAxcLhesVuunsvMUPmdVgjOwqq1sMpmQnZ2NDRs2SEacxzPJQPuPTiKPVR1T3q/6zJhl5WvVseV8oX7NmDEDl19+OaqqqvCHP/wBUVFRGDVqlBwfXJPP9zn31ECR0Xi+H6zL5UJJSQk0zc9Sm5WVhc7OTsTGxiI5OTkgOGgwGJCbmyvBAJPJhMTERGzfvh1paWkAgOzsbJSWlkrWUNP8RGNjxoxBaWkpuru7hdWagSrqsqb5a6dpy9PuVIMCgJ8MdNy4cdi6dSsOHTqE5ORkrFmzBjfddJMQen300UcoKCiAwWDAkiVL8Oabb2LOnDnIy8uT4BcRhnfeeSeeeOKJABSFwWDA8uXL8eCDDwZ06mBteW0XNQAAIABJREFUfGxsrMzRsLAwVFdXCzRe13VZe3hO6saTTz4pqEKLxYJdu3ZB13X87Gc/k8Dm4sWL8e677+Kpp5760nvBf0wGlYPFaMjmzZsDsn9AYC2OGuVRhUrMycLMV1dXlyziamE2cL63GCccU+bcfBgpUycGJw7rNnNzc3HppZdKDUZ4eDjGjh2L3t5e7N27V4qwycIbGRmJEydOoKqqSpxJZuGI+w8NDZWIj67rQmxEJ5hCgiDCmwkBYu9UdWw8Ho+w+qrjFxsbi+rqaoSHh8Pn82Hu3LkYHBxEZGSk9LQjQy6PCY7gud1uFBcXy0KblJQkEWH2XeVCYDKZEBsbCwDSBNhisWD69OnQdV1qcgF/zazVapVoGTPB3Dz4vtfrRUVFhWRD6uvrpYa0vb0d69evB3DeIOOz5HWtW7dOdCsrKwsulwudnZ3SP5BMyS6XC4cOHUJHR4dEhf8/9t48OK7ySh9+bu9aurW19l2yFsvGLDY2GGODcWwg4AQSQoaQCQZCQmWSqSGpmW+KoRKSKpikUqlMMplUDUMgkwxrIIkN2BhsbBnjXZYXyZKsfd/VUqtXdff9/mg/x+9tiy3z+33xV9WnSmW51d333ve+733POc9znqNmGJOWtKQlLWmXjyW2ewA+vCyDfgYDA+73BQUFSEtLw8LCgsFZo6l7LT/D1+kr8DxUGqGmaRLo8f2hUAjhcNggVkibnp7Gnj17sG/fPmzZskUS2dy/1QCV/gkQD0BIx2Sgp2maMItUcUmOQ0pKiiCvRNDpaLONCpPnDIjLy8tx0003SZ0lKcEqAuVyuZCeno6pqSnMzMzImKvHJ32We6vJZILH45HxVQOpwcFBzM7OCmI6PDws/l5/f7+oF+u6jvfffx8AJKkOXKSY8jyY5Ff9A13XJXAlosq/Ef3q7u6G2WxGeXk5urq6pI1JRkYGvF6vfG7p0qUyl+rr68WXUdFDlicl1qky+CNTgX4r/Sp13tHf4vkvLCxI6ZY6t1QEVfVl1PFXX1MBDvW46vnwX/XceF68hvr6eikpYL0k55QaAPO4iSivumYoykll8h07dsDj8UjfT
iLZKr0/JydHyuN4XWSZaJomNHLOfSBOAydLpaurSxIZqkgUz5GlcYsxYaiv0t3djcrKSgGUdF1HZ2cnQqEQ7r77bvE5LRaLBIO5ublSb8z2a9FoXNmYiRZd1yVIZEmcSu9nPa2u69i6davMFd4fXoumaSLqpurOsOc321WyHy8/o2ka7rzzTjm/j2PELmaXjVediEzm5ORgYmLCIIqR+H7WkyS+DlyazaSoDx8w8/Pz0jpF/WGwoQanrIlgTai6cFR6Cptgk6bR3NyMe+65B7W1tdiwYQNOnjwpWaWRkRG5mQsLCyKyZLPZhFqSnp4uwarZbEZmZibMZrM0H1bHjYuE48JMDzcdlRqR+DsAOafi4mI4HA7Mzc1h7dq16O/vR3d3N6ampoQqTcvOzpaNmrQx1prqui5Ua26KlEKPxWIylry3lAXPycnB4OAgLBaL0J+zsrIk81xQUCD3lNLYPp8PIyMjCAaDcDqdMJlMoiKsFsq3tbXB4XDg+uuvN6j2mkwmUUwD4gtp69atiEajIubQ2NgoGW6KHtjtdtx7773Izc39/5y2lrSkJS1pSft4W4xplYhC8V/VwWS9JfeSYDCI4uJiNDQ0oKGhwaDYmYiYMtDgd9E34PkQGWPQSH+DyWG26yAdlw4y9/+enh5UV1fjmWeewdNPPy377/DwMAYGBsTJJMKo67o46qWlpRgbGxO/gQq6i/lTNTU16Ovrg9vtRk9PD0wmkyCXFGE0m82yB5KVlJGRgc9+9rNCoZ+amjKIWJJKzVIiamHwvlB1lvXZHOPXX39drg2IJ+dTUlKwc+dOqcVOT0/Hjh07kJaWhrNnz6KjowOjo6N49dVXJchhMEJG149+9CMDo4yClfPz8+jr6zP4Vbt27UJ/f7845tnZ2SgpKUFJSQn8fj/Onj2LjIwM5OXlwel0im4G50teXh5KS0thMpmEbkvqbXl5OTIyMqTFBwW2Eo3zivdQDSLV+nu1jpdCamQWUhSL8zIRtOG/iSAQgyiWNPB81ONy7i2WEFL9bZMpXo5WUVEhrEkmEhj8q/MycV3xNfU9GRkZKCoqQiAQQGtrK5qamqSuk+ATzeVyobu7G+FwGENDQ4jFYjIujBsAoKurS+Z6fX09mpubUVhYKC0Pa2pqpAd0ZWWlAfhRATP12hl0UvhU13UcO3ZMFIrtdjs+85nPAICUrjEw3bx5Mzo6OgSNBuIJqGeffVZqVIl8RqNRPPfcc4hGo6irq5Nrn5ubwx/+8Aehwy8sLMj1kiHItmWPPfbYJXorg4ODMvei0Siys7MRiURw9OhRPPfcczJev/vd7/D973//IxkrH2aXTYAKQMRsHA4HXC4XXC4XpqencfLkSUM9JmDkyScugsUWQiwWF7jJysqSAHJubg7vvffeolQClfvPBtDqQmaGQK2tTElJkY2EGYjx8XFs2rQJ58+fR15entRVMGt61113SQZyZmZGglIGa2rfU3LTPR7PJZkl8tkZsOl6vFaECCwfVHwosChczWz5/X6UlZUBiNcjmc1meZD6fD6cOnXKUGvpdDoxPT1tGG++V9M05OfnC++dYkMUVzpz5gwsFou04dF1Ha2trQgEAqJqZjbH+zL5fD4UFhaitLRUXrdYLPjiF78o9GMiuBx39R4BkIfH+fPn0dnZaah11rR4nbHVasXevXsBxClLVqtVlNg0Ld7m5ty5c9B1HSUlJdi0aZNIqKs9qJKWtKQlLWmXh6mo0mKWyMpKNLVkyGSK6y2odYVqsKCa+n1EaFR/JbFsiJ93OBz48pe/jC1btqCsrExadOh6vHTI6/VKYphoCv2VgYEBDA0Nobm5GT6fz1A6xAAxOzsbXq9XzptOLIWc+L1MDvt8PmRnZ0v/UqJV4+PjBvSOrzOpTltYWBCKLYVlcnNzUVRUhNzcXLhcLhHfUi03N1fqYhlcDA0NGWp0CwsLZVxYRvTOO+8IollQUCD0x5GREdhsNlx11VUAIMJGLCVaWFiAx+PBwMCAwY9QS4nm5uZw9OhRYVsVFhZKcEOhtT179iASieBzn/ucBE2cQ36/Hw6HQ9o2rVq1CgMDA5ienpb7ysCdLD4m+VVTkyrqv/xd/VFfJwsuGo0akHOuA3Ueq7XItER6LwMUNTBloMr5TVq6+h18T39/P2677TasWbMG6enpcDqdIrbGcrhEVF9NOPH8EwEpAKJeHQqF8Pbbb+P5558HAGmHyO8dGhqCxWJBW1ubfHZiYkLOMRaLSd9PAlGdnZ244YYbZD5fffXVOHnyJADguuuuM4y5ijhTeEx9BsRiMUxMTIho1enTp5Gbmytsz1WrVsHv9+P111+X2MVqteK1116D1WpFRUWFrN+ZmRmYTCZ84xvfMNCrZ2ZmYLFY8MADDxiYBxaLBa+88goikYj0POa4jI6OCsMiMzMTX/va1+Re8/xnZ2ellpsoaiQSweDgoCRm/u7v/g5PPfXU//9VfFWK7eDgIJqamtDb24tgMIjjx48bCm3VYFKdrItZYhDLAFgt0vf7/TLRmeHkd7Jeg8IC6g0izYUTl5sQH+7BYBCnTp0S8YmJiQmEQiF897vfxfbt2+WcKf9OZTI+OKk6BkAyYJSUVq+bwbymaSKClJWVJWgrz4lKvQwKadykGej29/fDZDKhqqpKNmJSTlSaBesriFSazWY0NzfLZL3lllsMCsWjo6Po7+8XNDInJ0cEmg4dOgRN07Bx40ZpfUP5eZ/Ph9OnT2N6ehqFhYUA4tndrVu3oqamBgCkGJ7nplowGEQkEsGbb76J7du3GwQYeP2hUAjvv/8+HA4HQqGQyHir7/P7/dD1uAIfg1Kn0ymF9UlLWtKSlrTLx/bu3WugtKq+g+ozJDr0fM9iJUMs/0hNTcXu3btRWFho8AsYFHHvVQPFDysbYtKUfkhOTg42btwIr9eLW2+9VfZnm80Gh8NhKB3iebN0iK0o1NIhBqh5eXnweDyXIF6ftnxI7Rmp67rUZapJa/peRUVFhhKikpISoTGzhIg90fmd9FV4nhTYYs2g2Wz+RGVEvC9WqxWDg4NSRsTvYylRRkaG1M0S4U4sJWJbl3PnzgGAlBIx4ItEIpiengYAVFZWXoL8fVw50YkTJzA9PQ273S73muVEiXNysR913NXaUSLXaWlp4sMwWFVFvTiXeN9U4xrgv+rcYZKFa4rzngg363MBSODN4Mfv98PtdmPNmjXYuHEjUlJScOrUKfT39+PIkSOX0GPVoDwxqaH+jdfNdjQMJpuamgyocyQSQUlJiTACsrOzpUaf9c+klnNth0IhUdZmP1iW65HZxzFSj8OkANc8gbfDhw+jrKwMmhbvu1pfX49XX31VaNCBQEBak2mahsHBQTgcDnR2dsLhcODaa681BNd8RqkxQldX1yUtvgKBgLRc+uIXvwiLxSIiS7yfbOl43333GZ41sVgM586dw8LCAvLy8qDrurSnaWpqwu9//3sZg8985jP4/ve/j09rl1WAykXBgfV4PJidnRVkrLm5GY2Njejr67ukZoKTVc2GcrGog6ouOtZBpKenY35+Hu+/
/z727NkjfY5IGwgEAkhJSZEMonrT2avIZDIJDYgCSWazGWNjY+jr60Nrayv27duHZcuWwW63S38hnldhYSGmp6eRn58vgXBxcTEmJydlgquLQ1WMJMWgp6cH0WgU/f39sFjiDZl7e3sxPz8vDyrSD3p6egyLCICBXjs/Pw+zOS67XV9fL5RWqs1xgVG9MhaLySbx7rvvyuRm/1iep9PpRFFRkYgnRaNRVFVVobCwUJool5aWIi0tDStXrpRs12OPPYZvfOMb2Lp1K+rr66U+6IYbbpCGyVQMZlZbFTEymeICC1NTU+jo6LikhshkMmF2dhZ2u11qVbds2SL0BZrVakVLSwvsdjteeOEFxGIx3H333Xjvvff+D6+GpCUtaUlL2v/WcnJy4Pf7MTU19aG1lolUQtpifoOu63A6ncjPz4fP58PCwgJcLhf27t2LycnJS+pdVVE9Hn+xsiGiK4mqtuw3SvRkzZo1sNvthtIhOtwsHeJ+p5YO0dLT0yVIYTDCffLTlg8xmFY1GsbGxgzXr+s6SktLDSVEJSUlWLt2LdauXSslRKzxVY1IIs/DZrNJ3RzRzY8rIyL6vLCwgAMHDoifQ+SQpUR33nmnqK0WFhZKSxS1lIi94anvQfo3y4hYZ0xV1muuucYQLH1cOZHH48HJkyexe/duTE9Pi2iNSvNV/cFEY9DIeatSzpmwIOOMqL4qjKV+D+elKvapCgxx/jAg5efUrgsEMSwWiwj+sPsDEKfA3nzzzViyZIkgiFu2bMETTzyBRx99FE6nE2fOnEFLS4tcG2uuVfR3sfWqJqAsFou0bpyensaLL76Il19+WRIZy5cvFzZkQ0MDBgYG5Dupgq0qE6enp6OlpUUUm3n/yEJQRa/YKzcajcrYcOxmZ2cBAJOTk1iyZAk0TUNXVxeGhoYwNjaGhYUF3HbbbVi3bh16e3uh6/E2NMXFxbjtttvw0ksvSecJBuHPP/88wuGwJM04Ti+++KKBXQhAgLOhoSERQ1NbX01MTMBqtWJiYgKBQACVlZUyLmprIXXMU1JSEIlE0NvbK4mvFStW4Mc//vEl8/Xj7LIJUFmDoWZLOHkWFhakkD41NRWjo6M4cOAAAoGAPCQTsz/AxULrj1JZ5UTJzMxEMBjEwsIChoaGkJubKwuJCnqJ389MKG8Mz52KX3zgM/sSDofR2dkpVA/Sd7l4QqGQQdiIC4rBl/qgTaQ8Z2dni6gAg8tYLC4tr2bf8vLyBJFUx1rTNBQXF2N0dBR5eXmw2+1CFS4tLZXNanZ21lBMn5WVJQE1r5HZ6lgshuHhYVE2I1WpoKAALpdLNvPu7m7JzJ48eRKhUAirV69GdnY2li9fjqamJqSkpCAvLw8pKSmwWq1ITU3FxMQE/H4/Nm3aJD3ImPnkuPKhRqRbRbh5nup9CwaDgqKGw2Fs27bNIADAxR4KhQSpjkajUiuQtKQlLWlJu3yM/TipMk965WKWiBypzCs14OJe4HK5kJqaKoIn7e3tOHbsmKGvIwBRy1TRlMSyITp83Kv4OlVu+T1NTU1YunSpoXSIiXlNi5cOff7zn0deXp6hdMhsNstxuWerTuWnLR+ij0D/igjo5OSkUKCZRC8qKjKUEDGJn5mZKf4Iacm0aDQqZUS8dl3XpR8qg56PKyMKh8Po7e0V9WOWEZnNZvGVOjs70dDQgMLCQrhcLqFAJ5YS0f+iP6aWEnHuBINBnD9/HlarFc3NzZcgjR9VTqSOe2dnp6B7bAUFLE6zVYEadS7zHgNxFpnD4cD09LQE9PweFf3n8ek/Jx6Dx1eDYX5O9eH53fn5+YhGo6ivr4fFEu9Nyhpr9bvVhIjJZEJxcTH+5V/+BT/4wQ+Qn58vwkHqOuV38PiJ605dx0womM1m6e7xu9/9Dg6HAwUFBUhNTYXdbkddXR2mpqbkWli3qQbxy5Ytw5kzZxCJRAxdJ3g+LpdLxpAsSHVcVECNwBbFPm02G2ZnZ2GxWKRGNDs7Gw8//DDeeOMNXHXVVWhpaRHE3263i+AW143dbsfDDz8srzGestlsePjhh2W8zGYzvF6v1Ix+85vflLnKYNvv9+PZZ5+Fruv41a9+JbEN7zfrcNnRg2jqoUOH8NJLL8m8UufwJ7XLKkA1m83SFJYTVQ3+5ufnBVF1uVxoaWnBmTNnJEvASc6Jwe9V/5+4aFW4mv2ymF0kD31hYcHQmFf9URcMEF8gVJPTNE3oDbyGl156SbJoaiaXFGLShJmJUxWHuSlardZLKKqkblBwgIFTJBIxBKgZGRlCsSBlhz8FBQWYnp5GVVUVAEgLGLfbLcE5a2J4bIfDIdlAjqPZbEZfXx+8Xi9++ctfyj0jJ729vR2FhYXo7OyE2+1GfX09brzxRpHap4iCpsV7/B04cADRaFTEHMzmeL+qxsZGAMDNN98sCQ673S4Pv/Xr16OsrAwWi0UUxojqMqHBa2eWOhAIyGYRi8Vwyy23XDJXbTab1Klw07n//vv/8smftKQlLWlJ+79ig4ODOHbsGFpaWuD1etHf34+DBw8u2puP/gb9iMVsMR+CSN78/LwkzNWyIQZKdEoXKxtiAKYGqLoeb0dDdIe+UVFRkaF0iElTlg6tW7cOgLF0CICgimRj8Rhut/tTlw8xeOR5MRAh1VYdLwaPDHbJPFNreRNLiOj/TU5Oiv/h9/sxPDwsojKfpIyItGbgYiuVjRs3IisrS5Le1CLx+Xzo7e1FJBIRqqNaSkSfL1ExV7VgMIg333wTwWBQEvqJwd+HlROpRj9KLSfiXFEDuUS0U2XW8TxNprgCMgNUFQVV9Tjok6rfqQan6rmp95dBjRp0qsHRuXPn0NTUBK/XK+1M1OtV15L6wwTOHXfcgb//+7/HzMyMiPOoCCmPowqYJgaqvGbOO873WCyG559/XhJIRHLJxGtoaIDZbBZEUNM0XHHFFZibm0N1dTXOnj0Lk8mE1atXo6+vDxaLBTfddJMcr6CgQM5NpfUmrg2CLwCk28b58+cRiURw4403IicnB83NzXA4HALYbdiwQTRV1q5dK3O8vb0dmqahurrakKDYuXMnSktLDeMWDodlPJxOJyKRiPRwNZlMGB8fl7hjfn5e4hO19retrc3wzCQN/KWXXpK1esUVV1yyVj7OLpsAlRdnMsUVWPkaJ6GKfFGBlQjh/v37MTk5CeAiQpb43aqpAR9g7OfEbEU0GkUwGMTU1JSoganS8TwnlY4DQBBfUkVID+aG0NLSgoWFBfzXf/0XUlJSDLx+qryRqsqHADdS9TxJDeCxuXCo9Kuq91I2m9fKzySKGWRlZSEQCCA/Px+xWEwoBaQf5eTkSICoZqzUgJzHfO+99/Dzn//cgG7z2DabDVVVVULv7ezslFY9ExMTMJniwg+8LrV+iGNQWVmJU6dOIS0tDW63W/qVqtSTTZs24c4777ykRoNS/Jxb/BvnVyQSwVtvvQWLxYLh4eFL7jFR6mg0ivfff1+yy0lLWtKSlrTLyyKRCObn50V8kPvX8ePH0drauijiAizewo7/ct9Q38v6NSaHo9EoDhw4gOb
mZhHh4efD4TDsdrsEagxEVSSUP2lpafB6vRIQlZaWoqmpCTMzM/B4PGhvb4fdbofVasWVV14JTdMkGex2u6VlGxFDjkFimxgGzrwWHr+goEA0K5i0dzgcyM/PFyVbTbvYM5SMKTV4IwAwNzeHaDSKwcFBSaYXFhaKOGZ5ebmBwqr6cwzuNU0TxV9dj3cLAC4iei6XCzk5ORgfHxdqc0ZGBioqKpCWlga73S59bYeHhzEyMiJMr0cffRTbtm3Dpk2bcNVVV6GgoAC6ruOGG24QOjepvAz6E89R0zSMjIxg//79sNvtBoYZgQq/3w+LxSKo6ZYtWwzoMY29aU+fPv3JJvtHGHvZk5LK+0aqrzq/6ccx6FQTNzR1nqg+IQESzrOpqSl0d3djfn4eDocDubm58Pl8OHfuHJqbmyXYUf00HtNms0mXi5SUFKxcuRLj4+M4cuSIgb3I81IBHa5Dmpp4UecKAKkX/+1vfyvrgb5vRkYGNE1Db2+v3GMKAxFdJXhz/vx56LqO4uJiCa7JXgAgSSp1bbDc7JVXXhFRsJMnT+ILX/gCdD2ud8LepNXV1QiFQuju7sbY2Bh6enqwd+9eYTVwDN59913ouo7bb79d7mU4HMbRo0flumj0z0+dOiXJKjXu4li9//77BtYo73EsFpN4hHXj9957r4z3008//ZEJv4+yyyZA5U1SA1EAhkwQMzoMTlmDUFNTg9TUVEEVE+tI1AmcSMng62rWRdd1URPLzMwUOueSJUvQ3t5uoESoC4vnTZouMzVAnGvf2NiIAwcOyE1/4YUXDFC52WyW4Nvn8yEWi/fMmp+fl2vhOCQqnJEKRFlo0mJSUlIkK8oxZd1oolCSpl0UcwiHw3IMopdUwmV2hcdl9pPXHAqF0NXVJXQgPgzVB3UgEEBNTQ0yMjJko+Qm2dfXh5ycHNnsamtrJbPKYzPjMzk5Cb/fj/7+fgkUuWmlpKSgrKzM0DiZC4U1NX6/3/Cg9fl8mJycFMnxPXv2yLXxXtP5GB8fx969exet4Uha0pKWtKT99S0RnfT5fMKY8Xq9OHnypDzT6awlJrFVBIzfqRoDD+7DFCnhPnrixAk5JlFGMqHUxK2maRK0co9lOxr6HV1dXfD7/RgYGMDExAR6e3uRlZUFi8WCvr4+TE1NSS0nmVV0sklTdblcUnqkaXGFYl47X+M55OfnY2ZmBpFIBB6P55LX1fczma2ytjg+OTk5GBkZQWZmpqCUmqahtLRU0NP29nbDtROt5e/0nU6dOmXwK1gnSJSquLhYyo8KCwsRCAQwPz+P7u5ujIyMSPnO17/+dTz44IMoKSkRtdL5+XkpIyJSFQ6HMTs7K30o/X6/sLxUlJxzw263Y9euXYKWqsgkAEGvd+zYIYH6qlWrDGOm1na+++67hrFU6a1qUJf4f77PZIrXBpeVlWF8fByhUOgSX5vBhzr+qq+r3md1DahrgT6vyWRCR0cH/vjHP8Lr9WLZsmVwu93w+XyYnp7GwMCAKCwndoJQ19O5c+fw7rvvYs+ePTh9+jRKSkoEzWtpaZH668Tz4NxYDKVNXMtk3QWDQXi9Xvznf/4nwuGwnBfjh46ODkMQXlBQgCVLlkhyxWazScLBZrNJ8Etknr4pkX7OAY/Hg1gshunpadx9993Q9XjP5aqqKszPz2NwcBClpaXIy8vDhg0b4HA4cOutt2L37t2w2+1S46reN6r5UtyM95rJCFJ5gTgo5/F48Oc//xm6ruORRx6B2WwWJiJZDI2NjdA0Dc8++6yMAZ8XjF0yMjIQjUaxdu1aSVIdPnwYTz/9NP4Su2wCVE4mbg4qXYAZOf67sLCAyspK3HzzzVi9ejVqa2uRk5MDp9Mpix24lIrDyZb4w+xRIrVH1+M1ry6XC4FAAKFQCJ2dndi5c6dsRipczzrL5cuXy995szdv3oy5uTncc889sFgsKCoqwunTp4V6wqCJFBxSD9LT0w0BptPpBACRjeZ1sg9Rbm4uAAiinJuba1DuYpBns9kwPDxsGJdYLN73bXh4WPqDcTyKi4ul8Pn8+fPyADCb4z3Q2M9NpYUwiQBAsmSkTwwNDaGiogJ9fX3IzMyUfmdZWVk4ePCgQWGwpqZGCvuJ4kYiESxbtgyNjY1CH1AfRtwQgsEgrr766kuoHhRAUO+12WyWbKrdbsfBgwdx7NgxuFwuFBcXw+l0GrKNpEBPTEwsmoFPWtKSlrSk/XUtMTkJxJEpigdGIhEcOXIEMzMzhj2Ln1WdWTWJriI1KurE97Bths/nE8ooj0EVX7WtC+syVYovEE/cMmHNvZ69GllDOTg4CADSboWUWKfTKX5JLHaxx6PT6ZQ6S03TpOzFZDIZWucBkCSyyWQSZlcsFkN2dvYl9bJMzo+OjhrGXtfjLVlGR0dRXl6O6elpEUAsLS2VcVPVkGlEZYCL+h5NTU0SvL322msYHx8X1hoQpzMODg5iYGAAS5Yswfz8PG666SY8/PDDOHjwIGKxGPLz88WvrKmpkeunuq3FYkFFRQXOnDmDcDiMr371q+KH8P5rWpxG+cADD0hwTi0T0pkXS4STtj02NiaJgW9961sGf5Lj4PF4xNcD8JG6Kol+iMrQm5ycxPLly9Hf3y9gAa85USCM81lFrVV2mhoU8v+q/8UgsqysDBkZGbjlllvgdDoxMjIi7X7Gx8cRDAaRlpaGlJQUSSSpNOUVK1YgPz8fd955J2ZnZ9HU1IT169dj/fr12Lj0KsVzAAAgAElEQVRxI9asWYNoNLooXV9FVRd7XfX5NS3egoeUc4fDgZdeegnPPPMMPvjgAwQCARFA5TVv2rRJuotQdEtt33PHHXfIWPT29gqtOTU11ZAIom4K2Qucw0888QTm5+fx7LPPinL2sWPH8MEHH2DNmjVobGzE/Pw8MjMz0dfXh7q6OqxevVqYEEy83XzzzQAAn88niRrOfY4FEeSJiQlEIhGDmq+maQKSNTY2ig/O+82Y7MyZM7KuCgsLsX37dphM8faPTU1N+Nd//ddF5+xH2WUVoAIwLGCVz6xm6KhQywc6JwRVfk+ePIm2trZL+OnqZFwsaOX/1QekiqiSrsEFnfg+Zs4I/2uaJoXTjY2Nho1tfn4e//iP/yiZS2ZeZ2ZmDLLW2dnZBgotJ426uXCS5ebmwul0IhqNYnh4WKiyuq5LXS2pAJqmSZZT7YVaUFCAwcFBFBYWCm3WbDZL4+jU1FSkp6fLPeGDiAqJnMwqvYK/k98fjUbR3d2NlJQUDA0Nob6+HmfOnIGmaaitrUVXVxcWFhaQnZ0NIL7pHjlyRLKaHo9HHkqk+W7dulXql3U9XldBysLmzZsN/bw4vxbLrvEhEQ6H0djYKBL2brcbtbW1KCkpEQeEc/Xll19OUnyTlrSkJe0yNDWAZK1mOBxGJBLB3Nwc5ubmYDabce7cORw4cAD9/f0ALk1o8zWidDT6FuprPCZZXpFIRHQnwuEw5ufnRTiJxsBSdRwByJ43OzsrjqTJZBKBoUgkgieffBIAsGvXLnH6gfjeHgqFDCrAQFyYh36FijADkDYuvA
7Sk5lcZkBFtU81MGeCfHx8XGobeYz8/Hx4vV4JSAcGBmQvpv8UiUSkpIfHp3YEzykSiYh/88477+Do0aOG4ACI16E++OCD6O/vl2Q6g6DW1lY4HA5BZs1mM7KysnDmzBlpmcExrq2tRWNjI+x2O5YvXy4IIzsWaJqGW2+9FStXrhQfhyVE9AeZLFAT+ACELbdv3z7o+kVtDF4ny6LY/oTGAFUN5D7sR/WZA4GAJAc4xrx/nLtqGZsawKm+s+pzqvdJnQdWqxUNDQ3w+/0oKSlBW1sbpqam4HQ64Xa70dHRgf3798sc4fWpxyZif+2118Jut2PdunVwOp1IS0sTTRGHw4GJiQmMjo6iqakJLS0tcj2q/5n43WrCKhFRVeeLpmk4deqUlOOp152fn4/m5maYzWZBFxsaGoSWXVZWJt/lcrkEwVTPgcenL9nX12eg3pMJabPZkJubi+uvv17WeG5uLhobG7F371689tpr0HUda9eulcTVm2++Cbvdji1btkjgG4lE8NxzzwGI1/XyucXP/O53v4Ou63j00UcFKKRNTk4KePT8888bYjKOG2vSdT3OVikoKEB2djbsdruISX0au2wCVC58tQF1IrWAg0lKRWtrK9555x28/PLLePvtt+Hz+YS+Ojc3hyNHjsiA8YcPMnVRJlpiJgaAZDizsrKEcz0xMQGbzYaOjg5B1tLT0w0Pck7Kubk59Pb2SnYuHA6jqqoKmZmZcm7Z2dmSYWWQl5OTY1DWc7vdokKmPrQ0La6eRalooq4FBQVwOp2C8lmtVhEzoOy1ShkpLi7G3NwcqqqqDJlC0o9KS0sN1GVeJxcgAKnR4N/UseC/rEXJzs5GZWUlxsbGYDKZUFZWJiIEOTk54gwcPnwYvb29InLgcDhQX18vSskNDQ1y/bR33nkHVqtVGm1znHjv+dBZu3atbIJXX3016urqREiKaDAD/IyMDKSnp0v/Kz5EF6sfSVrSkpa0pP11TUV7bDabJJFTUlJEmITvsVgsGBkZES2ExXwEVcWext9VdEv9e2pqKrKysoQhxvKRgoIC8W0okKLu7SZTXJNjYWFBfCOyxEhX/sIXvgCXy4VgMIj29nYDu0qllDJAtdls4leoLCzu90xcq467zWZDXl4eLBaLtNNQES/ukwUFBTCZTEIlVo3IEIUj+/r6hGmWkZEhta0UaaS/EAqFRC+DVEKz2Yynn34au3fvlutTgxKeZ25uLrq7u6W3Jcdxfn7e4N9EIhHs27dP/A76Kaxl1bS46E9mZqb4QgRH6uvrMT8/j7q6OmH+cdydTqdhPqhjBsSR3gMHDsBqteL48eMGqjUTHGpiIdESx1gNwlQLBALw+XzIyMiQGlQAImilBqhcCwxsE79TnZuJYA8/F4lE8MILLyAnJwfd3d2orKzE6tWrAQDHjx/HypUr8cgjj2DlypUGSjLbHMZiMYyOjooPaLPZkJqaig0bNuD2229HXV0dVq1ahbvvvhsPPvggnnjiCaxcuRK5ubk4d+4czp07J36eqt2ini+vOTFA5ZxgosjhcMiY/fKXv5Q1QyZDfX09ent7YTKZsHbtWhw9elRotaRpZ2VlGZIwiQg0a6rPnTuHFStWwGKxiGgr2xuxL251dTUcDgcef/xxOBwOXHXVVVIywBjJ4XDAbrdj7969CAaDBsScddQ333yz4TwYGJOFEQgEhKYMQBJ5ZrMZ2dnZKC0tlaQM/Wp2KXnrrbcAAL/4xS8ETOT6/zR2WQWo4XAYfr9f2qUQTlcDVQaKH3zwAfr6+hAKhaTPZjAYxPz8vGQAbTYb+vr6cPjwYaG2ABcb0KobyGK2WJYIgAgElJaWiqT4G2+8gd27d8sGyBvHvlyBQAB5eXkIh8NIS0uDz+fD2NgY3G63ZDsLCwsxMTGBWCwGn88Hq9WK4uJig1IeKbu6rl9Cq83Ly8PQ0JAETpoWLwIvKSkx9D11uVwGMQReG4PkYDAIt9uNUCgkkvFUFbvmmmuwdetW3HPPPQYUkYEo749K5+XfSAVgRnZwcBDXXXcdurq6sGLFCulvW1RUJKp8zNz6fD68+uqr0DQN69atk8B/7dq1+P3vfw+fz4dvfvObQk+KxeJ90CYmJgw1PSpFi9LcW7ZskQeZSqPSdR3Z2dlCjeL9LywslEzv8ePHYTKZ8Itf/OJ/twCSlrSkJS1p/8dNrc1igpuBCgObsrIyXH/99Vi7di02btyIgoICafG2WMKapu599CsSnV31fVlZWYIysqznzTffxODgoPTTpNGxTk1Nhc/nw8qVK6FpGnJzc1FUVASTyYR/+qd/wiOPPILt27fDZrPB4XDgqaeeErEWBuW8FgIBLFvi+cViMWF+TU5OSgDGIN3pdEo50sTEhCBcLpfL4Efl5uYKbVYdC44jWWLARQRR/dzk5KQEjCqyxCARuBhQeDyeS8RewuGwBFUejwfLli3D7Ows6uvrcf78eaFwHj9+HJFIRO4FEGelMShjAj4Wi6G0tBStra0IhUL4+te/Lgl0XdelVCgajeKzn/2sIaFB34VjkCgcNTw8jP7+fphM8frQ9957T9BXgglA3Dfu6+tbdO6pKrtqkkH1d5h0YPlYOBxGQUGBYSwTe+8u5hvze1UdF85T/queT3p6OtxuNyYmJrB7927Mzs4iMzMT4XAYRUVFwohT1aSrqqrQ1dWFsbExzMzMGBScAUgLwcrKSszMzKC/vx+tra3QNA0PPPAAHn/8cTz00EMoKSnBxMQEPB6PrHH6fvxXvS7+m+jLkqLKQC0Wi+Hf//3f8cILLwjt9aabbhKRMpfLhVOnTsFqtYoQGMeYfjvpujwXsznevknT4hoohw4dkvGMRCLo6enBSy+9JGvi1ltvRVdXF5YsWQKPxwOv14v169ejv78fTqcT69atk6TG/v37oWka7rvvPkNSoqmpSSjLvK5YLIZgMIjXXnsNAISiTFCNQM2JEyeg6zoefPBB3Hbbbdi2bRu2bdsGm82GrKwseL1e6a7CNcAEw6e1yyZAVReuStdQ0T1OLHWTYWaQr/MnEAjA4/FItu/8+fM4ceKEbCKL0RRUUyeqeo6JmxVppWomqqWlRT7Lh7vZbMavfvUrvP322yL0EwqFRBkOiKOjzOzxHNlUm+fK2lBd1yWryGuxWCwYHx83oKyaFhczoBw9x5AiQ4kPfR6DGWT2KNW0ONWHwTILpBMpKVxwaq8w/o21Gey5RkS5q6sL69evR0dHB0wmE2pqatDc3Cwb+uzsLEZGRkSBUBWxyMvLw+joqCzubdu2GebRrl27YLFYsGHDBkNbHSD+ACgvL0c4HBb1Qz6geQ3qAzsQCCAajSI1NVXGxGyO95FSBaCSlrSkJS1pl4cRsaOpTir3wvLycmRmZsLlcsHhcCAjIwNOpxNTU1M4ffq0ge6o7pl0ntV9Sf1JpEbye4hwUEiwvb0djY2NQkHmXs2gjugdkY+srCw4HA4cO3YMmqbh+uuvh67HNTO+9KUv4fjx45Iop9AikT++j6KSPBZVaefm5sRx5bXm5OQgOzsbuq5jbGxMvicvL08CL
/og3BfZ510d95ycHAwMDEhCmH5TZWWlOPPt7e2Coql1sWoynnVyKiOOviAd6vPnzyMrKwsVFRUoKCgQxLi8vBwnTpyQ0h0KJlZVVaG9vR2RSATnz5+XIHTp0qV47733YLPZsHTpUqn3pX/D72X3Axr9DAapFJlUnX2v1wtd1/H6669jYGAAuq5jxYoVWLlypaCHuq4bujZwHBIDq8V+VFVh+ptMLqgMOiJ+PDd+byI4pL6mzh31PGhVVVXw+/3o7e2Fx+NBdnY2fD4f3G43PB6PJPc5r0ymeEuTuro6NDc3o6GhAcPDw6JgzGvhuDc3N2N4eBjp6ekIBAISrBYWFuKRRx7B97//faxevRojIyOyXj/sXNX1rL5PXd9paWmCzE9PT+PZZ5+Fy+UyoMxkNvz85z/HL3/5S6xYsULWitp2h8ko9XzYYnJiYgKpqalS580evKSMj42N4Q9/+AP8fj++973v4YEHHoDNZpMglqVxwWBQBKuuvPJKObbH48Ebb7wBAPjOd75jQHunpqbQ0dEBTdNw4403ore3F2NjYygsLMTtt9+O6upq7Ny5E7FYDFdffTWuu+46ZGZmwmKx4G//9m9FRweA9Ct+6qmnBP39tHbZBKiEnhl88uGp8r4Xq/VQTaV5hMNhLCwswOfzCXrqcDjQ2NgoctFqZkadsJyQKrqYaJxYFosFdrvdQMMZGhqSxUyqi8lkwo4dO/DTn/4U3d3d0HUdx44dQ2ZmJpqampCRkQGXyyVS3Nzw+PBU1bJIE1EfWrS5uTkUFBTAYrFIpjMnJ8dAQWXgB0ACYma/qNjb39+PvLw8DA4OCt04Ozsbw8PDMh6qeq+maUK9oak0I6KTzMDpuo7x8XFEo1FkZWWhuLgYGRkZsNlsKCgowJEjRyS4jkajklGjs0DKC+85g97E3mRdXV2wWq1YvXo16uvr5fjM0lKpbPPmzQaBAn43g1Vd12UcTCYTysvL5UF5/vz5j0x2JC1pSUta0v46pu5J3D+Bi/uSuo+Fw2GcPXsWO3bswOHDh9Hd3Y1gMIgjR45gYGDAgCyp5SvA4k4vcFFYiY4vgwAKDFosFtmfFxYWYLPZMDc3J3uN3W5HIBCQ5DT7L2qahvPnz2NgYEBarfj9fmzevBm33nqr6CVkZmZienpamGZkndFX4n7JgIi+l0rdJOIWjUYleQ1AAj9ev1obSdRTDaiKi4tFEJF0apPJhIyMDBGTys7OFh+H+3VhYaEB2UoMjngf6C9Go1FJXFdXV2NsbAwZGRkYHx9HbW2tnLPFYpGWdjU1Ndi3bx/MZjM2bNgg/ozNZsPk5KQkqYuLi+WYFosFe/bskRrPa6+9VoJrBiYul0vUm1XtC5Y3hUIhTE9Pyxykv1pYWCjtgRLpkepcUn8S/8b/U5CKirHsTckxTFQZ5ufU31VqO++xet9p9KvXrVuH2dlZ1NXVob6+Hr/97W+xe/duuFwupKWlob6+HqdPn8bMzAyi0Sjm5ubQ3d2NU6dOweFwYHx8HGfPnkVxcbEB/ADiNHgyAxjYRiIRLF++XJDMhYUF3HzzzfjBD36Ae+65Bz09PbL21OBQDRITE1kqm44JENYyh0IhBINB/PrXv4bZbMapU6fwwQcfwG6349FHH8WPf/xjXHXVVTInqZkDwMAw5L3i/M7PzxeqPFVxeXyHw4Gf/exn0ru0rq4OZ86cwalTp4Tmu3z5cgFXfD4fXn/9dQSDQYTDYYyNjaGzs1OYBFlZWdi0aRN8Ph/Gx8cxOjoqYqHhcBi5ubmIRqO45ppr4HQ6UVpaCqvVKsAa1zXr7MvLy0Uobf/+/XLuX/7yl4WB8WnssglQgYty8KpSKoveeRMpEqAuQNKDEzcAdbH5/X5ZmKOjo3j//fdFGY/fwd8TH35qVkk1HouTjoq/vNl+vx8HDx6UG6rruggWAPGJ+MMf/hAOhwOjo6NCpSVCR2UyXddlA4tGoyJ4wIcOjSgle3exVQr7sJLWy43CbDZjcnLSENSZTCaUlJRgbGwMFRUV8Hq9srDLyspkk9Y0TepnOBY8X/LuASMSTboQM67kxl9xxRU4f/481qxZIxsz5ez5YLdarejp6cGhQ4fEyYhEInjxxRfR39+P6elpWCwWNDQ0GGTprVYrurq6YDab8ZnPfOaS5AbFndTaYRqzstz81TnCYn2LxSLIcNKSlrSkJe3yMianVeSH+1A4HIbX68Wf/vQnvPrqq3jjjTfQ3t4Ov9+PmZkZ2UM0TcPw8DA++OADofYx0fmXGgMY1oQy2QoA+/fvx759+8TfYfI1Go3C7/ejpqZGrsHv9yMvL09QGovFgsrKSmktQ8SK+5gqwsJEr6Zpcg4MVtSEfU5OjjC21B7tOTk5oh+h7pcmkwnDw8PinPMnLy8PgUBAkvAdHR0A4omDjIwM3HvvvaipqREGE+mfKSkpBn0SssNUn09lVlEcKhKJIC0tDZ2dnSgvL8fZs2dFL6OtrQ3RaBQZGRnyeZ6zKl5JQaXW1lZEIhHcd9990v9V13WcPXtWkOyNGzfKPaCQpdlslg4T7A9pMsX1QRwOhyBYpPcSVTaZTHC73eLfqvNZ9ZVZ08r5rb5G/5RMQt5vjpM6FxZjDQIX+5ryXvKecJ6oFF1dv1hLbbPZpMtGS0sLpqensWbNGlRUVCAlJQXt7e0oKSmROuGBgQGMjIxgYGAANpsNra2tyMvLQ2pqqgA1PEZaWhrS09Nx1VVXoaGhAeXl5Vi2bJnMARVtB+K+5+zsrAigqf48v5efVecr56a6FjhWROqDwSCi0Sj27NmDP//5z6isrBQwg9Rgfr9aE6sCT7quC5iVlpaG6667Tj4TiUTg9XrR3NwMi8WCmpoapKeny7wYGRmR+CAcDiMrKwsDAwNob29Hc3Mz2traAAC33HKLrOHJyUmcPHkSsVgM1113nXQNYVyxc+dO6Hq85QzHgCWYmqbhjTfegKbFRaEmJibQ1taGM2fOIBAIwG63o6+vT5gOZrMZq1at+ouERC+bAJXF8MxKMFBRKbkMfpgNYE0jkVdVtYuZPIomARBk1WKxwO12IxwOCxzNTJ16Pqqp2SRaIrLLjF9OTo4ICnBjWbJkiai0zc3NIRaL4ec//7kEkJWVlaJwm5GRIVQCPiA5sXRdF8pwovw4Fy8DUraaASBCTXyIFRUVSZYxMWhj/WlZWZn0VmNQywcdANTW1grKCEACZ7UulUpovHekZmtanOI8PDyM3Nxc7N+/H263G6dPn5ax7+zsRCQSgdvtlnpUBtQVFRU4fPgwTp06BY/Hg8bGRhnvpUuXGq7n7bffRiwWl79OzLiybiccDovaGXDxoRQOhyVwJSLOzSEtLQ2aFqc+88GXtKQlLWlJu7yMiW7AyEKyWq3yL2m3VK8nQsOAlK+dPXtWVOeBS9tWfBJT38e90ul0isYEA8+3334bR48eBQBx/IPBIDIyMqTGKzc3Fw6HQ1pVTE1NIRQKyb7FljJEJr1erwRE9LM0TTOoyLI+lE4+hRZVcID1rURUOQ6FhYUAICrB6nWSxlhSUoJIJIKJiQkRfWKA
3NDQgHvvvdeg8KqOrapQq6JQDJaIJMViMaF32u12rF27FgMDA6LUe/DgQflcNBrF8ePHEQ6H0d/fj3A4DLfbLXPjyiuvxL59+2C1WrFq1SpBIJmMb2xshNVqFSojx5dBj8ViwQ033IDy8nJBGBn08Xuoskp6tKZp4mOo80VlAahjqwZkfI3/9/l8UstMgUc1uFSTAeocXQyZVYM+JljU12kMlG02G7q7u/E3f/M36OnpQU5ODlpbWwXBGxkZQVlZGc6dOweLxYLi4mIMDg6ioaEBK1aswOHDh8VnDoVCmJiYQCgUwurVq1FeXo7s7Gw899xzgniztE8FXYhIejwenDx5Ej09PYbgkPOU46oCYirjQb1GAmhMMkQiERQXF2NgYEBqP202m9SXqvOFaDpwsTTO5/MJSv/aa68J4rpkyRK0tbVh9+7dCIfD2LdvH3bt2iV9S4niHzp0CC+//DL8fr8kOXivU1NTcccdd0is1NLSgl27dkHX48Kf7NkcDodFcdlsNqOyslJUmMnYHB0dFWVpXdfx1ltvobGxEUNDQ/D5fGhtbZV47Ne//rUkztasWYNPa58oQNU0rVfTtDOapjVrmnb8wmvZmqa9o2na+Qv/Zl14XdM07ReapnVqmnZa07RrPuExJLvAHwYzXADqe9VFyywnC5VVmglw8YG2Zs0afOELX8DnP/95rF+/HkuWLDEoxXKjYGZS5Warpi7aRLU1PpBsNpuhNU04HEZ9fT2uvPJKycyGQiGcPHkSJ0+exK5du+SGU0VuampKKCJTU1Ny7OzsbGjaxd5EKlpstVoRCAREqY7XlJGRIZRfTYs3v+YGweCXlpaWJoG11WrF5OQk7Ha7bGCkVVAsSF18HKvErK9K81UR6vPnzwOI93Xt7OxEe3u7qCUfOHBAKDp8+Hi9XrS1tSEnJ0cUwzRNw5kzZ2RhcKMD4kEo+72Gw2FRkuN5hsNhyeKuWrVKMorqPWcG3mw2Iz8/X5DgzMxMcXzUZEDSkpa0pCXt8jA6S2rJjkqrI2rIn0+iaRGNxpXlmTDl936YJaIytMQAAIjrWqSnpwvVdmxsDEuXLhVtC6JgqrZFX1+fQduC7SmAi9oW6enp0DQN09PTiMXiyvsU/zObzZ9a34KImqpvQZSUyFIiygnE/Qt2CVBVQNkCRdPiGhdUCVVZbjSr1Spgw8fpXND30nUdDQ0NonPR398vAXNWVhZ6enowMjKCxsZGABA/DLiodUFBm23bthnomuxKEAqFsGHDBvF31OBm/fr1H6t3wZaJKuVbDf4Xm0+JP4sx/th6aHp62jBeaisgNWCj0WdTj/thczrRR+e/bW1tePjhh0VFuLOzU9oV2u12aaG0bt06pKenY/v27Vi2bBmAuCCXz+fDgQMH8G//9m/o6emB1+vF888/Lyjrn/70J9x1113C4KusrJS1QV/T7/ejrq4OS5cuxfXXXy8ovQrOqOVn6rxV1+qHAVepqakCogQCAfT09OCnP/0pQqEQcnJyAMR9yqGhIcOaYbsYsicoutXY2Ci+KBmlMzMzMj9Mpni7JJvNhm9/+9uylvr6+qDrOr73ve9J/NTa2oo9e/aIhgqDburQxGIxLFu2TJiNXNcsp/vRj36EwcFBnDp1CocPH4bT6YTJZMKhQ4egaRqeeOIJ1NTUSL9hltJRM4bjtmHDBnxa+zQI6s26rl+l6/qqC///fwDs0XW9BsCeC/8HgNsA1Fz4eQTArz/Jl3NzYIRPyBu4mDmkqQ9NSiKrmQ01u6TSHoqKipCSkiIiBGazWRRnOzo6hMrKh4aaLUzkrH/YZFUnNLO0zFAwCKb6r8/nw+TkpNQ1DA4OwuFwCEWG3Pzs7GyhvGoX6DbMSqhmMpngdDqlnkENugsKCjA2NmYYQyCeWWMdCMeVBeGDg4OX1NmwdykAKcDmsc1ms9SdMDvHBcCfRJSXgXdtbS327dsnVJaGhgb09/cLwkk15IGBARw4cADhcBh2u10CRpPJJG1lKPbAc1YX4i233CLXSmGjnTt3SkKkoqLCQE/mv06nE7quY/Xq1bjpppukmXNWVpbMxaQlLWlJS9rlZaTg2e12Q1u1/42mBbUiuru70djYKAiK6rCrv6v7H//2YUZ6psvlEiElOrcmk0lKdeh879ixA9/85jcN2hY+nw9zc3MGbQuXyyV0QV3X4Xa7pR0d8On1LegfJbZYy87OFmeXSfJEjYvs7Gzk5eXBZrPJe/Lz8w0aF9XV1eLbMWhQgyFSaYGP1rkYHBxESUkJjh07huuuu050LmKxmAg+5efnw+PxYGxsDL29vbDb7UhJSblE64JJcIvFgtLSUrlmm82GgwcPAgBWr15t8Kfo25Jh91F6F6Rlc97qui56FzSVIpr4w3FQf2ddsdlsxsTEhCg5M0jheNLvVb9PLcmiqUgtz1VFVdW5FIvFhbH6+vrQ29uLvr4+FBQUoLa2FhkZGVILXFpaivLycnR0dKCoqAhpaWnYvXs35ubmBNXTdR2HDh3C8PAwvvWtb8Hn8+GVV17B4cOHcebMGXi9XkP7IM5lTdPw6quvIj8/H7m5uYIqss2gCv6oPj6BLzVoTUwWqL/rui7f6/F4EAqF8LOf/QwVFRXyd6KpHCfWmFZUVEiADcSTB21tbbDb7bj99tsRi8Vw7tw5jIyM4KGHHjJQcUn7J41Z1+P9UHmPR0dHJeny1FNPweVyITMzE4ODg3jllVcAAE8++SSWLFmC6upqVFdXY25uDm+++aYk6kizTk9PR2pqKsxmM/bv3w8gXof+5JNPYmBgQGKVUCiEqakpRCIRPPPMMwa0/tPY/4bi+zkAv73w+28BfF55/b/1uB0GkKlpWuHHnsiFDAAXMR82vCgiVeqE4GRRs1R8jQuTwYOmaWhtbYXf70dtbS2uvvpqrFy5EuvWrRO1YPK8Sd9JzJ4sRnlQ/6aael6k7zCD6ff7MTc3h6ysLNTV1cFms+G1117DyMgICgoKBN1ks+vq6mqMjIzIdaiCAURReRtt3FoAABiCSURBVMzKykp0d3dLMTPHYMmSJZicnJT/E4mljLU67gBQU1OD1tZWaffS1dWFaDSKK664Aq2trRLQVVVVyT2JRqNSC8oJSTRZFQ0gUs4sa3d3N1asWCFZ2YMHD6KyshKxWAxnzpzB/Py8fMbr9WJwcBCapuHxxx+H1+tFRUUFenp6xHGoqqqSTCmzmidOnBC6MTO7rIOhSrCu6/j6178uf6epD70bb7xR6gPUjPFfwq9PWtKSlrSk/d817j10mPk7KXzci2iqSKP6nFcT35FIxCAweOTIEXR0dMh38j2q/kKiAM1iQap6TPowREapbQFAKIDUtmCyHohrW3i9Xhw8eNCgbUH0xePxwGq1Ijc3V2i43Jc/rb6FSgnmddrtdnHu2bKGRo0Lv9+PiooKWCwWAQZSUlIMGhcUi1E/z6Qy/Q/g43Uu+vr6YLfbpR8n6bPp6ek4cOCAaF0woR4IBDA1NSU0Z1XrYu/evaJ1wcCDvs6hQ4dEiIrnwL8DEPXTj9K74D1ksMHkOFsRqnNksTm
zmJ/KJIDNZhMxIc4/+tXqeNKXIQtRnau8N/THAAj4wuvg2qI/XllZienpaVx77bUIBAIoLy9HKBTCyy+/jMbGRixfvhxmsxnf+MY3MDIyArfbjYKCApjNZgwNDaG2tha5ubkwmUx4+OGHsW7dOrzxxhv47//+b6SlpeGBBx7A4cOHYTab0dLSIokb9YeIu8lkQl1dHTZt2oQHH3xQ1Ky7u7sxMjJyCQKtftdiSLE6/1TAKisrSwLVP/3pTwDiQFt+fr787vf7pX+xruuora3FwMCA1HHPz88jGAxi3bp1Eg+9+uqr+MpXvgKTyYSOjg4cO3YMuq7j+uuvR3p6OoaHh/HKK69IW8vCwkKUl5dLKcPy5cuRl5eHrKwsYQ+q9G4CfrxuxgtPPvkkHn/8cdx9990GUJDMQVWhmrTnqqoqpKSkYH5+HidOnMBfYp80QNUB7NY07YSmaY9ceC1f1/WRC7+PAsi/8HsxgAHls4MXXjOYpmmPaJp2XLtAGb7wmtx8TopgMGhobKsuFFpiEfNilAhm0yoqKoTCCsQfvDfddBMeffRRlJWVCS335MmT6OrquiQgTZycicf4sMwoXyfqxl6kXOh5eXloaWmRvlzcEEymuAy31+uVa2b/TgCGGhI+1GdmZmSBs5es0+kUGgk36uLiYphMJqmrVK28vByzs7OCdHZ1dQGIZ7pY/K/rOpYsWWLICjNLy8SAujGqFBJSV6LRKLq7u2GxWKTovKOjA5FIBPn5+VJbWlNTI8IIzN6yyXp1dTVWrFiBYDCIxsZGqdtVHzb79+/H0NAQdF03FK5z3N566y25R3yI8Jr44GENz8LCAjZv3iybh8lkMvRTS1rSkpa0pF0eNjc3J+gE1VBV6hlpoeqPiqyqCW9V04L+id1uR2pqKpxOpzCd+HfuHSpVlbaYL6EirurfKaSUl5cnQiWqtsXAwIBB2+K1115Dc3OzQduC30dHnQGqmuj/NPoWDBapb6EysNjShkq6qrndbgwODqKsrAwLCwuicUEkSB1f6lzw/FQWFo/1cToXk5OTojAaiURE56Kurg7nzp0TrQt2ZBgYGBA0tCJB64KU7mg0avATgLiTTt2QRF8wFouhra3tY/UueA845gRt1ABVrcFVUdLFftQkPVuUqEJJiYg/x+/DfF5+hnNa0zSDCrZqNpsN/f39OHr0KEZGRtDX14fp6Wlpm3TnnXfiqquuwpEjR6DrOh599FHcd999yMrKwtDQEB544AEEAgG88cYbaGlpwaOPPoqdO3ciNTUVs7Oz2Lp1K1JSUvD222/j8ccflzI+Jm3UNXX//ffjoYcewv3334+1a9eisrISFosFDz30EH7yk5/ghz/8Iex2u9Smqr4h56Y6FpxviyUL1N/dbjeWLl0qyD6fPxxr3m8CTW63GzabDUVFRbDb7RgfH4fdbsc///M/iyBZIBCA2+1GSUmJ1Fs//vjjcLvdcDqd0qXksccek4TJxMQEXn/9dUkkWSwW3HbbbTCbzXj22WcBAF/96lcFqGLJ5DPPPCNJGgAoKSmReWexWPDb3/5W5ld6ejqcTicikQiuvPJK5ObmwuVyIRqNYu/evfhL7JMGqOt0Xb8GcfrutzRNW6/+UY/flU+mDnDxM/+p6/oq/SJl+JIHtgrVJyKaKqWB/1czHerf+TuV/NTv5QJPTU3FV77yFXz729/GsmXLpMD95MmTwttfbCImZlkTsyz8N3FzIqJKJUFmHoeHhxEKheRBwUCMG2diJpb0BB6b9ZcUBOJDhKimWi9TWloKTYvXezD4o7lcLuh6XEwpFAqJ2ACV5hg8FxUVyeQ1mUzIzMwUChHvmUrR4Vj4/X4JIknrqa6uBnAxI9fQ0ICxsTFYLBZ897vflYzr2NgYRkdHEQgEcN999+GWW27B0aNHkZWVhebmZkSjUdx8882orKyUY87Pz+Pdd98Vio1qCwsLGB4elszn5z73OcPfWZwfjUaFzrRq1SosLCwI9TcxwE9a0pKWtKT99Y1JWO7FKtNK3afpdHHPdTgc8n8VTaIzvn79enz1q1/F1q1bcccdd2DVqlWoqqqSxvQMGBPbVJA2rDr7DK6Ai7oWag0s/2632yWJHggE4PP5UF9fjyuuuEJoqJOTk3jnnXfg8/ng9/uFCjs7OyvOJPdq1onRf1CT1qqPZDKZkJqaiunpaSwsLMDj8UiP9JKSEvT29sq1qMyqgYEBCTjpA6SlpaGnp0eSBhMTExJkmkzxliFE4RjEcmwoyGQ2mwUZJurLe8uxY2Lc7/djaGgI1157LX7zm9/gzJkzsNvtqK+vx8LCArZv3w6r1YobbrgBwWAQ4+PjOHHiBMxmMzIzM9HS0iLzwOv1Yvv27dB1HRUVFQZ6uNlsxm9+8xtYLBbcf//98rrP50MgEIDVasWOHTsAxP0HfjYx6EtNTZXfH3/8ccRiMUMCXPVvF/tJfI/P54PZbJaWQCq9WGUPqOJNXDfqfeO8Z7ClJmISkVu+LxwOY3Z2Fi6XS9iBfr8fmZmZcDgcePHFF6WHb319Pfx+P2w2G4aGhjA+Po477rhD+nmOjo5iy5YteOWVVzA6OoqhoSHcc889uOOOO6RdEFs+qr4s/bpE3xmA+MUZGRl48skn8R//8R/YtGkTJicnxQ9VE1YqsMXr/rBnCt8XCoXg9Xrl80QzOZ4+nw8OhwOzs7MG4aScnByEQiH09fVh48aNSEtLE6BqzZo1sNlsmJmZwbFjxyQRobIVly1bJsmDUCiE1tZWAHEAhqDN9PQ05ufnYbVacc011xiQ761btwq7UPXl8/LyZB1zXgDAP/zDP8Bms6HiArPAZDKJoBUQFz39tPaJvGpd14cu/DsO4I8AVgMY0y5Qdy/8O37h7UMASpWPl1x47SNNzSapQaX6+4d9LvEziYGiGiB9WLZDRds2b96Mbdu24cYbb0RWVhampqYwNjZ2CRUoMShVX0v83kR6Dy0zMxM2mw3hcFg2mLfeesuwsXHSqW1iOMlJ26VRUYxB7dzcnNADiFDyXFg/CcRFini+6kZdW1sr2T2ee2pqqlBxiNACF1VxWddhNpsNGRt1TKgwxmB3fn5eCtetVitGRkYkgO7v70d5eTmuu+46WK1WBINBHDhwAACwYcMGeL1efOlLXxLpbYvFgszMTEPfK5PJhM7OTlgsFlx99dUG2XZuck1NTQDitS+s0+Bnw+EwUlJSsHPnThmP2tpaGY8Pm59JS1rSkpa0v66pDB4V/VCDMP6Ne5X6d5rqS7AforpX0+EdGBjA0NCQqLt/WBKTgVViIvvDSkZ4DCC+f6tIL3sRmkxxgZKhoSF0dXUJ44iCk6pPkshsSk1NNVyP6qs4HA4RSKRjDcQZXawj5fszMjIAGHvNq35CKBTCzMyMOOkqxZDaI7wfallSYh2bKsTI76aDznGKxWKYnp6GyWSShH4sFpPvpdNfUlIiIADFIKPRKFwuF9xuN4B4gr6np0cCZCYOiIZR5Km0tPQSereu6+ju7hY6LVv9JV4PAEGyWO+nvu/jAtREv9nr9cJms8HtdhsQXp7fYv
7rR/nc6ns5lqqp82vXrl04fPgw7rnnHhQUFMDr9YoQUF1dHdasWYOzZ8+iu7sbLpcLeXl5mJubQ1FREaqqqrB//36puUxLS8PevXtRXFws/q/b7Ybb7TawJhNRZPW1RMSZ94cJqFgshmuuuQY/+clPcO+99woST0scM95XFdlXjWV2qr9JhJfzLC0tDX6/X3oBc33Qr2xsbJSWSUx03HnnnfIsOnz4sEFMdm5uDidOnIDf78e6desMNGeHw4EvfvGL0DRNQBcgDuCoNbBkGsZiMbz55puGa6PAGRNE7733ngCAnNt+vx9jY2Po6ekRRHfXrl0fOZ8Ws48NUDVNS9M0zcnfAWwGcBbAdgBfu/C2rwH484XftwP4Wy1u1wGY1S9SgT/uWIYMUOJi4QPlwwJFdZPhA4obBicgJ6WawVCzIHy/y+XCypUr8dhjj+Guu+5CX18fDhw4IA8zHiNxgwGMGbHECZv4mq7HOdtsLUMxBqqCqeJIpAFEIhHZAFSKjooIU2Z7ZGREMkjZ2dkyIYnY8hqIkNLYMLu4uFjavLBOo6ioSII/ysPzWpgx4vgwQZAodMVFzfPp6OhAenq6TPCxsTGMjIzAarXiwIED0HUdW7ZskQ2yvb1dMtVutxs1NTXYsGEDamtrYbVaEQqFkJeXJ8dl9rmnpweRSASFhYWGB1okEsGePXuEznD11Vdfct9CoRDOnj0rc+SOO+4AAKExJC1pSUta0i4vYzBBaiMTvrTEIJXOWGKinP9PpPh6vV5EIhGUlZUhKysL586dEwRmdnYWzc3NBj9B9Q0SfYFEUcLFggU1MEhPTxcxn4WFBZjN8X6itbW1mJmZEb0Il8uFyclJCarJ2gIgrfY0TRNdCgAGpxqI0w9HRkaQmZkp9YEmkwl5eXlCCabflZ+fL4qhPJbqo0WjUQwNDQmVkR0LiouL0d/fbxgj9m8F4oEvES2iRkSIVZ0LBqAcJwaVbAPT3d2NcDiMoqIiQXFvuOEG2cfHx8dx5MgRRKNRPPDAA6iurkZRURHOnj0ryfxoNIr169cbxtFsNmNqakqEGQEIGr+wsGBg4915552GAB6AXJvL5UJFRQX8fj8CgcAl8/XjglP1fUNDQ0hNTcWSJUsM+hoMrBOVbNX5r87TxNf5PYlJAx43HA7ja1/7Gr7zne/A4XBg165d2LhxoyQ2pqenUVVVhaysLMzPz8Pj8aC6uhpbt27FypUrYTKZkJ+fD5vNhiuvvBIvv/wyrrzySvzP//yPMPyInKpI/6exxT7D+KKyshKf+9znDGJc6ufUhFci+EW2Bd+rtiocHh6W45K+zrnqdDphsVhQUlKCsbEx6LqOtrY2xGIxSZK0tbWJwrXJFBdTtVqtePjhh2EyxXsEv/vuuwCAu+66S+6Tx+PBO++8g9raWgOi7PF48Jvf/Aa6ruMrX/mK9DJNTU3FunXr4PV6YbVaZd7W1dVJUsliseDEiRMCnpWUlGBoaAitra04deqU9DyemJi45HnySeyTSI/mA/jjhYu0AHhB1/VdmqYdA/CKpmkPAegD8KUL738LwO0AOgH4AWz7pCfDjAJN3QgSJbBVuo26kFj3qAaxDND4u5oFTcyQqhlKIL6IKZus6zpOnz6NlJQUrFq1Sh6CiSjtYhsQF/ZiASuNfdY8Ho8gdjt27IDb7YbD4cDU1BTcbrc8tL1er0HiHYA0pqY4Qm9vL5YuXQqTyYTi4mLMzs4iMzPTkEW22WwYHh7GihUrDK+XlpYiHA5jyZIlaGpqkkxlZWUl3nvvPTnm0qVL8cEHH8h1qvUSPDeLxXKJ+BDpHOFwGN3d3Vi+fDny8/MNQXVqaio6OzsRCoVQUFCAvLw8zMzMSL+3tLQ0OBwO6SG1bNn/297dxUh11nEc//6gbDeswNIUNnVldSIvWfWiYtNCNGYTwrKWkOUlMTREUUzkghpMNsGXGxu9aYya6I2JRpKWaEsTbOyFkZeEIBerwhK2u9CFEoHIhu2mLCs2JArl78WcczLsm4yWOWfc3+dmzjxzZueZzW+eeZ45z3nOJ7l48SJtbW1s27aNgwcPMjo6mn0RHD58mF27dtHV1cWBAweA8rkjjY2N3L59O1vtrqurizNnzmQD/nThjPnz5zM4OMiqVauyZfnTPJqZWbGk/Ye0I50edUuPNqYqfwiv7FNUPpbenzdvHsePH2fRokX09PRkC500NzezZ88exsbGOHToEOPj40QEg4ODLFu2LDsFp/LH8cpBysSjVpWvX3lb2fmF8iVR0pV7m5ubWbhwIe3t7fT39zNnzpxsJlh6Lcz0nMTGxkbGx8ezle/ToyYRkV0WJH2tlpYWBgYGaGtr49atW9k1NZcsWZKd8pMORBcsWJC9rxs3brB06dJJ/Z+RkRHWrFnDyMgIly9fZvXq1ZRKJU6cOJF9/9+9e5fly5dni6w0NTUxOjqaXWs17b81NDRkqzWn/6PKo8A3b94kImhvb2dkZIT+/n5KpVJ2GlFvby/d3d3ZtODh4WH6+vro6Ohg5cqVnDx5ki1btmSXGxoaGqJUKt33mg0NDdy5c4djx46xefNm1q1bx5EjR+7rB86dO5fe3l7Wrl1La2vrpFl86aAxnXJ87949VqxYwdWrV7P9Nm3aVEX6799/x44dVT33g5T+oF+NDRs2TCrbvXv3B1GdB7Jx40b27dtXs9ebyfr16++7PzQ0dN/9zs5O9u7dO+l5AwMDk8rS6b5T6enpyba3bt065T7bt2+fsnzi6XH/K800YKoVSflXwszMzMzMzB6WvqhYf2g6RTns8x5wIe9KmFXpceDdvCthVgVn1uqRc2v1xpm1elSL3H70QXYqygD1woOMps2KRNJp59bqiTNr9ci5tXrjzFo9KlJufW0MMzMzMzMzKwQPUM3MzMzMzKwQijJA/UXeFTD7Lzi3Vm+cWatHzq3VG2fW6lFhcluIVXzNzMzMzMzMinIE1czMzMzMzGa53AeokrokXZB0SdK3866PWUrSFUkDks5KOp2UPSbpqKS3k9vFSbkk/SzJ8ZuSVudbe5stJO2XNCppsKKs6pxK2pns/7aknXm8F5sdpsnsC5KGk/b2rKRnKx77TpLZC5I2VJS7/2A1I2mZpOOSzks6J2lvUu721gpphswWvr3NdYqvpLnARWA9cA04BTwXEedzq5RZQtIV4KmIeLei7IfAWES8mHxAF0fEt5IP9zeAZ4FngJ9GxDN51NtmF0mfp3wt6Zcj4lNJWVU5lfQYcBp4CgigD/hMRNzM4S3Z/7lpMvsC8F5E/GjCvp8AXgGeBj4MHANWJg+7/2A1I+kJ4ImIOCNpAeV2cjPwFdzeWgHNkNkvUvD2Nu8jqE8DlyLirxHxL+BVoDvnOpnNpBt4Kdl+ifIHPS1/Ocr+BDQnDYPZQxURfwTGJhRXm9MNwNGIGEs6SUeBrodfe5uNpsnsdLqBVyPinxFxGbhEue/g/oPVVERcj4gzyfY/gLeAVtzeWkHNkNnpFKa9zXuA2gr8reL+NWb+x5nVUgBHJPVJ+npS1hIR15PtEaAl2XaWrUiqzanza
0XwfDIVcn86TRJn1gpI0seATwN/xu2t1YEJmYWCt7d5D1DNiuxzEbEa+AKwJ5mWlony/Hgvg22F5pxanfg58HHgSeA68ON8q2M2NUkfAg4B34yIW5WPub21Ipois4Vvb/MeoA4DyyrufyQpM8tdRAwnt6PA65SnOLyTTt1NbkeT3Z1lK5Jqc+r8Wq4i4p2IeD8i7gG/pNzegjNrBSJpHuWO/q8j4rdJsdtbK6ypMlsP7W3eA9RTwApJJUkNwHbgjZzrZIakpuSEciQ1AZ3AIOV8pivu7QR+l2y/AXw5WbVvDfD3iik/ZrVWbU4PA52SFidTfTqTMrOamHDO/hbK7S2UM7td0qOSSsAK4C+4/2A1JknAr4C3IuInFQ+5vbVCmi6z9dDePvIw//h/EhF3JT1P+YM5F9gfEefyrJNZogV4vfzZ5hHgNxHxB0mngNckfQ24SnklNIDfU16p7xJwG/hq7atss5GkV4AO4HFJ14DvAS9SRU4jYkzSDyh/CQF8PyIedBEbs6pMk9kOSU9Snh55BdgNEBHnJL0GnAfuAnsi4v3k77j/YLX0WeBLwICks0nZd3F7a8U1XWafK3p7m+tlZszMzMzMzMxSeU/xNTMzMzMzMwM8QDUzMzMzM7OC8ADVzMzMzMzMCsEDVDMzMzMzMysED1DNzMzMzMysEDxANTMzMzMzs0LwANXMzMzMzMwKwQNUMzMzMzMzK4R/A1y4iMESLKgWAAAAAElFTkSuQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "transformed_images = [None]*5\n", - "to_tensor = transforms.ToTensor()\n", - "for i in range(5):\n", - " t = transforms.RandomAffine(degrees=0, scale=(0.5, 1.5), fillcolor=255)\n", - " transformed_images[i] = to_tensor(t(pil_img))\n", - "plt.figure(figsize=(16, 16))\n", - "show(tutils.make_grid(transformed_images))" - ] - }, - { - "cell_type": "code", - "execution_count": 30, - "metadata": {}, - "outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAA6gAAADWCAYAAADcga8EAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvNQv5yAAAIABJREFUeJzsvVuMHNd1NvpV9b17eqanZzjkDDmc4Z0UKZsWJVu3SLItx8GfwEGQAPnPCZCnJE8nDwGCXB7yEsBwHCcIzgHykDwEwUEeDg6CIECAY1uWBEqhbdGyKFKURVK8c3iZ4UzPpW/T07c6D+1vzardVdXVQ05E5+8FDOZWXbVr77W//a3LXttyHAcDGchABjKQgQxkIAMZyEAGMpCBfNZif9YNGMhABjKQgQxkIAMZyEAGMpCBDAQYGKgDGchABjKQgQxkIAMZyEAGMpAnRAYG6kAGMpCBDGQgAxnIQAYykIEM5ImQgYE6kIEMZCADGchABjKQgQxkIAN5ImRgoA5kIAMZyEAGMpCBDGQgAxnIQJ4IGRioAxnIQAYykIEMZCADGchABjKQJ0K2xUC1LOtXLMu6YlnWNcuy/mw7njGQgQxkIAMZyEAGMpCBDGQgA/nvJdbjPgfVsqwIgE8BfA3AXQDvA/jfHMf55LE+aCADGchABjKQgQxkIAMZyEAG8t9KtiOC+kUA1xzHueE4Th3A/wPg17fhOQMZyEAGMpCBDGQgAxnIQAYykP9Gsh0G6m4Ac+r3uz//20AGMpCBDGQgAxnIQAYykIEMZCC+Ev2sHmxZ1h8A+IOf/3oql8v5Xfe4nwvHceS++ufHce/HJea9HMdBu92Wn/l/y7LQbDYRiUSwvr4Ox3HQarXk2nQ6DQBoNBpoNpuwbVvuEYlE5D6NRgPRaEcd2u02bNuWvmq1WohGo7BtG81mE5ZlwbZttNtttFotxGIxuSf/7jgOHMdBMpl0vUej0ZDr2Ubzncz/6f97jZfjOBgeHu6rfx9V9uzZ0/W3Dz74AKdOnZLfr1+/3tc9/1fVdfN+pq7z91gs5qnrQEdfIpEIEomEp65bloVIJAKgW9cBIBKJIBKJYGNjo0vXo9EoWq0Wms2mr663Wi2ZaxSt63wHLx3mPfg/9offeKVSKWn/dsnCwoJLlwcykF9E+eCDD+DHLbT8ImEv778d9yJOEYeedOwFgGaz+d8Ke7VonmHyi4EM5BdRPvjgAwBYchxnR69rt2Om3QMwrX7f8/O/ucRxnH8E8I8AYFmW85WvfMUFEgQ0P9EAQjAMEm3QEeD8rmMbgu4biUQEoGnM9Xp+0HPNawnA7IdWq4WVlRUB80ajgUajIb+3Wi0Ui0XMzc3h/v37At6ZTAaRSATNZhOVSgXxeFzAOplMIh6Po9VqoVQqIRqNwrIstNttJBIJjI6OotVqYX5+XgxNton/KxQKiMfj8n6ZTAbRaBSO42B5eRnxeBxPP/20C/wbjQZqtRpGRkZQr9dRr9flf61Wy9VHxWJR2szFJEg3vvzlL8uzeM/HTT4A4K//+q+7/mZZFn7605/K77/1W7/VdY3XIthLJ8LqJOB2LjxuXQ/T1jBt1M/n9Wy34zioVCpimFJfGo0G1tfXMTk5iY2NDbz//vuIxWK4d++e6Prw8DBs20alUkEikXA5WajrGxsbqFariMVi0s5oNCr6vLi46HKgTE9PY3193aXr7XYb0WgUO3bsQL1ex/LyMlqtVpeuA8DVq1dx5MgRWJbl0nUtfE+t6ySDJm5RZmdnsW/fvm3T9e985zs4cOCAtM8U9q05X02JRqOudwlT8yASiYS6Lgzuhr0XMQvoYG2r1UK1WoVt24Kztm1jfX0dlmUhFovh0qVLaDQaWFtbk2u/8IUvoNVq4eOPP4ZlWUin06LDw8PDiMfjWF5eRrvdRjabheM42NjYQCqVEiNgbW0NY2NjACCYPTQ0hFKphFKphImJCcHpeDyOWq0mhsHhw4dd73v+/HmcPHkS0WgUlUpF5opt27J+AUCtVhNsdxwH9Xpd1gNTWq0WuF5r4fjqNetx6aQX3oYRy7Lw1a9+FcB/Dfby3Z907GUb/LC31Wq5eMZWsJd97Ie9rVbLxTO2gr2tVsuTZwCPD3tN2W7s1aL13rIs7N+/3+X0MGU7cTmsbm0nLlMPgnD5woULsCzLF5fj8Thisdgj4TL12A+XJyYmMD8/P8BlD/n5c2+HuXY7DNT3ARyyLGs
fOobp/wTwv/f6kCbVeoD8pJ8O1teF+Zx5jZc3jYOsiTSwaSB4SSQSEeMwSPh5DfJcMPTimkwmUSqVEIlEkEwmsbS0JBFVChWck2NjY0PuT9AmIWs0GrBtG9FoFI1GAwsLC2g0GnAcB+vr664xWlpaks+xL6LRKKrVKoDOBGZbLl26hKeeekraEYvFUKlUUKlUkE6nZeLpRZjvOTIygtXVVSQSCfGybmxsiFFsyptvvonXX3/dNZaPQ8KCeJj76K8w9+R4h3kXbbz3o+tBEWo9H7V+B+m6JoPm8/T/TCeT1nXeh7qeSCRQLBZlUSKZoGhd14APbOo69ZkLVywWg+M4ouvUWdu2Yds2Hjx4gGaz6dJ19sni4iKazSaq1aos+JcuXcKxY8ek3YcOHcL58+dx+PBh0fWNjQ3Xe/NdtK7H43FUKhXU63XP7IBbt26hUqng+PHjLpx4HKIJkJee6uiI1/ibeqHHMuiZQcSrH6PCbGOQeM1BElqSAo4RdWZqagrLy8tYX18XLGs2m4hGo7h69SocxxHyVKlU5Bm1Wg3r6+tyn0KhIH1DnV5cXEQ0GhV8tSwL+Xwe9XodKysriMViWFxclKjW+Pg4HMdBsViEZVl4+PAhJiYm5J2eeeYZXLlyBUeOHEE6nUa1WnWtB3zfZDKJer0u6xMNIx0t0kTn9OnTeO2117rWSeqiHoMnRfScC8sz+plX/WCvXyZQEM8Iwlt9D+JJWCNB841EIoFKpSJcJRaLiR4sLy8jmUwim82iVqshGo2iVqsBgEvPHccRXLZtG7VaDRsbG+JkJpdge+fn58XgIPZaloW7d+/KPaibQAdfHj58CKATPd3Y2ECz2cR7772HF154Qa47fPgwVlZWsLi4iMOHD4sx4KWjw8PDePjwIYaHh5FIJLCxsSFOR3090MHe69evu3jGf6WYc9d8l16OEQAu/QjCW31fL9mKA8XEHz8x9d62baTTaWxsbAAA4vE4Go0GNjY2XA4U6isNv1arhcuXL8uz6dBjH5RKJXFOVKtV4QHkF6OjowA6HGJpaUn6j8EhGrrE5Wg06uqXUqmEixcvuhwoJ0+eRLPZRLlcdgVqzHmbSqUAbAZqdCaDOb8jkQjeeeedLgeKmSHwpOFykDz2ljqO0wTwfwD4PoBLAP5fx3F+FvKzoUBYg38Ykt8PsdH3Mo1Pr3Zw4oQZ/LCLHZ/r1S5NRpkOk0wmZSHgNbFYTNJj2F/RaBSxWEzIECc2vW0kJjQYG42GPF8Tf3pVOckJAq1WC/V6HbVaTX5vNpsoFou4c+eOq99yuRx+9rOfwbIsJBIJMRS8+j8ajXYRp6WlJU8jKBKJ4Ny5c6EW6H7kUe5lAgV/NtOYvZ7Tr6dL62TYdvFzvXSd9+6l60EGtZdem3/TC57WdYI970u9NXU9EokgFot56rrGjVgshna77dJ1nR5Po8LUdaAzHzY2Nlyfo67r/rRtG5/73Odcuu7lYTZ13bZtxONxFAoFeTbvS1laWsL6+vq26ro5Pv06ScIYiPzOa73mhb4fxzno2qB7me9nevOJqc1mUwgHtzLQCGy1WtixY4cYowAk6soxodOPDo5qtYp6vS74yi9mtywvL4sDQzsO7927h6WlJYkU8J61Wg1LS0uC+7VaDXNzc2Is8P0OHz6MixcvSho626YdZdQ3YDMNk0acl5PCsiy888474mx5nPq3ncKxDsMzAG/99JOt8Axt1IXlGWHbEUb8eIYmtX48gwYcybLJM4i/NCJMnkGs4zwBvHlGrVYTI8DkGXQ48vNePCOdTgup9+IZvHZ8fFyME80zuCaYTjfNMx6X/oeJPmrpx+gwdS2sQRmEpXqtDIPLfG4Y7qMdJ/y/OQ7Ut0wmg0ql4oocc90kphI7qT9a9+g81Os6HS1LS0uC+dqQdBwHKysrWF1ddWE9MwXq9TrW1tbQbDZRr9dduEz+cfv2bZlHml/wvTlmsVhMAkjsB68+AzoOFM4Z/bxfRNkWU9pxnP/PcZzDjuMccBznm2E/F3bSmGRaD5iXEAh5rZeQVGly3wt4tDeo13VhrqEQICmcKBq8uChks1kAEKINQEgwFwj2qyYjnCCJREI8PvQW8vn84oJj27YrhSkajcpnufBzkupntVotzM3NYX19XdoKAM899xzz0ZFMJrvSo2hcDA0NyQS1LAtDQ0PIZDJYWFhw9Qn7cG1tDWfPnpV2PA4J0q8g0Tpkpmx66brXwml6RIPaoheUIMLDa029C7pvr8XYNGD9RO9BMnUd2OwDeuHZb61WC7VaTfRQp5NpXWd72Sat68wQSCQSXXquDVv9jqauc0xo2Jq6/qMf/Ug+66Xr6XS6i/ibul6r1RCPxzE7O4v5+XkXudPvd/bsWSwtLT1WI9XUL00+wxJ1jRW9rjWNAL95wT5mdAfoTczCzDEvwqp109wSsbq6CgBYXV11OUS4lYKpt9QZjlutVkOpVJJ3NZ0d2rFHgqTJOJ2JNF4ZDVhdXRVyVqvV8PHHH7v607IsnDhxQvSP6Zhe755KpWSuJxIJAJ3o2MOHDz1x5b333pOo7OP0zG+Hl5/j04+Y+hkkNOqDrtMRcz0+/fAMv2v75RlmX3jxDF7vxTOo+5o3aJ7Be8RiMZkPXtfzy4tn6H7jHNM8g7jIeerFM3bv3o3r16+j3W578gy2MxqNIpvNdvGMGzduuPrNi2f4OZz7lbDRRUo/xnE/uGyuJ0G4zPHjGARFcINw24/76N+TyaTcX6+bHDP9fvF4XHSNeElMJi7T4VIul12p7hzTer3uiv7T6G02m+J4ocGqI7PLy8tYW1uT6P76+jouXryIubk517ueOHEC9+/fR6PRwMjIiDgJqU9813Q6jUwmg0ajITymWq3i8uXL0jbdpz/+8Y9x7ty50MGKsNKvA+VR5TMrkmSKSX78gJYeOXOjehBoawkz+akcQalpzI3Xxq+fcLIHPZuTB3CnVALutEdzobIsC6Ojo1hbW5N0HHr2NVD0Uir+30zn5H3obeK76v0cXqDDz7HNtm3j4sWLeO655+S6druNZ555Rp6ZTCaF7Jh9zyhZuVyW3H/L6qQB7d6922UYRSIR1Go1/PCHP8Srr74a+N6m6L0ZBD79tzBpYVps25bP6r+F8YRrD6K+1kvnqI8avL3uby7KQHdqnina8Atqs6lzQdeR3JgkqdFooFAoYGpqytVntVoNo6OjiEQisr9PFz/pZZRTvPScv9MgADbJGxcqttksDKLfW4/B/fv3MTU15erbkydPyjweGhqSYiPaM0xjWN9zcnISd+7cwd69e8Ww1ovpxx9/jFdeeaWvhUhHknQ6kpezz/TSB/WxdrwEYZ5pjIYh6lpfgkiW9tD3aqtfNI1RU+p+JBKR/T9M+RoZGcHa2poQb74HhfinMUy30a/Yin43bSCbaV3UVzMaSifJyZMnkUqlpC+effZZXL58GUeOHBH9o37zWY7jIJvNot1uS6pnNptFIpHA9evXcfDgQdfYtdttnD17FpZl4dVXXw10MvQjj4Psa9GEVosfz+iH+PP+WxH9nCC8ftw8g7
ptGgD679pw8+IZk5OTrjb2Mo55LeCPvybPoMOH/eOlWxpjLcvq4hmWZeHo0aPisPbiGRp/0+m0i2fMzMzg+vXrOHz4sC/PeOmll/oqnuTFM7SzMizP6GeumYa5l+g28P69xpQcp5fxEgaXNXfRxbbYFuKVxplWq4VsNiu6Uq/XZQuYDp7wudrWcBzHhcu6n8x2ms583Yd8DttJB6P+HB0oY2NjSKVS8v/du3fj008/xZEjR5BMJgWXNVcjBmSzWYnEDg0N4eDBg7hx4wYOHjzY1c9ra2t4++23n2hc7iVPjIEKdKccUvQAa0NOi6nwGnj0njY/oSdOe+p7kXF+D7OQhfFqBhH8RqMhBTd4Pz6b4Xwq5vDwMBYXFyU6ys9zYdGTU7+PSfL1hNMRNMfpFEMimdXkUt9DTy7e88aNGzhw4ICrjwkynHTFYrHLE8drOUGj0SjS6TQikQju3r2L6elpuU7vE3z77bfx2muvhZqYlrW5f5eLJf+mQS6MB1L3axCwmc/Xfe2nW3rBpofMBPOw7dFj5CfmohUkvdqgPfNeus49H9q5AUDSeIrFouyjzmazKBaLUuiACwMNPJIePkcbX5oUaqNQR0+JN3544KXr/H19fR0bGxuy+OlrSIzi8bikfPJ5fJbej5pMJrF//34sLCwgn8+7ipBQ3n33XezcuRPHjh0LpQc6HUsv2uYY9kvUTdIbVvoh6r2iYGGMU2BzIfcj6nSU+BH1crkMy7I+U6KunV9BRJ1/8yLqur1eRN22OxkKQUS91Wrh9OnTeOWVV7ZE1PV9tkLUgyQIf02eoQlYr3GkPgKbPKPXuJs8g/fpJY+bZ/jxG/IM7qFm6iz3m2qekclk8PDhQ0SjUQwNDUmEiTxDPwtwO7zZVo3NXjxDO4/MzwW9s+YZFK1fmmfwf5r7aZ5B/L1y5QoOHTokfal5xrvvvosXX3zRZXgEjZEXz6BBFZZn+OG8qcfA5jzT26W8xORsvFcQH9a4rIsamaIDGb2uATbHS/dDLBaTAkXUzVgshnq9jtHRURSLRaRSKbRaLYyNjbkikkB4Bzb5g8Zp/g3YdGBz3rO/+H/2He9FbuE4Di5evIjp6WnXunH48GF5d7afDmzeg/dMp9MoFovCDQ4cOIB79+5hYmICiUTCZRi3222cPn0a+/btw759+wLfXYt2YGs+ZerWn/zJn2y5gF0YeaIMVJI3k0DrCeylYF4E28zl7pWC5PXsoGvDepaonEETRIO43//NqIp+35GREUnza7fbyOVyuH37NprNJqampiQtbe/evWg0GiiVSkin01hYWJB7crIwlWd+fh71eh3VahW5XA65XA7Dw8MolUqYn5/HxsaGeP927tyJVqtTRZipZn6GlW3buHr1Kg4dOtQFgJzguVwO5XK5y5jmGNFQSSQSSCQSmJycxNWrV7F7925Xiho/+8477+DkyZMS+fATkiJN/rhomMTlT//0T/Htb3+75/ibOuunN/oddTu0+C3Men70SvXU1/TSXx2x7/WeYfRcg7b5DpRGoyFkR7eRixVTeJrNJvL5PG7fvg3btjE5OYlIJOKp50tLS7Kw7d7dOZJZ63k0GhXjz0vPme6YzWYRj8ddes53Z2ExEhfbtnH79m0hNLqf+C5ciPSipscol8uJnnOePXjwAPl8HqlUqssJND8/j3K5jC996UuB48X+ZFt0Sj7xlu0NY+xpR4k2toOw1GseBOmZOTceVcwor2kwxGIx5HK5rqyVdruNdDqNtbU15PN5cQSyuIZ2lFAv+HmNc7pftcFJbNfj4TiOK9rSj6Pk5s2biMfjclyFee9MJgPbtrG2tub6uybqTD1Op9PYv38/7t2753KUaGzs11GiIwVss24Hx+rP/uzP8Fd/9VdbGmsv/NWk20sHvPTdfFd9TS/81RjZD88IY5SaRoTfZ8IauBx7y7JcDrZ6vS6keWhoCPl8HnNzc3AcRwyzUqkkzuLV1VUXz2i325iZmUEymRQ9N3nG7OwsIpGIFIFjsTLiX6PRwNLSkjhs9HzQ2BOJRHDt2jUX/prG8vDwMFZXV7u2UwEdg2RoaEgKmtm2jf379+PatWs4cuSIZ4Trxz/+MU6ePIl8Pt8Ty3RmhMYC7ZDxW0P5Gb+x9nNg6zW0l2gs9OLX5rV+nMWUXoEf6p6e+1o47qaTxXRgM9rohcvUvSBc1u+lcdl0YG8Vl9fX13H37l3BZX2NdmCTf/B57B86sJk+PzU1FejAvnnzJqrV6pYc2CZmhbGnHpc8UeWcTO+N7mAqkN/ntGjypTu117PDCsHFyxDTv5upWX5tCGPsmmBGou44jlTMtazOntRIJCITlpuvuV+OufckJ9zLQTJFIt9ut6XMtmVZGBkZQSqVclW35H6R8fFxTE5OCulut9tIpVIYHx/HsWPHpKop3yGZTMqCpglTu93GmTNnUK/XMTQ05Iqssp90GhojadFoFLOzsygWi1JFWOtPu93GhQsX8O677/YcW0Yt6A3k59nfbEcYfdEeY/0Z/bufnnvd/3HqeZj2sx2fpZ4Dm4ual57zeQRNLz1nYZggPec7eek596xMTU356vns7CyOHj3qq+eaeHvpOZ9PnfHS81gshsnJSV89t+1OEZ1H0XPLsuR3tsMk4ub46wXb/J+X6AU9jB7q/XpBotcIryiCbq/5Hl5tZtVlM8JBkjc/Pw8AyOVyWFlZQbVaRSaTwcjIiByFMTk5ieHhYezYsQPJZBKZTAZAJ61r9+7dOHjwIJ5++mkkEgmMj4+Lg25mZgYnTpzAs88+i5GREWSzWVhWp8jWjh07sHv3bmSz2S4nmE4FJdnd2NjA2tqaq5/Yr7yWDhuzT4jzfO9YLIadO3dKgS6t25T5+Xm8//77gWNF0SnUpnPLdAo+ipj4q/VJfw/iGaY+6Qiv1ju/z/b7DppnmGLir/67X/t7RXg18Td5Bg3VlZUVwUmTZ2xsbCCTycj+PJNnsBo5UzBNnsExikajkk5v8ozx8XFJqffiGe1223U0UyaTca21fJ9Lly4BAPL5fBfP0IY5q8ZqnnH58uUu3sL+u3DhguwzDBIdkTPTjDkn/DJFvHCZ4ofL/Tg7thOXiadeormDHy4zwh+Ey+xPP1zeu3fvI+PyzMzMtuNyKpUKxGU6ULcTl3UwbDtwuZc8UQaql5B8hhWtqP10Xj/PIah4eYxMY4MgZP4PgAsw/dqqPX78jF5II5GIpBNGIhFkMhmsra3BcRwMDw9LVTGCPD/LPPZ4PC5HuOgiIPF4XBYHvT+CJeL5TuPj4wAg15Dst1otjI+Pw7IsLC4uiteIxRISiQTy+XwXAL3yyis4d+6cAJFXRIFjlc/nhajH43Hs2LFDzqTSHh4uIs1mE2+99ZYr9UKPOceCX/q4ES4oJNePku7qtVD0o38kBr1Sb/zaFPZZfN9H1XNesxU9Z1/56TmNR8dxfPWcc8BPz7mA+el5PB4XXfDTcx4N46fn5qJs6nkikRDPqZ+eN5vNQD1n3/rpudY7Pz3X4+NFPvwcHHqszHllik5dDaO7vF+vdE+TiPkZG35OF
/Oaer3uqhDNyA9TtSYnJ+V5IyMjouMktCzWwoyAkZERIb7sL32WNfEc6Oy51hFKrms0CCYnJ5HL5WQbB/WUxCqXy8k7JBIJrK+vyxYRjaWrq6uyp3tkZKSrT2icpFIppFIpNBoNichGIhGJoOl7RiIRVKtVvP322z3TsakL/K7TMNkv/WKc11j2kn6jAV7ph71EO9PCiOYZppj46xUF1M9lm/36gp/X663GDepOoVCQZ5g8g47Ddrstxew0zxgeHpYMGcuyungGj9EoFouCtSbPqNVqknrMYnOaZzDyZFkWdu/ejVgs5uIZnPuHDx/GwsKCZESY7637yeQZs7OzuH79uqv/2c/tdhuffvoprly54vp/EM8ghmue4ccxvJza5v+9rtdrYpBsNy7zPf1wuVcGWDqd7onL+nleuMz2Pgou0+nth8ts06PgMk8d8MNlANuOy4xYbwcuh5En2kDVHRB2Lwo7Ewi3B0l/73V9GO+832eCJOjdtNdFk0AqHicBU2Jv3bolVSY5WTlJCM76uBkN9FQ8Lgw6TS0Wi6FUKslZk+yzoaEhOI4jxJ3gAWwuiMvLy9JvQGexicViUrlSi+M4ePrpp6VAB4k739/st5GREVfqw9jYGO7du+caWxOUz58/L33Ybm9WdvMDgX6JSC8x36NfPec1YfV2q3puWeHOCgxzT2Bres7FLEjPmXbjp+ckO356riOHXnpuWZZkEvjpuW3bgXp+//591/t76bl+f7PfRkZG0Gw2xZjcip7zHU1d1nquyU9YL3pYvNUSNuIPbOpG2L2Nuh3mO+h79MJw3RdmZMG2bTlSiIScf9fRdQASjS8Wiy7SYNu27OXXhJykybIs17mNOqrebreFNDMFHdgcS5J/ZhKMjY119R+98JlMRqJn3MPEdzdTDh3HcTk0otHOeYO6n7Xxf+bMGc+ojm6rxl+vMeEatB3C99AkLIz0ez1xpBcBB7bGMx5V+B7c12fyDGIsq5TW6/UunkHyqqNGmmfwPoy2mjyDVUxLpZKQeJNnLC8vu9I8ATfP0DpMI8HkGfwcU49NnmGOj8kzuB/79u3bXYYX5d69e65qwkE8g7+H5Rm/yLjst7borJ0gnTfXKC9c5s9+uMx3exRcLhQKgbjM6x8Flz/88MNAXNZ87r8bLlOeOANVe8BNotpLdKojP897ms/w+jlo4pvP92uPl4fI61peo70zpmjvh54I/KL3D4BEKWdnZ7GysiJKTcnlcmg0GuKVYSoMJxbbQeLNFB3btsXjSTJdq9XkKAxujr93756U26Z3i2SYC20mk0G9XsfY2Bja7TZu3ryJO3fudI1PMplEqVTC+vq67CXwA05G03ThD+4V0WSfbbAsC0tLSzh79qxrvINAcavGadDC8Kh67pVmYT5P32ureu737n467XftVvWc7fXTc6BzbAYJlpee27Yte0i99JwpMn56Pjc3h127dsFxHF89dxwHDx488NXz9fV1MWApWs/b7bbr+A+vcRgdHRWi56fnmiw+qp6HIUJeka+w4udJ16L/34+zxO89e+3x0v/jMRY6U0FHoXbu3AnHcXD9+nUMDw/DsixUq1XBRRqXPD9yaGhIItbAZv/SqWK+K50yHHN+xqy8S8LOecSfzf2djIRqsm5ZFvbu3SvpZowO6HHVusWKvs1mU/b/x2IxiRiZEb92u4233noLH330keudSXz0XNc4YY614zj48z//c9/x6ld0fwL+Rdu8RPOMoLXBC9N7RR3M+4XlGaaNfiUaAAAgAElEQVT+6OuCsFe3k/NXk37iiTmmyWSyi2fo7QK8r+YZ9XpdjnkpFAqePKPVagn+Oo7TxTN41JJ2uGmeUa1WZT5RxzTP4LtGIhEcP34c7733nifP8BoX8gzO6Z07d+LmzZsuY1mP73vvvSd7u3Xf+UnYrKyt4HIY0WvPduGyn/Ae2sHhJcxmCsJl/s0Pl+n4eBRcZs0JP1wm/j4KLp86dWrbcZlOlEfB5e2UJ8ZANT12+m+9xCsdwo+Qe92zn07u5TEIu9dEE/AwQsJtLgBMA2NkSbeBFULNyA09mvTqEcwzmYwoLNMeLWvzOA8Sdi5ELEhELxPFtm1MT0/Dsiw5xLhSqSAWiyGZTAqxJyBcvXrVdWyN4ziYmprCjRs3xFNE75cfGCaTSZfhsG/fPty4ccPVx9p7uba2hh/+8IddHjg/6Yd46/cw7/E49NzcL+P3vF6/B0lQNT4g/L6WoLQyr3ZtRc+Z2uOn5yRZfnrOxcJPz6enp+UZfnrOxSxIz1dXV331fHFxEY7jSDTLS8/b7U7hJD8952Lqp+fsyyCSpMdiK2S9l1PQJD/b4RT0MzT0QhvkLOE9/JyC/GyQU7DZbAY6BTVZ93MKarLu5RRcX18PdAqSrPs5BfU4bNUpSKLn5xTkz/9VTsEgMTE3DBHX4pV9EPQsU4J03cTzrfAMr/uH5RmW1dm2QNwx8Ze6aRaJ0TyDGR7EPpNn8PMsDge4eQadg4w4Eq81z2i1WpIpA6CLZ3B89u3bh3g8jpWVFRfP0O9k2zZeeumlLp4R5GhLJpOi9yxOc/v27S7cpO7/9Kc/xf379x8rz3gUJ0oQLmvZLlx+HE6UXrjM6/1wmcblduIyDclHweVIJBKIy6ZsBZffe++9zxyXg+SJMVB7GZF+wr14/AoSElG/SarFVPawnisNRH7voSeXJtumkGjxniSujAY2m03UajU4joPR0VExKnTevM6t1/dNJpOS1sJFxHE6kSouJPp4Fcdx5NBj7VXR/a/bSe/RxsZG17P1Adsck5s3b8qxDbzuyJEjWF1dlb8zAuY3XrlcTrxQ0WgU+/fvl4OU9Vjy//V6HT/4wQ9kX2OQbFU/9Wf6dUp4SZgolVdqj14o9N/1e+s0m17tNKOcpmiPWz8k6d69e11/Z6qM9kSSALHdjCpHo1HRU6BDKEhq6vW6eFS5fxWA7B/lXKHu0ltqWZbr6AQWYeIC5jgO7t69K4QKAGZmZmDbNh4+fCj65zgObt265Tp3jXquU3IYyfUSx+mkwvHIBxbuqFQqLuNZe4Hr9TrefPNN1Ot1wbKgMQgrQd75R9VzbUj3ImRhnYLaa+0nxG1iKMeU+qWdHZOTk7BtG/V6XZwbGxsboqflctkVYWfkiWmOLBbD47KIXcRP3RbiOlPN9RzXbSPm632+JEYkXZZl4erVq67iYHSWOI6DhYUFWJaFTCYjc8dLhoaG5B4kQw8fPpS9iXw2dapcLuP06dPyjv04Sx5FTKdDv6IxMiz+ekXNgvCXeqbPXe71jDAGr+YiQZEux3FQLpe7nk3nc6lUEkNPF5EjbrKokbnOJZNJNBoNSX+kI1mfZc/70KC1bVsKHUWjUanQvri4KNyHGKrx17IsKbZEQ2Jubk50vtls4urVq13868iRI7hw4YKMbTKZ7BonSjQalXeikTo5OSmGNT+nt6pcunQJb775pszPIHkcOr9VXA6LoY+Cy17PeNy4zLb54TJ19VFwmfxjO3GZ7+2Hy3591g8uc94/Ci4/zswWU54YA5WGGBA+xx7YPI9I
f95PtGGlIwpewgmujdOw0SBtVPrdm5OFIO3Xdirijh07uiIPBEjLsrCysiIpB2w7PZrtdttF2iORiBBipg1wvwb3cbCyHgBZPHRuvWVZUpyApdprtZpUDWZVNE4q3uvAgQOIRCJYWFiQPVQ0hF944QUpZMPFLpfL4f79+wIkLBeuRS/GuVwO1WpVFr1EIoF6vd7lQdJk4cyZMygWi6HJ+VYWEa0XYTygFNMLqj2jfqKNf685pX/WhEsfq9OrPfp9vISLP/W8V99qD76ZvmzbtmuBMe9HPWe6LtvP+1HPaeg2Gg25X6PREN3kfiUSMu770HpOYsPiNAR6vXiQINm2jXv37onhyHbyHEl+1tRzx9k8Z9jsI0o+n5dCDlxMqeccJy40XIj61XM/IWb1s58uzDNNvSOB6KXvYaITXs6SXotwpVIB4N4XrJ0lS0tLQlZYjVGTJl3RVBMU7SyJRqNSdTESiXQ5S+gkIUaazhISb9u2MTU1hWaz2eUs0URMO6G8nCU0Ckxnidf4aWcJ5x3P4vNyllAP33rrrcfuLAkSjftbwV++m5la5yd6Dno9Lwh/g6L7vCfgPq/WS3Sfs9/97ss5lsvlcOXKFZcjnLqrK4YyAsT0cxZ8YTRft4/rOJ3guj91mqY2DrWRToe2mSXH99ZOcRox/HssFnMFLhynEyE7evSo636WZeHzn/88rl+/Lv3ELSVatGGUzWbFyE6lUshkMsKPAPc6zTa9/fbbfeNvP+T/UXE5KDJnPuezwmXLsnriMsfJD5eJp4+Cy4zE+uEyT8cY4PKjyRNjoJrSqxMIoAR0P8+MKQRfv/ubE7afxSzswGli2it/n9Ee7lPTz6HhqZ9LA4PFYer1ulQxZdEC9kGz2cTQ0JCkISQSCRSLRcTjcTF2M5kMksmk9G+5XBZjd2xsDJZlYX5+XlIp2R49eUqlkiw8BAqSO44fDen9+/e7FifbtnHgwAF8/PHHWFlZEQDR72sanjt27JAD6GOxmIu860WMn2+1Wjh79iwePnzYMwrfr/Sa7L2EOt7PogP0LjZgLp5B80hLWONcL/5h291ut/HCCy8A2CTm8Xgc8XgcDx48kMqJ+Xwely5dgm3b2LVrF4COXu7atQutVgsrKytSvY+LTDabxdjYGIaGhrC2tgbbtrGysiKZB9PT00gkElJ4qNFoIJlMYmxsTCpIPnz4UOaR43Qin+12Gx9++KEsVpZl4ejRo4hEOmf50RCmLv72b/82AGB0dFR0g2Tm4MGDOHfunOfiz3vw761W56gbfYZpKpWSc9O0nvPndruNH//4x6H3DPmNqSbSYZ2Cuu29nIJMB+TiGqRzjKj0wmpivm5zkHOFTpGVlRXJEuEzqFM7duwQRwyzO3Rb6Sjj/OW7cW4yLUw7M3k/to+GgNlHJGM0mrQxRNJHksN30ZEj6t7Ro0cxOzvrSl/N5XKIx+OSuqiPQvKSXC4n2Mv5SsPZyzFn2zbeeecdfPDBB48lYtRLHgV/2VdmcRYtXmQ/aDuKeS31Mkz6nLl+BYnmGWFx/Xd/93e72thut2U9te3Ofn6mOGqewegT0yRNnkFdZ2TK5BnkH9Qd7hHUPEOffzo7O9vFMyzLknROjT3s31OnTgEAjh07Jv1u8gx+VnMr3Yecd7lcTtJENc8g/uoMMZNnPE695zhrTrwVXO4VUOEcDuOoCYvLOpqonQt+bWZ7lpeXu3CZHJRfrVZLcJKGmckVdTuBTb4WiUQk+09nLergielMZxvIM9ku8mfqNh3ijO46juNqu2VZOHbsGGZmZlzPGx0dRSaTwU9+8hMJ1jDaT9HzYXR0VKL9dGJbloVareY6v1Xj8rvvvovvf//7/yW4HEaeWAPVr4N0WgjgTqfoJXpC+l1PDwcVNex9OWHDLB40NnpNXt6T3huT0OhJwy8uiowY6YN+CdS1Wk323vEZ9JByYWEKAwl8sViUxYfEmM8nCBPIaGTevXvXZRRns1nxurJfOVFefvllAJ3xfOqpp2Qc2Eef+9znJF230WhIhFb3uU7xyGazWFlZEUOH6cyaOBCk2P7z58/j9OnTj9VDpA1Ar7/7iekdD/KWUzi++r38RO/Z0Doe5EXVi30v0emxQTrOtnJf0e7du13RRQBSxGB1dVXGiuNI8NeGGp9bq9VcaWskSVwoGo2GeD2ZGlar1cQIjUajGB0dlYg/dZyLBgB5nvaCWlYnjUhHfUnWORd37drlSonjOz/77LO4cuUKNjY2xOjhGJEY6edyDpIoxuNxSTfjwmqO65tvvrklI5VjqlOXgkTPS/170LU0TPnz43aYaAMwqC2O42Dv3r3inOBnaYRZ1mYEFYAcjUFdJMFuNBqCV8ReEntiY7FYlH1RTBXTpEY7HCYmJmBZFh48eCBeehqxxO4HDx5IKmKz2cT+/fuRzWZx69Yt2XtNw6Hd7hTaY3VK6nY0GkU+n5c9fCQ3uo84t2kU0NHItYM6aEZR+C7Ly8s4c+bMY3cKeokXZgXpo85s4lrUKxqkyW+vCJPmGb3aYv5/Kzwj6Hpi7cbGhlQONXlGtVpFuVwWwr20tATAzTO4P65arUrRF80zWOjFTNUlzwCA5eVlV1qkyTOIQfxu8gzLsjA9PY16vY7r16+LvpNnHD58WN6bqZMmz/jggw/QaDSQSCS6jh/hz+zfoaEhFItFF8/glg6zVoTmGbdu3eo5fmGFa1IYBzOwORdMR2aQ6Os4DkFiYn9QW3QEvde15XIZMzMzGBsb63pXnTLL5zLaGolsVu3X2Vbc31ypVKTORSKRQCKRwMOHD2HbthQesu1ORgAj5Sy2ZFkWpqamBNOIe+TsnF88Jo/ObFb5Zw0W7eRrtVoYGhoSfdU2xvPPP4+LFy/KvbQjRWMU5xCjqXSSs7/No2a0gfzWW28Jp/os5YkyUMN0BjuVE0STclM4UHpx6kWuCYraE9NLTDIYRoK8miYppYfx2LFjQnLpgSyXyy6PHY/HIGFwHEeIUrlcFgLE+9NYLJVKWFpaQrPZRKlUEmNyaGgIAFAoFKQ/IpGI7PFgcRd67PnsVqsl6Y1Me9yzZw8A4Pbt23LOJBevyclJeX/HcXD48GEZa4LN5OQk7ty5I33ENByvfrRtG5OTk5LmEIvFZMP7p59+Kn2gU+WAjn698cYboSKWvcSLGPTj0SbZ1X/zk37ayjmgI1a9nqFTu/uRXuk6lLW1NVSrVdy7d8+1D0JHoOjc0J5JLs40CujNLhaL0ndaT6ir+p1pdK2trYkRkclkZAHY2Nhw6Xk0GsWePXtgWRbu3r0rxrNt264iRpybJGJf+9rXXO984MAB18JP3Dl48CAWFhZk75ZJjCic28DmOcR0MvEgej0G+llvvfWWa69LL9HRAB3Z9CIqJn7xb73IvXby9SLuprHUS9hm9lGv9wQgmKmdD9x7bNs2JiYmhCxzqwQXf42tNBL0XqZ2u+1KLTejXXRA6LMgqefmesfxNTNESO5ZuIPzQc8tyq5duzA1NSW/02gYGxvDuXPnUKvVkM1mhQxpks5nxmIxiYLZti3kvlwuy7XaYeI
4nfOG33zzTdy8ebPnGAKPtt/J1KkgfdQ8g4aQn7ON99F62A/PoAMrSDTp3A6eQaOSKbsmz2i321hYWJB2VqvVLp5BfTUzuzgXuHeVDkKTZ5gZGnQqevEMADL3TJ5BneTar3mG7udsNitnuAKb8+iLX/wizp8/L1k0QTwjGo1ix44dLp4RjUbx8OFDqdquDTr2ybVr13D58uXH4pzRDsNeTj3dFq6jYRyNpj776bc2THsFYAA3Rw/SU+oxa6PM/jx6rgNVOj1VP5/6wnNQuVayInC1WpX9qo7jSJ0KOhK5fnOfaqFQEAOVBZWoRyzWxc+Pjo66nFwbGxtoNptSAZ4Yrd/1a1/7mvxu2zZ27tzZ1WdPP/00bt68iY2NDdmeZ44Br3WczZRfAJKqXC6XXXOEn2V7T58+LfViPit5ogzUXhOFnl1+aSLTS8JGQ0lGtEe/HzGfodvHdgeRO96DQMJwfL1ex8rKimuBZ3ECFizgGY78H0m9JkIEWz0xuPeURiijpDpdhErM9xsfH4dt2xKl1HtJ6EHMZrPiQeLiRe+Vjrw+99xzXUBNoq1LcltWJ7q6urrqShc29UADNdMoAIiROjY2Jh5gHXHk+wLARx999FgWD00s+D1ID/l8euu0LoaRMGBC0qX3PYQVL701jQVdgEBH9b3uxfaQ6CwtLQmQ8rPUH3r/qAuMirPoAXWKRIjXUf9v3rwpHn16TAFImjvP9ePc4B5UGhS62AbnHiNcTCViJOrhw4dd++NHRka6+np2dlaMa441F2H2Edtv6jl1Raews58OHTokXnqddaKj0++8805oHdft1vPRi6howt4PodZkMowDh9/DzA9tBPjdW5MkkpJyuewy5KnfAFx7knkNyTU/z71NxDNWfeQ4W9bm3mZ66oGOzugqk2zb6Ogoms0mVlZWRC9J1EnMSJSoC2y3nlexWAzHjx939R31k/OVBtTJkyfx4MEDV/v8xigSiYihwMhUMpnEjRs3XLpGvSDuMoqwXR57M7IQJCbP6Acft8ozNMEMI17OKspWeAbHanR0tOu4KxYiqlar8neOn+YZxN9MJiORK80zKpUKyuUyKpWK8ATNM6jvJP6tVquLZ/BdJiYmPHmGPlpPn+lKnmHKnj17unhGu93GoUOHUKvVZJ+pF8/Q/a55RiKRwOjoqJzZSpzU+g4Ad+7ceWSd5/iFdR5rB2A/2K8j2L2u1d97icYtv/Zrp+H6+joikQguXLgghQIBSIST6zQjpsR9OnOZtssMp3Q6LQYez98tFAqSraidWsR4bh+KRCLCM/Qea+K/ZVni4NT9SCNZf+n3N3Uil8theHjYtQ7Yto2ZmRl8+OGH4vzxKoiqZXR01OUoSafTuHbtmhzXR73gNe125yiaq1ev9tSV7SqU9EQZqIB7z57pKTYjm0GTzDRYzL95/Z8/9+sx0J4iPwBj+/kOfD+/SUmSUa1WJdWkWCwKWQU6HiEetG5ZnZRF7s9k+pbjOK40BxJ5LipUSE5W/XxOSvOoDhIyRkHNqLOZ/tdqtcSDz2dyMYhEIq530nLgwAHZf6JTTHO5nGshoyHh1e+2bcuxIjRkRkdHkc1mce3aNQDoAhDLsrC4uIg33njDc0zDShBI++k4yaz+fBgd57wxvXF+bdF7ecII7+3lFTXnj9ZxjrGXtFqdAkarq6tSQS+fzyOfz7uMV5IcGu0kHiQxmvTTW8hFgM+nDrHsfKlUkrlC72K1WnUZMWNjY5Lyq733ugAC0In+cv8nr2OEQXupvaIk3OeqjblIJIK9e/cC2IyMMuXT7GsK+0xHUvfs2dOl45z7nIM/+MEPAIRLOWc/9uO065VeSCGpDpMO7CXmXDAJOyPxQWuGxkOmzu7atQsjIyMANo0cjil1iSSIxgwjrIxoDg0NoVarCSliobcrV66gUCjInlZezz2gDx8+BNA59oi6Tv3nVgfi9OTkpERKU6mUYOLU1JSMOaOxJIXMyKG0220cP34cAFwOk2g0iuHhYcHwoaEhlw6ZwnWEVa5jsRimp6dx48YNmTfaAcz7nD59WoyU7RYdedHfgc1INL/C7CXVa7kfz/DCY23wBYmJvWYf+bUf8Dd+qaOLi4vibGu1Wjhw4IAL35jerYvY0Rjgc4j5XM9LpRIajQYqlYqk/haLRZmHeq88K/suLCzIM1iY0bI2UyO1U5uRVUbnmXq5a9cuWJaF+/fvi6Gi54gp7XYbBw8edI0jI1/cRkRD2muc9JjzPdj+fT8/AoxzTo8Jdf7MmTOurQL9io4Oh7mHGSnrJdoJaa7lQQZlGL3WaavM9PB6PvutUqnIMUD5fB47d+4UnSDmMuKv5y/vrc/IpcOYWxhqtZqcYHHt2jVZ96l37XZb1mnWKuERSXSUkFNT74BONf+NjQ3cuXMH0WhUKvCOjY0JRyHm6fXC7LepqSl5D455IpHAc889h1KpJFyYfeZnh3De0AF/+PBhOI4ja43m2px/c3NzOH/+vO84bqc8cQaqnsSmZ8j0TvbyngdNQjMiQPFLFw56hjZwel1rknWzHe12W4ypTCYjaWZMIZyYmIBt21I5r1gsCsEBIFXLtNGozxDlJKjVavJ3oOOBWlxcFBLD/RQkViRFBOp6vY5KpSJnPpHI7N69G61WS/YL0vvKojDRaFQ8+TTYddEj3Re23UnTZYoRx5teW3qqWOHM/Dz7l0YLDVAW8piZmcGNGze6SITWtzfeeKNn+pWfBOlRLx3X7d8OHe+VgqY/s506zoi6n463Wq1AHdfpkn46ToLpp+PMUPDTcVbXDdJxRh/8dPxXf/VXPfuKOj47O+tK0fTScbOypKnj2Ww2UMdNcgSE03HzOWEcL34640fYdbTZ/Izfc/yIkDln2K/8n5/ukwAsLi6iXC7L1/37912efs7ZsbExAJvHDtEhon/ms9PptDxbExoWnSNpAjp6x0iBJuV8lvaC8z2Jj+xz27bF40/jVDtLTCcihUYqCROJCo1mptlns1nfcWq3O/taGdkHOkeTHDhwAKurq1haWuoi7Hyf//zP/8SZM2e2TNjDCtcsr+eYeNsLK6kXXqLfUT8rrEHB9mg9CDM3zDaYQmyrVCrIZDK4deuWjPmZM2dEx6nbxCrT8CXOMtuFxJefZ3YMHWfcH8/ol+M4QvJXVlakXxynk2rJiCuvpaOGTsq5uTnZ721Gc7UBPTw87HI0cJ3iHDpx4kQXLnJO0gAnvup76N+ZLcCxSiQS2LdvH65fvy6nI/Az2kl/7tw5vPvuu6H0wctA1BjoxxX083o5vL2cH14cw6u95nVB3MX8vN88azabUij0wYMHoj8smkiHiXYoMYJKJwzvT72hbtP5QrykoUs91XyMEXXiMr/bti1pwnqNBTbXHjrQdd0Ircec4364DABHjhyR7AatuyMjI3KvoaEhVxaBV38zYMPsyvHxcYyMjEgVaz3HOZ6FQuEzKZ70xBmogH/6ip/HVotJpsy/ewkHQRs5vYxUKnVYQwIIztsnaa9WqwLILE5x7949WQiuXLkikzCVSmF9fR0LCwsAOgUGmIbDScl2cf
Ksrq6KYcCKprpMvI6Esirf8vKyGAz0KpIoaSLDzzQaDSwuLro2l+fzeQCQSnzsv71793Yt3HryWZaFw4cPu0CSJJCTuV6vCyHSi4DZx9zvwgUkFothz549uHLliqu/NIBbVmfDON+1HwmazL10POiz2qP5KDre6324QG+njkcikUAdt207UMdpVATp+K1btwJ1vNVqBeo4jY0gHY9EIoE6rvvFS8eHhoaQz+cDdRxAoI4DCNRx7sEJ0nFzzPg9rJ5p8hWU/mv+TGLab9qZeZ+wn9VC3axUKpKVQl3ifqEDBw4A6BB7VjTduXMnLMuSvWocOxIjnRJOYmJZm5EoOjIYsdXziwezU+9IuorFomTEaF2hQ5KGgGVZYvia6Xn8m1df87PHjx+XFHrqItPYaYxks9muLQgap23bljP5mBY/NjaGkZER3L59W67ne/Nz1WoVP/jBD7Z975N2VpiGRpDo99X92+t6vmM/6bw6ayHIqNbXB7WDW4JoDBIPJiYmJNUxmUzi+eefl3YzIkSdJilnlJ7RJ+oU9YrHyBUKBbkvo4Xc88f3YcXrVqtT7Iv6ur6+Lg5Jkv2JiQmJiNm2LeekMw2SWTW8RywWw1e/+lXpB85Dnc0FAEePHnWNF9+Z41Wv1zEyMuIyfE2JxWJScI9p/jMzMwAga5h2EGmnFLNZgkTPCa/130836Jzyw2XeT3/n58LiqzYSw3D1IK6gx5/FDOmcoPN1165dcg86hPW7UBc5d6gLOtqvU8epz8w0dBxHimUxMlutVqXOC9P0WRCsUqnIs7i/lXNmeXkZtVrNpXutVkvqvrA/mMGi+1T348zMjCvLSEdTGb31SknXawuNWnIhXj89PY3r16+7jGu2gd9ZYLFfLrxVeSINVL+XD7NgcdJr7zr/7id6oe6XIHEChJmM5qQloKyvr6NWq2FhYQEXL150AZxlWRgbGxPQZkqk3sDNTdtjY2PSfp2mxuc3Gg3ZZ0dSls1mJd0T6KQNM6WNqQ9MQWA0iNEqlprnxOfn6SFisQyeB8j9WZyQ0WhUjhUBNlOOvIDS3C/FhYOpQ/T06rLbpkfQcTrpEyT7vMf+/ftx48YN1yQ2vZJvvfWWK4oXRoKIyKPoOA2z7dZxXvtZ6jhJsp+O62f56TjPIfPTcaar++k4C+ME6Tj3ij+Kjk9OTgbqOBeTreq4PrSe/+NnqOMfffRRV9sGjsFux6BlWY/sGCQp8nMM8jikR3EMUg+30zGoSamX8c+9T/9VjkE/MfXA1DX+3uuZOrpgYnCQ6HkVFoN1NFATxaBnmG2nDjebTdy8eRPLy8s4d+6cOFK0M9BxOpkYPMoC6BS54t79dDot0SXyD8dxJBOlUqlgbW1NxjoajbpOCGA7+BnbtuVcaqbsMoVcR3F1ZgB1cG5uDpZlSZYMMX1lZUUM1EgkIntegc2sG92PHJdIJOIqFKZ1mplgzB7QY2A68vRWKqCTucPI1/z8vAtXeQ376/vf/34oveDnvfTUXH8BdxZJmPsCcM31XtfrLAK+V6/PmfOMPxOTW60W5ubm5OzPdrtTNfnatWsoFouoVquSjQG45xfXTArHi7pHbqv1gSnp5ATcskPuQOcOdZd4CECir9rpw7WEzkhGXbXBzGgo28eipLpPzEDB8ePHxSGkHQ+sPkyjmtuCvPTEsjpnr3ObCHnF9PQ0rly5IpFkL0fm22+/jU8++cQ1ZtslT4yBykHSA6F/JlHzEg0QW+kwPrfXpNLPNxenMJ8jgbUsS1Ica7UaCoUCfvazn8FxHDluRYf72+22kNKJiQl88YtflGdaluU6zsKyLDFKuX+VIE+g58RzHEe8piRYjAZwUSuVSq59IdyD1263JTVS79sANkt7M1WSRTOYrsmJywlP0R5FLexbHj+jx5kkjBvbme6ryZJJGoeHh6VUOPVuZmYG169fl740J6ZlddLP3n///dBATwDROf2/SDrOhWerOt5oNFCr1QJ1nPf103Hur/bTcT1efjpOouSn42tra4E6zoU0SMfHx8cfScfZh2biv/gAACAASURBVEE6/uGHHwbqOH8P0nF9NquXji8sLOD99993tWm7HYP9OEzM6F+YuWBGuLTTxHEc3L59W47Q0NENpj4xKsKCKuVyGa1WCxMTEwA6+38zmYyQavP9K5WKpIZx/x0dIPyMNgQ5b0iKdaoZP6PTGWkIU+/ZBu6fIinT/aDTxelkNB0TjuPg2LFjLtJu27aktHNuDw8PexIr9rPjdFIsNWmPx+M4cOAAbty44Tp2zBzPt99+GxcuXHDpUb8FOYi9fFc/wkbjQhsOXu+0VVIWBkfN+3N98DL+zc8R37VRSx2fm5vD/Pw8FhcXMT09jV/7tV8TjKO+FwoFVKtVrK+vy5njHOdCoQBgs24AAFcROQqNTe63puFKPGZkitEw27aleJLWQ6bKEucYpaWRaNudI7X4DDp16JwhFkciEbz44ouhHVojIyOuucE2AHA57zjf9Rqj1zPWVNA8K5fLSYVfr8gk5zmN1F7OPT99MOdSP04U/fmwGKt1wG8em8I1SetOq9WSbIt79+6hUCjg7t27kh4LdDjS2NgYMpkM4vE4UqkU1tbWZH3mvVutTmVozh3teKbTbmNjQ+pOcJ5lMhlJOY9EOhX98/m8GKg8a5dRVHIGRnv1uOhih8RmAF1H5NBYtSxLak9oxwLHTvfp3r17Xc5u/o9OKAZs6OyhaJ0AgOHhYVlngE5NjH379knWmflsfr97966c2b6d8sQYqIB7ATAjNkFeTT1AOs0g6DlenyeR9RMu8GY6Wi/Q0x5XALJozM/PY35+HgsLC/jCF74gZ0DmcjlXkYKhoSEsLi5KsYB3330XKysr0mYSeyqrLidNjyUXFkZ4+FlN5LUXnxFYXfCGiySBhJ4hClMMNKHSXhh+hn0RtFfRr/+5V4TtsW0bu3btwvj4OH76059KNJeGDZ+lx8iyLDmDimSPqTilUknOvPSKGq6treGtt96CZVk9iZL+vGlgepGdXnqk79uPjmvDJYyO8xlsk9c9/Z7HtnG/xsOHD3HhwgU0Gg2cPHkSs7OzADZTVfkMx3FQLpfFacOz7WiMcbHQURguENFoVArE8DvTgkiKYrGYLH7UaQI8PZvFYtG1n0MTQ01wWMCMixO9l9ppA3QWqK985Suucesl7XZnD6DuG8uysGvXLjz33HO4du2aFETQ40FM0nqWz+fl/DMWWpudncXc3Jwsln46znsEGdS8Jkx03U/Cknav5/VL2klu2ScmaR8dHXWRM5O0c88xscWLtAObThOKJu0kr5yHXqSd0dONjQ1P0q73/fuRdmIc9dUk7SdOnJD2BeGObduepD0S6VTrpdME6CbtvC9/Nkl7NBoVp0kQaddOk63omfkZk7BrPQprhAYZCH5t0NzGb07pdlB3zTUk6BnEAZLe+fl53L59G6VSCTt37sSXvvQlaT/fgdss+LloNIqdO3di165dEqlZXFyUdV3zHn7pegJMgWcmAq+lw6xUKolDgw48RnLoOKSB6+V4Y2aLZVkyF1jHol6vS
4V+oKM/XHP4e69+3Ldvn2CM5g7PPfccfvKTnwj309WOvXCSzk4+N5VKYWRkBNFo1BVJ1ePBn998883Aceb8CbqmXx3VQmd50Of9AjZeUTezbZblrha7vr6OdruNO3fuoFgs4vLly8jn83jttdekBgkAOTeXkXqm3tJIpKOajmjen8/V5+IyNVwHDqh7HGNdM4DZhLqyN59DPSX+sW9oYJrrFvk1+8D8znsEBSyeeuopyXLh9bFYDEePHsWFCxeEc/P4HDM4wbbQ0Od2k2Qyif3796NYLOLBgwcuPdCfLxQKriMZt6OS7xNloGoxvT+9yJ1JJsOQaX1/0wPkd71JWM3/+z2PHhjHcSTvfXp6GseOHcPzzz8vlRd19V29yAMQksnKvRRd4U57kkjSSXrYFg36elLrSceoDQk4FZcLD5/DvX38nYsHDUV9PbA5ASORCE6dOuXqpzCA2m63ceTIEVf/sI9OnjyJW7duuQDCz8DiwkyjhyCVy+Uk/c4UPUnffvvtwHb+zu/8judz9e96QepViEMDXD867kWIzD72MgD4jDBGlY7u0Fu+uLiIWCyG/fv347XXXsORI0fEqHIcx0Xo+S6ZTEZSU3bu3IlMJoPFxUUpMBGPx10/E4BpHJJcUS9Z+p/pPDQ8mQqmHTYk+Xwf7VBhIRH9HBrhlmWJR9SyLCFHnFt8Fv8WhjBoUq8N40gkIunL7AP9f7M/Abj2hmmvKY828Xqu2d5e+GYaIv1IGEOToue0Fw773Z8OOc7p5eVlLCwsSPGr559/Xioh6v1A6XRaMBnoHFNEghKJRORYAh0ZZPuog8RWbbSyTUwFp37qtHeOL7CZAs176SJLnDc6msrrdYoXx1sbuOzTsKSdbdeEa/fu3VLMzLZtWQvMNZbC45n4+Wg0ir1796JYLLqKjOhnAR2nyY9+9KO+ou1avDBY47BOzQu6h/5cGCPWfA+gN8fQ+q1Jba/307oTiUQwNzeHPXv24PDhw3jxxReRyWTkSJVmsymZTbqtNPSq1SquXr0qWVhMCaeu6LVXvxMd65wXxEjyHwCS6st28juLMvJ9qNP8nVs4NH/h/4jJNAgpGvf0fYOk3W7jqaeechF6fn/ppZfw3nvvSbuo7yb2cbxo6JFfxeNxOYqMZ7tq0f36ve99z1NvvYwFv/fQRaf8xO8ZvYxgM2BjOleChGOuefGDBw9QKBRw+/ZtfP3rXxeH29DQkGAw8Y3bbKLRKMrlMhYWFrqOhuJ3tov9obMM9AkVOuONhuz6+ro4BnkdAMlkIm+gwcn3AiBnY9MQ1rpJfqFP2NAShgs7juOJtwBw6tQpXLp0SbLBstms6xnm3LVtWxyv7LuRkRGUy+UuZ4oWy+pUot4ueeIMVC/DwAuctTFmRsh6iV5g2PFBBoIGcb2Ye1VU83oWFXd5eRmVSgWXLl1CNpuFZVniWWfkiFFSTTwJyCQcmUwG6XRaUhYJ0ARp7kcyU29oLOro0vr6ukSuaJTq4wj47GKxKGW4LWszYqSLEdDYazQaQt605117mABI8QBKGKLE+0xNTXWRoEwmI+Rb7xXRC765OOVyOYkyM51o7969uHXrVldlU60vzWYzsKjBl7/8ZQDeVaG9Ui78pJ/UHLOtYc/w81okvQwfv+dQr2q1GpaWlvCzn/0M8XhcyBD3c1C/19fXXWklnPNzc3NwnM4RMOfOnZOy/YyuMIWXi4JeXPTfzf021WpV/l+pVCRSpYlOrVYT4qD3b5JI6VQe7rHS6fPUf038X3311cB+9pN2u43Pf/7zrj7m95dffhkffPCBeIn1Hhr9HI4hj2vg+5IgRSIRKS3v1z4aUUFiEvZ+MFjf20vPtQ6aTpQwJEjPeWZ9LCwsYMeOHdi7dy++/vWv48CBA0gkEkilUnImo24T9SIajWJychJHjx6VPXgTExPilDGJo9ZN4j/1ku2p1+suLz6j9vrsaxqx/DwNABrdmnDpY44Adwq13n9qEqWwpP3EiRMu5ykATE9Pw7Zt/OQnPxFnkd4j7eU44TxjNJfVTldWVjxJO8e+UqngjTfeCK1jFBODtUNa399P9Gf7MZC3isEc00fB4Pn5eRQKhUAMtixLnHImBhO/uf/eC4N1erAXBjNy3263fTF4bW1N9NsLg835Y2KwNkb8MJjvGKYvKSYG87N+GKwdYLxWY/DIyIhrG4kfBpucxtyTyv2tJvZ56bA2GIN0XPcJ723u6zTboD/rtZfUb55oTCbXXFtbw8rKCg4ePIgXXngBp06dEv2jfnEM2afJZFLOL5+YmMDnPvc5V6RUB5y0Icl7ao5N3KWxXCwWBUt5P+Iz+W+z2ZTK0MRkOmS006LdbmNtbU3+7jiOFDKiztKo1ZX+zWinn7TbbRw7dsw1XpwPMzMzOHv2rNyHAQKv8eFnqMfErunpaYn4m9fza2NjA2+88UaorUD9yhNjoHKim4sJJ4OfB1QbHWE7SANYmBQzXqtTV83PeC2qBHAuvB9//DHS6TRefvllxONxZLNZ5PN5DA8Py8KeTqclNUQbxsBmzjzPWGSRIhJQeqJ0mo9eQNgmLhxcuPg/HQ3VVSwJWDwT1Zw8LNikvesEbXrgarUaFhcXZdxY5Uzfox9jjFFkDSIAsGPHDhQKBSF4mUxGxsyMVPKZ4+Pj4rnlZvF9+/bh5s2bkgqpxzWM/MZv/Ab+5m/+Br//+78vY6I96ub9vO6rI6xhAYuiI9VhpF8jg9dQ75aXlyUN75lnnsHu3buRSCTkkGmeV5tOp5FKpcS7qKNAJCepVErIkeM4kk6r+0UvjAR9YHPBJLGnw4dHf2iyTq8m5yiL3wBwOWAI7FzI6BW1bVucNrpPOFY8hoRtDmNUUZrNJo4dO+YaC47/qVOncP36dclq4BzTeMifI5HNc4Y535LJJIaHhxGJRKTQjx7XMOS9XyOBoo3eIK+s+X/TqRhEgDRhB4A7d+5gfn4ely9fxt69e8UDXyqVUC6X4TiOZLBks1kZP0aPmEpWLpdx9uxZGfN79+6JXlA36Gykk1AfDK/XKaaYkXzregE6o2NtbU3O32PkkQ5Dknz2FQsxUY9J6vQ+atu28Y1vfKOr38LoZbvdST/XhB/obEF56aWXcO7cOXkOM3y0Mav3SzGtjEQxHo9jx44dGB0dxc2bN7s8/LqdPJ86rBCDuXfWy+HB76ZemxgcBk81NgL9Y7Dedxb2ehODJycn8eyzzwZiMAmpFwZHIp2jhegU98JgXW/AC4PZNsuyPDGY78AMBC8M5n39MJhHapg84HFisBfPMzE4k8m46gKYGAx0otN+GGxirja2tGjD1/y7vp7vGZYzaB6p2+wlph54ZQd4GdCaE1uWhbt372JpaQl3797F9PS08EumdzvO5taUHTt2SJ9wi1ypVBKjkvvU+WXbtjgEmEIOQJx/ACRjRmeA8X7ayUd95c/ULWZsmU5B9oEuzEhd0HNW2yFeYxpWTx3HwYkTJ7pqXWSzWbz44ouy9Q3YrAejn6XbSGctA1bpdBr5fB7j4+NSSJSfN/GyV1bhVuSJMVB/8zd/U6x4KhgN
I3NwzYELkwaj/9evMaQNnCDR5AjopJJtbGxgfHwcMzMz+OVf/mXZD8ezHwuFAsrlMtbX1+VcMq8IUyKRwI0bN5BMJpHNZl176gAIGTfTUjSYaANV/5+Tk4UIOKm5KDMdiAsM76U9ofT6lstl8VRxQWHfaS/+r/zKr7jatRXvy8zMjBjCGoiPHDmCjz/+WO7LtCW+pwYy9i/367E9rHx6586dLiM1jHzve9/Dv/3bv+Hw4cP41re+hb/8y78Ur5lOW9eePnNx0dKPvpJghF2c2H/9kCICOYtozczM4PXXX8fk5KR4G1mAqFwuSxl2HuVB4xTYrFTJlNjR0VGMjo5KGg8AcZiQzPJ6FiLgYsCf6fnn5+j9JBnifiiOLfWP+kqHC7301F/eQzvOmDpcKBQEw/SCauJXWLHtzhmpekGgR3jXrl1Sol7v5zU9pJzjqVRKDukGOvujWLTjwYMHXc/V7dbjzv9rAtTrvbRec/7xHl5i6q1JtIKwXi+ghUIBq6urSCaTmJmZwcsvv4zR0VGMjIxIyjYdFCRHlmXJHjoeL0BSnU6nsWfPHonS8LB1Ei4duafhpZ0Hun06XRHYJCz8PHWZ0U6dRqedgBqjeR+2R4+f7nemDpvjGlZOnjwp6eX6888//zw+/fRTAJ15RCPVHHO9vmezWTmOhCnCPLtXE65HEWLw3/7t32J2dta158zEYD3XvDA4jCNbO0O3gsFheUYQBvMaPwyu1WqyxxnoxmDqJo16oBuDibV+GMz20aAwMdh0NHlhcDQalbR6Lwy2LPc+bD8M5vhuBYOffvppV797YTDba44Pn0vxw+CbN292Yaxfe7Qe6J9NXO5H97SxFKaPvK7xw2VttHP7T6lUQj6fx/79+/GVr3xFMJnbKmgHMMNQO9xY/Zn3zefzmJqacs07zYPZH9Qd27ZFl7VDW2cBsk+oz9pwp97F43EZDzotWQyPkX5iDVOGGTAhz+fzdc2ZfpwoWp555hnXOPD9X3jhBanKTS7P99NjpvFnaGhI1j3ONx5Ds5W2bVWeGAP1l37pl/Cd73wHf/zHf4ydO3e6FmZz8vmJ38Tysvb7ASozcur3XA7wwsIC7ty5g08++USKATC9hkQikUggk8kgn89LxCidTiOTyUi6mY4wplIpTE5OYnh4WNIfHMeRipKOs1mVkmBNkNTFBrSRSgXkPlV9vpNOZ1heXpaKaPQaAZC0BVam1MWYLMuSPVX0YBGkzPTjXh47vz63bdtVUEaPxXPPPYf3339fFsmRkREXCHBctbHKhZn3i0aj2LdvH+bm5rrO2eol3/3ud/Gv//qv+OY3v4kPP/wQpVIJ3/zmN/F3f/d3eP3116X9mixpPfd75zB90m9fUm+DolK8zrIsrKysoFqt4vz584jH4xgeHhbCQ88ndXRkZASpVArxeByZTEYOkuZ7cgx0gZjl5WU0m5vn3JHo6f3CmtyQWFGPaNjSwcUv6qtOBeN7M/WHVf0cp5OKRh3mAlepVFxHzlDnGTHjHNPpvVshRrw+l8u5zj6l5HI5yajgsTnm/isvrzrTpgFIdCUej+PevXueaT86w8QLf03nkJfo/xPP++mHfoxgRjY3NjZw48YNjIyMYGJiQsZ8eXkZxWJRtjWQiLMgVyQSQTablTEnVj548ED2DFF/mZ6ncYT4RkxlBF97zHmcgdZlYNNIpWefe46ZCkY9pXGrvf58fzoWtSOGUWKN+3pstNMpjDiOg0OHDrnem/LUU0/h/PnzQvhGRkZchqCOolLGxsZcbbLtTsXpreCulxCDv/vd7+KP/uiPAjE4jLM7SOiU6PfzGoO9xsF0cHAbjR8GU3/8MJgZA9yjbmIwAJRKJSHhXhgMwJWK64XB2hloYjCxKwiDgc3IuxcGk+DrrAUvDNZ92M9Y8vvBgwe77qExmEd7+GEw+9gPg2dmZnDt2rWeWGo6PfmzX8QyrPRzNq9+RphrqNv379/H2toabt26haGhITkOrVgsYnV1VY76Iv7R0UyOya0D1IVqtSr3pAFIZzExE4Bkr1B/yIX1nnddsEgHVXRQo9FoSCFGjb3UbR0s4TU6c4Zt0U5J/k/bF7x3v7jcarVw/Pjxrmw7x3Hw4osv4qOPPhJOxOi9Xrc0LltWpyAZ+5Wp5bOzs7h69epjweUw8sQYqNeuXcONGzewc+dO/OEf/iG+9a1v4Q/+4A8kgrHVBQPY3NNhkvcwUTszkqtFeypisRgOHTqEYrGIdruN/fv34/XXX0cul8PExASy2ayUxgY2K4KtrKxI1dF6vY5KpSKHAWcyGVm4WWn24sWLWFtbk+paLN/PEtc0/likg/tN9N5SRkv5O9N82+22eFf1BOZiQiDm/3XaAyceJyy/MzLBCQx47ykL6+2j6Huwsq++j+M4eOmll+Rok1qtJtXMTGeDJs3c18BDlePxOPbt24dyuSwETz/DT1ZXV7F7927s2LEDmUwG1WpV9rW++uqr+M53voPf+73feyz6zcVFexWB/vSb80OLqd+HDx9GqVTC+vo6crkcXn75ZUxPT2PHjh3IZrNyDhd1p1arycLDtB2eqcdURM6rWCyGdDqNTz/9FO12G8vLy+JwYUEafYYe9a9Wq0nKOz35dMpQx0nWufiRxBOgC4WCq+I1x2JtbU0+q6MALGvPKEK9Xke1WpVrqfO6H/sV7a3dv39/1xyxLEuO6SAhZLqbudhppwfTdrjAsOgaP6v1m/qgiXu/oiOIYSJQXv0Q9Fz9XoVCQSpAtlotPP/88y78HBoaknRHeupJwJmO2253qnPqEvvj4+MYHR1FJpPB2NiY7Eves2ePYB8JFQApsqHJuybYuhAecZPP5zqgM2j4XR8hQ9wANlPVdCSEfycOs6808WG/sY1hPeOMbjz99NNi1Gt59tlnce3aNVkrhoeHYVmb2z64nmryPjQ0JGsW8WB2dhblcrkrws9nhBVi8IEDB/DJJ5/g4sWL24bB5jzdCgabUTITg2/duoVCoYBUKuWLwToi5IXBpVLJdXavicHxeBz5fB7FYtEXg+mM8cNg4jMNDRODNe/xw+DV1VXJcvDCYM5d27YDMXgroh3nqVRKqmLrsSEG37t3T7ZOcUz1/DXnnInBlmVhz549rogsP6tFR+00LveLq7zHVmq48N174TLn/Pz8PG7evIlSqYSpqSl86UtfEqykcyWfz4vRGo/HxdjXuMyUb6BjRO7cudNVtI7jxUw/zYW1caozq4DNc82ZhUgd0zVVmBnI6CudxuVyWT6v9Y04z2dwHyx5eDqdlqOVotEoXn75Zde811HwsLjM8Th+/LhnUO6LX/wizp49K+/AoJMZtNGR/9HRUVc/sqjd3bt3t5RZ2K/0fHPLsv7JsqyHlmV9rP6WtyzrB5ZlXf3599Gf/92yLOv/sizrmmVZH1mW9UzYhvzDP/wD8vk8gM6kuXv3Lg4ePIhvf/vb+Iu/+AtJ9+g1Gb0GU6egagmKNJn7pIJSK1ZXV7G+vo7r169jYWEBU1NTYpStrKxgaWlJwF2DUjabxejoKLLZrCuCyn2oOnpTKpWQSqW
kuFI+n4fjOCgWi64FgcYBvZBcAJhGwDawOBKjByRJ6+vrYoRwQpXLZaysrGBlZQWNRkNSLBuNBlZXV6Vqr5liQjAYGhqS6qvRaBSvvPKKq6+1dzhITJAnqFqWhRMnTnQVdXEcB1/4whfw/vvvS9SBe8wI7uYiQg94NpuViRmNRqWYkm5HEECzsuaNGzfwL//yL33pdxAgPS79Nvt8O/Q7lUptu35zsdlO/eZ9ftH1m88Iq9+5XK7vaLyX9Bs1NQmYV7/pdSCRSGBpaUlIy6//+q9jYmJC9txZliURp7W1NTnChXpCXSYxsm1b+hGAbKdgdsji4iJs25bqhjoqSJ3U6Y06cql1lDpFhx6dkyTj/HyhUHDpF8kPjTmdosbzIGm00jnGOgG6EIdXn/cSGrkc06NHj7qiOvw6cuQILl26JPt9me7LzwPdR3NEo1HJHGIUOJfLyZ6orQox+J/+6Z9w6NAhHD9+3BeDdS0FP/GaExwDnfXkdy2lFwbrdUZj8OTkJPbs2SNRei8MpiPCD4Oz2azgsBcGMxW1Vqv5YjAx1Q+D+TP11sRgGnQ0mr0wmMfr+WGwnttBGKwzQYLEC4P5mV27dvlicDweR7FY9MVgc031wuB4PC7nbPtxjG9/+9uPBZe1g1a/S5BoZ6DX9WaQIB6PY3FxEUAns+LZZ5+V/c/cIsAjtVZWViRVnVFpYiRxOZVKuRyOtm1jbGxMts3dvXtXdJQGJ9doOmE09lIfLcuS5xHPtfOjVCq5slvISwqFgitVmA4200FCJyHHmhFaPe+ZtePX572Efc9xZVq65hCMpJ4/f17ejem+5phq3WfQhkX74vE4ZmdnXU5YSj+Ow1DvFUIpXwFQBvB/O45z4ud/+2sAy47j/JVlWX8GYNRxnD+1LOt/APhDAP8DwJcA/J+O43ypZyMsy7l48SJarRYmJycBdIB9eXkZn3zyCfbu3YudO3cikUjgm9/8JlZWVlzeVwoHQ088Tcz8xMt7pAfV63+2bbu8ixxopsOSHHHTf61WE9JgLsw6/Umng0WjUVQqFVm4eZ/R0VHxaF67dg2FQkFS9FhgSU8EAOJVBSCeD5IWen8rlYosetwHQE8SwZbAoveoAJseIv6Pe0Z46DErl8ViMXzjG99wTZytiB9Af/yx+FFcY3/mzBmcOnVKvEBMezOjCppA8R1JArlI6305P/3pT116wWd+4xvfwK5du3Dy5EmcOHEC+Xw+tH4vLy/LPb30W/fb/+r6TdIy0O/Hq98ffvgh1tbWUK/X8dFHH+Gf//mfJTqhDQxTZ/zw2IzcBYmeR15RfWBzXhQKBUxOTqLVauHu3buYmppyRaRIqFmELpVKuXSRhELPK37Nzc1JhDCdTkuUOhLZPIfv5s2boptMDyaRofHJNDXHcVz1BaLRqOhPtVqVfUss1NZut1EsFoV0MNoQi8Vw584diRzwKAbuMeTZwdFoVM7Q5u/UTYomJkHjY46Jxg/H6RTRefDgQVfE8Nq1a5iYmBD8YPYOP6fvwTGLRCJYWVkRnKVh3263peAX4MbeoHb//d//PU6cOIGJiQlYliVpx14YnE6n8e///u84ffq03MMv5dYPg736sh8M1jrohcFLS0uy1YfbgUwMZpuJIyYGA5vFmMrlchcGDw0NoVKpoNFoyPEfJgbz2l4YTFwE3BisC2UxmmtiMPe4Wpbli8HRaBTpdFoyXj4LDObfDx486InBAFw4Y2Iw70WjyotjWJaF//iP/8Dzzz+PWCyGf/zHf8Tly5ddWRJmm7R+6e/E5H73SfP+XrjM+/MEh9HRURQKBbTbbeTzeVdKKXGZBab0NjO9FmmsASAODMuyBDtpTJ04cQKXLl2SyrUsPsVMmVqt5qqcS4fU/8/em4dHWZ7tw+fzzEySSchkDyFh31ER0bq0FkXp61Gpde0Pt1bcqrZafesGssgmO6JSd+3hCtZatVrrYRehtYqoCIQ1bNkhCUlmkskymWRmnu+PeF65nicTSMD+3r7f993HwUEgM89y39d93ue167oqlmUJxuhwYV6HnIXnIHs7Z2RkoLa2Vp6HFeE9Ho+0kuE54PF4JOydTokrrriim6HvWJjMa1qWvcepro+xa9cukT2NU5999hnOPvts2c/BYNC2vvEUVa3ocw0Z2q7l41i4/M3nvrYs65ja7DHNppZlfQLA7/jvywC88s3PrwC4XP3/q1bn2AQg3TCMAce6B9C5UfPy8nhPmKaJpUuXStL7unXr4PF4MHPmTFx99dW2JHkCWzyLe28A6lgHs/6bZDcYDIrA69ADWgWZQ1dfXy/ND/fWTAAAIABJREFU3rXCyHBehtQy7CMzM1P+0HJhWZ35cBSKWCwmZdYbGxsFcJyhalogKVy01uvYeVpBSaD4rgyT4OA1df6qthIx1AfoCm1iDhaAbqFdvbXa63Gs7wwZMqSbJdqyLIwePRrbtm2DYRjiWeDG7mkw1FfPr2maMr9HG5mZmYhEIvj666/x8ssv90m+Fy1adFT51uP/6/LNA+zfKd/8+X+7fDuJyNHkGwA2bNiAiooKjB07Fo899pj0ldWhZfowJQHTsnqsg7YncsTv9vQ75iulpKTIocmfma+UlZWFvLw8UUhaW1tRV1eHhoYG6XuqldnW1lYEg0HJU+X+0/1vGULp8XjQ0NAgssZ3p9Lb0dEhe4LeVJJRepeYOqEjXSjHJO30llIO/X4/KisrZd4o4w0NDbY2MwxHY/9T5zxznfS/jzacZ6r+P8MwkJGRYSPgHKNGjcKOHTtsaR5aOdWkjCMajdqquWp5OhbuxhvE4CeeeAINDQ3CEeJhcEtLCy666KITwuB4c9kXDKZRp66uLi4GEzMNw+gRg/mOR8PgtLQ0JCcnx8Xg4uJi2YM9YTDz03rCYO11iofBNMwcDYO1PMfDYH1NPosTg3urgHE419f5/XgYbBgGJkyY0CMG6zXRQ2Mw1/1oHGPo0KF47bXXEIvFcP311+Puu++2KToaNzX2as+z3ld9HU7ccP7O7/dLyD4NF6yXwnSSrKws5Obmol+/frAsC42Njairq5P2LrqYJnOO6V0HunKSqSBSidyzZ4/Mr46M4XoQ42kkp9IViXS1YbIsS+Rbp1zw80x10xEFsVgMVVVVUlyPMsv7se+0ZVnyfjo8lsZIrgnntzeGFR1GTNnhsCwLY8aMiSv/EyZMwP79+wFAosD4Hb2e/JsGIr6jlgXu83/H6HtmdOfob1kWE0OqAfT/5ucCABXqc5Xf/F/3JBLH0H0tedDHYjF8/vnnmDJlCj744AMMGTIEJSUluOWWWySsYsmSJQgEAra8heMZzs0cb6FcLhcaGxtx8OBBjBkzRvrjUbhoFdSWe5fLJQUJ6PlhmAFBhQc1Kz9qAuDxeCRWnWEIlZWVkovQ2tpqez4tpD1ZAHUVR24mAGJlByAHAt+f1+QzasuKM9+DTeebm5uRlZUl5eBdLhemTJlim+/erIleg2N9JzU1FampqZKjwmtkZ2cjMzMT27dvx/jx4xGLxZCSkiKtekiuaFUksGVkZCAQCCApKclGDB
iG1NOYP38+WlpaUFlZif379/dJvs877zwsXry4R/k+Hjn//+X7+OUbgBAg4H+vfGsP1bHkG+j0CDz//POYPXs2/vWvf2HBggXSXmfevHlCHPW7UAnR73SsOXAOp5zyPU3TRFNTEwCgtLQUY8aMEXnk95KTk21hhhy6OAblSLeg0oWviAWpqak4fPiw/NuyLOzcuVN6w+Xl5Ylyw+/q6ADOJ/cLYPdYhkIhKRjD/ZGYmIjW1labB4Xyy3Aqw+jyeGvjANeVBXJItpytEDi0TPSGrPKzGku0vJ588snYvXu37TMAcO6556KqqkrklGHHfB6SHG30oHeD2EAPC//0BQOnTZuG/fv343vf+x4KCgpkDuNh8J49e3DppZeif//+cTGY40QwOJ7XyonBJPtDhgzphsHhcFjypPkdJwYzlFbPqcZg3pNz7cRg7qukpKSjYnBPw3k2xsNgnqN6fwN2DOa7MK+bgxis62f0hMHHMg4cC4Od3+8Jg2OxGM4888y4GMx36wmDSfw5tz1xjN/85jeYMWMGrr32WixcuBDFxcV4/PHH4Xa7UVhYiN/+9rc2edFeSP0MHH1R3p1KNrHUMAzpk7tv3z4J+zdNE+np6RJ1xdxhfT0aIrjHWTAUgKQx6Hx8wzDEi0oDZSgUQnJyss2z6fV6BZMZmkoli+tF5VLPDb32PDNpGCSGWpYlGK2jBbRHnM9OzKKBkuuujZo0GmlZ0+/am/WhIs/v6OFyuWy4zHlPSUnB2LFj8cUXX+A73/kOotEo+vXrZ1sffpbYAUC6KzgNVkDv23z2ZRwzxBcADMMYCuADqyvEt8GyrHT1+4BlWRmGYXwAYJllWZ9+8/8fA5hhWVY3n69hGLcBuO2bf55x6NAhEQLDMPDxxx/js88+Q0FBAQ4ePIiRI0eitLQU4XAYN9xwA3w+Hz7++GPcdtttiEajKCoqwgsvvCAC0NuNR6DQP+v8Uy4Ce3jS6xMOhyXkpL29XdzcR44cgdvtRnZ2tlyDIKYPCv1HhzN8MzdioefneWBlZmbCNDuLI3V0dGD9+vUCSNzofHc9B/zZOTf6+Zzf4b2dh5EznESHTehDWAMxewxefPHFEppzrMOjp/XqzfD7/dKn0HkAbdq0CWeddZYcAM3NzTI3/BxJIL+vDxltIdu+fbttvnivW265RQ6t+++/X0re84CfMWMG5syZg0ceeQSGYeCGG27A2rVrkZubi5tuuglffPEFJk6cKDkp8+bNEznpLRBoj0M8qxwPGN3mgWvJeWZeEmVs69atOPPMM2Ue9PW0d4RyTcKsD0fmpRHoLMtCbW0tcnJyEAqFkJKSgnA4jK1bt8Llckk7BKcHxSnnznnRn9fv5JwHPZxAz+/o/9cKSUdHh4Qp+Xw+XHDBBbb792UQ/441DMPAjh07hNjr59q6dauQI4aiannmc5FsEIOoyPJamzdvxi233ILExEScfvrpeP311/Hzn/8chmGgf//+GD9+PJKSkrB06VIJpeqrp4JDkzbnNfisJAGlpaVC2hMSEtDS0iIFK/x+v1QlZvis2+22eXDizaV+Bn6WSi6xnUTaNDtL8LvdbmzduhWhUAg1NTW2OdWyo88WFosBuvYO94b+rp5LTTD13Ghc1iSCFngdxgZ05hMXFBTgjDPO6BX2Ovca9/+x8NowDBQWFnbL07MsC3v27MHgwYOFjDIPPN47aSxuamrqVjUbAAoLC4/6LPz+zTffjMTERMyZM8dGNB966CHccsstePbZZ5GcnIwZM2Zg5cqVwjESExNRWlqKqVOnwu124/nnn8f27dv7zDH02sXjGFQGeQ5ROQ+Hw+JhY9s2wM4x9NlG+eRe15EkurI5P0+OAdhbu1DJJcf45JNPbKHp+n2cXCLevOjvOOUnHseIx1GYe63nU59V0WhU8mt//OMf2/ZTX3BJKyvHGiT9zuuzyn1ubq6cX5RZfl6vP2WdHIPzRKWGIcWGYeCOO+5AOBzGf/3Xf+Gjjz7Crl278OSTT+KLL77ANddcA8Mw0NzcjEWLFtkUi+MZnFentzgWiwkeNDY2IjU1FQcOHMDo0aMRCoWQlJSEhoYGSWtqamqSonT8G4BgrTZy6vtoGaPnkR50Ku58vmAwKJVpd+3ahXA4LLLtjPbR+Mj30POu5VjjmHZi6OeMN5xKpmEYchYlJSUhEokgISEBqampuPzyy20c6WhD74PeYjLneu/evbKm3B/hcBibN2/GOeecIw4IRpnod3W+Nw1a2qhELPs2Q3yPV0HdC2CyZVlVRmcI7z8syxpjGMZz3/z8hvNzx7i+tXHjRgwdOlSEaN++fXj11VcRjUZx/fXXY926dejXrx/Gjh2LzZs3w+Px4Lvf/a4UBLj88svFyrZgwQIbOQDQTag0QQO69/6jpa+xsRGmaaKiogJDh3b2UGNJcyoqrFxnGJ3FDLxer1jyAYg1nXHpQJelhocIh2l2tqnhiEQiOHTokFg90tPTEQqFkJ+fjyNHjqC9vR0HDx5Ec3OzeJ1Ms6tasVYMnEqoBnD9OxJFp/ATDGg10RuZf+uQAz33tIxdfvnlvbbYc/BdnErGsUZJSYkoX9pbQLKqcw24wfTg3PAPK/nqBuKaJGlQffvtt7F3714cOXIEM2bMsFmtY7EYZs2aBZfLhblz5+Lhhx9GUlIS5s2bh7lz58LlcuGRRx7BL3/5S/z6179Gc3MzzjjjDCQmJmLZsmU4dOiQjczHO4ScHkBNZhm6RtK9ZcsWnHbaafJ77VVKSkqSJuumaaKyshIFBQUit7wW+0pSIdWWRL2OwWBQrMXRaGeBj2AwCI/Hg8zMTCQmJsLv94tVubS0FE1NTaLYEuidRp94sq3XxfkzZVYfXsQLHSLG59RGCa1cGIYhPUanTp1qm//ejHghWb0Z7e3t3doS0KBCyzItv01NTXHv4TyQ2VYC6Mwjeffdd/GHP/wBSUlJuPvuu7FixQokJydj2rRpCAaDKC4uxm233YZwOIyKigo88cQTkkcJHL08vlM+9fNpI4BldRakqKurw+DBg7sRWl31FYAYM+MZZLQlm/+vib9+3kAgYAvLKi4uxpgxYyTk1u12o7W1FTt37pSzQhs09b2JWfr62nDCvaRlUBt6tAecGKyJg/4311R7X03TRHZ2NkaOHImxY8fG3SPHGlpOezN27NhhU+S4pjU1NWLQ4Z7imeE8hzQpCgQCkg/McyYSidjyAeMNwzAwY8YM/PSnP0VTU5NwjEgkghUrVqC1tRXjxo3D3r17EYlEkJqaKhzj3HPPxSuvvILVq1fjH//4B26//XYJ2VuwYIGNLALdOQb/PhrHMM3OnohVVVXIz88XbCPHoCFch0drjsHcd9PsDKVkOKVhdOVwOhW7SCQi4ZJApxGSBnj2UA2Hw8Ix9u/fLwZFPh8NrXwvJ5eIxzH0fOjcuXgcg2vk5Bg6vFPLSywWk5B+Fmo5Ho7RF25imiZ27Ngh/9YcIxAIiIxTkdAcw3leaY7BM87JMQzDwC233IJFixZh9uzZcLlcuPvuu/HUU0/B5/Ph5ptvRkdHBzZt2oRrr
rkGlmXhjTfewKZNm2z36+mM0evgNCYQr7n+jJCiYghAIkfYSojzUVtbi4yMDPFeOu8Vi8Vs3n7TNMVoo5+7ra1N8np5XaZOxWIx9O/fHw0NDdiwYQMsy5IijFo+NVdwGsN4Hycm82d97mv+RdzWeoW+np5fPa8ejwc+nw+XXXaZLSqktzirDbu9HTt37rTtGT5TYWEhJkyYYOMMXPN4RlItr5QJGg+PZTj8v6GgrgRQb3UVScq0LOtBwzB+BOAudBVJWmNZ1lm9uL5VVVUlwuN2u/HAAw9gyZIlmDlzJlwuFxYuXIi5c+fCMAzceeedeOyxx8Qq+tBDD2Hw4ME4/fTTMWLECAwZMgQJCQlYt24dtmzZYguFclrznJbolJQURCIRNDU1ITExESUlJRg7dqyEnWilgJuJm5WWnOrqapx00kk28qc3iFokUZh4T30IBQIBuN1uZGVliRVfWwwZQrFt2zYUFxfDsiz4fD4kJiaKckEFjZ7XpqYmqR4XjUalBUBDQ4OU+OYmZQgaYPeO6Tl0Ahnnx7nhYrEYRo0ahXHjxtnm4WhDb8C+KrW87/bt27uRHsuypLpmv379bFUx9dDEQnsudVGdL7/80nY/vv/06dMl9OT222/HiBEjAEBk+9JLL8Vbb72FpKQkXHHFFXjjjTfg9Xpx33334dlnn0VLSwumT5+O5557DhUVFZg/fz4OHDiAa665BuFwGKWlpXjyySfl8Isn2xpASeINw5AwzcbGRvTv31+s6841pDyzOIdhGCguLsbo0aNtn423nnrOGNqjKzkyYiIxMREFBQWSc8reWzz0tm3bJhXz6BWzLEuMBK2trUhKSkJqaqooZgDkGnxP9l/VRiGSQB0C6QRh59AHl17zaDSKK664Iu4hdTT55J/jke8jR47gyJEj3UgP8xxTU1PR1tYGj8cjobEadzTuca2CwSCSkpLw9ddf4+abb8agQYOkT93555+PlJQU/OUvf8HIkSOxfft2nHTSSSgoKMDkyZOFNK9ZswYlJSU2pUn/7cQSygt/xwO3oaEB9fX1UpGXQ8u6aZq2tkEkQyw05PV6BT/Y+si5TzgnbHdBrG5qakJdXR0Mw8CgQYPQ3NyMjIwMVFdXIyMjAz6fD19++SVqa2tlntPS0hCLxWSvUZl19kd0ej972rN6nrSCo//wLODnSOItyxJDos/nwyWXXGLzovdmOO/ZWwOKy+WyRZfoc1AXxDEMQ3ISnXOgsZ9kSEdSsIDX0YZhGLjyyisxb948tLS0dMNhJ8d48MEHBYeXLFnSjWP88Ic/FByOxWLHxTFYWbq5uRkej6dHjqH3huYYThzuiWPwu8RczTHIO4jDTo6RmpraDYedHIMFUnieOjkGYMfheBxD4/DROAbXUs+LdkDws8fDMXo6w3ozNMfQz+nkGAxVJQ5zECO1U4D47eQYhmHgpz/9KZKSknDvvfdiyZIlSE5OxoIFCzBr1ix4PB7Mnj0bV199NR599FF89tlnuPPOOxEOh/HVV1/hrbfeshkFND7reeDv9XxoozYVmokTJ8r1OIh39P6TW7JidFJSklTl5b2Y08ln0WerYRjSt5r7nrnZADBy5EjJK/X5fAgGgygvL5fiXgCk7y+jYTjHDAnX6++UMf0cToWZ5w9xSu9Xp3FWhxNT7nnWXXbZZfK93sibZVnHZVABIEZ/jRl8VnIpZ2RhT7hPXGbhM6DzDN22bdsx3wHfloJqGMYbACYDyAZQA2AegD8C+D2AwQDKAEyzLMtvdN75SQA/BNAK4CYrTnhvnHtY9BpyE8yaNQtutxv33nsvFi9eLIfGI488Aq/Xi0mTJuHjjz+G2+3Gww8/jIcffhherxc/+tGPMHDgQGzYsAF33HEHQqEQvvrqK7zxxhvSt9A5WQQRw+iMb6+trcXQoUMldp3E1XkA6cOHB1Q4HEZjYyPy8/MRi3WF7wJd4Y2Mh6cnsqGhQRTPmpoayUEaPHiwgJy24hBMkpOTYZom9uzZg/r6egmZ4kFFb6/L5ZIQU7rx2TOMyewdHZ3N5V0ul7QqaG5uRnJyMqLRrqIVLOTBymvhcBi5ublISEhAUVERBg4cKLkALE9PMLriiits4QW9HTosri+DB3FPB4g+BLh28bw+nHd+lx5YwzB6rOJ7xx13oK2tDXPmzOlWQnzRokVob29Hfn6+tJEYPHiwFG/xeDw477zz8Pe//x3jxo3DwYMHEYlE8MADD+Dw4cOoqqrCOeecg+TkZHR0dGDhwoVChp3AGot15hG53W7pKadBWR8w+vkp0wwD4fcCgQCysrJkTuiF5fdJorjXdP5sc3Mzamtr4XK5MGjQoG5eVsoFw6Oi0SgOHTqElpYWlJWVAYBELDC/o6OjAzk5OVI1kY3nSZhisc7wVRYBYLQDLYXRaBRJSUmi6NDbz2p9Xq8XeXl5OHDgAAYNGmTr48cKqx6PBxMmTMDgwYMF8Hsr38SBvpB/Pfbu3dvNsGIYhmAIvTy0dmsiosmK/r9gMIjt27ejpqYGDzzwABITE7Fw4ULMnDkTHo8HN954I5555hmkpKRg/vz5mDdvHtra2vDQQw9hw4YNuPrqqxGLdbaIePTRRyW81XnIO8k4I0yYE8u10bmX+h1poGBLJ657VlZWj2Fj3L/aMOgschUMBhEMBgFAPEr63AgGgxg8eLAUn6moqEBxcTEAIDk5WZQohs+bpomcnBzZR/SasfcnDYE0lLDwB8PnKGPcp+FwWOaKRrCEhAT4/X4xwmiiz3185ZVX2oxZx5JRna9NnOzr0B4moCtskIYCKjg0CHA/aE+xJlHakGiaJr766quj3t8wDFx33XV49NFHu+2xeByjoKBAcPj888/vxjE8Ho/g8NatW7txDD3/+hmcHIN4qg3OTo7Bd3RyDO4R4nA8jsGf6YV1cgyG9BOHnRyDEQLE4Z44hs6zc3KMSCRiw2Enx2DtCuJwPI7BMH6v14vc3NxuHIP7lXvkeDkG57WvgxyjqKjIhsNOjsH54DnlHNo42hPHMAwD06dPx2WXXYY///nP8Hg8mDx5Mv72t78hLS0Nv/jFL7B06VIkJSVh/PjxUql1xIgRSE5OxplnnoloNIr3338fGzZssMmUDuXVHJPPrisqt7a2Ci/kcJ5d3ANUwioqKjBkyBDb/DjninvbsroqxvK86OjoEI+p2+3G0KFDbZFBmud2dHRg+/btaG9vl2gz5nmSe7NSNWtJsL1dS0sL0tLSxCBGGeTzaTkm19BpGjxvXa7Oiu50MjFShGtN7p+QkICLL764T04YzRl4r74My7JsnlSgy1ATCoVs1eSZk8p5c96Pa0RjG2Vpy5YtR31+fItVfK+1LGuAZVkey7IGWpb1W8uy6i3LmmJZ1ijLsn5gWZb/m89almXdaVnWCMuyxvdGOeXQ4Dh//nxcf/31CIfDWLVqFS6++GJEIhEsX74cU6dORTgcxoYNG5CXl4cBAwZg4cKF+MlPfoLm5mZ88MEHyMvLw4cffogXX3wRf/jDH3DGGWdg9erVuOuuu8RjQgIxdOhQpKWlobGxEaWlpWhv
b7cppwBshJ4HgCZ0BO1YrLNATFlZmYA3w27YkoMKKq3zgUAAsVgMlZWVqKioQCwWw4gRI5Cbm2ur/sW5OXz4sISBaE9pbm6uKKzc1LpkN4Gfhz8VXAC2EE5uON6ToEplhIJMd772TgwaNEg8WLyPy9VVZt55cDutb85BJTte2HBvBgH0lFNOkf/j8wMQa5pW5EiYnARDf59reLRRXFyMiooKyY/jd+fPnw/LsnDqqaeisrISDQ0N+MEPfoCysjKEw2FMnjwZbW1tWL9+PR566CHJG5g4cSIeffRRhMNhVFVV4d5770VxcTEOHDiApUuXYvHixRg2bJiso2maOOWUU8Qj2drairKyMiH+en6BrgpwTmVTFzvShyiVH4Zus78Z16u2tlaajvv9fpSXl8Pv92P06NHIy8uzVXykHLS2tmL//v3iCaMST2sjyT33HEOm9AHLcG2CJfNXCaQAbAeg0/jBhvacE/b/YuGQUCiESCQieRqU7eHDhwOwh3kdTWYpQ1pBO54xbtw4+Vlbd71er20uWFXS6dXUByJ/ZrXeOXPm4M4770RzczNmz56NX/3qVxgwYABeeuklITtz587FPffcAwB47LHHcOGFF2LRokX4/PPPYVkWli1bhiVLlkjRCh50ek95vV74fD40NDSgpKREKujyYNfESZMnYlhdXZ28P9eK+aOxWMwWEs6593g83XJLq6qqcPjwYYRCIQwdOlRaQpHAU7EKhUKoqKiQsHTuFRI53p/4yz1GuaLMOj33XJu2tjYYhmEL7QS6CjDptU5MTERaWhpcLheGDx+O9PR0yfcPhUIit4wMirfuPQ1GlhBP+iKjnO8JEyZ0i84wDEPOHu6TWCyG5ORkeS9+h5jEf+siO70dLpcL+/fv74bD8TiGxuF4HEPjcDyO4cTheBxD43BPHMNpxNEcw4nDPXEM4nA8juHEYSfHcOJwPI6hcTgex3DicDyOoXE4Hsfg3BCHnRzDicPxOMbR5JZn4PHiL/ejxuF4HEPjcDyO4cThnjiG2+3GBx98gDlz5qCjowN//etfMXPmTDQ2NmLVqlVYuHAhWlpasHnzZtx3330oLCzEli1b8Mwzz+Dzzz/HSy+9hMsvvxzLly/Hz372M8FTrhMH03aIadu3bxfjCxUzjcPOfatDdLm3dWVcRplouWeEFGCvmVJeXo7a2lqkpaVh2LBhUpiQ16ZXl9dva2uTtn0s/kc5ZeVoGrr1+ct0Qb0O5PT8DD3h2iGg96dWPH0+H5qbm5GbmytFzCgLzEG98MILZS/31kCicaAvyqnGFKbLcBADKJtc99TUVInSc645/22aJtLS0sS40FeF+Wij7yajf+NgKEs4HMa6deuwfPlyRCIRbNiwAbNnz5af582bh+TkZAQCAZx88sloa2vDBx98gMWLFyMcDuPhhx/GSy+9hMLCQjz33HMoKytDNBpFeno6VqxYgUmTJuGmm25C//790d7ejrKyMqxfvx79+/dHdna2hJwAXQetJjk6oZmhAtpiz+IFBHFtFW5vb0ddXZ2QFXqHDKOz+EhOTo4tRw/oavRbWVmJjo4OaQhNcGdoTTQalR6SnE8qHty8JDnxLNJ8XlqK6Lbns3CDkiDwe62trfD7/eL9pjDzu0Bno2bOD8exrJZUPI5X4Al+JLuaDPD6tGzSIsjncVqydeihy+WSBuM9jUGDBmHkyJEoKyuTa9Bbw7wivt+BAwfg8/nQ1taG8vJy3HbbbXC5XFi5ciXOPfdcKelvmibeffdduFwuDBgwAHfffbdUFG1vb8ett96Kq6++GjfddJO0EUhMTERFRQXq6+sxfvz4bmGtJPD6nTlo1dZyqEmGHiQdzDGlLJWXl6OlpQU+nw/Dhg2zhY3oda6pqQHD/IGuHEASUpaop7dTK3g8eHgtkhwaTnRYoM4zBbpkm4eNkxyxDH16errsC2IB0LnHTjrpJNn/mlDrd3QOGnj6SrbjjVGjRtlkk/dNSkoS5Zxrzbxrp9FF7zHtqXv++edxySWXwLIsPPXUU6irq8PkyZNRWFiI7OxsDBgwAI899himTZuGUCiExx57DBdffDF+85vfYP369Xj33XcBdEYN/PKXv+wWQTJs2DB4PB4EAgE0NzejX79+GDRokBBe/azcg5Qt/sz2MiREX375pZB1YpJeb5IIHW5XXl4upGXQoEG2li0aC44cOQKv14uUlBTU19dLBedhw4YJeeecUkEmeeFa6Bwnzj3nnLhL4kVZ0VEO2tMQjXa2qklMTJSCUdrjSy8Jr6UxrbcGQn7+aPLsHLyPZVmC/fp3NAhynUn6mEPJ+2lPP2VAh4P2ZvzsZz9Dbm5uNxyOxzE0DsfjGBqH43EMJw7H4xgah/V7ao7hxGHNMZw4HI9jaByOxzGcOOzkGE4cjscxNA7H4xhOHOZ9NL46cdjJMTQOx+MYThzW76TntqfBM/h4cVjLoROHeX3Kq9OzG89bfiyOMWHCBADAkiVLMGXKFFiWhdWrV2Py5MkIh8OYP38+5syZA8Mw8OKLL2LixIkoLy/HTTfdhMLCQmzYsAHbt2/Hjh07cNJJJ2HlypUYMmSIzJN31iz9AAAgAElEQVRpmjj11FPh9XrR1NSEcDiMvXv34owzzhDjA5+P3Mx5djAKSht/uW8Z7UEM1mduR0cHAoGAKIp+vx9VVVVyTnA+dHgrz//y8nK4XJ2tmWKxmBgOiWH8WfcJpoLLZ2GRUV6bz66HPmc4+Axa2Sf+W5Yl/MyyLImo4fed7V36Ytjuq8dfX5dtwZzGHABSAVxX8u5p0HlgWZ19b/me39boVQ7qv3sY3+Sg0iL40ksvobS0FK2trVJ9sKioCIZh4LzzzsM//vEPmKaJxYsXS/7If//3f2PNmjUwTRMnn3wytm/fjqSkJAwfPhzXXnstUlJSJI69srISgwYNwlNPPYW8vDwUFxfjiy++wCmnnCJVyJweQloktVDQu8hDgAcOy2rr4gWmacLv72onW19fj8bGRrhcLmRnZ9vy4zTBACDhjQyZtCwLEydORGtrq4Qk1dTUiPWeBxb79GnPAZW1UCgklYYZQkOwZrEbfVDoGHMSCgBiaeGhw8R0zolpdjYtPv3005GbmwvAHrZ0DLkQ8OoLOeJ3nSACQHKW+HsedG1tbVK6n9Y0TYx4mGhPB9CZY6NDcHgfhv3l5uYiIyNDZNuyOiME/p2yXV1djS+//PKEZFu/azzZZu+uE5HtlpYW8X7Fk20aa/6dsk1rIXD8sn3JJZfYFIL/Cdk+dOgQ6uvrbb8/Edn+9NNPsWLFCjEwcB0PHz6MSCSChx56CKtXr4ZlWbjsssvwwQcfIDc3F1deeSWeeeYZDBw4EIFAAIcPH8ZTTz0FwzCk+mJbWxtcLheee+45hEIh1NbWYvfu3Tj99NPF28LoEMqo3lv6WfmuoVBIwhL9fr/kGvI9tbJIwhIOh1FZWQmXy4W0tDTbHPFe3CeVlZXyu3A4jCFDhqChoQGDBg2CaZooLS1FdXU1kpOTxcsKdBGOtLQ0hEIhkcWOjg7BBZ2zR7nQ1mitmFNuSXq
YR9Xa2oqsrCyYpik9XvmuiYmJuPDCC6UgVG/JjSbSxzO0gcEwOitPa8WVg8oRPUx8z564Cb/f2tqKr7/++pjPUFFRIfte4/D9998vIevPPvuszA9x2OPx4Oyzz8bnn3+OrKwsWJaFQCAgOFxYWIirrroKa9euxYMPPoiBAwdKPj9xeNOmTQiFQtKGgy12iMN8Hz6rNpLyd4bR2YtUE0WNw5QHyh2vQRx2u90YMmSIreUUABsOV1dXwzAMKURjGIYNhxMSErBv3z4cOXJEUnYA2HCYsqVzTDUOp6Sk2Pq6OnGYXmWGvzPViDisawnEYjE5Y5w4fCylVA8qC33F4Xj7iDjMs5BySsWeOMx9yQrr8XCY3zUMA5988on8/pZbbsHDDz+MWbNmwev14o477pDidJmZmXC73aipqcF3vvMdfP3113C5XLjnnnuwfPlyJCUl4b777sPKlSuxa9cuLFu2DOPHjxcvWHNzM9588034/X40NTUhGAzKGa/z/PWa8fn5M5VA3XaLnnwWASMmc9/TgEZuwH7Vpmli0KBBci47jRnRaBSHDx8WowZTbDIzMxEIBFBfX49IJCLyzJ7QVDIZ/UUc5bnEnGnyJaZUGEZnPiwNkcSSSKSzujArFOuUjZqaGgwbNgwNDQ2iQFP+DcPAj3/8YxuPOZYMco5PFJM5j0VFRTZdQ597AGReDKOrdolzOI1pjY2NRy1e980zfDshvv83h2maeOedd3DgwAGYponTTjsNwWAQe/fuxfTp02FZFv75z3/iwQcfRHt7O+bMmYP77rsPbW1teOKJJzB//nyEw2Fs2bIFDzzwAJqbm6UUOK15Ho8HVVVVAmTcHKNGjRIyy0OGB5km8noxWALeMAxbI22v14uMjAwR2GAwKNWAa2trUVFRgXA4jBEjRqCgoKBbNUCgk+SXlZWhvLwcbW1tqKurE4BmXka/fv1w+PBhGIYhhxU3vMvlQkZGBrKzs+W5uflJUGnl1OE22ntCBZuhavxDQOPGZPEF/Q6MXWfoW//+na1ytcWzp6Gtefo7JzJ4vaHfVGLWygHf56OPPpKDgZZ8fXA5n8MwDPFIOceSJUtw2mmnyQFN2Z47d+6/XbZ37959wrLNde9JthMTE09Ytuvr648q2ySMJyLbXOeeZFvL9fHKNn/3PynbAwcOPGHZdioE+/btQygUwrJly1BfX4+amhqcffbZSEhIwKOPPiphvn/6059w9dVXo6SkBE899RS+//3vSwXtxx9/XHKGGarncrnEuztixAjk5OQIKWDbHp1iAXTlepEkaes4DSgkU9nZ2di5c6etvx7QZVBkGkZdXR3S09MxfPhweL1eMUYAXRb0mpoalJSUoKOjA36/X4wAptlZGZcGEQCCcV6vF9nZ2cjKyhKjCSuA09tPLxDXgPJHxYzrwVBlKlc6AkB7ENra2kQh4X7Q66+jYeJFQMQbJE4616wvMqmJlGVZGD9+vJw1fDb+HY1GUVNTI8qN7kmrvRlaTlNSUnDuuece81nKyspsBiTi8G233Yb29na8+OKLOPPMMwXniMOWZeGTTz7BXXfdhfr6egQCARsO/+IXv8Af//hHjB49GoFAQN5F4zAxhAo4jRTEYT0XPeEwI1B0+KrGYaYc8JxmKC9xODc3V8i39ppoHG5tbZVQYJ37SexlscX09HSRRY3DXB8ts04c5lpyXztxWGOSZVm2dk8MieQ68nv8OV59kaPJJmCPNjkeHI7HBwYOHIjExEQbTnEdicOM1mLkR084HA+Tw+EwFixYgGnTpqG9vR1PP/00vv/978MwDPE4Xnrppdi2bRtSUlIwfPhwPPbYY/jBD36ACy64AKtWrcJ5552HSy65BPn5+SKbwWAQCQkJKCsrE8xsaWmxKXCUHc415Ve/K0OYiXXaGFJUVCT5m/QyMtwV6NynNTU1CAaDGDlyJPLy8my1C3itYDCIsrIyHD58GMFgELW1tWhtbUV9fb08Q0JCAgYMGCDGPgDIyspCZmamvB/PdvZU1fmV2jgK2Kuqt7W1detiwT9U3hlK7Ha7ceTIEdnjvL7b7ZaCer2RP/177pe+Dn1u8nlOPvlkMY5qPGKkwqFDh6TAFLmuNm5rIw3PZ7ZG/DbGf5SCCgCTJk1CVlYWwuEwCgsLcdttt8GyLLz66qtYvHgxDMPAypUrMWXKFMRiMTzxxBOYMmUKIpEI5s+fj9tvvx0dHR1YvXo1LrroIrG0WZYlwJGamiphtCweEAgEJE6eIVYMpYkHfLQ2MlSTPUuZe9Ha2oqmpiY0NTXBsiyUlZWJtX7YsGGSzM1rkwyUl5fjwIEDaGxsRCAQQF1dHTo6OiS3hNakQ4cOCaFJTEwUcuR2u8XCQwKQkZEh1nVuTB2PD3RuSIaTUBEAIJuWQq3zU91uNzo6OqRpMYk9BZyVNCdPniwHWm8Ogp4s5yc66MlgJVhtsfR6vTj//PPx5ZdfipWPVjPKgjMk4mggEYlEsHv3bpvsTJo0SXIh/tNlm6S/J9lubGz8t8s2K2mfiGzTG9OTbGvr//HKNoD/cdkGcMKy7VSsly1bBsuyMGfOHMycOROtra3YtGkT+vXrh+HDh+Orr76Cz+fDkCFDsHbtWkycOBHXX3+9eJzy8vKwaNEiMaIwtOvIkSOyd5qbmxGJRCQfTrfW0Iclw/C0Uudydebyswk80HV4swdhMBhEIBBAe3s7GhsbUVZWhtraWglPpmeT624YhuRrl5SUoLm5GX6/H3V1ddIDm4XtgM5ICdM0RRml4sjnzc7ORkZGhhACXfyDJIcKJb1fACSUlJ5kEgJ+15k7rZVSnk2APc2B89ObES8l4kSHZVk47bTTZK/ovcBUgpKSEpETZ15cvGfvzbNlZWXZrkMcfuWVVzBx4kSkp6dLIT2Nwyw+95vf/AaLFy9GNBq14fCzzz6LRYsWobGxUWoNOHGY901OThbPicZhPr/2RnEQh9kyip914jAVwpKSElRWVmLAgAE2HOb1qSweOHCgGw4zDJVy6cThcDgsea7MFdQ4nJ2dbfs/AN1wmEYYYq0Th2OxrpoXVHA0DnPPU7EwTbMbDuvwyaPJ4YkOp/dUj1GjRnXzKmoc3rhxoxhcmXPpxGGN33p4vV5MnjwZ77//PhISEvDf//3fUuV3+vTp6OjowIcffogbbrgBfr8fxcXFmDt3LjZs2ID169fj1ltvxfr161FcXIzDhw+LspiRkSHF31iHwev1isJJIwN5AOWXz6dD9WlcSEpKEhzq37+/FHZrb29HIBCQIkfV1dWoqKhAQkIChg4dKmHxvDbvU11djeLiYvj9fikuGY1GkZaWBgCiCNbV1SE1NVW4a3Z2tsgW/82IIL4TlViGFxOXtYeX/6cLLQJdRdD4eWIx50zLuZZPhvfyGkeTW8oslcjjwWTuPed1hw0bhoKCAgBdvVWBzhShzMxMfPrpp+Is0FEcmjPwvb4Ng7se/1EKalJSEp544gmUl5fjoosugsvlwosvvoilS5fCMAzMmTMHs2fPRkdHBz799FOkpaVh6NCh2LhxIzIzM5Gfn4/f/v
a3+M53voP/83/+D/75z38iIyNDWgVoy0BRURFcLheam5sRDoeRmZkpBImeFIYz6BYBvA43IAlNZmambEwdqlRaWory8nKkpaWhoKBAWj8AXQpONBrFvn37cODAAbS1tcHv9yMYDCI1NVUODVq6dAhULBZDWlqaKBKmaUqZaH0A8LkyMjKkoEZiYqJUWOU1CTx8T1pD9aYjaaDXgJtWW9FYAVNbTONZA3sa2sPUl6GtlUfbKMOHD7fliGjgOeecc/DZZ5/JRmRVTg79DlRc4o0HHngAubm58hyU7R/96Ef/dtlmO4YTkW3msvUk201NTScs27Sq9yTbtISeiGzTituTbPPnE5FtHhq9ldHjkW3nQdDTOBHZdnrVZsyYgUGDBiE3NxcrVqxASkoKli9fjsbGRpSUlGDevHmi9C1cuBA7d+7EunXr8KMf/Qi1tbWoqqrCww8/DAA2sjJs2DBZ2+zsbCQnJ6OqqkqKUOlwTO055TrxZxZxsCxL+uNFo1EJsaPyGwqFUFpaimAwiIKCAuTn54vHVXtu2tvbsX//flRXV8OyLJus0vNKuaypqUEoFEJ6erotNzojI0Oq8+qcs6ysLMndI5kJhULd6gkwR4rrrZ9NexE02WCObTxPFA0STrk5mgxSCTiR0ZOMGoaBU0891UZuKKcFBQXo378/tm3bJh52Kn46JJA41VsvAsNzATsOR6NR7NixA9FoFGeffTbC4bANh+fOnSvV2GfNmoXbb7/dhsOmaeLhhx/GxRdfLN4oJw5ToaPxIRqN2nCYssCftUJDHGZYLOfAicNUNoYOHSpE04nDTU1N2Ldvn3hWnThMjy7n14nDrGZOLGSxOG2ETExMRFZWligiThxubW0VUk8lWOMwAAnV5/4B7DjMtnc0zDpx+ERkszeDCvjRMJ8RUvrZNQ6PHDkSu3btEhxOSUmJi8PxOJNlWdiwYQPuv/9+GIaBNWvWYOnSpYhGo3jllVdwxhlnwOPxiPHFNE088sgjuOqqqzBhwgSR3SFDhojiBkAcHfX19ZJSQOWf8kaZ1GvD56ViSgz2er3w+/02fsFWcMz9r6ioQHl5ORISEjB8+HBkZ2dL1wpyglgsJulSbW1tCAQCkovMMNmWlhZYVlflaR2W6vP5bHUstCKZmpoqxiv9/HQsMaea3lSmCBF7yFl4ZlGWuU5sn0VjK8PhuZd1v95jRbR8G4rf0WQ2KyvLZhTm/RISEnDOOefg888/l3encRvoSs8gdsUzqpzQM39rVzrBEYlEsGvXLkQindUm//jHP2Lu3LlwuVyYPXs2Zs+ejXA4jCVLluDHP/4xzjvvPGkWf91116G5uRnl5eW46qqrsHv3bqxbt07aH2jwogVl8+bNEqrABaGlSCuYtCIBXVZOWvkikc5iFcnJyWhubhbQLCkpwb59+1BSUoIBAwZg4MCBSElJsRUKADrDx/bv3y9tRFh0Iz09XUJ7tDvdNE0JudG9pABInL2u1kjClJKSgrS0NAHWjIwMuQZJnz6MeFhR+PjuTFDXByjnFOgqytLc3Iz29nZkZmZKLlhvhba3nijn4Hd668U65ZRTbJtVk6Wzzz5brEbRaFQOEk0eeJ2eSFJWVpaQi3A4jKVLlyIWi+Hjjz/GV199hREjRsA0TTzwwAO46667YJomlixZgtWrVyMSidgMFI2NjcjJyYFhGNi9ezf+8Ic/YNy4cfj5z3+OCRMmCCgWFhZi8ODBGDBggJAyKqv0/Og2CJRzHaJBoKeF2uv1IhQKobGxEa2trSLboVAIw4YNQ15enoRHaktiSUmJyHZLSwtqa2tlLnmoaEslSTwPEnraqKSQYNOSmZCQgLS0NCQlJUmvPu5tl8slCgi9C/xuMBiU9kgkVfQ4cU+zDROVWh5M2guVlZWFSy65pE+yeryyTVnrDQGzLAtDhw61WfCBLitvfn6+WM651vH2AdBpra+oqEB1dTWmTp2KaDSK2bNn4+6770ZbWxvmzZuHa665BqFQCAsXLsS1116LU045Be+99x6GDRuGfv36YeHChXC73eINMQwDtbW1NgzxeDxiNWfrJAAiqzpvSRcQ4s/hcFgK0wGd1nqGrZWWlqK+vh7Dhg0TgxE/x70bCARw8OBBVFZWoqmpCX6/H4FAQDCT3hutQCQlJYkiTAWRmEqZ4xliWZbIaGZmpuxr5onqsF+ulTPUV3sTqZybpinhj1xPkie+Y0JCAk477bQ+EQcqWcczemOAicViOPXUU23eX75nQkIC8vPzxZNqWZa0Z9FySuW7N+/EM01zjPXr1yM7OxujR49Ge3s7Nm7ciF/96lc2jhGJRPDCCy/grLPOgtvtxvPPP2/jGKZpYty4cRg0aJB4bIAuHEpOThaljrLDMHaNvTQIOAsxUrHVOfSMWiEOM996wIABgo2UVaCLY1RXVwsOm6bZjWNoHKbBhRyDPX7pZaVXX3MMr9crOfopKSnIycmxcQziLT1obL9CjqHJMT/P3FPOKfmT2+0WmdAco7dDe3n7MvpiZI/FYrY8eN4XgPSRraqqkn3r5Bj6e3oMGTIE/fv3x6OPPor29nb87Gc/w5w5c2CaJq677joUFhaira0NF154oaQ5XHDBBXj77bexfft2pKSkoLa2FpWVlZLnaVmdnv9Dhw4hPz9f0i2Y2851IO7p9dLvRUWF32NhrYaGBsHD8vJyHDx4EBUVFYLJrDhOXDMMA/X19Thw4ACKi4vFy8+WWi6XS4pxEav4bBUVFbJv0tPTpX0MIxo0309LSxPvcWZmphQOIp4Hg0EbL6bSrJVTJwckn9Yecd6Pz5yQkCAtojTvPNroTWRAT6O33xs9erT0EOb3uMbf+973cOjQIUnDSE5O7tabXBtVJ02adFzP6hz/MQqqz+fDiBEjsGjRIixevBgjR47E4sWLsWjRIkQiESxZsgRXXnklotEo/vrXv+Ljjz/GjBkz4HK58Nprr2H+/PkwTRNvvfUWHn74YUQiESxcuBDTp0+XWHaSBR4m/D/2DGQYCw98bgS9AXlw8KCgx4XXo9VpxIgRkrSthdiyLJSWlmLfvn0IBoMIhULw+/1imaCCx81HwUxNTRWyTGWaJL+8vFxIM9AZOsBqaQz3JLBws6SkpCArK0tCPPj8bW1taGlpkcqxAGxFD3h48KAhoBCcTNOUKrFerxejR4/u1QYh4eDPfR1O5fFYIxaLSQsEHujacskiGLTg8/DlH33feIN9fWk1Y6GCiy66CIFAAPv378f111+PxMRErFmzBsuWLUMsFsOsWbOQl5eH/Px87NixAy6XCyeffLJ4FRg+VFhYKOGcfB7KJcNyEhIShNiS5LIADQk/rXk656KpqUkUUyqnycnJSE1NFWsnvbIMISSw79u3D/v370coFJJCBT6fT9oEEOSBLg80rZwMp6U3iiFlDM/1er22KoAs3ECPG9BJRtPS0sRbRW8FFRmSVR3yy7wRfkZ7pmKxmNyvpqZGjDSaPB1r0Bp9NHk52ujr4ZScnIzs7GybRZPK6ODBg3H48GEx3DF/C+hOiJqamrBs2TIYhoGPPvoI9
9xzD4YPH46nn34abrcb06ZNwzvvvAO324077rgDb731Fnbu3InZs2ejtLQUTU1NWLhwoeArCTwLKBGPSF4Z3gtA8vV06wgSeOJzJBKRPClWYWxoaEBDQwN27tyJuro6FBQUYMCAAbbQQb5rS0sL9u3bJ7nQ9B7QA8SKixq7fT6fGC0pN01NTfD5fMjJyQHQib/E/bS0NFsvae6FjIwMkXta50nC6anVHlWSJO41Gl24ZsRgnVtOhU9XF+7N0KF7fRl9jSQYP368YLZ+5uzsbOTl5eGLL76QfGlGdGgFWGP2sUZDQ4ONY9x9991xcVhzjHg4rDlGPBzWHCMlJSUuDmuOEQ+HNcdISkrqhsOaY+Tn58fFYc0x4uGw5hjxcFhzjJ5wWHOMeDjs5BjxcFhHq8TDYc0x4uFwXzgGPWmUv76OvvALwI7DmmPEw2HNMY4m06WlpaitrcXUqVPh8Xjw8ssvY/HixTBNE2+88QZOOukk+Hw+/P3vf0d2djYKCgrw97//HTk5Obj//vslfeGnP/2p9K3lPX0+nxiSiT80slMJZNSVDvkFIGtnmqZ4FNnmip8tKiqCx+PBqFGjkJ+fLzxSz83+/fuxf/9+NDQ0SH6p1+uVgln0sOp58vl8NmcTOUZ9fT3C4bBwZ8otW77wXdLT02UfZGdnw+fz2ZRxyqmOguA66ufgvmUrHfIpeqS1g+iss87qFVZqY+HxRl71xmjI4Xa7kZ+fb+MO3NP9+/cXJxrfhxFu+nPf5viPUVBfeuklPPPMM3JY//KXv8TChQsRCoXQv39/TJgwAX/+858RiUQwY8YMAMCKFStw9tlnw+12Y+7cuRgxYgR8Pp/kjlx//fV4+eWXUVFRIcJiGAYqKysxZswY6YWXnZ2Nuro6eL1e2YxAV8y4dnu7XC5bI3mg8/BjgnZHRwf69esnmwiwV4Lcu3evKIHNzc1wu93IyMjoZpVhaAStPLQessBCY2Mj9u7dC6DTW6fDzFjUgM/Q2toq/cJ06GVKSgpSU1ORlpYmygetPAztJfGjgqoBiQorwYVWMG4Il8uF6667rteen96SG46+bLye7jl+/Hibh5pr7PP5MG7cOHz66adCjmjJ7s0hlZaWhmAwiPz8fKxZswZHjhxBe3s7Jk2ahIULF2LVqlXYtGkTFi5cCNM0MXPmTPz6179Ge3tnG6LKykpcdtll0oPs1ltvRVtbG3bt2oU5c+aIRZtKVHt7O7Zt24bdu3fLPHq9XpSVlcE0TVvVT64n94T2FDIUmESnvLxc8pT9fj8GDx4sMsq1DwaDKCoqwsGDB9HW1oZgMAjLsqT6IwkPibZpmiLXVJpJ1EnEYrEY0tPTUVVVhY6ODvFm0duQmJiI1tZWCZPUlkha9tLS0sTSrg8akkDuMRYzIChTtnXoDcN3SHCee+65PgFyX2TbGfFxPIMFqpzWUJfLhdNPPx2FhYXyO33IaHJ044034qGHHpLCME8++ST27duHadOmISEhAW+++Sbmz58Pt9uNp59+GkuWLEFHRwcWL14ssjtv3jz069dP8n+IKcnJyTL/NGZR9uiRZ4EO7VVibicAKYVPL+LBgwfh8/mQlJSEsWPHYtiwYTJ/Ojz04MGDKCoqEq9SfX09TNOUoiXa4841oGLK/eb1ehGLxaQKNdBVWIMtboinVK5DoZAYNvnebAdFgwvJOeW1paXFdn16/HV1ad6D88l2TYZhoKmpydZi5Vijr+kYevTWy6/H2LFj5WeSOUZRDB06FHv27BHizjnXHuLeDOLw/fffjw0bNiAxMRGZmZlxcfjDDz/EgAEDMGjQoLg4PHfuXMRiMSxatCguDkejnZVFaaQFuuOw0/DixGEWVuG578ThcDiMgoICW36pE4dbW1vR3NyMpqamuDjMZ0lKSpIexRqHGRFWWVmJSCQSF4eJm2xL4cRh/p2amiptP5w4rPOr4+EwzyEdxaZx+LrrrkNeXl6vZeF4cfh4vVfEYQ7ySScOM2qIWNyTkrpw4UIsW7YMhYWF+PWvf41oNIpZs2Zh2LBh+MUvfoHi4mIEg0GsWLECgUAANTU1WLZsGWpra7Fy5Uqcfvrp4tRhZWkqXJWVlTjrrLMktD4rKwtVVVUSPUQMd+Ze6pzU6upqMbQZRme0DL8zfPhwW1400MX79u/fjwMHDkjBokAggNTUVCQnJ4sBhHNCTM/IyIDH45HfJycno729HTt37hQcS09PR3Z2thhgqDxyDYLBoNQT4PnicrmQmZkpUQY8YximrlspAfZ0JnpftUNHyyvQaWQ499xzeyVTx8uLj+aFP9bIzs7GyJEj5d+ce5fLhdNOOw3Nzc0oKSkRY5n2pn/b4z9GQS0sLERzczOWLVuGl19+WRY4MTERDz74IKZOnSpWuxUrVmD16tUAgE2bNuH++++X8IGrrrpKihf9/ve/x4QJEyRkgYuWkpKCI0eO2LyoWVlZYvVgqB+Fg1Z+5mqkpqaKUJaVlSEUCgkZjkaj0j5Ah6vs2rVLQr8Ym64t8Tq8gdZLehF4aLhcLvj9fhtJ8/v9AuYdHR1SiIFkitfm5uOGKygoEGWMhyyLHHDwHvq7mhABXeFZBF6GEJmmKdXljuVp0pbNvgzeu68kXntrLcuScFgNGFz7tLQ01NfXi7VbV7Q92qiurobH48H+/fsRiUTw+uuvi1xzTqZPn45gMIjLL78cpmli9erVWL16tcznpk2bkJOTA7fbjddffx2jRo1CQkICFixYgJkzZ+LIkSM2j09iYqLItfZmU36dhFlHBDi9EWVlZaiurpbCBY2NjUhLS7N5PEj2a2pqYBiGVMJjhEJLS4soxrxnYmKiFKYhGaFBRZcxp1xz7pkrRWw3Cy0AACAASURBVCstcw71Ho7FYrYQX8p1enq6zaNEedR5LjoyQMur/jeLORlGZ0iwBvF443jlWnuY+wr8+n4k/85rxGIxjB8/Hlu2bLEVd3F+buzYsVi5ciXWrl2L5cuXyxr+4x//QEFBAVwuF+bMmYPLLrsMhmHgwQcfxIIFCxCJRPDRRx9J9EtLS4vIWjQaxZEjR0QJIQ6WlpZK0Qp6XImH3N9UgEh8aMEmgadhsLW1VZRibeEOBoNSuIyRApZlITc31xYyxr1DxZmGSw4aKdnSJhqNiqKq28WQ7DC0XF8bgLwvjZR8z46ODvE86H2pvfsAbHmp+l2JXS6XC7///e+lympfZKc3w0ngj4fEUyHTSjGxJS8vDw0NDd3Oxb4q0MThWCyGbdu2YcmSJYJZThw2jM7wwgkTJsTF4QULFuCRRx6BYRg94nBtbS0A9IjDNAYCiIvD7e3tQo4DgUA3HHamCsTDYbbGYDVeJw7zbKOMxcNhl6uzCJnb7Y6Lw1QOdAikxmFWGaZcO3GYETDcKz3hsN4XgB2HzzrrLEyZMuWossfn7SuWco2OJyTYicPxFF2Nw5yXY92LRr4777wT4XAYK1euBNB5Xr/88svi5b/nnntw6623Ci7PmjUL4XAYW7duxX333YekpCTBX3Le5ORkaWlInsroi7a2NlubKp3Dz+dta2tDdna2
iLmHfeeQcffvghNm/ejGeeeQbRaBR33303SkpKMD09rdjIiAsHpYVPSkpiq7B4ak1kRmJIfYIkYvlKDSM09kRYC23iwjWXOWa1WtHT04NIJIKhoSHOckvZ8qgg+1xwLC4KVKCcjFriZ2gMiKDJyknuayaTibMaisYw4hPRzYdOVE0mE7vEUW3KqqoqxnFvby+KiopgMpngdDrR29uLzZs34/rrr1fgeGRkhEtiUd1bEcfi2FMjHMfjjR/84AeMZeLk9957D48++ijef/99WK1WaLVaDA4O4uzZs7BYLGhtbcW6devw+OOP4/Of/zweeugheDwetLW1oaSkBENDQ7zoEzYdDgesViusVisuXLjAzyYQCCA3NxcXLlxgq664QNPYeDweeL1emM1m7vtCsTxXLk3URE6eC5bT09MhyzIyMjIwMjKCzMxM7Ny5k2tlkmgkg5qIZbG/1Gf63nXr1uHo0aMwGAyMW3qvyMmiIYawLHKyVquF3+9nXiOXYDGpSTQaRUZGBrxeL/x+P1auXImmpiasXr0agUAA3/rWt9De3o6ysjJkZGTA6XSio6MD9957L/bt24cbbrgBR48exdmzZ9HW1jYDy+np6bDZbJzBkp4xcTI9Z/W4vfTSS8jOzuYkJfE4WZIkrFmzJiEnV1dX43Of+xxOnToVl5MBMAcn4mTqYyJOjneaMx9jtbj5WWibDyf39fXBZrMtmJPFPsyHk+m1RJxMXjGJODklJQXZ2dlscI/Hyenp6ejr60vIycFgEAcPHoRer4/LyfHWzivlZMLy2rVr43Iy6eTZOJmeldlsjsvJxBeJOJl08mycPJ/299QXV6qTE3Ey6eTs7OyEnBzPM0Hk5Pr6+styMunORJwcDocXxMmE5UScTDr59ddfT8jJibCs1snzXUs/MxtUp9MJq9UKAGytmJycRG9vr+IYv5anS7AAACAASURBVKqqCn/9619x9OhROBwO9PT04OGHH8bf/vY3fPGLX8Qf//hH5OTkIDMzk62U4XCYazlRIH80GsW6devgcDhw7tw5xYmP2gpBJCZJEgtItRVxrgOgPk1caItneblcI9ckshZSAgGaYLQpow2hSOKigFM3eg6hUGhGwiQAiuuIJ4C06RddpEKhEMd+pKSk8CmU6Luv1WqRnp4OnU6Hf/zHf8Qdd9yBSCSCwcFBFBUVYXBwEE1NTVi2bBnMZjOuvfZa1NfX489//jOysrLw/vvv8yLq9Xpx/vx5lJaWYsWKFfD5fLDb7SguLr4iixARGiVXiIfjSCSCyspKuN1uHDp0aAaOjUYjDh48iOLiYvh8vhk4lmWZTyXC4XBcHKv9/dU4jrfBng+O/96bU+DKcEzPhYL8Py0cqzlBjWNJkuByuWCz2eLimN4jy3JCHE9PT2PVqlX4wx/+EBfHVqsVlZWVCXFssVjYohoPx6JhTLzXRC0elj/44AP8y7/8C8fMqLHc19eHuro6/PrXv8ajjz4aF8uyLHMt6nhYJustAH5W8U776V8iY9FcLezxsLwQfp4PlmVZjovlI0eO4K677oLL5cKaNWtw8OBBBZbps4mMnbIccw3r6OjA9PQ0n2yJWBYTslGfRSzTdaanpxXeGWosy3IsiV08LE9OTiI/Px9Hjx5FT08PDAaDAst79+5Ffn4+XnzxRbS0tMBsNs/AcjgcRkdHB/Ly8pCWljZnviJtQeMym7Y4fPhwXE4mbbF8+fK4nEzaYjZOVuPh/7q2+LQ5GVBmQp2vtiAeS6QtqqurE3LyXLWFJEkL1hb0t0T6IhEnE5Z/8YtfJORkwjLhL5G+AOJzsrjB/7+iLxJxMuEkESfHMxiqr0uvJ+Jk8TrU53j64nKcDCAhJxOWT58+HZeTCcv19fUJOZmwnJOTE5eT53tySu0zU2amoqKCM1V6vV7cd9992LZtG5qbm9Hd3Q2NRoPy8nJs2LABFRUVWLNmDa699locOnQIp0+fRkNDA4qLi9kiYLFYMDk5yTVPCeTkDkjuOCkpKSgpKWFr0eDgIJKTk5GWlsbuN8Alf/WqqqoZ/U8EeLUlXn3qeCVNvAYtqvOduLIcK6xM/9tsNrhcLqSkpLBbgyzHsvrSdxJRXK7fBoMBTqcTfr+fYxviCWPqezQahc/ng9lshtlshsvlAgCOBSErs0iItLj5fD48//zzePDBB9Hc3IzBwUFMTU1h0aJF0Ol0MBgMOHDgAH8mNTUVFRUV+P73v88xReFwGE8++SRWrVqFw4cPw+Px4JNPPoHD4cDq1atRVFQEp9OJYDDIKd7FbNFiq6urQ1VVFSorK+Hz+dDX14cHH3wQx44dw5YtWzjWaNGiRXjooYeQl5eH2tpafP7zn0dtbS1+8IMfQJIkmEwmLFq0CGfOnGEBFAqFIEkSjEYjgsEgrFYrW9nopLm4uBhWqxWBQAA+nw96vZ4twbSoE27WrFnDc4BaorFVWwMTCa7Zmjh29DtZ+uZzSgDEMDQwMAAgRvRUk27Lli2YmpqCzWZDfX39DJeiy+FYlmMp4SmjuPpvahFJiwElsPJ4PHxyotHE4oBJBJAVlf5GMT0/+clP8L3vfQ+Dg4Po7++H1WpFamoqLBYLgsEgenp6AICtpjfccAM0Gg26u7sRDodhtVrx4x//GDU1NTh8+DCcTidaWlqQlJSENWvWsBEqFAohJSUFoVCIazfGexZ1dXV4/fXX0djYyGLo1VdfRXNzMx544AH88pe/RFJSEux2OywWC/7hH/4BtbW12Lx5M2699Va0t7fjV7/6FSRJwtVXXw2n04nh4WGMjY0hKyuLY/R0ulhRcoPBAK02Fsuo1+tRUFCAvLw85OfnY3p6GpOTk+w6RlxM40infxQHdjnsqPEcj5/mch0gPp6vtNFn8vLyMDw8DIvFwmU6xsfHsXXrVvj9ftTU1ODAgQMz8Cu6gsVrer0e/f39Ct4S52+8E2M6Nc3OzubM6uLrGo2G+0jXI26x2Wz44Q9/iBtvvBEHDx7EwMAAcnNz4ff7sXTpUiQlJeHcuXMYGxvDokWLkJ6eju985ztobW1ll32j0YgXXngBJ06c4Lp+bW1tMJlMWLZsGbRaLZcrorg32nSI91FXVwfSFhTSMZu2KCoqwtVXX51QW1D8dCJtIf6fSFtQ/On/dW1B3k2fprag71yItog3l0Rt0dnZidHR0QVpi3379sHv9yfUFmNjYxzDmUjU19XVoaCgABs3bsTw8PAMLFOG6uzsbNx1111YsWKFAstZWVn47//+bxQUFGDx4sUYHh6egWVKBkixqGos5+XlIScnB+Pj4xw6p8YyaRgqXUjtSrA8H04Wv4OuQXi+0iZ+LyVCU2M5GAziqquuwieffKLQBYTl2fpuMBi4dIzYP/U908906GAymRRYJmMPGbdEF3Eak+npadx000341re+xYm9RCybTCacOXMGWq2WE8KJWPZ6vdBoNLjzzjvx0EMP4Z133pmB5WXLlsHtdsPj8WB8fHxWLNfV1QFzLDPzmdmgZmZm4ujRo9izZw/+4z/+A+FwGBs3bsQf//hHtmQUFxejqqoKb7zxB
q666iqMj4/j448/RmFhIfr7+9HR0QGr1Yply5bh+PHjKC2NZdXMyMhg4BgMBuzduxfT09PIzc1lC5skxZL6VFRUoKKiAiaTSVHknECUlZV1WUFAEwNQWjXjLSZzbeTathDrqDhhyR/d5XIhNzcXkUgEdrsd1113Hfx+P9asWYNjx44pXJPm0oxGI2fAE11yRVdf0fpP90alfyYmJngRkySJBSmJe/p8UlISamtr4XQ64XK5sHXrVpw8eRLj4+Nob29Heno6srKy4HK50NjYyD74RqMRO3bs4FhQi8WCuro69Pf34/vf/z78fj+mp6fR2dmJtrY2nDx5Et/4xjdgsVhw/PhxFovFxcVwOBwKIq2rq4OI4+eeew6BQACLFy/G3r17+dkXFRUxjpctW4Z9+/bh6aefRmpqKvr6+tDZ2ck4PnDgABc412g0LMIIx0lJSbDZbJyFUJIkZGVlYfHixYxjr9fLrmWiiJwvjoH5WzbF06/5NtFaGwwGGcc5OTnw+/3Iz89HRkYGNBoNxsfHOT28eD+zNTIEjI+PKyz24uIhijjRPcdoNMJiscDlcrG7mBh7QyUXaDG5/vrrUVRUBLvdzjg+cOAA14HOysqCTqdDSUkJdu/ejampKSxfvhxPPvkkfvjDH8LpdPIJ7Z49exjHkiRxPJTdbsfIyAjuvvtuxjHF5JGgV49HXV0dHnvsMbz66qt499138fOf/xxnzpyJi+eNGzfi0KFDKCoqgk6nm4HnqakpBINBVFdXY3JykjMjEp79fj+MRiPOnDmD/Px8JCcnz8DzkiVLUF5erkgCIeLZYDBcdoMq4j0enueDyb83nu12O7RaLSYnJ2GxWBAIBGbgmUqbLATPaiOTGs8aTSz7r8ViiYtnMriI3yHieWxsjEtqdXV1YWxsbAaeGxoaEAgEUFBQgCeeeGIGnv/yl79g69atKCoqgs1mQ09PjwLPt99+O/R6PTIzMxWGIXE86urqcPz4cciyjL/+9a946aWX4PF4MDAwgJUrV+L06dO8wdiwYQNeeeUVFBUVQZZlvP322zAajXA4HLDb7SgvL8fQ0BDKyspgt9sxODiIrKwsTExMwGg0YmpqCsnJyTh58iSsVisLVcInbZRLS0s5jIXi22RZhtvtRlpaGt9/oqb2IFD/m09bqLYALhn+cnNzMTIyApPJhMnJSWRlZaG3txc33ngjfD4frrnmGtTX1yvm4ly+Oy0tjQ2H4udEw6O4OSDx7nK5kJ6ezkllJEliDiEtQZoDiGH5pptuwujoKE6dOoW77roLANDY2Ijh4WFMTEwgNzcXY2NjyM7Oxrlz5xAKhWCz2bB+/Xq89NJL7Oq9f/9+HD58GF/60pdQVFSErKwsHD9+HMePH8fAwABuu+02FBcXIzU1FVarFXq9HlarlevDi62urg6HDh3CU089hbfeemsGlhsaGqDRaDA6OorMzEzU19crsHzo0CFotVo0Nzdz2Rw1lmVZ5pJTRqNxBpaJc4qKilBRUYGysrIZWNZoLpVcmisnx8PwZ4GTjUYjhoeH42KZTupHRkYU5abmimWDwYDJyckZGkP8fhEDGo1mBpY1Gg3Xp9VqtbwpFK+zadMmDimLh2WTyYTy8nLk5OTgyJEjnBeGsOz3+2EwGHDhwgW8++67uOeee2ZgmeLzt23bxhgWsazmZPxvq4P6s5/9DK+99hoOHToEk8kEi8WChx56CEDMShAIBCBJEp588kmMjIzgySefxMMPP4xgMIjk5GRcf/31kGUZ4+PjSEtLw+233w673Y7c3FxEo1G2EFFGOEqBPj4+zlZR0RpqNptRUVHBhcX1ej1n7LucKBCBprYgzHXSiIHT6hOghTQCb1FREaLRKHJzc+HxeDgoniyKopC+ku9WfybeiZP6JEuSYqm2gVidVtrcTk9Pc4ZEWkiAmAVUrI12/vx5yLKM2267Df39/WhtbcWJEyfQ2dmJcDiMyspKNDc3w+PxwGAwwGq1IiUlBePj4xgeHkYwGMTo6Cj+8z//E2vWrMH4+Di0Wi2+/vWvY9OmTXjzzTdht9uxYsUKeL1eWK1WjvejzTfds4hjg8EAo9GItWvXIjk5mRM0iDj+6U9/il/96leIRqOorKxEbm6uAsff+ta3FPV76RkTjrOysjhNOJUgEa1yZrMZlZWVKC8v58QJ5GK5EBzPdXMqbs7mauSYS6P+iDimMfjggw+g0WjgcDiwbdu2eeGYFmtyVRJddugaFIMjLoiUTIC4JB6OqS8k/N1utwLHd955J9ra2jA6Oor6+nq4XC4cPnyYcXzq1CkcOnSI67ENDw/D4XAocLx//34WGz/60Y9QX1+vwDHdQ15eHtcxjmf5feCBB/DBBx+wtTwjIwNbt27l+rM+nw89PT3o7u7Gq6++iueffx6/+c1voNFoUFNTg40bN+L8+fO8kdi8eTMnl6HxoEXV7XazcBJ5GQDzc0FBAZYtW4by8nLmZbJuXw6T4sL998KzOlHEfJrYL3JlTU5OhlYbSzjy5ptvIikpCePj47j//vtniLi5NPIuEMW4uDEW+0FrQHJyMosLci2jhHT0T3SBD4fD8Hg8XDf13XffxZIlS7B161a0t7ejo6MDn3zyCdxuN06fPo2cnBzs3bsXJ0+ehNvtRm1tLYBYBvbR0VFIkoTdu3fj2LFjOHnyJPPHTTfdhJKSEuzYsQNTU1PweDwwGo1YtGgRMjMzmZdpTB988EHYbDb2SDEajbjvvvtw9uxZaDSxRCKTk5N44IEH4Ha78eqrr+Khhx5CIBCAwWDApk2bEAqFsHv3bqxZswYmkwlXXXUVkpKSODYMUBqRdTodfD4fBgYGOOEfiXvC8dKlS1FWVobMzExFPN9sGoFcWBeiBUS32L8nJ1Oja0YiEYyPj/P6PjY2xs+bYlOpzfV+ampqWJSrxTw1ui55WRCH5+XlMd4jkQi7K1I8KuG4uroaY2NjcDqdiEajaG5uRklJCYxGIxoaGtDb24vz58/j85//PJcA6ezsRENDA+rr66HX6+F0OjE4OAiPxwONRoM33ngD2dnZOHToEJKTk1FeXo4f//jH+Pa3v40LFy4gPT2dDc8NDQ2YmJjgzZ2akx999NG4WBY1Rl1d3QwsixpjcHAwLpZJY4gJQ0UsixqDPNtELIsaQxyLRG02LF+JAfzTwDL1i2LJ42GZNMZ8sCxqDPE+xPsWNYaoRwjLosYAMAPLosaIRCJxsSxqjE2bNs3AsqgxJEmKi2XSGJTzRY1lUWNcSfvMbFDT09ORk5MDAOz3HQqFkJaWxpaugYEBDrqWZRk1NTUsdrRaLbZs2YLHH38cb775JnQ6HZYvX87xIXRNWZZhs9kwPj6O48ePo729ncU/nbaIgpMILj8/H4sWLUIwGORNhgjKuRzrz6UlItu/l3WTWmpqKoxGI0wmE7q7uyFJErul0CRUu4DO1kTrF6Vmp1Ta1MTnSpOSFgw6RaXAdDplpfTZdOoiyzI+97nPscGAMqHt3bsXjY2N+PKXv4yioiJ0dnbi4MGD8Hq9yM/PxzXXXIPi4mI0NDTw99MJ
nNfrhdvtxtDQELZv387ldywWC1atWoVly5YhMzMTer0eAwMDGBoa4kWKYj7o2Yo4lqRYOZm33nqLNyeRSESBY5PJhKVLl8JkMkGn06GyslKBYwBc99Tv9yuyG9tsNrS0tODYsWNwOBycLGJgYECBYxp7g8HAFvxEOF6oJZ6ukchNbKFGFvU1RRxPTU0hKSmJXVJkOVb+aD44FjeYahyLJyNqHNMzLSkp4feI6eCpDiAAFBYWssAXcWy327F69Wro9XoMDw/jr3/9KyorKxnHo6OjKCgo4Li6aDQKv9+vwDEZMK6//nq8/fbbePHFFxU4HhoaQnp6Otrb2/HSSy8hJydHsQmnpsbzQnkZwKy83NLS8r+Cl0W8LPS6iXiZ7ud/kpcJu7PxMm16E/Gy3W6flZdHR0cvy8u0oUjEy0NDQ3F5GQDuu+8+vP766woPnm3btuHUqVP8msfj4Qz0wCWMUvm0G2+8Eenp6fjZz37GrsaVlZWQZZkT+FCuhOTkZJw4cQINDQ3sfeX3+zE8PMxjQviVpFjd7IqKCnZ9j5cdkz4n/n6lTTxhFHH39xD2Yp/I6EQJB4lP33vvPciyjOHhYdx5551znpcihinBpVhRQeQE8Z5EjwCquyxmnqWSeqJxYMmSJXxCS6K/q6sLTU1N2LBhA8rLy2G329HS0oI33ngD69evh9lsxsqVKzE6OopAIMB1WyORCLxeL5el+c1vfgOtVou0tDQ89NBDqK+vx7PPPotQKITx8XFEIhGMjIygpKQEvb29eP311+OeBlISL2r19fW45ppr+FlEIhHerAIxLFdWVjIfL1++HBs3bsRXvvIVvPPOO0hPT0d+fj7PBXLtpbnd0NCA06dPY2xsjL93bGwMLpdLsYkFoNAYBoNBkXRUxIo4ZvNt4gbt08IyAKxatQoGgwFLly7lJEQvvfQSaxly25/rWiBqjJUrV0Kn03GiJfV7RJ6g/ynhV1FRERu9pqen4fV6MTU1xTqIYpEpGZPP50NnZycOHz6MsrIyrFu3Di6XCz09Paivr0c0GqsfnJeXh7GxMdx///1Yvnw5H15R+GVSUhL+67/+i++7pKQEHR0deOaZZ2C323k9t9vtWLJkCXQ6Hd59911eM66kfWZcfL/zne/g9ddf50HS6XQ4dOgQW3ABcDwexZHKsoz7778fbW1tfOq2b98+znTp9Xp5koRCIeh0OnaRjEajGBkZYX/y3t5etLS08NH02NgYu+7QCQxZMvx+P4LBIHw+H4LBIPdnIUJIPGEST3Lncz0xxi3RBleWZX4u6enp0GpjtdoOHjyITZs2wel04tZbb8WRI0dm/S71IiHLsZqRdrtdsfmkiSROQnX2PJ/Pxy6STqcTAHhjKlqZlixZgrGxMUxNTSEUCiEQCLCg6evrw8qVK7F48WJ27R0cHIRer4fP5+PyMv39/Sy0SBDQwkTWxOPHj+PUqVNsfaJ7MJlMKC4uRllZGYaHhzEyMgKbzYbdu3fj4YcfxiuvvMLPIz8/H/v371c8J1q4RdfQG264AcPDw3yv7777LoLBIM6ePQuz2QxJkuD1ejkGNS0tDW63GwMDAxxjOjIywgRhs9mYJCTpUgIOsQQQ4ZhIh+JFaByvFHtiXOBCDSsidkUcxWsUz0RzcXp6Glu2bOGY0MzMTHR0dCTcHMdbPDUaDTIzMzEyMjLD5U60EKvnKomDpKQkhbs6nYLThnTLli3wer1ITk7mskq0uNAJ1OrVq3Hy5EkUFxejqakJGo0G6enp8Hq92LFjB86cOcOuRcRp5H5JG4v+/n4MDAxw/CclBbFYLEhLS0NtbS1kWUZXVxesViuP+9/+9jc89thjGBkZYV4m/C+ElwcGBuB2uxPy8vDwMHw+32eel+d7LZEPExlw6LnQs0nEy7MZe8S5MxsvAzMzkqt5mVywE/EyfT4RL9MpeyJeNpvNl+Vl2pwm4mWz2YzU1NQZvNzQ0IB///d/x8svv6zA8OnTp9mQJUmx8J7S0lKOuZMkCV/5ylfQ1tbG45+SkoLt27fD4XBgZGSE+0NzmjwBKN5ar9ejvLwcFy5cQGNjIyd38vl8GB4e5oy0IoYpOzBh+dPC8HxxLGJYPIlVNzqZJJzLsgyXy4WNGzciEomgpKSEY/cStXjaIhqNlWejTaW4MRGvResc/T0SicBkMsFoNPLJqOgxRvpi1apVcDqdbBSi8mEk7quqqrBo0SKcOHECGo0GZ86cAQDeZLa0tKC9vZ0/T/xPG2adTodQKIT6+nr09PQwTug+TSYTMjMzOWs75QiRZZk52el0xtUYooExnsZ45plncOzYMQBIqDEkSWJtEU9jaDQajIyMoKenh2OKRY0hJslKpDHEcZuvxoh3iDOfJmL4SjVGMBhERUUFQqEQl+65Eo0BxDilr68vocZQPye1xiBjBa0bao1BOsDj8bDBUa0xurq6kJKSwmu6WmNQ2AV5HMTTGK2trejp6YHT6YyrMRwOB2Ng//79wP82F9/HH3+cLT50EkQWH+CSCBJ92jUaDVatWqUQ1hMTE7jpppu4pAxZyMjKSS4moVAIubm5AGKiIBQKwWw2o6urC/v370d7ezuX8nC5XOjv72exQP0h8hkdHYXdbsfw8DCTMjWylJDrk3oC0IJJ/aD7n08TRcVcJjC5k0rSpex4VKOJXlPfj9jEEzq1u82KFSsYxKJIF0mU3NhogdBqtQrRQIuOWLuMXqcxJRcRcskBgOPHj8PhcGBqago6nQ4TExNobW2F1WrF1NQUJyEhayZZO8mXX8xwPDAwgF27duHll19GQ0MDT8Zjx47hqaeegiRJuOOOO1BWVgYA+MpXvqKwGooLNeGZFlpqS5YsQXt7O/9OIufGG2/k18STWrLshsNh5Ofnc4IYiquhOJ2jR49ieHgYKSkpCAaDnBqe+kLX0Gg0cDqdGBoaYldhNQaJAEdHR+O6NdJ9qa2AV9oSLT6JFg5ZljnZDxFnIBDA0aNHIUkSxsfHsXTp0oR9SWTJlWWZs3LSeIlCSW29pWtotbFEAwCQkZHBwiQSiSgWEzqJEnE3NTWFsbExXtTz8vJgNpvR09MDSZIwPDyMhoYGjI2NwWg0ckbn1NRU7o9YF402Di6XC3v37sV7772Hp59+GiMjI+jo6MC+ffvw61//Gu3t7bj++uuxfv16FBUVKZ6DyMuEm4XwstPpnJWXjUbjgnmZxuizzsuJxAzxMp1EJuLlRHNC5GXCQiJepmeWiJfpOSyElymhRyJeJtfu2XiZxG8iXu7q6orLy8AlDJO4WgiGs7OzZ9UWZFiYDcOnTp2aFcNiaZpPC8NX6skSD8OzGTEpv8ds2uLaa6/9TGgLMoYkwrDb7V6wtiCvhUQY3r59+6zaQhyvH/3oR4rs8ZOTk7wGAGBciqes9PzUhqibbroJAHhdpzA2iifVamOJ0gwGA7xeL5fdSUlJwalTp3ijHQ6H4fV6WUfodLqEGoPwTFxNjQzoifAshtvQfSxEY8zVM0GWZSxbtkzx+enpaZw6dQoGgwGhUAiLFi1SlHKb7TvFbNEAsHr1al7
/xO8UDVNkZKIxobrn5KFAz4bKyciyjIaGBhiNRrjdbj7YCIVCGBoaYuNeS0sLamtrWSc2NzdzVuLc3FxUV1cjKysLGo2G6wDTJjc5OZn5cWxsDKdOncK7776Lp59+GseOHUNHRweOHj2Kw4cP47333oNer8e6deuubKwu9wZJklIlSTouSVKjJEnnJEn66cXXyyRJOiZJUpckSW9KkpR88fWUi793Xfx76Vw60tXVxTes0Wg4toisWBqNBl/96lcV4liWZfzTP/0Tg5UIR5ZlLF++nHf4ABQnpzQxaNECYq4JV111FUpLS9ltaHp6GsPDw3C5XCgsLMTY2BgGBwdZCNOEunjfAGITnbKjEQkODAygt7cX586dw/79++Hz+fgzojVlPk092ebaRMs33QdNEqfTCa1Wi0AggOzsbIWAF79HPK5Xfz+5/InESfcpLmzqBc/v9/NCRpORXMlog9bT08PWZnJlDIfDcLvdCAaDMBgMWLFiBYLBIFunFi9ejA8++IAnIiVi0Wg0igB16o+YDOTBBx/Eww8/jL179+LIkSPIzc3lgsYffvghbr/9djzyyCMAgMHBQaSmpiIpKYmzwKampjIu9Xo9tm/frhjzw4cP8yaFniU982XLljHWCJMieVEMFG0eqqurUVVVBVmWOVNhS0sLhoaGUFhYiEgkgr6+Po6NEYUREBMGLpcLAwMDmJycZMuyz+dDe3s7enp68Mknn6Cvr4/vIVGZjytp88EwEXNGRsYMPB07dgySFMvGSHXv4m0waNypD2KTZZmFLLng0PXpZ3HDSlxCwohcvWiukTDWaDSoq6tjsifrKbmdkYFg586dcDgcbLm+cOECuru70d3dzfFx1A86aQEuZVolnktJScHmzZvx6KOP4v7778f27duRnZ2N5cuXo6qqCosXL8ZXv/pV3H777fjxj3+sGEuRl5OTkxfMy3fcccesvAxgwbxMbvA0voCSl2nMPk1eJjGxEF6mMfyf5GXqy0J42eVyzcrLra2tC+ZlAHF5Gfj7aguDwTCrthANvokwTCev89UWVH91NgzT2H+WtcXKlSsXjGG6v4VoC8oPkAjDtEldiLYQ14l4GF63bt2s2sLj8fD9qzmZvFEWwsmi0TweJ4vGkXicPDIyAkmSZuVkaok4ORqN4vz58wk5mQzz822i2/eVNDUnT01NQa/XJ+RkagvhZOKARJwsSRIbSRJxvuUimQAAIABJREFU8sTExGU52e12J+RkyoMyGyeT224iTi4qKoJWq53ByXNtcxmtEIDNsiyvBLAKwM2SJK0D8BSA52VZrgAwCeAbF9//DQCTF19//uL7LtsGBwcVlkNyxRgcHGS/byqtwJ3XaDi5Dnf2YjkOWZZRXV3N1yTrkCjuAXCgN7lAZmVlIRqNoqqqio/SA4EAent70d/fj8zMTITDYQwNDXFReZrYdO1IJMLlIgYHB+FyueDxeNjafObMGZw/f54Hdb4T70onmzougyyHBG6KT9qxYwcAwOFw4JZbbpkh4GlRme37RSs1Wa5FC54o8Okf/T0SiaCgoICFPi0iFNd2/vx53nTQpCYxR+nX//SnP+Hmm29mIdHQ0IDm5mYMDQ1xyQraIKg3LmTlpP/feOMNPP/88zAYDLjrrrvQ39+P8+fPo7i4GM899xw++ugjvPDCC/zMCA/k0jAwMACPxwOtVoutW7eyuwU1ik0SG1mmSktLGavkHkP9pc0jWWsp7qCwsJATg5WVlfGcGBwcRGdnJ5dRCgQCvNkR+0PjEggE0NfXh4GBAYyOjsLtdnMcwvnz53HkyBHFwjUfMSQKlLlscsXvIrzQBpUWsaSkJIyNjfFnpqamuEaXiFu1QI/Xli5dynwEzMwQKIogyjhNz44WEuprKBTi5DLRaBRms5lPwmiMJCl26jsxMQGtVouf/vSnnEDJYDAwHzmdTo6PI6MFxSaK/QNic/H48ePYvn07fvvb3+LWW29FOBzGyMgIAGDDhg04cOAAfv3rX+Oxxx5TjMOBAwd4DpERyGg0YnR0FD6fD7Is49y5c5yEh54JnVZQE+PrbrnlFv7b8PAwL6wZGRmMe7K4j4yMcBIsq9WKgoIC5OfnY3h4GLIcKwc0NDTESd3ITTReTA/x8uDgICevcbvdCAQC8Pv9aGhowJkzZzhuUEyudSVNzXOXayKmiRcNBsOMEltPPfUUn6p9/etfj2tYmQsvl5eXQ6PRsCGE+FO9GSQcEc94vV4UFhYqNq4+n49P/Lu6urgsUmpqKgsor9eLwcFBTE5OYteuXXjssceg0+kwNDSEhoYGNDY2sgvYkiVLIMuXkriJoR20Toq8/POf/xzBYBBZWVkIh2P1Cg0GA5544gnKFsnagrTAQrQFeS0k0hZ+v/+y2qKoqGhWbTE5OTmrtpiYmListqDSVv+T2oKyzyfSFsTJ4vddqbYIBAIL1hZ06p1IW9D9LERbUN8SaYs9e/bMqi3EuHNRK1MYAGXijYdnSZKY58QmamU6iYunlcXNayKt7PF4oNfrE2plWmsSaWXK7eF0OhNq5dlK8Fyu0TjG8/6K1xJpZQolIYOLWivTdy1EKxPn0aZWjWe6LpUziqeVyVCRSCt7vV44HA4kJyfHxfPIyAifqs+mlUXjglorr1ixAsPDwxgYGFDg+UraZVlIjjXvxV+TLv6TAWwG8JeLr78G4I6LP3/x4u+4+PcbpTms8jRZSKD867/+K2RZRk5ODnQ6Hfr7+3HvvfcqBmlqagoZGRkAlEKXNl0+n48nGrlLATFLBQ06uTSIp0CyLOOTTz5ht46CggIWmwaDATabDT6fj0URxVaphSE1q9XKabnD4TCOHTuGDz/8EC+88AKeffZZ/PznP7/c45nR5rKASFL8MgT0mlZ7qewFvT81NRXnzp1jiwuVoFFfdy5t2bJlkOVYwgv6DJ0GiaeEANgNB4gVQKZnSRNvenqaY/UikQgnUCH3GdGCPTY2BovFgvz8fF7sSWhRwiGTyaSwAonfT0QkYkSWZUxMTOCxi6U38vLycO7cOezbtw9dXV1M/gaDge/LZDLhRz/6EZfO8Hg8OHnyJJ599ll+jkQqohim/hGGgRhZkYsFncaRBZnIRzypo2d68uRJthBbrVbOet3b2wuj0YhwOFbex+v1ore3F4FAQLHYAzGspaSkoKioCEuWLEFhYSGam5tx5MgRvPzyy3j++efx7LPPoqmpaU64EO9dxFIiXIlYj0f4IsYnJiYYY2TtGxsbw9atWxNaN2dr5KpHFnaxH+oNqLhZdbvdkKRYTTrqnyRJXBaH+Ij4h/oMxPjJ5XLB5/Ph5ZdfRlVVFYLBIAKBABsJZFnmpCyEIcIE3TcJZBHHWq0Wu3fvxuOPP85id//+/Th58iQXbadMxNQMBgMnSqioqMA3v/lN5OfnIzU1FePj4zh16pTCfU6j0czA9MTEBJ9gifUIReOC6HZK8cMpKSmYnJxkHuns7ERXVxdvgChxRSQSSw5WVVWF1NRUFnF+v5+zIwPKxT41NRWlpaVYvHgxVqxYAavVivr6ejz33HN47rnnsH
37djz77LNz5lpa/NWvx2vixkuM66NWUlLCr/X29nJcD13T5/OxgYLaXDFNSa+IT0RjixrT4kkw8XJpaSmvlySGCNNut5sxrdFo2DU3NTUVTqcTExMTeO211/Dwww9zxkm9Xs9J3sQYNfIAEDMPi7GGdFogyzJ+8pOf4Mknn8Tvf/97dHZ2Yu/evdi1axeAS9qCnvlCtAVhOJG2oP7Mpi0cDses2oKMbAvRFs8888z/uLagxDv0frW2oHVbfd25NNIWxHXA/LVFOByeVVvQWCxEWxD2EmkLSZJm1RYnTpzge+/q6lK4JH/ta19DcXExc8bExAS+/vWvK07hqDSJ+IzFuFNRO1MOAMIk/UwcTTGndI3u7m4OT0lKSmJjZCgUQm9vr8I1l/LD0AaK5gVdr6CgAIWFhSgvL4fNZsPp06exY8cOPP/888zLzc3Nc8IIXXeuHCnimfol4nnp0qWMM50uVr+c8ixEo1FYLJYZbtVzxfOGDRt4HafvJ0MpjSNhmzzjJCmWMEmj0SArK4vfT9n1KRSEcDk1NQW/38+c7PP5MDg4iJSUFDQ1NWHNmjXo6+vjdZ1ioysqKhReenT/dH9kSKMxJgx++9vfxiuvvILs7Gx0dnbio48+uiKNCMwxBlWSJK0kSWcAjAL4G4BuAE5Zlmk0BgAUXPy5AED/xYccBuACYL3cd/T398NisTDR79mzBxUVFVwEWMwkCYAHanx8XPE6nWJEo1FkZWUparVRhkECFxFDWloaRkZGOI6ByoNQnaLh4WG2jAWDQRw5cgRmsxmrVq1CaWkpgyEajWXTvHDhgsK/XiSiyspKfOlLX8K2bduwZcsWZGZmIhKJ4IUXXsDJkyfnMhxztgSRxZACy+m5ia2qqopj4U6fPs0+9d3d3QCAgYEB/Nu//dsVWVRpQlHpB6PRCK/Xy30SAU19onFJTk7mRZ6EEJ2OhkIh+Hw+TE1N8caNLMTk+hcOh5Gamgq73Y6dO3fixRdf5OxzRqMRXV1dXO9WfB5kwSKhJS50okWRFp0nnngCb731Furq6vDxxx9z0e709HQW6Bs2bMDTTz/Np1sUB5Cdna04oRA3MCR+MjIyEI1GOT28JF1y8aVMhjS+dCJKgez0utPpREZGBqxWK6LRKHp6ejhwXa/Xo7m5GZ/73OdQUlICp9PJsSU+nw+9vb3w+Xy8wNHY0T3efvvtuPvuu/GFL3yByyTs2bNnzhgWx38u7xUTMKmxSPMhHA7DbrcjKSkJfr8fZ8+e5Y3NkiVL5rxYAJcwHI3GUvMnJSUpLNDkqqW+BzL6EAcZjUZIksRuvNFolHHqdDr5O+g1Os1LS0vD5OQkwuEwtm3bhpGRETgcDs5qC4AzP9MzIFFF1mfxZEw04NDz+uUvf4k//OEPOHToEOrr63H06FE0NDRgaGhIcU+U3VSSJAwMDGDPnj28QV68eDG7AAFKfhbjyLKysnjzkp2dzZimjTwt8EajkTmio6MDfr+f5wKJebp+WVkZWltbkZycjJGREYRCIXR2dsLr9WLlypXQaGJJpYibx8bG0N/fD5/Px5xChgf6/jvuuAPbtm3DnXfeierqakxPT+Oll17idSQRhsSTh8s1mlN0skQbQ/V76PnRfUejUfz0pz+FJEkYGhrCN7/5TcX6drlGYicajWLt2rW80RW/Tz0vSXzTaSbNL0pKpNPFSlB4PB74/X6cPn2ahSxxUTQa5QQcwWAQLpcLu3fvxle/+lUMDw8zXl0uFzIzM7FkyRL+bhG31Oi5ieKTuNvtduPtt9/Gm2++id/97ncAwJm8CVcrVqyA2+1GcnIykpOT8bWvfY1jI+k5ud1uLoWiFrmyLCuyv0YiETYW0IaH+Ig8KSiZWTgchs/ng8ViQUpKCgYGBuByudj1rr6+HrIsY82aNbBarTh16hQbEgcGBhQ6hfpEmFuyZAm+9KUv4Y477sDWrVuZw1588cUZ+EqEy7k2um9R1Ks/TwJXlmV4PB7odLHauXRy6nA4sGHDhjl/J3Bpw5CTkwNJkhTl12h81ZxMz4qSykiShMLCQo5TBaAIC0hLS2PcUQuFQoy50dFRvP3229i+fTucTie7DlPiK5PJxM+DMKLRaBTagua9mpPD4TB+9rOfYceOHTh27Bj27duHkydPzsi8nZmZOQPPap1B/Sc8q3UGrWcinkWdQdwo4lmtM9R4VusMep+IZ7XOUONZrTPUeP60dIaI53g6Q41ntc6YD55FnQEgLp7VOkPEs1pnUJ8Jz2qdQX8X8azWGWSoFvGs1hnx8KzmaxHPos64kjanXYcsyxFZllcBKARwDYDKK/qWOE2SpG9JktQgSVIDvUYTRKfT4fz58zh9+jSAGCndc889MxalSCQCs9msABJNZJqUonAS3ZhEdyGtVsuB7fRecfNALoIEyOnpaeTl5aGjo4M3qzfccANKS0u5xhcRALmbEXGKR/VpaWm4/vrrcffdd+O2225Dd3c30tLSuP/xNoX0ejzrpdhoESciVE9+apFIBJmZmdDpdMjJyWFQ7dixg91yKJHA5RY5EkBiP8nqRAKA+iH69VN/xc9RLVKxnAVZqoPBICeioXuixSYQCHDSAFmWcfLkSWzbtg1vvPEGjh49ylakpKQkWCwW/k4SrNTEZDbiM6dnSacFZ86cwa5du/DWW28xnsh40tfXh8bGRsbYjTfeiJKSEj4RAICOjg7U1tYqxrq1tRUZGRls3SeBHQqFoNfr+ZSLNiqhUAjDw8MoKyvja1PiDZ1Oh4KCAsiyzLEKGo0Gk5OTWLFiBQ4cOIBDhw5Bo9Fg48aNyMvLg9/vR0FBAfR6PSYnJzEwMJDQ3SQpKQnXXXcd7rnnHmzcuJEzM4rPTY2RRBtNdZsrhqmJmJJlWVFTV4yTmK2J7xf7Sn8TRSG9V41hEUMidgk3ROK0UaPMe2QVJYwaDAa43W5MTEzgwoULeP/993H27Fk+mXG5XAqruHif4qKn3jRSIww3Nzfj448/xnvvvYcDBw7g97//veI+6P5SU1ORkZGBQ4cOMX9WV1fD4/Gw5ws1ckGj5nQ6mavJCAlcqstJC7HIy+SSRJxNm2T1Zkqv18Nms0Gr1bIBihKCnTt3Dps2bcKiRYvY+JaWloZgMMiFytUWYHp2paWluPvuu3HLLbegtbVVYeQTxbho3U4k8kUOmauxz2q9ZNelkxwy4lEZiY0bNypwOdt3032JnjNkXRffS/ihewMuzQv6neodih4uJGqJe0mw0FhNTk6ym1hzczMqKyvx+uuv48iRIxgYGGDjY2FhIY8F9Z1+By5lrBbHg/iJ1q+JiQlMTEwAgOIESKvV4pFHHlHM2+npaWRlZSnWJ4fDwSdMABhnwKUkjvR6OBzm8AoSZqLhKTk5GRaLZcZ8JG5LTU1lcZ+SkoK8vDzU1dWhsbERy5YtQ3V1NQoKCpCamsragpIr0ZpF30nfm5aWhltuuQV33XUXrrvuOjz//POX5eW5blBFXqbf4zVxnC5cuMBeRCMjI7xZrK2tVZzIJGqEP8KvuHkUDwPo76JHi4hp0nPEV/ReKuNGG
1jSoQBYW3g8HsZ3KBTC5OQkOjs78eGHH6Krq4vjVMm7g76PuE3kZTJQic+T7mF6ehojIyP45JNPsGPHDvzxj39U5KgALmnUpKQkfO9732NNK0kSNmzYwBqErmkwGGYk8SGtSVqU+uP1ehV5L8jTQqPRoKuriw18APiUPBKJYGxsDMXFxTAajdDpdBgdHUVKSgqGh4dx4sQJeDwerF27FtFoFPn5+aisrER2djZ7uPh8PnR3d2NyclIxp6mvt956K774xS/itttug9VqRVdXl0JLiLqdXlPzrYhxERvx8BxPK+fk5PD8D4VCSE1NxV/+8hekpqYiHA5j5cqVPLazzScRo9TX9evXIxq95EJN656ae9V9Hh0dhVar5RJJVIViYmICPp8P586d47h30dPK6XTC5XKx1n3iiSfw29/+Fi+99BIOHjzIGfdlWcbGjRsVOofmlmjAoDkj8ij93e12o7GxER9++GHCZxKvXVGggSzLTgD7AFwLIF2SJGKVQgCDF38eBFB0sXM6ABYADtWlIMvyy7Is18qyXEuveb1ehVsYEQcJdHUqZrJyUhMHFQC7kBDQKWU4CTx6Hy0wABTER9cU3TM0mlj5idzcXGRlZXFRcEmS8O6778JkMmFiYgLFxcUALiWQ8fl86Ovr48B0Im9xYbnqqqvQ2NiItra2GS5z4qIi3qM4sej9l9u8qhvFH2RmZkKr1XJJFrrm1NQUSktLGZTqJhKEKCQAcAYwumex0UQWJ6oobKhv4saCxpRiOomUp6eneRPs9/sxMTGB6elpHDp0CCdOnMDo6Ch6enpw6NAhvP322zh69CgmJiYUFjZxMyQ+ZxofUZSILg50cg5cwjD1jcgsNTUVo6OjXLCYmjpbHS2Q6lMM6qvT6VRYXqlPZBET5w81cfEnHIkYps+6XC58/PHHHAtYXFwMr9eL9PR0+Hw+TE5O8umauNGifpjNZnzhC19AR0cHurq6FHGIalGkXjTExWE+GCbxSK6o4XAYb7/9Nm80SSwmOuGi76TnLvaPMKx+jtTERVBcIMmIYrFYGMOitZqw6/F4GMO0caDnnZKSgqGhIZSXl2N0dBRNTU3Yt28f3nvvPfT19QFQLqT03Og+RZFGGCZMi9ZbmlP9/f0zng0tUlNTU2z8oeufPXuWF2exqTfDeXl5sNvtPFbkJUGllSgmjdyUJCnmvpSZmcneF/Q3MQENXSccDqOwsBBbt27FVVddhauvvhput5tPDD/44AMEg0GUlZWhrKwMixYt4s1qKBTCxMQE7Ha7gptpPGmOtbe3o62tDT09PXENGOr7F3FMz3mumwAAsNls7C5IMclerxdPPPEEx3ZVVlbypk99bdGgq95wAsD69evZuEd/E/lO/XnCzPDwMKLRKAoLCxV48/l8PB6ioKfyHoSl8fFxGAwGnDlzBvfeey8cDgeamprw0Ucf4Q9/+AN27drFz1d8nur1j9YNcU7TZkdch8jlbaG8nJGRMSsvi4aO+fLyddddt2BeFj1+4vGyuFH5LPOyOIfi8fKKFSsuy8vivcTjZcL0Qnh5YGBgVl4WN6PxeJk8OWbj5dbWVsWzIbd6cqWnMaUDB/osNafTOcPbwufz8fOiGusAFNqGvoOMAAaDgT1qaDzVhj26Do3vzTffjNraWvj9fkQiEeTl5WFqago7d+5EUlIStmzZgoqKCkSjUdhsNlgsFgQCAdjtdrhcLsYxnegSJ0ejUeZkSrYkjrW6qT0Jqb9X0ognUlJS0N3dzTH2VN7P6/Vi3bp1CeeK6O4t8hk18o6jTLyiFhQPScT70Ol0XBpJ1LA0duIml4yRRqMRWm0sKSNlX9bpdHjllVewePFiOBwOnDlzBu+88w5+97vfYffu3Yq9Cml+ajTW4j96BurESlfS5pLFN0uSpPSLP+sBbAHQithG9UsX3/Y1AO9d/Pn9i7/j4t/3ynPsFWWXIrEvy7Eg8JSUFDgcDnbzpTY+Ps5xIsAlt19qdJpKBECDQ4CnB0aW+z179rAYuni/MxZw4FLyg6ysLNx5553QaGL1sGw2GweL79q1C7IcK14eDAZRX1/PZSfcbrei5Ie4IQJixDo4OIjW1la0trZicHAw7oQTLbmJLKOJmng9mkiUZIImMgkAh8OBm266SbERFReP2ayvdI9JSUl8cqI+hROJha6n0Wjg9XoVG3SaeGSRURM8WY5p7MmK7vf7UVRUBJvNxtal1tZWhaWbriFawcn1TMSLSAD0s4g5wrBWq2XxotFo+KRJbWQhoqfmcrn4eiIR0XdSsgSRvOj9fX19OHz4sIIE1As7jRVhuKamBmvWrMHU1BSamppgs9kwNTWF/v5+7Nq1C1VVVaiurobBYEBnZyeys7PZwksYVk9vWkAIw+3t7QoXI7HFO62ZK4bFlpeXB+CSsYXqi1H/pqenFVl1CWtE3LN9p4hhsoyLeI+HYfG+yNWd/lHcnvhMxPgneo/X64XH48Grr74KjUaDoqIiWK1WSFKsJm5jY6Oij+I8oHumfwAUIkjcjJCon56eVmSLpEZunBpNLK6aMkBPTk6iqqpKYVGnZ6AWSKIxUdx80NwS474ITzROo6OjnPVYHTohPmfRpS0ajaK8vFxRggiIbWhPnDiBXbt2seD/6KOPMDIygqysLI6l6uvrU4gueqZkNKWyJq2trey+GA8zgDKO6XJLoXoOEJ7T09N5rpNHDuGgsrKSPyPyhXideE3cyFHiD7EP6n6JxlAKpRDX3HA4VsuXngfxKJUZoazmhO0//elP8Hg8WL16NdauXYvly5cr6tvS94pzR5Ikdl2bnp5mHqN/5FpMvwNAW1sbG4kJuxQyEgwGYbPZ2BuFGpXtomYwGDjeSzxtN5lMHB9MRnQ6sdXpdMjPz2e3eLfbjfT0dOb8gYEBxdrh9XrR1NQEWZZRU1ODTZs2IS0tDX19fcjIyIDL5YLdbsfBgwdhtVpRU1ODcDiMEydOwGQyIRyOleg4f/48u86L40cbmO7ubrS2tqKtrS2htlALY7Ugna2JY0ehFXl5eZiYmIDZbIbdbmd8DA4O4sEHH1TwsngaNtt3ms1mVFdXIzk5mZNTqbUFzTmR70lb2Gw2djvW6XRcs/fAgQN8akqhF+SaTNidnJzEn//8Z643arPZ+IS4sbFRwUk03sRthF3RqKc21pPeSUpKYr0jNppX9HxoHrjdbjgcjhmHCRTbLLbs7Gx287VarTw/+/v7IcsxN1aDwcBJciKRWAUAvV6Pvr4+7N27l0MyaF6LY0beEDQPS0tLYTQaeeOv0+l483306FF4PB7s27cPGo0GH3/8MZxOJwwGA3Py6Ogoj6t6o+9yudDW1obW1lZ0dHTE5WRaO8Q1Yy4GF5GTabMXjUZRVlaGaDSKpKQk/OIXv4BGo4HH40FtbS0/a1FbEBZFw4W61dTUALjkcSjqNrWWIyxrtVo+uCkrK4NGo2GdSSFDExMT0OliJfho46rX63kPQZ6hHR0dWLp0KVavXo1rrrkGpaWlzFnisxB5WfSeoTWK8E58LB7IXUmbC+PkAdgnSVITgBMA/ibL
8k4APwDwiCRJXYjFmP7m4vt/A8B68fVHADw6185IUsylgMAuyzK7C4yNjcFms8U6fZHARMFPE8hgMABQlr6gh0TvA5QuaKLFnKzDtLiKacJFoSr2WZIkrFixAuvXr0dNTQ1kOZaAwWAw4MSJE9Dr9cjMzERdXR127NiBrq4u5OTkMDD6+/vZYiKCl/rkdrvR0tKCs2fPoq+vT0HgorXrSqwTIsDo+wDwguV2uzkwPxqNstsWoKxRNpdGz4QSCFGLF8MHXLIUUbbbwsJCxX2S27HaCiZu9EmshEIheDweZGRkwGQy4aabbsINN9yA6667DjU1NQrXT61WyyQtCi+6PllXaTLSRKcMgPTder0eFosFXq+XY4Apy6Qoouk+xQ0+kTJwya0PAIsgMRW5eMpDk59ingjDIiHT+8RNLXDJqnfdddfh2muvhdFohMvlQk5ODvr7+9Hc3AxZjiWI2LFjB3bv3s0Yp1hXu92umEeEZfp9YGAAZ8+exblz53hjpsbRlW5MReFqNpv5evQMdDodW/DHx8e5rqx6kzaXVlNTwxtvcc6I1xD/p/uiBD8ihgmb4nMQMUa4IJyRZTYjIwOLFi3C7bffjuuuuw5XX301uwmTwKNnQGMcD8N0TfpfrGepPgkFgJ6eHmRkZMDj8XCpBSAmcE6fPs2cLRq1SFzRveTn5/PJAhmJgEuue1TSSDQYhMNhZGVl8cLe3t6O9PR07jdtGMiyHggE0NLSwv02Go0wGo3Q6/XYunUrVq1aBZfLBZfLxZZqnU6HqqoqDA0NYceOHfjwww8VhiBKv0/ZlgnXNMZALH9CY2MjmpqacOHCBcZmIqOd2MSNl3ojQBxE39fa2oqUlBR4PB68/fbbkOVY0pGbb75Z4f4813lEBlRZljkng4hBGlNRoNH6R+6zFRUVCu8At9uNI0eOsPAn3FGsPP3s9Xo534AkxeKgKisrsXbtWqxcuRKbN29mvlR7a4ineTSGamu+2lgrSRLH7c2mLei9ibQFfV8ibUFCcTZtQTw8X21Ba+ls2sJoNM6qLagl0hai19J8tYVoSAJmagsa24VoC4vFsmBtIfY7nrYQx3O+2oI2DYm0BYBZtUU8DxU6QSduAmKcLIYG0P1SORTxEMDpdDLPkbcDAN500uZYvHdRW4h4ovfRnBQNemIjTKWmpmLr1q0oKiripGEajQb33XcfIpEISkpK0NXVhR07duDChQsYHx9HdnY2e/H09/cr8CVyMsVrNzU1obm5WZE4Ue3Fp25qXRyPk6lWOPEvzWN6j1gHma45l7WA3puRkQGNRsP5R0QuVl9P3DRTnD+FJpAxbWpqik9IafzEa9H7iLvcbjf0ej2sVivKy8tRXV2NVatWoba2VuFFSHo4OTlZ4dpL2BH7OG+Nd7k3yLLcJMvyalmWa2RZrpZl+f+7+HqPLMvXyLJcIcvyl2VZDl18PXjx94qLf++Za2fIapCZmcmZKjUaDSwWC8c50YPZXgcSAAAgAElEQVSQ5UtuNDTxgsEgC1KRVGnyiGRAnyVrFhERiVDh/hn0asKlJoKPAFNZWclp4JOTk1FeXo6lS5dCr9eju7sboVAITU1NmJqaYj/+jo6OGbEGotWFBMCZM2e4OPJcFg9xwU50WkSLMQVby7LMp3F0f2Lw9pVYQshNTd1X8TRR7fYgbsJFAQyAyVucqGoCoHslAW42m2E2m/kEgtynKKMZNdpQipZ6WmQoYZd4wiw+V+AShunkhvoi3oM4mcmCRRuVQCDAGBaTGFBcgmjxU1vSxPi9eEQsWrASYTgajaUVLy4uRmVlJex2O7xeL2RZxoYNG1BSUsKbA8Kw0WiEzWbjhGLqsRDHKRqNcmwuJfy43KJB9ypiIx6GCZcjIyN8IkgbxFAohMLCQgVe4m3GEjVKfCEKL7UQIwyLJ2aEMzWGI5EIAoHADEEqPgsSCGR4MJvNyMjI4JMSvV6PoqIixYkT9VE86SJRTRgWN4GixwK9Hq/5/X6kpKQgLS2NXbVoY0OCjhY+erbEp0AMy2KckyhuiBtE7wX6G/EhxcCrLbn0naL7kdjUG0CNRoMtW7bg2muv5aLmubm5WL9+PcrKyqDVamEwGHDs2DEcPXoU09PTKCgogMPhUCS/EwUa9UeSJD4F6+joYA+QRE20is/2PsKe6GIquvyRe5/Yn3hrQqJnQzgT+Up9b6LRhcaXBCm9lwQgGXGI20iciycYND/D4VjyuoKCAvj9frjdblgsFjgcDhbaNLaiFV7caFI/UlJS2ICn0+kUpTnS0tL4FH82bUGbz/lqC9Gr6dPSFlu2bLmstti9e/es2kIcu3jagrD7aWoLkTvnqy3E5zhfbSH+LZ62iHfPf29tQdybSFuoebm0tJTHSNxk0L2LfBqJRNhlXVy7gsEgfx+FngGX6stSP8X1RYwFFnlavXGlz4ube3qviBONJuaV84UvfIGz4mo0Gtx4442orq5Gfn4+J9erq6vD2NgY0tLSmJMpbETNx8ClzT+Vs6JarXT/8RoZIERMqBt56QHg8jlerxc9PbGtjtPpxOrVqxWfT4TpeN9RVlYGQBmuoF5z1BqEmizL7GUFgE9XxZhoGgPiJOInGkMyeoiJIU0mE19DNBjSvRGviYbEpKQkBa9SrP2VtCv3pfsUW2FhIVspqfwAHe9nZGSw6y8AXpBEUFJ9PrVlNT09nTMO0mIg1uCiE4GUlBTOQEYTKxAIcKyCJEkYHh5W1CyMRmNupRTXAsT81E0mE9atW4cbbrgBO3bsgCzLsNlsuPrqq3HnnXfCbDaz6++OHTtw8uRJThU9NTWFwcFBXkhFsNGC6fP50NzcjLNnz6KlpUUx+WZriRac6upqSNIlVxyTycSp/2U55vbxz//8z/z+RJNXvYmgRXXjxo0cvyAKYXHiiacvlAxkaGgI0WgUxcXFLDw0mpgLpJidkdy96HVyZ9DpdIypxYsXIxAIoKioCGazGbm5uVi9ejWfApD1ksiXJj8tWhRfRRMzKSkJqampMJlMjKFFixbBbDYrrFWyLDP+xDGglPSiyAuFQpzwhTaGkiTx9YFLxhXqb1pamsLSS8+DvkscE1qI1Is2uaXRa/LFE4atW7eivLwcSUlJcLvdWLp0KW644Qbk5eVx9tQPP/yQ3eOzs7MRDofhcDjQ398/Y4Gi8ZPlWPxhS0sLOjo6FAaXRJiaDXcicVMcr1injASxKBBnc+9Rb2pkOZa1NiMjg0991AuDOFfJgkgLgWic0Ol0nJFakiROsBIOhznOkk7MaeNAMZparRZmsxmFhYUwm80wGAyoqamB0WhkjIqnTtQfss6Lp/g0x/R6PUwmEyffideWL1+OiooKeL1ePq2enp7G4OAgF4onHIvzlJrFYlHU+yUeo+8lLtVoNGzk0Wq1vDkzGo3s6kicQZsZmqtjY2MIBAIKS65er+eFVZZlDgnRaGJJwd5//31MT0/DbrejqKgIW7ZsgV6vx5o1a6DRaPDRRx9h9+7dfEJDroCDg4Nca1c9bzQaDZ+8trS0oLGxkeu90vvFTd/lWnl5OYDYBrWvr4+
NAiQeBgYG8N3vfjeuhV1sopgVx2b9+vXs9SG6BosiSDSqkdigGrVLly5VeIJMTU1xiSD6XTydp3hEMgZotVpkZWUhLS2NubmgoACVlZVYsWIFli9fzqdK6hMdEeuiO7vIgUCMl00mE8e3AZeSntB8pjks8ja1yclJ5mUSc+Lc1ul0CgMLfVYsp0E6gTBCmwoSoeKppfrkhl6jZErr1q3DzTffzFmAS0tLsXr1amzZsgXJyckwmUzYt28fdu7cia6uLhQVFXG8XH9//4xQJtIWGk0si3xnZyeam5vR1tYW1+0/XkukLcgwmJeXx8bW0dFRvjefz4eysjKFoSNeUz8L+n3x4sWcuEg0zlGf6NkCl1wu6cRJlmUUFxcr3GvptF8s7ZSSkoJQKMSniz6fjzGXlpaGZcuWITk5Gbm5ucjMzER2djaWL1/O8ZY07qQv6JmL7r5iX0Vepth8sdXV1WH58uWwWCyMPZoPnZ2dAJQuoHl5eYxzatnZ2exuS+7OGo0GY2NjkCSJ8w3QPI5Go/B6vdDr9WysHx0dhdlsZkPU0NAQrwN0ekf90Whi2dxJy9D3ynLsBHbZsmXIzc1Fd3c36uvrkZycjMWLF+Paa69FRUUFMjMzceLECezcuRONjY1ISUmByWTiflFpMUCZLIvGfWxsDO3t7WhqakJjYyPXgaa+iDw+WxO5rry8HJIUCyn6/e9/z89p8eLFWL169QzjtdjUm2X6XoPBgPT0dJhMJt5riH8XeZz6I45dUlIS0tPTGTPhcBgtLS1ISUlBcnIy45YMKGR4Jj1iNBpRVFQEr9eLgoIC2Gw2ZGZmIicnB9dccw1qampQWFio0BOEXeJi0i70L9GhyeXa5XOP/z9ser2eM46Oj48rXEAcDgeWLVumuEmyAImZ7MQsuGI65Hh+0Hq9XhGXRYRAA0hH5nl5eQrrlEjERGqiBU/c4AAxAr7tttuwZ88emM1mLFmyBPn5+bjmmmtgNBrR2NjI5Or1ejkTHIkrh8MBjUbD4l+0ggAx8Tk6OgqHw4Hp6WlYrVYUFxdf1vopNvE52O12pKenw+v14uDBg1i/fj0Lk3jWSiDxBKf3kkCRJIljktTvFxd68XcS7pS8iVwLRNcy0epF8RoU7xCJRJCfn49AIMAxlmR0IGNCTk4O++ePj49jcnKS/fJF67Z433TyKRYwJndGSixAmHO73TNiqB0OxwzXMlmWmcDdbjdvOPR6PVtMiQDIOkVlCP5/9t48uu3ySh9/tFiSJcuSd9nyvsXBcRIcB7IQSCCQhKUUKGtDIC1Lh9JD2+mU0zMt8+30zOl0oIXTjRnoMGUr6cIaQhay4DghieM4TuLYsR3vuy3Lsi3Jm5bfH/o+N6+EaZk/OF9+c+ZzTk4SW8vn8773vctzn3uvRhMZ7cPzMDMzA6/Xi0WLFkWhWqrsLLSHquIMh8MYHx+HwWDAiRMnEA5H2AEOhwMGgwFbt27FsWPHMDU1hZGREZhMJqSkpMDn8wnNhdQx1omqF4PmQCCA1tZWqW1jUPzfuVJSUuByuZCRkSHP2tTUJE18AoEA7HY7pqamPoFoquc4dj2AyN4XFBSgs7MT09PT0vRFXTO+X6U3BYNBoScVFBSgo6NDdANphCqCScd6fn4eFotF0E+n0wm73S4BLYM4OlLJyclC1Z2cnMTo6KjIjwr+8N6IjtNZm52dhdVqRUlJyYJrS8pubm4uBgYGxHBarVZMTEx8olESHSaeFwYtXDMitNx73lcgEBmjwzoqXjzToVBkjEFcXByysrKidIuaMeP3xO4zX0sdGghE2uufPXsWgUAAycnJWLVqFQDg+uuvR319vdwbQa2WlhYkJCQgMTFRsvQ8e9xD0luZTRkdHYXb7cbc3BzS0tIWzHx/2qW+jrV209PTePfdd3HPPfeITWKWeSHZXiggVoMyAqO0PZR56gmCB2qmS6/Xw+/3w2QyweFwYGhoKAogIWuA38tawfHx8ShAxW63Y2xsDFlZWXIOgIhTR0CIHddJBxwbG4vK+qi0NfX+eOl0OjidToRClxraBYNBpKamwuPxRK3ZQus1Pj4u+ouyr2Yd6PiRussAiucgJSUFCQkJciaHh4cFJFeDblUvxwIfXBNVHmjfamtrkZubi5ycHKSlpeGKK67A+Pg4hoeHMT09DZPJhNTUVJw6dQoOhwM6nQ4DAwMi82x8ozq91BHd3d3yHIWFhUKr/az+RVJSEjo7OwFAHGOdTofDhw/jqquugsfjwcaNG/HKK698AhhQ90T9m+sTDoeRlZWF/v5+2Gy2qHEp6utjs0bcR8o+bTmZLUx+qAAIEOkRYTQakZCQIDrKbrfD5XLB4XBIvapWq4XD4YDNZkNOTg4CgcjIodHRUYyPjwt1N9bW8m8yG+bm5mAwGFBRUYEjR45ErYlOp4PD4ZBnphylpqZiZGTkEwBLrM33eDwoKioSPcX3U5/QNoVCIfT398s6FBQUiA4iMMikxpIlS4SaqpbDqPuogl/8GZ/b7/cLs+j48eMIhULIy8tDZWUlqqqqUFRUJN1pbTYbbDYb4uPjceLECZm/Sr8nKSlJssH8DupjAGhvb5dkVm5urvhOn0Uns7Zc1Zkqu8bv92PVqlVoaGj4BEgf62ss5HMVFRXh/PnzsFqtUfWdLLGhjKtJN/U7MjIyovq0cNY6cKkHBOWMiTD6G9w7TkGgHgMgz5mSkoLs7GzR5x6PR5op8j4WAmD/u0HqFyqDCkDqpjizlNF9bE2B6qirgaDaNImbyO69aoZpcnISg4ODGBgYkM8dGhrCwMAAgsFIC2mDwSADjtXsmSrwn3apBo8Oi06ng8/nQ0NDAzwejwydT0tLQ2lpqcwLzcnJwYoVK1BeXi51Mmzg4XK5MDo6GkXTY2MVrh9HR5w9exa9vb1RNQp/7WLArnZG/uijj8SxpFOpIjhEAmODG+4R/6ZxU/cF+CRSD1wyjnQw2MCCSCedwJmZmSj+Ow8c309njgGJSjWgESHtMSkpSWpJMjIykJeXh5ycHCxatAipqalISEgQJcbvNBqNMBqNsFgs4kAx00ZAhEamt7c3qi4BAEZGRiToUNeCyBflVnUSgUs0itHRUUEsgYiz0t3djXA4LHNNOQNVnY35t5BCtcYKiBg31fFqbW2V0TTHjh1DcnIysrOzsW7dOthsNszNzWH9+vVYvHgxCgsLpWNwMBjpSMhB3wywmTXk3s/PzwvS2dDQEJXJ/2sXHVwaJX52a2sr5ufnMTo6ihtuuCFKdmONQ+z/VVnm7D3WiMSe/1gniq/h+lNG1f3SarVRlCx+l06ni1L4dIj4bHwPn9Fmswlyb7VakZeXh+LiYpSUlCArK0vqMZl5ZcBJVDUxMRF2uz1q1utCcmE2m5GZmQmTyQSTySQGM3ZOH7NkXIv4+HjpnknQUaOJMFlIzeSzZ2VlCQJMcI76Lzk5GU6nEw6HA2azGXa7Xdba7/dHza4EIs2z1LpUp9OJ5uZmec3mzZslI6DXR+ZV7tmzB5mZmXj11VcRCARkbc1mM2pqapCSkoLVq1dLF9Hx8XFpqu
N2u9HT0yMZKbUOl3bA4/FIfVR9fb106P1r14oVK8RZC4cjzXmYadBqtejv78ejjz4q8qUyWIDoumtVtvn38uXLAUR3hVfp2Pwc6lDKH2d4Wq3WqEZIf/zjHwFcKnlgVio+Ph4mk0lm7VF3q4wD/ox/k3mTmJiI5ORk5Ofno6KiQhzWzMxMOBwOLF26FHl5eQIkpKeny/px7dXGezzLsdkptSM8L5/PF6WXaZu4vwxywuGwdIQeHByU4IHdsT0eD2ZnZ2Gz2aICABVM+Wt7pr6OQUJiYqKwjWpra6HVatHW1iZgyNq1axEKhXDmzBmsWLECixcvRnl5Oex2OywWC4xGo3RNnZubixq5R1tK/4XlGQ0NDejt7f1MfoUa6JHh5vV68dFHH8nPLRaLyFmsb6HqanUd+G91DVUa80JnimeCcsbXq2OGKCvquA9+F8s21P4ABJo1Go2MgaN9o8wmJSVJAmbRokXIzc1Ffn4+EhMTo3wLBqYcPUbfYiG9TAaXXh8ZEch/NzQ0RJ1ZABLIcm2BSGabrJaMjIxPBDC0PwRYLBYLDAYDUlJSxFadPHkS+fn5SE1NhcPhwPDwcNTs74GBAal/BSJ2gD4RAPHxCXxVVVWhqakJeXl5skcDAwMYHR3FiRMncP78eRgMBmEzXLhwATU1NaiqqsKSJUsQHx+Pjo4OzM/PS4JneHhYakXVbLAaKPb09OD06dNoaGhAd3d3VAOqha7U1FSp9VRZUU899RQMBoM05+RotVh9/Lf8ZZUOywaF1IVq4KcyL+i3sdY9KytLzm04HI4qN6Is8DONRqOMXwQgjfhUO8bXMz5LTU2VMqPS0lIsXboUy5cvR15eHrKyspCdnS1xDTOwsT7w37q+UBlU4JJTxwwX69RIh1HrWuiwUqm4XC5pA82NY7aNCkwVKA40pvNIQ0tFEx8fL59NpcPP/izPoKKet956axRCZjAYYDKZ0NXVJTRRp9MpvO89e/bAZrNh+fLlGBwcRGVlJerq6qDVRhrC+Hw+Kf5mp7TYAB6IBBcTExOydsXFxUIJ5euoPHJycqQRCXDJyXzyySdRXFyMhx56CPfffz+am5tRV1cXRWte6KCpl0YToXN0dnYiMTExquZJNQzqGi6EMKoX0Viz2Yzp6WkEg0EJDOkAT01NCRWB3Hw1o8QD6vP5olriz8zMID09HbOzs9JQZWZmBrm5uYK4k34YFxcHq9UqI4fC4bCgd2xF7na7UV5eLnIYDAaFLqPKpmpUY9Fgtd5Jq9XKXFMGCMw0MzCmzNLo0amKzbDEGnI1GGRGeH5+XgxJOBwZvB0IBGR2HDv72Ww2WCwWVFdXY2JiAiUlJTCbzVixYgX6+/tRV1eHrKwsBAIByVJzxl9sLZZqPPr7+xEIBJCTk4PMzMwo1FAFSEib4hr5/X7813/9F5KTk3Hvvfdi8eLF2LJlC06cOCEdqxe6YmVYRd+Z+VPXSb1v3odKMXS5XMjKyoLD4RBnlcglHUJm3aenp8UZIKMiFAohMzMzCvmkroiPj5esIOWcLBDgUjfzYDCImZkZ5OTkYGJiQmrDqVNKS0vxD//wD5+6HnQEzWYz8vLyJBCjM6ACgGqzLt4znYNYsFCt0+M6k/5J6lEwGJTnZIBE/c5nWIierO4R9W4gEMCZM2dQVFSEjIwMyWRzhjUzIVVVVRgcHITFYkFXVxccDgdyc3MxPj6OPXv2wO/3o6ysDPfee69kGU6fPg2Hw4HJyUnMzs7C4/EgLy/vE/elnq++vj7pnJmdnS3Nx1QQJBbUI0p/8OBBfPjhh9DpdLjjjjvwwAMPwOv14tSpU2hvb/8E42ch2ebnUWeS2se9in29uuZ6/aUmQ7m5uWhra0MoFJLsilZ7qf6etHatVouEhAT4fD5MT09jZmYGDodDbDydOO753NwcUlJSMDExIbaDZ5tZMwKWmZmZ0uQjFIp0yuV6q1mtrq4uBINBnDx5EllZWVFnfGJiQhxg4NLQe15sfkc/hcE3QVCyuJgdpT4mMKrKL3XFX8saqjKjvoe6gutLEI1sApb9eL1eZGdnIy0tDRcvXkRvby9SUlKQn58Pi8WC0tJSfPTRR0hOThYGw9TUFBITE4VxpN4DdS0zJ2SmFBYWRtlp9dl4VsluoS75+c9/jrVr18LhcGDr1q04fvw4WltbowKpv+VbAJFmXRcvXozKwKuvV20s74/sFqfTibS0NAHCaR8ZIDIYnZ6eFsYK6/DtdruADbwYrND+8t987tnZWaSkpMDv9yMzM1M+u7i4GIODgxLkUgcajUb89re/hdPp/MQ5BCCBaWJioox346gZnimyAdVrfHxckgZ+vz8qKxcOh8X/VRMhDNAZvDCbThsWq7NUphz3IzbLptPp0NXVBb1ej8WLF8tnUweSuVBYWIjz58/D4/FI8AtEgOna2lo5/5dffjmWLVsGnU6HEydOwGw2w2w2Y2JiAmNjY0hOTkZiYuInkkjUEcwGcv1KS0vFZ1Z1YnZ2NiYmJqKyiwDw05/+FD6fD3a7HV/96ldRXFyM/v5+nDx5UpoBqtenyXRZWRkaGhpgt9ujAGD19bHrSNsKRMBBzuQFIOAT/QbqJ7/fj4SEBJjNZklG2Gw2CVYpQ5RtnU4n9p0MF5PJBIPBIHX+gUCko7vdbkdSUlKUP9rd3b3g8y50feECVCpeAIJQ8PBwYykkKsqp1WrhdruRn58vKW06SOz8yGySWgfA2jQAUq/IwIsHRQ0qYpUfBYLCzJb2bLChKnQi37Ozs/D7/bBardJZkE4vAyGHw4FQKIShoSFYLBbs2rULGzZsgNFoxODgIPbu3YuCggJp8jA2NgaDwSBdPbkmsQhtS0uL3H9RUZHUD+h0kXovrVYbFRjRIWxsbMT3v/99ee+NN96IjRs3AgDq6+vR1dUlXWdVB4ZKOTZ45v7w4u95z6pRC4VCmJiYgM1mQ35+Ptra2qJoE9xHADI6gAaWszhTUlKkGQgRJwZdNDxUzKxjpYLl/ZCmSDolr1tvvRV33nmnjBHiwTcajSguLpZB1FQEn9bcanZ2VoJkooy8JzUA4Qw0OuVE2lSqGZEvyq2a/ec543rzdzpdZLxRZ2cnTCYTDh48CL1ej/HxcZSUlIhhNplMGBoaQnp6ugwAV88vM12kKofDYbz55puoqqrCTTfdBIPBIAEsg01SUnNycsQ5iXXQ4uLiJDNBhFDtYqsaGWbXSZHy+/349a9/LetUXl6OrVu3Cu3u+PHjGBoaEiXKz1SDJo0mUnNC+eP6xsp2rIOm/o7ZS6LvKkUQgBiTyclJqbecmJhAdna2OKJ0iDlwnjqGjgopUqS4E2AJBiMzKVkzbTQaBeR5+eWXBahb6Prggw9w4403yvpptVoUFRVJkLF7924UFRXB6XSKsYsFsNT60MTEROmkSbSdQCBR9enpaVy4cAF2ux2pqanwer1CVRwbG4Ner5d5yDxT8/PzaG9vR3FxscgQM3YNDQ0IhUJIT0/H6OgoTp06BY1GgzNnzmDNmjU4dOgQfD4furu7kZ6eLmUWBJJ4RjlSgk7D3r17M
Tk5icLCQtxyyy04fvw4cnJycP78edhsNqHP9fb2Ijc3V4At9WxS7w0MDKCvrw+hUEiaByUlJSEYDKK4uBjt7e2YmZlBYmJiFEV2enoaf/7zn/HWW2+J7rnnnnuwatUq+P1+dHZ2oqGhQdZJBf4oyxUVFaitrUVSUpLQH2PlgWdTtWukrGZmZiIlJQXDw8OiJ3gvHBcXFxcngAtryFRaJKlofJ96FhMSEkRXa7XaKLCCn8HMfiyVjHJIG56fn4+enh4EAgHpKMy1GR4ejqqJVMtOtFqtyC0QyQiRMswumDz3lBmWYzAopZ6KzVjR31DvVwXazp49C5vNhuPHj4vc+P1+0R1ms1myu1arVWrsaBt9Ph+MRqME5DMzMxgcHITP58MNN9yAYDCIuro6tLa2IicnByaTSZhmzMao7BDaxbi4OExNTQlNPiEhQcbLqIHz0NBQFMjHZm8fffQR9u3bh2AwiKSkJGzbtg0lJSWYmJiQOZdcG5Wer8puamqqdBun8/3XAluuSyAQGQtHQJmNt6ijYwEeZjLVUXAEQ+ijUu9R1hm48NnNZrPoZbIAGUzk5OTI2phMJrz66qui42Iv6g0VkC0sLJS+HdXV1SgvL0dWVhYyMjJw8eLFKIAzPz8fHR0dWLx4MdLT02UONMePcL15Fvk+1kHT90xKSpK9ZdDCQIx+t6prcnJy0NjYiLS0NLS3t0u2kCOSSktLEQwGkZaWJiO/zGYzSkpKhL3Es5GWlga9Xo/CwkIpRZqdnUV1dTXcbjdWr16N7Oxs1NTUwGq1YmRkBHFxcfD7/fD7/ZiampJyJVWm1eD+woUL4gsTpGRQztf5fD5pxMbzGgqF8Nxzz0mmPicnBw888AAyMzMxMjKClpYWCcxVP4NrpdfrsXz5cpw5cyYKYOGzq3EF95WfMTg4CKfTKUmnYDCIt99+Gw888IDINc+q2+3G5OSk9A2gz2QymSSTSr9ILS9jrKCeR4IaJpNJfIvMzEzMzs4uqJf/1vWFClBVB5PGJyEhQZqI7N69WwIGk8kkw2aprH0+n7yWgQkPjYoM8eIGMzDUaDRSBK9eKrqvUo21Wq0EKrW1tXIvnBXJ51Ezp16vFzqdDhcvXsRll10W5WQSMTYajUhJSZGMwcjICJxOJ2prazE6Oiro59VXXw2j0YhTp05JgMkZXUajUdBKrq363MFgEBcvXpSDl5WVhby8PEn98754v7FdaXft2oXdu3djfn4e2dnZuOmmm3D11VfD5/PhzJkzUbPA1HVnHV9sZnQhR0hFrNgQhTQNUgMZxHLtec/q/CW1SRap2wwKVNoyHU6+dmpqSowLD5vK/2fm9I477pCmEwQHqLi0Wq0UlFdXV4t8st5Zzbx9/PHHuPbaa4W+SyoGgz7K5vz8vCB8wWBQqDg+nw95eXkYHR2NyiKPjo6KHIZCIbhcLqxatUrkl00zampqRH7ZjZcF9osWLRJ6Fmcdzs3NidND5NTv98NisYiDHwwGMTIygqKiInR2dqK2thYpKSkIh8P48pe/LPLr8XikDliVX7WWUQ38wuEwBgYGpO6N8qtmYdUAUa1X0+l0aGpqwg9/+EPJ/m3btg133XXXJ+SXQaia1VE/d6FsVCwgoyplBmFGoxG33nqryK/aJIPvmZubw/T0NFJSUpCUlCQUVMqvOqpArbMlXXVqaiqKnk99SkpdXFwc0tPT8a//+q+YnJyM6k640KUizjwzFotFArCOjg5BR3U6HY4ePSpBK1krvEi7j12/2LCgsUQAACAASURBVFl9vCeuu1oHz/MVW1PD7+/v78eFCxfgcDhQX18flYFiHSS/k2tJW3D+/HmUl5fLulFncq+SkpKkaRXrJ8fGxrB7927Y7XaMj49j48aNotcPHDggTWI8Hg9GR0fhcDgk0FT1NO2f1+tFe3u7PF/B/527FwwGpaEFszsMVChrs7OzeOWVV/CHP/xBqJ6PPvooSktLJcNKMFWVZTouKlim6mfKgIrWMzsaDoeRmZmJ6elpFBUVRVEzdbpIN0mj0SjzJuPj4+HxeMQpZ8DAPaYDRFmjY8Xn43xGILqDNh2+TZs24d133426bz6HwWBATk4OgsGgNHrjvRqNRhQVFcn9qBRtAKJ7+Xmqg6h+BwEl2gIV7AaiWUK0YerFJlyHDh2SZ1AzxvRdSEXNzc1FU1OT/I6fqVKGExISkJGRAYPBIECW2WzGBx98AK/Xi8LCQmzevFnYRgcPHpQ+CV6vF2NjY3A4HFHsMq4FdZs6ozkQCKC0tBROp1MYRnR4ybxR5/p6PB78+te/FtuUnZ2Nxx57DHl5eRgbG8PRo0fR09MjfqBac82zSrBClV0VwFQBB+6nzWaT0SYlJSWf0CVzc3NITEyE2+2Gx+NBWloavF4v0tLSRHbpc+r1+qj6UfoWav0w64953gBIqQMz7iUlJRKcfhpwyPvke8h4YpkaG2kSEDpx4gTKysqQkpKCvr4+KedisyTKGYEHNQhSv5OvU4Mq4FIvE9WWARF7MTExgcbGRgHXOzo65N65fwQU6e+MjY0hPz8f7e3tKCkpEf9UZSixQzzB0Y6ODhgMBjidTtTX16O+vh4OhwNFRUVSltHf348zZ84gNTVVmC7T09PCUlKzhrzoS42OjiIUCokPR/9LDWppG2jX+Z0/+9nPZG2WLVuGr33ta7BYLHC5XKipqVmQ1UX7rvoVsbqD+6QCSH6/H2azGQ6HA9PT00hPT496H/+YzWZ4vV5MTEzAYrEIK486L7ZfjJqYY08Tlk/QXybAQlCL5/H222/H22+/vaAsL3R9oQJU9eKC8AFJlaHCYTOQPXv2CCLqcDhkkVNTU1FfXx+F3jK9D0DoiqFQCImJiZicnITJZEJfXx8qKyvhcrnEOKWlpUWl/FNSUjA1NYX29naMj49LXQ1rHolAdnR0wGKxoLKyEhMTEyguLkZHR4cET+qsTCBy0FjzysHpDMiYfWVDHpPJhPfffx/Lly/H8uXLEQ5HOu2ePXtWAmQ2SKBBUp0gVWnrdDoMDw+jp6dH0HmuN4O6pKSkqBlvdBj0ej0GBwfx0ksv4YUXXkB+fj4eeeQRVFVVYWJiAqdPn5ZAWKOJ1PyQ9qoKfWxgwZ/zAHq9XnR1daGgoAAFBQUYGBhAdXU1tmzZEmWIg8HIvDoGbjabDd3d3VKDodY78n1UGGpGTK2PUo0q32symbBq1Sps27ZNgjT14lpz5iNliZ/FhjYffPABrFYrSktLo5xTUpZ5X8yixhoqNUPOtVuIusLXUlFwoHg4HJa6gEAgIA4vz5fdbhemAr/HYrFE0eRpMAgQAJFMIBWdRhOpYaHRoSGtq6vDsmXLUFFRgcrKSlRXV8PlcgntiQYsKytLnDc10KYjq9NFugm2t7dLDTtfQ6Wu0sD5GZTfubk5kd/rrrsOd999N5YvX46RkREcO3ZMGkyEw2Gkp6ejvb09KtvB71gIaKGxY+Mi1vrQ0WQQoH4W19Ln8wG4NOuPn6fKLGWGcqpmZYh086zTQaJj9L3vfU/ovnzNZ0U4uXYqDYgZWJVe3NLSgubm
ZgGE6Cjl5+ejs7NTKJUEbiwWi9QJUi8tWrRI1qa9vV3GkIRCkdq+pKQk6Xg9MDAAi8WCvXv3Rt2bThepe2NH+JycHMn0JSYmIj4+Xqh4BAaYCWYXTWY9SF8CIkFIUlISKisr0d7eLpTSyclJ7Ny5EyaTCRs2bMD1118vNU5OpxO5ubnSDZgAIc8vL8oxf37y5EkByTjLlXYnLS0NPp8vip1BMIvsop/97GeYn59HZWUltm7dijVr1kgQXF9fD71ejyuuuELqylkDv1Cml5fKOunu7kZOTg4SEhLg8XjQ398vted0PFXqssfjEWea8qTKs/pHPUvUz7H3xcyTw+HAddddF9WALvZMEu1XgyP+CQaDqKmpkfO+aNGiKJBVBZOoZ6ifefGzVB2k3ivvA4h2+DWaSElDR0cHNJpIEz++Z3JyUmZBkoGljn5obGyUMVQMPHm/ZOOQuaHVXpodrdPppBY3GIw0XEpLS8OVV16J9evXIxQK4eOPP4ZWq0VGRgbm5uYwMDAgtZWqPPB5gUv1q52dnfI8tG1qgMkRVlwTNRs5ODiIp556CkajEdu2bcPmzZsxOzuLtrY2nDp1SvyhYDAIp9MpmTwGhn+r6Q33TwVpKItqNkv9Oe3k+Pg4MjIyZH+pQ9WAXbXLBD/4/Kp9V/sQJCcn45lnnsH8/DxmZ2cxMTER1ReE1549e7B582ZZN4I7ZrMZY2NjAugxIGGz0JMnT0YFtOnp6XA6nbhw4YLsm8FgkIZQtCuUe9YeM3NKpiPljnuisloOHTokYDtBcLfbDZfLhZmZGVRUVCAYDEqt6P79+3HTTTehs7MTXV1dIuMsu6JfQfYCS24YcLndbixduhTd3d3SZ+bo0aMYHx/HNddcA6fTiaysLAHq8vLyhHUwPj4Om82G1NRUeQaeG8YNBC34rLQ9WVlZ8Hq9SElJkYyuypRSmX+NjY349re/DQC47bbbsGXLFsTFxaG3txfHjh2ThFtZWRlaWlqEFcb7oYwR+FIZX3Nzc5LYstvtGB4extjYGK699lphsHDPmC1VmSEETSmntKX8Lp51NXuqAvc8K9TL5eXl2LBhQ9SM3s9yfaECVC4uEE2VUxU6D4fNZoPdbpcF6uzshF6vR29vr7R2p4KemJhAQkKCZDupRFQFRAPCzVfRQRoeKs5gMCjDyFUFMDMzg6ysLHHoc3JyMD4+Ll057XY7Nm3ahP3790dlBkhLzcnJQTh8qVaLxpyNj1auXAmNRoP+/n4MDAwgJSVF6p7C4TA2btwIi8WCNWvWoLGxUVD62dlZ9PX1QavVSp0rgKjus4FAIKrAn1TB+Ph4TExMYG5uTurWYtdIXbu+vj78n//zfyT4v/POO/Hggw/KutXW1qK3t1doBbwXCjfXnIpdRReJLOn1evh8PgwODuLGG2+UQ6TKj0olS01NFXSVzoiKeKuOID+DoIb6mfHx8di6dSvy8vIENaUhiJ1fq9I2WNju9/ulwVVWVpas88zMDM6ePQuDwYC9e/cKNYtKWL0HFaWjLDOzowZiqkFVkTf+Tfll0DAyMoLU1FTpjGq1WpGYmIjx8XGkp6cLjcrpdMpsUQBwu92wWCyYm5tDcXFxlJGmbAOROhGn04nZ2Vl0dnYK6nj8+HGpYaNsrlq1Cvv27ZPGPrOzsxgbG0NcXJw4bCryzOeyWq1wuVxITk4WZzaWCqsqVL5Pld9Dhw7h8OHDUj/xjW98QzodTk5OoqamBsClDtEqyqyeWX4HAzV1hnNOTo7MyVRnW1JuwuGwUGRo1NWu1SogwO9Xaz51Ol0Um4TBodFoxLe+9S1hH/AzGTiodMGFLjW44NlkTQpwySEdGBhAQkKCONGUc61Wi8OHDwtFXe1KrAbINIL8PtXhU0eL8ffquVAzewxKecbz8vKi9D7nF/KZtm7diuLiYvzqV78S9kFfX584R8nJydIMibqBOmZubg5FRUUoLi6WzAUpU/v27ZPh7YsXL8aqVasQFxeHffv2CYA4NTUFv98vwSodPPWM00kk0Ecdo3YdVbNwqhOj6tWGhgbpLmk0GrF27VrcdtttsFqtmJqags/nw+nTp8XhVNdZzbKQzshMEfVESkqKzN1UGQWUFTqxqnyzPITPwfPNwIWUYVUuuG8MSFauXIkrrrhCMhux2fhYmeH3hEIhobpaLBYByDQaDVwul9Tvffjhh2JnOB6M/1edNr6XtWN8xthAbiGAa3h4WDKLlPPh4WHk5+ejsLAQWq1WxqvRToTDYeTk5KCzs1MAFJYMcQ0J4FKWmSU3GAxS9+fxeNDe3i578P7772N6ehoVFRUiJxMTE9i/fz8yMjKg1WrhcrmkZpd0bZ51+gDUhXzmtrY2XHbZZbLHBL9Vx5vrwn2am5vDCy+8gBdffBEzMzOoqqrC9u3bpcSko6MDR44cwdjYmID8lNdY9osaQDMb7ff7pe9IR0eHBCIM5tk0iV2T1fIFNaDm56rZU7X5jAoSkyFgNBpRUVGBW265BYmJiTCZTOjt7ZU6UnXEXOylMrVUOSRNX6fTYWJiAsPDw8jLy5Msl+rz0udggzkGY5Rt/oyfb7FYZOIBv1cFWbhn/A41gJqdnRWglvq0ublZdAlr/CnXb775Jr70pS9JJ2SWA7W2tsJutyMnJ0dqHAFIiQBLXC677DIsWbIE/f39mJychNPpxOnTp3HmzBkZz3TvvfeK/O/fv1/KjiYnJzE2NgabzYbExER5RtUe82+NJlLrHQqFBFhgokddE/VS/Zc333wTb731Fubn51FQUICvfe1rwrgZGhrCiy++KA2taNtUACYWOAyFQlFTE9LS0jA0NCT+EM8lZYe6mnLL5+IfAOKrElziWnNfVZYlkxcPP/ywxGkstfnvXF+oAJWHAriEcqoBB+kL5E7Pzc1JQbPJZIqikqnGicpfRUFjMyn8GRcf+GSNKR3OcDgsNMqSkhJBYvv7+6HX67Fo0SK4XC6sX78eO3bsQHp6ujhXBoMBcXFxcgiZ+ue1ePFi6VpKhJMKls9COhnnphqNRrhcLpw/fx5msxnvvvsuNm3ahLKyMuh0kSYcnOFFeibRWN4XjRYdr1AohO7ubixevFjWs6enB7m5uZ/IqHGtgGhjq9Vq5eAxc7J9+3b85Cc/EUXz+9//HhcvXhTql0rhUxU/FVRtbS2qqqokI8cMpRqQqcaYHfemp6dF4avGkq8jkv5p6LbFYsHNN98cNToBiDgPxcXFguTRMKrZ6WAwKA2NuMZUKC6XCz6fD1arFampqVGZLsofg07SNrjeqsGlomKdKC81EOMzazQaoRAXFhbKXvX09GBqagqXX3450tPTodFocPjwYand3rx5M1599VU4HA6R397eXhmmPT8/j2XLlsHlcgk9mUaMis9gMKCsrAz19fWoqKhAV1eXOMEulwterxd1dXXYsmWLKOKTJ08iPj5e5j8S6UtNTY2imQAQ6hSNPx0EOmus9YnNdqqAGO95amoKTz/9tBiCgoICPProo/jSl74EAKirq8OOHTukkykz6dQnNKjJycnQ6/UYGRnB1NSU1Cx5PB4
UFBREIc0aTaR+w+v1RmWzaShiM7dcOxopNZjhc+l0OixbtgzFxcWS9aQDrc6xU9kFC11cF8q1un68PzbrYm0SM5IjIyPIycmBy+WKCiQ1mkjNHxsLcd61SrOy2+0YGBiAXq9Ha2srrrrqqqislM1mk0ZwPp8PaWlpGBwcREpKCsxmM9ra2mT00Ze//GX09PTA7XZj1apVopvYcINjhPx+P3p7e1FWVibgJ50N1i+xOUpCQgJOnTqF1atXy57Ex8djZmYGV111FYaGhiRbrNPp8Oabb8Jut+OGG26QtaipqcHQ0BAWLVoEr9eL2dlZQeJZVqA29pibm8PZs2exdOlSWfeOjg7k5eUhGAxGOaGx+pn6gMBadXU1qqur5ef33HMPHn/8cRmhcPz4cbz55puYnp4WZ5POGTNXOp0OjY2NGBkZwfr165GdnY09e/bgW9/6lnwnm2mQnkxZHxkZQXx8vFCuue9q9lRlS1Dn8f9arRbLli0TpkEs6h+rl1VWATMuGRkZGBsbE7vC8W7BYGQUDedsUtYJSJvN5qjMKP0OBr8MUHmpgSmzrvybTu7o6Ciys7PFQZ2ampJmigRBDh8+LDXSAPDv//7vuOOOO6RxUXp6OlpbW+FwOESeiouLxeGmrWBdbDAYRGJiIioqKnDs2DFUVlaitbUVg4OD8Hg80Gq1+NOf/oSrr74aX/nKV8Qprampgd1ulzpUj8cjMxzVjDvBIovFguzsbJHJubk5HD9+HIsWLRJ2zUJsKvW8G41GnDt3Dt/97nfld9deey22b9+OBx54AKFQCO+88w4OHDggcqPaSjXxkJWVBa1Wi9OnT2P58uVwOp1RdFf1zPBckaJOnZ+amholr5RL6j7+m9+p6m+DwYD77rtPKPLT09MCHPC+1URC7EX5oUyq9p4+FVmFvH+WfxC05/nie3muYwNP6gyO+1MDLPVSs+D0Jd1uN7Kzs6WuEYiMn7vmmmvQ09ODcDiM22+/HW+99RasVqt04Nbr9bDZbAgEAtJnhb48P4tjdcgCIDBNICwUivSr6OzsxLp16zA0NASXyyXgF23A9ddfj7vuukt8jmPHjon/Ozk5CY/HI7011NiAI2+ys7Oh1WqljvP8+fMoLi6WLKUKjqj7xPWlTurt7cWPf/xj2YuVK1fiySeflL4Ohw8fFn2sssXoE5rNZqSnp6OxsRFjY2O4+uqrkZycDJfLFRUk01YR4NRoNPD5fJifnxefVN136jSeBQan6r2rPu7VV18d5Xcy6fPfub5QAerQ0BAcDkfUQePF/7NJjc/nk8YqJpNJgg+OkxgbG8OKFStkUenoqweHm8XAmIpEzTbxD5FBNrgIh8NSv7dmzRqMjIzgoYcegsFgkFEfVB4bN27EO++8g4SEBGRlZckc0FAo0gmW7cX52SaTKSpAUu+PP0tOTsbw8DCuuOIKUfidnZ3SUvujjz7C2NgYMjMzsWLFCphMJqxbtw7nz5/H+fPnZXAzZ3nRmSMFh3WeVKIMFPPz8zE4OCjOCgMCVYjV9VNpADMzM3j++ecFYayqqsK3v/1tAR9cLhdeeukloWqSxhYOh3HkyBHZR9JShoeHo76L6JDX6xXkc3p6WhprxGZOVQPKz6bSUYPEQCCAyy+/XBB8dpNkjd/MzAxGRkaiPlOVYf5tMBiQlpaG1NRUDAwMwO12Y3p6WuZ5aTQaMfIajUZqrNXPUuWXToYKCKgt22OzDFTqfP3IyIjU3tx8880wGo04fvy4BJROpxMbNmyA1+uF3+/HRx99JAEP14KzsnJzc0X5sJU8AAF0+vr6JMOqUvoWLVqEsrIyccIAYHBwELt378bQ0BDy8/Nx7bXX4sCBA1izZg327NkDjUYj9dYMVnkfZCbMz89jYGAAubm50Osjzc86OjpQWloq4BKNeCyTgmsdmznq7OzEP/7jP8rvb7vtNjz77LMSfJ86dQp/+MMfogbbJyYmYmhoSDqqarVaXHnllcjJyZF95vfSoWEQZDAY4PF4hPal6gDuL9+rNpRRzx6ZJKwJVmfUaTQRNgYbw9HxWohOBgD9/f1RzudC6xUXF5lPSkAqLS0NZ86cEacnLS0NoVBImAe8dwASBKp6lzqdZ5EBkgrKhMMRKiuz2kCkIcyFCxdwzz33QKvVoqGhAVVVVeIUqz0C7rvvPrz//vuw2+1Ys2YN6urqJPOQm5srDqPL5RI6Ih171ZmmngMiiDXHCDkcDqG6njhxQoA1NvKIi4tDSkoK7rrrLthsNpw9exbNzc1wOp3SMZzlKcnJyXC73TJMnTJB/XfnnXeira0NDQ0Noi9iSxXUS3W++Rw7duzAn//8Z8zMzKC4uBibN2/G888/L/uwY8cO1NTUSI3R6dOnJWPPcVusD+WeqVk92hdSCCmT6v7yXKiOkVomobJtmGEbGxtDf3+/yJnb7ZbnZBMTVW5V3Ww2m6HX60U304ZYrVbJQAKI0s2sw1RrYdWguKOjA5mZmbBarfB4PPD5fNLoieeJv+NIJVIDWRaTl5eH+++/X3RzUlISent7JRhdunQp/H4/vva1r0m94pkzZ7Bp0yakpKRAp4t0WuUzUG8TYNBqIyNpqJu5d2fPnsXKlSuxdOlS0c3slmo0GkU3b9y4EQcOHMC6deuwZ88eCYAmJiaidHM4HJkjDETYJJ2dncjLy8P09DSuuOIKdHR0oKSkZEHdrPqC3H/qNv7uwIEDYp+om1988cVP1c0zMzOw2WwYGBgQ3VxbW4srr7wSTqcTe/bskX4Qqv0EIAEHO8NyzA/PF19Lf0MdV6OWlhEcGR4eRnp6OqampuSZU1NT0dzcjNLSUszPz8Pj8SyokwcGBqLmr6ogNOVRbXh11VVX4dy5c+jp6UEoFEJOTo4ED/RxWX9IP5R0cHaZb2lpgdPplPpLMu3o17KpETPHLS0tyMrKwrlz53DzzTfjqquuQnNzM9avXy810Sz9MpvNqKioQEFBAfx+P5544glhCjY1NeHaa69FZ2cnzGYz5ubm0N7ejvz8/Kh+JDzbDQ0NWLJkiewfGyhRHxMQCgYjdeiNjY3SA6OwsBDl5eW45pproNPpsHfvXulUTBpwMBihKnP+qlarlT2bmJhAWVkZuru78fzzz+M3v/kNfD6fgFyxzbdi9Rl1k06nw+nTp3H69GkBwDZu3Ihf/vKXsr+9vb14+eWXBYgFIPrZbDbj1KlTWLlyJfLz87Fz505s3rxZglPaInbKJyA8MTEhDaioM1V/iDKmyjrtg1arlaRda2sr8vLyxJ+mffis1xcqQCUFkpkkdVHUbKrb7UYoFMKyZcukmcHvf/97OJ1OqfejI0QDoiKrDHCASzWCdrtdNpdtwJkBCgaDMhJDp9MhIyMDjzzyCHbs2IEvf/nLmJ2dxfDwsGxabKaB92Q0GqVrF4MIZlNnZmakJTNrLwwGg8xzY/0n14CHlgoRAAoKCjAyMiLPzbbkp06dglYbaWy0adMmlJaWwmQyibNEo8UB6By8zo6qLIBOT0/H448/jmAwiK6uLrz88svQaDTS1Ca2yYqaBVSDWCrChoYGPP
7443Igt2zZgu9///tiAOrr6/HDH/5QnoVItMvlkiYlPBxqXYtqgEOhEMbHx+FwOOR7VJovD5SKFPE1KjJFSohGo8Hq1avFyJSUlEjdGYAo+QUu0THoCHJGltFoxP3334/Z2VmYzWZ8+OGHACDIJGt7VFRTDdCzs7PFiFNek5OTpUaNaBufbWRkBLOzs0hOTkZubi4eeeQRtLe3Izs7Gy0tLbJmpHnT2DDryJb4SUlJ6OjoEJlja/Lx8XFkZ2dLkM25gWxsoBpYZts8Ho80TEpISMCyZcvw8ccfIxAIIDExUQaC79u3D/Hx8WhoaMCNN94o9JMDBw7AbDbLmB02O2AHSaLVBFisViseeugh5OXl4atf/aoERmyYo9JbVeOhyq6KgKrsAIvFgu3bt+O5554TNPy9997Dz3/+cyQlJcHpdEpgDkCcI+oAr9criCSphm63W1B1Ojm8H1V+aUQWCpY0mkgGua2tDX19fTCbzaiqqoLNZoPb7RbnZnx8HFarFb29vaisrFxQP7e1tUlX2YyMDDnjDK4p86Qne71erF69GsXFxThz5ow0DnK73Th27FgUnZ4BIZkGHBdGlH9+fl6yWwSUWPNvMBgwMjKCwcFBAfnocLMpFEGT+fl51NbWYt26dWhra5Nz1NHRgbVr1+LkyZNCrx8dHZVZ1OFwpLtlS0sLli1bJlleGvK4uDh0dXUhLy8PGo0GmZmZMlebNd46nQ4rVqyQYfSJiYnSyG5ubg51dXUYHh7G5s2bUVFRIfrk0KFDwqaZnJzEhQsXUFVVBbPZjL6+PuTm5koX4YqKCpSXl+M73/kOrrnmGqxduxbDw8MIhUJRc6MpR7EBq8oi0Ol06OzsxH/8x3/gxRdfFLv6zW9+E9u2bUMgEBDQkbYkFAqho6NDGpKwdMBoNEoQy8y93W7H6Ogo3G630OGYEaWeU4NSNYtK2eM5uHDhAlJTU6HX69Hf34+UlBSRIQAyn/F/gm4m5fTz1M1qJvjz0s08u/+rmz+bbl7ompqagl6vF32syjXPNxkWbCxWWVmJK6+8Ujrb5uTk4NixY1E+N/eI62swGGTcHGuV2ZuEenVmZkao2hyxl5iYKIBmeXm5MOFUAIqNeYLBoNC9ExMT4ff7ccstt6Cnp0fOnl6vl54Ds7OzMnmCDS6pj8nUUHWJyWQSX5B7pVL1Q6FI53TKrNFoxCuvvIIrr7wSN954o2RxDx06BKvVKgmcvr4+8bVpv6hXqqqqoNVq8U//9E/4wQ9+gIKCAvT39yMUCoksqsAZ5Zd7oCZ8qHMOHTqE6upq0Zf33XcffvSjH2F6ehomkwk7d+7E8PCw1Kez34ler0dfX5+sBxmLs7OzEjiz9pT/V5NMKgCgAvdqgoQ62+/3o7W1Ffn5+Thz5gzKysqku/N/5/pCBajd3d3Izs6WjlNsS686QWqamUXJbMev0+lw++23Y25uDkNDQ2hqaoJWGxmkTqXCJgP9/f1CNbrqqqtQW1srgeSqVavQ2NgoNXqkF9IohEIhnD59WsYNMHNDxX769GkUFhaiubkZBQUFACIO09zcnAwVDwaDaGtrQ0FBgQx5DofDaGpqQlVVldCeaLC1Wi0uXryIxYsXSxpfo9EIYsNr9erVkhllxoaHPyEhAa+//jq02kjL/i1btqCpqQnDw8PYsGED3nvvPWkIxQxpV1eX0BzUurXVq1fj2Wefxb/8y7/gww8/lCCY9AA2z6HwxqJbNCbqtXv3buzduxdAJEC877778MEHH2B6ehpWqxXd3d341a9+hebmZmi1ka6W+/fvx4YNG4QGazQapQstO+txrI/ZbI6iPqqXSgun4iTgQNoCqU7Nzc1YunSpILP5+fli3AgQZGRkiCNIkIWHmNmA48ePo7KyUgxHZmYm1q1bB51OhzfeeEMUp1pbwrVrbW1FOByhe+v1enR2dsLv92PdunUYHR2VID8pKQkApGsrny02S8GL9S6cdebz+VBSUoKhoSFMTU1hZGQEVqsVNptNxsnwOdlUhXVUdrsdfX19Qmnl6B+NluIamgAAIABJREFURiONblj3wuei4VMVNinozc3NOHfuHMLhMDZs2IDrrrsOH330ETZt2oSdO3ciEAhIkxaXyxU1giYYDOKnP/0p9PrI4O8777wTV155JT7++GMcOXJEZpL6fD7JAnDduXeqE6I6SUCkw+Wvf/1rqX1es2YNtm/fjltuuUW6Er788svYtWsX3G63jB/h+vP7SIOh4zE5OSnItUrDit1DtR6I90t9NT8/j6GhIRkuzrrKyclJLF68GBMTE/KZf6uTb1JSkmQ/GRComVSuC2muJ0+exMqVK9Hf3y90JLvdjrKyMly8eFH2XGW0hMNhuN1u2ZPs7GyhPFMuGXTSJhDhJ5WThpXPFYtWk8VBef/Od76D2tpatLS0wGazie4lpVCv1wttV6/Xo7m5WdgWZBJw/FFpaSnm5uaQlJQk9GXeA5suEbHm/dG4e71e7N27F6FQhNK7cuVKpKSk4LrrroPf78euXbuQm5sLr9cLvV4vY8qoU+iY+nw+VFRU4MEHH8TRo0dx5MgRKX8ZGRmR++Aa8Vl5UYb4h3ZvdHQU//zP/yx67dZbb8X3v/99OSderxfPP/+8lJvs27cP99xzzye+Iz4+XkZR0O5yLRbaM/V+VAdXBR2Z1SgvL0diYqLoJQBCJf6foJv5vJ+nbiYd8PPUzQyU/1c3fzbdvNCVmZkJl8slLBtmPakLeEbi4uKQlJSEgwcPYvny5bDZbJLBX7JkCW666Sa88cYbkqQhrZiZNo4kYtnKkiVLhGlw4cIFKatg8sVqtcoYHbfbjdLSUlRWVqKzsxPZ2dnIz8+XLH1lZSUaGxvh9XqxZs0a7NixA7fffjtMJhOam5ulBIx1tfn/dzwOZyT39PQgPT0dFy5ckNIIdhFvampCSkoKMjMzUVlZiaNHj0bp41AoUqvZ1dUl2UTq49nZWaSlpeHo0aNobW2Fx+PB0qVLkZKSgnXr1glgWV9fj1AoJH0r2IiP87Vpz1588UUcP34cbW1t8Pv9ePvtt2G32yXAJvCqyol6qfes2rfXXnsNr732mtiXxx57DG+//bYkn5qamvCLX/wCGk1kTB4p38wo6/V6aRTLPR8aGsJll10mukbVO2qijAwnNWvPMzM2Nia9dVpaWqJGD37W6wsVoAKAw+HAzMyMFFKrdYKM5LlAJpMJdXV1WLFiBVwul9TKaTQaoeJw0wFIBhWACAIzbqozo6KjNMzsAMrfqc1BVPQOuGT05ufnBR0pLy9HfX29jMohHZWHgRtPh4OImdvtRk9PDyoqKjA5OYmzZ8/i7NmzuOGGGxAKRWoYS0tLo6hRKlVQpZuSnkDU/siRI9KoZX5+HnfddReACGp59uxZhEIhMaK8T6Jlra2tWL9+vTQWKCkpQUpKCjweD1paWgSBCwaDYgTVS1W4NCoqnUan0+HVV1/Fa6+9hvn5eVRUVODmm2/Gz372MzEUBw8exO9+97soWg2HwdN5pLJnMxR+H7+fDi4zNarhU1FEAFKTRBo0ax6cT
qfMl83KysLw8DDMZnNUAxbVmBJsmZ6eRl1dndQpUFmpMstROaojxFoSAhUsPNfpdFEUM66p6liolA1VxtXAgk4ji+LpFCYkJEQhexwBweYRnNdIOZifj8ykDIVCqKiowODgIN59912UlZWhuLgYzc3NKCsri3KEgUs1MDz7fBYO4p6amkJbWxvq6uqQnJyM48ePY9OmTRK8sGOu2rTC7/dj9+7duOOOOxAMBvGVr3xFxutUVlbilVdewRNPPIH3339fzrxaC6Q6w7EBGdeZe6rTRQaE19bWCq1lw4YN+PrXv477778fBoMBFy9exGuvvQaXyyXZMXUGH+m4zI4sJLux54kyz2CHjQ84gqa3t1eaXrFTbW5uLtrb24Ux8mlIvarb2H08Li5OAAY10OHr6OwfOHAAK1aswMWLFwWZzsvLw7lz52CxWATF5nmjwST9imvkcrkAABcvXkRGRga6u7sRFxcnc0v7+vpgMBiE9kXqYHZ2tmTsrVYrqqqqpHvkmjVrUFtbi8rKSqkXpi5lhjA9PR39/f3S9CgYDCIrKwsej0ec/qGhIZSXl2NmZga///3vAQBOpzPKoQYiBp30VrUDNs99eno6ysrK0NfXB6/Xi6amJuj1evzlL3/BsmXLcPfdd4te/fjjjxEMBnHu3DmUl5djamoKzz33HB5//HE0NTXhxRdfRHt7O9asWQO9Xo/Dhw/j4YcfRl1dHU6cOCGOLB0ztYQkVs7Uf9PhBoB3330XO3fulM/auHEjvve97wnl0OPx4O2335bygvHxcaGAEVBgzTXPvHq+YksyVHup6g3qKJfLhSNHjiA3NzeqSQjnXP5P0M208Z+nbmYfjM9TN3d1daGsrOx/dfNn1M0LXQzaZ2Zm4PF4YLPZxOdU6Zf87ri4OCl3YIKFepdMQ54vXpRTtUcM/1YTRlwjNQlB0CeWqh17qQAVn/Xw4cNIS0uTMhUGx7TrHP2jMiXZjZY1xUw0zc/PS5kLg2ief4fDIT6wWiMcDkdYMwSWgEh3dr1ej/fee0+AQzYv4ziowcFBYUywQSPfe+DAAdx6662ora1FamoqnnrqKdTV1WHXrl0Sn8TqY1VnLLSGKmvQ5/Ph6aeflmfMy8vD448/jhdeeEHk/09/+pOU47BplspOoVyqMRG/n/vI+Aa4FMCqsRnP3cTEBLxer2Tf/xYAHnt94QJUUllZqwJcmifGhQEuGVFSQVmDRUU/OTkZhR7xb+AS+sgFpmNG2gZ/xg3jRSOjBje8+DMKPe/PZrMhGIx0SiMNQE2ZazQR2g2zEfwZFRfT9qyJoRI4ePAgcnJyAETG08QWbhNlCYfD0tkuHA7j/vvvh9VqxejoKC5evCjPefjwYaHO3nTTTSgsLJSRDn19fcjMzITP50NNTQ3Wrl2LH//4x3jxxRcxNTWFhoYG5ObmYsuWLWhoaEB6ejpeeuklbNu2DceOHRNnlsY7Fhni+ql0KzWg1el0OHfuHM6fPy9OwsaNG3Hvvfdi9erVwqVvaWnB0aNHYTAYpKEOsy5cFxXp4WHjHqudyxjkcy3VbmXDw8OYmJgQemogEJC9JQWG9EN2MeXe8P5VGvLJkyelfovGl8g05zKqdBs+F2njbKCkniGeldgmCKRUxQIEvJgtovywbkJFhAFIpzfKbFJSkhg6OkakFJJqU1NTA6PRKK3f09PT0dLSIvMmNRpNVGduAjCsiczPz8f69eulDobr2NnZiaamJgSDQVx77bXIy8sDEDF6pN9otVo0Njbi9ttvx7PPPosXXngBLS0t+Mtf/oJFixZhy5Yt0uBp//79qKqqkk7Yqampcg5VZ0g1GLxn/lylH46Pj+Ott97Ce++9h/n5eRQVFeHxxx/HU089JTVSAwMDYuA0Go2MTZmbm4PVao0qdVANhUpP4v6qTi3lQWVAdHR0IBAIwOfzobGxEbfeeqvUIxOQ+rRLDd7I0mCX6ljknk6H2WzGhQsXxEAFg8GoDsiku3I9aaTpGKl6mw6K6vBQxnlG1aBmIYNuNBqjAMyZmRnpWqvWnhsMBqHwsXZGq9WKA2E0GmWUANkV9fX1wvoZHR1Ffn4+3nzzTaxatQrZ2dkCQqojgM6ePStotcViQVFRERYtWoTp6Wn09vZiYGAARqMR58+fR0NDA2ZnZ7FlyxaxTzx7cXFxcLlcCIVCePrpp3HFFVfg4MGDeOONN5Cfnw+r1YqMjAwpjfjud7+Lt956C729vVHlLHRkYzNTqo7hPqlrAgAHDx7EgQMHEAwGkZGRgccffxzbt2+XdZycnMTevXvR1tYm8jE9PY3i4mJZD+4dP18NxlQamQrKBgIBaQQTCES6uqrX5OSk6K3/v+tm3u/nqZszMzM/d93MwOZ/dfNn080LXc3NzViyZAlmZmbg9/ulUy3vSw38KN9xcXGor68HAKnJpX9G3co6d3W0iRrwWywWxMfHY2pqCr29vbjyyivldwSgVPafVqsVHy4/Px86nU7qRoPBIJqbm7Fo0SL4fD6sX79e7oEz5LVarZTIlZeXw2QySeBHPUg9zICYPRMuXrwIq9WKoaEhZGdnY//+/ZiZmcGmTZsEOIuPj8fY2JjMZh4dHUVJSYlknjlqamhoCB0dHUhOTobf78ef//xnGQ+5evVqNDY2SsBqMBjg9Xrx+uuvY+vWrXjwwQfxl7/8BR988AEOHjwocUpaWho2bNiA119/Hf/2b/+GHTt2yGgctRRQDf55bvh3rN9MOevq6sKTTz4JrTbSj2XNmjV46KGHopgHXV1dqK6uhlarxdDQkGR0VR2iAmfUlfy3+nveM0Eh6mraMbXs4rNcn96y8f/RRSVPYVSzm6qwqwGMwWCQJklcMHWGG4MitdunWhPAw6tmVIHozmU8bKwBWQg1i0V/eViBSA0Mv4sbyowAmwioqKRaB8isoMlkEiQnGAwK7/7AgQOorq6O+m4GnpOTk0JlplEJhSIjE1avXo3LL79c1oNo4RtvvCEZ1GAwMqeQzsDJkyeF4peeno5f/OIXGBkZQVtbG5qamoQOTBpERUUFEhMTZQg1UXPVmKiXipqr2W1V+YfDYRw4cAAPPPAAnn32WWkusGjRIjz22GO4++67kZqaKmCF0+mMMlCxqDCdLZVmpSp3ItI0MpSV8fFxjI2Nobq6Gt3d3bKOBAXm5+cFUOCl0je4V0TauPcc+M3voSxSdri3dNb4uVwbde0okwRXYjP//L36GtWIh0Ih7Ny5U6iOlHOv1xvVsl41gFTMKnpLhJPOLGscWltb8eabb0pDGdar8T1sEsN74p4uWbIEq1evhsPhEGNlt9tRU1OD3/3ud/J+1g1zb3mGxsbGsH//fgQCkXbyra2tstZ2ux133303SktLpdEaQbLYDsmxMqwqa54fXpSnjo4O/P3f/z2eeOIJtLW1IS4uDqmpqbj33nuxbds2obSqOoiXinBTvxBYUfUivx+AGHc6hHq9Hm63G1NTUxgfH0dra6vMIqV++WsXAzSv14vu7m6ZW8or1mFWdRt/Rt3F1/N16lqpwRJlnP8ntY6GUJVn9buYbVJ/
+xZs2aczaiaIVDahlRtm7dGtSIQtcJZkQ5dOgQXC4XKisrgxpRCKPBjCiKovCeCWZEcblcSE5ODmpEWb9+PSsmwFAjCuDPvwtlRHE6nXj99deDGlF8Ph97ntRGlPDwcD4XKIqBzqnS0lKuFPxTBmFMLYfF9QSGykqq4gv4FXjy4tH9krJHa0v7Sb2nSBlR45oG4Y0iKOgerVYrBgYGuOWgyEfEIXIUr9ffS/mBBx7A0qVL0dbWhpMnT3LRTLpvem6RNwUzHNL9kzfQ6XTivffe45aAbW1tiIiIwLBhwxi/9EOyi+aUWtKEh4dzBExeXh5KS0uhKP7K8TTH4n70+XwBfEVchxMnTnBkkcPhwKeffsrdMubNmwfAj1mr1YoZM2Zg165dUBR/ETBFUbiTBRVbNRgM3KOdlPMTJ05AURSuQk/zaLFY8P333wMAbrrpJuh0/uKatM7d3d18vauvvpr5kCgbz3b82yqookVKXX1M1MJFiw3gJ50//PADCgsLuTWJLMuskMiyHNBLULT40WFBmw443fIlmMWF3Pzh4eEoLCyEz+cvf56dnY3W1tYhC6K1SLLsD6e5++67MXfuXK6wRQKArkNDDAENdn8A2MIJ+PPOjh07hhMnTiA3Nxf/+te/oCgK8vLysHnzZt5YHo8noLovAMybNw9vvfUWFEXBhg0b4PV6MeLHvooul4sVW6/Xi4aGBr4vEoji5hVL9FPOxcGDB9HZ2cl5ccuXL0dmZia+/PJLKIo/z/bo0aMoKCjA448/DpfLhVWrVvG6Pvnkk3C73VizZg0kSeK2HW+88QYA4OGHH+Z5VhR/WMTTTz8NRVHw5z//GYC/uA5hrL+/H8888wwA4O2332YlQrTCHT9+nKtKkvVQjVtRISYc07OTUHQ4HMjMzITJZGKrMa0vHQaEAdFgQmtO/yfOtehlonuizyYmJjLWhw8fjoMHDyIxMZHDS7SGGluSJKGoqAj33nsvF25pa2sLwChdW7Re0nNpKbE0TxSmDpw2rjgcDq5WKob9UGENagUikhvKRSUCSXNLocEAONcFAPf3Fe+diqHIsoxhw4bxPFZVVQHwh9imp6dj9OjRMBqN+P777+F2u2G32xEeHo6CggLo9XoMHz6cG8SbzWbs3r2bc27omqSYK4qCgoICni/xGXw+HyZPnhyACSo+Rs/3pz/9CcBpw5miKFxwS7RA03w99thjAWt0rkM8WNXvhfrcwMAAF88iwwwNrUNba4h4UhNtEdP099TH2OfzYfTo0RzGHhcXh/7+frhcroD2HWrF2ufzob29HeHh4cjKyuIQKafTiYMHDw6ZAxFXohFE3Nfi31AIPIXYA/68+q6uLni93gAvWnJyMhRF4f56siyzrCosLMTg4CDPAbUyIC+aJElITEzEwMBAgOIvjoGBAc6HSkpKYmUyIyMDOp0OfX19SEhI4Iq727dv54gAn8/HhjZFUZjUu1wu7Nixg4kLDafTiS+//BKSJHHlYOA0/qnlycUXXxwQ0k3f8dlnn0FRFNx5552ackasEioWSSKZIZJekWgPDAywwYrONfFME0O7aV1FY6F4TVprNW8RcUxRFXR+p6SkwO12IyMjA01NTZp4Vw9J8kd1XXXVVZg3bx6MRiNqamqCKs9qhVRUoun7aG6of+rg4CBMJhPi4uKQnJzM+EpNTQ2okk7PJHKKESNGcP0FCp2lyr4AuC0LtdQTn1GtMBF5bmpqYoWqubkZjY2N2L17N8LDw5Gfn88FYiikuLKyEoWFhWykW7RoERt+WlpaIEkSZs+eDeB0f3QKI7344osD9i0V5PR6vbjgggsgSVJABff+/n60tbVBUfx5e6I3m56pvr4+wHNNsvD7778/px7VIsZETkh8hNaYuAkpiAaDgY3lERERQZULkZOLgzApyjt6Rrq2OKi3sNvtxr59+/heaI6CYVx8TnJKpKam4s4770R+fj76+voCquoCp0Pd6ZnU+0C8VxqUTmKxWODz+XDy5En+m7CwMPYaknGG5kZ0JLjdbgwbNowLcxUWFvJrqtKv1+uRlJTE80QedXG+Kf/96NGj8Hg8CAsLQ1hYGIqKitiAnZycDLfbjbFjx+KHH34IMEZQtWsKoaZoAPJY07qrDQ9U+4JkGqVdUQEvUR6SAZhaS4r7V200PNP4t1VQgaEFZ0hAAoEVdsXQGOqXRVWvqDqhaEERrVfiNWiIky0eMvQ7CRH6Wwpx6e7uRllZGZqbm9n7Yjab4XQ6WWATkRR/AHC14QsuuAA333wzHnjgARw+fJjDKNXWTvGZgpFLum+KracS8o2NjQgLC4PBYMC4ceM41EaW/Y2yRSsvbRQqHET99aZOnco902bPns0HSVZWFh/4YrsZmm/a5Lt27eJqfZRrSHkiOTk5cDgcmD17Nux2O2bNmoXW1lZ4vV4YjUa2+IieGtoMdMDRnFBvKnqf1lGSJK5K7Ha7OVyC/kaWZZhMJrhcLkycODHAmEFWP0k63ROKqgCKuBVxSYPWkgQTeVD7+vo4fELEuXgA0L9EEtXEXTwMSLjQe6SwiKTM7XZzi4f29na0trbiyJEjTM5F66X4Qy0hKBT48ssvx8KFC3Hw4EEWcHRf4v2rSbkWXsmCB4D7p0ZGRuLw4cOcL0eHa35+PisLNMLCwlBYWIjk5GRERkaytyE+Pp7JMuVzud1uroZLvTZFpZAIts/n4xwqRVE4BOr999/nQ+nCCy9kzyflKA4bNoxzHgsKCvg6VFGPKkUSPltaWiDLMn79618HkFi73Y433ngDsiyzdV4sWuXz+fDKK69AlmXk5uZy+wUx58NsNgesAXmgent7uTjD+YybbrqJv0PEjfq1+FySJCE3N5crR1IuNnA6SkDEOpEpIpHkpSHc0B4VMSvinqzNcXFxOHToELq6uuB0OlnJpM9t3rwZ0dHRaG9vDzhnROXCZDKhr68PFRUVXH365z//OXQ6Hfbv34+ysjI2iqoxLmJdfE9cH7JckxFvYGAAmzdvxmuvvcb7Q1SCdu/eHXAe3nDDDZwr9c9//hNerxdz587lMM2LL76Yr02VYiMiIgIautP80Rzu3LmTCWtNTQ2WLFmCZ599FpLk79WblpaGmTNnwufz4cEHH8QLL7yAn//85/jLX/6CsLAwFBQU8POVlZWxgYQIo8/nQ39/P3p6erB06dKA4it9fX3o7e1FR0cHxowZw4obrUdzczPsdjuqqqqg1+txww03BFXcxPkXZa2IUS1OQSGo58MpRAVYi1MQ7oNxirS0tDNyCtpfwThFQ0PDeXOKqKiokJxCrG2hxSny8vLOyClIDpwrp4iIiOB2LcE4BVUuDsYp5s6dG5JTqPezFqdQFCUkpyDsB+MUsixj1apVmusRbNx0000BGBQ5bbAfwF91mfqxR0VFcaocpQqIXlFRrgLgCuWiQVotj0V5Lcv+qJ/Y2FicPHkSu3btgsPhQHh4OPdNrqmpweeffw6n0wm73c6GZPEHAPePJT5z8cUXY8SIEaivr8e+ffv4XKH5FOdG5ITi2aQ2ICYkJHChT0mS8Oqrr2L9+
vWIjY3lFKpf/vKXCAsLY6cPyZYrrrgCY8eO5bzxnJwcjBw5Ek6nE263myOYIiIicPLkSeh0ugDPI92n3W5HXFwcWlpaMHXqVD5j9u7di8WLF2PNmjUwGo1wuVwYNWoUr8+SJUuwefNmFBQU4NFHH4Usy1i6dCnLrs2bNyMsLAyPPvoorx1VUZdlGX/84x8DuBG1g1QUv5PH4/EgNTWVz6SOjg488sgj0Ol0+PDDDzF8+HDG+08Z//YKqijYRMIrEh/g9GFNFa6ospwYQkMTpI6Np78XwUDvAaeJv2glFZObc3Jy0NPTgz179iA3N5ereYaFhaGrq4tj4y0WC1v71KTLYrGgvb0djY2NUBQF27ZtYw9LaWkpeyaDEZtgFibxWaOjo7kowcDAAF577TXs378fVquVSX9xcTF0Oh1bjWnOo6OjMWXKFERERCA8PBwpKSk4duwYAH/TYq1r0mEihlQTAaLCGHq9HlFRUaioqEBSUhK2bNnClp7c3Fy8++67cDqd+Pzzz7Fnzx4YDAb2tM6fPx+K4vfibt26FT6fD8uXL2eB63A48MYbb0CSJCxYsCBgzq1WK15++WVIkoSbb7454B4Bf/+zV199FYqi4IknnmACIWKgqamJw6xEUqcm5+r1JmwRFqklkugZozAb8fNEXkRPjnrQXhGNKIRXspAeOHAAnZ2dqKurQ2JiIgwGA4YNG4bk5GRUVlbCbrfzIaxWTgE/eaqpqUF5eTkqKysRGRnJlteKigo2qoheDsKgmoyLQ/ydFElaq8HBQbz22mvcU1VRFI4OEK3NkuTvhVxWVgaXy8WhXFOmTGGL5axZs/heyFJOCjvNO82t0WiEoig4duxYgBV70aJFnENC87xgwQJ4PB4sXrwY27ZtQ2xsLOdiUDgOHfJ6vR6zZs1ick5r1tXVxW2U6B4HBwc5V5iqFov4aG9vh81mQ0VFBQDglltu4f1H801hwunp6ZAkCb/73e9Y3t19991DPGfnMoIZNMQ9oSbR5EHV6/WsKJGcJjxoWfBF2U8YUIdok2ylz9Jc9PX1scw7cOAAIiIisHv3bi6uN378+IBiOOKg/VVUVISEhAQu9vXdd9/BYDAgIyMDY8eOxdSpUxEREcH5UOp7Fe+H3tMadIZRpUmK2HnrrbfQ3d0NvV6P9PT0AKIoSX5PAhXIIsPJmDFj2DhC7TIABISMUyVSURYNDAxAp/O3xLryyis5JaOqqor3SVhYGD755BNERUVh06ZNnEf27bffYsyYMdi2bRsrskTkt2zZwjinObBarXjrrbcA+NMJaJ5k2d83cf369ZAkCbfddhuA01ETpNx++OGH8Pl8uO6664bIFxEb58IpKBcyGKcQyTf9vZpTiDgChnIK+vtgnIJkRChOQfMSjFOYTKaQnEKdUqIehI1QnKK6ujokpwBw3pyC5jwYp1iyZAnjLBinaGpqCskpnE5nSE4h4ioYp6C1PhdOQUXzqIrzTxlaXORMPxT9ExkZyVEtsiwHFPVTy2faOyIfITyLv4vchGSVLMuorKyEzWaD0WjEhAkTkJKSgqNHj7KCP3HiRJw4cSLAsELrQb87nU7MmDED8fHxaG9vR2lpKYqLi+FyuaDT+YutUZg5Kc00tAxJaqO6eM2YmBhO2aG88E8++YSjchTF78Enb75Op8OIESOwd+9euN1ulJaWskGmrKwMsixj5syZfE0yCup0/jY81H6NlEk619va2nDTTTdBlmWkpKQgKyuL127SpEmwWCzIysrC999/j9jYWOzcuRN2ux0GgwERERHIy8vjZ3S5XDh8+DC8Xm9A7q9Op8O6devg8/kwZswY3ouET1KwqaAYzSsZEhobG+HxePDiiy8GzPnZjn9bBVV9WIikQzxc1MB3OBzIy8tjKxCBhAgdMLTKqCg0RYsmDQIOeWDUE3348GEMDg6iuLgYWVlZaG5uRlNTE7dGoYrCbrebyYBoqQOA0aNHo7KyEs3NzXj11VeRmZmJmJgYFBUVYcqUKcjJyQkQZuK9iQeMWnklay0Nuv/U1FQOgY6Pj8frr7/OXikAKC0t5XWQJAnjx4/nAgGK4vdgWCwW6PX+nnMieNW9KUUrIR1qALB48WLu/0SW0lOnTnGosNfr5RDtBQsW4OOPP4YkSdi2bRt8Ph8mTJjAz0pVJcePH8/r4/P5WOBeeumlbJFXFIU3FlUidLvdfChKksTWZ0mSAhpR04FIxZJkWUZiYiJGjx49BKda5FwUzl988QXy8vJQU1PDXmSv18thm2JBDnHtqZE3vRbXle5BlmUmNPTMRqMRDQ0NSEhIgNVq5b6iu3fvRl9fHxoaGjBu3Dh0dHTwM4r5kHSN4uJi1NXVwWq1wuVywefzV64tLCzE9OnTuZIcefLUBOhsyDldT6xQTdbpt99+m3FHwlwMBW1oaEBhYSGHzMiyjIaGBl5fyk1VFIWra1PoHn2XSIAkyR82Pnv2bFYoCZ8+nw9dXV3o7Ozk/F5F8Ydcbt++nddJ7UnXIq6kfIg4Eg/K/v5+NhaJc0fYoLQAqlRMxIKIAylcXq8Xubm5AbKD9vv5DPFgP5O1nuQe5czW1tYiOzs7gFBRhU5FUQLIjl6vZwOKaBgSw+XoPfV9dXd3o6amBqmpqQgLC8PKlSsRExPDoVokp6n6OxW1EmU1hcgTWZkwYQJWrlyJxMREFBQUIDs7G4mJiUhJSUF0dDTLS1GBpiFijO5TDEMjYkhrSJ4evV6PzZs346WXXuLPPPfcc/zdbrcbMTExuOuuuyBJEqqrq+Hz+QtmUIslqpHg8/kL3IgKPc2XaBSTJAllZWUYNWoUvF4vTpw4gZMnT+Lhhx+GXq/HLbfcwt41WZbxwQcfYN++fTh8+DD2798PvV6PCy+8kPPtqCXDmjVreB3tdjvcbjeqqqowefJkNuYQifJ6vdizZw8iIiJQVFTEfauJhOp0Orz55ptwuVwBfWXpGc6HU3R0dITkFOI4V05B7wfjFFTUJxSnoBGMU4wbN+68OYU4tDhFf39/SE5B3tNQnEL0XGpxCtoXwTjFs88+i7a2tpCcYvfu3SE5BXGfYJxC3LvnyikoBFSLUzzzzDNs6PipQ4t/aP2InyWlkKrVE8aNRmMADijXl3Di8/kC2uvQXInGDLXiJ0n+/PLu7m50dHRgypQpaG1tRV5eHhvYMjIyWKk6cOAAK+oUQkt7lQp5tba2Ijo6GhERETCZTJj7Y8XfqVOnIj09HT6fv3UShQSrFVAxakPEvPgc9D5FqERGRuLUqVN48cUXsX79esyaNQsREREBVWzJQ37HHXegrKyM/7aioiJAxsmyvxYMhfgD4IrBZNzu6uriqLLOzk4UFRXB6/Xi/vvvR3NzM1avXo2MjAzEx8dz/2pJkvD8889jz549GBwcxGOPPcbnA3Gor776ChEREfjb3/7GmLPb7WhtbYXFYsENN9zA+JZlGd3d3XjmmWc4xNhoNLL3X6fToampCe+//z70ej1cLhfuueee/3lFksTXocgO/Wu32zFixAj09/cHAI8+Q2GaItlQb2RxiIqkSCLEe8vM
zERSUhIKCgpYqaBE48HBQSQlJSElJQU9PT0BnjAaBOIJEyZg5syZeOihh2AwGDB16lRkZ2cjNTUVycnJHNIm3r84xENStMbQIUQgJwKk0/nzSaKjo+F0OvHiiy9iy5Yt8Hg8qK6uDhBeEyZMwP79++FwOLhEdlRUFD8LeYPICkdzR0MUBhSadPz4cc4LiYqKwtatWyHL/lCz66+/Hh6PhxW/VatWob+/Hz6fLyCkgg4zSfKXlyfBoygKhx+Vl5fD6XTi1ltvDcCTx+PBjh074PF4MGbMGL62OIdU+ODtt98OUNQof6Svr4/Le9Nang1WaR4iIiKGYJUOI1L+tLAq5lDQOolYpe9RK4eE1cTERBQVFeHAgQPIzs7WxKpIwESLJ2F16tSpmDZtGnJD95XeAAAgAElEQVRzczF8+PAhWKW8HbGqnhZW1QUY1FilzxiNxiFYJUudGAqqxqqiKJgxYwZX8lVjlebX6XRy4R0Rq2StLC0thU6n08RqVlYW0tLSsGXLFjgcjiFY9Xg8WL58OQt9NVYHBwdht9uZVGth9ZNPPoHX62+nAWAIVqkYUlFREbdMELFKHlaqSvziiy9yKNFjjz2G8x2EPVGhC/ZDn6d5z8jI4NBFURnTkstEDkRZRkMtv0UZRoerTqfD2LFjOaeTioOZTCYoir8o1v79+xEWFsb94eg7RCXSarUiKyuLc3ry8vLYOm2322Gz2VBSUoKenh40NzejrKwM9fX1jHF6NnGvivNC1yQlVlTAKT8pNjYW7e3tLIdpXnQ6HaZPn8658nv27AEATJs2DU1NTdDr9SgpKeHnyczMZLw4nU4OoySrOIX6lZeXw+v1clEl8oB+99130On8OZPJycn46KOPmLTW1taisrISO3fuhCzLHOprtVqxbt06OJ1Orl5N+cj097fffnuAnOvv7+cCHeTZEgv1tbW1cdVUcX/QvJ0Pp6D2KcE4hSiHg3EK9T74qZwiKirqjJxCVHi1OEVaWlpITqE2oIjzB5xOFwnFKWRZDskpdDrdGTkFGR+CcQpxLbQ4BRlKQ3EKqoAejFPQfgjGKeheQ3EK8ezT4hSEBS1O4fF4kJycfG7ep7P0mhJeCKsOhwMFBQXo7OxkQ6CYCyrKIhHjWs+q5qkitsi7qSj+2hgejwdz5szhdaAUqLq6OrS3t2P58uXIzc0NcBaI8tjr9aKoqAiFhYWYP38+pk+fjpiYGMTExMDlciExMRH5+fkwm81oa2tDeXk56urqhuCXnk/EPhnyaL+LHIxSpShKgOSTzWYLkMcLFixAXV0dAHA0wezZs9Hc3IywsDBcdtll/Dw2m433td1u1ywIKEkSvv76a3ZGpKSkoL+/H263G0ajkaOlRo4ciS1btiAxMREmkwnHjh3j6teRkZEYO3YsAHDFYpvNFhBGb7FY8Pzzz0NRFNx1110B8tjr9WLLli0AgHvuuYeNF/QZu92Ov/71r9Dr9Zg2bRrv07PG8E/69P/Dcb5kh0L7gpEd8bAPRnaCbS6R7CiKEpTspKen4+DBg2ckO1RtS4vsyLK/aE8wskPPqCinc1rUZEe8phbZIYtgMLID+C1oJ06cCEp2EhISOPRAi+zQOgQjO5LkL0bR398flOzMnj07JNmhKoLByA4VzqK50CI7lNunRXbEtgv0b01NDRRF4UILInZDYVWv12NgYAAJCQlDsCqGWIkWbsKqmqwHw6r4nhqrdA+JiYmIi4vTxCqtuxhWI2I1KiqKvZtkVROxOjg4iI6ODjQ0NHAPXTVW1UMLq+JQY5VCzMRwVjVW9Xo90tLScPToUUiSNASrZK2kogNqrFL+TXZ2NivCaqx6PB5ERUWhtrYWiqIMwSoADsX0eDxDsEokmw5mLayK6+DxeIZglUJqZFnG9ddfPwSrND+0H1NTUxEfH4/k5ORzKsKhHqGs9GpLPb0G/ArzF198gXHjxjFRIQ8qfYb60NIgBZxyerRIPr2m7yB5Zjab8eqrryIxMRGNjY3wer1YunQpjEYjysvLMXLkSKxYsYKrRBNeaW2oQjx5AEnpDQsLw6JFi7BgwQIsWbIEixcvhslkwrx587BmzRrcfvvtMBgMaGlpQV1dHZMqkXQAp/eI+BzqZ6G5GBgYQHh4OAYGBuDxePD888/j7bffhk6nQ25uLrZu3YpbbrmFvcLjx4/Hxo0bIUn+dl001zqdP2+P8Em5prReRMK8Xi+6u7uRmprKoettbW3YsWMH990bPXo0e1HfeustREZGIi8vDzt37oRer8eVV17JWG5qaoLH40FSUhITG8CvBH3zzTdISUlBcnIyPzsRwrfffhsulwvLly9nrw8pWmazmfusiuPGG288a05Be9Hn8yEhIQEfffQRpk6dCkVRuGIy4ZNqPIjrQtcmJVmMIKI1V6eVSJLE3obMzExs27aNc84oDNbj8cBoNOKHH35AZGQky2lxn5Gc7u3tRXp6OoxGIwwGA3syyTvicrm4dkN3dzeqqqpQXl7O90jYIPyJclFUPrQ4haL4UzCio6PR2trK5wP9P51PtbW1WLlyJVeEXrVqFV588UVIksQ9cRVFwbBhwzjvfHBwkIuDAf6zrb+/HxERESgtLYXRaOT8SZfLherqajz44IO8VhMmTMCjjz4Kj8eDCy64AAMDAzh+/DgeeeQRAMCdd94Jnc4f2v3www9DkiTuxWswGGC32/H5558jLCwMf/nLX+D1ehk3/f392LdvHwBg2bJl8Hj8PdPpuVtbW9HX18ehxcuWLeP5pPkhJeaLL76Az+fDs88+y0VqfsrQkrvB5LH4nloe034R+bEoj0mpJUOAljzWkvuiPD548CDLVy15TEWpyFijJY99Ph/jTEseX3LJJcjJycEf/vCHoPJYVL5CyWP13qW5IBz09fUFlccXXXQRnn/++aDyGACnWWjJY53O366GDCla8vihhx4KKY+tVitWr14NRVE05fEf//jHgOirYPLYZDKhvr5eUx4DQHt7OzweD8vjr7/++idh+N9WQV2xYsUZCU8oslNcXMwHlhbZERW3UGRHBK14HdpcTqczKNkpLS1FampqSLJDjZGDkZ1FixbhiiuuCEp2zGYzKioqWJE5F7JD9xSM7JjNZrhcLhQVFQUlOxaLhUNhtMiOmLejRXb0ej1ycnLQ3NwclOzcfffdsNvtQcmOx+Ph9iRaZMftduP+++8PIPpqsqMoCuLi4jTJztdff41Ro0YhLS2NrVukaFMlwrMl5hQ6JraYIWJExhQisGKIDmFWTFgXiQkN8VqiUH322WeRmZnJVXcvu+wyDAwMoKKiAkuWLMF1112HjIwMLthAApEKWBkMhiFhaXq9HkuXLsWll16Ka665Btdccw18Ph+mTZuG++67D7/61a+QkpKC5uZm1NXVDcEkDfVrLazSMw8ODsJgMGBgYAAulwuvvPIK3n77ba446nA4MG3aNJhMJoSFhcHr9XL4mMlkCpg/CiUjbIrXJNkgSRIaGho4nI6uTWXbExMTERsbi+nTp0Ov1+Puu+/mCs07duxAeHg4k1VaR5fLhbS0tIAqeHa
7HZ9++ikXHaBrU2jZ22+/DUVRsGLFCp4nuu+BgQFs3LgRPp8Pv/zlL7l3HMkTRVFw+PBhSNLpfmX33nsvhymd76BnCGWppzmnodPp2GLf3d3NBhhFUfigE8PE1QoADSKJ4tqJspDugcYll1wCn8+HL7/8Eq+++ipeeOEFGI1GWK1WJCcnc46NujJ1XV0ddu3ahcrKSnR3d+Po0aMBpIZw2tHRgbKyMrZi22w2jBkzBvfffz/WrFmD/Px8dHd3o62tTTMMXm29p2cS54NkkCz7e0tHRETAbDbDarXihRdewHvvvYfo6Gjs3bsXDoeDq09HR0fDarXi4MGDAcZD0YBG5EjMrTSbzfB6vQHGEiraY7FYcOTIEW6ZMW3aNGzfvh06nQ45OTnIycnB3LlzsWvXLuj1elxzzTUA/AanDRs2QJb97W/oXux2Ow4dOgRF8VflpXugvC+qGpyfn8+GYfJkdnZ28r2rx9kaUER5K8ppWZbZYKUlp9XY1JLT6nsR8UrPuHnzZsyZM0dTTh8/fhw33ngjhg8fHnCvopyOjIxEY2NjQJqHKKfnzp2LSy+9FKNGjdKU08eOHeNnE+Ww+HxqhVTEKL2m2gRachrwF4+j4kRacpo4Bck4+l51gSIxVF4tp+nvIiIiNOV0Q0MDhztryekNGzbA5XIFeOrVclo8r7TkNHEKr9erKafT09N5PweT0z91iDLzTPJY5KVqeSwa0gBteaw2kKvlsfq+1PL4tttuQ2xsLB5++GFNeUxzKCo/anlcU1PD+dha8vjZZ59FRkYG9Hp9UHk8MDCgGXmmjiQQjeeiPKb30tLSgsrjCy64AEajMag8prkRowZEeUxzSsq8ljym/w8mj8k5QUUn1fJYxDW9r5bHFD0RTB6TgtvX18f3TutztuPfVkEFAoEc6oeGuLmoslswsiOCLhjZoX+DkR3aAMHIzsDAAMaPHx+S7HR1deG7774LSnbIKxKM7Nxzzz249tprOWzhv4PsbNiwAS+++CImT54clOzU1tZySCqtBf3r8/m48mEoskNhFMHIjqIoKC4uDkp2vF4vWltbg5IdAFwiPxjZiY6O5mR3NdlxOp144IEHcNVVV2Hp0qWYM2cO5syZw7lzNL9nOgjUWFUfBKGI+dkeBMGIOR0EwYg5cPYHQTBiTgdBKGJ+tgdBMKyKBqnzPQiCEXO6fihiDpzdQRCMmNNBEIqYn+1BoMbq4sWLcdVVV2Hx4sW46KKLArBKBConJyfAG3s+Y8WKFUPwqR7qfeL1epGSkoIvvvgC06dPZw+2oijcUkg0cKlllXjfauWU9psoS0nGULj2+PHjMXz4cPh8PowcORLZ2dlszFQUhRV8SZJQW1uL+Ph4mM1m5Ofno6ioCPHx8XA6nRyWRvdmtVrR39+Puro6WCwWLv6yefNmdHd3Y9WqVbj//vvxxBNPcJsWQLt6qhhZIebRU0g8EV8A3EbAZrOho6MDAHDkyBHMmzcPH330EXQ6f4XfN954Ax9//DHuuecevo7H42GvGoXE0fyFh4fDbrcjJiYGNpsN+/btQ25uLj9zY2Mjtm7dytEmXV1dXJDprrvuQkJCAioqKvDDDz9Ar9dj9OjRbFRtb2/Hyy+/DKPRiIyMDPY2OhwOrFu3DpLkbzuTmJgIm82GgYEBtLa24vHHH4fX68Wf/vQndHR0wGw249JLL8UVV1yBmJgYPP3000Pm8kwGlFCcoru7OySnIEOSiEUaopwOxSkIv6EMKGfiFLt27QrJKQCE5BRr1qz5b+cUL7zwwhk5xcGDB0NyirM1oITiFDqdLiSnoBy8MxlQQnEKr9cbklOQnNbiFKKc/qmDnDyhhppXaMljwg3JGLU8VpTTqTCh5LHaCCTK4/DwcOzduxezZ8/WlMfvv/8+RzoFk8dutxsNDQ0cMquWxxdeeCGOHDmCU6dOBZXHt912GxwOR0h5LN63Wh6rX2vJ4+eeew4ulyuoPBa9s1rymDzVbW1tQeVxdXU1HnnkkaDyeMWKFUhISMDq1as15bHNZsMdd9zBnFRLHsfGxsJkMqGnp0dTHiclJeHSSy/FokWLWB7/j8lBBYaS8WCDAEOkhnpyiZ4yIkPA6RwKcXOKG1CsvkUkmg4XtcUVAE6ePMmevezsbPziF7/AqFGjYLFY0NPTg6+//hqSJAUkmlMY5datWzF16lR231O7FuC0wM/MzGSCQE19qcF6X18fZs2ahXvuuQdpaWmcMwCc3lwioRetjmLiPf1Oh6wsy4iLi4PX66/q5fF48P7776O2thYej4c3eEpKCr788kt8+umnbI0Rc3LIIigCk55FURTuk0Qez7CwMPT29mLv3r38+ZaWFpSWlkKv1+OKK67gyn/79u1ji6R4/5J0uh8VkTgSgIC/QiA9I62pxWKB1+vF5Zdfzj38rrrqKixbtgyAP9wG8IdeJCcnIy8vj0uFU8XEM2GUsKbGqtVqDbBQqrEqjmBYpTUkXAfDakVFBQwGwxCsUlEYKmRB3gERq2FhYZy/c+zYsSFYlSR/sZ38/HzU1NQMwarD4cBvf/tbrFy5EmazmS3bIlZFYidiVsSq+FqNVbvdju7ubuh0p8u0q7H65Zdfcll3LayKuWuSJA3BqqjUihWPgdNYpf7AkydPhsFgGIJVj8fD5O+yyy7jexGt+HQw0f+psdre3o62tjYMGzYMl19++RCsjh07Fnl5eaxMiVjV6/XsxRg3bpympftchohF0bMi/i6+R3MYFxcHk8kUgHdZPt0LT/y8+rU4CDOiMUgcYjXK6OhoTJ06FTqdDhUVFVyZNiIiAseOHUNFRQVXWWxtbUVmZiasVismTZqE/v5+DAwMoLm5eQhmaa3oUI+Pj0dqairq6upwww03cG424E9Jufbaa3Hbbbfh5MmTQ+Q0fSf9iHKUPiPmcCuKPyqE2iMRdvfu3cuK5sMPPwy73Y6XXnoJlZWVGDlyJF+X8sJorkSDLl3L5/MhKSmJ8ZOUlMREhpT1p59+msPNYmNjceLECa5KSn2LY2JiWA4SIVy6dCm6u7tx8uRJlJeX8/xefvnl3NfV6XSyN5MikSiVhKIOKMpFPc6HU1CV6WCcgv6OrhNMTp8PpzAYDGfkFGFhYSE5hSRJITmFw+E4I6dQK6z079lyCrvdfkZO8eWXX4bkFKKc1uIUhI9QnMLlcoXkFHTNYJyC5HQoTtHe3h6SU4wdOzYopxDl9LkMtdw9H3lMa6Alj4MpHoQRNaelQfKYQtepcKNaHk+dOhUWiwUVFRVB5TH9jdqLLxqbqT5NMHkcGRmJu+66K6Q8FhVvLXlMcxpMHnu9Xs7x1JLH4tmlJY/FawSTx1TAMpg8Li8v5wJlWvL4m2++YYU1mDxOT0+HzWaDzWbTlMfp6eksN0kek1H/bMe/tYKqtblCESDaTGlpaejp6QnwhIpWCTpgaIgHmfidNOHi5hIPOBK+F110EcxmM0aOHIlRo0bhX//6Fzo6OhAZGYlx48ahsLAQXV1dKC8vZ5CWlZXhyJEjWLx4MSwWCzc87+zsDLCySpK/+ht5Iaurq1FbW8
ul3clSBAC//e1vce+99zJgRKuuWgipn0W0xNNckpWWwsCo91p0dDS33aisrORcjby8PBbSkiRxfzan0xkg2Og+yKv61VdfcYhLUlISTCYT2traAACHDh3CP/7xD7hcLrS1tWHkyJGoqKjAkSNH2Lo0duxYDvmxWq344IMP4HA44HK50NnZifr6ejQ2NrIX9t5770VhYSFaWlrQ19eH9vZ2vPbaa1AUBUVFRXA4HCguLkZUVBTfN4Vgu91ufr6kpCTGHCkS54JVk8nEa0KHXzCsimsmfqfa+q9eX9oH06dPR21t7RCsxsbGIioqCqNGjcLRo0e5yI+IVYPBgK6uLhw7dgxZWVlDsAr4Q6qKi4sRExMzBKu03qmpqVi9ejXuvvvuIVjVGuKziPglXItYJeHocDjw0ksvQafTDcHqk08+iYkTJw6pzExYJWWdZIAaq9RH1ePxoK+vD++9994QrIaHh2PGjBkA/IqQGquSJHG4NBU1ErH68ccfw+PxYMGCBbBarejq6hqC1SeffBJerxeTJ09GTEzMEKxSQRVZlgO8LWTtX7t2LbxeL26//fYhBWXOdYhWa7J2q3+n9+h1cnIyTp48ycUaOjo6eC0kSWLSIRoGaF8QgQUCCZSIJ3FP6PV6mM1mdHZ2Ii4uDmlpabjyyiuxdOlSrF69GkuWLEFbWxsmTJiAvLw87pObmpqKmpoabssVERHBRUQovFQMQRwzZgwuvvhipKWlYdKkSdyfV1EUDrsUf5KTk9Hb24tDhw5x9VCRHImkR4zeoTkX00NoUGVLqnqsKAr++te/IicnB1dffTWOHDmC3Nxc/PznPw8g9ET+bTYb4uPjmUSKuU+SJOGLL76AXq9HXFwcMjIyYLFYOB1iyZIlkCQJb775Jnw+H+677z7s2LED7733HtavXw+dToeVK1eirq4ONTU1qKmpwebNm5Gens77Wa/Xo62tDR988AGnc7hcLl7juro63gPr1q2Dy+VCU1MTuru7cerUKTQ3Nw/B5/lwCkoNOB9OIa4V/a1aTofiFLGxsWfkFAaDISSnABCSU1DUABCcU9Bzif+q5XQoTkGEPRSnePLJJ0NyClFO0zW15HQoThEeHh6SU0iSFJJTkJwOxSmefPLJkJyCUpi0OIUop89lqOWuWvYSFuk92vc5OTno7u4O6MVJmKZ1FL2g9H/EicmIoMaIiEOS33QWTps2DRMmTMC1116LCy+8ELfeeiuGDx/OEUp2u52Nqenp6WhtbcWhQ4dw4MAB9Pb2chh+ZmYmX5vucezYsWhpaYHX6+/Z29/fj6KiIlbyxDlRFL8n2Gw2o7S0FD09PQFzKvIUmgfRy6yea/o8pdJQCpLD4cDjjz/ORiRFUZCSksJKOs0VRQuIc0/P1tvbC0XxFyYiJ0liYiJz1Q8//BCSJLFX+v3334fD4cA333yDbdu2oba2Fhs3boTX68WpU6fQ0NCA6upqrvz7+9//Hv39/VyfgyLPdDodqquroSj+gk6Ekerqani9XkyaNAkJCQmorq6Gw+FAX18f7HY7XnnllZ+E37NSUCVJapQkqUKSpHJJkkp/fC9RkqRvJEmq/fHfhB/flyRJ+qckSXWSJB2VJGnyT7ojYag3F/0ugkncXJQor9frg24uINCDKm4uAtuPz6G5uQgYkiRpbq7Y2FjU1tZi3bp1mpuLQiO6u7tx4sQJzc0lglprc+3fv59BqLW5HA4HWlpacPDgQa211HwWrc0lDvXmioqKwj/+8Q/MnTsX7e3tmpuLCJtasNF16XP5+fmam4sO6ilTpgTdXG1tbYiOjkZlZaXm5hocHITX60VbW1vQzUWJ4XTgqTcXFQSggkjjx49HU1MTDh8+jMbGRsTFxQWUP1djVU3W1VilDUwEW41VEYOhsEr/JxItkTyRslVQUDAEqyNGjEBMTAwkSUJfXx9SUlKGYNXj8eDIkSNISUnBsGHDhmBVp9Nhzpw5cDgcyM3NHYJVuleaA4/HExKrNLSwSsJbjVVFUbiSKlkR1VgF/GHM4r4WsUrfLxJQEatjxozh0Fxq3aPGak9PD8aOHYstW7bg3XffHYLVkydPoqKiAu+++y7cbvcQrB4+fBiAP1eSjAVqrJJiZLFY0NjYOASrAAJaVIhYpR6JlMdCxVbOd4iKk7he6t/F94kUms1mDl9SE1taDyJA9H1aQ7RAqz8r4keSJGzZsoV72P3ud7/Dt99+i5ycHPT29qKhoQELFy5EQ0MDy3CbzQa73Y4TJ05g6tSpiIyMRFlZGerq6hATE4OdO3diw4YNaGhoQFVVFQoKCnDs2DH09PRwXrfBYGDrtuhhEqujlpeXY3BwMOAZRWyKypDo5aT9BZyWGT6fD1FRUTAYDBwN8/HHH2PXrl0sr6m9EQCuIClJErc4oEIvALilh8ViQVdXF/R6PZKTk3Hs2DHOe5w+fTq2bduGqqoqmEwm2O129sgcP34cLS0tiI+Ph8vlgsvlwsDAAOrr6yHLMofmer1etLS0cOExr9fLPZEdDge3dSJZcv311+PAgQPsYRU9fzRCcQp6HYxTOByOkJyC1uh8OAW9DsYpRowYcUZO4fF4QnIKnU4XklPQc/x3cgoygoTiFABCcgoi9sE4BcnpUJyip6cnJKc4efJkSE5BcjoUp4iNjQ3JKQAE5RSinD7XoSV3g8lmj8eD9PR0fP/995g8eTLfJxWiAsCKv3hGikZX2o+0Z+lzpGipuYzZbOYIvOeeew4ffPABsrKy8Pe//50VMMAfveb1+tsF6fV69PT0wGKxIC8vDwMDAygsLER6ejq+++471NbWwmg0YseOHVi/fj3q6+tx+eWXIysrCytWrIAsyyxfExIS4HK54HA4eL9S8TBZltHS0oIjR45wBVrxbFEba7QMVaIMF/lJXFwcPB4PMjMzERkZieeffx7Jycn4/e9/HyA3qDCk3W5HVlZWgCHG4/EgISEBDocDX3zxBfr6+ngfHDx4EJ9++in0ej1uu+027NixAwcOHIDNZsPatWu5kFpVVRW6urrw/PPPs4f38OHDeOedd5CSksLtcjweD3bt2oXHH38cAPD888+zwdDhcOCHH37AJ598Ap1OhzvvvBMdHR0oLy/HkSNHUFdXh8bGxqBndrDxUzyoP1MUZaKiKFN+/P2PALYpilIIYNuPvwPApQAKf/z5DYCXftIdqcaZSI74XmZmJhwOB44fP665uWijUDipenNRKWcSeFqbSzxIgKGbq7u7GzfeeCOys7M1N9fJkyfh9XoxceJESJIUdHPJsoyjR49qbi66TwCam2vNmjWQZZnJk9bmoiGSHfXmEi2u9FnaXLSByWKrtbnocAq2uagCsMfj0dxca9asgST5w0b37dunubkoR4PeU2+u+Ph4Xstgm4sUr4cfflhzc/X19aGyshIfffQRtm/fDp/Phzlz5qC3txd2uz2guq8WLrVw29PTw541l8sVQMpFnImkRfweUtBoXcS1k6TA0BNxbYmUv/vuuzh48CAee+wxxraalA8ODmLv3r3IyMjA1KlTMX369ABS/tRTT6G/vx8Wi
wUbN25Ea2trACkHwP1xAb9nTE3KqZiZXq9HWVkZK3tqjKqxqw49FLFK+SFEyiXJb3kvLy8P8BCIPUfJU0YHqEjKZVlGT08PfD4fOjo6uKiLSMo/++wzhIeH46OPPsK+ffuwcePGAFLe0dGBjRs3wuVyIScnB06nM4CUU7838sxQSKBIyomMORwOfPfdd/B6vQGk/LvvvsOpU6fQ39+PDz74AD6fDzk5OaiqqkJLSwssFgtOnjyJUaNGca7Qxx9/DEVR8Itf/AL/N8bVV189xDsoEnbRUEJzm5WVBavVioKCAng8Hm4dRBWkyXBCOKHvoH1H/0frTj+iEkGyng51+r+rr74aixcvBgB89tlniIiIwIEDB2CxWDh/78SJE3y/P/vZz7B7924YDAY2zBmNRvT29uLll19GbGwsnE4n9uzZg3HjxqGsrAyVlZWoqqrC8ePHeZ6MRiMiIyO5YJ/BYMDChQtx0UUXYebMmZg1axYSEhK4qqsYOqjeD6IVX23gokF7g7xkbrcbZrMZTz31FP7+97/jz3/+M4esUrsdCjHPyMhAW1sb0tPTYbVaUVNTw2fXyZMnWcGLjY1Fd3c3HnzwQURHRyM6Ohpmsxn//Oc/4XA48Pnnn0OW/f3z1q5dC0nyt9qgHpD79+/Hv/71L8TExCArK4u9RnzsJf4AACAASURBVFVVVdxT86uvvkJYWBiMRiPi4uLQ2tqKRx99FJIk4eKLL0Z0dDRX9x4xYoQmRoPxB3GegslpIq10TqnldDBDGq2XWrZpyWnaFxaLRVNO01lBefBacjonJweSJCEjI0NTTjc2NuKqq65CbW0tgKFyWh0RoCWn1WkZWnKahpacpvcNBkNQOZ2YmIiZM2cyxtVymnr5kqFeS04DgMlkwv79+zXltE6nw0033QSr1aopp8V1DSanFUXBBRdcwKHLajl9zz33wGazYXBwUFNOu91unDp1Cn19fUHl9LmOYPJYLTvptdVqRVJSEnp7ewO4MCnJtBZqPIsGNFprcYhGasIOYYA8hTfffDNmzpwJt9uNbdu2oby8HDt37kRpaSm6urqQkpKC7u5u9PX1wWazYeHChRzp4XA40N7ejqSkJFRVVfEZfNFFF6G/vx8tLS04evQo3nvvvQBFFwBXfiZZ1tLSgp/97GcoKSlhmZyUlMR4EO9djXe1LNEaojwmLkLyuLy8nPeex+PhEFyaN7fbzZ54p9OJpqYmns/a2lr4fD6MGjUKer0e7e3t2L59O/cG7uvrwzvvvAOr1cqRje3t7XjnnXfg8XiQkZEBt9sdkHsdHx8Pm80WkDJotVphs9ngcrmg1+sRFRWF5ORkuFwujii7/PLLOfQ4KioKeXl5Q1LWzjTOJ8R3CYB//fj6XwCWCu+vVfxjH4B4SZIyzvUiwYiO1uZyu93w+XzIzc3V3Fz0edHiJl5H6wChIR4y6rAScXN1d3ez8NfaXOPGjYPZbGbLh9bmWrt2LTZt2oSGhgbNzbVo0SL25gFDN9eGDRswffp0zJgxI+jmUgsJGvT8ohDS+owkSVwK2263a24uOjjoUFJvLjqYQm0ug8GA1atXA4Dm5qqtrUVrayuHmak311NPPRVgjNDaXJGRkSwotDZXREQEt7OhwgjkYXS5XGhtbQ0g3MFIOb2WJAnt7e2IiIjg0DvKHxAtZ/S7KPhFvIu4VGM1lCHimmuuwfLlyxETE4PPP/8cY8aMQXV1NWw2G4dVdnd3c05IdHQ0XnvtNdTX13P7FrPZjPnz5+PDDz9EXV0dH8ZZWVnYu3cvGhsbYbVaWbkAgGHDhiEyMpI9FS0tLRgYGEBJSQlmzZqFkpISjBw5MoAc0ZyqyZBIAknhVRMkui6tIXk/HnvsMaxfv569hpQ/R3/j8XgQGxsLs9mMuLg4DAwMcE57dHQ0Vwamw629vR2lpaVwu91cPZI+c8cdd/AzuN1u6HQ6PPzww/B4PHC73Ux8PR4PcnNz2UtYXl6ON954A5Ik4T/+4z84nD8mJgbd3d3cHmnp0qWIiYlBW1sbMjIykJKSwnNMBUYuuOACxMTEICIiAk6nE5WVlTh+/Dgb8ygS4797kFcGCGx/ZTabodPpYDabeT0BBMhvUbEVv4P2NhkVaKhzg9SGRZ1OxxEwnZ2dKC8vR1JSEi655BLMmzcPeXl5uPDCC/HVV1+hpKQEPp+/+El7ezuysrIwadIk1NfXo6OjA83Nzdi1axcWLFgARVGwatUq3HTTTQgLC4PJZOLWEnl5eQFWZDVRp9BHyiskz5Fer0dLSwu6uroCvMj03OI8kAKvVrpE5dbn87HxlkKvqAAMeUPJsEtnRkJCApxOJ7c/io6OZqOryWRCSUkJnn76afh8PvT29sJgMGDNmjUAgPr6erz77rtwOBxsKK2trUV7ezvi4uJY8dLpdOjo6IBOp8NTTz2FrKwsZGZmIjY2Fj09Pejs7ITdbkd+fj5yc3ORmpqK2NhY6HQ69obcddddmD9/Pi677LKAQm8iBs/WeKIlp+12uyY5JzlNf0teWC05LWIwlJyOj4/XlNO9vb3YsWMHoqKiUFZWpimnyfhrtVo15XRqair27duHtrY2TTlN4XxkYNaS0xEREWw8obkNprRqyWn10JLTf/3rXzF58mT+e7WcNhqNsNvtyMjICCqnJcnvJQsLC9OU01arFfPmzeOzVS2nHQ4HSktLsXbt2pBy+pZbbmG5opbTTqcTLS0tMJvNmnJalmX84Q9/4KI6WnL6XIaWsUocWoaatLQ0tLa2YtSoUVAUf9oUYUmS/CkXiqIEtA4SsU+D+DhhQpRb4tnt9Xq57RqlFFRWViIqKgoPPvggLrvsMlx55ZXIysrCzp07UVxcjLi4OLhcLjQ3N6OjowOjR4/Gxx9/jJSUFOzbtw8nTpzAtGnTkJqaioKCAkyZMgVFRUXYuXMn+vv7sXfvXmRlZaGurk5Tyc7Pz4fRaGRMUmQP9VStqalBZ2dnwHOJ+BY5p9ogJr5Hc0Ze6b6+PmzatInlDLWclCS/d9pqtSI1NRUmkwm5ubkYMWIEBgcHERcXh+LiYgCA1WrFVVddhREjRkCWZWzevBk6nQ7PPPMMdDod51l/8803fL5UVVWhp6cHL774Ip8DhG0A2Lp1KzIzM5GZmYmmpiY899xzCA8Px2effYacnBwMHz6cz4iGhgY2OM+YMQOXXXYZLr/8cjbI/iT8nuXnFABfS5J0SJKk3/z4XpqiKO0/vu4AkPbj6ywAYvJHy4/vBQxJkn4jSVKp9GPIsObNBdlcWt4qAOjv72fLdG9vL8fw00IDpw8AIsniRqGWEvTd4r8AAgiBJPmLp4jhK3TYAn7SlZycjOuvvx4zZsxAdXU1+vr64HA4EBsbi/j4eHz99ddITEyE0WjE+++/D6vViu3bt+POO+9EU1MT0tLSEBcXh0mTJsHj8eDDDz+EyWRCRUUFCzyRiIkHbVRUFFvnidwajUbYbLYA5VbcKOKGIYEjfr+aUBGpioyMhMPhwODgILZs2cIgVBSFlTfKgYuLi0N/fz9ycnLQ09PDhQ2ys7MhSRJi
YmKwYsUK2O12NDY24uDBgxgxYgTPO1XHu/322yFJEjo6Otj688QTT3BRAbvdznOdmJiIjIwMZGZmor29HW+++SYrr3l5ecjKykJKSgo3hZZlGaNGjcLMmTNx5ZVXIi4ujnO5JEninqf33Xcf+vr6kJ6ejuzs7KBYFcOm6PejR4+ipKQEn3/+Oc8LET8ieeo1JQySUCchSN9JBywJSvpbp9OJ8PBwjB49mquklpeXw2AwID09HfX19Vi6dClk2V+N02azYebMmRyetHbtWixcuBAJCQnYuXMnBgYGcODAAW4RVFlZiWXLluGGG26AyWTCnj17uKIitTtQ51wpioJ9+/YxIaf5JczSIaCVUygehB6PJ4D8qTEt/lCI4+DgIE6dOoVTp07x91JJdzWpp1Ac0cDi8/lgMBiwfPlyDsukeXU4HAH7x+l0DjH2kCeCDFu0XikpKXwIJCUl8T3l5OSgoKAAI0aMQEZGBodYSpK/yt/MmTNx9dVXY/z48YiKimLFgYqeLFmyBC0tLTCZTBgxYgQyMzMRFxcXYAQIZe39qUN9IIu/q39kWcaYMWMwODjIbTNiY2MDcv1o7tQWeiJMomynQZhRF/SgvWKxWPDpp59ClmVkZ2ejuLgYWVlZSE5Oxrfffgudzt/LDgB6e3s5FNFmsyEtLY0LjTkcDlgsFpSUlKCmpgajR4/mddmzZw9jvaSkBEePHkVUVBS3IxMVJEVR2GM0a9Ys/OpXv8Ktt96K3/zmN7j33nuxcOFCOBwONDY2YnBwMCAvSZx3UUFQy3hxnUm2x8fHIyoqCvHx8ejv72fiT22zwsLCMDg4yIqAxWJBQUEBKisr2eJ+7NgxfPvttyxj6+rqsHr1auTm5jKxPXz4MBRFwc6dO5GRkQGdTsde1HfeeQepqansqaXc8YkTJyIyMpJDWtetW4eIiAj87W9/C/AkUqu3jo4OREdH46KLLsKkSZM4B/xsxv8aTwKNJ+L4X+PJf4/xxGg0AgBefvllxs2vfvUrmM1mjBgxAjk5OYiOjubqr+cytOSwuAbiD71vtVoRHh7Oyj45IsRoFLXnkGS5eD3RSCOusXhNMfWJ/l+n06GwsJB7nMbExMDhcODdd99FVVUV4uPjER4ejt27d6OiogK5ubmw2+2YNGkSampqMGXKFEyYMAH/p71zD66yOv/9Z+1LrpCQG8EQSEJCiiRUA5ifNuD8Siu3FhAEkWn7Q+qFejkU7Kit1B7HMwPUaanHWpj+ztS2joJ1qIJT1HN0wB40BcRMuYRwKxElJuROYgJJ9t7r/LH3Wr7ZvEmJv0rC8fnMMNn73S97v5fv+6znWetZzxo2bBilpaVorfn444/Zs2cPVVVVjBs3jltvvdWOpptOeac9Npk2eXl5zJ49m7vvvpsVK1bwwAMP8J3vfMdOrzl58qRtx6PjFfM90Z01BmcAC+Hn3pyb8TGNL2SKCzl97qamJoLBILm5uXR3d9s6LNXV1RQXF7N+/XoCgQAnTpzg4MGDTJw40a4jvHXrVjo7O0lKSmLUqFEAbNmyhUAgwA033EBWVhYZGRk0NzfT3t5u/ZX4+Hg7qn/ixAm6u7tJSkoiJiaGuLg4INz2/v73v0drzaxZs5g8ebJNcR9oqvrlBqjTtNaTCafvPqCUutn5oQ5f+QGVgdRa/6fWeqr+LGX4Egbq6NTX15OYmEhPTw/19fXWAdNa21SM6EbGfK8xpE4xOQNhN8Nq0lKcD5bWmkmTJjFu3DgrqrNnz9piBR6Ph927d1NeXk5paSmFhYXWOWtpaWHatGkcOnSIlStXcvPNN5OSksKePXvsws6TJk3C7/czatQo6/hG9wbHxcXZ1Jg777yT73//+6xYsYKHHnqI0tJS4uLiqK6u5vz5866jqM7r4GxYo+8DfOYIJiQkMHz4cHp6emhsbLQGp7293f7/zs5OEhIS8HjCa65OmDCB06dPA9i05ebmZgoKCsjNzcXj8bBz5046OjpsA1lTU0NPTw+33XYb11xzDdnZ2fT09Nhey/T0dLKzs2lubuYPf/gDgUCA0tJS2yDk5+fT1dVFbGwsRUVFtmfMOA1er9cuLjxr1iyKi4u54YYb7O97vV62bt1qG9NQKGR7Qs2160+v5h51dnaSn5/vqlOjTWdaolOnzh5IpzPl1KlzxNhNpzk5OWitmTNnTp86DQQCbNmyhYyMDFedHjx4kLy8PJYvX+6q07KyMg4ePGjnirnp1Oy7aNEiV52eO3eO06dPu3ZWGSfb+Uy76dTpKLrp1Ov1MnLkSOskROvUpPomJib2qdPGxkbi4uJ4/PHHXXVqCrfV1dW56lQpxdq1a61z5aZTn8/Xaw5dtE7LyspQSvWpU6XCIwlah+domR5y05j8q3DeY6PP/lLMzp07Z3uHTYXP6KDUGVhFp+86nxHnqJUzKHU+Gwav18vp06c5ffo0e/fuJTU1lZqaGgoLCwG47rrrevXuL168mKKiIkpKSqiurubaa69l3rx5TJo0iZ07d/Lxxx+zefNm9u/fT0xMDCkpKYwfP54RI0bQ0dHB7NmzrWMbvR6cUopFixYxd+5c8vLyrKaVCmcGfeMb3+CXv/wlGzduJDU11VaYdNphZ3BgngtnEBXtVBqCwSDDhg3rtUSEqYbrDNyMc9LT00NxcbFNsx0xYgSNjY34/X7WrVtHdna21e5PfvIT0tPTCQaDvPTSSzat3u/3097eTl1dHfHx8YwfP962n62trXi9Xu677z5770zGwd/+9jd6enpsoOK8Ti+88IINkrTWtuhW9HU2f/tz1s0+0c66mbMdHahGO6HOAmzmvTlepzPr5qyb7aYAYrSzHggEOHz4MJmZmX0665mZmfbeuznrO3bsoLm5mezs7M/trK9cuZLVq1czevToz+2sm3Pvy1k3y74ZuxftrCsVnppl0m7dnPW6ujpGjx7dp7Nu2rvVq1f36axnZWXZtabdnPXnn38erTWrV692ddZ9Pl+vzhPjrJtA+p133iEUCvHpp5/S3d1tp6ElJSUxderUz53hYtaJd9pho8Xof2b72LFj6ezsJC8vj0AgQENDg/0OZ5vr1KqxU0br0VlQzvvubJOd+jDTKy5evEhFRQVf+cpXiI+Pp7KykqSkJJYuXUpRURGvv/46fr+fX/ziF8yfP58JEybQ2trKLbfcQlFREV1dXezZs4cdO3bw1FNPceDAAcaMGcORI0eYMWMGHo+Hjz76iHvuuYfc3Fz7nEfr9c4776SsrMyui22OefTo0axbt47169fz61//2rYj0Zo3ndfOQS1n++Vsp5zvvd7w0kem08f4Yibm6OzsJCYmxhbXMh3pCQkJdhDiiSeeICYmhhdeeIGxY8fy0ksvEQwGefTRR8nIyODYsWOEQiG2bt1qv7elpYWmpibWrl3bK9X52WefxePxsGnTJvsMejwetm/fjs/n46c//SkpKSnWbpjsxqNHj/a677fddptrB1h/XFaAqrWuifytB14FSoFzKpK6G/lbH9m9BnAOJ2VHtg2YaKc+upfG+blSitbWVpt22draakdEzc13Oi9
u85Qi52h/222bm6OjlGLfvn0cPnyY1tZWGhoaaGhosI3xxIkT8fl8NDU10dzczLJlyygoKGDUqFEkJycTFxfHsmXLAHjzzTdpb29n165d/OUvfyE+Pp7a2loKCgpIT09n//79fP3rX7cl+xsbGy/pybr99tuZM2cOubm5AL0cnRkzZvCrX/2KkpISzp8/T1NTU68G2enMOnuEo4MAN2cnFAqRnJzcq5fEGHBzDyG8NpQxcLm5uXb0yePx0NLSgt/vZ8WKFaSlpdkCKgsXLiQzM5NgMMi+ffu4ePGi/Y6uri47jykhIcEerxmFXLx4sb13+fn5aK05d+6cTYF0NpJxcXHs3LnTXjfAzmUyjWFMTIydnzVixAgbcMJnc2z606nW4WIPJlUjWqfmWvc3n87NCXAbMXE2Ak6dPvPMM+zcuZPt27f3qVOv18t3v/tdZsyY4arTjo4Oxo0bR2trq6tOi4uLWbJkCV/72tf61OnixYuts+um06VLl+L3+y9JjTTXwOks9qXTaKJ16mx83XR68eJF8vPzrZPlplMzYj916lRXncbFxdl5om46dd73f6ZTM2cxWqfDhw+3qXp96bSjo4Pk5GSGDRtmR3tNqtJgcf78eZv14HRWoXcWjRl1NDpwZhc4AynntXJeU/OMmX9er5frrruOt956i66uLhobG/nkk09sA5yUlERzc7MdSfX7/XadzkAgwAcffEBCQoIt5V9ZWcnDDz9MSUkJtbW17Nq1i7Nnz7Js2TLy8vJs4JSYmGhHwQ3ObB63kVHTAebxeFizZg0///nPqa+vt/NgndfIjNg52z1zzm4dPR5PuPx/bGxsr44cZwEhpZRNRTcOiJlrFx8fTyAQ4MyZM3b9RjNn2nTu+f1+zpw5QzAYpLCwEK/XazOGtA4vW+C8DubZNnNqzdy8PXv2oLVm0aJF9h7GxsZSWlpqO71MYGaesehr6fQlnNc92lZ7PB7b/pgUX2eHgAnczPU31975WwZnZ4nTOY/WpcHv99vgygTHVVVV+P1+PB4PkydP5ty5c+zevRufz8dvfvMb5s2bR2JiIjU1NRQXF5Obm8vx48c5evQonZ2dvPHGGxw6dIiRI0fS1NTERx99REFBAXV1dcybN4+srKxeQbJTH3fffTdlZWWkpaX1CjQBVq1axY9//GPS0tJsdoDz3KKfQSfOa2augfPaBINBsrOzbTZIMBiksbHRPhMmS87rDRe0iomJoaGhwQZ33d3dVFdX093dTVpaGhkZGZSXl9vsKlPDw+fzMXPmTOLj422gDrB06VI78tvW1sbbb79NKBSuRO/xeOxItLFNY8aMsZ36bkG1oaCgoNegxr59+wiFQmRmZtrK2IDtrEpNTb1Ey18U0fbYPFPRPoc5Z4Ozkz7aHjv9ITd7bHQxcuRIV3uslCI7O5stW7b0aY/NPPmEhARXe1xVVUVHRwdJSUn/JXvsHBSItscmw8powM0eO8/dzR57vV6bnelmj813xcTE9GmPL168SEJCQp/2+MKFCxw+fJju7m5Xe2w6WJy+cbQ9DgaDdtkZN3v89ttvW90Ye5yenj4gLf7TAFUplaiUGm5eAzOBI8BrwPLIbsuBHZHXrwH/ocLcCJzXn6UCDwinQXN7HW3w6uvriY2Npa2tjdbW1l49OM75HtHG0unoGEG6/ZbT8TE9SVqHh9y/+c1vUl5ezsGDB62hTklJweMJlzmvra3lxIkTpKWlAZAbSQOprq62FVwLCwvxeDy89957zJ8/n1mzZvHyyy9TV1dHTU0NS5Ys4dvf/jY+n4+MjAxiY2NJS0vrdQ1MWpzpEYsOpE0wuHz5cp555hkee+wxkpOTbWEEYzBMA2waG/OdzuAourFRKjyyZNIftNZ2HqfzunZ0dBAXF8f58+ftgxAIBGxRgjNnzjBjxgwSExNpb28nFArxve99z5a6fuedd+jp6WH8+PH4/X5aWlrYtm0bPp+PlStX2t8x55+ammrv+V//+leam5v54x/D06d/8IMf2OsWExPD/PnzOX/+vB1xN8VynCMKpgomwJo1a4iJibGdAW7OTrROOzo68Hq9ZGRk9KlTp+HuS6fR99apU2cPvZtO09LSmD59uh3pcdOp1poJEybQ2dnpqtOYmBjq6uq4cOGCq067u7vtSGRfOjWv+9LplClT2LhxI5s2bXLVqcH5bEfr1Nkx4KZTE+TW1ta66tQ5stCXTgOB8Jp+PT09rjo1jZpJd47W6Z/+9CcCgQCZmZl96tQ0AB6Px1WnXq+XKVOm2H2idWp6XNPT0+1cbZPBcOrUKf5V9DdiGv3PHHN9fT2ZmZl4vV47ymucQvPapPKa+2meG7MMjXP5g+hONqdTbdLGKyoqbFGVW2+9laKiIrKysnjllVc4duwYDQ0NzJ8/n9bWVsrLyzl79iyhUIjU1FSbKt3e3k5jYyOLFi2yFZNramp49dVXiY+Pp6KigoSEBPLz812r0DsDArfXTmfI+XlSUhIbNmzgqaeeYs2aNYwYMeKStS7Na3PNnKOpzmfG2GkIt6HGiTx8+DCAHWkyczqN8wzhquumUvbmzZvp7u6mrKyMixcvsm7dOrTW5OXl4fP5qK+vp6qqyhbS01pb3TmvT11dna0dsGHDBuAzR+/ChQucPHmS0aNHk5SUZB2zkpIS5s2bZ3/TLRhyXpu+fInozzMyMujs7KSgoMA6nsZpNx1F5v44U807Ojp6dfA6tWd06RztNWitbeHFoqIi3n//fSorK0lJSWHUqFHs37+f2NhYCgsLaWhooKamxmrfjPTFxsZSWVnJ9ddfz4wZM2hra+Ppp59m9uzZFBUV2QIz06ZNY8KECXaKi5kGFJ1V5Uy1jy7qZLScnJzMk08+ybPPPsv9999PY2MjtbW1vYJ459QUuHTebvSInLmPTU1NttPP5/ORlZVlp4M4jzU5OZm2tjZGjRpFbGws11xzDenp6fzjH/+gvr6eV155hcTERFuR97nnniM+Pp7m5maefPJJQqEQY8eOxe/309DQwLZt28jPz+e+++7D6w1XAT5+/Dgej4fHH3/cZntAOKh7+umn6erqYuXKlfY8gsEgN954I7/97W9tQBEIBLjpppsYOXIkEJ5rHAqFl8YxS9Z0dnYyZswYm2I80PUjo+/RQP6ZKQTmN82SQM5ML4PTrka34c59nSOnzg4zs6/5nt27d5Oens6qVau49tprKSkpoa2tjbi4OHp6ekhNTeXGG2+0Bey6urrslLnCwkI2btxIKBSyS921tLRw4MABtm3bRkpKCnl5eUydOpWCgoLPbY+dunfa4/Xr1/Ozn/3MZjlGXwPTJsFn9iy6s8fsl5SUZKedmf1MoVdzvbxeL+3t7bZehkklDwaDtLS02Ermpvq88QVMUdg33niDnp4e7r33Xnse1dXVANxzzz1WD42NjWzdupVQKGQz77TWtLS02Ol1OTk51p6lpKQwffr0Xtox5+2W1dIf7qvr9iYTeDXyAz5gi9b6TaXU+8DLSqm7gDPA7ZH9XwfmAqeATmDFgI7IgVvvQn9MmTKFF198kczMTCZNmm
R70oyjYyZ3+/3+Xo6s09ExVTeNAI0QTEDqdP6NyCoqKpg2bVovR6exsZFPP/2UY8eOkZaWxvz589myZQvl5eXk5OSQnp5OamoqnZ2dxMXF9XJ0du3aZXsm0tPT+eSTT6ioqGD69OlkZGT0ui599cC4vXbblpSUxF133WUd+t/97ne2t8X5gDkdnehRK6cTaAIbUxTKODrOlBrj0JtFuwG7FuLw4cPZvHkz69ato6ysjPLyctatW8djjz3Gpk2bePjhh3s5OqtXr8bj8XDq1Cm2bdvGwoULbUBmHJ1HHnmEDRs28Mgjj9jzN45Ofn4+SUlJdpmb1tZW6+j86Ec/ss7w3LlzefHFF4GwU1ZVVcVNN93E8OHDKSkpsVVr/5lmzbWJjY0lJibGpllHB1zQO73aed+i5/mYQNwYfmealdbapgx5veEJ8unp6RQWFtqRzXfffZdgMFz9OC8vj9GjR3Po0CHS0tJIT08nJyeH5ORkfD4f27dvt3P2ampqbEXQs2fP4vf7OX36NLNmzbIBfX8MRKfBYJC77roLgMbGRpsu6HRGnT3S5vr01QCYgNmU/IfwHHazoHQoFK4C3NLSYnuvOzo6SE1N5eTJk2RkZPDnP/+Z+++/nwsXLpCQkGCDvYkTJ/Lhhx+idbjC67333svatWvts2MaUPM7ZnRZqfAyBmvXru1VeMHn85GTk8OIESNoa2tDKWWfs4qKCoqLi+01yM/PZ+/evXY0qbKykjlz5uDz+ZgyZQpHjhyxBeTMUg5KKXbs2MGjjz7a7/26XExV3IGwevVqAPbt2/cvOYaBsHTp0l7vH3zwwV7vV6y4vCbMzGsD+OEPf/hfP7ABMH/+/Cv6e/0xY8aMXu9nzpx5yT5mjquTb33rW67ft2DBAtftbuf80EMP9XtsA/EptNaXOOt9pfSazwxO2xXtqEbb6uhRV9NJHtvlRgAABgpJREFUZmz1qlWryMrKwu/3895777na6q9+9avWWfd4PIwfP76Xra6qqqKlpYUTJ06wd+/eL9RWG2ddKXWJrY52sAdqq/1+v3WOTYeb01anpKTQ0tJiCx0Fg0FSU1N72WqtNa+99hoLFiygp6fHdvIZW/3EE08QCoWssx5tq7du3codd9xBTk6O9dOMc25stXmfkZHB9OnT2bt3by9bDWFn/d133yU2NvYSW23ONRAIUFNTY+ckfh4Gao+dz9WVtitz5869ZNuSJUvs64ULF/b7/2fPnv0vP6aBcssttwz2IfQiej1os7SRk0WLFl2yLbodhL5tNFx679z2nTdvnvXDLwcVbRAGA6XU4B+EIAiCIAiCIAiC8EXxge6n/pDhckZQrwSfAscH+yAEYYCkA42DfRCCMABEs8LViOhWuNoQzQpXI1dCtzmXs9NQCVCPX040LQhDCaXUAdGtcDUhmhWuRkS3wtWGaFa4GhlKuh3YJE9BEARBEARBEARB+IKQAFUQBEEQBEEQBEEYEgyVAPU/B/sABOFzILoVrjZEs8LViOhWuNoQzQpXI0NGt0Oiiq8gCIIgCIIgCIIgDJURVEEQBEEQBEEQBOFLzqAHqEqp2Uqp40qpU0qpHw/28QiCQSn1oVLqsFLq70qpA5FtqUqpt5RSJyN/UyLblVLqmYiODymlJg/u0QtfFpRSzyml6pVSRxzbBqxTpdTyyP4nlVLLB+NchC8HfWj2CaVUTcTe/l0pNdfx2U8imj2ulJrl2C7+g3DFUEqNUUrtVkodVUpVKqV+GNku9lYYkvSj2SFvbwc1xVcp5QVOALcAZ4H3gWVa66ODdlCCEEEp9SEwVWvd6Nj2FNCstd4QeUBTtNaPRh7u/wbMBf4N+J9a638bjOMWvlwopW4mvJb081rr4si2AelUKZUKHACmAhr4AJiitW4ZhFMS/j+nD80+AXyqtf5F1L4Tga1AKZAFvA0URj4W/0G4YiilrgGu0VpXKKWGE7aTtwJ3IvZWGIL0o9nbGeL2drBHUEuBU1rr01rrbuAlYMEgH5Mg9McC4I+R138k/KCb7c/rMHuBERHDIAhfKFrr/ws0R20eqE5nAW9prZsjTtJbwOwv/uiFLyN9aLYvFgAvaa27tNbVwCnCvoP4D8IVRWtdq7WuiLxuB6qA0Yi9FYYo/Wi2L4aMvR3sAHU08LHj/Vn6v3CCcCXRwP9RSn2glLo3si1Ta10beV0HZEZei5aFocRAdSr6FYYCD0ZSIZ8zaZKIZoUhiFIqFygB9iH2VrgKiNIsDHF7O9gBqiAMZaZprScDc4AHImlpFh3Oj5cy2MKQRnQqXCVsBvKB64Fa4JeDeziC4I5SahjwZ2C11rrN+ZnYW2Eo4qLZIW9vBztArQHGON5nR7YJwqCjta6J/K0HXiWc4nDOpO5G/tZHdhctC0OJgepU9CsMKlrrc1rroNY6BPwvwvYWRLPCEEIp5Sfs6L+otX4lslnsrTBkcdPs1WBvBztAfR8Yr5TKU0rFAHcArw3yMQkCSqnEyIRylFKJwEzgCGF9mop7y4EdkdevAf8Rqdp3I3DekfIjCFeager0fwMzlVIpkVSfmZFtgnBFiJqzv5CwvYWwZu9QSsUqpfKA8cB+xH8QrjBKKQX8DqjSWm90fCT2VhiS9KXZq8He+r7IL/9naK0DSqkHCT+YXuA5rXXlYB6TIETIBF4NP9v4gC1a6zeVUu8DLyul7gLOEK6EBvA64Up9p4BOYMWVP2Thy4hSaivw70C6Uuos8N+BDQxAp1rrZqXU/yDcCAE8qbW+3CI2gjAg+tDsvyulriecHvkhsBJAa12plHoZOAoEgAe01sHI94j/IFxJyoDvAYeVUn+PbHsMsbfC0KUvzS4b6vZ2UJeZEQRBEARBEARBEATDYKf4CoIgCIIgCIIgCAIgAaogCIIgCIIgCIIwRJAAVRAEQRAEQRAEQRgSSIAqCIIgCIIgCIIgDAkkQBUEQRAEQRAEQRCGBBKgCoIgCIIgCIIgCEMCCVAFQRAEQRAEQRCEIYEEqIIgCIIgCIIgCMKQ4P8Bn1vkNrQ0yaEAAAAASUVORK5CYII=\n", - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "transformed_images = [None]*5\n", - "to_tensor = transforms.ToTensor()\n", - "for i in range(5):\n", - " t = transforms.RandomAffine(degrees=0, shear=10, fillcolor=255)\n", - " transformed_images[i] = to_tensor(t(pil_img))\n", - "plt.figure(figsize=(16, 16))\n", - "show(tutils.make_grid(transformed_images))" - ] - }, - { - "cell_type": "code", - "execution_count": 33, - "metadata": {}, - 
"outputs": [ - { - "data": { - "image/png": "iVBORw0KGgoAAAANSUhEUgAAA6gAAADWCAYAAADcga8EAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvNQv5yAAAIABJREFUeJzsvXmU3GWVPv58at/33rvT3dn3hYQlYBAlAgriQUYdFWRxQQeVcdTxBy4TWWSYo+KCI0cW9ajDV8d1PI4KGAc8EAgEkpAFknSn0/tSS3fXvn5+f7TPzVuV6iULEKCec3I63VX1qc/yvve997nPva+m6zpqqKGGGmqooYYaaqihhhpqqOHVhuHVPoEaaqihhhpqqKGGGmqooYYaagBqAWoNNdRQQw011FBDDTXUUEMNpwlqAWoNNdRQQw011FBDDTXUUEMNpwVqAWoNNdRQQw011FBDDTXUUEMNpwVqAWoNNdRQQw011FBDDTXUUEMNpwVqAWoNNdRQQw011FBDDTXUUEMNpwVelgBV07RLNE17SdO0Q5qm/X8vx3fUUEMNNdRQQw011FBDDTXU8PqCdqr3QdU0zQjgAIC3AegH8AyA9+u6vu+UflENNdRQQw011FBDDTXUUEMNryu8HBnUswAc0nW9W9f1HID/B+BdL8P31FBDDTXUUEMNNdRQQw011PA6wssRoLYA6FN+7//732qooYYaaqihhhpqqKGGGmqoYVqYXq0v1jTtYwA+9vdf169fv/7VOpU3LF588UUUi0XkcjkAgKZp8tNoNAIAbDYbACCTyaBYLMJoNMLhcEDTNCQSCWiaBl3X4ff75/Sduq6jUCigUCjI950oKE8vlUoolUrIZrMwGo0wmUwwm81V38ufixcvPqnvroYdO3bgjTCOjxw5AgDI5/MwGI5yXLquw2QyoVgsIpVKQdM05PN5mEwmGI1GFItFWCwWlEol+UyhUIDJZILVaoWu68jlctB1HZqmwWazIZvNolAowGAwwGAwQNM0FAoF6LouY7RYLKJYLMJkMsmYKhaLAACfzzfr9ZzsOOQcmOk4uVwO+Xx+2tcXLVp0UudwqnEqxnJvby98Ph9isRgAoLm5GUNDQyiVSqirq0MkEoHX68XExASKxaLcx/r6elgsFgDA4OBg2TELhQJKpRLmz58PtTxlaGgIBoMBoVAIRqMRmqahv79f3mMwGFBfX4+RkRH4/X7kcrmysaHruozlYrGIRCIBt9uNSCSCUqkk444/gaPjhuOZ47OybOZUl9EcD3iO1cbnbGN2NpRKJRSLxVmvm/fPbDYjn89D0zQsWLDghL/3ePBGscmnAv39/afkOOpYqGYb1d9bWmq5i7lix44dCAaDJ/x51RYQXEu5/laDruvQdV3mMUGbTXtL385utyOfzyOZTMrnuR6bTKayY9Lmlkol+b+maSiVSjAajWXfq9pbrv20vfw9n8+L71f5nblcDiaTSb6b36V+h8FgkPFpMBjK/FSXyyXHOhWYzfbm83lZa6ohkUjAaDSKj06o/tVcwOeoQtd1LF++/LiOM1fs2LEDAMK6rtfN9t6XowZ1I4Atuq5f/PffbwYAXdfvnOEz+qu5iL8R0dHRgUAggOHhYSQSCXEyvF4vzGYzdF2H0+lEW1sbjEYjuru7EY/H4XA4sGDBAvj9fuzatQupVAoAcMUVV5RNDDVwmQkGg0GM2cDAAAwGgxyHxoMGgz/VBZA/aYyMRqMYm5deegnRaBSapmHp0qVwOBwSDP32t789ZfeSqOakvd7wsY99TJ5DPp8vGzsA4HA4EI/HsXv3bphMJvj9fhw6dAjz5s1DNpuF3++HzWYT4zs4OIiWlhY0NjbCYrGgq6sLw8PDaGpqQlNTE8LhcNl3cEGdnJyE0+mE0WhEOp1GLBaD1+uFzWaDw+HA8PAwNE3DFVdcAWDm8cjFaCbM9DoXx5nAsWE0GrF9+3YUi8UyR+1Pf/rTrMd4JXGyY/mhhx7Ck08+ic7OTnR3d0PXdWzZsgVbtmxBoVDAli1bcPvtt2PevHno7u5GoVCAzWaDpmn45Cc/id27d+PCCy/EE088gf/93/8tCzT7+/vR2tqKO++8U+a92WzGzTffDI/Hg5tvvhm5XA6HDx/G9773PXg8HuRyOdx555340pe+hG984xt45plnsH79ehnDdrsdRqMRTz31FAKBAFpbW3H33XfDbrejubkZ+/fvh9VqxejoKLq7uzF//nyYTCZMTEwIYbNw4ULYbDakUik4HA6YzWaxZWogVxngqvbtVIHHVudmpVN2PMeiM1kqlcShm5ychNFolOtkwGo0GsW5jEajyGQyqKurQygUwtjYGIrFInbt2oXu7u5Tdr2znXsN1bFhwwb5f6lUgtlsxpve9KayYOFEwHWYx1CDChV33jmtW1hDBTRNw7XXXgvgKKGmaZrYltlgNBrF9pAw3bFjB1auXImGhgYhevmsHA4HjEajrNXj4+PHENL5fB5OpxMmkwm5XA42mw1erxcjIyN49tlnYTQakUwmhWhW30sfEwBSqRSMRqP8Ho/H4Xa75fOlUgkulwv5fL7stXw+j2KxKL+Pjo6KvaddbWhoQKFQQF9fn9h52imr1YpwOAyHwyEkt9lslkA0Ho/DYDDAbDbj3e9+txBsJ0tqA5BgeTpomobu7m7xf3g9KlEQjUaRSqXQ0tIiBHixWJTz5O/TjQ/O0UQiAWBqjPC7nnnmmZO+xumuC8AOXdc3zPrelyFANWGqSdKFAAYw1STpA7qu753hM7UA9RVCa2urGBCyQxz4gUAABoMBbrdbHBCHw4GFCxcCAPbu3QuDwQCbzYZVq1Zh//79mJiYkGNcccUV4oTP9jxVB6raa9lsFl1dXcjlcscw9Orix3OfbhKq76OR7enpQalUQiQSOfEbOc01vV7H8fXXXw/g2GAsEonAYrGgWCzCarWiVCqhUCigp6cHk5OT8Pl86OnpQVtbmywkwFFnOZVKwW63Q9d1NDY2YnBwECMjI2hubpb3kkQJBALIZDIwGAyIxWJwuVziIA8MDMDtdkPXdXg8HoTDYdhsNlx22WVlxr0S6lidadGpNrbosM8lQK08BjN8AwMD4vT/+c9/ntNxXgmczFgeHR2VbPqXvvQltLa2YmRkBBaLBZOTk7BarfjKV76Cf/u3f0Mul4Pb7UY2m8XnP/95fOMb30A+n0dTUxOuvvpq+Hw+3HzzzZKZB6bu4+HDh/FP//RPOO+88+RcR0dH8aMf/Qhr1qzBO97xDmiahm9+85uw2+0YHBzEDTfcgIaGBuzbtw9r164V5phki8ViwWOPPYYlS5bgoYceErKF3xsOh0UFUBms0f4UCgXkcjnkcjmMjo7CZDKhvb0duVwO6XQaBoMBHo9HnEU1k6BmCk703jOjMVM25GSdq2w2i1QqJYQCr4H3iedQKBQQi8XEKfb7/RgdHcXIyAhKpZI4t3v3TusanDRezzb5RLBq1SpYrdY5vd
fr9WLdunXTZsjnAipfZvpsLUidGzRNwzXXXANg7gkAgnOU5G4sFkMikcDatWthsVhgs9kkYJqcnJTPMIGQzWYxMTEBs9kMs9kscz0ej8PpdCIQCIhCwuPxYGxsDM888wx0XUcymRQyi6oVBrfMdubzedjtdvl9YmICNptNCGir1Qqz2YxMJoNMJiPnWygU5Jx0XUc4HIbFYhE76nA44HA4MDY2hkwmI6+ZTCYhyicnJyUo5Xi1WCwS5DJYvfLKK8ts3Gx2tHLO8Hzn6i8QIyMjCIfDx5CEaqIgl8vBbrfLmsYgVX3+/Kn6/KpyiWpKXtvpEKCe8hpUXdcLAD4J4M8A9gP4xUzBaQ2vLHK5HGKxmGRwKK0gc9LW1obm5maR0hE0XmTe0uk02trayuQRqqRrNqiTrdprFosFy5Ytw7p16xAMBsVI0DCrDK/KJvJv/KmyuBaLBaFQCBs2bMCZZ56JzZs3n+htfEOBwWk1g2y1WsskugaDAQ6HAw0NDdB1XRaceDwOq9UqC5WmachkMshms8JSJhIJWbRKpRIymQzS6bQwnnR0VVkQDWw+n5fvJ+OqjsfpHCR1fEyH6V6bq5Sm0pngmG1ubsbChQuPW5JzusNqtWLPnj145pln8B//8R+oq6tDNpvFZZddhra2NhQKBWFsdV2Hy+VCqVSC0+kUIikejwtzfNttt5Xdw1KpBJ/Ph9/97ncylgwGA+bNm4dUKoV9+/YJs37RRRchEAggl8uho6MD4XAYZ555JkqlEnK5nIwNp9OJhx56CKtWrcJPfvITOJ1OlEoldHR0IJfLIRKJoLu7G5qmIZVKIZvNSiDKsUcputVqhc/nw7Jly9DQ0IB8Po9MJgOn0wmn04l4PI7JyUlEo1HEYrFj7LA6HiodoWpjkdfPezGTLOxExhqPy+eVTCZhNpthNBqRy+VkDqrBSDqdBgC43W5YrVYEg0Houo7BwUG5V+l0GvF4/LjPp4bjx5lnnol169aVESOzYXJyEo899liZVH4uYAadJRc1guDUoVpgNFsWTs2kMdjp6OjA2WefDZ/PB5fLhUQigfHxcYyPj0vwk0qlZE02mUxobm4uC670v0uDh4eH5VlHIhHxxaiqqBYoce2mbWTgxyCJARP9VJYG8W/0QZn1zOVycu75fF7+MbBOpVJlZCJLwjKZjPiWaoBG30SVHvMceV+P91mdaFlbU1OTlNWpKkJ+B4PzRCIhfrrFYpG/q/a5cqzwefI4XMt0XceZZ5553Od6qvGy7IOq6/r/6rq+WNf1Bbqu3/FyfEcNx48VK1agsbERyWRS6geNRiNCoRD8fr/UDlA6ydrBoaEhGbQ0HpzIfI/q+Mx1QWJAMR04IVtaWrB8+XKsWbMGq1atwqpVq+D3+8UoVjpwKkvECcisgpq5PV4m640IBqfAUfZNfWaU2fB1ALLolUolDA8Pw2AwiMOaTCaRy+WEBXU6nZicnEShUEAkEkE+n5eaimKxiGw2K0aWBpaLFRfBVCoFk8mERCKBVCqFVCol31dJaFTDdIuG6nRPh7mMIS5yld+jaRqCwSDOPfdcbNy4EZdffvmsxzrdMTw8jGKxiL179+J3v/sd/vVf/xWbNm3C3Xffjfvvvx9Lly6Fpmm4/fbb0dzcDIfDgeXLl0v2sVgswuFw4C1veQtuv/12HD58WJhnFX6/H319fXj44YfF9mQyGdxxxx3I5/O49957YbPZsHr1arzzne/ErbfeiqefflqkUMwAkpj79a9/jUsvvRT/+Z//CafTKePz4MGDElAuWLAAZrMZDocDwBRpZ7FYyph3jplCoYBsNgubzQaz2Qy32y3v83g8GBwcRDgcxoEDBzAyMoJ4PI6xsTGk02nYbDYsWrSorKZaZcCBYyW8ZMNVh6oajtfmqQFGLpfD2NgYXC6XZBhUea8akLhcLrjdbqTTaZHc79q1S4Ld4eFhZLNZRCIRuN3uOfcwqGHuOP/88/GmN70JZ555JtLpdNl4LZVKSKfT2Lt3b1kPikoJp8FgwIEDB7B161Zs3boV8Xi86vquflZVLc0FN99888le6hsG1QLUmdYvrpuxWAzJZBKLFi3CmjVrYLVaJXs4Pj4OoLwe1e12o6GhAU6nE3a7HVarVdZf/svn8/D5fELCRaNR+Hw+TE5OSmBJ5YvBYJDgslgsih/A7ywWi0in0xI08mcikUCpVJK1nYkTXdeRzWbFlpM0pIKFwSRLONTvyGQySKVS8j4GyTxGMpmU+8Nzb2trO+5nVUmMn6i/yZ4LdXV1EmRzbvKfx+OByWRCOp0WRZvJZILb7S5bQ9TEjprcIbxerxCPpwOx9LIEqDWcfmhubkY0GsXw8LBktgwGgwx6BqfRaBQAEAgEYLVaEY/HkUwmYTAYhMkBpjJi+XweDodDDNb+/fsBHJU+zobZ3jOd9FINWletWoW1a9di3bp18Pl8EoiqE5nGVJVFcGJecsklx30v3wi47rrryoJTopIRdTgcZc650WgUOUpbWxtKpRIWLlwoGSe+z+FwoLGxsWzRY/aVWdBisYhMJiNBgbo4qU0RaJR1XZeFBji2dg4od5pmY/c5hqYbp8cr7a38LvX8DAYDzjrrrDkd73TFoUOHJPPd19cHr9crErFPfepT+OxnP4sLLrgAF198MTKZDFatWgVd17Fo0SIYDAYcOnRIAtFFixYJmWY0GvHv//7vZc+hVCohFArh/vvvL8ves549Ho/jgQceEOnYxMQEzjjjDGQyGQCQDGksFsP27duxaNEi/OhHP4LP50N9fT2SySTC4TAGBgbQ3d0Nj8cjATTrl9RsJRd82ioGo5U2jMx+e3s7Ojs7sX79eni9XrhcLtTX14tjxPKJsbExxONxORfOAQajPB/OvdnklMcLjtGJiQkMDQ3B4/GUSdtJGFmt1rKmJD6fDxMTE3C73XjppZcwODgojk9fX584mHxuLpcLF1544Sk77zcqzj//fGzatAkXXHCB2Em73S4E9OTkpNTUeTweLFu2DEajUcoNOIZpkyoVJs888wwee+wxPP744+jt7S1zdEmOnA6O7esVc7m3qi3SdR09PT3Yv3+/yGUZyGWzWQnAXC4X7Ha7ZCNVv482PZfLwev1lilFSMaRoAKmMu+0R1Q+0U4WCoUyaW8ymZRzImnCwDCTyYh8lXWwDGoZBLPcgA2QGBjzWpmdVQli2i3eB9og2s9SqYRoNFo2ns8666yqyry5gsc9Xmm2+kwbGhpgNpvLAk71H7OsyWSyLOPsdDrLzl39PH/ntbMul8/51UYtQH2do7GxEUuWLBG5QyaTEcfa7/ejVCqhs7NTMmEmkwn5fF7qUTmY0+k06uvrZUKPjY2VOSkmk0kC1FMpWZzJIKvZMU3T0NnZiTVr1mDDhg1Yv349zjjjDKk5AMonpxp4XHzxxafsfF/ruPbaa3HddddNy8qqwSF/p1RSzV7b7XYxqAwqyehx4clkMiiVSnC73eLcOhwOlEolxONxWZjUz+fzeUxMTKCurg4jIyMYHx8vW6Q4XhjQVpNMquc+k+RGDbwrodZEzoTpg
uDKv5Nhvv3222c95umIyy+/HDfffDO+/e1vS01Pe3s7DAYDdu3aBYPBgAcffBDPPPMM/va3v+Fb3/oW2tvboes6vve97yGbzeKBBx4Qx3rnzp3IZDK49957sXXrVhQKBdxwww1lz8Pj8cBsNuM3v/mN3EuDwYBPfvKTmJycRFdXl8ic6uvrJTvJ9w4NDWFkZATDw8P4+c9/Lov8/v37hUkfHx/HokWLRAKlNj1SSS8y9JRxVXa85HcycFUzr8zk8jixWAyjo6OwWCxoamqC1WrF7t270dXVhUgkglQqhdHRUcRiMZEq0wmkA0jHbi4SYf5dDUz4fxJS0WhUmqZU1s5yXvK6PB4PhoeHAUwFNNlsFiaTCdlsFuPj49KIhESp3W6HzWZDJBLB+vXrUV9ff4pH5+sXF154Id7+9rfjkksuwUUXXSRkRaU9ps2uq6uT8caaPJvNhpaWFjQ3NyOdTuPgwYPo7+8vq4sGyh3iQqGAQ4cO4S9/+Qt27NhRJutkhvZ4UMuizg2zlaIwoDIYDBgfH8cTTzyBZDKJ888/H16vFw6HA8FgEI2NjWhoaJAsazweF9uRTqclcKTKiU3faLdISGQyGbhcLpHKZrPZssCVtkgl1BiQkqDisZjRZEDJa6H0lus7g6jK+mbWo6q+rAoGYKr8nPeskpRRSyZ4DuoaM9dA02QyzdlXmA5q3emSJUuO8UlUP4KNQFkaxbnodDrlHCpJU/X61HnLHiCvJmoB6usYS5cuRT6fx9jYGFKplASUZrMZwWAQDocDa9askW0d3G63SCJU2VqxWERfX5+wsZRa5HI5BIPBMuklUSlNq4YTZZOIStkb/686T6tXr8aZZ56Js88+G2effTbWrVsn2eMaysEOgUD151bNMGuaVkZmMCj0eDxyDBpLZlYp5Y1Go8L0TUxMSAaUiyYXo2w2C6/Xi3g8Dk3TkMvlpGFNqVSSmlMuMoFAAJo2VXOsNsKq5jTNxPjP5GRxe5yZUJlVI9RMvupUvFYl5zfeeCMymQwGBgZw8OBBfOlLX8L4+Dje+973olQqYevWrfj4xz+OUqmE/fv3Y/Pmzfjyl7+MPXv24Oabb8aqVauk++Py5cuRz+fx0ksvwWazwWKxoL6+HsViEYsWLSpric8s/QMPPCCSbmDKCb/qqquQTqdx3333QdM0qYGmhC2Xy6GrqwtNTU3o7e1FQ0MD2traMDAwILXPe/fuRUdHR5nqgraDWUM6ILSrzAyoioJK+8RML1DedVzXdamVamhoEBmtzWbD5Zdfjk2bNqGtrQ0ejwcLFixAqVTCtm3b8OyzzyKRSCCdTqOvr0+6oRsMBpHTM5Og1iMRanDMczUajZicnMTY2BgCgQBcLldZFplrCR0gXdele2Y0GkU4HMZzzz0nzmyhUIDVakVjY6PIeRmksk5saGgIBw4cQKFQwJvf/OaXd9C+hnHxxRfj4osvxiWXXAKTySQd9g0GQ1kXfvV50unmP6vVKlJtZsro+M+fPx/19fUYHBxEf3+/kLl07jlWeexYLIZHH30UW7duRW9vr6wBtUzqKweVlEilUpicnEQikcDGjRuxYcMGOBwOWK1WpNNpRKNRjI2NYXx8XD5nt9vh9XoRCoVQX18Pp9MJq9WKVColRHIul0MymRQbBUBsHuW2lU2UaBtpC9PptIwfm80Gq9UqtlQFxymDYjWoVANNdUyTLFOzgWoZGv9G2bLdbofFYkFdXR38fj98Ph88Ho/87vf7ZZ7wHs+muqoESUOuGyeSvKlUMaxcuVLuRSURCkAaSJFAdTgcyOfz0kSSx1QbPqlBsPpdrzZqXvrrFC0tLbBYLOLkcHKFQiHpImm1WjE+Pi51X2azWeqMNE3DvHnzYDKZxEBls1m0tLTA4/FA0zTJhJFpUZ32uQSApyLTOpd9qdRA1mw244wzzsA555yDjRs34uyzz4bb7cZll1120ufyWsW11157THBazUBV+xvfOzw8LAFWoVBAOByG1WpFe3s7xsfHYbfbRRZO54ayHhpTSi5VaY0q92X3wVQqJc1V1KAhEAigoaFBZOipVEo60VU65bNd12zj93g6VVcDF1RmpNSg67WURb3iiivQ09NTdj8YwNC5tVgsmDdvHjRNw+HDh7Fo0SKYzWZ0dXXhtttuQy6Xwz//8z9j6dKlQjz09/ejoaFBauR5rM997nMAyuXVbrcb11xzjdgTg8GA5cuX46KLLsLOnTvR19cni7TJZMKvfvUrDAwMAAB+8YtfiFO3d+9e9Pf3Y2RkBPv370dHR4ew/VQJMCvAvzGTyvFLYoVBGR174KiDQ2nd+Pi41HGSxS4Wi2hoaCir+eL52Ww2CW4zmQwaGhpwwQUXYPPmzcjlcshms+jr60NDQwMmJyfR19eHkZER9Pb2YnBwUBQ0ao1XOp1GKpUS585gMCCVSiGZTIoKIhqNwu12lzXOUyXOxWIRNptN/k9ZISV+zc3NaG9vl+uj5JROJqV9bIplMBjQ1dWFdevWvWLj+HTHypUr0dnZibe85S3HlCt0dHTA6XQiHA7jyJEjUkPIXhLVglXaVmBq/tjtdqTTabHHdrsdTU1NaG1tRaFQQHd3NyYnJ8ueP8eA6rgzq/rwww9j+/btZd87G2pZ1ONHZR0hy136+vrQ3t4Os9ksc5xBJNdgv98Pj8cDu90uJNH4+LiQHZo2tdd9NpsVUo7jgiqKZDIpz7enpweapiEajcpYoY0gkcXAijZR9d/UNbqSDGctv9lsliZ0Xq8XPp8PbrcbPp8PwWAQfr8fgUAAfr8fwWCw7F8oFEIoFILdbofT6YTL5ZKuwmowzPMg0UKJ7PEGp4S6vc+JJkYqSewVK1YgEAgAOEp6MgPK+5zNZnH48GEkEgnZRofb+KhKQq4x6rlVI9ZfDZy6XWdrOG2watUqpNNpjIyMyNYGRqMRDQ0NKJVKkpGgzt7v98Pr9UpBOpkZatotFosYNbUpxtjYGBobG6W2BQB+/vOfSzvu6eSRxMlmjOa6+KlZKvWzwNTEZy3crbfeiq985SsndU6vNVxzzTVzNkTTPVOylGqAYjabEQqFkM1mRUbGjbs5vtR9VGkkuRiyBTydfjZJqCbXdjgcsNvtyGQyaGtrk2ysyWRCZ2fncd8TVfIzHWZTCMwkpVTHLe/daxHd3d1Yu3YtDh8+jIaGBhw4cAB+vx+Tk5M466yz8NxzzwGY6ur761//Wq45FApB13UcOXIEjY2NGBgYEMZ+4cKFePvb345HHnkEN9xwA7761a9i3759+OMf/4iLL74YF154IT7xiU/g+9//vtzL1tZWHDx4EIlEAi6XS57f29/+dqxfv16y+clkEn/+85+xfv16/P73vxfmv6GhAb29vaivr8fo6CgMBgPWrFkjDbtUgk/NpvIZsyZLze7SKVOz5RxPVqtVnrsqKaNj5/V65TuYEeN2LbTXDC5JQC5ZsgTxeByhUEjOtampCS+++CIGBwdhMBgQDodhMBjQ0dFRtncgt8ChA0SZnKZpqKurQ3d3twQkvBZ1ywS/34+RkRHouo7h4WEMDQ3B
bDaLE5hIJNDT0yOBD+2B0+nE0NBQ2fF4D7PZLHp7e9HS0gKr1fqK7Jl6umHlypWy/jLLtWfPHnHy16xZIwGjyWQSNdPevXuloRe38KLKBDi2YSAJEpIGJE5IcFssFsyfPx+5XA4jIyOyDRTr4SrlhrTnk5OT+Mtf/gKDwYA3v/nNs/oDNcwdtA98piwJYPKgrq5O7I3FYpHeDpqmiZKJe8Qz2KSqgx341caE6ppFCandbhdCq1AooK6uDj09PfB4PHC73fIaZaeV0m9u1wWg6thQVUaVwaGqaFGJSQBlRDn9CG4jQ1tLRcjevXuxcOFC2avVYDBgbGwMAIT4PueccwCUJ1TUcT4TuC6cjK/L+6J+H9c9t9uNI0eOiL+jbh3p9Xrh8Xiwbds2dHZ2orGxEYVCQfrN0KevzMJW+iivJmoZ1NcZuBm6rh9t9W8wGMRJs9vtCAQC4lRpmobBwUFhWILBoBSJV7Kv3L+OkjlVnw8cm3mbaYBAjSQgAAAgAElEQVTPlUk62QVtNtZLlQMbDFNbWrxRcPXVV097b44n8NJ1HaFQSDbbptR7cnJSnCkaQ03TJFPPsUPmkk49SRVme1gXw2CVhtpkMsHr9cJut6Ourg6tra2w2WySxeE4nS4jPN0YVOuTZxo7M9WVVPucGlyr310ZsL5W8JOf/AT33HOPtPdfvnw5rr32WmzatAkHDhzAueeeK07BpZdeKsEHHaJSqYTR0VGUSiWYzWY8++yziMfjuPDCC3H++efLGAmHwzCbzRKEtbW1HbONhcPhwPXXX18238PhMDweD4ApBv6vf/0rNm3aBACiCGlra5OmPYODg4hGo/B4POKsqVlR1kSzHkrdZoYMOd9fub8uxzrfT5kc/0WjUWQyGQQCAQkamTnNZDIIhULI5/MwGo1IJBIiSeOG7NFoFE6nU+q5qVBYsmQJNm/ejE2bNuHcc89FKBSSPQZ5XgxK2cGdsuBIJIIjR45IAzJmxvnsmO0dHByU39k4R9d1yTwcOXJExoHD4ZAsBYMuzk8GxSSv6DSbTCYsW7bslR7erwrWrl2LBQsWYMmSJQCARCKBeDwu953NatLpNJ566imxI6otWrFihdxjrm1er7escZdKnKj7AbMnAJukUCXAYLeurk5qhCs7faqkHc+LY+Uvf/kL9u7dK03KpkMtizo7VGmnyWRCMpmEyWSCy+WS52g0GjE6OiqZx1gshkgkItvAcI5RzVFfX4/6+nqEQiHJuFI5R1A6PDo6WlazyG7lfLYks7huq3usqplK9XpUqHaTAR7HLoNNtbswFVhqjSrr3GmXGCBzhwCSMgzcWQ+rZjp5XpWETuV5zgT6KSeDmXwCj8cjJU2V58rzO/fcc9Hd3S0NBxnYq8c+XX2QWoD6OsKll16KbDaLdDqNXC4nwYLf74fNZoPD4UBzc7M4BRysNESNjY0i62AWym63Sx0gtwOxWCyyZyGdS36WA382qE7OTJjLZJlJKqwyTtMdW+1+qeuvLYnlieDqq6/G1VdfPWOxf7XAfrr3cpwYDFOdnrkQqftMMlCMRqMwm82YmJiQ50/WL5lMlkkFuX8ZGypwkQGmiJhgMFi2j5uu6zh8+LB8J4OO45H2Vr423Xtm68hXKZdR75XKTvP1yu853cfgu971Ljz22GOwWCy4/PLL8T//8z/YvXs3li5dive///3YsGED7rjjDrzvfe+DpmnYsmULli5dCqPRiDvuuEM61951110wGAyyldU999yDaDSKAwcOIBAI4Oyzz8bBgwfhdDpx2223YefOnSgWi/j85z9fJhdvbW2F3W7H1q1boWmadJBVybXzzjsPTz75JP70pz9B0zTJSFmtVgwODmLHjh3o6Ogoq7NkgESnjzZVrYNSgwE6jZXPlM+a2Ul1Cxn+TgeR2Syv1yudKmOxGHw+n4ydSCSCZDIJp9MpDhbrzgKBgASEvBa73Q4AaG9vx/z588uyDMBUd+FwOIxCoSBNUdra2soCGLXWS9M0tLS0oKenBwaDAaOjo9izZ49I8BoaGjA6Oop9+/bJtS5evBgul0uaLQFTDhabqVFmrM4POtgmk0mCttcbNm7ciKVLl+Ktb30rSqUSbDYbwuEwRkdHZb/c8fFxIYgZpBaLRTz11FNl2eVK+S4AIeocDofsS8v7r5IplVkTt9st7ydBSJknxwN9B5XUY1ChjhWjcWq7ur/97W/485//jJGRkZNqHPNGBe2K2WxGNBrF6OioZPz47HlfmYhIJpNlQa3L5UJdXZ00YGOpVzKZlOfJ0oVkMolEIiF7H9tsNiExqPoYGBiQ8cjs6djYGBYsWCBrczgclu6+AKQnCn2CaDQqgXMmk5Gxw+1oON5oZxmMqjZUrb0GIEE0A3E2WNR1XfwKjlOqWlQUi0VRL6ioVvc507M62UzkTP6tpk2V4pFEBI4tP9J1HcuWLcP+/fuh67q8lz4bj6P+5OfPP//8kzr3k0UtQH2dwOfzIRaLCTNFaU4wGJTOa9wfslAooLGxETabTdrO67ouW84AEAZ+3rx5Un9AXX4wGJQJzbbgNDjFYhGHDx8GMHMAwEV0NunDTJObn50tG1vtdZVdnk6H/3oDA1Pet9kyy5XPb7p7w4XDbrdj//79UjOaz+dRX18Pm80Gm82GYDAoe+6RrWRgSnZU7SbILWO4ILEeJBgMihGm7LdQKEiQqutTjXCYoanGYKqS5ErMlRSZ7vOV96kaSaJmGOZyjNMJGzZsEOl2MBjEkSNHxEbwuVxwwQUwGo34wQ9+IDWGS5cuRbFYRDgclm6EXV1d0DQNX//614WsCAaDCIfDuO+++3DgwAH8y7/8C2688UbpNPrkk0/C5XLhs5/9bBlpMW/ePNx7773Ytm0bvF4vdH2qEccf/vAHmEwm/OxnP8Pg4CBSqRQCgQCGh4eliZzL5cLatWvlGulkqzWm/J3OEe2d6mhXOvn8nYFgLBYTB4yYmJiQYJMOWiqVwsTEhMhiudcfANTX18s8ouyXCgbOoXw+L/dADWgsFos4imoWRNenOkQmEgnkcjkMDw9jcHAQAMqcQ87Buro69Pf3w2g0Yu/evdK1lx0jWU/OcdHa2lqWcdE0TbYo0zQNPp9PnqPaWIm1cdw3tqmp6XWxHc15550nHfYHBgYQjUaxb98+ee7MYnPcUUbJzBD/MUjs7u5Gf3+/2C61sQrnwfbt2yWDxn1qWSc8nRPMscznw3We56HuWb1v3z6Ew2EA5eU71SSYu3fvxtatW/HII4/ghRdeKPvOWhZ1ehiNRslkDg8PIxqNoq2tTcoCgKNdWVkWQFKBxAJJj1gsJs+JW7KofSEoDXa5XNLRnAFeMpnEwMAAUqkUFi9ejHg8LiUT/f39ACD9IFj3yIBXXdMZEKtrrkp2MBgnWchmSrRZJClVMpzXXylVVb+De6w6nU4JwnVdFzUQfQgeV73/s4GB6amQtM/FJy2VSliyZAkCgUAZQaHC7/dj7dq1ePrppxGLxWTeWq1WeT7VcLLB9cni9PWCapgz5s2bh2w2ixdeeEEmFAvJgSmHRt2vklI
yLlxqjRADjWKxKM6c3++H2WxGoVBANBqVeiU2tiDrS+zZswfAzINblRTN9J6TddSrfb5SQlJ5rq/HLOrVV18t/1drSas9o2qGdaaAis+QzCodbKvVimQyidWrV5ex9Oo+kqyhAyBOdC6Xk9pU4GidJrOlqrSHMmAuUtxyw2q1IpvNSh115bWczB5fapas2muzGXV165vKY6jz4mtf+9oJn+PLhfvuuw+XXnopLr74YqlBvOyyy3DJJZegVCrJQg8czc6xwc+DDz4oz3/lypUolUr4zne+g0wmA7fbjQ996EPweDy48847YbVa8fzzz+PKK6/Eli1bkE6n8ZnPfAZmsxmHDh1CPB6XDeLV+51IJLBhwwYpQXj88cexefNmfPOb34TFYhHb9+Mf/xjRaFTYdWaIgKPkDANuYGrMcIsXysnozKkElxr4kQCjk2UwGKSbJr8rEolgYmICLS0tAKbGRjAYLNvKJZlMwu/3i+QyFovBbDajvr5ezo8Brs/nk469VC14vV6psaJTqt6ziYkJ2bKJ16yuA7w2yoIdDocEr08//bTIls1mMxoaGjAwMICuri4AkC0tKOGnbWhqakJ9fT3mzZsn58EGJwBEtpdOpxGPxyUDomkaent7X3Vm/3ixadMmrFq1Cm1tbVi8eLGUMbDpG531WCxW1iyKz5fZeofDIc1e2AyGjjQwtXUSM9SVktv169fD7XZj+/btOHToEIrFotTiqe+vBMewzWaT7yORAUAC1SVLlsDv9yMcDuPQoUNSakQCvLJsgj+Hh4fxyCOP4KmnngJwekkMTzdwn/re3l6sWLECnZ2dEsCp9ZyBQADhcFjmkUp2cI1NJpNwOBzweDzw+/1wuVxSV8xnw7pjANK87siRIxgfH0cwGERra6t0SAemAj+r1YpAICB7k3JOs5cJG89R6cdzpy0l4Uz7SrvDxm4AJPup2lpeu0ogqkpBkl70E4aGhpBIJDA6OiprLgNbzrtKOzOXgI3fNZNvOxvUxnpzga7raG5uRmdn5zHKLLX2/Oyzz0ZPTw9SqZSoagDI/0/2vE81agHqaxyLFy8WVos1SqwhMRqNIhOjU06GvlQqweVyweVyycKfTqexevVqWK1WYeHVzn5sWkN2nkEp981SC9aJ6SZYpcyxEjSS08mAjmfiTvd3GkJKfCuP/XoIUj/4wQ/igx/84DF/V53qaqiUKKoSxmqgU5LP5+HxeJBOpzE2NiYSHXVPNDqzat0IndxYLFYm6dU0TRZQBhgMSihfpHPHRUuVmvf09ODd7363SCt57TOxm7ONrcosWeVrlfetmnSI2aLK45zOmfwbb7wRVqtVZJwWiwU33HADXC4X3va2t6GlpQW//e1vJYibP38+brnlFnzqU5+SZ6frOnw+H+68805xXqnGWLRokQRknZ2dsNvtUlecTCZx55134tFHH8WHPvQh7Nu3DwaDAVdddZU41waDAV/96leFGNm3bx8WLVqEe++9F01NTchms2hqakJ/fz82bdqEs88+WzILzCbREaJNU52nYDAosihmkICjhIPqFHFckszj/9nRUdenGgrZ7Xa0t7fLGAYgG61TKcCaVLXxGPsEFAoF+P1+mUdsShIIBERdwGZPbrcb2WwWwNH5TNvPBjtGo7GMVOIWDSSVLBYLhoeHEQ6HsXPnTjk/bsvw0ksvyflRKsy1Ra13AyB1vn6/X1h8HotzuLKmKpPJIB6P49ChQ2hvb3+lp8BxYdmyZViyZAmCwSC6urrEHo6Pj0sdH5101YFWsy/sH8GupGqdYUdHB5YvXw7gaNdTOuRerxcDAwNl9oXE2jnnnIOOjg7E43GRzFP+W0m8qTJFPgej0Yi6ujp4PB44HA6Z72x+FQwGJat38OBBjI6OVl07Kn+Px+N45JFH8PDDD2PDhg0v45N57YLE0bJly5DJZKSfA8cLfTzaVeCojNtut8Pn88FqtQrJOzk5KQQQg0NdnyqJGBsbQyKRAACMjIxgcHAQuq7D5XKhsbFR7BLtCMknqupY48/GbixHUH0Jfi/nAdVXHBvFYlEIN6ox1KZ0fI/aYVwNsiol6BaL5Zg9P1VFGecPa2XVLr88p9nW5uPZyWK6Z1wqHe3+fjyfc7vdYiM497k+cd0644wzsGvXLvT398taTgLqdMPp5wXVMCdcd911eO9734sFCxaIsWAXNzpb7e3tZeyVx+PB2NiYbKpO1p4bd/f398sEVRsqMEAlyFxxvyvWSdFAVrJWJwI6rzPJSueC6bJcwLG1BK+2nOFUg4Epn2MlpjOAldIY4Ogzr3Y/OT5aWlpENul2u2GxWBCLxdDS0gKv1wtgiqmLxWLI5/OYnJwU+SE7/NKIsna6rq4OFotFyA8Gusy2Ug5YuVCybs1ms+Gqq67ClVdeife///1l7Gg1ppDByEyodt9mWrzU7FplvXOlM1j5+dMli7pu3Trs3r0bd999NzKZDC6//HKEQiF84AMfwHe/+1388pe/xJvf/Gbs2bNHMqVUZ3R0dOCWW26RbVIYaPA9dC7URhurV68Wm1MqlfD9738fpVIJBw8eRDgcxrx589DT04O1a9fi8ssvh8lkwpYtW7Bx40Y88cQT2Lp1K9ra2nD77bdD06bq6n0+Hw4ePCiOzb59+wCUZ71NJpMQfOygq2mabL3Chkiq08MGGyp5ou6Nyt95LcwCVjoHrO8iKRiNRqXbLhveJZNJeDwehEKhMlkaSzqoaInH4wgEArDZbGXZOgbVvAdscEKpF8+zt7dXalsLhQLq6+tFnlcsFkWarWkanE4nzGYzDhw4IHOjubkZdXV1EpxScdPU1CTbn7A+zmw2S00yn0F9ff0xjXZUQopO5tq1a3HmmWe+jCP/+LB582asW7cOjY2NZSQaHXdmb9iVHDjaA4Fjho0MQ6GQEHOq3aJsk1sRqdmnStVJV1cXDh8+XGbTaXd8Ph/WrFmD7u5ujI6OIpfLCVlDwqDSTqm2jHaSQU8mk5EAm+N54cKF8Pl8iEQiOHTokPgHlTgZX+GNBFWOSSUGbRTl1cDU/fT7/eJHcQxy+xfOp2w2i0QiIaVerPtkUBOLxWRvW5PJJPswq2Qcx93g4CDGx8dhMBhEbs5sPhUgXNNVck9tqsRxSltDMptzid+p2lq1Np72Sk1skHRhIK++zuNWbglWDXMZn3NRUM3lGCdzHHbs5bGAo3WpfPYbNmzA2NgYdu3aVfYeu91ey6DWcHL4wAc+gEgkgmAwCKfTiUKhAJ/Ph5aWFmHPHQ4HRkZGkM1mxSHgpMzn8+jp6RHG1OfzYXR0VFgvsqlqIbnJZILf7xdHiROa+wEy6OVkf/TRRwFMX+A9lwk40+vVMlPV3jPd91ZO3Onef7oECMeDyqzpdEX91Z4NJYrVMN0zY+beZDLB4/GIVMbj8YgjuXz5cqljppEksUGpOEkPBgdctACUkR6U+nBhA47uy8hgNR6Pi8NULBZx7d/3ef3ABz4g96NaMDlbAwTVMVNR7b6oY00dc5U/KyVvpxsWL14Mt9uNdevW4aabbsJll12Gxx9/HMFgEFdffT
Uee+wxuFwuLF68GC0tLbjuuuvw3//93ygUCrj33nsxODiIL3/5y2hvb5caed5D7tOn6zq+8IUvSJDmcDiQzWaxZcsWYcwdDgc2bNgg2Zv29nYkk0kcPHgQ3/jGN2C327Fr1y4Eg0G8853vxG233YbGxkY8+OCD+MUvfoFf/vKX0pKfWXoGO3zulVvKlEpTG9STmWa9/fDwMPr6+tDb2ysyVI6nQqEgWYRisShEDK+bNaNqEw+1Ts/n8yGdTss8jMfjInOjImF8fByhUEgkvZo2tS+1z+eTwI9ZWcrdabt5DnTw2PmXc2rNmjVyPIvFAp/PJ/tl9/X1Yffu3eI0+nw+jI2N4cCBA0JUNTc3C3HEzCn/Toc6Ho/LusLOxKryAZgqTXG5XGV1l6qkmvbEbDa/qtnUd77znVi/fj0CgYDspUs5NQkDEmuVAaTNZoPP55NgNBQKSaYJmCKP169fjzPOOEOaGzGbQ1KYQSzHBW0bx1ehUMDBgwexfPnyskAYmBpvCxcuRGNjI0ZGRsoy4G63W/aUrgxS+VmC2SZ2gVblmsyqtre3Y2JiAl1dXRgaGiqz7dXW81oW9VhUEv+8b6lUCvX19WJLOeYoxSfBnE6nJWCz2WyyJyjLAMbGxuT59/f3Y3JyEi0tLWhvb0coFBICgs+W5F9vby+AKcKLKj6n0yl7oJNk5pjh3qRMjhgMBiFxuL6rnf7VYLgyU5rJZKR7P7OpAMTGqeoLtRyJ/5hpZrZY13Wpj6+cs3MBfZUTxYn4AbQnwNQYoe1USSv1vbquY+XKlQgGg/jb3/5W5qOozZOAV7dR0mtzA743MK688kpks1mYzWb09fUhm82ipaVFOvyFw2GkUim0tbUhHo+XSXNpKKLRKBobG5HNZtHZ2Ymuri7JmMZiMdnXlEXkXAx5LGYMHA4H+vv7ZRLQ6QOmnCqg3AmvzFbO1qhmuswfcHTvwdmCielQKX2qJkHiJP3a176GW265ZdpjnS744Ac/KHIeYqZAvhpTR0ayGvg+1ejlcjn09vaiqakJmjbVITCVSomjEggEZGxykaCzRkkis0hmsxlutxsAxNkmk8sxz0WOY5q10blcDi6XSzJzzPSoUqKrr74aGzduxCc/+UkMDg7isccemzbYnO6eqUTPbKgMPCuzpaqjN9NYfTXHX0dHB/x+P0qlEhYsWIDOzk4sWrQIzc3N+OEPfwiDwYDNmzdj27Zt2LJlizgn55xzDo4cOYLR0VF87Wtfw3e+8x2USiX8/ve/x86dOyWgYBDjdDoxOjoq3cB37NgBo9GIiYkJXHzxxXjyySfx8Y9/vCzjZjQa8Zvf/AbXXXcd0uk09uzZg2g0inXr1uFb3/oWAoEADh06JLJVTdOwZ88evPjii+LkNTU1wWKxoL29/RhmXm2ARGaeTYKcTqe8HolEUCqVpFbL5/PJHCEJyC6q7E5tsVjQ1NRU5nCm02kJzJkp4X6DdMZ8Pp80OaJjyuYnzHwYDAaEQiFEIhEAEBJTlZUzU0JCymq1Qtd1qcsyGo0SHLLD5vPPPy8qGWbIuru7RRrs8Xjg9XolUGXXd7L5XGOYZSGhmUgkyvZBZXDEa9N1Xc6JrxcKBUxOTsJoNKKnpwcWiwVLliyB2+3Gs88++7LPixUrVsgWPEuXLkU8Hheyg0oTZgtV51HTNFEzMYjk3A8Gg/B6vbL2UgnCdZiyTVUSzEw0MGVHKNPT9akmL6lUqkwVsmzZMlEiEDwGx2MkEoHL5RL7xL2FJyYmytQfKlQijt2mdV1HLBYrC6xDoRDcbjc0baqzcCwWQ2trqwQJldiwYcMr8jxfK6imuiGhp+u6rI983szYMzOmqt5I6tJv5LHj8TjGxsZgtVrR3t4uY5HrKZHNZjE0NARgqs9AIpGAx+PByMgI6uvrsX//frS1tcHlcuHIkSOor69HLBYrG7scW2zqROKLkl4SZ5qmSbBoNpulbwHtIq9HlSmbTCax36qdVQkuzlGTyYREIiE7VFgsFlHxzDVgPNGglM+rkrg+HpCIUmvJFy9eDF3XsXPnzmPez/WDSpdt27Zh9erVMje5fdGpyAifDGoZ1NcYuMBRPuH3+0VOpBaq09ikUilZHFlvpTJRNAg0XGRgjUYjbDabsEgEnWlmv7ipcyaTEQeJzlClIa3ETLKe6TJVwNFtGY5n4hzvpH+1J+bxghnTSiN5omxcNVTWVnAcUO5Hx9fpdIqskI6sw+EQB5RZTtYtcUwzOFWl5VartUyCw3GlLkS6rkvmjWOGWSy1PoXbMnz7299Ga2srNm/efNzjgsRINUz3d47X6XA6Z1A5z5ubm7Fx40bMnz9f5Nhs6GM0GtHS0oLbb79dmq8sWLBAnHar1Yq77roLsVgMq1atwiWXXCJBGzC1Xy6Asnv0xz/+UYK8hoYGdHR0IJlMYsmSJWKTfv/73+MjH/kIAODXv/41nnvuOSxZsgQ//elPYTKZsH37drz44ouSOYhGo+jr65PaTE3TMDw8jCNHjmDHjh147rnnhDyhE6faO5Vso5NDOWpLSwva2tqkS248Hsf4+DhefPHFsi2VmL0ikcN7AEC2ZFC/gxvNc9xlMhkJHDnf6GyFQiHJgqbTaXnf2NiYyEl5HjyXQCAgMjqbzYb29nZxUNgZXtM02SuWjqDROLVtCP9vtVqFyNA0DePj41KPTgkinUDOB95rvs4O8y0tLSJPNhgMUi/HMUdnrFQqiYNsMBhkvm/cuPFlmQtXXnkl1q5di8WLF2N4eFjkxocOHRJ5JTNMvEb+I1lRX18vEkk+P967jo4OeDwe2Gy2MqUB13JmiTgmqTrhXr+VwYvH4ylTAjCLbzAYsHTpUiEIgHIimcSgajd1XZeu0JXrMu1xNULO5/NJ4xqODbvdDrPZDL/fj3nz5qG3txdDQ0PT+gg1lINjR11zScrShpLIofqCc1cdd5Tw0ucrlUro6elBLBaDw+FAS0tLWZDC9a1YLGJ4eBjDw8PQtClpMUsPjEajNHJiwzXV3qrnqzbpMRqNCAQCZdJf4Ki9YiaU6xFVMCrJw2vgearZT/5f3d9azQLz3MxmMzweD6xWq4z3ueJEpbGVn5vrd6rnzoROpd/KOIH/J3hfqKhwuVzo7e0Ve854YjZF2cuNWoD6GsJ73vMeYSLtdnuZs1xfX49sNouVK1dKNpP7qQFTUiE2q/F6vYjFYuju7saePXuEwRwdHQUw5TCazWYEg0HJwnISNTY2Sh0D67KYFeP7/H6/OIWVsgwV09U0TpfRVD+ndrmshsrPVgsCqgXQ6vvU109nqe8//uM/TvvadEFTtewpMVPWWgVrqi677DJpJsDnzb1O7Xa7OJobNmyA1+stC07ZBIQMsCoBTiQSUlNKCZtKXNChpRyIWyCwERNQ7pAz67pjxw58+MMfhsfjwfXXX4+VK1eWGfjp7pkqjZsrVOetMps9U3ab95K46667jut7TwVWrlwpXUO9Xi9++9vf4oEHHsDg4CByuZw46gDQ1taGd73rXbjpppuEdLj00ktx1113wel0oqurC/PmzcORI0fw0EMPA
YBkd1wuF3K5HBobG+F2u2VRZF39tm3b8OlPfxpr1qyR+5hMJvHwww/j3nvvRTgcRjwexznnnIPvfOc7iEQiGB0dxcTERFkAQ+dlcnISY2NjiEajCIfDmJiYwPj4OMbHx7Fz504899xzeOaZZ7B792688MILUuZA8P/q/GHWlfsLtrW1oaWlBY2NjdIXIJ1O46WXXpKmdpxPyWQSk5OTiMVikmFVtztgkJNKpcoaFgFTtpqqlmw2C6/XW7YuWK3WMpkmz5eBMedHMplEf38/wuEw/H4/3G63SPx27NiBsbGxMlKT9bgkpILBoNxfztuGhgZxcij3M5lMsq8mic1oNIpkMlkmDybpwbHEulvWljPoIdhVOR6Po6+vD6tWrTolc+CKK67Ae97zHlx66aUwGAyiFGHGmXJqrmWUMnL8BoNB2d/W4XBItmvZsmUwm80IBAJC+JCQYHaJY0zNGnM8sJsz6/s4n2hPeH/UYEaVT+u6jsWLF2PFihViu/mdHG8MMOjY53I5eDweeDyeMjKJdrvSYeZ52Gw2aapI8pxZd4vFgvnz5yMYDGJsbAxjY2Nlc+3VdI5PRzCAUO8zxwYVcur97ezsLGsiSH8tl8uJHdJ1HUeOHMHIyIjIQ9l4TSWBgamu4729vSInj0QiMJlM8Hq90lGfBB/rTnVdR0dHh4xVji11izjaDpfLJfubcz6xwRybMDFY5dxQpeycK1RdAUeDdd4TEpwkzRjs8r5xXZiJVFah1sAeL/i5SkXVXKESDsDRYBhrcUoAACAASURBVFf1UxYvXoyGhoayz1Rma1etWgW73Y4nnnhCng/jjFdzDmqnA0OlaZp+OpzH6YqPfvSj0PWpwvHKAVksFtHU1ISRkRE8//zzIhGKxWLo6OiQPacaGhqQTCZRV1eH4eFhxGIxGbROpxORSATpdFpYMAYFAwMDaG5uFiPn8/nQ1dUlE11tE07WnXsk1tfX45xzzpnWEZ9N2jjbROXkrjZ2qn22mhHgz8pJO915zSS1fKWzrgxMpwuqZrp/lY4uMdO1q1lrLgDbt2/H2972NiFEhoeHhX232+0YHh5Ga2urODlPP/20MK+U0pDd5RYgXLjYXZLdT9PpNLxer9T7lUpTnajj8bjIdQBIbRYA2QPYbDaLw8W6F6vViquuugpvfetbYTab8cADD8z4/GYab9Xut8oYVwamPIbaYW+mZ0V84QtfmPZ9pxKapskWBv/3f/+HXbt24cYbb5SGbHfeeSd+/vOfY//+/bjxxhvx5JNPYvv27fj617+OhQsXypZUX/3qV6VBVjgclgCHAdD8+fNhMBjwkY98BD/4wQ9QLBZRV1eHSCSCYrGIRCIhjnw2m8VFF10Eq9WK++67D6VSCRMTE2hsbEQymURTUxNKpRJ27tyJXC6Hw4cPy7YuzEyyUQadFXYjJrGiBgHMdFV2kl20aJF8Vq1xIlQyYnx8XLJqXq8Xo6OjUm9VGYhwyxWV3SfUPgLJZBKBQEBeJ5nD7o08XiKRkEYo6rhlEMl7w/ozXkdDQwMGBweRz+fx/PPPi4qBYzQWi8lxQqGQBGNqs7y6ujoJxjhfKdHntU9OTopz2NjYKAQnnV1ez8jIiHSCZUZYrUnjudDhZKOrYDCIp5566rht8pVXXinlLKpDG4lE0NjYiJ07d4oskjbUYDAIyUaZnDoegKmMpt1ul+75L730EhwOB8bGxiSD6nQ60dvbi2g0WtaddenSpSgUCujq6pIAAIDs3fvcc8+J7DEUCqGhoQEmkwm7d+8Wu9jS0lK1rpPXuHfv3qp2iM7//v37JXjhek+lCu11tc/TvqmqFzb4oiJGVSXk8/myPXEB1GS+mJrTH/3oR+U+qWOT5MPQ0JA0xlQJCrUBGgkVXdels7TBYIDX65Wu2xxfnF8TExMyB0lOAVN2Sa3tBKaeWTAYRF1dnagfGATrui7KGuDomqqWNKgBMe1EZdMjlhkA5eVe9Ak0TSsbR5wDtM0k11gHbrPZpJTB7XbjoYcegq7r+NGPfjRjM0n6Xur6MFecSEBLkKSgrHcuYP23apu43vH+TUxMoKenR/oQ8Dk9/PDDJ3yulfj79+/QdX3WIvNaDeppjuuvv14Go91ux8TERNleVSaTCZFIRAzW+Pg4mpubZfN3bvJOqR33iKQ0QteP1okYDAZpvsTJ7XQ6MTExAY/HI4ZFlVyqTCgL41mvNTY2BmDmmr4TxUwZQLXxkfr+yuyp+rdqr1fD6VKPOlPWVMV0wY/qHKuYydiqAS2zFW9729tE3sM6UNbIZTIZNDU1SaMWEhrsFkfHXd3bjPsd0jAajUap7QIgDCodYtaaUjqoLth05BiY5nI5cSKBKeP8k5/8BA899BDuv/9+XHfddXjwwQdnzCxPR4ZMJ+9hUF/tmHQEpltgOL9fLVxzzTX4zGc+g+9+97vYsWOHBC6tra341re+Bbfbjf/6r/+C3W7H1q1b0d3djR/+8Id4//vfL9trfPe734XVasUdd9yBgwcPIpfLoaGhAfF4XJpg2Ww2NDc3Q9OmamJoUz796U9jz5490DQNf/3rX2Gz2fCrX/0KkUhEnj87hLKmdffu3UgkEgiHw2WlCbz/3JtP13XpfMsADjjqELMDNXB032AqAQ4cOCDOUKk01flx/vz5kvlT2WxVtl4qTTVHAqaCBWbduLF9f3+/BCXA1Eb33BaBW97QLjE48vl8ZU3tSPzQKWVWhPfA7XYjFothcnJSshqs76aMdnh4GCMjIxgYGCjL3JrNZoyOjgrJw4ZomqbJvKyrq5PsncViQTweF0KC3btLpZKsIwBkKwrWrJvNZglsx8bGYDAY4Pf7MT4+jlKphEAggEKhgImJCQmO1bphNqBS5dMzgWus6hRT4cF1Tq0bXbBgAXp6egBMKZe4HhOqdNput2NsbEyIXHWuNzY2IhaLSWA+NDSEjo4OuN1uTE5OSt8HljYwo8WsEZug8DuZQfJ4PGUkHa+JY1GdEypxsWLFChgMBuzbt6/Mnun6lPRx1apVKBaL2L59e9kettxjnSQQ5eOqckQ9FgDp2EzFgN1ul8+oARIb4NQwBQYTXIu41lE51NTUJKVWFotFmrV5PB7JnFLazV0b5s+fX1Zvr6rTuA8qlUpstGa320W1UEnsc61Va+9TqRQaGhowOjoKn88nJDUDWJYmuN1uGAwGye6S2AMghBvtZiaTET+WpWocc1QRFotF6RTPNYPBGGX42WwWkUhEyOxisYh/+Id/wE9/+lN8+MMfxo9//OOytZu2nOenEi8n8jxPxDfms5prqRvvYyAQkHiBf1fHk9vtxqpVq7Bt2zacddZZQtS9WqgFqKcxrr/++rKAgWyPupBygIVCIaxduxbPP/+8dEQMh8NobW2Fw+HA0NAQGhoakEgkUF9fj3w+j6GhIWQyGSxZsgQWi0UK5bnAZLNZaZgUj8fFKaADQqkVWfRAIIBEIgG/349oNCoLy0wy02oTtFqAWQ3TTe7pmFx+J9+jOm6VE3YmvJpBarXAdLp7NVMN73QGlfdupqw0pbV1dXVIJpNwu93I
ZDIiJSRRwmPkcjkMDg7C5/Oho6MDhw8fxuDgoJyH6uCxHpHnx8wOf6dsh+DCw2fJzK363VzQuKjyHDkGdF3Hxz72MbS0tODWW2/Fww8/jIGBgaq1IdXum6poqPy7ShBUyqVUmVK1IPXVlNYAU+PngQcegM1mwy233IJMJoNIJIK7774ba9aswQUXXIDu7m7cd9994qTeeOONePTRR/H000/j2muvxde//nW89NJLAKYc4G3btmFkZAQtLS0477zzxLbcfPPNcDgc6O3txec+9zlEo1H86U9/wic+8QkkEgmMj49j9+7dYqMoG81msyIle+GFF6RJHJspMbMPlMv4KRWlw8axWI2Z5vhiQEWWn7WkzGzxnmmaho6ODmkAQqJEfZ509DRNk4wUiZSJiQkAkOBS/TyDAn7/xMSE1Lh6vV6YTCbEYjHZi1hVRDidTpRKU83yli5dilgshlwuJ/trshbt+eefl6yZpmmyr2wikZCMB5voaJomhBRrHvkdqVQKgUAARqNRalkLhYLYhkAgILbGYrHA5XJJ1npkZEQCULWe3G63l3UKZR2mKuGORCJCmE2H66+/Xv7P50xnl4E65zWvX9d1DAwMSNOnYDAotaJsrpJIJOD1eqWJFu1jsTjVzdnn8yGVSkltrpolJeHBjL7b7cb4+Lh0Zq5sdEN5NjMdvH46r+l0WrZ6KpVK2L9/P5YsWSLXrTrX6pxfunQpjEYjJicnceTIkWMUH+eeey50Xcfg4CDGxsawYsUK5PN5keqTfFADTjXTrQbqBoNB1ADRaFQCVTreJCtXr16N3bt3T/s83yjgvaOPxvFHX021YcViUe4piVwAsr+p3++H1+uVIFNVJAAQ/zCXy4nawev1StALlPugrGVkB3Fgaryw6RY7kQNTBAVtKtV6tGd8j6raoCqCY4L2gOfMOcUOwBaL5ZixB5SXh/H8DQZDGYlOAvymm27CPffcA6fTKfOfz6DyecwFqopA/Sx9guNd74+HvOa5z58/X1SQXEPUunne09WrV2Pbtm1405veVLa10SuNWoB6moILaCX7SEaWbCgXVGZWOzs70dPTg4ULF+LAgQMAjp0IdLQow2VdAgBZ/MxmMyYnJ2G32+HxeGSTZho+Bqf19fUIBoOSbaXzRsfi0KFDWLRo0TGTT128KjFTgKliugmtMnqVQYW60KrvUT9bzVioQcXx1iGeCrD+uBpUCWIlpruPdCCqZfX4s1qQSgezublZZHh0srhFUTKZRDabFcdO0zQ0NjZieHgYNptNSJP+/n7JEAGA3+8Xp43XxXGmnm/lc1Vl2gSZeavVKplWBiJcgOgIcrEdHBzExz/+cdx///0wmUy45557yu5pJXFSueBUQ7XsAf/O86/2XPm3yud61113vWIy33e84x3YtWsX9uzZg0KhgEAgAK/Xi3POOQc33XQTvvjFL+LAgQNoa2uTexsKhXDFFVegr68Pe/bswa5du8ThHxgYwLnnnovnn38et956Kzo7O7Fr1y7cf//98Hq94iBwa4KrrroKX/ziF/HZz34Wjz/+OCKRiDi/3PKK7PrevXtlqwMA8nc2z+J9VO+7pk01nqHklCw8bRftpTpP+LxIrDAwIHgfent7y+wlALS3t0tjEFX2yACFGTI6na2trTIHGCj09fXJfACmslfsaKxmDLltGLMrvIbJyUmp6yKR43a7MTIyIscHjm5LQQadc9Lj8ZTJ8lOpFNxud9l1sRlfKBTC0NCQSJrpSBqNRjQ0NIjMLhgMwuFwSId5yg55n0mOOhwOyWIz+LJarUJa0Aax83Al868GpUTl/KRdYHaG1+RyueBwONDa2lq2RU5rayt2794Ni8Uia+Xo6Kg0AuLx6QiyM3EikUBdXZ3I3lOplNhCBhoqgcXnp65DwFTts9Vqlc8YDFN7ULa0tJTZfhJ06u8zlRYUi0U4nU6sWLEChw4dkuev2rDm5mY0NzcjFovh4MGDWL9+PXRdl+woiXI166cGDfwbf/r9fmiaJvJflmWoNvqNDtoM1SZRGQQA0WgUXq9XSg8cDoeohhKJBCKRCGw2Gzo6Osr2rQWOkr2HDx8W+8dO2V6vF4lEoqyviLr+sXsw53epVMKuXbuwYsUKFAoFeY5U8XE/dJJdJL1VIsNgMEipAANI7vnM7v18jdle2i0ejwQhUE4Qq9lW1RdVxzd96ssvvxw/+9nPylQBatZxrqj0w0kmVZYAzQWVvttsZULq+/x+vwTdal8R9Xrsdjs6Ozuxbds2tLa24tJLL8Uf/vCH4zrHU4Fak6TTEOpCqhZ4A+VduYCj0h4A4kAWCgVEIhF4vV6RjdlsNumsSK09dfdqPanNZkMsFhPjxcWT0ksaNJPJhFAoBJvNhmQyCavVir6+PpH/0oHhNgSVqGy4U4nZJuxMr1dmmCuNSLW6S2BmeSudBV7LK9m05n3ve9+swXg1QzndZ+iIVfuMasCrvQZMySh9Pp90LAUg9aIMBBmEAsCePXtku4xQKIRQKAS73Y62tjaRw4VCISnMN5lM4rAyg0EHTNenpIuNjY1SdxMIBNDe3l7GKjOg4jYf3HKBDVzYQZQt7NVGJDfccAN0XccnPvEJrF69WoKOSsy0KKiBfrXX/n/23jy6zerMH/+82i3JluRNlnc7cfaFQhMIW8vSshQKLUvLvk9L6XTjwNAvpVBmKJzS0s4phU6nlClDD8uw/UpbwkyhrEOBhBBCYjved0uytVqWLNl6f3+Iz5MrRV4SaKcLzzk+cWTp1X3ve+9zn+XzfJ6F/j7f+gTeX93KgYrNZkNNTY0QVLBH3TXXXIOTTjpJnJfu7m4AwObNmzE1NYWf/OQnaG1tRU9PjxgMhGaFQiFs3LgRN954o0T9GxsbMTY2hjPOOAN2ux3/9V//hXXr1iGbzeKTn/wkrr76aoTDYYTDYZSXl8PtdiMajcLj8UhdDQ0nAFIvXwgVByB1kISRms1mVFVVSV8+Bv4YiFIdA64P6gGVzMlsNktNtclkEhZrOtS6rmNoaAh79+7F7t27EY/HJVNKXVm4Flg/mkwmUV5eDo/Hg/r6etTX16Ourg4+nw+6rmN8fByjo6Po7+/HwMCAOMQ08oAcVJK1oGyDY7VaYbPZEAgEYDKZ8OabbwojJ42+yclJxONx6LqOyspKCTawtyvPAO49smC6XC6MjY1B13VZO3RYfD4fgFwWpbGxUaDC0WgUQ0NDoiump6cRCoVgtVrhdrulVmzFihXYsGGDGLPquNSaM87n5ZdfXtQ55boodFIJ81P1Pdcx9QSh3KzzYwCNa4bPTh0T9/3s7Ky0/qEjyO/g5+vr6+W7AAgkksELVe+rziOfK7AvWMAzLRKJ5N3nUkTXdSxbtgxr164V1AGF9+PxeHDEEUcgkUjgrbfeEv1KGLhaZ6qOWw0uAvvOX9boMhDB+f6wL2pO1Lmk45bN5hh7yaQdDocxOzuLsrIyTE5Ooru7G8FgED6fD1VVVRKkVWu5u7q6sGfPHmQyGSQSCUEikP+Bz0dNIDBgRWHGngEpnr18llVVVRIE4jPmmawGEp1
OJ6qrq8X5NZlM0ouZ567FYpEOAWzDxOAddT+h/2pJAUUlUuQcAvv0cSaTwbnnngtd13HFFVfsd96ryC21RnY+UdENqs05H9Kt8Jnz/orZbgdiF+i6jlWrVqGurk6uTTSG+n0+nw/r16/H4OCgtCz7c8uHDupfmBQ7SAuhF4RUqj2jjEajFLHTKKCyIiSKRocaKVYVFNlPGbE0GHI961QjHtjXXoIbJpvNIhQKiWOrQhiKZem4qXlPhYZZISxjPlmKUljMOVW/cymOQ2E2608tVJDziRqFLpT5HNSFInYL/Y11Uqeccor0flQzVfF4XJp8MxIOIK/FjN/vR0lJCZYvXw6LxQKv1ytBDX630+mU6wKQBva6vq9uMB6PS92Ky+VCb29vHmNpKpVCLBaD1WoVYh/uAbIP89BS69eAnHF71VVXYWxsDMceeyzOP//8g3YMix0khfu52DzzmRKB4Ha7xZC98847D2osByoGgwFNTU1ChuVwOLB+/XpcdtllWLlypeidlStXwmAwoKamRpw4o9GI008/XZhLgdx+vfbaa7F8+XI4nU4MDQ3B6/WiuroaVVVV8Hq9+M53voPVq1fj8ccfh9lsxq9+9Ssx9qurq4WNkmUFk5OTmJmZwejoaJ7BZrVa8+pnaFirgT4aM4SX0TlxuVxwu93CKEydqToINMJV54BBPWbBuJ4mJiYwMDCAqakppNNpmM1mjI+PY2RkBB0dHdi7d6/Mm+oE8Ud1Xmk4cay1tbUSiOEaHxoakmAh17kKedN1XVrK8L46OjrEsaJzRwZ3BoU4HmZnWAOpEkWRSbSvr08MRBJFlZaW5mVOmRFl8DMQCMgcxGIxqWlnIMpsNqOmpgYWiwV2ux0tLS3yfGhIq2t3vj2m6jfOe+G6L2zjQ9gp4ZDLli2T9zIzySwp9ZOaPU0mk7BarUL4QigdWYpZm881oxr9ur6PILGkpCRPR5MBmIY8z24Gl9V7U1l8C+dhMclms2hra8urY1Wzubqeg39u2bIF27ZtE1QNg95qUF01sgsDAWpQhf2OVQK8v3fhHmagQ80w89kQZm4w5BhpA4EANE0TojU1CM2f/v5+CbRFo1Eh3CJ6gbqSe6GkpETgwQBE35Jwifo2EAggm81KPTsh6pqWKx0gXwnXAcnp7Ha76C4yVnPPABD7ld9lsVgESUI9YLFYRFdyjTJYyvU0NzeXB7MvrFNlwJEsv+paVc/2heC2fF7F9NF8ttt81ziQjO1iUltbKyUVxcYF5DKpW7ZsETTmn1s+dFD/gmQ+CJLqOAH7mnADOcXAxW8ymVBXV4dNmzYJwyQAiTzRAGF0jIYUqd0ZBSbrJRs0q2QzmqahoqJCoGNq8TzrY2dnZ6WdiMlkwhFHHJF3qKqOLg2MwntezCmbL6ulKpBCKXQw1flcTD5o5bCYfO5zn8vLnKpjV6PQi0FMVeG8zXcfCwUG+B3Nzc2i4GlcskaKjKhqZrKvr08aRtOQ7ejoQDqdRltbm9SWABAyB9aRsR5ramoKAwMDMBgM0mqCUDnS6PPgJJMwDcpYLIZkMom6ujrp9UjnVTXi1D5rNLq//e1v4+GHH5Z2NIXrajGod7H1xudVeC1GZxktbm9vR2dnJ0455RR0dXXh6aefht1uR3V19X5kJ38q6ezsxBtvvIE9e/bgE5/4BL73ve9h06ZNmJiYEKZETdPQ2dmJZDKJL33pS3jooYfE4DAYDFi5cqXUKD7xxBNoaGjAFVdcgdraWnz729+GwWDA+eefD7fbDY/Hg0wmgzPPPBMNDQ340pe+JLWLNGBY12ixWDAwMIBsNiswNBobzPhyntXDnQE7NTAxOTkp8LVUKiXOCVtt8Ye1ogzwUQdxP7DdC9c6AMke2e12eY11tzMzM+LE9fT0oKurC+3t7ejq6hISJDqo/F0NbrA202azoaSkBE1NTWhubobX64XH4xEnmGNlJoqZ1Pb2dkxMTGDbtm1CqsfMC7O7bBFCw5aEe3TKaGAxMGW1WjE4OCjPivdRUlIiDr/D4cCaNWtQV1eHTCaDvr4+jIyMIJFIiE5Jp9Nwu92yp+vq6tDc3CwtaoaGhhAMBmG322VsHo9HWq7Q2CwmqmM1Xw0ZjWmeq1x/zEqyR+L4+DhWr14NTcvVUPIc5fqj4UrGZK6BQCAAs9ksTPq8h/Hx8TzYN9caIY2ElHNdkMCmsbFRMjQMMGSzWan7U0lwVFlKIFiVhoYGrF27Ng9RxHHSgD/yyCNRVlaGt99+WxiXZ2dnBSZNKTT2VUdCzRixO4CmacJa/Pcqam0u997U1JSUshDayvZabrcbK1euREtLC8rLy/dDl/n9fuzevVvq9qlLSWLItc+Mq8GQY/ulrlUJzVinms1mZZ2y1p5lY9PT0wLlJryfZwXJ3mgXWiwWTE5OSvaOOpmEZXa7HW63W9qTAbnSjrKyMukBzbIR7guuRY5T13XRXcA+5AXnKZvN4oILLoDVasWll14qdoKKrFjMNuTf1OAL5UCCRLzGByW6rmP16tXYvHnzfokJNcM7NzeHww8//AP73gORDx3UvxC57LLLir4+H+TSbrdLVEl1/MbHxwXXH4/HJY1PRkGVmVKFFAG5zU2iDGaZZmZmMD09LYrB4/Hk0XszQsprEarBMZGZ8Oyzz8bFF18sioObutiGUzOrxQ5QRg/nmy/1GqqoEbhC4UZcSGEUBgqADz6LetZZZ0nWVD20VUe+MHJdbJzzzSt/5ouazefA6rqOaDSKTCYjTiIDEJFIRCKePCwjkYhkQAgRHB0dxc6dO1FeXo7h4WHMzMygra1NWi+ocHJdz9XGlZWVSUaOEVO/3y/GnaZpGBgYkGt0dXVJnRodPsLaGxsbYbFYpA6GY6SDSyIJNZP17LPP4tprr0UgEMAVV1whUeiFHH1GcY3GHLsr1w0/V7iOnn/+eWzfvh2hUAiRSASPPvooNm3ahBNOOAETExNoaWnBxRdfjJKSEtx3333wer24/fbbl7ag3odcd911+M1vfoMXX3wRt912G3bs2AGjMcfgygCAyWTC448/jtWrV+PWW2/Fjh07xDjX9VxLlqmpKTz44IO45JJLYLVakU6ncd1118Hr9eKrX/0qMpkMvv71r+PBBx/EvffeC4/Hg1/+8pfixFGfRSIRaUfS19eH6enpPMOI2Sy3243q6mp4vV6sWLEiL+vAwF4mk8H09HQeFJSv02HldbmfzGYzPB6P9LWkQ8R7NZvNefqNmTFgX/2RWmdqtVrFOVJhcbOzs9Knure3F3v37sXg4KA4wty/6lrVNE36HBL+VlNTg/LycmSzuTY0bMek67ow6pLAKhAISCBhenoawL5Ml3r9bDYr2cpsNotEIoFkMinnAx0mMgXb7Xb4fL68oAr1cHd3d17dcDKZRDAYhNVqzXNma2trxXCNxWKYmJjIq2/kWcA9RtKdxYw/df8WOm/M/POsY+aHZEj8DPUHsK8PqVrrSafUarXmse8DkMyPmr2fnZ3N6/XKLFMikRAIZ6HzzXNYDYhxXajcEtSL6v0fKDSQ+2HdunVYu3attDQpRC
bMzc3hox/9KGpraxGPx9HX1yfOjtlszuuvqAbtVGOe42NwxWaz/dmCc3+pokIxTSaTIOmIQiBigbX3oVAIQD7knfuvvb1d4LVEBvCM4nepcHan0yn6myVi3Ot8XnRM2Ts7lUphbi7HOM2+vayV5/nBwLYa/PP7/fD7/futq6qqKkHP8CwNBoOIxWJi83FsLpcLLpdLAkUcnzoHrFtVHTQ1SMJg0ZVXXgkAOOGEE/LW6XylUoWirvWDEY7lQB3ahUR10knqxvtW37OQrfOnlg8d1L8AufTSSxd0nAojvDyIvF4vxsbG8hSP1WpFbW0tVqxYIQqMRAbMchEGR9iQ1WoVeK6aLSVRBKE2rOfjRqGTQmOPkEkqOzJpnnfeebjwwgvR1dWF008/HZdccgmqq6sXzUAtdIAu9Nn55lKtBSp8rxpJn8/pK/Y3g8HwgUEuzzrrrHkzvPNFt4qNqVhAQ/3sQkpuPiVIw6K6ulrqlhkoIIFCNBqF0+lEIBBAKpWC3++XjAYhe0ajEbt375Z+uel0WupVXS6XZLg41rm5XLsIGqtsc8Ea1N7eXoEERyIRMexUJ5FjDQaDqK2tlbo5TdPEkCb8UoUc8TAJh8P47ne/i1tuuQXnnHOO1KPMN49WqxUejwctLS2YmprCGWecgTVr1uTNfzabY8h97rnn8MYbb+CPf/wj7r33Xnz/+9/Hueeei2uvvRYTExOIRqO45557MDIygqeffhq33norrFartCb5U8r111+Pu+++G4cccgh6e3tRUlICo9GIo48+GiaTCU1NTXj44YeRTqfR1NQEr9eLzs5O7N69W+rdTjnlFHzyk5/EJZdcggcffBC33nqrsJEecsghKCsrk5YdZ511FgKBAC688EKMjo5K32UenD6fD+Pj4+jq6hKDndBede5V1knqOrWsQK0t5R6ic0B4JB3ASCSCcDiMyclJBINBaV1AJ9RkyvX8q62thc/nk7IKGtV0ELiOWculIk+YkaQzRCc5FApJS4BkMonBwUEMDAygt7cX3d3deTVZ1NuFkGQ6uOnyUwAAIABJREFUBTQcST7EzCmdPupuOqelpaV5/TyZCXE4HPJeOokOhwOapgnsPhQKCWEaMzGE19N47erqkrYRAMRhJrutyWRCY2MjamtrBUUxMjKCkZERGWM8HsfU1BQcDkdeT2UAwhC8kKiOXuF5QuNNDeTSGabRfdhhhwnhETO7KlyQ+kutjWN2iWuioqIC2WwWVVVV4kyEQqG8IAfXG5lLeT32h1Z5H9SWXTT2eT9c/+r9H4jhWSz7U1NTgzVr1ghbs3pN/uvxeLBy5UqBixL+zqw6gyDqWFQHofDsO+aYY5Y85r81UaGwzO7TkYxEIhgdHYXT6URlZaW0Byx0/tvb29Hd3Q1dz5EgMXChkhSpjil1k1p6wlZeLBuj7nE4HKiqqhJdyi4QfI583mTkpTPJ9RwOh+H3+/Ng8JqmSQkIEQjpdBqRSAQTExMCQTaZTGhoaMCKFSvQ3Nws+6SsrEzQL6y7Z1KFc8o55L2rSBVmWTOZDJqamuZFXSxFDjQDWvjsPghHkfuJ+9VgMGDt2rWora3Ng94XjuH/Qj50UP8P5dJLL8Wll14KYH6Hq1i2ixvXYDAIaQYj+IR38D00+JmdMhgMeY4oGfMIjctkMgKLZFSrrKxMagxUA56fY4SYeH0aQl6vV4yZubk5/NM//RMuv/xyvP322zjppJNw8cUXo7m5GcCBQR0KHTZVVOhR4etq1rXw81QCxa7NjaxpGp555hm0tbVheHhY2CvT6fT7clLPPvtsnH322TKOwjEWM57UsRWTxZzUYq8Vg6DwO1599VVUV1cLrI6GVmlpKaLRKBwOB1wulxiPk5OTqKurE2XH+mjO5cjICGZnZ9Hf3490Oo3m5maUlZVJxrSsrAwAhF3U5/MhFArJgWgwGNDR0QEgR9Rgs9kQjUbzIsBqlJUsg6FQSJrZ03Ehgcvc3Jz0YlUPXSCXHfH7/bj22msxOzuLyy67rGgW32az4YgjjsDHP/5xpNNp7Nq1C36/H42NjfI83nnnHWiahrVr1+LVV19FbW0tPv/5z0sPyXvvvRcXXnghvvGNb+D111/HRz/6UYyOjsJkMuE3v/kN/vd//xf/8z//g+OPP77o8/2gZM+ePdiyZYvUL2YyGSHYmZ2dxejoqAS9jjjiCGhargH6bbfdhvvuuw+vv/66vBaPx3H99dfjH/7hH3DllVdiamoK559/PpqamnDzzTfjkUcewc0334zu7m7pf8mWHECOUIcZIfb3Y9ZJNYDU2mK/34+hoaE8Ag4AskYqKipEb3FdqYEoRtfVms+ZmRlEIhEEg0GEw2FBD6hrrrKyUhABKloEgJDWcD4ByF5SGYGZCfZ4PJJBo8PKjEc0GsXIyAj6+/sxNDQke4rGFp0WMmk3Nzdjbm4Ozz//PAYHB5HJZDA8PCxnAB1Jjp06c3p6GjabTTKAzKbSKFTniI4sMyh8TqlUCg6HAz6fD8PDw/K+iYkJBINB6LougSOXy4XW1lZpRTE+Po7x8XHpqUroMIC81jYej0eIdDRNg9vtXnB9F8LaVNG0HPlPIpGQ0hb2Bp+dnZW6dgDSqgeA1MVns1mMjo7KeamSvkWjUczNzWFiYgKBQECuwSwX66xZo8ughqZpci6rhEWce94Tf1dbKFHUWrmFztFic6UidwrnyufzYe3atRKcKfxeo9EosEyyhxLdwmeuOgRcw+oZ+H+ZyflLEQYo6DBRJw4PDyORSKCxsVFqQ7k3eI61t7ejo6ND1hGfAVnMgX09ULneWEIGQGqB2cOa19a0XI9N7gkyadvtdkxMTGB4eFjWAtEjLAejvmGChOPgeV1WVob6+nqYzWZZA9QFvH+SKC5btgwOh0OQMSw34rqiPuW9qSVmahlFod1FPXjVVVfBaDTikksuWRKrtJqRpY46WJnPNluqFCY1Cl8Hcizzy5cvl+fPuTnQQNYHKR+2mfk/EjqmFCqeYjJfxIYGUDAYxLJly4RNk43RDz30UOzYsUOyXNFoVA5bIBcNZ/Tc5XIJSYV6wHg8HqmBoWFEo0RVXupmVw13vp8LfGZmBrfffjtsNhuuueYaHHfccQByDs0TTzwhbGoLHZzqd8znvBV+ngYmr62+T4V0zCc8CG644QaBrjmdTrS1teG3v/1tHkPigchZZ50l4ygklOLv860NVQEWk8JMsaooC6EpdGiLfU9PTw+2bNki9SOMzNJYpyMRjUYxOzuLwcFB1NXVidIfHh5GLBaTumWPxyMZ14qKCgwPD6O+vh5erxd+v18y/nQUy8vL82peVq5ciZ07d4oBazQa0d7eLiyavHc1Q0LYI4A8NtiBgQFZz3Re0+k0xsbGUF9fL7UwakbshhtuwJ133onLLrsM27ZtQ0dHh8yppmn4/ve/j46ODsTjcSxfvhwnnXQSQqEQmpubUVVVBbfbjc7OTgQCATQ0NCAej+NnP/sZzj77bDz22GMIBAL41a9+hZaWFnziE5/AV77yFVgsFmzcuBHPPPOMGP00bv9U8uSTT
8qcmc1mHHbYYXA6nTjiiCNw/PHHY+fOnbKuaEBYLBZs2LABbW1teOSRR9Dc3Axdz/VVe/fdd1FeXo5///d/x7e+9S3cfvvtuOaaa/CVr3wFN954IwYHByVgQHKmqakp1NXVIR6PIxQKSesSAJJNZ2lBIeGReijTkGJkmBkD7jkVakYji/X6XEsqjFKFPo2NjYkjajDkWHIJG1ezXi6XC1VVVQgGg6Jj1UCfmklSnXDVCGTAUdd1qdM2Go2S2RwZGQGQX69VWloq30uCoWg0KhBT3r9af6XruXY/3DecKxqzrBnTdV2MwGQyCZPJJMRnhDEzkxqPx8WJJuqGGVXee1NTk0DIObfsv6ppmsDy7Xa7BF4bGxvzyIMOOeQQtLe3L2pUFRL0UNTzgXNCvcm5YZBO13OlAV6vF7quC4rEZrNJFp9nIwBhE+e9zM7OSj9frk2SU6kOm5rVKIaC4TOgzo3FYigrKxMjXg2gqPe5VOF+4XiKia7rWL9+PQwGA3bu3Lnf33j2ksGYa4qOEp877QwA+53XlKOPPhqvvPLKksf/tyJsA8j9Mjw8DIMh10vW6XQKeo3r1WAwoLOzU/RELBZDLBZDTU0NpqamZG0SIQVAiMlUJEkqlZIWMsC+TDwRJESDcIzkDuA6JjHa8PAwWltbRRepThDvi+NnX2cG26jv1SwySxmIJMhkMhgaGpL1RqQU9+HMzIy0xlMzrwwScg4KkQYc12WXXYb7778fp556Kn7/+9/PW2qm2pXAgcPpKdz77ydrqwr303zX8ng8OOyww7Bt2zYZ/3wlYX8O0Rb7Yk3TfgHgNAABXdfXvfdaOYBHADQD6Adwrq7rYS2n8f4VwKkApgFcquv6W4sOQtP0v6fI2EUXXbTfAqGxUyw6qUJ1CkXTclBak8kkRg4PE7vdjt7eXvj9fiGGISafJDNsE2I2myWCRYOHziydVUZ31UgTC+RpUDHaxM3IdhCE1bEWh5HckpISXHbZZTjhhBMkg/DAAw8sGC1daE4WUwKFEWTKfNcKBoOoqalBSUkJ6uvrhZilvr4er7/+OgwGA1asWAGfz4fTTjttyRv57LPP3i9jw3EUroOF7mm+LEChQaI6qIXKkkq/2PeUlJRgZmYGk5OTqKqqgq7nyEjC4bAQmaRSKSGZmZyclJ6Auq6jp6dH2HQJ8SPLajweh9PpRFNTU152hnXSPT09sFqt0l/XYDCgtbVVMj90Mnfs2CFGtGqoqhlUGuHqXNCQnJycRDQaFaOovLwcsVgMdXV1Aj/imuX+stlsMBqNuPXWWxGPx/HCCy/AYDDg97//PTo7O6XBeXV1NU499VTcfPPNGBoaQklJCV555RX84Ac/wMjIiBgUHJcKo+IhWXgIqs/baDQKW/IHKZqmYfny5bL3N2zYgNraWnz5y1+G2+1GOBzGo48+ihNOOAHLli2D0+nEDTfcgB/96EfSBNzlcuGOO+7II0Xbvn07Hn/8cRgMBnzxi1/E1772Nfzyl78UYh1dzzGysqVNZWUlAoGAZARYMzk5OSl6jg4q6w9XrVoFTdMwMTGBmZkZTExMwGQyCcRwZmZG2KA9Hk/efNOQIrkGW9JQH4XD4bxslWq4c30xoJFKpaRGU9d1VFdXC2ui6nDQoaBRqWY91IChavCwJpJ1XITB03grKysTuJ2maSgvL0ckEkEikUAgEEAkEpGAkZq5VR1U6mw128A9wFpAomMymYwwu6u6mZlXZjzZ65OZZ85HNptFa2urtKSIxWIYGRkRUpJIJJJXB0omUban4pwNDw9L1j2ZTKK7uxuXXHLJfrpNDVYWE94v9QKDDrwHrpXh4WEEAgG4XC709fXBYDCgubkZ2WyOvKuyshLZbBalpaWYnp4WGGN1dXVecGdmZgahUAhVVVUwm81CehOPxyWgxswZdR1rDdmmh+RbDodDYIwMYBMmfPrppxe9z4Wc1cVskPnEYDAgEAhgfHxc/l8sKzM6Ooqenh6B7vKZq10F+Hlgn/Pw8ssvH9B4/tqFDpKu61LiYbPZ4PF48uC5fJb9/f1CdjY3l+tZzOAIdQfRb7qeK/UiARKQ2yNc/5qmyflI/eD1euVcpqPn9/slaEjdQCRAQ0ODrDej0Qi3243R0VEAuRKgQCAgEFwmRYxGI4LBoNhFPH9NJhNqampk35vNZgQCAUSjUcnSUr9RhxN9QzIznv3UsyaTSYLe/A6eE3zNYrHgX//1X+FwOBAKhfDrX/960b1zIIGgYp9fyD5bSFQbgrbQQvapuifD4bCwy/Oz5OR4v/LefGzXdX3RvlFLueP/AHBywWs3AHhO1/U2AM+9938AOAVA23s//wDg3qUN+e9HLr744qILTY1SqNGbYhm1QnE4HGJY0SgjYQczWZqWw/8zassNy4NWZZ0sLS2V6DcPCG5iRph4SDJCq2asaHwREqUyDxI2x8h0JpPBz3/+c5x//vlSX3f55ZdLhrmYYQEszqBaKAvBZAu/Q9d1dHZ2IhqNoqmpCXfccQeeeuopRKNR2eT33HOPZArm5ubw2muvHdB4gP2j2Wr2b76xFn5+sYg4DxP+XswYKUYORcVPaC6NZ4vFgkgkAo/HA7vdLnVXNMoZ+FDXMI11RjQJc+NBNTQ0JIRKXDNGoxErVqyQdUQUQCKRwMDAgDCsdnZ2ilFLAgNGjnmoqIed+joJdqqrq1FRUSFj470y+6WOgc+b9bM33XQTKioqcMYZZ8Dn86Gvr0+ccYMh16bpmmuuQTQalTYbmzdvxqWXXioZQjoxatagEI3A50OHQc3m/KmEa0XTNNTW1uLjH/84stksfvrTn8Lj8eDKK6/Eo48+iscffxyXXHIJrr32Wjz55JN5473uuuvyHG2XyyVtim677TbcfffdGBoaQiqVQjgcFjIkOnSM3peWlsqzUJ0g1Smk8dPd3Y1UKoXm5ma0tLRIpq8QXaHWOU1PTws8E4D0AlWJiAwGAzweD8rLy1FRUQGn0ymBGBp4JSUl8ix1Xc+rp04kEhgbG0MwGJQMAI05h8MhRF9cqzS01KAjnVDuZcJ/AYjj6HK5ZF3TyZqenkZFRQXC4TCy2Vy9WiQSke8mTJnrX13jaj9BGpwcB51vh8Mhf+f6dTgcSKVSebXh0WgU4XAYXq9Xalzr6uqwfPlyIRfq6OhAb28vkskkIpGI1GY6nU7pd8gWVTyHpqenhXit0Okudt6q2chiwr+XlpbirbfeEl3F5zA3N4eGhgY0NDSIA93S0gIA4oyrGc1MJiM60mDItV8hWYzdbhfeB64ZOhOsS+bz5v1wjo1Go0AayeYMQPrncv2pToQqxYLihfMy33sWEuqo6upqbNy4UcasGsqqbjn66KMxNTWFV199VfQ1s/SsceY9U/4ea1EZYLLZbPB6vVKjr2aa0+k0du7cKbwKDCyx9QyRHdFoVBIG3I/qmmJ9J+HEPHsqKysFQcVrRyIRBAIBCRZx3VVXV4v+5XcHAgG43W5MTU3J38jeryJaIpGIdJvgujGbzWhsbERDQ4MEjEKhEPr7+5FIJIQpmrqV7dno+HZ3d0t3Cgby
VIea0GfOpbruqRO/+tWvwmKxoLKyEuvXr1/QFn0/57MaLDzY7GthkH4+mHEhSsHtdsPpdMr/3w88+f3Ionet6/pLAEIFL58B4Jfv/f5LAGcqrz+g5+SPANyapvk+qMH+tctFF120qLIvFqmcz3Ghgp+enobFYhEIJun6ubnr6upk46uZOsK1VAeVm4FRTMIiAIhCoNKjoaTWxPJg5yFOJ1etXeV3FUb9+vv7cdVVV+GWW26BwWDAFVdcgdNPPz0Pf3+wm7XQ6S88gAvnOxQK4fnnn8fNN9+MW265BZdddhn8fj+y2SxefvllnHTSSWLUdnZ2or29/YDHRFns8F8oyq9KYaR5vs/Md73CeWA/PyAXqe3q6hIYDBmkCeskaQKjmpqmYXBwUOqtGMmk8avrukCSyO5HB8Xn88nzam5ulsyX3W6XTFNVVZUYsUBOofp8Ppx77rl59S3F1o4aDU0mkwgEAvB4PNLygS1oCFvieNX6PtVx+eY3v4lEIiEQ1XA4DJfLBYfDgS9/+cvIZnP1311dXXj22WcRiURwyCGHoLKyMo/NUs2oqHtVNej4vczysVb3TyF0KjweDzo6OvDUU0/hpz/9qUSmDQaDwJKXL1+On/zkJ3jxxReRzWZxxx13QNO0PIIiXdfR3NyMZcuW4amnnsKdd94pEey5uTlUVVWJUe3xeBCPxzE0NCRBL0LSgsGg1IOqtUMMkMTjcbz99tt48cUX0d/fL8y96hrgvHLu6QgS+sXnTAdQZQrm/ZDkZdmyZdKLNJ1OS0aC+pPBQ2axgFxt9cTEhNRzqf1V6bwyw8iewCoBGA12CvccoXjcQ+FwGA0NDeKoalqud6wKiWMGgs4Q9wbRMGo5hWq0MHNP5mI+B+p7Bq84F6zzrq2tlblhCxs6fv39/WL40slXnRTWqzc1NeUxcw4ODkptI+9rIT2oBu0Weo+maTjnnHPystoMjPBZqeeYruuCnmBgjnNNxltCpQkp51pjYItrkgYzWYM5VjWQwvOXa4vZFvV8Vseukp+oenG++Znv/0sRFbGzfv36vPKiwmyOpuXQWEceeSRef/11hMNhlJaWyvvsdvv7ykT9rQiRCkQz8YfznEql0NHRAYNhH4mWCtdlrTTPx1gsJmgIdf0wa8oODpqmwWazobKyUtY2gyEsDVPZhO12uxDG0cGcmJhANpuFz+cTfch9DezL5rPchp9jayKDwYDly5cLszWRGWQiJomf0+kUsjWiDnp6etDd3S2QX2BfiRy/m/NSiGQBIBlWvv+///u/YbFYcNRRR4l9/EGKGkwvlkBYqhQ6nku1nTUtx5Ghos7+L+Rgq269uq6Pvff7OADve7/XARhS3jf83mt/93LRRRcByI+KFIqq0Atfn+8QYcQ1kUigtLRUiEzi8ThaW1vh9XpRU1MjCoo05HQYCXtgdMrtdotSo+PACDqzn2ThI2yEjgMj19zYrBukocKDW43KqVkpGhvDw8O4+uqr8f/+3/9DTU0NrrzySnz2s58V+OZ8okIaCl/nZqchSANJdWDC4TDeffdd/PznP8crr7yCoaEhtLW14frrr8dvf/tbbN26Fd/85jfxu9/9DnV1dbBYLHj88cdx//33Y+vWrUtfDAAee+yxvIyOCkvlmBcSrgnV4KAstL7mE9Vw53gCgQBqa2tF+ft8PmFrpqNKGBoNNRqNnZ2dCIfDYlwxSglAnrmm7WvJsGPHDmSzWUxMTCCZTKK+vl7mYdmyZdJWKRwOo6KiAhaLRepabTYbTjrpJFl3Z555ptQy87nz8OW9FUJ1JyYmUFZWJm1h6JyTLVY1fFUnlev7u9/9rjAotrS0wGjM1W9fdNFFqKurQ11dHbLZLDZt2oRnnnlGDG0gl0Fh1Jf7g/NFwx/YxzZIdAOzvPO1qHq/YjDk6iltNhtWrFghzvDw8DC2bt2KaDSKu+++G08++SR6e3sxMzMDo9GIyclJgZyyDybXFaPr7e3t6OvrE2OgpqZGnBLOVSgUgtPplEDC4OBgXjsBZo54bYoaUAgGgwIP5Z6no1dTUyP1i9RxXCN0isnsTAcvHo9Lz1DWCrLe0mKxoK6uTtofca2rBh+vx/3LFkgkAKMxwmwadSYdaLZO4Dg5t9QFNPx8Ph/MZjNWrFgh0PehoSHZ44TAAfuCVlxXnE/1d64HrvlwOAyz2Sz1hEAuaxePx+Ue6GxFo1GEQiHY7XZUVVVJtpBIHdaXdXR0SMCC82Sz2fKc91WrVkkAgG1y+vv7xUkk0V9JSYmMfz5duBQnVdM0jI6OCsGgasiSXIrriVmmQogk54M8Baw5VbPV1CmcS+oBIqFYisNzlZkvjp33ryKTMpmMIDeor9566628eyvcO8WEmbOlinqWqXO8atUqrFmzRp6LGnhTf9+8eTPS6TT+8Ic/SLbQZMr1w6WepPy9ZVFJ8qOWUQG5+d29ezd6e3uRyWSQSCQk+Mn1xzXk9XpFb+m6LgGrUCiEQCAgHR/oiFosFvh8Pqn75tnD+nCuf67dqqqq/bKmhKDzfWS+Vnk3WlpakEgkJPPLz2YyGbS2tqKtrU3WeW9vLwYGBsQGIAmZ2+0WBMrs7CyGhobQ29sriA61tRMDnkRlcE649zivnGueIbOzs3jkkUewdetWpNNpKdlbqvO3kL4pVoLGs+hgZaG9q9qcxeTwww/HIYccsuj7/lTyvkmSdF3XNU074NnTNO0fkIMB/80LnVNg4QyguqFVoeFBg7tQeGDTEJudnYXb7RamRq/Xi8MPPxzbtm2DxWLJM7pooKsblX3tCCN0Op3yGmEZrDelEULjgJkNOn6MUqltEHiPPGCZXaMBxMM+GAziC1/4AsrLy3HnnXfi0ksvRUlJCe699979sq/zQSHULMmyZcuQSqXw0Y9+FD09PWhvb5eN19PTg7a2NvT09ODkk08Wp+Kee+4BADz77LOYmppCc3Mzent7cfXVV4sxMN9zWUzUTV8YhOA15wtO0FBSs6iLGRyLOfeqw0yyD/VvdrtdCF7IkMesyODgIHy+HFiCmVU+c13XZZ3xkPT5fMhkMgKHq6iowODgIBoaGtDX1ydtS/h9paWlGBwclCj8tm3b5IBrbW2V++CBctppp+F3v/udGI0qRI/ZBhqAPIzGx8dRXV2NtWvXoqurSxgLuU5Zn8Jab9VwSyaTGBkZQXl5OUZGRnD44Yfj5z//Ofbs2YPx8XFks1k0NDRgYGAAJ554Ivr7+3HMMcdgdHQUw8PDeQ4U74PPg4cWAwLZbK6vZWlpKTZs2ICf/OQn8z7X9yNHHnkkRkZGsHHjxjxHKJVK4Re/+AUee+wxlJaW4tRTT0UqlRLH7K677oLZbEY0GsV//Md/YGZmRmDfkUgETU1N2LVrl8wrA1yEz2YyGYyMjOQFwvr6+vKMJl5TzSgS0kUovhoJV9l0+VosFhOnmu1p+DvnmA6I6hAyCGc2m8W4owHW/B4plGp4U7/SEVYz5tlsVnQodSBhsyQ04prjfXMtcGxOpzNP/9MIJGw+lUoJfJ2sqQwQ8nkCuUAJAz6sXWMtJJ1G1rAxqwH
sy6QSksn1yjnW9Rxcm8KsIR131pzTmWcNKeuDNS1XD81MDjOmLBtQA0dcE3Nzc2hpacHu3bv304dcB+rZUEx4ftEArqurQ09PjzzXmZkZeL1ebNq0Cdu3b0cqlUJ1dTXGx8eRyWTykBCsP1UDCSaTSQigqE+np6fhdDrz9L7qZKiOidFolDp+opvMZrPUF9Ow5TxRj6r3tpjRqZ5vSz3n1NKAYnO6YsUKGAwG7N27V+aE+pn/suzCaMzVrZeWlmLVqlXS95Nt8v7eRC3/oJM4MjIiAS22jWGWEYDULav13jyPmdwYGRkRvUGbzWjM1fTzjGRQZHx8PC9oquu66H4VTcezWNd1gc5yTU9MTAhBoM1mk/2rsoNnMhmsWbNG6radTif6+vpEDzCRQsLKqakpOXfGxsaQSqUEfcExcdw8Z7PZXF9nogKZPFGRL5xv3h/n6aGHHsLmzZvhdruFtIvrfj67DViY1FKtNS3mCyxFlhp4ovDe1M+rgaWSkhIcccQR+OMf/3jAY3m/crAOql/TNJ+u62NaDsJLOskRAA3K++rfe20/0XX9ZwB+BuRIkg5yHH/xojqnFBrIS4HWqK8XRlI0TRNHEYDAF4ixLykpkT5tVGxsqUAYMACpPyJJAzNhLIC32WwCKyLsSm0vwwNQPfjVbGcikRBnkrA81muxfhCAjA+AZDvpxHzhC1+Aruv4t3/7N3zuc5+D0+nEAw88sF97iGLz1tbWhrKyMrz99ttYu3Yt3njjDTQ0NOCUU07B008/jfHxcVx77bV44oknMDo6il27dkmGRGWMtFqtGBkZgdVqzYuqHWg9LOWxxx7DueeeW9RJXQh+Nd/rC83DQlLoZMdiMYmK0vBRjXy1/jibzWJ4eBi1tbUAclmO3t5exOPxvCb1DGYAuQgrI6U0sDn+QCAAr9eLbDbXqqi6uhqTk5PweDyYm5tDMBhEd3e3HE4OhwPr16/PuxfO46c+9Sk8++yz8hqfk+rw0cjnwdDR0YFVq1ahra0N/f394siy5yONSTUryGvwYGtoaMDVV1+NH//4x9D1XJsPZknn5uawYsUKALmM/fr164VJWGUMLYzcAhB4L5CLgre0tODrX/+6ZCorKysP+NkvJD/4wQ9w22235WXpNS3HrkijY9myZdizZ4/AqmdnZ/Hyyy+jsbER3/jGN6BpuRYUkUgEK1asQGdnJ8bGxsTpKi0tRTweF2g4SWGCwSAcDgeWLVsmxGR05DhPqrD+sry8HHV1OdAOjdjp6WmMjY3J/iA0k4gQfl6dZ5PJJIYN9SqRAsz6MxtMZ9JgMOSx6KrOiNP/SdpfAAAgAElEQVTp3C+6z36tzCipmXlCu9g6idBZNqmnHtU0La8GVdM0WV9lZWXSJqWnpwdAjlxKDTjSIOTzo5Bpm9l83hNrKWnQqc449znPh1QqJTVhqqilHnyvwWAQiC4hw+l0Gi0tLTJn3P9sPcV5oO5iwCqdTmPVqlULBoL5/NT7U4XGoaqHGUhS9wIDuQzQqvfI4AT1wvT0tKCcCPNV+42r+14NfFEP87v5e1lZGaLRKKLRqAQ3VHgxyRMZ9DCZTGhvb8fy5cvzgrsLCefqQM+5xa6bzWaFhK2jo0POBtXG4bo/5JBDkM1mpW/vhg0bkMlkJHt//PHH4/nnn1/y2P6ahftN13Xs2rVLnMlQKCQ2lYpGIzKOwTsGjUpLSyW4TN1KZ4TvLSsry+vjHAqFBLKvwm/JD8A9Q53KM5xrzW63i11KCK6maaioqMD4+Dg0TcPGjRuFob+hoQGTk5Mwm83S1os6juSJRPXQXu3r6xPnurD8hee3OpeJRAIOh0Pa5XH8RIhRP1An0w7iHF977bX4xS9+gdWrV+Oll15a1Dld7NmqQbODucaBJkoK/YnC76Rtk8lksHr16gMez/uVg3VQfw3gEgB3vPfv/6e8/mVN0x4GcDiAqL4PCvx3JxdeeGFRRT2fczqfqO/lRgEg7Krl5eV5DiwPQ6Mx17KB9VFr1qxBZ2enRG11XZdNTEhQKBTKY01lBJwRdB64dEKZHdA0TRxeOjyEjZSXl4uDwgg8oVgkB2GWi04qDUheiw7wF7/4RVitVvz4xz/GeeedBwB46KGH8gwmVVpbW1FXV4c//OEPeOmll3D//ffjzDPPRGVlJV566SVceOGFmJycxPXXX4+xsTFhk+M9q85IIdSD90qj92DkYJSZGp1Wja3FHNT5vouf42cJp+Lf0uk0pqamUF5eDo/HI04k14Ga7evt7RXYjVoXZTKZkEgkJKKfTqfzspjsv6vCgNlz0+fzwe/34+KLL8YDDzwgQYuKigqceOKJRe+Zh+KJJ56IV199VRyJwkNKnceysjL4/X5MTk6itLQUbW1tApmis8BDmcY4x69GHGdnZ7Fnzx6cdtppqKurE5bP+++/H4lEAu+++y68Xi8SiQQqKytx/PHH44033hAHiDWBNNAY+SUxC4kinE4nvF4vJiYmYLPZPnAn9Z//+Z/3i6xSDxDyq0IKuccbGxvzAhgjIyOSCeE1HA4HzGazOEpkgGTpgclkwtDQEPr6+iT7Sgedc85rce6np6fR398PAEL809DQkFfDo2alk8lkXiuDTCYjhjyzDDyggZzTxn1CYjoiAvgZZgq4jjlGBjksFotkyEwmk7RQMhhyhFo0jmg4AvsIe0gcwtfpGBPCmUwmJUMG5Gcq6cgSeufxeBAKhcRZ5r5Ua9VUo42Gm9lsFiOVP6WlpYJO0HVdnGqV1Xdubk6yX9TvqjEYj8eF7R3I7X+yftJBi0ajYsgWKxOYmZmBz+cTvbGQTlbh/sWEc8zzLpVKCXMw2dxTqRTGx8fFsFcdq1gshoqKCiEjZDCA0GoGKAizTiaTKCkpgcViQTgcluACIb7MLHNtMFjDM5kBY2bNeM6yFph2gQohXKoxu9A8vR/hWbpq1Sroeo4he3BwcL/3MLi4bt06zM7O4qWXXsLatWtRXl4uZ8gJJ5yA55577gMf41+aGAy5zgKDg4MwGAxyJtlsNgn6UA8z2F9WViZ7BABcLhcmJiYEap7NZqXWlORyqg4cHR3N0xPUO5WVleLYUl/RRqLeAHKBQgah2ROcJUTUMWoAuaqqCqlUSvoIA/scJd5rRUWFBLNCoRDeeecdSWostMZpe6sJFZYNqWz4hEmz/l/NogL79kQmk8Gzzz6LE088EZdeeikeeOCBA7bnuIZ53h9MkmEhtN1iUvg59XfOF0uW/tyyqIOqadpDAD4OoFLTtGEANyPnmD6qadoVAAYAnPve23+HXIuZbuTazPxpiqP+CuSCCy4AcGC9xoD5s2BUDGoUU9d1gTeoWR5G0aLRKNLptDgbauE138coNhUb67roXDDSy2gxIRUq/ICbigZFd3e3sFH6/X6BylVVVaG8vFyoyHVdRzwel8OZdQQ8gFkvUEjskE6n8ZWvfAWapuGHP/whzjnnHJhMJrz55pvYs2ePKBPWAN15553Yvn27KAIyQx577LGw2WxoamrCz372M3zqU5/C+Ph4Xj0asK9pNJWAatDSOCdM5UDlkU
cewbnnniv/Vw3pYk5nIdmVuhbUTEehLAYXocKNxWICGeTn2LaAayUej4th2tfXB5/PJ8ZSJpNBaWmpEDSQeZdrwOVywWazIZFICPxcNd4NBgN2796NQw45RAzdubk5uFwunHjiifjsZz8rjt8nPvGJ/e5BvV++duyxx0qEnfPJQI8K5WlvbxfnKpFIwGg0oqWlBTMzM0LrH4vFkEqlUF5eLgaAOr9msxlnnnkmYrEYXn31VYFIptNpXHjhhbjrrruEDp+wP6/Xi8MOOwwvvPDCfg4pAMmcmc1muFwuOeCPOuooNDTkACuEBn6QUuicplIp3Hjjjfjtb38rvfUMhlwPvkAggEAgIH1sY7GYZM/V8gAGzgyGHIuy3W6Hy+VCKBRCMpkU/aPW4TLqrwYE1LpiGkWEhrJmdXp6WpwwYB9sXjV03G43EomEBNwYkKEuJLJE13UhfGKLEJPJhKqqKhiNRlnTfr9fUCV0DHhNkqqxvyj3Ap1DjovkIlarVfpU02GikUjdPDs7i4mJCdm/DFay5pOQNzqq1Otq6xxmEphtYZY2m81K2wnW26roH54J6jqdnp6Gy+US51hF3ahricY069IqKyslK8HMNQMcwWBQ9iPvn8Yzr+V0OiVryoDmQu2X1Mz5fIbdzMyMBM0I/5+dncXw8HAeq7TVaoXP50N/fz9SqZRk7tVsMmGXDBKo5TJc11yfKmsy9WhZWZmwGXP8DIbQUVd76fJ8VY1flVxJzRAvdi4cqCz1M2oZSzabRUVFBcrLy7Fr1668LLJ6FppMJmzZsgUA8NprryGbzeKoo446aBTTX5vs2rUL6XRamM5LSkpkTxB6CkDsNK41nk2s8Wagqby8XPQb1w0z/sPDw7I2aTcCELQLe9yT7dtsNottwO+gU0Nd4ff7MTg4iJaWFoENJ5NJCULPzc2hsbERu3btknODaDyz2SxEY3SCSU6pEikxiKiuQxUhpjrD/Juu6/D5fBgdHc3rhUq0A1FjRPWp5EqPPvooDj30UHi9XoH6HkzSgeM/0M8dbJJDDaqrEH6uI1XPH4we+CBk0T6of5ZB/I31QaVzOl/dhrpRikmxRVos89PT04ORkREcc8wxAm3igU+GtFAohGw217eTBiLrZcLhMOx2uyxGNTrEQ5EHnXpAMJLODEo4HIbFYkFjYyN6enqEzIDRUFUZmEwmUVrsz6bCQmw2m5CAqCQd7KfH6B3HSuX1wx/+UKLKO3bswJ49e2AwGDA5OYkXXnhB+ibG43GUlJSgs7NT+gGmUim0tbVhz549uOOOO/Daa6+J48nxEcrF8fBQVeF4hYbYUuVzn/tc3ucWU1LzHcaLKRLVGFMVD39ozDLLohoI/E7CuVUCG0ZROzo6JDPAde90OoU4hb0ESXRTXV0tc8aMQzKZRCqVQm1tLSorK8VBqKurwzHHHIPjjjsObW1t+M///E+BLaqy0Ny98soreZFTAAJZVIMghfPDcfT29orxyMwVnXfer9lsxpYtW2SdsjE44eyxWAwXX3wxrr76auzatQvBYFD2LA8KAGL41tbWwuVyCXzLZDJJa4G7774bwWAQ5eXlAuv/zW9+gxtuuKHo/R+IaJqG66+/XuYlHo+jp6cHPp8PNTU1eZn77du347rrrsPu3bsxOjqKSCSCnp4eYWhkFDwWi6GxsRHpdBqJRAI+nw8mkwmTk5PCvgtADG86nHRUGICjPnA4HOK0MLuq1u+o+kzTNAmS8FmxtYLD4UA4HJYMIg00AGKA8dmozm02m4XX64XRaJRaUSBn2HR0dAhJDQNZbPNA2BwdYjVrSWeV41azckSk8DXeL+/T4XCgtbUViUQCzc3NCIVC4gRms1nJvvIeaKzyp7S0VLK7JApRUTFqT0XOjaZp4oyyLphGsZrJ4RzRcA0Gg1KzyftUnSo1G6tmYLmvGMzSNE1gwNTPiURCevH29PTMSyKmzl2hMBvEuYnH45iZmRFip3Q6jYqKCoGnBwIBbNu2TQIozFDxvdSpZB9lQIVrmMEa3jNrChmoIPqIotbOcz2rGRhC3tX9QAf1zDPPlOtwnovNw8EYzQeSlQWKdy3g39566639guCFmR6DwYA33ngDuq4LAd3fqmharhabSCQmIlTeD76voqJCznSr1YpgMAgA4kQ6HA4pL9C0XO9xBkU0TZMzhk6ZwZBjc3c4HMhmsxKo5drjPuX/Sa7I7yQPA/cpYdvsMUznmc94enoa7e3tQnzk8XikbnZqakp6pPN+VfRgYWAVyA+G8PXCMi32PydqkHu1tLRUUD6sw2WQVS0Tuu+++2AymfD0009jeHh4UVtQDc6ory22BgqDfYs5j8U+A+zfapHPnv5DsZJCXddx0003Lfh9i8l73/+B9UH9UA5A6JxSChcPN0QxKYzsLCSMyra0tMBgMAhsl8YOMwBlZWVwu93o7OwUNkcy7xbb1OXl5aiqqhKloWacKioq4HA48ghTWOPU/F5zchWmy43Pjch/mdWora2VLAjHwoOYsCpCRwYGBvJ6snJcJPu4/vrr8a1vfQuxWAwbN27EJZdcgtLSUnR1dcFgyNVYqcxxzz33HObm5tDf34/Ozk68+eabGBgYkL5ZACRjrB78VCCEWqntFQ5W5oNXzCeqM0VZSFFxftXrF4tOsyaQr6nrQjUQmKnp6+sT54MZQToLzJYSBs41xIOM0TlmbdkHbXp6Wuazo6MDuq5LO5bnnnsO6XQae/fuxRe+8IUDMpxMJhOOO+44OXSZgWMNNOeIrzMIwro3TdPQ1NQkMCJC7ghV52dZd8NaSiBXazoyMoJQKCT1gE1NTVKrm81msXr1ahx11FEoLy+Hy+XCunXrsHHjRtTX1wtEi4djNpvFeeedh2g0CrfbLayLmUwGp5xyypLnZDFR1yJJxGpra/dbow0NDTj55JPlUGZd8fj4uNTlGQwG1NXVieNDo4dOKQ0kFarPQ5NZfT5vZr1jsVgeqQTho8wccCzMtPF61FGEaJLlketejY7TqGIgQoVmOhwOTE1NIRgM5rVHASCf4zxSp1AH2+120SG8V2agY7EYLBaLZN1oTCWTSTidTlRWVqKyslLmUDUKadzRWYzH4zAYDMIc7HQ6JWvrdrvlGdLRZGAunU7L3uW88f3UJcwgMqjE91FvM/vHbC+NnlAohJmZGSFOUoN8NNiy2axkfsn2DiCvNcvc3ByWL1+eF0SNxWLo6uoqGhhW73UhZlo66L29vbJeGGRSzzUGWFlnp2aM6VBzXVF/cl7U+1T1Jq/BZ6DW6/Ke1aw19xbXWUlJCSorK1FeXi56jb15Vegj73Ohc+Nggq1L+QznXs3KFbvGRz7yEaxcuXI/R1nN+tIxdTgceOONNw54vH9tQnZelQeD5xl1ATsxALm5HhkZEdspFAoJHLi6uho1NTUAcm3a+DyocxmInZ2dRX19vSCruOYZyFU5TjwejwQwWbrDunzucTUgEg6HZX3SBjAYDBIIor0G5J77zp070dfXJ7qPr+u6LnWphYFJ1fakqHYv9yX3j/p9DFLxGur/ef9872OPPYaZmRls2
LBhyba8mrU8mOxp4WeWkukslqDg/xfyTQ4Wfvx+5EMH9QOUCy64QB6guvhVKYzkqMKDrPA19V9g38YbHBxEa2sr5ubmBBZWXV0t5AH8LhWqZbfbceihh6KkpARtbW2CLadjSkOCBosawc9kMlKoztqtVColpCkDAwNwOp3weDwYGhrKO4hUJkxmTcLhMBobG9Ha2orS0lJxmmOxGMLhMBKJRF5dFdubpFKpPFbgTCaDeDyOTCaDf/mXf8GNN96IWCyGk046SRzebDaLyspKuN1urF69Gh/72MekhqWmpkYyu8ceeyw+85nPoLa2Ni+CzudCB4fGJw1qtdfigcqjjz6a92zVTC2l2JpZirIozMwViup0qgRVKnSSz5ltH2g4qr3YRkdHUVZWJmP3er0wGAzSZqS6uloikQCEgIG9eB0Oh0AVGxsbxUkkuQ3bJdx1112w2+3YtWsXvvOd78jBzHtdaA6y2SxOPvlkcTxUhctDigcF1z9rWsbHx2E0GrFq1SoxumdmZjAzMyPZS0KFVUOe1yLMNxwO46WXXsLZZ5+Nc845B1u2bMEZZ5whDtC6detw9NFHo7m5GR6PB06nU7J8dKYtFguampqg6zq6u7vh8XiELCWRSODmm29edF0sVcLhMLZt24ZDDz00LzjFee3r68Pg4CCOPPJIOdiz2Sx27tyJ8vLyPFgmUQYmkwmVlZUSQWcfPdUgp/NGY97v94vDye9mEI598GiEUX8xI0iYNZ8zYZEk/WCQhMEu9dkxiEHHwWazwel0CpyVhHOxWAwDAwPo7e1FZ2enOGp0VNlcvqWlBaWlpWIIcVwq6sBisWBiYgJ+vx9+vx/APifXbDbL95Kh0uFwwO12C9xvzZo18h6ub7W9F51VteRDraFlj07V+CLkktl86o3Cmln1OzKZjBAeUYaHh2EwGFBVVZXneLDfMM8rlpXw+jQeafQ2v9dPl+8ZHh7GO++8g/7+fhiN+3p8q6IG5eYL8gE5uPzg4CD6+vrygpBWq1WCQYRkBwKBPGZjGvjMQszNzUnLKjraXGMlJSWIRCJyznAtUS/G43FMT08jEomIblH1Ms9St9uN6upqVFVVyXxzjtW+jxyL6tAu5FAWZl4Wk2IGczFZyjX5HofDgY985CPiKAGQf1VZs2YNNm3atOSx/rWKWkqkOkssg6mqqpJzIhKJYGxsTLLnhNSaTCY0NDTA7XajtLRUeAvUjDvtKp/PJ/vMaDRidHRUmPdVZuqqqippX6NpOY6R8fFxQQdRNxiNRvh8PoyMjEDXddTX14t+VtEaNTU18Hq9SCaTyGazaG9vx549e0RX8ZxmwNjlcuFjH/sYzjjjDLS0tOTxvBSznVX9x9+j0SisVqvMEfUW504NhqqBJz6LrVu3IhAIoKGhAc3NzQfscC5F1ARDoZNceK/FXit00tVAMM87YB9ZmerD/LmdU+BDB/UDk89//vMA8lnv1H9VWSjbtpATwk0xNzeH8fFxHHPMMUI0NDMzI9Eju90Or9eLubk5MZ4Z0Z2cnERlZaX0lWpsbBTDp7AtCA8cOiOMsDND6vf7YbPZpO6JRgrhxmoNGbDPgTAajQJzCgaDQtff0tICq9UqdTnhcBiTk5OS3WN/LEaUC41SkkRkMhncdtttuP3229HU1AS32w23243169cjmUzi4YcfFqNh9erVWLNmDSKRCLxerxB1HHrooXl05qpi4nPmwWAymdD8Xk/LgxW1Nkg1INRnoCoUNeqlRpQLhQdY4ZpTnTrCFWlAqcqK64EMtGynwugqAHR2dsLv9yMajSKVSqGiokIyIIS3aZom9PRNTU1yP6lUCpWVlZKlLCsrg9Vqxfj4uAROenp6kM3matFmZ2dx6623wmAwoKurC3fccYc4GUuR2dlZIVbinHN9MlvOH/UQpLNNgiA6LqlUCqFQSODzHCcj3OxhybZJ8Xgck5OT+PWvfy1QzmQyiZaWFjidTjHcLRYLKioqJBvJsVitVhxzzDGIxWIYGxuDxWLBSy+9JEaJ3+/HaaeddiBLb17Zu3cvxsbGsHnz5ryoN9fN22+/jYmJCWzatAmRSAQ7duzA2NgYenp6sG7dOtmXzGImEglUVFTA5XJB03KsuAMDA8IWSx01NzcnNX7MMHM9mkwm2O12uN1uWWeq48HME3UBM7jqQa4GbOx2uzg+QL7e5r5jhtxqtaKiogJutxs1NTXyOx1FIjpIMEdHkJA41nF6PB5UVFSgvr5edLCK1GBGjq8lk0kkk0kh8GKQjs9c0zRp8cRAotlslrpSlemWe5z3qdZRk7xMNRSnp6cF1UB9yEwiv49OE58hkHOG2L6Be2d8fFzYQWlA8llpmibONx1Awmr5fQx0tba2igEVj8fxzjvvSF0vs+oul0ueKWUpjlEqlUI0GkUkEsGnP/1pqdXj/mMwtry8HCUlJWhoaEAoFEJtbS02bNgg557T6cw7r5kBp9PK9Um4O51Tnm+apomxTHihmgEvKytDRUUFKioqYLPZZI4cDgcaGxsRj8clKK3qOfWsUPdNMVEDQksRNTi0kCzlPYWyYcMGHH744XnQbnX8/P2kk046oOv+NQodIyYQXC6XcBSYTCZMTEzIeco1SF1RW1srHBG6rksvUzqvRE3QrqJO7e7uRnd3twSPaBe43W40NDTI53h2q+UAPGubm5ulfpVjnZ2dFcZ23ss//uM/4pvf/CY8Hg+qqqrg9/vlXgsdzLKyMpx44ok47bTT4Ha7kclksHnzZixfvlyCbjw31Eylai+oLQ6pY5hRJkSf9wxgP64RNfv4ox/9CHNzc/jUpz6Vh1wrFDUxcLBrQL1OsesXS5AVey/nqDBodzAJkQ9aPnRQPwC54IIL5oWqFFuAiy3a+Q4MbixGczOZDNxut9S5qSQJ4XAYNTU1EjkfGhoSg4+NxzOZDFasWCGQs0LYCI0FGhKBQADZbFagZNlsjuo+Fouhu7tbaL/VXnEqxKLwh2QdsVgMk5OTiMViaG1tRUtLi7TIUR2bSCSCZDIph7jqrPL/hEEx8kwWY5JbvPDCC3j55ZfxxBNP4L777sMTTzyB5557DmNjYwiFQnjttdckWv+Zz3xGshJU9HS4aeCR7ZWtLQ5WHnrooaJZUs6z6sByLPw7Xy8mhTUZQL7y4udZy8tnpUIPKeylyJYvVqsVk5OTYlAzC0VYLA10EiUxMkojKZ1OCystgxtutxtdXV3Cdjo6Oip9ey0Wi7Bm3nHHHdB1He+88w6+973vLajoi83pscceK/fHdgxqxp+fo3PKA35ychKZTAbLly8X0hzWkBHau2vXLnR2dgorLOvVSkpK8jJuZF9ltpj1aHRMaKTyNb7e2dmJF154Adu3b0dfXx+CwSAef/xxbN26FX19fTAajbjlllsWW3KLyuTkJDZu3LjfmmHm1GAw4PDDDxfDdGBgAH6/XwJUJG6yWCyIxWJ59YZ8JioagvuLhDChUCgvks09TmOKZQI1NTUS2AAg8EZm/Qk15Z7lZ8vLy4WYjc+Yhr6u62LEARDDTIUgW61WuN1uNDc3Y+3atSgt
LYXb7ZbsI/Uo2dFDoRB6enrQ39+PkZERccBmZ2eFIKaiogI+nw8Gg0H0G50/Glmci4mJCUSjUcTjcVkvlZWVQsSj6kSSUamBF2Zl+ZrqvLI9D4OV7KHIZ02oMbCvJRiz0xwj56u/vx9zc3Pwer1yxrCFVzabFX4BOqqhUAipVEqywwy6EllBQ3JiYgIDAwOw2WyYnp6WdlScJ9X5LhQ1i6pp+1oOTUxMoK+vT4IyJSUlYjwDEPQM9QCNfuo9GuocI19XDeBMJoNIJCKZV2ba+VxVPaRpmtTgVVVVweFwCNyY88bPqsFaOu98ZoWBF953YeCp2DwtJoXn+mKfUe2fAymN0XUdGzZswKZNm2Sv897U4O3fsjD4xCCm2td2ampKUEgMGnFPsq8skAtKcb/E43HE43Fh4i4pKZEgyOzsLLq7u9Hf3y97hJ+32+1YuXKllBokk0kEg0GMjIzkZewBoKmpCQ0NDRLwGBkZwcjICAYGBuSeSCC2YsUKNDQ05BFhqs4Tg50ejwebN2/Gpz/96bw2N0Buj27cuFFQe2qWtJgdSjGbzQiHwxIoVwNc3FOF2WtVDIZcLe6TTz6JmZkZXHXVVXnZ/sIAr7pXlyLqWNWSraWWBaqijqUwgVRMFkNb/KnkYNvMfCjvCVvJFKbRC6OUqqgHQ+FDX0jJ8lrBYBBjY2Noa2tDNBrNy5RSsbBhPSEhzMxoWo4sxOl0YnBwEM3NzdIOYGJiAiUlJXlQNxo9QP5hMjAwAKvVCo/Hg7GxMYHrVVdX5x2yhZEcKg1gn7PF9xLuV1FRgebmZkxNTUnWQNd1oSNnrSTrgWgcsWaWEW+TKdcI3WazSa1SbW0tvF6vUIt7vV4888wzmJ2dxZtvvomqqiq8/fbb0PUcS+1RRx2FZ555Zr+6qxUrVkj2hXVRH4QUg6VROczH4FsoNJA45sLaATV6V1paiq1bt+L444/PIwtSHWC1Bqyvrw/Nzc3iUE5MTMButyOTyUgGhgamzWZDXV2dBCH4fSMjI5iensaKFSuQyWSwd+9eGI25/qF+vx9utxt1dXXYvXu3sPN5PJ68vmaZTAbf+973cNNNN6G9vR0XXHABHnroof3gX8WUNx3+j33sY3jhhRdQVlYmGeHCyKY6bzzgWCPY0tKC4eFhyZzyXwZ1mGVhsIeEC2qNIBuqq4eo2n+NY+JaB4BAICDZZJvNBq/XK/VyfX19mJ6eloza+5Gjjz56v6iqwWDAjh074HK5sH79etkPJpMJ9fX1AsOPRqNoa2sTgiw6GsFgEPX19dLiZdOmTdi2bZvsY7Zvicfjcs9qFkfXdWGupXND2C33IHUBHU1mqhmx13VddIXKiktDl06WzWaT7yasnQEJOhVGY47pOZvNEdExgMJ9w8wjoWms1TQajVLCwDGr5EBquyAGxFSoJw0yo9GIZcuWyXnCdUzWaWbuqA9p2JOwiHX5arsIkvGw/EPNJLO3Ke/R5XJJeyQV4sssKJ8DAwbUzVzrbrdbMotkamaWnPfINmV0RrLZrPQOVh1DGqoWiwXLli2D0WjEzp07i65t1Ynk+TQyMoLZ2Vkce+yxACDBTlXfMtBhNFeizGYAACAASURBVBrR09Mje2BqagpmsxmHHnootm3bJkE7ZrG5L+ggqsyoDJjwO+kAqLqHLZn4/Jjl4rVI6MJgtNPpFIQTHVdepzAIN58s1dlbzMktnPeD+Q5+D9+/YcMGJJNJdHZ25rFc67qOk08+GVu3bl3ydf+ahPuObLaEs0ciEQCQQBb1ptVqFTJLQmkjkYiUUtHBra2tzSstACC1pezXy88vW7YsDzFBR5POUjabldaG1EmJREICjrSXVIdxeHgYlZWV6O3txR/+8AeccMIJWLVqFb72ta/hnnvuwfj4uDCmr1u3DvX19RL8K7b25uZynCnkxuBZQt2j1pUC+xxGk8kk+8zj8UhJGxEdJI9Sg6ZqmzlCfY8++mjU1NRg3bp1wv9CvU2H+0DWvupEFtqAxfbffDaiat8U+hrqNQqvOV+m9k8tHzqoBymE9KqLnKIa98U2z3xRQxUqQAhToRAyuGXLFkSjUTmY0uk0rFarQHtUCvCRkRF4vV7JCkxNTSEcDqOlpQVjY2NoaWlBNBpFY2Oj1CwYDLkaBrvdjmQyKf3+SGxhMBjkb8FgUGAmZM/l4clDmJ9RM35UCFz83LhUtoTc9vf3I51OSzsdZupolAD5/ZrUSBoPck3LwXEffPBBtLa2iqM+NDSEyy+/HD/72c8wPDwMk8mEpqYmvPjii9IUnoZqJpPBunXrpDaRDHkq3PX9yMMPP4zzzjtvv+AGlar6OtddsbWkOqfzXUfTckx/bPXCgASNdxXmw+xGKpVCU1OTOC3vvvuuOKoej0fWF9fl3NwcRkdHJVPe1NQkBjPf39fXB6vVitbWVrk/9lilMUaGaCDnnMViMdTU1CAcDuOWW27BTTfdhHQ6je985zu4+eabxUnlXlroIPj4xz+OV155BTabTZqI06lR67coNOKJKKirq0M8Hoff78/LxGSzWezduxfHHHOMENWwnyEh9awNV2uOaVwAyDM01Oem1qNlMhkMDg5Ktqaurg7d3d15jJ8HK6pzyrX2+uuvY/Xq1eJwapqGrq4uNDQ0iCNElAWDZoyyv/zyy1i3bh26u7vR2toqMFbCQc1mM3p7e6W9i0pwxAwh753rf3Y216KI/ZZVZkuuMV3P9bqjflDrXBnQU4M4dEqYeQUgmTHCwaijDAaDZBiotx0OhwQrOFbe79DQkLyXgQnWtqpEOSQOUftrEl5K54qkW6wJLS8vRzKZzAtmAJCew2Rsp36mc2a32/NYQJlZpZOo67q0eSGJGssC2KpidnYW0Wg0L4jGNcsgAFu20DFlXWEymRSn1uFwyBri/RKtMTc3h6GhIclskHGcMDyTyZTnrC/kNHFN0Un1+/3yWbXMgXNjsVgwOjoqz8NgMKChoQF79uxBW1ub7FU68mrWkkgklZipMHBbWKvLmjw1kMLzloYue6uqzinnnIFZGsL8TDabxeDgIGpraxfUi8XQPPO9byminlcHmuXkPHGP8vmSTwMAtm/fntcz829ViPJg0IOt2ADkOT+1tbUA9jn1kUhEWMwzmYy0J7Tb7ZK8YHCfmVRmBNl2i6Ut3N/Dw8OiR4B95UI1NTVSVkCyQIPBIKUUBoNBWtMBOXuYLWXi8Ti2b9+OU089FclkEueeey7uuusutLa2Yt26dcK1wrW+kBx11FF46qmnBE6s6kSeJ6pNxfVFRFVpaamQ+NEGUgOEDJBS76vr80c/+hHuvPNOHHfccRgYGJBApFqjfiBSaPvN9/dCKXQ6VZ+k2P8X+o6lBqE+SPnQQT0IoXN6MMoWWPhB0wAplklT4V7pdBoul0vIFDwej7ChMoNA9lCSN5AYZGJiAhaLBXv27MGaNWswOTkJh8MhDsjQ0JAwogWDQSGpIBxpYmICRqMRTU1N8P//7H15eFvllf57ryRbtixZlvd9yb4HAgSylKXQACllCdCWAmVt6bSF39NSdpICZQ0UCi0t0FK
6AWWg0JkOpWxhKCE0hKxO4iWO432VbFmSbdmS7u8P9T35dCM5NkunneE8D4+DLd3lW853znvec05Pj9yTuWWMlJWUlKC9vT0hDwFIbL7N/+emVTdLd3c3cnNzpWdWZ2enGHGRSLxXZTgclmqxvA4NfbOhcvrpp0vOBXN3LRYLXn75ZbhcLlx11VVwOBxYuXIl9uzZg0gkgtNOOw19fX3Izc2VwiSM6rEfluqEf1QxR+LVd1Lfx6xcVDHTes1UFsMwcPzxx6OhoQGRSATl5eVIT08XCg8deipdrsmhoSHk5eXBMAzs2LEDIyMjgtDrui45gXl5edLWh/ktRFNbWlpgsVhQWFiI5ubmhIhBbW0t8vPzoetxCimjL1/4whfkEJw7dy5qa2vR1dUlc3LXXXfh2muvxYEDB3DDDTfg7rvvTjm+yShoK1aswPDwMHbs2CHGqNrYnPtczeXiQd/f34+CggLk5OSgsbERACQaZBjxqokOh0PyBp1OpxRGMYx4Dg/3o4pA81npbNAApZHHarAs1sJoF1sIEBhIVlBksqIe4IODg9i9e7fkENJxYYSKdO5gMCjsB7/fL/nGe/fuFVooHUuCSStWrEB9fb3k4tGI4T5j1Ed1ztS1qes6ioqKJHLPZ+e6Z7RBZSKoIBYdLTqgTC2g88L7qFE8RmzVnFGuUepvw4i3RGEEkA4Un1+l/tKg45gQ1FGBPlUf0IEjbY8526TD04D1er2H5LYywkFqnqZpcDgcUsjH7XZLpV8CH9SVfHbOHx1pTdOkpRT3BveCut90XZc1QMYM35UtLBhFV+nCLKQFICFPmftTzTmj/mMUOZXwubxeLxoaGoRaSPCL+zcvLw+jo6MoKytDX1+fjCeBYBrp7OmozglFBTCpX9RznntifHxcQCs6XNTF6jlMPcW/0zlhTi/1Mo1o5lGzBdzhjE11jyYT9QziWKYC1dWx/jDOKe9jfhbVVjjiiCMQDAZRV1eHaDSKVatW4S9/+cuU7vWvIJy7wcFBmXfqCMMwJF2LY8bcSX4WgOg4Vs2lnqATRWBMrWbPlllutxstLS2yPghg5+TkoKKiQp6L9hodUzp8qo0IAF1dXdK6TNd1ASzJ/hsdHcVZZ50l11TB3FSinls1NTVob2+XAo/8vUqVBg468txfQ0NDyM/PlxxYBigIngKQ2hKqzclrDQ0N4YorrsDjjz+OK664Ag8++OCU8rlV4Zmv2oCHE/P+pFB3qeCp+XuHkx/84Ae45ZZbpvYSH1I+7YM6RfnSl76U4CikQhuSKWJ1gaVS1BMpcMMwxCBmBIYGF1FtUrU0LV75sq2tDSUlJULV6O7uRjgclpxA5jEUFRUhEAigvLwc0WgUvb29ophCoRD6+/uluAX7ZxUWFkoCe2FhIXbs2CG5rOeee64YC88//7wg4IzOcTzMCI+6yWlI7dq1CwsXLhSDu7W1VQxCHqLsOcfIplrog6j9scceK4YgaW1ULHa7HWlpaQiFQli+fDnWrl2L0dFRTJ8+XRS22qyZxoNqOP7ud7/7WNDbCy64ICHv1DxWVC6cZxWVU52Z/Px8XHfddZL3BRx0JDo7O7Fx40Zomoauri709vYmUHxpiBI9Jeo+NjaG2tpaQRk5Vyx+0NXVlVBohXl2RPINw8CSJUuwf/9+KZk/f/58bNu2DbquY/bs2fjggw/gcDgkv7empgbAwYiiruuora0Vo5Ztk6677jpomobe3l5s2LAh5fia95ga6SEST1qiiiqaDTIaB4ZhSD/Mtra2hNxtp9OJc845B6FQSGhWakVkUssXLFiQUEFbjV7SyKeRxjVMR4lzTpbD+++/j6amJlit1g8dSdU0DTfccAOi0XiF1J6eHhxzzDHyXOFwGNu3bxf2BgDJGyblrLKyUioXk4UxNjaG5cuXw+v1oqCgQOZifHwcL730kuxVtcUAo6IAZFxVFJ05+KozxPVOJ5NG+vj4uBhMjNjxfRn94vf4e4IrAIRNweiDpmloa2uTd6Ce5RxHIhEpcqVpGsrLy2W++B7Mq2XFa5XNQEedTqvqqPMcoBM+NDQEh8OB9vZ2AQt6e3sTKoxTX5jPLEaZgYM5unR6SMfjeUA9q9Y8YOsdXofrk89KsIngVGtrqxi3LObDe/EeXNOcV44NwSP2S+Za4Rrs6elBOBzG8PAwGhoakvZBJUDQ0dGBhoYGnHTSScjPzxcDm5F/9t6NRqPS09Xn88m6zcrKgt/vh8fjEeN1ZGQEe/bsEaAXQEJhKa7PSCQiFGlWyA6HwxIlItWQLApd1zEwMCD7gnTm4eHhBJaPWsSFOrK5uRmZmZmw2+245JJLpE1IMpkM/dcMmE5GzFGryYiZPZTsGcwSCASwd+9evPzyy5O+z7+CaJomABfZcVybamBCZfFwnVNPxGLxOg9ZWVkAINRV1kqgrmLhvubmZum9TDCZ5yILeM2ePVt0Wnd3t9SN0HVd2EJqEadwOCyFlDIyMjB37lyxX/n8CxcuxJo1a+T6N9544yHpJhMJn3F8fByvvvqq/L9qU1G/mq9L3WW1WuHxeNDZ2Sn2Ms9wdiSgTlPZNSrAuG7dOpSVlWHTpk3YsWNHSvbkRO/xYQNhFDXCrjqlHyUi+lEc1L/f99M+qB+3qLReYGIqCQ9nVVKhiOrn1INdFd6TqDEdAB5spEoFg0EAkFwoormGEa/YNjQ0JEZrRkaGGEhAvPVHc3MzAEhlXkZmioqKhObJaCmNZtKyiLapRSWi0ShOPfVUUaqMzqlIEDe4mvjNv3m9XjE6Ojs7YbPZMG3aNDnweVCz2qRawZOHJ6OumqaJU0DDl046IxcOhwMbN25EVVWVGAJ0fjl3RPp4f5W69XGI6qib6RjmCAsN6xkzZuCHP/whfvzjH+Puu+/Gww8/jLVr10LXdbz++ut44YUXpH9YWloaZs2ahXPPPVcOE0b/+E7AwVxNovAqrYZKkxEvwzAwODgo+bkcP5axp/Mbi8XQ09MjeWMLFy6U/FRSjRg5KysrE+eUa4l7g1G8AwcOCFWZVMdly5YJaGCel2QoPKOjBFHUAhzmeVFBKRYuIhgUicT7xTHfEIjn8bzzzjtiRHMsiYJz77FoGZ+R60ylrPMZ2NOQTjUjJ4yOEMC58MIL0dTU9KHWICUWi6Gjo0Ny7YD4HmATdQI10Wi8cA4dM+bAdnd3IxgMorOzU6iI7777LqLRKLxer9yHVYupB5inREol1yWNHOYyFRYWimGj5p2q+1J17Ahs0Yhj3iPpaSw+w6JYAMR4oy5hASj2ai4sLJS1SrCI65SFsqg3Ozs70dnZmVAxm/rb6XQiJydH2tBwz4dCIYlIq61KuAbU3HzVaGULMKYicO54xnB+1X3A91Vb5PBzKk2N+5+RULWSNs8m/p3jwfYpDQ0NGBsbExCKe47vRf02MjIiQMPQ0FBCDiYA6aOtGnFcZ6RYJxPuG+bJnnbaaZJvBsT3YkFBgTinwWBQisfYbDbk5+fLmNGQVSM+vAfpmAQYOM/qmuJ1OK68JgEY4CDwoeYdqyCWekaoObXcl2qEZ3x8HOeccw
4uvPDCQwxzzt9kDGIVuJus8PkmI2Y2Vaq5TBYBcjqdOOqoo/5XVvRVAUIVwOPZRVCMYA6p73QAqWOoAwmqxGIxiag6HA7MmjVLdFBubq6sKRZOI01/1qxZsr7a29vR1tYGwzCkPgdrJFCfjoyMoK6uTqqQq8LAh6ZpkvpDhsLo6OhhWWqqDmPE0W63J+jkZJ81M8z4O+63goICsW1VFiOZE9ynZjZELBbDunXrEI1GsWjRokOq/6Z6ByCxCJL5/J+qcCySAZOq/DMGCT+NoE5Szj///KRKe6JNo1JX1fejwkj2eTWUr95P1+NFjHp7e3HCCSegv78fubm50vqCmzkzM1NK5be2tqK4uFg2J9EwGs8WS7wcPg3FqqoqiRa43W5pi8CS4YZhoL6+HhZLvB/k/v37xUhsbm6WCofnnXfeIe+oaRr27t0rffB4sPPdzHmTACTHgs4yr2OxWKSwU1NT0yEHHyMcNptNcpQ0TcPcuXMTHEwaASyywEgNq8Vu2bIFCxcuFMeCRgKdLSpqRhF++ctffmyb/OKLLxZlQgppaWkpTjjhBGkrRGfuxRdfxNDQEAoLC5GbmwuHw4H/+I//wKWXXor29nbcc889WLBgAdLS0nDjjTeKU/PBBx9g8+bN8Hg84kTRGVCppKSAx2Ix7NmzB16vV4z+WCwmxQu6urqk/cHg4CD8fj+Ki4sFpWXxG0a18vPzMWfOHOzcuRM2mw3Tp0/Hjh07JBKnGhjJnM3a2lpYLBZ0dHRIi4fvf//7CIfDmDVrFtauXSvOcyrFnGz/bty4MeFQS4U40hikoxGJRJCXlwer1Yp9+/YBgBTFueCCC2TMuOYYWWTUy263S443nRN1bXOP0KEngs15SEtLwyWXXIKCggIMDw/j6quvRiQSwWuvvTbl9adpGi688EK0tLRg+fLl8gyGYWDz5s0S6eK+yMvLk3YomhbPb2YEjxEe5kvZ7XbMmTMHhmEIkk+n8rXXXpPoGX+q7ADgYE5lRkYGZs6ciZycHHEkgsGgROPpFJExQTaI1WrF5ZdfjieeeCLBeaUxQ4BNjdQahiEUSxZmIjBAI4bGVW1trTggrAWQmZkp70IKLp+REUu1yby69mKxGLxeb8LfKioq4HQ6ZRwYqaWBaBgG+vr6BHwDDubRstInDTDVIfH5fKLH09LSJMeKAKNq8LG4EcECRkRpTPJcUXUz55DRBjWXmc/Kip6apsHv98s6IIOCPbhpcFksFsnzJkjGNdzY2JgQQeV7s5ZDVlaWgCzUZT6fT/Zubm6uMEAASI0HRru5Nhn1dTqd6Ovrw5YtWwQ0YC9vOojc07wfQT4gbqRbrVa4XC4MDw8jIyNDDFuOC9NZQqEQHA5HguPO6scEijh2ZBowSr1+/XqUl5fj6aefTrr3uR6Sncv8zFScUzXyP1lRWULJrkfhZ1QHgWt67dq1k77fP7toWrxgZWZmpjAWGKEEIIEHtTibpmkCFPMawWAQ4XAYTqdT0sJisRgKCwuFUZKeno59+/ZhdHRUgKnBwUHRXcccc4wwGfx+vxTpI2BktVql2wP3bV1dXUJPaeBgUbAZM2ZA13XpSsHUHZ4lsVgMjzzySMK5SKH9lYzxpGlxuu6mTZtkj/L+BNyTrS/uz7S0NLjdbvT19UlBUv6e4BPHn+xAgqkch1tvvRU5OTkYHh7Giy++OOko6oeJmvK91aJ/h7uOeq6Yg0eqcP/GYrF/WAT1Uwd1EpLKOVXFrHhTUXzV/J1koi4M87+7urpQVVUl0SJGAaxWq/QYDIfDQtno6upCfn4+YrEY9u/fj6GhoYRFS74/DbDMzEwMDQ3hqKOOgt/vR3V1NbxeL7KzsxOKJ9XX18NutyMYDEpz6MbGRlEwq1evTqAJqZskFoth586dki9G40yNTlHRRCIRdHV1oaSkJGEsaCzk5uaKodza2ip/U+mPrPZLhH727NmiPGj0ZGRkSDEAjqeux4ufUBGpjoB62NJYjkajePLJJz82B7W/vz8hklpXV4dp06ahv79f2im8/fbbeO6551BRUSEtYDIyMnDddddh3759eOKJJ8Sx5LNfeumlmDlzJiwWC1pbW/Hiiy+itLQUvb29aG9vT4jga9rBojR0Rnbs2CG5bHSQ7Xa75J5VV1dLX9C0tDTU1NSgqakJuq4LkDE+Pi45mjT4Z86ciYaGBonAn3XWWQmGbbL9ouvxirotLS0wDEPai9x0002IRqOYMWMGbrvttpR7N5nTS9m7d29CYR0VaFKdVuCgM8noltvtRkZGhlT4JYXzyiuvlPVBmj0dj/7+foRCIekfSkeWa433pVNEinBJSQm+9KUviYGanp6O++67D9u2bQNw0CH47//+7ymtP03T8JnPfAbz58+XKpAjIyPYv3+/RLb43LNmzRLKIJkNzF0nPYwRyGOPPRa9vb1obW3FscceKwYSD9TW1lb09fVJRJTzxHXJNalpGkpLSzFjxoyEZ+Y40RncvHmzOM7MW+d+V40TrgU110/XdemLTIOPEU2OK5+bVGtGGtXPLFq0CJmZmdLWgVXJeW+OJb/P9irm9cZnGhsbQ1VVlVyDVclZIVvT4j1me3t7YbHE2+mo7Uioy/i+o6Oj0l5hcHBQnoPfsVgskrrBvaS2oOFz8O+M5ABxh0uNHPBdaczRaQYAt9uNYDAIi8WCwcFBcXz57qTVUT/T6SKgyPWujtv+/fsPcVCBgz1f3W63OIqsrTA8PIySkhKh+8Zi8fxd5qJyzzc1NSEzMxMDAwNSoIq5fYZhYMuWLQgGgwLo8WxRo5/MZeb+4E+Px4NQKCTMFUbYqUuYP8yKynRYCHhxvZPNQYdWLVSTl5eHn/zkJxgfH8eLL74oemYiI1Udx6k4qNxjkzGYKarjMRnhM/E/6tpbb711Ss/5zyrc69SLXBsEzHlm8LPcw3Rih4eHpXVWJBIRWnBaWhrKysoAQFK4mOJFcDoaPVi8kmtt3rx5OHDgQAKbwmKxoKioSJzo8fFxCWSojqkKCB5//PFobm6W56+oqEB3dzecTieuv/566LqOBQsW4MYbb0zqoFJSrZNYLIZXX31V0muS5YJyvavOI4G5/Px8GIaBtrY2RCLxivwEFPkOZC9xDLnnuN8eeughRCIRPP3005KGl8pWTAWIpxL183yHyTqlyUR18lVgWL3mTTfdNKlnm+DenzqoH4ecf/75AA4WvjBLKsN5okWSKoI60X2sVmtCNV4e7szT4XNYLBZ0d3ejtbUVFRUVQjXYv39/Qj4S0XEakDk5OQAgzcKPOOIIocCxAmVvb6/cw+fzob29HQ6HQ4yJtLQ0nHfeeQn9o9TDQt0UpJ5arVapnkqZ6HDkIUeHiQqRv2PJc743lYTb7Raj1+l0Si6Yw+GQ+SBVmI6hz+dDW1sbKisrE6LOsVhM0Dg1svTEE098rDSJn/70p3jqqafwrW99C6+88gruuOMOiTjddNNN8v5f+cpXMHv2bASDQbz//vv485//DMMwpNiIYRi46qqr8P7772PNmjVysGVmZqKvrw/PPPMMvF4vOjs7kZubK9QzI
L5OAoEAIpEItm/fjoGBARkLUoaYc8Y8VK/XC7vdjuLiYmkaXlxcjIqKCtTW1iYU1FHbTpD++sUvfjHhoFBzXSnqPmlqasLIyIhU4rRYLFi7dq0YoI8++ughh9LhACJN01BXVwefz5dQpVmNPJmdSABC/XG5XMjMzJRK0dFoFBdeeGGCQc9+qJ2dnYhGo6irq8OKFSsSqE+kcjJaE4lEcMopp2DlypVS0fP555/Hq6++ekglVhVoeOutt6a09jRNkzWmaXH6Fg1y0lUHBgZQU1OD4eFhDA0NSS9gr9cr+kKNArO9DOe0ubkZJ554IsLhsLQQMgwDnZ2dCc4BjRuKYRgoKSmR9i6TEdLL/vrXv4oDRvrYRNdgpM1ms8HtdsuhzfUTCoUAQHpaMt+LDl8sFhMWBquq0klpaGhAOByWtAwzPY2ARFpaGrKzs4VSn5aWhoKCAnGWi4qKxEnjWDc2NkqUkWuVa5uMB+5DOkWk4DICSgeHkUaOh5oHSl2vpmjw+XldRhvV+eNeJwConhNer1ccNxX04J4nKJmRkYGurq5DIiEcN5vNhtraWlx66aVy7YGBAezatQsnnHCCOKXcxy6XSyL9jJzHYvGcPc4vaZFk3Xi9XgSDQZSVlWF8fFwKRREs3rt3r9yX0XM+C9cCQVVd1+Xd2CNZBaVIh2YF+2AwCKfTKWtbLVjF81CdH+bqq+kq+fn5ePjhh2EYBl544YVDopET7afJGMIUzs1EhZRUmcznVLtCBcSBgzYE2UL/qKIun6RoWpzZRvYY7RSVAs6cU0ZMgYP9cQkUqZXRs7OzBdxmYINAHh3bgoICjI2NSZFKgk78Xnp6OjweDzwej+ibXbt2JdRyUM9w2q6f+cxnpDPD4OCgVMJnEadYLIbbb78d4+PjqKmpwa5du/DCCy/IdQh20A5KBWZoWryFlNruBTiYhmcGTbjuqAM0TUNBQQF8Ph+8Xq8wMHRdl+rzaWlp8Hq9qKmpERCNazE9PR3p6el4+OGHMT4+jscffzwhKKOu8w8bNTXrPtWGNu8jM7hu3jP8txlQUtkU/ygH9dMqvhPI+eefnxDBUEWN+KX6WypRc7mS/S3Z76urq9HQ0CCHGlHVcDgMj8cjPfJ4XX4GAAYGBhCJRKQvFKlRauXIaDQqZcg9Hg8OHDiA8vJyuFwucTAyMzPhdrvR09OD3NxcoQezUiuLdqQaJ1VisRg++9nPora2VopOqApNHSe1uAI3CK8ZiUTQ29srLWdmzpyJrq4uiehEIhFxsCwWC1wul1ChWQSDOaZUjmNjYwgGg1KCvb+/H9XV1dJqgc+j0vQmayhPRWbNmoWZM2fizTffxNKlS+Hz+cRQdLvd+O53v4s9e/Zg586deOaZZ2S86ASlp6dj3bp1aG1txYEDB7BlyxbU1dXhrrvukop0/f39iEQi8Hg86OjokMOMBx2V+Pbt26XaLCNYDodDFDYrB7LHWl5eXkJEsaSkBPv27YPVasXMmTPR3t4uTjIAodUtWLAgYSzN9HiK+rtp06Zh165dKC4uFvrwQw89hG9+85sYHh7G2rVrcc8998h7MUcnFXWNMnv2bGzdulXWmWrs8Rl48HLseTD19/fD4XCgoKBAaJXPPPMMrrjiCgAHHU/2lSSdT80ZUp3qyy67DIWFhUIx37BhA5566ikxdM26SDXeDMPAypUr8de//nXyiw8Ho1E7duyQPDw6waOjoygvL5f8SCLwpJmxTymfYfbs2Whra0NBQQE8Ho9UgSaDQx13FuOiLlR/Wq1WFBcXC8A0lXdJT0/HySefLJGFpqYmBAIBSTtg3z/1sCdYMD4+ju7ublmzNBJzcnIkIjk2NoaOjg7RC5T9+/cnRGhn/H5RhQAAIABJREFUzZolhUMikYi0Verp6RGnDjh4PrCoCOd48eLF8kzTp0+X3El+j06tGk3i2AEQ6i3fjxV6+W6cF13XxQGKxWJilHHPApDf8fsUlZ2gMnvUtcq9GggEElIuGFFn9I/vop7Dfr9f3oN/47wCwKpVq2C321FbWyvPlJ2djVAohNNOO02q0cdi8RZZo6OjCIVCCS1h2LOYEXjmWTPiyarTM2fOlF6wgUAA/f396OzsRGVlJebOnYvGxsYESrP6zKSdq2vOYrGIo8DxstlsQncm5ZhnNquEk+nCInlqZJVOsMowIpByxRVX4Fe/+hUuvPBC/OpXv5qQhquO9VScUxU4O5yk0vnJnoPXVA1q1ahOFi37VxemTDBizr1mPv9pI6pAhwpOcN8PDw+L7hsZGUFmZiY8Hg+ys7PhdruFscd9npWVJeCy2+2Gx+MRXbZ//354vV5hVnFds+aEw+HAnDlzUFJSIs8HxPucd3Z2Ij09HS0tLZg/fz78fj8efvhhfO1rX0N9fT0WL16MZ599NoEBYmaXJBPDMFBcXIza2lqxi6lfKOb1rDJkuNfy8vKkECE/U1RUBADYt28f8vLyhGnDfaxGZTs6OpCXl4ezzz4bf/rTnxLWLx3Ww/kOZlE/q2kHu1fwuvy9+m/1b+oYqftODRpwbKZCz/+45NMIago577zzABzauzOZJCu8ov483OfN3zV/j2j3yMgIGhsbcfTRR8tnXS4XhoaGpAXK4OAg+vr6kJeXJwnzzc3N6Ovrk0R6l8slOUJEgeiwxGLxyrk0HKZPny6HKsvqZ2RkwOv14s4778SBAwdw8803IzMzE6eeemqCkUI5HMVydHQUmzZtEnodlajZIU0ljC4BcaXBPD7mp6q0VRYMoUN75JFHSlSChhejNzabDcFgENu3b8fixYuFzkeHXt34Y2NjePzxxz/WCCoAPPDAAwiHw9i3bx++8Y1voKKiApqm4Z577hEKn4pOOhwOLFy4EHPmzMHbb7+dkJNGg+jSSy/FokWLEI1G8ac//QlZWVmoq6uDzWbDyy+/jHnz5klrifHxcQSDQWzdulXWB9eIzWaD1+tFOBxGSUmJIKsZGRnweDwSnWep98HBQeTm5iI7Oxv79+9HaWkp7HY7du3aJXmEp556qrz74fK7zbJnzx4A8WJfg4ODyMrKwq233gpN0zB9+nTcc889clgkA5ZS5TwNDAygvr5ejEF1f6sOjWp00qDXNE16Du7duxcLFy7E0UcfLbmRo6Oj6OrqwsjIiPRcc7vdOPLII3HmmWfKobN+/XoMDQ3hwIED8lx0Ori2uS7Vv5tlslRfTYtX8R0cHMSOHTvk0NW0eIR8+vTpkvNZXFwMXdfR19eHWCyGvr4+cZyj0SiWLVuG7du3o7q6WqpEdnd3o7y8HH/605/whS98Ab29vXLYc2+RGk5hxD43N1fypsyO0YcR6lxWXqVxr1ZEVZ19zjvHNz09HVVVVWIwsjBULBYT50o1QNQ1yLZNpA9zDukQc8xp7KSlpWH27NnQNE2qvTLyGQ6HYRgGWltbEwq4qc63+s6q0aLuNeZa8d34viz2A0BYJKyYrhpiatE9TdOEVqjrulAHVUCHoBgjMipVjlFF3qevr++Q+SaYQ0bM5z73Obn+Aw88gMsuu0wqtLe2tgojiG156AiS6ksq
I9Mb1L6yjDINDg5iaGgI1dXVAuwahiGpNRUVFfB6vZJ+0dDQIEY9n1cFMjh+ZOjEYjGJ1Ko0OwoNfoK6nAuuL16H4A+pm9TbPN/otKSlpeGJJ56AzWZDZ2cnXnvttQmjmJN1TvnsE7HGpnpNdR8CiUChWdQ1/q8eRdU0TWwzgpoEc1QdQ51A3ai2MAMggIXqrIVCIUmHslqtKC0tlf7E/f39ACD7glRZrs3Kyko0NjbKPqEzw/sRyFu5cmVCqoFZmpubEQ6HYbHEK32zBgqr83s8Hjz99NPC2Jrq2GmahjfffFMAdvN5b/43z1X+LCgogN/vlyrhZWVlyMrKkvorpaWloqM5jmqevMPhwIMPPoixsTFs2bIFu3fvnjLYM9H7qdHkVHstlYNqBoUO9zyfUnz/B4W0Xkoq+sBknFf191SsVAyqUZCK683FzcOSJeyPPvpoZGVlIRQKSZEEKhRSgWOxGPbt2yfPODg4KAaAph3s9aTmaJGyEIvFMDg4CLfbDQCorKyUzcbiLN/85jcFZausrMQNN9xwiPGQzOHmO5p/39LSgo6ODgCQg1yNBFCZAonKhEYcjQwaOy6XC4FAQAxmtYWE3W6Hw+HAvHnzUFxcLEZVVlaWVDqmwcPCGyy+pD43lcvo6Cgee+yxj91Bveiii2C1WvG1r30NlZWVMhY7d+7E73//e2RnZ+Ooo47CrFmzUFpairvuugujo6MyVkTVr7nmGrz33ntYsWKFFFpgQY3+/n48++yzCAaDEolnwahAIIDNmzfD5/OJscooAh2S3NxcGIYhUevc3FxpW5ObmwubzSbOV3FxsVSZLSgoQG1trVD9zj777IQCJ6kcRiD5frNYLNixYwd0Pd52g47U5ZdfLk7qvffem/JwMyt21agaGhrCnj17JNKpOioqcg8cpA4xV7SoqAhjY2PIzMxEe3s7Lr74YqnuahjxYjaBQADXXHMNCgoKBMX+29/+hh//+McJAAPXOu9tjhyYhc/JcZ2Kg/rlL38ZbW1toptY+KeiogKjo6MYGhpCSUkJotEoBgYGpI0M9240GsXKlSvR09ODgYEBTJ8+PQGZ9Xq9ksO0cOFCWCwWoTbqerylBiPrrGxbXV2d8J78bDJnfCqijpPNZkN/fz+Ghoawe/duydPkO5l1D/cY9YXNZpNet9nZ2SgvLwcQBzq4NzVNEwBR1fuVlZXyPqq+Y4GSefPmCT05Pz8foVAILpdLnNloNIrW1laJsPD8YASElWaZb6k62zybyITgmiLNl+wWUrxJxePYkzHByAypb6zyy+vxzFIj7NTZmhbPa+d7syCQWqxFPTsYuT777LOllZAqDzzwAM4++2zk5+cLPbGvrw8zZ85MmHdScxm9JABCsIIAEgsy+Xw+zJgxQ3LpY7GYtHDzer2oqqoS54FnDysRE0jg/Und5efMRiufkc/C+VfBDv7bZjvYO1fVDwTMmCunRqc5rxaLBQsXLsS1114Lm82Gxx9/fMIIZKqz3fyZyRjfvFYqfa/+zazn1LNiIqfjf4ODmpOTg5GRkYT5W7BggeSRMxqvstG4LlSwn+vGDDKxDzkBRwI4LPbHomKM/NNJVYEWOmY2mw0ejwcLFiyA2+2e0DbStHj+e1dXFwCgp6dH2tccddRR+NznPgdd1+H3+/GXv/xFcsGnIoZh4M0335SAj9oXVf2prjHuy7GxMWRnZye0bDKMOHWfgKrH40mIHJPurBar0nUdjzzyCMbHx7Ft2zbs2LEj6ZqeihCcU/fjRNFT83uqdOOpyId1UqfioH7aZsYkZucUSCz1bI6OJnO01O+l+n/zd1Qjxfy9np4e+Z3dbsfcuXMldykSiSAnJwe6rgu/nsVieE+/3y8UJOZNMu+JTgydO7WaIJsqDw4OYtu2bXLgud1uhEIhPProo7BYLBgYGMD+/ftxwQUXyKZMNQYTSWVlJZYtWybOoDl6qjoQ6j2IIKqKcmRkBH6/Hy6XCzNnzpScW16XNF72cqRRND4+LgYWnXiHw4Genp6E/B6+2+EAio8qv/nNb1BZWYmf//znkvel6zoqKiowPj6OwcFBvPnmm/jpT3+KtWvXwmKx4I477sD111+PRYsWiSH50EMPYevWrXjooYfwzjvviGE3OjqK999/XyIFREH5H9eUWlCLzoNqINE5dbvdEjnjvPl8PgQCAaFLBwIBOJ1O9Pb2JrRLUh1Hju1knVPV0I7FYmhsbER2dja6u7vxs5/9DECcGrV06dKUY21eq1zvQDwa5HA4hIbLv6t6QdUJKqrv9XqxdetWWCwWVFZWor6+HkB8L4fDYVxyySW4/fbbJdfn9ttvx5e//GX8+Mc/TngnPqNqYPA5Ur3Phzl4KK2trQmHXzQaRU1NjVRlLiwsBHAw4saUAd6zvLwcXV1dCAQCKC0tlcbthmHI79mOwDAM+Hy+hDFnOwRN01BTUyPFKlShAaa+42QMZ7OoOoYMgcrKSixduhSzZ89GZmamFC5Sr61SCZkewOgi0ypoSBYVFaG0tFT6v7KtjdPpFH3T3NyM5uZmcSoBiOPLaKvVahV2A1tJcJx6e3vFeFSjlDREOFaVlZUJ48kopZqzCkAMVD6Hem6odEGuU4JY/AxbtRAQJG2YxjPvpTr/vB9rBkSjUQFzgIP7lMDtOeecIyBBsjklc4j6iYAaUz3odOq6LsWQ1Mq9AwMDkoOqaRp8Ph9yc3MTqLlq+6FoNIrOzk5YLBYBEJxOJwzDSGgJwjlj1COZwczqv5x3MoBcLhfcbjdcLpfk56p1EtR9wHfnGKtALvU8Ze/evfjb3/6GaDSKr3/96wlrPNWemUimcv5PtG/N45Lse6kc2/9NwqKO6jnAc4iF2WKxg5XBubaAgyAGz3BS+mn3ZGRkIBAISIX5aDQqkVKbzYacnBx4PB6UlZUJ845rSnVsudcdDgdmzJghLQmTCeeNdiuFLL+RkRFpLxiNRrFkyRJh2kxVNE3DSSedJM6zmVFitu2BRP1OgIwgck9Pj+hgM12YQgBSBbNZjPKoo45KanNM5j1S+Q/mz3CdqI6rek8G31Rw/Z9JPo2gKrJmzZqkVJSJFHSycLrqxKgHsCqpKC/m6CkAaSkTCAREKbS3t6OoqEgMfCBuKB44cAAzZsxAKBRCIBBAS0tLQuSQCdvsqZednY20tDRB6AsKChCJRODz+ZCfny9FM5xOJxwOh7St8fl8mDlzJkZHR7Fu3TpReosWLcJ3v/vdw9KYUzkZ/P0HH3wgeT+ki5ijVhTVCGd0gs6UYRhSvMdisaCpqUkMSBpDmqbhq1/9qtyD3yXSTyOrubkZTqcTVVVVCQAAHWoWm/i45dJLL8WMGTNw8sknJ0SQbrzxRgEdbr31Vjz55JNYtmwZfvnLXx6Sg5aeno4lS5bgmGOOQUFBAfbv34/q6mrYbDb8/ve/FwOtqalJ2lYAwIYNGzA0NCRj6Xa7pYUB81EYgdf1eGVIn88n65KHDOmZQ0ND0oqCUSm3242TTz75ECfDvEZSUVfMTsPu3bsT2kiMjo7ioosuwvTp01FeXo7nnnsOHR0
dE4ILKiKv/o5R1P7+fommmxFKKnlG0LZu3Yr09HRkZmaiuLgYK1aswFe/+lVEIhFkZGTgkksukXupDAT14OD1ecCoQBb3QzIKJ/cGdUA4HMbmzZuTLbME0bR4FV8aNWyhMTIyAq/Xi9LSUoks8XcqVVLTNCxduhTbtm3DokWLEvLIOUcDAwNYunQpOjo6JN97eHhYol2apkk0S6XgT+bZzYb3hxUaeFyPbW1tCAQCEvnVNE0qWBPoy8jIkGjpEUccgbq6OnFMpk+fLs4BdQwAcc45r8zljUaj4sBWVVVB0zR0d3djzpw50vKkq6tL5nnnzp0C2KkRVD5/aWkpdF2XYkdq1JGOLote0TGkA8ToCGnuzF8jhZM6h8ZxKBQSWjArqo+NjaG9vR0AxAhWc5BZFZORWdLxSF8FIAyks88+e0LDFwDuv/9+nHPOORLZjsVi8Pl8iEajKC4uTtgnubm5UtzKMAwEg0Hk5OSgt7cXkUhEnru6ujqhRytp96zOrAIL7B/sdrvlGlwzal4750Qt4lJYWChgA88rtWBVNBoVVpQZOOXeIZhGIMMwDAFGVMeW4K3q1D/88MNwOp3493//dykeqNLTJ3L+1LNnogiRmTlmBtTMUR/zd9Wfk5F/5SiqpmlSh4C6loAFWXGMrJPlptpBwMEenSxKx7ZbBIMIhLCat67ryM7OltQMwzBw4MABhMPhhGhqLBYT4EnTNBx55JHIy8s77BoxB39YVAyAFE8DgNtuu02CMZmZmbjvvvsmNWbqOUmH/Y033hCHk3vAXAdB0w4WQDSnANhsNhw4cACRSERa9jCtiXMBQBgk1J3AwYJJ999/v9RHYQXtqYiaZmYOnql7KBWgQ8Ay2WcmC+r8IyKonzqof5dUzinlcA6V+fcTGb8T3UdFcbihRkdHBV1Vi0wwp40IVzAYREdHBwoKChCNRlFfX49AICA0K13XpfofqZmxWAw9PT3QdV2UXygUkogjK/dWVlbC5/NB0zSh6lks8SpwXq8XHo8HN998MyKRCObNm4dbbrllQtrdRGPHMfD7/dizZ48oWRWd5yY0G+uqqIi8rscrFlKJsy2NSiX+yle+Is6tpmlSOKO/v18ish0dHZJLod4nNzcXt99++yfioALxIjm5ubm47rrrEI3Gq8hlZGTgJz/5Cdra2hKMAdWoZon9O+64Q2g/6enpuPrqq5GXlwcgHj1pbm7Gn//8Z/zhD3/A8ccfj0AggHfffRc9PT0SHdF1XdBDVgpUKZ0ssELFZ7fbMTw8nFA5mkV+enp6xME97bTTDnEmzABHKpp9ss9arVb87W9/g81mQ25urhiSN9xwgwAWjz322IQOTDKAhffeuXOnRK+5JtW1qOYn9/X1wWKxoKSkBI899pjQDB9//HHU1dUlUFRVvWF2OtX1roqKkqrvQ/Sba1tFUt9+++2U761ed8WKFbIfnE6nGLlZWVmwWCwCXHi9XjGgefAtWrRIqkKrjnxLS4v0VSbNc+7cuVIApri4GMPDw5JawGuWlJQkna9k+988Tipt3DxOkxWOA+eEBk4gEMCWLVsSqK+xWAxDQ0PSCsTcLgeIMwZY/ZKOE1tBAAfbTKmU08rKSqFac++ylgDXC53hkZGRhBwo4GAqBMVisSA/Pz/hb4ZhSM4Z55fnEJ+Za4k0OxU4YH4aIylAXA+43W6pVp2XlycU6LGxMcn5p64mU8ScwwXEQZ81a9YIlfVwwhzUvr4+qeQOQFq0sCYDQUieNWpUn0Z5WloaysvLMTo6KmvqwIEDiEaj8Hq90jtcXXvsX01Qh47o4OAgQqGQOPGke9MJ4bgzPYURLHW/Dw4OCsWSrZ56e3tl7vkcBB1USjlz4tSew+np6ZK3zHl74IEH4HK58NJLL0kOonkdJRNG0/hOh3Mi+Xcz+JxM1MCAGiE8nGiahptvvnlSn/1nFE3TUF1dLWw4wzAS2Dws7EWWANuYqfqKLAa1ywH3GFNxqHfIbqHe7OrqOgT4y8zMlIhtVlaWtCWbaH2Y50sFXYF4f3M+j8vlwsDAAD7/+c/j2GOPhcViQUVFBR566KEEdkcqScZwHBkZwXvvvScFkCjmM5jPxrGinVNeXi4F8ciUiUTifajJtCEThcw0jjMQ15GrVq3C6tWroWkaNm/ejF27dk3aduR5xzEz75eJzkL1rDRfh9/91EE1P8T/sIN67rnnitJNpuxSoYXJckkpEynZiTjfKhpCKiq/w8/zYGaFwfLycgwODkpUbHx8XArrWK1Wyf9TiyFFo1HJr2SEpaioSPLJiouLEQgEEAgEkJGRgYKCAnR2diI7O1soFuwnOH36dKlw+53vfEcOurfffhvt7e0JkSDV8Ta/f7Jx1jQNGzduFHSX+TXmz6vRB/6NipToF51cHvTt7e0YHR0VmsbFF1+cEHlkPhENKTaknjlzpozlqlWrsHLlSnHePql1XF9fj3/7t3/Do48+KtTrtWvXyiGt6zqmTZuGyy+/HNu3b8emTZvQ3t4uYw3ElU84HMYZZ5yBsrIyySV0Op3o6enBiy++iMrKSuzatQuvvPKKIIQ0vEnpo9BY5XpmBIZUYTpHzJFmO6Genh4pInDcccehuLg44brm/TQRWp8K7InFYqL0WazEMAzce++9CIVCKCkpwcMPP5x076YyeFTEsba2FqOjo/D7/VJEjA4MDdmrrroKHo8HVqsVL774Ip5//nlpmcKDkQfE4QArfsYcJQUga1gttMMxYzVqwzAQCARwwQUXoK6uDr/4xS+S3k+99vLlyyW6Eg6HpagKnaJoNIrt27dj2rRpUigmNzcX5eXl0mKmra0NxxxzDNLT0zE+Po66ujroui4Rt2g0ilmzZqGurg4WiwXTpk2DpmkSQeO4sJXURxE1WqY69QQ/Pqxw/wWDQQwPD2Pbtm3yrKTdmcEL4GAROOYnV1RUyBqjo0QnpaCgQFqgTJs2DT6fD5mZmQmVcRsbGyU6RlCAUTZGAWiw0bBTDSn2BGV1WjqoNECZ10r6K6OjNL5YmIVzzb3AoiZz5swRIJV6hdEXRgJ5LtDB5ZobHx9Hfn4+TjjhBHF61PWg6jl1Xu+77z5cfvnlwqjgd5kPWlZWJgAmHWOer4wisdBccXGxzKVhxPugRqNR0QF02PkO2dnZGB4ehtPpRFFREfr6+mSOe3p6AEDqHagAoEot5vuQBRSLxeD3++U9YrGYVBFmDiydazoPPp9PIjmsXExQm6AD/86cX007mEtYVlaG2267DQDwq1/9alL7cLJOI8VsL5kjp+r/q+yWwxnUyYIFH6XAy/+kaJqGRYsWwe/3C7hHIJIOXWZmpqwV7jsy59iuiHYUz3SCa1y3BAeZ7tPa2ipnverI6LouDu+pp546aeBvIsBC0zRpG2ez2ZCdnY1AIACHw4Gbb75ZWr41NTVh48aNABKLAXK+6XymCoJs2rQJ4XBYKiInW0Oq3cgAUUdHB9xut3QMYF931gtwuVzCTtB1XcAf2kXUlw6HAx6PB9/73veg6zp+85vfJHW2VeA7GRCbTCbanxx7c5Q12b8nEsMwPj
TY86mDOgVZs2YNgMREY1VSGccqkj6VqCswuR6MQDy6Z84J4oLl5hkfH0dfXx/y8/PR0tIixn5dXZ2gwkD80FSRa/bV8/l8GBsbQ0FBgWxYNmVn/7pp06aJsmO7EJvNhoqKCmRlZYkTnZeXh7GxMSxbtgyrVq3C+Pg45s+fjxtvvBHp6ekJRlGq9zePNd+9v78fe/fulbwj1VlKhhiZDzgau2qOG1uA1NfXY2xsDDU1NZg7d66UQNd1XYrZ9Pb2Cm26u7sbs2fPxr333otwOIyf/exn2Lp1K1555ZVPzEEFgMsvvxwXX3wxZs2aBSBOeznxxBMxd+5cPPjgg6KY1Tzg448/HkuWLEFGRob0FqPharfb8cc//hGnn346ent78bvf/Q6GES+Q8sorr0gkhfOSk5ODcDgsecsAJP+CtF2Vjs1WKna7XXKxQqEQwuEw0tPTccYZZ0zaQZwKg0GlfrE1E9dNeno6brrpJkSjUVRWVkqeJ+dtIudYVeIABIXlnolEInjyySflwLrtttukkJQKElBUfZMqQkpRndhQKCT5QypYo657ItoZGRloamqSwhAulwvf/va3ccYZZyS9j/rey5Ytk1xHFQlmzqnP54PP50MoFEJ5eTmKiopQVFQkoFRnZ6f06Zw2bRoaGxuFUkj66bRp09DW1obi4mL09PRgaGgIc+bMQU9PD8rLy2X8dT2eozTV/aVGZNT8WJUu/lELLAGHNjfXtHif23feeSehAA4dRfP3KHSkZsyYIc9Mp0/T4sV8SM0NBAJC6QsGg2hoaBCKWU5OjuxFOiAsjNbU1CTPqoIlaq/fSCQiBX1oXPF5aIABB3OzuD641lXwhPPNfq1erzcBLGBuP+ni1BmsTnvmmWdOisaZ7PxYv349LrroIqSlpcHv9yM7O1uekZH8/Px80RF8ZoKWbW1tsFqtwi7iGcJqz319fcjJyZF6EHxXdd2NjIxgxowZUtSwsLAQw8PD0m81HA6jr68PHo9H5pvOqa7rKCsrEwYUI2JpaWlCKUxPT5ezkQAsAas9e/YIcMX9w/Y8zEelniBdVO1dSbuouroa69atAwD84he/SEkPpKSyp1LN2WQjpyrwrOrqZHqUP7nXaej/KzuoZWVlAkCq+efcZzznGanPyMgQ8IbrUnVO2cNe13UBNPj/7e3tUthHBQR4H6vVis9+9rOTdmomCuioouvxPqpAfI5zcnIQiURw/fXXA4CAMrfddps4orw/QUCOV6o1ZbVa8frrrycES8wAF/Ugo6P79u2TtTR//nzEYvFCf7SpWTGZvWXVVll0VvmcPLuvvfZauN1uBINB/Od//mdSW5ZgLllKh2OPJHNiuQ/Mn5nMvKWSTyOon6Cce+65CRsrVfSOn0k1kVzg5o2QynBWUahUQgfD5/NJ/hFwsJw6aQdEUZnvxN6ThmGgrq4OhmFIIYecnBwAcdpsOBxGYWGhfA+I9x70+/0YGhpCXl6eoOiFhYVIT09He3s7XC4XKioq0NjYCAAoLS3Fsccei9raWui6jsHBQVRXV2NwcFCieYZhYMaMGQgGg3jkkUdSomxmJ0AdC/UzW7duPaQYC4AExEydD3PEAoCg9rFYvBgKo3l1dXWIRqO4+uqrJUeSRmUgEIDf70dVVZUUj/jWt74lxrphGHj99dc/UQd19erVeOyxx2R9PPvss0KBPumkkzBnzhyhkTEvlnkqFosFd999t4AePNiKiorwjW98QyIdP//5z/Hss88moK+MfOp6vFet2+1Genq6GNNpaWkSaWc7h7GxMTgcjoQ5SktLg8/nE5rOMccckzDPqpGoymRovaqo+5UNwOnQ+f1+ZGRk4JZbbpHneOGFF6R6tPkZ6GyoBg8pcIz+LV++HADwzDPP4I033jjEIUoVISVQwINUpe8TVBkeHpYeiuoe4T4aGRlBe3s7fvvb3+Lll1+WFic0Li0WCxobG3H++efjv/7rvxCLxfC9730PkUhkQidV0+Itf8giKC0tBRBnbYyPj0vPznA4jFAohEWLFkl0DTjYMqCmpgYFBQUCahHJZ5S5paUFY2NjKC8vR2NjI1wuF8rKyiT3jtReMijUQhofh6gHtdmwMVO+JgIQJro+DZ3R0VG8++67ch+1D6DZyKPeMQxooTMAAAAgAElEQVQDS5YsARAHJqZNm4a+vj44nU6pI5CWlobW1laJdDAaBkAK6mRlZcmeVSPtHR0dMmcqsMQ1TD3AKKqu6zKHaqoBz0HqYBrMdFqtVmsC40LTNKnQbBiGRC9ZaXZ8fByrV6+G3W5PiJgm+zmRkbV+/XpcfvnlCWwailqNd2RkROiMdMz7+vokyspISiwWQ0NDA6xWq9B62TuVUV+eB7quJ7TYYbG+np4e1NTUICMjAy0tLQLAUr9yHK1WK/Ly8mQ82KKHUU2C1GSkGIaBrKws6YNaX18vc6Yaxunp6RgYGEhYY6zeT+eUc6wa1jabDXfeeSfy8/OxYcMGycNOJipQc7iIzlSMZHMuu3ods6hOlfkM+Vd0UjUtTv829+MOBoPikKq6yu12IyMjQ9YP9Zmac65pmlSs5jnFloTquPFnZmYmjjrqKAFsDgcsJKPYTgaMaG5uxvDwMDo7O3HEEUfA5/PB4XBImk5hYSEcDgfuvfde2W9cc9yLh6N/DwwMYNeuXSlBTNrZ7G7AYqTcHyy2xnxUjmdWVhbsdru0zaI+VoNNBJMsFosEOrZt24b6+nq5P8/Cnp4eFBQUoLi4GJmZmaivr8f777+Po48+WvS3GeA228FcE0BiocWp7j/1+586qJ+QrFmz5pAIm6r4VG42kDp6AyQv3DLRpJvRe7PwXuxNxTwedVFxk4RCITnUtm3bhiVLliASiWD37t1ScYwRr6KiIvh8PgwPD6O6uhqxWAy9vb2IxWKoqqpCe3u7vLPH4xFaU0VFBfr6+mCz2VBZWYndu3cLerZ69WrZcIODg+ju7gYQR6R9Ph+ysrJw8803i9FSU1OD22+/fVLOPMc2mZA+zCgFKzFS1PYMHHNzlITGRiQSEUOasm/fPlxxxRXyt9HRUbjdbqxZswbp6el49NFHsWnTpgSalWEYeOONNz5RBxWAGCkssOJ0OuX/aVA88sgjkifFlhd2ux1nnnkm5syZg8zMTFGmmqZJ65Rdu3bh1VdfxQsvvCDURB56mqZJYRXmlAKQNkODg4MoLS0V54VFG3gwkk4OxFv5sGw8JdlBpv5NlYmQUfPnLRaL9D8cHh6WaEd2drY4apWVlfjRj350CBtCjU6q1/vsZz+L4uJiBINBPP3009i2bVvCGlPXmjnPJVmklGuIBSfokLIIDvclqVuk00ciEfj9fui6jm9961twOBz47W9/i+eeew4AcN9990m0vbW1FVu3bpU89WuvvRann356yjHUNA3HH3+8OA+kfgKQXpRcD0cddVRCUY3Ozk64XC64XC6Ul5ejoaEBgUBAoj0AUFNTg5aWFqE8c07Ky8uxf/9+5ObmCu2UxnokEpEibeoBa3Ygp+pIqtE8/j/njOtSjbJ/VLFY4sXaQqEQOjo6xEmgo
6iut1gshvnz5wswQqPnm9/8phhCV199Ndra2uQ8ICClaZqwGngtRsMYCVRbIBCUZIVuOqsjIyPSK1oFS+iQhcPhBOMQgHxOzb8i+u/z+SSSQkNKTa3IzMzESSedlHSO1PQYdT5SRQTWr1+PSy+9VOaS7SUIDnR3dyMUCmHevHkAIJTpjo4OoZVTv3q9Xhkbn88Hj8cj9RxoUA4PD8t91OrH2dnZiEajmD59eoIBykiVpmmSP6pGSBmhZm6yYRiYNm0agDhY1N3dLSkGBLPr6uoEIDTbGhxDj8eDsbExmQvSGLmmDMMQWjedU87n97//fYmGP/roowmAGWUyDkIyIzqVpGKqmY1u/u5w4MW/qoNaWlqKGTNmSP2HvXv3AoDoZupltoBijrtaJI/sKYvFIvRT6nTuVeBgFNxiideKWLp0qfRensyzqvbzVJ2gsbExNDU1SXCFNuRXv/pVFBYWQtPitVDuuOMOuQ91Vqpgh1ksFgveeuuthKiuei1+hgwRnrmMQFssFhQUFMDr9Qp1nhT7rKws0deMonINqwCi3W5HSUkJrr76athsNjz11FMCjubm5qKsrAxvv/02XC4XTjjhBOzbtw+LFi0S55pF8pKlqXB/qX7NZMGdw40bbd8Ps48+dVAnIeeeey6Ag4rNTJUDDl8dLpnSVBeDWbioed9kf+diYe4KUS8uQDodvE84HEZvb68cTuFwGI2NjRgYGBBHlkqKdEPmiAUCAZSUlCAzMxNdXV0IhUKYNWuWUO2ys7Ph8XhQX18Pt9stxqau61i8eLEclDSGQqEQmpubZVx4AN90001CA1apGRNJKqVG8EDTNPT09GD//v0SyTODDjT2Obaq0HigAVZWVoZdu3Zh0aJFcv3c3FxcdNFFqKioQEZGBi666KJDKCSMuEWj0U88ggoADQ0Nsi5ef/11NDQ0CGUWgFRG9Xq9UimW0Sfzga3mTLa0tEDXddTW1uIXv/gFNm/eLMUOdF0XQ4xjS6Sfxp6uxyuEhkIhORxHR0cxPDwMj8eDvr4+Wcdnn332IfuDDog671MFLoBDwSF+dv/+/QgEAujt7UV1dTV8Ph9WrFiBz3/+8wCA6upq/OhHP0q4t2p41dTU4JRTTsEvf/lLbNiwQQ529b6pdAGQWPTIMAwpYERDUD1IGE3KyspCf38/Ghsb8eyzz2Lv3r148MEHMTIyIpU9gXgu+MyZMzF79myceeaZMAwDp59+Oo455hjY7XbcdNNNst5/+MMf4v/9v/+H++67D9OnT8fPf/7zlOO4YsUKZGZmSk67YcQL6Khosd1ux8yZM6WCaV9fnxRdW7VqFZqamgQAUXOkOjs7E4xTwzBQVVWFffv2oaamRuiY/f39CW0FotEoysrKEhxKc6Rkoqj1hxHqg4+DCmwW7q/x8XFs2LBBxpUAGvt1Uh/5/X7MmTMHl1xyCcLhMJ566im88cYb6O3tRXp6urRCovPn9/vFKTFXWeZ+zM7ORmFhodyba589tNkTVd2PKh2Y48xcN+4b0ti4BzVNEwaFqo+ou9LT03HKKacksF74PfWnWczRZ1XWr1+PK664Qhxyi8UiBbkYdWJuKPPwmpubkZubmwAOB4NB6dHY398vkVM135SOOoFNMnXYPiojI0N6sDKyyT3BYnT79u2Druti4BJA5LpesGABBgcH0dvbK8wWRn5dLhe2b98ubBfVtjGPD1N+CEiQUZOVlSVVgtkfU6Xk82wvKirC+vXrAcQpvyzWxD1yOBrnVJwIdV8nu4YZVExlf5nlX81J1TQNJ598srwbI+Q8S7h+AYhN4HQ6BWRg4TaVxs1caHUs1fWSlZWF5cuXw+12JzArDvec5rma7HrgmrVYLNi+fbvYeXPnzsXw8DDKyspkP5eXl2PLli146623PpSuN4x4tfTt27cf8m60m1TmBW09wzhYnIqVwA8cOCBBARXcYc94vhN1Hf+f0dgvfOELOO644zA2NoY//vGP8Hg8eO6551BQUIBAICAslFWrVsHhcEha0axZs+D1erFjx45Dzjx1z0zVCU0l5nkk7XoqMhUH9f9sH9Tnn38eAA45CNUJPhxSxAVhjpCk2sTG3ykDqYwnKgZy/M1IOp0JNafNTCNkewDmkw4ODqK1tVUibsyVowIjtYfOCg0SAPB4PGhoaIDD4UBhYSFaW1vFuGBkhuNGg4QtSoaGhmC32zEyMoI777wT7733HgzDQH19PdatW3fYlhHJxt58qBUWFgoFmdECdUNSMSc7pOkgM+rIiGF9fb2Mwxe/+EXU1NTA5/MJCm82hjjXUy0K8WHl17/+NTZs2ABd17F3714Eg0EEAgF8+9vfxvXXX49169Zh9erVuOyyyxLADABCHbNYLHj//ffx4IMP4rzzzsNZZ52FV199FTU1NTjuuOOwcOFCOJ1OBAIBBINBMVzYk0/T4kVsWHyHUdahoSHJbyKVmJV9Of7z5s1L6pxyDM0o+FTEPMfqnMyYMQNWqxUlJSVCV9+1a5e8T3Nzs+TE0pEsLS3FWWedhQsuuACbNm3CJZdcgo0bNyYY55RkB4C6ViwWC4LBoFRAJgWIxqTFEu+DeNxxx6GgoABut1v6SM6YMQNerxfV1dW48cYbJeLBfopsEVRXVyd796WXXsLWrVthtVrxwAMPCGXr+uuvx49+9CMUFxdj3759E44nC+0wGsRoCx2InJwczJs3T6p7d3d3o7OzE5FIBMuXL0d/f39CYQjmsNOx5nxpWpy629nZiWXLlmHfvn1YvHgx+vr6kJubm/BMFotF2n1wXydzSj9OoEhF05PlKH/Ua1P3nnLKKTjxxBNlPZCuT3otexXOnz9fqLDr16+XM4A1B3h+2Gw2FBQUSK49KbekfvO9/H4/2trahM5L6jbnX42aqhR4NZpAXcPzj/0VOT5+vx9erzdBH9E4i0Qi8Hg8OPnkkxMYQvxMsrlUz4fDzQGvyf2ovn9aWhocDocwjlpaWmCz2eTduca6uroQDocxMDAAt9stgA2NUbZh0XVd/s0IC2nYnZ2dSEtLQ319veyrzMxMiX5pmoaZM2cmRImZp2oYBmpqajA2NgaXywW/3y96MyMjA263Gzt27EiwG7iOUgG9g4ODAmbwHGQLMdop3O80vPlcAwMDuOyyy6BpmqS80OiejDMyEaigCoEFdQ2Yv5fMTvrfLC0tLVJsji3cCPSo+4YUfNqEKgMiGo2Kc5ospYV9TI888siECOZkhPPBqOHhghHqma9G+fhczKcH4gyyd999F0A8uHDEEUd8KB3MtUr2Ac90VddzP1PnqdXwCdqw/3tubq7sIzLzGCRRU0Vou6vAtWEY2LZtm+i98vJy7N69G62traivr8eePXvQ2dmJ3t5e3HvvvfjJT36CPXv2YPfu3diyZUvCO6n/TSbSfTjhnqdd9I8OJP6fjaBSGElVN4YZTUomySJS6t/Un6pM1P/UjF7ROFHvR6SMB/3o6Ch6enqkB9WuXbsQCAQk94WbhXkpmZmZKCsrQ1tbG+x2O6qrq9HQ0ADDMDBnzhwpolFVVYW8vDxBZEtLS9HY
2Ahdj/fF+tznPjfhWLCfVW5urkTSiouLcc011wCI57y+/vrrqK2tTbqRUh1yyeYjFovhvffeS1Am6ufV6pXqdwBIYvvOnTuRk5MDp9OJl156CWNjY3j77bfx9NNPJwACE63TV1999R+ygffv34/u7m5Mnz5doo7quDCiy4jBBx98gCeffBJer1fGoaKiIiG6rOs67r77brz55pt46623sGnTJunvF41GkZ2djdmzZ0u+MRAfQ/YDtFjiVTxJFWMxkrS0NIk0cN2Y53YyPYHVd0sm5s8mW5O6rmPnzp3y7Nzr119/vSDNHR0dqKmpQUNDA379619jaGhI9hFwKECRbO0ywmgYRgK1h/vXMAyccsop2Lt3r6zX7u5u9Pf34/7778fzzz+PhoYG7Ny5UwzDJUuWoLq6GmvWrEE4HMZll10m4IDf75d3XbFiBVavXo0FCxbAarXi1FNPhcvlwty5c/GZz3wGxx9/PMbGxvDSSy/hggsuwC233CIHvnn8Vq1aJcANKWB0qMbHx3HcccdJakB3d7dUCz/xxBPR19eHuro6DA8PY/78+RgfH8fs2bOlSTnnkQyPvr4+caqys7MxMjICp9OJvr4+9PX1Yd68eQlrxOVypQS51DlS9efHtTfNDo9qbAAH9x9wkBkwVdH1eIEvgn01NTWSrnHNNdfAbrejqKgIhYWFmDt3Lnp7ezE2NpZQQXfatGnSf1Wlm7IPKJ2SaDQqhhbXKiNzLACitpyhoUa9SqeE309PTxcWgNfrlTZpFovlkEjc0UcfjYKCAgDJKdof1dlQI6g8L+12u0QMAUgaR1dXF2w2m/T4ZXR07969CfRrwzCkMrnD4ZB5oSPAtcezz263o7+/X6LYzNWvqqrC2NgYiouLEyqzGoaBhoYG0ctkHTB9gpFXiyXeJsjhcGDTpk3IzMxMqDnA501mWKpR76KiIoyMjEheKteRy+WSaJuZsg1AIkK33norKioqsGnTJonkHE4ON69mY1t9Zv5dfZdkNttkHJd/pSiqpmkoLi6WdlQEF0jBJVtCbXXEOVPzIHkem9cFe6cCQHl5ubDJgMm1FuL9PozDmGw9EDizWOJFw8rKyjAyMoKioiJcddVV0DQNc+bMwfe///0p63fVRq+rqxP9Sd0GHGTZMb+eojqasVgMhYWF0PV4D3me+wSlsrOz5VzjnuQ9dF0X593lcmHNmjUoKyvDSy+9hBdffFFqNrBuDPWV3W7HggULcN555yEajWLlypWwWq349a9/PeVxTyXq+lHBArP980lHUCeGNv4PCAdfXeCTORz5valsiokQCCoM9UBRIwOqMOKpafGG306nU6IppAcxN5E915hvNDw8LDRdXdfR29ubgI53dXUhKysLxcXFeP/995GWlobc3Fw5GEkzUJGmZLJgwQJs374dAwMDsNvt8Hg86O3txV133YW1a9eiq6sLy5YtwwknnIBHH300wTBJRqVLBgYQBTMMAytXrkRra6sUuyGyyGdU0V/OBX+6XC4cd9xxePjhhwEA1113nbTRAA6tDMjfqX/7qJGUqUhNTY0Yonw3RtQee+wxbNu2DaFQCNFovMef0+lEbm7uIVVQ1TWs67oYpZ2dndC0OG2NBRMikQg++OADGIaB5cuXY/v27QAgVUPVHDUA8Pl8Yqwyur569epDHLpkB0uqw2YqUepk4FIsFsOiRYuwY8cODAwMoKioCMPDw7j33nuxfv16PP7449izZ4+8r9kgUudaZRwQqSaKrRax2L59Oy6++GI0NTVB1+Pl5Ts6OrBs2TL09/djcHBQcsYyMjLw2GOP4corr8SBAwekkqFhGNi+fTuGhoYQCoVw5ZVX4g9/+APOOussDA8PyyFnGAY2btwIi8WCDRs24Dvf+Q5eeuklXHTRRdi0aRP27NmD+vp6XHLJJTj77LMRDAYl+pFMWHyDRabUnOulS5fKXuvs7JTCa4sXL0ZbWxva29vh8XikYmlOTo5QlXjgc/y4hpYsWYJ33nlH6FPV1dVoa2vDzJkz0dbWJo4DEF9fzMc0ixmUUOfPrOM/jJgNYDVKTkBB/TdR9WS6PJWoBgn3cjgcxuzZs4Uylp+fj/POO0+eh+uOTgTBAPYszsnJQUlJCQAkUNrGxsYSADsgnovJAnvU++paV51ZCiOTdrsdg4ODGBwclKI9bBdDh27JkiUoKSk5RNdz3CZKhZmqqFE1nnUEJvv7+2G321FbW4uKigrk5+cnnG21tbUSBeH+V41P6r2BgYGEdVdcXIxQKISMjAz09vYKUOFyuYQF5fV6kZOTg927d2PatGnSK1bT4gylvXv3wmq1oqysDMFgUHoqM5fb7XbDbreLc2oGaPnOqj1jPr90PV6MsaCgADabDf39/RLdHRgYgNPpFEOcbXoYKTKMeKrHD37wA6xbtw5HH300RkdHpdBLKpkM6MC9cjhn03wOq+vGbEz/bxC1b3ssFhM6NouSxWKxhJZHTM9gdHVgYOAQAF/XdYnCVldXY86cOQCmxlJQo/5TEdU2M98nOzsbra2twhJixJLnEhAvpnTSSSfh7bffnhQQmCx6P2/ePPT09CAjI0OqZdOxV5+T5x+QmFbi9/vhdrtRVFSEjo4O2YNkmKgpeaxJYbZlRkdH8dxzz+HWW2/Fa6+9JntQtUOGh4dhsVjETmtsbBR7pqKiYtJjfrh54nMy+GEev3+kHFZTaJpWrmnaBk3T9miatlvTtGv+/nuPpmmvaZrW+PefOX//vaZp2sOapu3TNG2npmlHftIv8VHkhRdeSGkkHy5Eri5YykQbdaJoLCUjI0Mq8NKYM6MXVqsVPp8PsVgMgUBAlMuBAwckDyYcDiM7OxtWqxUejwd5eXnweDxSxY/0kM7OTgwMDAgtKRQKoaKiIsHZZREcAFKZdSJh5HfBggVSBdbv98PpdCISieD222+Xsv9+vx833HCD9G7jGCaTZPdVf1dRUSEVVd1ut6D7yRBWTYuXWc/IyMAjjzyChx56CNdeey2+8Y1vSO9TKs5kylM97AOBwKSaxn+cYrPZsGHDBtx111248sorceWVV+LrX/86Wlpa4PF4UFpaioqKCmlOr0YMacjm5OTgmmuuwf33348f/OAH+OlPf4ojjzwSmqZh4cKFyM3NlVL0pKZmZ2fD4XDI+CxevBhAHNVzOBzIysoSI4oAidPpxIoVKxKMUTUPTRUVpDH/3jz/qtFpFvMaUqmg1dXV8Hg8UrnTYrHguuuuQ1tbm1DUgUQUn2MXi8WESk7DMzMzEyUlJaisrERTUxPWrVuHqqoqZGRk4IgjjsAZZ5yBb3/72/D7/XKA/exnP8NZZ52FxYsXy7XHxsbQ2tqKxx57DFVVVfjNb34j1JpoNIr6+nps3LgRt912G8bHx/H73/9eqLsUTdPw17/+FaFQCHfddZfkmlVWViIUCuHPf/4zfvjDH4rDUFxcjAsvvDDpGuM9CwoKZEzS0tKwZMkSadpOtoamaViyZInQvEkB17R4D0en0yl5qmqksaysDKFQCEuWLBGgKRAIwOVyoampCccddxyCwSA8Hs8
h+4/G/GTFvI8/CWApGdhpNp75b5X5oD6HpmlSKX18fBxHHnkkhoeHkZWVhQsuuAAWiwWzZ89Ga2urFMwqKioSZ5DX5n0yMjIk+rZp0ybs3LlT9B+jqDk5OeLwpKWlCaOA70O9T6OLji8jKyqNje2FyOqx2WxwOp1S8Gr16tXSR1AVXsdstH5U4TyTWs/8rbGxMWEXLVmyBIWFhQn33Lt3r+QC89ygwelyuSRayRxRntculwtDQ0Ow2Wzwer3SxzUvL0/AhWg03qapt7cXhYWFcnYXFBQIwDB//nw5o7u7uyVyyqrMsVgMmzdvTpgnt9sNh8Mh91TXmqrPCDoQVOjr64PdbkdFRQWsViuysrIkGhcOh6UgGoEq0n7Jornllltw2223YenSpQJ6qXrZDAxPRtQcQPN11PWR7CyZaG9zLi0WC+68885JP88/g9DZ+f/svXl4lOXVP/55ZiaTSSaTyT7ZV0IIJCQggSCKFFQUtGpbqXUFFVttXwuKbKICooJU7at9tbUu2Oql1GoRFBdACEgiayAJW8i+TpLJTGbPTGbm+f0xnsMzQ4KJSuv3d/VcV64kszzL/dz3uc/yOZ9DbaOk5Hr02+v19yimtScIfuTAbbfdFjC/Cd4eFhaGnJwczJ49m53TkUhwoG44nwUCkSZDPS+ao3FxcZxksVgs+PzzzyGXyznYJWUiH0qGshe8Xi80Gg0H4L7NNqHrItuc+DgIrSIN4FB/cPIVpOuGzg2AEY+vvvoqPzeTycSdNigZQfaHwWBAXl4er8eTJ09yAJeeN/kK0j2Jgqft7e3nJdjob2n522DP7d8p3wrxFQQhCUCSKIpHBUHQADgC4EYA8wEYRVFcLwjCcgDRoiguEwRhDoD/ATAHwBQA/yuK4pRvOYf4Q21G31XmzZs36OsXUqhDwRlosxouRFFqfBPRBE1cab0bbTi0kBobGxEeHo7IyEgAQE1NTYCxEBcXB6vVypNVo9Ew4QUAht5Ko/vEHkZKfNSoUbxRq1Qq/OxnPwuAzQ0FrZT+X11dHeAUkpKJj4/HI488Ap/Ph/T0dOzcuRMnTpw4r+ZkqOcw1FiSHD58GILgr5eRNiCn61q9ejXS09Nxxx13cP0URcqDRboxCoI/c02Ln67l3wXxJVmwYAFfN12jNLInfS0mJgaTJ0/G1VdfzTAzq9WKXbt2Yd68eWysvf766ygvL2cSoC+//DIgklZYWIhTp05BJvM3lVer1WhsbOQWBQ6Hg3t2UaAkOjoas2fPDrj2kUC4h/rOUJvat80LADhx4gTcbjcrYerZ2t/fz0QGoigGZItoXVssFqxduxavvPIKPB4PFi1aBIfDgYMHD6K/vx+TJ0+GTqfDG2+8Ab1eD4/Hg/Hjx+PGG2/E6tWr+VwAsHr1alRUVGDz5s0B9eXh4eF4/PHH8eijj6K2thbx8fEMsxQEAQUFBVi0aBGWLFmC7u5uhvlKCXB0Oh1ycnIwfvx4fPrppwDAJGbTpk1Dbm4urrvuOoiiyGRR0rHNzc2F2+2G3W5HZmYmtFotwsPDYTQa4XQ6YTabOVM+c+ZMNDQ0oLe3l50iqsETRX99slRvKBQKpKenM4OqdN24XC7U1dXB6XRCqVQiOTkZ3d3dMBgMyMnJCYBRRkdHB9TdfZf1J3UiL/b6Jf0snY/k2JD+kcn89cQEx6NnT43dvV4vioqK8NBDD7EzMzAwgJ07d3L2mYwjOqc0wELnBPzzntoZSN+jeU+BKQCMxCFCHGldK9W3O51OREZG8nqRy/116BqNBlOmTBl0DUvHPJg4azAkxEhk48aNWLhwId8rZQYIht/V1YW4uLiAQBS1aJFC9QXhHFM6tWxTKBTo6enhenutVsufJ54HmUzG6ALpmFM5REREBLKysjhIYDabERERwdBqmUyGs2fPMqQzLi4O4eHhOHnyJMM2BcFfD37FFVcwaqimpgZ1dXVs1NN4Sh1F6V5Ijk5ycjJkMhnzVpBBTMFHlUrFwS2qw6NroCDE448/jtDQUPT09OCDDz7goNRwn6N0TkiDOkOtzcEc4QsJGenSz34XqOK/WwRBYII42vdpTtJaiYyMZGQJ1ZgS5JfsSoKmC4K/5jkrK2tEwTp6FsFoh+F8ZyTOLOB//sePH4dcLkdvby9GjRoFi8WC6OhorFixgtfWX//614CytuDzSksyBhOfz4f9+/cHoAWAc/sJ7avSY5Aeoc8QIquzs5MDBQMDA4iPj4cgCBywlSKsKFBEazAmJgZ6vR49PT0IDw9HXFwcTp8+zaV6lBmfNWsW3nnnHU7+REREoLm5GRs3bkRpaSl27NjB+nbNmjWYO3cuxo8fj61btyIuLg5yuRzJyckQBH9bnNjYWEZHBDutF3pWFxviO+IaVEEQPgLwp29+Zoii2PmNE7tHFMU8QWzjsAAAACAASURBVBD+8s3f737z+TP0uQsc8z/uoP7yl78MUIrBWQn6Pxh2SxHKwZyEYPk2ozwrK4uptYkFlTYBirxIN1GCvbndbjQ1NaGzsxNKpRIulwvx8fHc95EgXvHx8airq4NKpUJGRgZTlIuiHzNPTK1kbERGRiIpKQktLS2QyWRISkrCjBkzAq57sMk7mLJzOp3cIJ4UjdFoRGRkJJ588kl2ZBQKBV599dUAR3GwsRyKQTn4s1VVVeyIUA/M559/HnK5HKtXrw7I6lyoXk0QBB4fMoh9Ph/Gjx/P5Ddvv/32v9VBJcMLOEeI4fV6ER0dDbfbzc4QOVbV1dUwm834+OOP0dnZidzcXKxevRrr16/HTTfdhNGjR+PXv/41wwtjYmJw9uxZnD17NqBfnsvlQlxcHKKjo9HU1ARRFDmi73a7ERsbi87OTjZagtuZSHvWBstQRsx3dU6H2hBlMhmOHDkCn+9cY3KHwwG1Wo3q6mrk5ubitttuw7p167Bhwwbo9Xp88sknmDNnDsxmM4qKihAdHY39+/djx44dWL58OSwWC86cOQOn04n29nbccccdeP3119Ha2gqlUolZs2ahtLQUy5YtQ3Z2Nurr66HRaLBq1Sps374de/bsAXCuPjYyMpLJrxITExkyTI7AlClTuDbU6XSis7PzPB0WHh6OwsJCZiulDaelpYU3y82bN0MURcydOzdg3HQ6HY9nXFwcdDodbDYb90al7N1ll12GpqYm9PX1MVRcLveT/KjVajQ3NwcYCTKZDImJiTCbzZg0aVLAs6Ga+o6ODowaNQqtra0oKSlBZWUlG8zU/oPGKiwsDHa7HR6PB7m5ud+bbZfW9r9rLZPzQ/r01KlTDAFNSkpi2PjDDz/M2YTdu3fj+PHjg9ZG0VwuKysDcK7VEo0fAG63QnsKESxRbTrBOQlmNjAwwBBtl8vF2QNBEBh9Q44pvRcaGoq3334bPp8Pe/fu5TZkUpEaQN9mDF1IKLsQ/MyoBpUypZ2dnYiOjoYoimhtbeW2RdKfEydOwG63w2KxMJM5BQ+oxYUg+HuXUksPgloODAygr68PISEh3OInPj6ex54yXV6vNwDlk5KSAkEQuB2NIAhMOCeXyzlgER4ejvb29o
CaQq1Wi9mzZ5/ndFqtVuzZs4cJtaRlRMHjLLVpyBHt6+uDyWTiaydWWEHww8sdDkdAX2MATND14osvMtv4hx9+OOK1RHtWMMGO1M6idTpYqdWF5tFgjtX/Kw5qWloaB6EAcMCEasrpnqj1CLUpCgsL40whAFx33XXfqTYeGFmpzVD24Ui+TwRuISEhiIqK4rZXq1atYmh7Tk4O1q1bN+g8Cw7aDyXV1dWwWCw8XoNlF+lvugfKhrrdbsTExCA0NBStra3M6Etrh4jqpOuW9Kc0ySGXy9npdDqdiI+PR29vL2dp+/r68Mknn0Cn0zFx26RJk9Db24sjR44gMTER//znP6HVapGXl4cDBw7g9OnTTFJKmea4uDiUlpYiLy8PoigySzOVKun1eu7+QZ0aBpMflYMqCEImgL0ACgC0iKIY9c3rAgCTKIpRgiB8DGC9KIpfffPeLgDLRFE8HHSs+wDc982/l/ynHVTg/CzqYNnB4OscDEo4mAy1qKURwuTkZDidTvT09CAiIoKjsaIoBmRXvF4v9Ho9R3LJ+SAmQp/Ph9jYWLhcLlgsFnYubTYb7HY78vPzYbVa0d7ejujoaISGhvJmS/dI5CgUCdJqtbjlllsCJupg9zTY5ieFPxGJQltbG4qKimA2m6FSqXDrrbcy6U92djY2btzIEJ/hOPt0Xum5yWghKPETTzyBkJAQ3H777QF1hrQRBhfFk7KgeqWOjg4kJyejs7MTGRkZ+NnPfoZPP/0Udrsdx44dQ0dHx7/VQQWAe+65B2q1mp18chD+9a9/ITQ0FGlpadi6dStn2Oh+4uPjsXbtWjQ0NOCll17C4sWLkZKSgpdffhllZWUcGe/s7ERvby/6+vo4O0o1K2RIKZVKdkxFUeSsmkKhwI033ngeTOtCyINgGQ5kh0RaiznU8eiYNFcOHjwIURRx9uxZ5Ofnw2w2w+v14u6778aYMWOQl5eHv//97/j6669ht9vx2GOPwe12c6Nsn8+HAwcOQBAETJkyBRaLBYcOHYJKpYJer0dZWRnDnxUKBaZMmYItW7bA6XQiKiqKjVKXywWj0QitVstrsLq6GiUlJWhtbeUMi1KphNFoZKevuLiYs5mA3zAhAhhad4IgoKioiCGH5Aw1NTXxZzIzM3H69GkeI0EQkJCQwM+ptLQUbW1t6Ozs5IzbsWPHUFJSAqfTiebm5vOyKrm5uWhubg6IPJNzarVaMWnSJDYeaJOndkCTJk3C3r17ERERgZiYGK7BJNKI+Ph4vk5BEFBbW8v9WidMmMC9HL+LSDPR0rlL13kxhPSc0WhkR45KNKh11A033ACfz4fi4mKeh0O1Ogo23Pfu3cvQdGorIu1nTN8hnUh7QUZGButQmczftkbqTDudzgA4Ia0tMpY3b94MuVyOsrIyNDc3X5Sxu5BQBlWaBfV4PGhpaWFIHOn906dP8/339fWxUQiAYfw0JwwGA/c81Gq1APxoJ6rLHhgYYOQKzRsyMKVZWKvVypDZCRMmwOfzcQsblUoFs9kMnU7HQRuCtVNGJiIiAjfddNN5NfMkMpkM27dvD2i5Jn2PHDWaL1KG7fj4eHi9XrS0tMDlcnFQQ6vVQi6Xw2g0Ii0tDWq1OiATJJf72VGnT5+OG2+8EV6vF6+//vq3OgnBDnawBDunwRmtbxOpozLYvvJjd1JJJ9N9yOVyZtanvY6IECkb5vF4eH7K5XLMnj17RNls+p6UaHKk2fDg5zPS81NyQxD8rZRo7mk0Gjz22GPwer3soA5FuDgcsdlsqKqqgkwm4+QQBdOlCBSpniVdSOUOFPCiEg3pviUlHZN2LqBnSesxLi4O9fX18Hg8yMzMhNFoxLhx47BixQrU19cjKSkJfX19UKvVaGpqQmxsLMaMGcP22datW/Haa6+hubkZcrkco0aNwtVXX4309HScPn0azz77LM8NSoZdcsklmD17NmJjY7ltzbx589De3s6tsWpra/mZSmWk62YkDuqwZ4kgCBEAPgCwSBRFi/S9b9KfI5oVoii+KoripOFc5L9LqME9GVLBi2uwDN1wZChjXFp/dN99fl+dIBjkFNFnpb+Ds7XUm5AiyNIoOm1KZDAQu6rFYkFoaChGjRqF5ORkhoAQrj0iIoI3ItoIg53ToaJVg71GG0JRUREEwd9wurKyElFRUXC73Xj33XfZ+Wlubsby5csZSvZdxlg6ttnZ2XjiiSewefNm3H777bw4ycEKrg2j+6OidKrzy8rKYubMxsZGZGRkYPHixd856v9DyPPPP4+nn34abrcbO3bsQHl5OX7xi1/g7bffxmeffYZ//OMf0Gq1mDdvHm644QZkZmZCrVajsLAQS5cuxZ/+9CeEhobijTfegEKhgMFgYHIji8WC1NRUdmyJmZOCJFKIC9Ua0+ZJjutgBn3weF1oXQ0VuRvKgBnO5kRGenV1NRMujBs3LsDYpms6duwYxo8fj2uvvRYDAwN45plnoFAosH37duzcuZOj1GazGevWreMsEmWlqK0TZU4qKio4s0n3YLVaA4IptGElJSXBarUiKSkJPp8PJpMJoigiPj4eCQkJiIiIwIEDB5g0RRRFNqKlTrjP58OxY8cYzkg1ye+88w47OUM5D6IoYvLkyWhsbGSEBvUfnjZtGlQqFcN6CfoHAJmZmejp6QlwoAgWTnV0Uv0ql8vR3d0Np9PJrQNmzpzJ0PPOzk5mQ3Q6nQEGpvgN5I0yfG1tbd9rTUqNXqmevpBz+n11AJ2jq6uLgzwUuQ4PD0dRURFkMj+jq5S4J/i6pceTrofp06dj1qxZjCah/SAYHknvhYWFQaFQoKmpCY2NjXzvdDyqd6N+ndLjSbNepA+oNdAPLUMFtaTPg3QIZYPb2tqYAZr2JoKg054TGxvLSABCFxFiQK/XMwyQgscEaaXz0R6nUCh4jVksFnYyyIkgR5aMXNr/IiMjOYj1y1/+EkuXLoVGo0FiYiJkMhlnk2bPns3HGGxcKIhFZRdSg5ieN+kLGgsaOyJQ1Gq1THTlcrkQExMDq9XK9gnNBWnA1+l0Yvfu3XjiiSfg8XiwcOHCb10/UqcTwHlzjoTOMVQ5zlBC9/59IOP/aaE1LQj+1lykBx0OB4xGY8DeKwgCB+oKCwsxY8aMIdF933ZOacsoOv9w99rBAn3DEfocwZF9Ph8jPehZUkbSbrfj/vvv/156mFBggiBwRprugYTmKF1bcJ0mrQFpBpaORzantLY62JanjLeU7fzuu+/GM888A7Vajfz8fERFRWHMmDFobGxER0cHsx0T6mH+/Pkwm83c+qatrQ3/+7//i927dyMvLw/33HMPRo0aBafTySSqR44cwcsvv4y4uDhERUXhuuuug8FggCiKjGqSBmiHQmL80DKsmSIIQgj8zuk7oih++M3LXYIf2otvfnd/83o7gDTJ11O/ee3/CQmueZBOTnIApZ+RymALLzizRxu4XC5HQUEBNmzYwJEf6kmq0WgC2CBpI/N6vWwMUlSMag2oxYPP5ydFAc4pcmIBtNvtvDGSc+bxeLinVnp6OvR6PeRyOTIzM2EwG
BjWM2vWrIB7Gup+h/MaEaKkp6ejpqaGjdWVK1eygjh79iyWLl0a0K5hKAleLLTJTpw4EXfccQeqq6vx61//Grt27QowbIOfI0WwXC4X9Ho9BwrS0tKQkpKCjIwM9Pb2csucp59+GjKZDEuWLMH//d//DXl9F1Nqampw11134bbbbsP+/fuxfft2FBQUYNmyZVi5ciWWL18OvV6Pt956C1u2bEFbWxuefPJJ7Nq1C0qlEnl5eVi5ciVmzZqFRYsWBRhqRKCQnJyM1NRUyGQyrvMgqAj1OSUnwWKxcO3htddeG3Ct0gCAVIba6Mj5C5bBlOKF5ocoiujp6UF1dTUqKytRWVmJ+vp6rnEkRU0QHIVCgX/9618ICwvDmjVr8NZbb+Ho0aPo7OyExWLBmjVr0NfXh/DwcHz44Yd477338PXXX+PKK6/E2rVrsWXLFuzevRstLS3soBLczel0IjY2ltceweO8Xi+SkpIAgPuNxsfHMwlLcnIys2z+5Cc/QUpKCjQaDTIyMtDU1ITw8HCuc3v//ffPq5mRy/3ECLW1tYiJiUFMTAzee+89fPzxx5ydJYZXEsrWGY1Grn2hoA0AZv2mOjd6PTs7G319fayT6FiJiYmw2WzweDwYM2YMP0uZTIbGxkY4HA4UFhay4SttlUTHbW1tRVpaGpqamgIctIyMDG6HUltbyy2Sfighw5auN1jn0N8jgb8FC60jKdKhr68PWq0W6enp8Pl83L832JkYSqTvy+VylJSUsIMivW7p/UidGIJ4qVQqngPSfVAmkzEhGnDOIVUoFIyoOHDgwEVDlkiDt9J5Kb1vCsgIgoD29nao1WrExMTwfbjdbrS3tzNklfZaKouhfs/d3d1cm0kBpdTUVMjlcobj0nkzMjIC9KTZbIZcLmciJrPZzLrW5/P3BzaZTHA6nYiOjkZjYyP3W42KioJOp8OePXsgCH6YZ2xsLGbPnn1eVpSuQWoAJyUlcVsmafabxk36QzqQPkN9Xyk4RCgM2idtNhs74DRulImn1jV/+MMf2EkdCnUlk/nbTY0ePRoFBQWYMmUKBgYGkJiYyJ+RBt2kc3Y4c0S6Vob6zoYNG771WD8G0Wq1TKZlNpu5dy1BROlZhoeHIy0tDddffz3S09MD1ulQMtTzka5fCmZc6FhSW+y7OMVS9I8U6k1rKiIiAv39/Xj99dd5bVK7l+E6TcHBOblcjiuuuAKi6C9ZotIFKXRaquek40HvU5AgNTU1oD8tXbfb7Q4IsNJxKMhEjmxYWBjmz5+PTZs2wWq1YseOHdi3bx9UKhUzGmdnZ3O/8TNnzuDYsWOIjo5motSamhqYTCYO3G7duhUPPvggWltbGY0VFRWFSy65BP/4xz8wY8YM/OpXv2L0CPVJzsvLg9VqxeHDh1FTU8O9Y38UDqrgv4LXAZwSRfF5yVtbAdz1zd93AfhI8vqdgl9KAZjFC9Sf/tjk3XffveD70kgjyVAPSqpECYZB0ZCnn34apaWlkMlkHBm5+eabERMTg7FjxyIlJYVrWEi5iqK/ryhtpMC5Ru/S7JYgCOju7maDpr+/H06nk53gzs5OOJ1OZGVlce8sUgDUtmJgYADh4eGcQQx2AAaLXg41BsHi8XhQUlLCmyfBi1UqFVauXIkNGzZAqVSiubkZv//971FQUAAAAVH6oTJr9HwWLFiAP//5z3jggQfQ3NzMG2ewUiIhtjV6ZjNmzGDo6oIFC3D//fcz9b5CoYDdbofD4cDatWvx/vvvIyoqatDrudgybdo0hIeHIzMzEytWrMBjjz0Gh8OBr7/+Ghs3bsTq1avh8XigVqvhdrtx//33Qy6XY/369cjKykJDQwOefPJJbNmyBQMDA3j66acZthkaGsokKQkJCVCpVAwpImgYKfHExETo9Xqo1WoolUpMmzYtYH5IlbxUBsueSo3NoSR4XgVnjywWC06cOIHq6mrU1NSwIyXdGAg1oNFooNfrOetJm8CKFSsQGRmJpqYmTJ48GS+88AK6u7vR3NyM+Ph4vP/++8yQa7FYcOzYMVxzzTVIS0uD0WjkWiEycn/yk58gKSkJDoeDndQXXniBDUKv18u1Kk8++STrDKLX12q1iIiIwIQJE3DttdciOTkZcXFxiIiIgMFggF6vh06nYyf54Ycf5nGi+W82m3Hw4EGEhIRAr9ejv78fmzdvxtSpU5GYmIjs7Gwex5KSElitVvT29nKdNcGYRo8ejdOnT6Ourg5ms5mzuHl5eTCZTAG9Wb3ec03iCY4kfV4OhwNutxs5OTnnUfvrdDqcPXuWe9Pm5+dDr9ez4UpZfJ/Ph5SUFKhUKoSHhzPr+FDz5btKcDaA5ps08v5tRtJQ71HEOiQkBMXFxZyxy8vL43PodDocPXqU9f5IHD+FQoGvvvqK577UuZM63nSNUoM3+JqptyLNXfqslEV4yZIlkMvlOHHixPd2UIPPT+s42OGSOjFSh3VgYICJRyi4C/hb+Jw5cwY2m43ZyWlMKDsaGxvL36F10N/fj8TERPT396Ojo4MDLzKZn1hQJpNxQNhut3PGUyaTMTzY6/VCpVIhJSUFISEhaGtrQ3NzM/r7+xEdHY22tjZERUVhw4YNqKqqQm1tLSoqKrBhwwb89Kc/HXIe0bOVPs9p06Zh9OjR7KSSTUL3SseSQg8JLtnb24vU1FRkZGQgKiqKiaHIMaLsKekYMrJFUYTD4UBrayuTIf7qV78KIDUju2bs2LEoLCxEQkICzGYzFAoFCgoKEBISgokTJ2Ls2LF8rYMFiEjI4ac5Ls32SceF1uuF5tiPUbRaLTNRNzc3M9qN7ovYfTMzM3HllVdi0qRJw66nD3bCpI7XcCTYtqI1NFyRrtvgQBntlwDQ0tICudzPntvY2MjP2Ov14oorrgi4hwsJ2YNSAkRpZp6y0MH3JHW66XvS9UIw+YyMDB4/siUpaUQlFBTMAc7t0R0dHbjhhhswMDCAbdu24fbbb0dtbS30ej1zVej1eqSkpKCxsRGRkZE4efIkOjo6sG/fPnR3dzP60W63Q6/XcycGt9uNbdu24cyZM+ws22w2LFiwAFFRUbjsssvQ1dUFg8EApVKJ7u5ueL1epKSkoLS0lJ8D+RsXW4YT1pgG4A4AMwVBOPbNzxwA6wFcJQjCWQBXfvM/AGwH0ACgDsBfATzww1/2xZXNmzefZ4RIJ6A0+jhYNpUWF0VOFi5ciKeeegrPP/88Dh06hIqKCtx8883YvHkz2tvbYbfb4Xa7UVFRAY1GwxlRghBJo0eCIHChO2UaqaaIiBfo2oBzhDQUVaHFpFQqYbfb0dPTg/7+fsTExMBisfDfJ06cgEzmbzMTzMA6XAedxm2w10RRxKRJk6BUKlnB2u12xMXFwWQy4fHHH+c6vtzcXF6wdC4aE+m5Y2JicM8998DpdGLhwoW88KXPkjYmmUzG8EFyxpubm7Fs2TIkJyfj5MmTjOlfs2YNAOCuu+7C888/z86+QqFAX18fqqqqsG7duiFm08WXn/zkJ1Cr1Vi3bh1fB0X6CwoK4HK58NBDD2HRokV48803sWTJ
Ejz55JNcUwAAer0eaWlpOHjwIPdZdblcnPGjFhbkDFA/RI/H339Vr9czNbqUbEcqg8HQvotREPwd2girqqpw/Phx1NTUoLW1la9VukFTZN/hcKCjowMWi4UZUqldSltbGzQaDRMdeL1evP/++8jJycE999zD0dC4uDjMmjULkydPxvTp01FfX4+uri44nU7ExMRg3bp1zB6tVqtRVlaGxYsXo7GxkY3Q6upqPPbYY9yjtr+/H7Gxsewg0z3I5XJmGP76669RVFSEBx98kKOgtMl2dHQgLCwMX331FUpLS7F161a+byks9PDhw7BarXjuuecgk8lw5513Yvny5QEOotFo5Ppth8MBAIwmqK2txcDAALPodnd3IzMzEy6XCw6Hg3UNZVcTExPh9XqZlIH0wsDAAFpaWpCVlRWQDVIoFKirq0N/fz8uv/xyAMCZM2eg0+mgUqn4e9JSg8jISK7fIUgSzQ+pE/V9RerESfcCIBBeKn0tGJI8mFgs/soZWmdEVDd37lzIZDKMHj0azz77bMD5B7uuoURqtFOAU2q8DxUsIodCyolAREpSo1La0oBgbNJs1/cRKRSO9mEpjBU417Ih+FxE/qHVahETExPwnaamJm4JQ9lOukcigqMa8tjYWA7W9vf3c30+rcu0tDTk5OQgJCSE4e3UCzUhIQEKhQJtbW18naGhoYiPj0dYWBgaGhqYJZuM3oiICFgsFqjVanz66acIDQ1Fc3MzLr300kEdLODcfivd8+hzaWlp55Ge0POiQBiNr/R5ymQy6PV67hgQGRnJhr3VaoXNZuOMf39/P/fVlRreXq8XS5cuhdVqxU9/+lOMHz+ejx8VFYXPPvsMGzduxMaNG7Fp0ybMnz8fH3/8McPIiZ9COh9ILxLyoLW1FQcPHkRbWxvMZjNnf2Uyf+10ZWUlCgsLERMTw3ouKiqKx1wQBGzcuPE7zM5/nxBLrMViYZ1P9cUhISEIDQ3FzJkzGdY9XBnMfh2JSHXeYFnGbxNpkCz4egA/Qy7tJ1FRUZDL5WyTEHqhvr4excXF7HgOJ1sszQ7TPjFjxgw+F+mEYBtXuldJr10Q/ORmtP8RzJacT4fDwbYTlQkEj8P8+fO5PMhgMKCpqQn33nsvAODkyZPo7u5GeXk52trakJGRAUEQMG3aNMTExMDtdkOv10OlUnE5gM/nZ+K2WCxcQqjX65nnhljCKysrkZeXxy3LqI81JcpSU1ORn5+P1tZWaDSagP7ZF0tGzOJ7US7iR8DiO5jccsst/Ld0wksXABlb5Eh6vV6kpaVhyZIlzBz4+uuvIz09HePHj4fFYkFaWhpD1ZqbmzFu3DhmtO3q6sJHH32EuLg4GAwG1NbWMuyIMimRkZEMnzt69CgzNAJAbGwsTCYTBgYGmMCBal80Gg0MBgPkcn/fPKrtioyMRFRUFKxWK288xMKal5fHGUySwZTXUAbIUIpO+vna2lp2esxmM1JSUmCz2RASEoKVK1fyIouPj0dbWxt27doVYLBERETghhtuwNq1a2EymfhY0ggeKQzKkpHRRX07o6Oj0dzczG1nXnvtNe5ZSZlolUqFhx9+GP39/Vi2bFmAMSAIAmcn/hOycOFCDjrodDosWrQI/f392LlzJw4fPozbbrsN77//Po+Fw+FAf38/+vv7ERERgfT0dPT19eHuu+9GVlYWFi5cyOMUFhaG9vZ2KBQKnD17lpmhyZihmiQyWseNG4fc3Fy+tqEM36FkMPii1BHx+XxM8kUKkmB8wUYrGbKNjY0QBIENFiI0KS4uZjhOSEgIduzYAcDv4BO0Ra1Wo6+vD3/84x+Rnp6OhQsXoq6uDunp6Zg8eTKuueYaZsu755578PLLL7PDvmTJEjz77LO46qqr2Ai85JJLcMcddyA9PR0ymQy/+93vEBERgWeffRaiKHImhjJQGzduDFjjcrmf9fPee++Fy+Vi0gOqjfN4PEhNTUVxcTFmz54NuVyO66+//rxMgiD4WVsTExPxm9/8htdEQ0MDHnroIYwZMwYDAwOsM/Ly8pCUlITW1laeD1RnTL1Rz549i5SUFISGhsJut0OtViMpKYkDYRS0owx7fX09srOzmWWU7u/06dPo7+/H+PHj4fV6sXPnTsTGxqK+vp4JG4qKitDR0YG0tLSA5/7hhx8iJCQEGRkZ+PnPf34e2kJqlEiZ0Yf6zA8h0kyOdB8hw4faa/X19SEzM5Pfu/322/n+xo4dixUrVgQcN5i0I1hIdwJARUUFsz4TRFXq6NDnyIEiPRofHw+z2czEeYR2oTUTHAAQBD+s7e9//zsUCgXeeustAOfXk3/buA/lgH8bvFAq69evx4IFCzjDQvd4/PhxeDyegHZhLpeLnV+dTsc9oMnAk2YkiRGb6kbHjh3LjitlGBUKBbdF6unpgclk4nGNiYnhwGxjYyOio6MRERHBZGonTpxgEraQkBBGM61atQputxvp6el4/vnn2YAOdkgvND5nz55l3TmYHhYEgbOsdGy3283164IgoK6ujjOlgL9OUKPRsANPARDaI0k/azQaJCUl4YEHHoBc7ifR0uv1+PTTT9mRpXNnZmZiwoQJuPrqq3mf+eSTTxASEoKamhq43W5cdtllOHXqFI4dO8ZMx5Th8Xq9sFqtCAsLw8svv8z6hxBira2t2Lt3L9LT0zFhwgQmrFq6dOmwIePIKAAAIABJREFU5ta/WwThXN2pNMtIvAOZmZlcJjJcoUDSd4HhSuX7BqCG4xCfOHGCA7adnZ0YN24crFYrsrOzcd9998Hn8yErKwsvvfQS82HQXCaRBswupN9ra2sZBRV8bYPpH2l/U9ILxO1C7Z5obya7XKFQcH9pgjHHxMQgM9PfQ12j0UCtViM0NBRz587FwMAAPv30U+acycrKQlNTE66//nq4XC7uSW4wGLBt2zaUl5dzuypKUBGJIQC+N9JbNAcoWBcXF4fbbruNr50QVydOnMCsWbOQk5MDj8eD5cuXj+RR09hdnDYzF0N+rA4qANx+++2DRiulaX6v14vU1FQsXboULpeLe/aFhYVhxYoVmDNnDvdVo1Q/1TfK5XLMnz8fBQUFvHisVis2bdoEpVKJo0ePIjo6mjcTr9ffhNlkMuHw4cMcdScyjcjISK5ho1YOtPHGxMTAaDRiYGCAyZJogpJxFBcXh6amJoSFhSEyMvK87Cnde7CMxEEN3kDp/JS1bWhoQF5eHl/3ggULkJSUBEEQOJL98ssvY9SoUYiLi8Pf/va3AEhfsNElk/lZ2ShaRUZyV1cXNmzYgD//+c+cWaPNmLKm69atY3iERqNBREQELrnkEuzcuRN2u50dHkEQ8OWXX/7HHFRqZfL0009ztox6pc2aNQu7d++GzWbDY489hri4ODQ3N+Pll18OCK7IZDJMmTIF1113HZYsWYKOjg4mS6K6D5PJhNbWVhiNxgD4X0hICMLCwqBWqxESEoIrr7wyIFsdbHgGryf632KxoLy8HCdPnoRc7ifouu+++zhoIjXypM4WOUqU3SSIe2xsLD//lpYWTJ06FXl5eQw99Pl8OHT
oECIjIxEdHY3w8HDONIqiCJVKxcp68eLFfK7FixcjMjISM2bMQF5eHiIiIrBhwwZ4PB6sWbMGTz75JMOElixZgq6uLnR1dcHn8yE7OxtqtRq//OUvUVhYCADIysrCLbfcgj/96U8wm828YYWGhmLRokV45pln4PF4OGopCH4Wz0cffRQymQyrVq1ikiuC3KWlpUGhUGDJkiVMhnDvvfdCoVDgzjvvhEqlwpdffgmr1cq1dIsXL4bH48HPf/5zZGVlwWQyQRAEFBcXQxT95FI9PT28kXm9XpSUlKC7u5vZZ6kujaDAFosFdXV1TLhF8yA1NZUZaqXQSqvVisbGRuTn5wfANCsqKpCQkIC+vj7s27cPs2bNglKpZCIQOrbH48HWrVshl8uRlpaGn/3sZ0Ma7PQ9qbEurW2VsntfLJHJZDh+/DgAoKOjA+PHj0dfXx/S0tJw//33w+fzITo6GsePH0dFRUXA9Uh1ndQBJoeMXlMoFNiyZQsEQWCnigwj6TFoTQH+zCNB4I1GIxwOBxs6lL0gNI5UF1B245VXXkFjYyO3OAgmdaKxJf1PThYFeukah3p2wwkirF+/Hvfddx9nf71eL6qqquDxeGA2m3lPoB6poihytrOvr4+zH4B/vgL+rCsZvlSvFRoaCp1Ox1kKhUKB3Nxc+Hw+NDY2MvlSaGgoYmNjAfizYQaDAZGRkVwX6/H4e7CSg1dYWAhB8DN92+12aDQarFixAm63G+PGjcM///lPVFZW8lgM5XRKRRRFWK1WZtSX1vqRvpYGK3w+H7RaLWpra+H1elkXNDQ08PuEQoqIiGBnmmwL2oeIaEetViMtLQ0PPPAAKioqsGbNGtYnvb29XI+nUqmQn5+PW265BZdffjmSk5PR3NyM7du34+qrr8brr7+Offv2Yd68eVAqlUhPT0dzczP27NkDl8uFtrY2LFiwAJmZmZg/fz4efvhhuFwuZGVlITw8HAkJCWhtbQXgr28PCQnBrl27oFarf5ROqiD4e9tLYamC4G/5k5+fz8zmwxVycikgMBInMzg5M1KRzrPhHkOhUPBcNxgMGDVqFKxWK1QqFZ544gnODrrdbnzwwQcBepLW8HAzw4IgYO/evVAoFHC5XIwWkdrhUqeexpCyrnK5n2GZmLipjA04F6CijDcFU0RRxIQJE6DVajEwMMC2VGhoKPdEveSSSyAIAv71r3+xvW6xWJCVlcUlA83NzTh8+DAOHDjAhIEDAwMBhGyk16g3rtSPcTqdmDNnDjZv3ox7772X2fzlcjk+//xz1NfXo6+vD6+++irKy8tHjDoYiYMqX7169YgOfjFkzZo1q38M1zGYvP/++wFQLenDveqqq3D//ffjyiuv5GxUZ2cn1q1bh9bWVhw7dowzmgBYqUhhUAqFAkVFRczYKYoi9u3bh/r6em5wTjU0FIGkiUbMmxTZ1Wq1TMZCr0k3RsrC0IJwOBwcJZey+dF3SkpKGE4slcGyYcFOvNQxGUyCPysI/r6LlEkGwFm5Q4cOobu7G4WFhdyPb+rUqXjmmWdw6NAhhmYGGzMEpZDL5cjOzkZPTw9nSH0+fzuKN998E8uWLUNpaSlMJhPT+O/fvx9yuRy/+tWvUFZWxgYUOTpqtRpWqxUul4ujyk1NTfhPzWOtVotly5YFBE0mTpyI9vZ2zJ8/H11dXbDb7aioqOCsKkXO6D7y8/NRVFSEuLg4VFZWwmQyobvbz32WkZEBm80Gh8MBm83GCpVQAkqlEmq1GiqViud3TEzMoM8/eP5YrVYcPXoU9fX1+Oqrr7ju4eabb0ZOTg4MBgNn4EiCn7fVag2IYFosFgwMDODaa69FVlYWMjMzYbfbUVdXh6qqKoZAOxwObmWi0+lQUlLCJDwAOEtoMplQXl6Oyy+/HD6fv61TXV0dioqKeN5ce+21OHjwIHbt2oVHH30U27Ztw80334x33nkH11xzDZOifP3118jIyMCtt97KDh8x9N58882or6/nMfb5fKipqcGiRYtw5ZVXYs+ePexEOJ1OHDhwANOmTcP06dNhs9nQ19fHxjjVS1dUVGDGjBkIDQ3FL37xC4Zse71edHR0wGg0ci89usf33nuPjYbQ0FBkZ2ejra0NJpMpoNwhOTkZcrkcjY2NrKOoFrGwsBAWiwVnz55lAqeenh6YzWaMHj2aocONjY1oamrimhqj0YgxY8YE1O+sX78eKSkpDCnv6elBe3s7UlNT0dHRwQE30jljxozByZMnYbPZEBERAZ1ON+i6oXlEa4b0pvS1iyHSuVtTUwPA3wM2Pz8fFosFGo2GAws+nw+5ubn461//el7kn3RnsJMqfY/+rq+v59pxqSMrvWfp90VRRHR0NARB4EwiBV2o9lQKC6X9g37feOONMJlMaGtrCzgfCQVhaD7T/krXK/39XeWrr77C5MmTuS6spqaGg7GU1aO9Kz4+Hmq1mt+nsSeEjUqlYhI4gr2p1WoOaNntdm71NW7cOF5fNK/kcjnGjh0Ln8/Pckswu4SEBGi1WpjNZnR0dLAxrFQq0d/fz0EzIm756quvMHPmTBgMBlx++eX47LPPBi01Gkqo5CAjI4P1INkmUrgi3TsFOmw2G6Kjoxl+SK3spHsBPWOaF9LsHI0z7eFlZWUYM2YMjhw5wm1spMEJQRCwYMEC3H777WhoaEBqairUajWmTp2K3bt3Y+vWrfD5fDhy5AgOHTqEbdu2Yf/+/SgoKEBZWRm3IlEqlWhpacGYMWPgcrnYeDcajYiIiEBtbS3+8pe/4PXXX8fChQvR1dWF6dOnf695dzFkzZo1XFuoUCigUqmgUCgQERHBUPGhRLr2pOsMGFmpjdSukwY1RiKkK0aatRVFkRMs1G5Mo9HAZrNh1KhR3KM9OzsbZrOZbRe6x5GejxI9FICi/UWqu4ODZFIkpcvlYqJJClwB5/hbaM1SMKegoICDfWazmcv2pO2uqqurOVBD7f80Gg1aW1thMBigVqu5bMlkMsFkMrHOlwZb6TqlAUnqRkB2xP/8z/8gJyeHHeyenh5cd9113BEiNDQUCQkJXB8+XPkm+dO5evXqV7/ts//NoA5DbrvtNn64K1euRGRkJGOwDQYDoqOj8cEHH2DatGk4cOAA6urqsGTJEvT19eHQoUOYNm0aDh48iEmTJuH48eOorKzE1KlTcfjwYXR1daGgoADjxo1DcXExt+vw+Xx49913ER4ejt7eXphMJja2BgYGsHv3bs4aUkSN6khdLhc7q+RsqtVq2Gw2doqJWImgS/39/Vz/ShGbwWoJh5s9/bZIFX1nMOVYV1cHp9MJmcxfU0ckNEqlEs8//zz+/Oc/o7a29jyDCvArDHIa5XI5Wltb8dxzz+HJJ5/EzJkzUVVVhWXLlmFgYAB79uwB4KfJ7+3txS9+8QusXbuW242Ior9Wq729nZ18UtAEC6Oswpw5czhr9p+Shx9+GMXFxbj00kuh0WjwwgsvcBaARCaTBTBRhoeHc08+wN/AeenSpTCbzVi1ahVny7RaLVpbW9l5JPIku93O5FvU4D0kJATR0dFISUkJIC
QBzvWYlT7zsrIyeDwe6HQ6xMbGMnxYqtCl4vF40Nvbi6ioKA5CNDc3Y/z48ZgwYQJEUcTBgwdhMBiYPZOcGyJ/IodNFEXs3LkTiYmJKC4u5nMcOXIEvb29PMfCw8PR39+P3Nxc3HnnnXA4HHjllVfQ3t4OlUrFLJfkhIaHh2Pp0qVoaWlhIpZ58+Zhy5YtyM7OhtFoxPTp0+HxeLBlyxacPHkSgN9oX758OV544QUmoKL59vDDD0OpVOLRRx8NgBRFRUVxVkWv1+PNN9+EzWZjp10mkyEtLQ1XXXUVCgsLIZPJcPjwYRw6dAh6vZ6Z/oixWSaToby8HFFRUdBqtRg7diyampq4rIDYCYmUiBwfeq4ulwulpaWwWCyYPn06PvzwQ0yePBkffPABkpOT2Tltbm7mujtyfIiE5fLLL+fNs7KyEh0dHUhNTYXFYkF2djZOnTqF48ePIyYmBqWlpWhvb0dmZmZA9tBut2P37t0oLCwMYCH/Nhksqk+G9g8tLpeLe/zRGjCZTJwdJ+cgMzMTTz/99Ihqfsig8vl8KC8vh8ViCeiPSEKZRalDTo5bXFwc+vv7A1oOhIWFITQ0lANT0rZMhDC4//77UVpayv0v6TxDOcLfBrn7rrJ+/XosXLgQbrcbVVVVvGdThpIyp8RqKQgCjEYjB23VajXrR9pvqUdiTk4ORFHE6dOnMTAwwD2UAX+9p9VqZbSURqNBWloabDYbamtrWWfEx8dzBj08PJy5FSgDSzD7hoYG2Gw2ZGRk8FolaF1ubu6wAqOD7bV6vZ77v5KTSgY0fTY0NBSdnZ2IiorieSGKIqKiohAaGor+/n4uh3G73az/aY5IAxlSeCoArnHr7u5Gf38/UlJScPr0aXi9Xjz22GOIjIzEpZdeirNnz7KNQ9cxc+ZMtLa2MlpI6vi43W4olUqe57SnAP79Y9KkSZg7dy4UCgU0Gg1iY2OhUqkYMbRv377/GDP/UCIIAlJTUwPGFfDPlYkTJyI2NnbIgNpgSKbhZNul3x/s75HISBzhocRut3OP0c7OThQUFHCt5OOPP46BgQHExsYiMjKSS1++q14RBAH79u3j1i9k/w2mv0goECYlIouKioLNZkNnZ2dAEJ0g8RqNBnK5HFdffTXMZjOcTicjt6jsRhAExMbGckBs7ty5ePXVVxn+PzAwgMzMTFRXV7PNd/r0abS2tqK1tZWftTToRg4qrf3BAp9utxsJCQlITEzErFmz4HA4GPGh1Wpx4sQJABjRWhlJBnXo3gz/FZZ33nkHvb29cLvdGBgYQGVlJdPU5+XlITQ0FMeOHcPRo0d5ERL0ThRF7N27Fz6fD7t37+ZoxkcffcSTvLq6Gtdffz1CQkI4YvrZZ5+xsUvKlTYwiuio1WrY7XaIoojIyMhBawloYyDWMGm0mhxhghGQkUjnkooU6vR9JFghDqawcnNzUVVVxdA2cqbtdjseeeSRgI1UGu31+fxF6OHh4bj22msZhuR2u7F+/Xps2bIFJSUleOutt3Drrbdi3LhxOHbsGOLj49HR0YHKykrulUf3Gh4ejsTERHR1dfFClhbIq9VqREZGYtu2bd97bL6vPPfcc/jtb3/L2XWDwYC1a9dCoVCgt7cXR44cQXl5OUPVaOzb2tqwYsUKJCcnc01EdHQ0ByyovQkAaDQaJvWKiIjgvmRSghRRFLkNS7CDKjVG5XJ/78ucnBy+HlLwUkOciDAI0jIwMICYmBh0dHTgkksuQXp6Onbu3ImioiJ0dnaivLwciYmJ/HmKLJMhWlxcHMBgeeWVV+LLL7/ktQkAkydPxscffxxQZ+zz+TizSpAbp9OJ3t5eZp2+8847sW/fPsyePRsNDQ0YPXo0ampqkJeXh1dffRULFy5ETU0N4uPj8dvf/havvPIKioqKGL7k8/lgNBqxePFiPPXUU+y4yGQyrF+/HsuXL8fMmTOxa9cuXu8mkwlPPfUU7rjjDuh0Olx22WX46quvYDabAZxz6Hfs2AGj0YgrrriCs+T5+fno7u5GSkoKs/VR5FkQBIwbNw42my0ADklrIzo6Gl1dXQHGj8/nY6hZbGwsysrKMGnSJCQmJuKKK65g50ilUmHixIk4cuQIenp6kJmZiaioKA587Nq1C1dddRXkcjm2b9+OW2+9FdXV1WhubmYDNDk5GR0dHRyplhoMgiAgOjqaSViCA1kXksGcpcHgvyQjOXbwMdva2vg5xsXF8XpKSkpipEx+fj527drFRsNwz0XrWyaToa+vjwMd0kwWZVMGM7YAcLaP5iEdT4oEkp6P/i8qKgowDun1wWpmv+v4DVc8Hg+3JSKEDu3lUiJCmUyG7u5u1g1UNuPz+ZiIhUo90tLSIJfLmd+BevsS8qG9vZ0DLz6fD6NGjUJ/fz/Onj3LQToiDjtz5gwiIiL4emUyf4uQ/Px8iKLIUHq1Wo3Ozk4kJSWhr68PJ0+eREFBAc6ePYtVq1Zh/fr1F4SjSzPWJImJiQgJCcGJEye4RpOuj/ZVIl+kdUW2gMViYf0aExPD+sbtdnNrCwryUk2u9NiCIHCrL6/XyyzkbrcbW7ZsQVxcHDweD+Li4uD1evHpp59ytnncuHH4zW9+gz/+8Y/o7u6G2+0OQIuRLSXNHEVEROC+++5Deno61Go12traYLVacerUKUbRqNVqqNVq1NfXX4yp+L1FpVIFBMhpP71QgEcaIBrs9W+T7+PkSYUco5GKVMdQcIxsYincmWqLKeM4WDB8pDJhwgRUVVWd55TSXJM6/RTUonVGpGGCIHAvV6k9QcF9l8sFlUqFgwcPYtSoUecF7ORyPxsvMeNTL2yqQW1qakJycjIqKio4uEa1q8GthaRswlK0gzQgReuWxq6vrw9dXV3o7OxEWloaxo8fD5fLxUhCaab6h5b/QnyHKbt374Yoili0aBFmzpyJt956C/PmzeNo/datW/Hzn/8cDocDs2bNYiO+sLAQfX19nKGk7CY5UldffTX6+vqgVqvR3t6OjIwMAEBBQQGKi4uxb98+xMbGwmazcQbB4XCgpqYmoDG2UqmEzWYLgNKQISeTyQLo+MnRcrvdCA8Ph9VqhUajQW9vLyv4a665JuD+SRkEy1BKbrhZ1aGOmZiYyNkYUjbELEubDTHZiaLIGPsHH3yQGxbPnDkTRqMRTqcTVVVVuO666ziD0N7ejsrKSnz++ecoLi5GfHw8Pv74Y4SHhzM8VKVScfSJmpX39/dj48aNKCsrCxgbjUaDurq6/xjElyQ2NhaXXnoptm3bBlEUUVZWhv379+PAgQNMjLFw4UI0Njbi5ptvxt69ewH4oW/19fV48803sXv3bjQ2NmL58uX44IMP2PFITU2F0WjkYn+1Ws1Kk7INFCGnwEhiYmKA8rNYLDAYDHA4HAEZB5rH5JhaLBY2jAFwln/MmDHIzMzE2bNnMW3aNBw+fBiVlZWIjY1Fc3MzjEYjUlNTkZbmb8VMZEXSaGFbWxtycnIC5l5OTg6++OILjBo1ij+bl5eHuro6AP6m9VFRUfB4PNi7dy+mT5+OoqIiHD58GKGhoQHwG
kHwkzE1NDRg7Nix0Ol06O3thd1uR19fHwoKClBfX4/PP/8cp06dwk033YScnBzOUh49ehRZWVmYO3cudu3axayuguAn4rr11ltx1VVX4csvv+Sxc7lcOH36NEaPHo0xY8YgPz8fZ86cgSAIGD9+PEpKShAWFobTp0+jq6sLU6ZMwZw5c1BSUoKvv/4aOp0O8fHx8Pl8aGlpQW9vL2bPns31o9KaMplMhuLiYq4JlmbPqDerSqVCb28vMjMzkZmZyUG03t5ejBo1ClVVVThz5gyys7ORkZGBsLAwRjLQsSIjI/Hcc8/h1KlTaG9v5wyYxWJBTk4OEhISUFhYiPLycuTl5XFbDulGa7FYUFpaOiJGywsJXZv057sab9LMpNlshk6ng9PphFqtxkMPPcQGT3x8PN58803W7cMV6Zprb29nCCUZJlKoL32ezkERfpVKxU4XZVnJASCUijQ7Rse46aabIJPJUFNTc9740J45Erjdd5WvvvoKbrebgyzEXEljSxkOhUKB7u7uAKOf6tdNJhNDy5VKJRISEgD4WUOJ3ZZIlei7Ho+HW8+MHz8ePp8Pp06dYl2ZmZmJkJAQHD9+PKB0KDQ0FJmZmSgtLQ24j/j4ePT09PDelZKSgoMHDyImJob3SiJoGmy8g6GJUiGUFgXDpYFG6bHoOqXBCSobio6OZoZSIpyiGjcpnHGwLDoRMA0MDCA9PR1bt27lQDlBLInEkV7bvXs3cnJyUFxcjBMnTsDhcHCJAc1faXCeUGMHDhzA559/jk8++QRlZWU4cOAAqqqq0NDQgFOnTmHnzp3Yt28fBEHA/fff/0NPx+8la9asQVJSUoCDSiUo1MYo+NnTc5IGj4Yj3ydjKoXp03wfyn4cybXQ8VQqFQeayBkjFtrS0lJOxlBQeyQ6UzpeMpmMazqDmcOlNcA05tJAANlMNNeJK4HaGQLnHHZi1u7v7+egDADm1aBzEEO+SqWC0+nE9OnTcc0112DXrl0AwCzfWq02INHS2trK1yNFZNH/0rkhtcGkr8lkMjgcDtjtdlRWVuLo0aOoq6uDwWCAQqHArbfeOuwxHgnE9+LvEP8/kblz56KlpQUxMTH4+9//jgcffBCdnZ28+DZu3MjtH8rKytDc3Ay5XI6KigpWqh6PBxMnToTdbkdhYSGcTieOHDmCyZMno6ysDEVFRbzRuFwu7NixA3fffTc8Hg9aW1vR09MDn8+HgwcP8mZCBdwul4snsbQXHTkNAFhpkxGiUqngdruhVqsDokHft8fRUFEyqfKk8wWL9DWdTofRo0fD5/NxxJsyJQRL7Ovrw4oVK/Diiy9i8+bN8Hg80Gq1qKysxOrVq1FaWoqKigqo1Wq89dZbGDNmDP75z39CEARkZGQgJSUFmzZtwj/+8Q8A/gyJUqnkGoe77roLMTExzHIWExODU6dOYeXKlbj11lv5ei82kcpwpbS0FE6nE/fddx+uueYaOJ1OlJaWciTsqaeewttvv43f//732LRpExN4hIeHw+Vy4Y033sBzzz2H3/3ud7jnnntQWFjIdVUE6yLFarfbuTifWjkAYGVOhrHBYIDBYEB3dzdDt8n4EUURdrsdLS0tzD7tdDrhdDpx+PBh7NmzB+Xl5Rz5++KLL7B9+3Zu0aLValFaWoopU6YgLCwMl19+OQoKCqDT6XDppZciMzMTXV1d8Hq9DKWUyWTM1ksil/tp1E+cOIEjR47giy++wM6dOzlTT3AWinhu3LgRMpkMixcvhtvthk6ng8fjgdFohFqtxo4dO1BYWIhNmzZh586dyM/Px+jRo7F27VrU1tZi8uTJcDqd6Onpwbp166BQKHDXXXfxPHvjjTdgt9vxxBNP8NwiHfLss8/C5XJh/fr1AU5Sb28vXnvtNezduxcxMTF44IEHYDAYYLPZOBtnNptx5swZ/OEPfwiIBhN75sDAALq6ugAAXV1dqK+vZ0OenNRx48ahtbWV6fRpA42IiEBOTg7rt66uLmRmZrKRWVdXh6ysLGg0GsyZMwcLFy5kCDmd12az8Qb5l7/8BTfccAN6e3vx9ddfsxNEwT/q8exyudhop/EjKSkpCUA8XAwJNv4oA3chkclkqK2t5XtKTk7mcovLLruMW2ckJyfjtddeY4NipEIO1mDOhfRvun5pJjUiIoJrochwIbgm7THAuRZsdD4aA6mxIx2r4Y7RDyWUBSYdR04Y9YxUKBSMBCBdRvXzpJNo34yLi4MoitDr9ew4xcTEMESUMhfUfkmpVOLYsWM4fvw4Q/9zc3OhUChw6NAhriOUy+VQqVS4/vrrmTiNhAxKIkyKjo7mQMaOHTtQXl4Ok8mE0tJSTJs2jdcrjbc0GzKUTJw4EZdccgnzU9B3yVYg20AamKHnTkzfGRkZSE9P5/pIQoMQuRtwDk5O10T319/fD51Oh4ceegjR0dF499138cUXX+CTTz7B3r17sW3bNuh0Olx++eVQqVQcPDx27BiPT0REREAgks5Dr9H5KNhCrxHCoKuri52Aurq6gKz2j0XI4ZNms4HzW2nRz3c5fvCzHq5Is4PS7/8Q2Vepk0pByNDQUN6DFAoFbDYbX0Nvby/mzp07KAvvYMeWBnGAc2RxPp+Pg9m0lqS6Unqf0sAfzUkK4gAI4OggIe4aKp8holBKuEivj/wI4hP54osv0NraCoVCwRwWiYmJzD0hbVtFxwlee8H3Ib026d/0PQq6EQN6V1cXQ64vhvzXQR2B3HTTTQx/fPHFF/HCCy9wWxOlUgmj0YhPP/0URqMRgJ/NUkp00N/fj+rqaoiiyMZuT08Pf2ft2rUwm80M45gzZw4aGhpgMBhQUlLCG0J/fz+TIdFEcblcsNvtARPZ5/Nx1lGpVCIrK4trbmgxkrNBDJyCIOCqq646794H29yG+xpwfqZ0uNCLsLAw3phFUWQqeKVSCY1Gg8jISF6gtbW1mDVrFo4cOYKmpiaH4i+tAAAgAElEQVQMDAxg/fr1uO666/DOO+9ApVLhk08+gUqlwttvvw1BEDBz5kykpKQAAFauXIkpU6YgNDQUhYWFyMnJweTJk7Fq1So28rxeLzZv3ow//elPGD16NNasWfOjcU5J6Pm/8847sFqt2LlzJ373u98hPT0dDz30EKxWK9auXYvCwkL89a9/hSiKaGlpwfLly9HQ0IAlS5bA6/XijTfeQHZ2NgBwdpBIRMgIIPgZrQEiJMrOzuaegAAConImkwkNDQ1MCkZIgEOHDmH79u3cUiQtLQ0TJ07EmDFjAPizCFlZWRg3bhySkpIQGhoKmUyGlJQUiKKIkpISVFRUBCjcKVOmMNFSTk4OzyOlUom9e/diz5492LVrF3bu3Inu7m50dHTAYDBwLbgU7p2VlYWuri6EhYXBaDRiy5Yt8Hq9WLhwIYxGI2JjY+FwOBAbGwutVouDBw9i6tSpOHDgAF588UWMHTsWaWlpWLlyJdra2rBt2zYmX3nppZewc+dOrFu3DjqdDnK5HH/84x/x0ksvYdWqVYyAEEURDocDzzzzDFwuF/e9pY3RbDZj//792L9/P7RaLTZt
2oSDBw/izJkz2LZtG0fcZTIZnn32WQiCgPXr1yM8PBwejwfbt29nUreOjo4Aw0epVCI3Nxc2m43RC4B/89JoNMzKO3PmTLz33nuYN28eO6enT59GZmYmkzOFhoZi06ZNmDRpEjNsUysnu92O48ePsx6sqakJMB4EQUBVVRXXvEVHR3NbGoI4SYlbmpqaziMG+aHXm9Q4pIyR9Cd4829paeG52NzcjLCwMGakJH1HkXUiGfouhl5bWxs7wlQ7HBwolBq6dD90H/RZKqeQtpUBwHMJOJdVoKDivn37hrzmiwnpDRa73c6BL8quaDQa7nVMRCKUgUtISOC9RlqbGxYWBp/P326BiKx0Oh20Wi1aWlrQ19cHi8WCsLAw6HQ6dlKpZt/r9fI6q6mp4bFTKpWIj4/HtddeOyiEXKojCwoKuJSFrnnfvn1QKpWwWq2YMWNGABRbOhcvtN/K5XIm2yHEhlwuZxg0OQE0T6RBKzLEu7u7ERYWxplnmitGoxE2m42fA40nBZ3pWktLS3HkyBG8+eab+PWvf42rr74aMpkM7e3taGtrQ1lZGV599VWcPHkSLS0t0Gg0yMvLw5w5c5CUlMTQSqollTradI/B90widfooAPPvnKPDFRrXsLCwgMQDQUBHIlKnj36PNNMqFWmmnJyh7yukn6Ti8/m4HVNsbGxAZvOTTz4BcK4kjQg3LySDBeik50xNTQ1wOKWfkcKWg9EoUmefAmC0NmhNkm6gfdLr9QaQt1HwWKlU8loE/KgYu92ODz74AGvWrEFHRwf0ej0MBgN8Ph/b/BkZGQHlerQX0bMhHUH7JQV0CAUi/Z50T6A91mQyob29fbiPc8TyX5Kk7yB33nkn7rvvPmzYsAF/+9vfOMPU0dGB+Ph43tRCQkLQ1taG9PR0tLe349ChQ5g6dSoSExPR29uL+Ph4WCwWJj4qLy/HmDFjEBsby5E9wnhv3ryZHSsiDHA6nQDAE5A2FYpqEuSDMqxUX0ZKnDZNosIG/E7AjBkzzrvn4cBzh4pUDab0hoJ8SF+j71AW4NSpU7ypG41GpthWq9V45JFHEBYWhkceeQSbN2/GQw89hBMnTiAyMpJ7WhoMBtx7771MWmMwGDB69GgOGiQnJ2PhwoUwmUxYvnw57r33XphMJpw4cQK//e1v8dxzz8FsNgdAVh555BEIgoAdO3bg6aef/tFsaj/96U+h1+uxbNkyTJ8+na+LNvuCggK88cYbePjhhzF//nxWgps3b8aECROQnp6OhIQEyOVyNDc3w2KxQKvVIjExET09PdzbS6fTYdSoUTxfSYmRE0tzsaenBwkJCfB6vdxsntoZpaWl4aOPPkJ+fj4iIiLQ2trKxf/19fVwOBzQarVITk6G1WpFTEwMk5xYLBZMnTqVDeja2lpkfgOfkyIXDhw4AJvNxrUgJGSw0sZjNpsRFxeHjo6OgM8SrFEURYSFhTGs/NFHH8X69ethsViYMt5kMuHmm2/G4cOHUVBQgP+Pve8Oj6pM27/PmZZJJpPMJJPeCOkEEkJvhiKIEKRY0NXF1bXsWtfv213dXRtWUNHd9WNZu3w2WFFXFFBAukBoISGkF0idlEkmU1Om/P6YfR5OxoCBxf38XZfPdXEByZT3nPOWp9z3/URFRfG9nzdvHubOnQulUonrr78et9xyC6qrq+HxeFBUVITc3FwkJyejpqYGH330EURRRExMDCtPP/vss3yYkBCTKPp6pgqCMKgNTVZWFm677TbI5XLcddddiI2NxcKFC7Fv3z6+FovFgl/+8pfQ6XRYvHgx3xdBEDBixAgA4OpOUlISnE4nzp49OyhrrNFoWIV87ty5eP/997l/mtfrRXFxMVfyaewVFRW87+h0OkRFRXHyqL+/H2VlZVi8eDFOnjzJjcGJl2kwGBAdHY2ysjJMmjQJx44dQ2lpKcPbW1tbERcXN2gthoaGIjIykp2K/6SR4yJ12MrKyhgO2dXVhREjRsBisWDUqFFYvnw5AB8Xv7GxER9++CEnSqii5R9onu97KyoqmKcr5Z/S7+kZSQNRCqQ0Gg3Onj3LCSiC9avVaubi+8N7FQoF/vznP0Or1eLNN9+8ZGf3ctmqVasQGRkJURRZwCcoKAiBgYGc0KVeolqtls9K4jTStdIZS8mr0NBQREREQKFQMI2EtCGSk5PhdrtRXV0Nu93OQkLEzRw3bhyqqqqYt0pCQP52vvNUJpOhpKSERYMoyKO+yXFxcThz5gw++eSTYZ9H0jPaZDKhvLyck2LAOUd2KJgwrWmaO0TrqK+v58Da5XJBq9UiMDCQ4Y5UyabzNCcnBxqNBqGhoQgMDERycjIcDgdSUlJgs9nw+eefc9/yzMxMfPvtt2hoaGC6z5YtWzgxL53PdM+GSrrQ72js0gCDghJq5fd/bYLgo2tQL1fg3HOZPHkybDbbkF0XzvdZQ/0buLg+w8C5eUr72cWIL/nbcN9nNpu5lZzb7YZGo+Ge5U899RQcDgfUajWio6OxZs2aQVVD6d4pRQhcyKqqqlg0kRIhdM1k0vsm5XGSjxEZGQmv14vGxkb09/cPqvKHhIRArVYzwoKCVKIVqFQqptPQOqLvmzhxIqZMmYKUlBTunxobGwuNRgO1Wg2j0YidO3cOeX9pPUgTC9LKrTRBIKVlSJOydC+lrR4vZP/6vJ/6oP5Q9utf/5qzf/fddx+S/tVYXS6XcyN1KczBP8tCr5VukC+88ALD7FpbW7Fx40Y8/PDDcLvd+OKLL3hSv/vuuxyYktFn0uZM/FLKtlDFMSQkBF1dXcyhoYnX09PDjveyZcuGPNT8D0v/iU6k8OHyTKWvO98moVAoYDabUVVVBYfDgeDgYPT09HAVj/qlOp1O7ttaU1MDlUqFt99+GydOnMCzzz7LWXESlnrggQdw8OBBtLW1IT09HZmZmXj22WdZqe33v/89VCoVTpw4wQHxhg0b8PTTT8PpdOKjjz5CY2MjL2ydToc77rgDs2fP/tEEqPfeey+OHz+OK664Ao899hhWrFgBg8HAAQ5tLmazGW+99Ra6urrQ2NiIrKws5jvSvKLDgKpc0vksDUgBcJN6cgIdDgdUKhW0Wi0MBgP6+/tx6NAhaLVahISEQBB80GNpc/vS0lLIZDJMmDABWq2WOSDE9SIONTncvb29mDhxIgICAtDa2oqamhqG/lAmUKfT8TwXBIGhNfHx8WhoaOB2GvR7tVqN8PBwVoJsbm5mTgjgSwrRwUPOLAWNBGm77bbb0NDQgLKyMixatAh2ux0KhQLp6elYunQpIiMjsXDhQhQUFKC5uZnXryAIiI+PR3FxMTZv3gxB8LXA+OUvfwmlUonHHnuMs5uBgYG45557EBQUhMLCQpw+fRo7d+6Ey+VCdHQ0YmJiuKcfOd82mw0ffPABVzH84c6Ab2+YNWsWz/OsrCyGYkudF1EUkZGRAZvNhvnz52P37t2IiYlBQkICBEFAcXEx7HY7JxFIkMXr9bLgTHJyMj7//HMsXboUW7ZsQXFxMZYuXYqysjKUlZVBEASMHTsWJ0+eRFJSEnJycnDmzBm
cPHkSEydOZFETqgAFBwfDaDRyA3t6ptHR0QgJCbksmf1LNY/H12rHaDRyUBcaGgqLxYLAwECulpNw28aNGwedHf7OI/1Myn+SOl87duzgIIy+j+awdN3Se6RVxL6+PrS1tbEzRTBKEtIh/pSUj6VUKrFmzRqEhITgtddeuySY4eW0VatWccJXLpdDrVZzcpYqhaRCToEqtTyhQIAoL6RNQMq+oigyrJCoNQkJCRgYGEBTUxMjqEi9l9T4AZ/YjUajQXZ2NvPl/W2o6hGZTCbDqVOnIIoi8z/lcjn+9Kc/wev1Ij4+Hi+//PKw5vpQCWSPx4NDhw5x0EHzjv6W8uukAR8FndSvs6qqip31/v5+BAcHIzAwEGq1mhVK3W5fb1USSaNWG+Rgy+Vy2O12LF26FH/961/hdDoRExMDk8mE06dPY+TIkdzqq7CwkMdD30tVI+naob1bmqyiNURIA1EU2a8glfX/SxMEXz9qKfcb8I07Ly8PDQ0NSEpKGvZnSZ/jpZpUdOvfDUwvJjAWBIH7mBM6hOYi+W8ejwfx8fHYtWsXnznDSeydb3wHDhzgsz0oKIh/R3x2OpOlAR0ApuO53W7ExMTA7XbjzJkzXOmkzw8ODub2T7GxsbDb7dBoNDwPrVYrUz9IAZh6zxcUFCA3N5cpMnQm0NgooSxNUEoTndKqsEwm48SSNJnpXz2WmkKhgM1mG/azwzAD1J8gvpdg69atw4IFC+ByufDGG29ws2eCWtBicbvdmD17NgwGA2f+XS4XYmJiYDAY8Pzzz+P555/H008/DZPJhA8++AAvvPAC3nrrLeTl5cFsNsPr9WLZsmVYvnw5iwMR14U2bwo2aMJSlliqbtbb28tKxJ2dnQy7ISgBOUrSxUsOjb+TMdQmcr7gdKjXDsU/8q+c1tTUoLCwEJWVlfB6vQzZIV4UAMTGxqK0tBRqtRr9/f1oamqCXq9nZcS4uDhs374dSqUSTU1NTPJubm7GlClTYLVa8dlnn+Evf/kLHn74YcyfPx8qlQoHDx7EJ598gjFjxjAk49prr8V7772Hd955B7feeisee+wxTJ06lXmHzz333DBnz3/G1q5di08++QQbN27Egw8+iJUrV+LAgQN48cUXuRflL37xC/T19eGBBx7AM888g/Xr1+Ohhx5CfHw8EhISeF4Qh9Hj8aC1tRWCILBDZLPZ0NTUBLvdznC51tZW6PV61NXVwWQy4dSpUzh9+jQsFgv27NkDg8EAh8OByMhIhIaGQqlU4uqrr8b111+PzMxMdohIOZngZSRERdUQwCeCJIoijh8/jn/84x/YuXMn6uvrodVqmatosVhQWVmJ0NBQ5tSpVCqe7wkJCQgLC2N5+oyMDOTl5UGpVHLbGlrPZGazmQ9BcrY8Hg+LIMhkMrzzzjtITk6G3W7HO++8A0EQUFZWBq/Xi82bN6O9vR3vvPMO6uvrERMTwyJcAwMDaGxsRG5uLpYvX84Kly+88AJEUcSjjz7KB7rFYsG6devgdrsxadIkXHvttQB865F6mhGU1+FwQBB8KoA333wzFArFkMEpWWBgIBQKBbKysmCxWNDQ0DBonUqD06uuugrvvvsuoqOjvxOcTp8+HYDP8aurq2PuvEqlQlpaGo4fP45JkyaxAMzixYvhdDqRk5OD+fPnw+v1orq6Gm63G9nZ2byHhYWFMQeVKv7d3d08t+iQBXx7c1NTE4vG+dv3ITwuFzxYoVCgtbWVxxQUFASn0wmZzNdmgJw8tVqNoqKiQeJO0oy2tPrpH7CSFRYWslPiX3UZ6rr8HTf6DqkjS99FDjI5/1KHl9bexfJM/Z/B993z4Qa/pM9Az57axFD1or+/H06nk9ceVdLofDWbzczrosoFCRJR5VQulyMiIgIDAwNobW2F3W5HYGAgYmJi0NfXx5SG8PBw6HQ66PV6zJ8//7zBKVUVz2dut5s7CVRXV7Pa8PPPPw+ZzNde7aGHHoJWq73gfTzf70RRRH5+Pj9j2tOkqCaae/T8RVFkZAvx5FJTU5lnS0KOJGhEbbSIg93d3Q2lUsmt8trb27n3slKpxObNm/Hkk08yUiYgIABxcXGora2FWq1GamoqJk6cyOuD/CIpkmZgYIDPFIJXUxcDSrYQCoUUWEm48sdgUvi21OhcGY5djn2NElRDBfjDNQqapFW54b4HAJKTk/lZ09q2Wq344osv+Lk3NzcjPz+f33+pBQSPx8OK1/7KuENxgoHB/WbpfpEQG0G0pZowDocDvb29aGho4M8gVXfyWQjJYbfb2Z9ta2vD6tWruXNFeHj4d5JIdH+ldBOlUslJfqmuACWlKAFCSDhp1ZS4tPQ6WveX234KUC/Rrr32WoapREdHAwAHUiR2IZPJsHv3bnR1dUEmk8FkMmHatGn49a9/jfvvv58P9e3bt2PdunWor6/H4sWL8dxzz2H27NkIDg5mfHpbWxsaGhpYhIRgxYGBgfwnKCiIA1jp4UEBq0zm63tJlRMiZ995550QhOFzT89nQy3+4fxMusnJZDIUFRUx/EqaJSScfl9fH6KjoyGTyZCYmIjTp09DJvM1TadqMR1QLS0tWL9+PTQaDUwmE8LDw9HZ2Ylt27bhF7/4BURRZFGEqVOnIi8vD3q9HllZWfj73/+OqKgoPlzz8vIQGRmJbdu2YWBgAHl5eUhKSmJH58dmBOs+efIkQkJCMH36dOzZswdtbW14+OGHsWTJEqxZswarVq3CwMAATCYTVq9ejXnz5mHFihX47W9/y5V7pVLJDgUlYogzV1VVhfLychw5cgQ2mw0KhYK51FOmTOGseUdHB66++mpoNBpMnjyZpf0p8Ovr68ORI0c4oDlz5gy++eYbxMXFobe3FzU1Nejr6+ODx2q1QqPRoKSkBC6XC0ajkcUFFAoFTCYTt2rSarWwWq2IioqCwWBAXFwcIiMjkZiYyHD5OXPm4KqrrkJOTg66urqYbzpz5kw+hKRy8cQjp2CdjCAyAFBSUoLly5fD6/Xi/fffR05ODl588UXIZDKefx9++CEOHz7MXFqNRoP169dDJpPxPCeY3AsvvAAAeOihh3jOmUwmvPzyyygpKeFWF6IocuXI7XbjtddeGwQzXb58Of75z39ecP54vV5ERERwmxrpYUfIAWpNs3fvXkyaNIk5y0QfyMrK4u8kdeDIyEgMDAwgMzMTpaWlCA8PZ0czPj4efX19MJvNWLFiBSZMmMCJkJiYGFZHDQgIYK4gVfx0Oh10Oh2am5sZOuufLSZJfKlz5J8ZlgZi9O/LBQumyj4d+uQgqFQqjB/vSygT3L66uvq8+wqNixwOaZAgFeeQZsOHer+0Kkv/pr6fpEhNr5f2XqR7RX9LIWKXeq+kcFL/Kj1dm/T7h6rYSMdLNjAwwOJZxPMirjLBTaklCcGAKfnkdDo50Uv7PN1bWvPkcDqdTrS0tHAiiOCW5eXl3DOW+nnPmzdvyIBcCkW90H0SRR/33uPxtWI7ffo0AgICYLFYWNWzubkZDz300He4mFK7kBaEy+XCiBEjBu0bND5KSPvPAfJpKFgQBIGreu
TM2u12bidFHDvi+Pb09LDyr0KhYHqIyWSC2+3GunXr8NJLL8FkMqGzs5P78HZ1daG3txehoaGDep4Ssowgk9J92uv1csBK5wah2Gie0LP4IVtpXIyNGzcO4eHhiImJGQTPl953f5Mmlfx/djFG81N6Dy/lc2isl+IzSdEhFOSRBgmt06qqKt47aY1TQvtiTZoo0ul0PC/JD6HrkIrz+V+XFLlCcz02NnZQ/2gAg/ZpKd+bBA6lAS8lUHp6ergfvdPpRF5eHlJTUzF37lzMmjWLKUZEPaTxSVGX5C9TEofuL10rJTeVSiU0Gg0nrqUIhx8KlfRTm5l/wxYvXoy1a9di7ty5nMHMz89HbGwsrFYrbr31Vp68Dz30EObNm4e0tDSo1Wr8z//8D3bv3o2DBw+ip6cHP//5zzF//nxERkYOggZQy4G7774bsbGxMBqN6Onp4cbYNNHIGbVardDpdKy2Sk4QZUcInknOxsDAAI4fPw69Xs/9KMmGghgNBQcCLqzKKH39UNVYQfDh3svKymA0Ggc5GXQvWlpaeCHZbDaMGjUKQUFBMJlMLFIREhICmczX37WkpAQzZszghXz27FmGVFEm+9NPP8Xdd9+Njz76CHV1dejo6MCyZcsQHh4Oo9GI5ORkfPDBB7jmmmug0+nQ0dGBsLAwrFmzBtXV1SgoKMCYMWPQ1NSErq4u1NfX48c0j3U6HbZt2waZTIbt27ejo6MDjz/+OLZs2YK77roLu3btYljhr371KxiNRvzxj3/E6tWrkZubi4CAAGzevJkrEJSAkcvlKC4uhslkQlNTEyZOnAibzYa0tDRuuTIwMIDw8HDU1dVBJvP1okxLSwMA6PV6nDx5EjExMRBFcVC1PzQ0FDKZjDPCoihi5MiRKCwshF6vZ+GSmJgY1NXVsRNNYmEBAQEAgLNnzyI3Nxdnz56Fy+VCeno6urq6YDAYcOWVVzLsbtSoURgxYgSqq6tZiRLwiXNlZ2dj8+bNyMjIQFZWFmpraxEeHs69YbVaLd8T6plIvYUBX8WwtrYWZ8+exa233ori4mIUFRVh8eLFWLduHe68807Y7Xbs27cPbW1tGDt2LJKSkvDZZ5/h5ptvxksvvYSrr74akydPRn19PSuKHjt2DDNnzkRiYiJOnjyJ6upqbofkcDjwyCOPIDc3F6WlpbzOb7zxRqxbtw6TJk0axDU9n61cuRJxcXEICgpCQ0PDoAPe6/UyB89gMKCzsxORkZHszDocDpw+fRopKSnQ6/UcjH355ZfIzc1Fc3MzRFFEYmIitm/fjvz8fHg8Hhw4cACxsbHc4/TBBx/E4sWLIQgCIiIiMHHiRLS2tqK1tZW5j8SlDQgIQGdnJwIDA5Gbm4uysjIEBwcP6r8oCD5oN1EcyKRO9lB7G/1c6pBcimMGgJ8TOfQqlYqDHaIIREVFoaqqilscnc+RO99YAbDCIn2X9LX+QYk0IKIxUUKVEDaAj4dN1TD6Iw0eZTIZrrvuOqSmpmLLli3fUVQmG+pckVZRpAGu9NkMdS3S5yH9XNo79u/fD51OxwGny+ViVU06X0ngjYIucmypgkbCPlLBNwpmaV7ROUyOsk6ng8vlQm1tLdNE6OdXXXXVeYN4KX3ifCa9xsjISG5xZbPZEBoaipaWFthsNqSmpsJsNuOKK65AYWHhoACSnvWF5jHBbFtbWzkopefk/+ylCQ+pQ068SGqVR0bIB8CHKiA9Auk6I/QACTuSguixY8dw7733wu124/XXX0dDQwNqa2tRVFSEsrIyDir85wRVh6QwZamzTn9UKhUiIiIwYsQIpKSk4G9/+xtDKv8vbeXKlRg7diyflzqdDuHh4QgJCeHrDAwM/M5zlaIbLhVyfzlUt6VIi/PttUMZ7ZV0jpBJ96iWlhZGzRB0dtSoUfxah8PB8Prhfqd0L6LiTlNTExdLpGORVlWla0GaKJHyVtVq9SA0IL2Gqpnh4eF8xhE1RppEpLlL43A6neju7mbBwfDwcC5CUX974s/7B9JSzjZdsxT+S4E1fR8F57QuY2NjERsbizvuuGNY9/anNjP/QYuJiUF/fz8r96pUKmzYsAG1tbWoqqrCsmXL8OCDD0KtVuPQoUN4+eWX8eyzz8JoNOKRRx7Bf/3Xf+H+++9HVFTUIMz4gQMHcOONN+Lmm2/mLPvAwADuvvtuTJ8+HUlJSd+ZsFTJGTVqFDo6OuByuZCTk4O0tDRuv0KqoVR11ev10Ov1WLhw4aDrkgaHUhtqU5FWVi5k0sOCNp2ioiKUlpaivLx8kDMEgANx4ph2dnZi9uzZKCgoQE1NDUpLS9lhio6OZl4wqTMS/1QQBNxzzz2IiorC0aNHsX37dnz++efo7u7GmjVrWEgmMzMTn376KV5++WXk5OTA4/Fg/vz5eOCBB3Dy5EmkpqYiJiYGTqcTVVVVeOGFF/C3v/0NN954I5544omLmjf/KTMajbjrrruwZs0aPPPMM3jxxRdRV1eH0aNHo6KiAjabDY8++igSEhLgcDiwdetWfP311zCZTPjNb36DhoYGbnFEB0RTUxOmTp3KfX1Pnz7NFVNqw0CtB8gJ6enpwbFjxwCAYSiUrQsJCUFVVRV27dqF6upqbN26FRkZGVxt3bNnDxYsWID6+noEBwczIoEEN0RRhNFoxLhx4xgS+t///d/Q6/XIzMzEb37zG+Tn5+PnP/85mpub8dVXXwEAw7dFUcSSJUtgNpt5XoaGhmLfvn2IiIjAp59+CkEQcM0116Cjo4MTIeTwCMI5mXtqTeDx+Bpxa7VadHZ24o033sAtt9wCURTx7bffYvLkyXj22WexYsUK/OEPf0BaWhpqa2tht9uxePFiPPjggzAajXj88cehVCpxyy23YMGCBRgYGEBXVxdef/11JCUl4b777mOHes+ePdi2bRuOHDmC1NRU5qNRUmfv3r1YsmTJsOcOZUZ7e3sBnFv7er0eBoMBTqcTmZmZqKurQ0pKCjuEVVVV6O/vh16vB+A73Dds2IC5c+dywmDixIl4//33ceONN8LtdqOvrw/x8fHQ6XSQyWSoq6vjdUwcY6p8hYaGIiAggJ+9RqOBIPgoCh0dHQgMDMTs2bOZc0zPiQ7iysrK7wRJF+swna9acSGjOUG9eIOCgljh/aabbmLRDK1WO0jg5lKCYQoGiBPoH0hKqy50j2iMVLmi/5ODRIGrtJIirZzROpIGBkPdI6mDJa0KkEmhmee7dqnzNJRDLqbz7GkAACAASURBVP1MaqNGkDap8jjtTxRYklCSFCJK10IVBuKb0vOipGhHRwcCAgJY6It46xTcxsTEYNasWUMGp3Q9w4E10/vp/tDao0qSy+XCnj178Pnnn8Pj8aCnpwcPP/zwd6rfw0l8iKJPCV2n03HliL6fFEiHQh/I5XKeL9TmKyEhAcC5qhTBfIlCUlNTA7PZjL6+Pu5UQJ9nMpn4vouiiPfffx8JCQmYMGECc3vJxzEYDAgLCxuU6PcPQqlaSsirMWPGYMyYMcjKysLkyZORkZGBxMREvPPOOwgJCeF998dgtOakwSbxek0mE
9rb29HR0cHjlaIsLvSZ7e3t6OnpGXJ+XqjSPly7mIqp9NrIv/T3Mz0eX396r9eLpKQkFkkaGBhAbW0tIwntdjuuvPLKYe3x0oQLjUMaiFILKJlMxnQeCiKln+H/N30GFUzofVLqBXCulRwl3umPtE0T3RPa06R+M2kEUPAYFRUFlUoFg8GArKwspKSkQKFQcPJWKipGCTz6Dim6gLi3tP6pXReJv917773DfrYXYz9VUP9NW7p0KZ544gmkpaUxBGDWrFmYO3cuYmNjUVZWhg0bNuDQoUMoKyvDVVddhaVLl+LKK68EMBi+1NnZidtuuw3ffPMN8ym1Wi0EQYDRaMTixYtRXV2NoqIihi8Cvmw58SaoVQMdlnq9HpWVlbwYaDKGh4eju7sb/f39CAgIQGpq6qDrGiq7er7s24U2Pul7pE5RcXEx2traBi1OOsh7e3sZxmexWLB48WJuXXD69GmUlJTAarUiJSUFo0ePxogRI3DmzBkA5w4jau/z7bffIjw8HOHh4Rg9ejSioqLQ1NSEnp4ehk9bLBaMGzcOW7ZswZQpUzBy5Ei8/vrruPHGG5kv9Nxzz8FqtWLq1Km4/fbb8cYbb+Cee+5BQ0MDtm7dipkzZ+Kdd975UVVQAeD222/Hiy++iPT0dKSnp+Oqq65i8aLi4mL09fWhtbUVr776KmbMmIGenh4Igk+AgJraU1sFqpYRb9RisbDDlpOTw8+OghpRFPngAHxKqmFhYRAEgfuYElSprq6ON77g4GCMHj2a+avPPPMMPB4Pkv4FFQsODobdbkd0dDQ6Ojpgs9mg1WqhUCjQ3d2N8ePHw+PxICEhAV1dXSguLub5o9VqkZycjJ07dyIvLw+ff/45srOz4Xb7Gn7v27cP6enpEAQBSUlJOHr0KJYuXYqNGzdi1KhRUKvVmDZtGjIyMlBSUsKOFlWeSMyLHDdqg2K1WtHZ2Qmr1QqLxQKTyYTExERs2bIFy5YtQ1paGvekjYqKQn5+Pnbu3AmTyYSysjLMnj0bMTExaGxsRGdnJw4ePIjOzk7MmTMHqampOHbsGMLCwpjH2d7ejrFjxyI3Nxd79uzBu+++e1HzZuXKlRg9ejT6+/tZJZMOsZSUFDidTsyfPx9vv/02br31Vrjdbm4P43Q6kZ+fz2JcH3/8MWbOnIna2lqkp6cjLS0NGzduxIIFC1BWVobOzk60tLTA7XajqKgIVVVVaGpqgkwm4+el1Wrx5ZdfcrCh0Wjg8fh6Oet0Om5hoVar0dHRgbi4OO4J19HRMainocfjYbG4S4GZ+Tvw53P2pQ6RKIooKiqCIPgEi9LS0nifMxgMWLhw4SBI1ZEjRwAMFj26mPER/1Q6VmkgIa1IUoKDKtFqtZrbltHvpL2ypfwkKRRNFEUUFBRALpdj7969F7wv9PrhmPTckDrKw3E49+/fz/0xqW2RIAhckejt7eW5rVAoEB0dDZPJxOcStfSgZNrAwADvn4APJUGOKgmaEBef7lFwcDCioqIwYcKE845XWvUbyi50zSRe1d3dzWuD/IkrrrgCgA+uvWDBAhw+fPh759NQSQODwYDAwEAWiqJr868c0d/S8VJipq+vjykVVJ2m5BRVmBwOB6v30udQoooE3qiCU1lZCY/HA4PBAI1Gg4kTJyIqKoqFY6jqGR8fD5VKhdjYWOj1egQHByM0NBRJSUnQ6XSIj49HUFDQIKXvkJAQhIaGMgqLEiH/bo/4f8dWrlyJ8PBwOBwObtcliuKgCpj/mrfZbLBarejp6WHBQuL/S19z9uxZdHR0oK2tDTU1NaioqEBgYCD7tBebjPO3i0kAklGV/kLv12g06OzshNfrawtFHE2n04nc3FxW3NdqtThy5AgXe4Yy6fyl7/Q3lUrFFBIpL/P7kon+exg9Q61WO6ifuFTVNyIiggtFgjC4TRjty5RoczgcsFgsCA8PZzFQGhutUUEQoNfrERUVxQgoEg5UKBQcC1DQGxAQwLxbaSKe2uiFh4cjLCwMWVlZAIDZs2cP67leTAX1JxXfy2RVVVWcERdFkftLEtdq0aJFXIGiyUWT/K677oLZbEZ4eDjzVqRZekEQsHLlSmg0Gvz973/H7t27YTAYcOzYMTQ1NQEATyKXy4XQ0FDY7XZkZ2ejsrISvb29LKjQ29uLkJAQDhoCAgKwYMGC71yPfzBKG6H/RiVVcfM3/02lr68PlZWVfOBL8fRyuRzV1dUsxS2TyTBjxgwIgoB3330XwcHBrLTqdruxcOFCVqYEfBtAZ2cnTp48CY/H16tuxIgR3A9q0qRJLGxVW1uLjz/+GB6PB+3t7fB6vViwYAH6+vpQX1+P0aNHw2Aw4OOPP8b999+PkJAQPPjggzhz5gzcbjfeeOMNGAwGlJSUIDw8HB6PB4WFhVi7du0lObw/tE2bNg2zZs3Cvffeyy0s+vv7MX36dM78kXro119/jVtuuQVz586F0WjEyJEjkZGRgYGBAVRUVMDhcCA0NJRhKjU1Neju7kZAQACCg4MZJn7ixAkEBgYiJCQEwLnEgVqtxsSJEwEAdXV16OnpgdVq5aqlzWZDS0sLenp6uMpLWfPMzEyGoxN3TKPRoKmpiR3ryMhI5OTkoLi4GOPHj2choMTERK40EBc0KioKERER+OSTT3DTTTfB4/Hgs88+A+DjmNP6I8GshoYGzJ49G5s3b+aeyBs2bBgE+SOON2Vu6TDU6XQwm81ITEzk+UktkBISEvCnP/0J9fX1sFgsqK6uxrXXXgtRFHHnnXdiypQpaGxsxJ/+9CcMDAzgmmuuYdXAadOm4Xe/+x2amprw/PPPc0ZWoVBg3LhxeO211y5pzgiCgGXLlrESaXl5OcLCwhATEwObzYYFCxbgjTfewIoVK+B2u+FwOFBWVobe3l7M/FcvRgrGduzYgfj4eFitVuTn56OzsxM7duzg6lViYiJEUURlZSVXO6U8vvT0dBw9epQdZ2pdlJaWhuDgYFitVoSEhECpVKKjowNmsxnR0dEICgpCa2srdDodt/UCzvX0FEURo0aNGjIAuth1TE4D7ZP+gSFBtGUyGVpaWpCdnQ2bzQa3240ZM2Zg/vz5AICRI0fi6aefHrSvXexYTCYTSktLv+PMkFFVlKpxtDapz55Wq2WVSYKsh4SEcH9JEtSQOpBerxexsbF46qmn0NjYiG+++WYQlIzu7aVUnYFLF3N57rnnuLen1+tFSkoKmpqaeI4B5yqBgYGBEASfONLAwAA0Gg2rxdJ9IKoMnV/kaJKT2tHRwfsViYksWrSIYcJD2XBgl8Nx8J1OJ7egk1ZmHn30UQiCTwl8+/btLDzob/5tiIayoqIiFoaSal0Qd9f/cwkWSHNQo9FAo9GgubmZIbvkF0k5buPHj4fdbocoitBqtZz8I4oHcXo7OztZeE2r1SIsLAxWq5X5feTAA+BqMvkslJChMUv5dhqNBg8//DAnHvv6+qBWq6FUKpmj/Z82QRBw4403sqgacQ+ptU9AQMAgGLy0bQ6ZdC16PB4OuOvq6uByuTgxQIERJRASExOhVCqRnJzMNJrhjpn2
oeEEqNKq4XDt9OnT/G/yLc1mMzQaDZ544glOHoWHh+PVV18dck+8kB/rbzKZDAcOHADgEysKCgoaxNH2D4CllXzaeymxIgi+LgadnZ0s9ORyuaBWq7F06VLmadO+BIATahR4Ehe1qakJYWFhGBgYYH0I+j4AjBrweDzcsaKtrY3HTZSFrq4u5oE7nU7ExcWhpaWFW9iQfx4YGMgINUr8DMf+9Wx/ajPzn7YPP/wQs2fPhs1mw+7du3H99dfzZkJGmwI5xORU+1+/IAjIzMzEr371K4auVlRU4NixYzh48CCLaPT19aGlpYWdI4LBeL0+gROCuRE8hypY5CxNnjyZRZ7ILqZSer7X0iYjk/l6tlG2j8ZCRpA8ChYzMzORnp6O48ePw+12o7GxkbOco0aNgiiKaG9vR0lJCcM5pd9ps9nw7bffAgAH47RRe71ePP/88+jr64PL5cJf//pXWK1WhnbecMMN2LVrFyZOnIiTJ09i3rx5OHjwIJKTkzF9+nRs3LgRmzZtgiAIyM7OxjPPPAOHw4G2tja0trbi97///Y8yQAWAsWPHYvz48fjiiy+watUqjBs3DmlpacjJyUFBQQG39SgoKMDvfvc7GAwGVFZWoqmpCcXFxZg0aRK6uroQGxsLg8GAjo4O5mMShCs2NhYRERGor6+HXC5HT08PtFotVyVcLhfi4uKQkZEBwDefysrKGFrk9Xoxfvx4HDt2DDfccANKSkrQ29uL0tJSdmBiY2PR1dUFuVyOhoYGjB49Gh0dHZDJZNixYwfWrVuHAwcOYNasWdi0aROWLVsGQRCwZ88ezJs3j+fChg0bEBISgoKCAni9XhQWFmLy5MkQRRGbN2/mijM54Rs2bEB/fz9uueUWCIKATZs24YYbboBCocD69esZRkvQZVEUWUWYDghyhu644w5s2LCB4Z6ZmZloaGjAU089hdbWVng8HhQXF/P8Pn36NL799lu0t7cjPz+f1XpdLhf0ej0mTJiAO++8EwqFAg888ABXtYqLiy95vgiCgGuvvRZer683LDnbVqsVS5Yswc6dO6HX65Geng6ZTIbCwkLY7XakpaUhMjISgA+utGXLFqSkpMBisaC7uxsmk4mdK4/Hg8TERKjVapSXlzOkWFrlSklJQU1NDcrLyxEVFYXIyEhYrVYMDAywGjpVTkkkjSrUJHBWU1MDvV4/CHZI8yA7O5uDffreH0Lw4cSJE7w3ECyNAsInn3wSbrcbBoMBRqMRmzZt+g4P82LGVFtby6rl5NRQ9Rs4lyyieyB1YgwGAwfUpLZMfGtSiKe1SGOjROvTTz+N8PBwVFdX48CBA0OeaefbH4frwF6sPffcc4iOjmbqTHp6OiorK7mnK61buscE2yUeGDmzlESl4JWQIwSttNvtsFgsg6rMOp0Oc+bMGZSM9bcLBaf+1ZnhWGVlJbe8ioiIgNVqhUKhwOOPPw7AR2soLCxEUVER33OqDg+Xn2i32xk9Ih2bfxWJEhIEHQTAUEXyOWpqangvAMBJzSuuuIK7DAQGBn6HQyyt0DY1NbH4VHh4+CA4P4lJEuSRnjH5SgTBpuQD4AvoCSpKXEFSHjabzRg3btywn8flNEEQMGrUKAQHB0OtVuPkyZMYMWIEsrOz4fV6UVVVBZ1OB5vNhqSkpEHVVZpH3zeXqG/v3r170dHRwZVWCnxdLhdyc3MxZ86cix77hdb3haqW32eNjY3M5ayurkZ2djbPp8cff5yTpfHx8fjzn/88SJ1WmqwcrhG3XRRFblclLSZJK9p0TdLWfbTHuN1uXgfUM5iQkABw2223weVyobe3lzUuKPFNtBuKIdrb22E0GrkzAf1NyXK32z1IPMzpdA6CBtPnUfAphcPT+qM+8rSWFy9ezJQO2juGk7y5mAD1Jw7qZbS6ujrOzi5atGgQjMXj8eDNN9/EL37xC/z85z9HXFwct0Ygo4n+yiuvYM2aNbjyyitRXFyMXbt2sehBYWEhbDYbLBYLUlNTERERwZBLciTJ2ejq6mL4jJR7ERERwQ6nf3A61AZxvgV8ITiS1WpFbW0tSktLAQwW4lAqlWhvb2c4KEEUrr/+egwMDGDnzp3cW5J4JtJKR2RkJObPn4+vv/6axyWXy2EymXD8+HG+BxSU0/f39fXhySef5Cp3R0cHw0IIiigIAmJiYhAREYGKigrk5+fD6/XipZdews0334x//vOf6OvrQ2lpKe655x4cPHgQMTEx37mPPzbr7OxESUkJ7r//flgsFlgsFqxevRpqtRqnT5/Gxo0bcf3110OhUKClpQUejwdZWVm46qqrcNNNN2H69OkcNFAVnDJ1dPip1Wq0trbi7NmzSElJYWeAoIIymQwdHR04cuQIBEHArl27YDabERAQALPZDI/HA5PJhOuuuw4AMGrUKK4CkPw/tXQRRZ+CdWNjI2JiYhAZGYldu3Zh06ZNyM/Px6FDh5Camsqb6bhx4/D555/zQTlnzhxkZGRg8+bN8Hq9zC0RBAGLFi1CdXU1H2RutxtXX301YmNjsWnTJoiiiJtvvhknTpyAy+XC0qVL+QAhkQ6CAkrVLqka+frrr2PChAkMnzl8+DDq6+uxcuVKhIWFQa1WY/To0XjmmWdY7GHLli04fPgwvv76axw6dAiffPIJVwnLy8vxyiuvQBRFvPTSSygtLf23glN/oyqC2WxGSkoKduzYAZ1Oh8zMTMjlchw6dAg2mw2zZ89GVFQUv+fTTz9FVlYWrFYrVxBpTni9PjGgwMBA2Gw2fs7U9sHtdiMuLo45OfHx8cxJDwkJwYgRI2AwGNDa2sqwLlH0iWxR0s9oNGLUqFGYNGkSAgMDWVVUaqWlpVyVlUIV/eFe/44RFYOy06RkqlQqcccdd/AeqNPpUFJS8p3vpKB9OCaT+VqMCILADg2ZtBpJcC2pUSAm5Tj6JxT9g9uhKBz79+8fcmwXco5/iOCUjJQuFQoFKioqOFhJTk7GyJEjodFoWOVeqVRCpVKxroTVamUIHiVSqWqhVquhUqnQ1dXF1QcK1nU6HfNNv4/7d76fD7d6Tt8rk8mQlZXFEOz29nZotVr09/fj6aefhiD4uJxz5szBjBkzAJxzSOnfFxonzRuCw1KFRRqYSjnO0jVE46PKJsGR09LSmBtMzndnZyd27tyJ0NBQqFQqOJ1OTvR7POdaJlHAStQgmpOURKFKf2BgILfmoyQDdTuQCkgqFArcd999/Fn0s+LiYoZMBwUFYdWqVd/7TH4oi4iIYGXa7OxshISE4OzZs6ipqUF8fDwsFguPv7+/HxUVFeju7maVY/9AVfpvgnXKZDLk5+fjuuuuw6JFi3DllVdi0aJF3CaxrKwMdXV134Hf+s9z+tmFChvS+XKpewBxmwFfEpienc1mYx0SSlbccMMNg977ffuqP2+b/p+fnw+VSgWdTvcddIQ/zBo4JzIlXUcymYwT0tQ3WPpeWkPkn0oTiqSkK5fLGc1ACvehoaHnPcuoeBUUFMRnrUwmY4QiJXFJo4YSFAqFgsW4goODERMTg2uvvZbb30gToJfTfgpQL6M9+ui
jiIqKgkaj4azgoUOHcN999+H222/HmTNnmBsFnJvICoUC1157LZ588km88sorOHnyJP7xj3/g1VdfRUpKCg4fPszcUqqQCIKv1YbBYEB4eDiTo4Fz0uokdKFUKuF0OvmQLS4uhsfjYVVVqfkvrAstYILVrl+/Hq+88gree+89eL1eVFRUoLW1lYND2sAsFgtzBuVyOSZMmID58+dz36YdO3agrq6OoQu0+CoqKvizyDweD+bNm4evvvoKW7Zswfbt21FcXDwoA0UZRJKqJ6jOypUrsWrVKg7enU4nH1YtLS344IMPMH78eFRUVODtt99GY2MjFi9ejCeffBKHDh3CZ599hhUrVqCrqwtvvPEG/vrXvyI2NvYSZ81/xkjoiCqGpaWl6OjoQFBQEDo6OmC32/Hll19i1apVyMvLQ01NDWfiDQYDN0p3u90IDQ1leBtlrUVRRHV1NQTBp764e/duxMXF4ezZs+js7GRoSH9/P0JDQ7Fz5052poODg/HLX/4SN910E6KiolBWVoYPP/yQq4ejR4/m7F1TU9Mg3gZV42bMmIENGzbghhtuwN69e2GxWJCbm4vt27dDLpczBJTk56Ojo3H8+HFkZ2fj888/h0ajwbZt23hNarVafPTRR3xwUtXruuuuw//+7/9CFEWMHj0aLpcLwcHBWLFiBUTRp0xrsVhYOIU4PHRfurq6EBwcjB07dmDy5MnQ6/UIDw+HVqvF2bNn8eyzzzL3p6amBqtXr4bL5cK6deswMDCAEydO4JtvvkFbWxtef/11lnk3Go14/vnnUVBQcNnmDDku1EsxKysLbW1tCA0NRVZWFkRRxMGDB9Hb24ukpCSGOisUCnz00UeYNm0aTCYTC2/QnkgQyIiICBaXIfiu2WyGw+FATEwM5HI56urq+NAk1eDm5maGgxOkWlphod7EcXFxOHLkCCIjI9Ha2oqQkBBuVyF1ok+dOsWJDGBoR1saiEl5l99nMpmM14UoioiKiuI9NiwsDHFxcfyZNpsNVVVVg8YCDK4unK/SQOM5duwYc4ikAST9e6hx02cRWoD4wNQvVHrNVD2VqrjS72NiYgYFGz8WowQGtRIRRR83PiIiAjU1NVxBpio8OWgUMLndbpjNZuZfCoJPz4HWM1XD6SzPyMjAzJkzhxwL3X96Fv9uRXmo55mbm8tnbnFxMauFPv300xBFkUXNrr76ap5PQ2lO+I+HjKDx06dPh0ajGRQgS51haYWVHGUKCC0WC5qbm5nTTsGkIAjcl7a8vJznHPkzQUFBrE5K7WP0ej0HyjQG+iyVSgVRFAeJVZFTTnOZnhuNi7jhLS0tqK+vx6hRo9DS0gKNRoO+vj4sWrToe5/LD2WlpaUwGo3MQSVIs9vtZqrNlClTOPiIiYlhPi2dv6WlpQxvpzlIlTaq8JGRzyiTyTBp0iQsX76cBTWlcGD/5+wfpEn/TXuKFA57MYlAaeBL30P0jYSEBD5vKKlN1cGWlhbuHCD9rAslgqTrUFoF9nh8nTMAsIgWvR4AJ1Sk60q61qT7slzu6/EuXT8ej4fRBiTkJUXSWCwWuN1u5qVKOdJS1XpaczTXpXB66RqhpBw9a6VSyTBe0m8gX+93v/sdnn32WeaCG43G76UGXKr9FKBeZouLi2Mu1KpVq/DOO+/wz6WTnRbFggULsHr1asyYMQNfffUVKisrsXbtWpw4cQJLly7F6tWr0dXVhSeeeIJ5oyTSIM3oer1enuSkEkZVHQpoSXRIoVAgKiqKexZKbTiTjDaFgYEBvPvuuyy2FBcXh5qaGt6waFwejwfNzc1Qq9XQarVwu90YN24cGhsbsXPnTrS3t3MfV5vNxguT+j/19fWhsLCQuTP02Xv37uVDh5zT/v5+FpKhvp1UFaDPJcgPLViv10ewpyqGzWZDYWEhrrvuOshkMoZNrVixArt378bOnTuxYMEC/OY3v+EK5BdffHFZ5s8PaYGBgfj000+Rn5+P6dOn48iRI6yiWlRUxHMVAO6//35uIq9SqdDS0gLg3LOnQ0zKefF6fWIEkZGR0Gg0CAsLYweBuBWCIKClpQUKhQJtbW3MbSHY5sGDB+FwODBt2jTedGUyX7uDsWPHshNMG7xKpUJaWhp2796Nm2++mWFQM2fOxIkTJ7BkyRJ8++23EEURU6ZM4QonAPzsZz+D0+lEZGQkurq6UFBQwIHzzJkzWfSJ1sTs2bPxz3/+Ex6PB1u3boVKpcKXX34J4JxQCf2bNnniilDVkKrIALBp0ya+PhJWam1tRUNDA+RyOVJTU/Htt9/i+eefh06nw+bNm3ktvf3223C5XFizZg0EQUBpaSn3P7xcJs2sE9y+oqIC2dnZnHAiaHfSvwSsPB4P9u/fj0mTJrEkP7X/CQgIQG5uLiwWC7cEIA44BTx6vR6i6OvharFYeM+jAL+7uxsjR45kB4SeJa1tggUSVLW1tRUAuFpNUEDKPpNTQE6M/7VL92x6VgTRGo719fVxttput3OfRpVKxcJIABAeHo7a2toLOmqUcPN3eOi+A2B+IM03/yCBxi29PjLK1EuhYP7BuHT/pddQixb6/XAz6T9Exn0oo2dM1Tav1wu9Xo/Ozk54PB7ueZmTk4PExESoVCpGaRDfzuv1oru7mxMocrmc29XQvqZUKpmXdaFz9HJxTs83V+haZDIZYmNjYbFYoNVqYTab8eGHH0IUReb4n68VkP/3+M89wHdPRo4cyegQ+hm9R+qbSM8IQfBxy0XRp75On0PrWRB8fWYrKiqg0Wg4uKR1R8+E9mfgnFKz9N7SHkxnlnTs0uCB3hMREYH29nbU1NTAbrdDoVCgs7MTp06dQlBQELq6umA2m9HT04PHHnvsgvfshzKNRgODwYC6ujq43W40NDRw68KQkBDY7Xbs2LGDe31rNBpUVlaivr6e+0JnZGQgICAARqMRtbW1aG5u5mADGDyv/OcGVSM9Hg8aGxtRVlaG1tZWnDlzZlh7AO2f0j11uCatPvobBagejwdtbW18pgiCgJMnT/J1ud2+FnX+YxrqM+nnQ1WHAZ9eACUw6drpc4KCggAMvn8016SIg4GBAT6jqFBDYyXEGO0t/vO7t7eXkVr0WSEhIZwgpHFLz1fpmQmA/TNK2kjXA72efp6Tk8Mt6ugZSpNGP1VQ/z+xu+66Cy+99BL33AsLC+OJTO1MnnnmGbz00ksICAjAvn37cM011+DgwYN46623oNfr4XK58Omnn6K3t5crigEBAdi4cSM3+SWoWFJSEgwGA8OTaPP3eDys4Nbf34+goCAOxNra2oaEZPj/3/9Qam9vx759+7Bnzx6sW7cOAQEBWLZsGa677jokJCQMCk7b29u5YmG325GYmIjJkycjNTUVFRUVaGxshEqlwqRJkzB9+nRMnjwZI0eOhNFoZIl5+tPf34/29nbs378fO3bswPbt2wdBhD0enzLnwMAAq/GlpaUhPj4eHo+HK3jUQ5EElihTLggCqwQGBQWhpKQEb775Jm6//XYExy8IKAAAIABJREFUBgZi48aNOHHiBDIzM6FQKLB69WqMHz
eno6ampqcOXKFeTk5ECn06G9vR1WqxUajQbbtm2D3W7HG2+8genTp+PIkSO45557UF1dDZVKxXWO1F9y6tSpUKlUXMtHhv3JkycRHx+Pw4cPw+FwsONKWTqXy9O/XD7GY8aMweDBgxEVFYVDhw5h+PDhiI6O5gwTBYxo3ioUCmi1WvT09MDf359ZjwleHxAQgKNHj6K8vBwFBQV49NFH0dTUhB/96EfYt28fFAoF3nzzTdjtdhw6dAhvvPEGNmzYwM5GdHQ08wncdddd7PRZrVZkZmbC4XDgueeegyAIDN+8cOEC3nzzTYwcORIvvvii1z23WCxoaWnhDPQTTzwBu93OrThE0dOOrrW1Fa+88go7sAcOHIDNZmPHjnRJWVkZysvL4XQ6sWnTJigUCmzatAmPPvootFotr7+/R+Q6gVAA/v7+TJxDdZO0niizJV8zFPgaMmSIl16i90VRZPRLTk4OQ2cpKRAUFAS73c6kOIIgoLKy0mvNbt26FWFhYexoUs2oSqVCcHAwQ+KJfKurqwsNDQ28XtVqNSorKxESEoLW1lY+9urVq17BLIK70rnTd1LJD0F7aa0rlUrExsZ+57rJuSZkwfvvv89BJqvV6oX6y83NRXp6Omw2G/Ly8qDT6WAwGJCVlcVkQ6IoYunSpSgpKYFOp+M2SzabDbm5uVAqlfj5z3/O7agEQcB7773HKBw6F0LfEBFSX0Zq0tX9+vWD0+nknrNUqyqKIrPrvvfee1AoFEhISIBer2fdS8660+nE+fPn4Xa78eqrr/KYZGZmwul0orKyEvv27eN5Pn/+fAwePJhrt0NCQnDixAnOdv8zcsNB/R8Qiv5T5J02BK1WC71eD4PBAJPJBLVajdzcXBQUFKC0tBRqtRr79+9HZmYm/Pz8uIUEMaESg5gkSejs7GRlIs+UyhcHfZYe8/PzcfHiReTk5HCTY6oJq6urw5w5c/D000/j0UcfxY9+9CPupaXT6TBgwACGqwDgQv6enh74+Phg/PjxmD17NubNmwe9Xo+2tjZcuXIFBoMBQ4YMQUtLC9ra2ryiZDQ2wA3n9J+RxsZGSJKnvoiyp2lpaXjhhRcwb948r3lBtPoNDQ0YNGgQ0/qT0qT+ZOSkdnV1cYScpG9WHri2CVksFhw5cgRqtRrPPPMMOjo68OSTT+LcuXNYtGgRHnnkkR98fP6XRKVSYfXq1TAYDOjq6sKuXbvYgCUDgZxSeTQa8Ca40ul0nDGy2+3YsmULVq1axccS8YXVasUdd9zB3y93GIuKiuBwOLBs2TI4nU50dnaywQt4HLq9e/fC4XBwnRYZSgqFAgcOHMCECRMY/k/QW0HwsHVS7TQJvU5OGvUilUPJNBoNsrKyOBOpUCiwYsUKuN2eBvMEa9+1axeTydH6ALyDdyqVCk888QTGjRuH9PR0Ju8gQwOAlw7u6+DRa/J+qRT1l7dsoh7USqUSEyZMYGIL+g46Rzlk02QyeRFwVFdXo6mpCVevXmWYGvXUk9eqFRUVfWcuyO+JfN4QFJbGXn5d9Dk6ri+K56+JPPhAWUqCdhOqg1huNRoNO1IEMRcEASUlJZwdHzlyJMrLy6HXwSw+eQAAIABJREFU65lU8PLly4iNjeU1Y7fbYTAYEBgYiF27dkGh8DCjUsmN0+lES0sLtFot2tvbGXliNpvR2dmJK1euICEhAcuWLcPbb7+NcePGYfz48ZAkT9nNfffdh4cffhiCIGDgwIG4fPkyt3ghtuqqqioMHDiQmdcnTZrklZFRKBRYunQpB2suXLiAadOmYe/evWhra4NSqcTChQtht9s5O0XXV1VVhaioKK6vHj16NPr374/29nauT6Q1SYzERK5Dzgzg4TQgltDMzExoNBp89dVX0Gq1yMnJwR133IFXX30VHR0dOH78OARBQGZmJnbv3o1HHnkECxYsQFpaGoKDg/HCCy9Aq9Xit7/9LRPOfPnll1i3bh00Gg2ee+45nkculwvd3d346quvOBsqX0+dnZ04ffo0Xn/9dYiiiKeffhpBQUGcGaK11dnZiU2bNuHMmTOw2+04ePAgo4vkAQ6j0YgLFy5AFEW8/PLLHCD5yU9+cl3W6e+TvgEmeQCISpmohy5BN5ubm7+DZKPAPa3BsrIyRrlQoIiCxBEREYx+od8hfQd4snQ333wzcnJy0NraytnbOXPmIDo6mn+TWh8pFAqeP0QURYkQGiv5dVJwoLe3l51ked90wBM0lwcRadwjIyNhs9ngdrs50DJ79my+N4mJiTyuVLpATjDZsnQ87TPE00IQ5ZycHIwcORI2mw233XYbtzajrLvT6cTu3buZcE6j0WDQoEGYNm0aioqKIEkeFmUS0gHJycm49957vWDMtI4AYPDgwawLRVHk4Aj9UZlcSkoKlzEUFhYy7PvMmTOIjY31QiDJ50RnZyfUajW2bNkClUoFi8WCP/3pT2hsbIQgeLgTaL9cvHgx3nrrLURGRkKSJGbf/mflhoP6PyBffPEFLzh55Pt6Io84EXU0TXqNRgOHw4GQkBDOqBIxBkF35BBNm80Gk8mEU6dO4cqVK/j66695Ul68eBE2m42Z+9ra2hAdHQ2dToekpCT86le/wogRIzhSRouKanCIQIc2UopI6vV6JCcnM+SQznHmzJkYO3Ys4/obGhrQ1NSErKwsbiB8I3P6rxOaM2lpaVizZg3S0tK83pcHAwiq88EHHyAlJQWtra08T8lYpmNobmm1Wi7il0Ms5cYtvVdUVARBELBo0SIsXLgQe/bswfr163/YAfkfFIpgEyyrrq4OM2bM4BoWgtCTyHWD3HCijb67uxui6CFBIvZOwHOvGxoa4HA4sGDBAq9ghFqtxscff4wFCxagtraWiZA0Gg0OHz6MuXPnsnFx5swZCIKACRMmYPLkyaxXXC4Xjh07hrS0NBQVFcFms+H2229HfHw8qqurERERwTWo8mBbb28vAgIC2LgTBAFPPvkkG3BarRYtLS3M1Eisvk1NTXjrrbdQUVGB8vJyPPPMMygqKkJCQoJXXZMc4gtcIz/67LPPMG/ePNhsNq9NnpxH+l8+xvQaQZepXqm2tpadPXn/OmJcpxomOeuiIAhseBBxDhHUpaamMqGGPDhBsD2TyYSgoCDs2LEDt956K1566SVUVFRw9oKk7zyRjwe9TveV9jb6DBnI9Pm+jm5foZ6GNJajR49GZmYmLl265AVhI4ZKcq5pvGJjYxEcHMwQvbKyMhiNRoSHh3tBoXU6Hfr168eMuzU1NWhubkZUVBSqq6vh4+MDHx8fnD59GoIg4I477kBNTQ2uXr3K5xoXF8f9SNPS0jB8+HDU1NRg3LhxCAgIwBNPPIEPPvgAO3bsYELE3/72tzwOgwYNwueff47p06ejpKQEAQEBXD+m0WjQv39/HDlyhNtyUFZ53LhxnPlesGABTp48yfdm+vTpsNlsOHv2LOtegsZOmTIFhw8fhsvlwvDhw2GxWBAXF8c1ubSeyJ6g+m1y9gnuS+dy8uRJJCYmIjMzE9988w3OnTuHefPmYevWrXC5XNi3bx+USiV
27tyJd955BzNnzoTBYEB6ejruvPNOPP3003C73bj77rsBeGDAlEG12Wz8CHjsl/Pnz3PN6YsvvsjoB0HwtLqyWCxYu3YtlEolVq1ahUWLFnFveFEU0dXVxQiTLVu2QKFQ4A9/+AOSk5ORlJSElStXYvLkyfjFL36BiIgImEwmWK1WbNq0CUVFRRg8eDA2b96MJUuWXHfu9pXvs+3kASuaz35+fsy6brPZ0N7ejtOnT6O+vp51EP0RNwTp5AULFnCpGO3V9DuC4IGcE7qD4NednZ0oKSlBTk4ODAYDiouL0dzczMf6+PgwG3ZXVxcnRUhPE5yd4O7ANThsW1sb6zdJktC/f3/ObAqCgKSkJOh0Ov4cvT5+/HjU1tZi0qRJMBqNUKvV3BKspaUFFRUVaG9v56wqMUyTvqCyI7puyqJSIO7KlSuYP38+EhIS8PHHH8NisWDUqFEoLi5mJmi1Wo3S0lLU1NRwkEmlUiEwMBB79uyBTqfD6tWrGXotSRIyMjLgdDoRGxvLATQaj7i4OGg0Gtx2221M9imfA0TMRdBot9vtRUJYVlaGPXv24MKFCwDA5Kc0r0in5+fn8xres2cPJElCSEgI3G43Tp8+jfLycjQ3N3Ng49FHH2V+Ervd/k/1+iW54aD+j0jfySBXPvJNvq/hKIdJECkERc7l0Leenh5kZmaioKAAZrMZZ86cwTfffIMrV64wjn38+PHQ6/U4ffo0rFYr92oDwDATMp76GhcUBXK73Zg2bZoXOyw9Up2FnLGOzt/X1xddXV0QRRFff/01RNHTMmLSpEnw8/O7kTn9F0tNTQ1+85vfYNKkSd8xFuVwEXovISEBp0+fxs0338wM0EajkecowXEEQYDNZoO/vz8b1fI/uQNBBnJ3dzfUajVCQ0OxdOlSfPbZZ//JofmfEdqczp8/Dz8/PyaXmDx5MkaNGsXtJOSf7bvJkYiiyJBBURQxYMAAPo5Ye1NSUpgkgnTV/v37MW/ePBw4cAAGgwFRUVFwuVw4cOAAZs+ezfNLrVaz4SVv6UBGMRlEra2tmDZtGteJEdFcSEiIV52ZKIps7NhsNuh0Oixfvhw2m40zm2FhYfjlL38Jq9WKfv36Qa/Xo7OzEzt37kRNTQ2OHDmCyMhIdnBbW1sRGBjIWTp5qYRcvvrqKxw+fBhvvvkmG8Iul8uL4VLujFG2Q57Jlju/8rGQPw4aNMgrEElBp87OTi8HmNZkRUUFMjIyEBQUhK6uLl6nZAg5HA50dXUhKCgIxcXF+OKLL/Daa68hISEBO3fu5O/9vrkmr4WSzxv5edD9IaORxqTvcXKh7NvRo0dx/PhxnDlzBgC4VRHtJxEREWhtbYWPjw/f44iICJSWlqKhoQG1tbWcPXG73SgvL0dYWBiPfU5ODmfPAKC7u5vLD7KzswEAAwYM8GLtBDwZKIPBAB8fH5SXl0Or1aK+vh67du3Cli1b4O/vj1OnTqGxsREZGRno6OiAVqvFY489hkuXLnFf6cbGRrhcLtx999344IMPkJSUhOPHj+O+++7jIPbAgQMhCAJOnz7N5xkWFoYjR45g6tSp2LdvH1QqFe666y58/vnnkCQPrHDYsGFoamryIrWKjIzEqVOnMHz4cHz00Udwu91YsWIFSktLkZ6ejjvuuMOLIZa4LChb7OPj4xUEp8zQu+++i2XLliEgIAAlJSXIyMjAXXfdxTDdd955BwqFh5n2rbfeYqdKqVRi5cqVWLduHfz8/PDUU08hMjKSz3n9+vVQKBTYsGEDz2/i7HjxxRchCAJefvll9OvXD4AnU2wymdDV1YVXXnkFbrcbAwcO5B6RRCgmCALD3N99910oFAqsXbsWy5YtQ3R0NEaMGAGn04nZs2fDYDDAYrHAZDLh8OHDzIS6fPny752/cpHrV7mjSmuDHmmduFwu+Pv7c497qvd1u908pyjzGh8fz99J7OuSJHGtOa03SZJgMBg4s0gcII2NjXwfqqqqmPSHxNfXF1evXuXessC1oJooenptS5LEMHi6jrCwMNTX16O3t5cDFElJSTAajey4hYaGwuVyobS09Dvw5/DwcAQHB3O/VEmScPvtt3PQhtiy5eNGyR6r1Qq9Xu9VjkH7jSAIuHz5MkTRU7M7bNgwNDc3Y+DAgdi/fz8aGxsRGBiIzs5ONDQ0YPv27QCA5ORkAJ6scWpqKkwmE/r3788s94DHviJHb9asWV57otlsRkNDA9RqNR544AGvoH1PTw+vWYVCwZ0T/Pz8uG0SkbURQ78gCJzZpvtHcurUKVitVhQUFDARGmW9W1pasGPHDkbjAMDChQvx7LPPore3F7m5udclwft75G92UAVBUAiCcFkQhL98+zxBEITzgiCUCYKwUxAE9beva759Xvbt+/H/1BnekL9ZMjIyOOINeMPHyMDvC5ckym4SIg0hcgiCManVavj5+cFkMuHSpUvo6urinpNUH+Pn54eMjAw2NqiompyWo0eP4vLly99xPolunCAJtHFcr8CaHGhygkkh5+bmor6+HgkJCZAkD7w4MjKSFcrs2bNvOKf/YnnwwQe/A5+WQ0zk2XFRFNGvXz+kpqZ6BUR6enoQFBTECpWilJSFoBpGgh7JFTV9j8vlwsyZM/Haa6/9x8bif1Eoijxp0iTExsZ6tSXR6XRITU39zhq9HpSTNn3qdweAYUJnzpyBzWZDYGAghg8fzhF7wEPqYTAYOJOampoKwMOaarFYGObldrtx4cIFKJVKjBw50ssJkiQJ586dw5QpU3DgwAE21gBg9+7dUCg8LK1kpNAcq66uxsCBA70ctQEDBvD7ZrMZJSUlXrDg0NBQmEwm+Pr6YubMmZgxYwZSUlLgcrlw5swZpKSkcHZADnMEvMmDyGn39fXFq6++ysEbGlfAOwBAOo6MbTI45VlT0vc6nQ4KhQJTpkzhTCEhGcigsdvtbPRTfzu3280wbIvFwn1mQ0JCEBERgbq6OlitVmi1WvzlL3/B5cuX8c477yAsLAxnz57Frbfeiq1bt/5VR5KMXxJ5drXva33flzuxcngljRXd04aGBpjNZg40+Pv7c2sHgnKLoocxlAijqE6LssQBAQHQarWoqalhSBvdw/b2di/IXENDA+rr6xETE4Oqqir4+PjgpptuwunTpwEAc+fORX5+PmeWdDqdF+eDVqvFG2+8gV27dgHwZLR/97vf4Te/+Q0OHjyI5557Do899hjKy8uRl5eH4uJimM1mzJ07lyHKgIed+cqVK3C5XJg3bx63niEdfffdd+Pw4cO8Jl0uF0aNGsUZLmp7sXPnTq+6tqVLlyInJwdDhgzBRx99BKfTydcEAAMHDvTqjWixWLi3o9PphF6v57nc29vL773//vtYsmQJgoODUVdXh2+++QZRUVF48cUXERkZiR07dsBqteLEiRPYuHEjIiMj4e/vj5qaGqxcuRJPPfUUJEnCXXfdhfHjx/M8Wb9+PQRBwNq1a/m8CK5J0Nuf/vSnXoEQyq7T/rJo0SKsWbMGAQEBXpmp6upqNDc3Y926dayLSP/V1taipqYGRqMRJpOJ97
udO3fi7bffhkKhwJ49e753bchFPu8pOCN3TuV7o1yvqFQqhv9SPXRJSQlOnTqFpKQkXisXLlyAVqvFwYMHIYoikpKSoFQqkZWVxecwbdo0tLe3M6xUrVbDYDAwZ8TatWs5607oqlmzZqG1tRWhoaGMgJs/fz4HroijQBRFr/IC4qpIS0vzYl53OByc7QTwnbIK0gHJycno6OjgoJI8g79s2TLMmTMHgYGBHCCT6wH6HhJJktixCwgIYPKkiRMnYuHChSgqKoLL5UJYWBg+//xzCILAznF5eTlfE2WwX3vtNWbWveeeezBkyBDW1zt27IDb7cbYsWO99L3D4eC2UU6nEyNHjvSaG2azmeHX3d3dmDp1KteHh4eHQxAEnDt3zmtOUZcOeSaerp9acxGLdk9PD6ZOnYqSkhLY7XYcPXqUx/Omm27Chg0bsHjxYoiiiC1btuCLL774m+b19eTvyaA+AuCK7PlGAFskSeoPoAPAj799/ccAOr59fcu3n7shP5CYzWaODBMcAri2SQPeRBsUOaf/ifHP6XTC398fCoWCN9yBAwdyNIngwADYsaTG9ORUkNNCvyVJEpqbm/Hhhx9y5MvpdCIrKwu5ubno6OhgGKDVakVlZaXXOVMGVe44U9Te7Xbjk08+gdPpZAPK398fSqXyhmP6bxSaN3IYTN8Nkv5iYmKwZs0a9O/fH7W1tUxYQcYwiRwSRNF3ymTJIb8kRDBwQ/61Ijf6BwwYgKSkJFRWVkKhUHCEnfq99XUKSTf0jfgTvCozMxNarZZrn8aMGePlaCgUChQUFGDUqFHIyMhAYGAgt/nIz8/H/PnzedO22+2oqamBTqdDcnIy6zTgmj68dOkSJEnins5VVVVcItDb24uamhqvLLD8T61WIyYmht+32WzYtm0btm7dit7eXoZZmc1mSJKn/Y3FYkF+fj6CgoJw9uxZDBgwAMeOHcO9996Le+65BwA4wCYfJznkjkgn/vCHP3zH8KRzIaHjtVotn099fT2/J//+WbNmMUsjZUgoMCC/XwTlJmeJ6mEJraJQeNrhtLS0IDExEQ6HA9u3b8dDDz2E3t5e6PV6bN++HWFhYfjjH/+IoUOHemVhvm/O9XVi++qUviKP0vclggHADpUgePqAU8mJXq/H6NGjOXAAeOButOeR3gkKCmKdU1payuzyBOsOCgpivUQt38hxoJ6fISEhOHv2LCRJgr+/vxcRVVhYGMLDwxEZGYlFixaxQUx9m0VR5PYWXV1dzIr7xz/+EceOHePekyqVCg0NDexEnjt3DjNnzsThw4eRnp6OixcvcrZy6tSpXLdG4zZq1CgolUrU1tZi7969iI+PR01NDc/TpUuXQq1Ws8FJc3DWrFkICAiAUqnEp59+Ch8fH0RFRWH37t0AgHvuuYcdDnJA5TYHjR3dG6pbfffddzFr1ixER0cjJyeHz+H555+Hr68vZs2ahWHDhuHy5cv45JNP4OfnB41Gg/b2djzwwANYt24dfH19MWXKFMyZM4edi+eeew5ut6c/Nzkkouhh5123bh1MJhPWr1+PmJgYnkuEyHr99ddx9epVOBwOPPzww5AkCSaTCf369cPMmTMxYcIEAMCrr74Kp9OJoUOHIjU1FWfPnkVbWxu3MiosLERpaSlaW1vR09ODV1555TvtpK4npFuvt0b6JiOup5fl+zRlVbu6uuB0OlFYWMgsvAEBAZxgkDuE8sBPWVkZenp64Ovr69Vi7tSpUzh27Bjcbjf27t2L6upqHmOLxYLQ0FCuh/T19cWRI0e8kHAAOEtHvzV27Fh2Xvfv3w8AmDdvnhdx0u233w61Wo3jx497IQYDAgJw8uRJJCQkoKysjLkzCEGiVquRlpbmxasib5FGRF50LkQmRnp17969CAgIwLvvvovZs2ejt7cXgwcPRmdnJ9s90dHRaGlpwSuvvAKHw4Ho6GicO3cOPj4+qKiogMlkgo+PD0JCQth+bmtrw7Fjx2C327k1Iu1P1FJKoVDgtttu88p60ro0GAxITEzEmDFj0Nvby61+goODmUwrMjKS9Q2RScm/A/AEJLu6utDe3o7Lly9DpVKxvrxw4QKys7Px2muvsT33yCOPMFeDw+H4mwMv15O/yUEVBCEawBwA7377XAAwDQBh6T4AsPDb/xd8+xzfvp8m/LWw6Q35l8qFCxfQ2tqKK1eu4MqVK2hvb4fRaOTNrbq6GkajEUajEW1tbbDb7RwFAjybORWaS5LEdQZEbqDRaLi1AkFcAI+RSA3uqbBc7nhQRBLwFMt/+umnsFqtUKvVcDgciP+2HqyqqoqZIe12Oy5dusQOkNPpxLlz53Dy5EnuxVVYWIiTJ08iMzMT8+fPR09PD5YsWcJMnTec03+vrFq1ysup6Js9Ba5lOCZOnIi6ujr84he/4E2KapJUKpUXAYXRaGSWTLnRLN90CwoKUFhY+J+58P8PRB5cIkgTZc3JQKdanL6OAdHh9zWS5BH97du3QxAEZhiney9JEi5cuICUlBTs27cPADBjxgxoNBrs3r0b0dHRTEik0Wiwf/9+SJKE6Ojo72QUSkpKMGDAALS2tkKhUHCmlODJCoWC+73SMc3NzRg2bJgXUchDDz3Ext8777wDh8OBqqoquN1u6HQ6NnroWhsaGvDUU09xRgHwOAGDBg1Cbm4uDAYD1Go1w9lI5A5re3s7fvWrXwHwZB8IUSD/HCFOCGZKAUa5kSW/ttjYWH5OGSy500vGL61NmgczZsxAR0cHKisrYTabuZ726tWrMBqNKCsrw8cff4yMjAwm/eno6IBKpcKZM2dw7733Ii8vDzt27MDevXs5Q/GvFrmz+swzzwDwQHeVSiUiIiJw33334d5770VCQgKcTicCAwPZYKOAB7VQItgpMWcSMigwMJB7Oefl5SExMdELHkj9nMlRaGxsRFNTE+6//36Ul5fD5XJhyJAhyMzMhCAISEtLw5kzZ3g+zp8/H2lpadzmhZwnIhgrLS3lrG9dXR26urq492Rvby/efvttBAYGYt68eThy5AhcLheqqqpw55134pNPPoEgeOoTk5KScOTIEZ4viYmJ0Ov1mDt3Lu/5w4cPx1/+8hcAnjV7xx13oKenhzMwoigyezH1HLZYLAgODsYtt9yC6upqCIKAFStW8NqWB34cDgeCg4O9DGNykKxWK95//32kp6dDq9WiqKgIBQUFGDNmDF566SWsXLkSzz77LBYuXIjs7Gxs3boVKSkpqKmpQX5+Ph588EGvbOaoUaN4XRDMd82aNUxQ093dzeOnUqmwatUqdqzovOvr67Fnzx5s374dAQEBWLduHQBPWyq9Xs8BIqPRiDfeeIN14TvvvAOVSoWOjg7ExcUhKSmJe+1GRUVBpVJhw4YNePHFF//q/D5//jzXvNN6l5e8yNfy9TgB5NkxuiYqO3A4HGhra0Nvby+OHj2KsLAwL0fFbrfDaDSy86dSqRgRRYRder0eVVVVWLp0KRQKBYKCgnDx4kVGBZBNKQgCmpqaOINO50flGdRjnc49ODiY68V7enrYDs3OzuY1T/YmdXiQi
4+PDxITEznJIQgCYmJiOEtM9iw58IQakWep6TjKgBKpms1mw9WrVxEaGorExETk5eUhICAA06ZNY9RJQkICtFotjEYjKisrERwcjMLCQkiSp1b097//PdRqNSZNmsT3qbe3l4NKxAZOetzhcOD1119HY2MjVCoVkpOT+dwoEKNSqZi0ktorhoWFceeO4uJiSJKn3zjxEGg0GkaOyPcOIlcilCbNY7VajdOnT3ObLrm+onpvaq/1j8jfukO8DuC3AMgKCQbQKUkS4bvqABANVRSA2m8H2Qmg69vP35AfSKqrqxEVFQWNRsPkJgCYMIMULTmp1Neuq6uLs6G04er1ethsNo50xsTEwMfHhyPNtKDlkXZydoFrMAF5jEKhUDA9OW165KTQone73TAYDIiNjeXzEQQB48aNQ2BgIAIDAyGKHpICu92O+++/n2ssnE4nmpubsXr16h924P8/FXmmnJ7LI620WTidToSFhXEk1OFwMI29r6+v1wZLGwlBPImNlDbLK1eu4Ib8+4Vgi0VFRSgtLUVlZSUGDBiA2NhYdHR0oKKiwov5FvCuh+orLpen9ygRyzidTkyePNkLXqRSqVBYWIiBAwdCpVIx1LazsxOBgYEYNmwYf76yshIajQb+/v4YPHiw12+aTCY0NTUhNjYWGo0GEyZMgNvtxtWrV2EymdjgoEg0nTs5L6STfHx8uB0JQdpqamrYQSejd9CgQXjooYfgdrvR1dWFxx9/HIcOHeJNn+qkqD8psWv2rdelOe50OtHb24tLly5h0aJFAPCdoA/pcwoQyHumykmfyOCgMaCaVjqGHDJal9TOxO12c4bIZDLxGISEhHDbIepBXVtby1A6ajtDNZ6Ujejo6EBvby8uX74Mk8l0XcKXf4U89dRT/D85zAqFglsmjB8/Hr6+vjCZTAA82QNCelA5AdVqAWDmcafTifLycuh0Oq5ZttvtHLBxu92oqqqCwWDw+n2NRoPi4mKcP3+e4XZUxygIHnLA+Ph4XLhwAXq9HsePH2fYu8FgQHx8PMLCwtjQNRgMuPnmmzFjxgw4nU6Ehoby2hkwYACGDx/ORDBTp05FQUEBVCoVkpKSeMyHDh0KURS9yI/S0tJw+vRpDBgwAH/5y1/gdDqRnJzMtW0OhwNBQUF8r2keRkVFISsrCzfddBMHlaKjo3H+/Hm2E+h33W43SktLoVQqmR2UmKfp+9rb29mBLS0t5VrSkydPQqVSISEhAceOHeN1FxwczG1PZs+ejdbWViYGWr9+PZxOJ5KSknjNWK1WrF+/noNPRHwDeDJGL7/8MiRJwiuvvMLrlc6vqqoKXV1daG5uhs1mY7hwYWEhGhoaUFZWxlD7TZs2MTLhiSee4DYggEcXxsbGci9nl8uFI0eO/NV5TcHBc+fO8fiTA0W6gIScKhJ5hpDuN3ANteHn54fIyEjmFNFoNDh06BCqqqo4804167R3+/r6Mvu0HBIqCAJGjRqFnp4e/PznP0d0dDREUURQUBA6Ojq4HRM5QIQ+IV1NOgoA11CeOHGC7QrSX3KnTD6/yAalaw0LC+Okx/79+xmSTAE+QRC86sf7jqskXWM3p99wuVw8R41GI0P3S0pKAHhamOXm5kKtVjOrdG1tLb7++mvY7XZs3rwZAwYMYG4H6oM6efJkdpCdTicuX74Mq9XKY0jnoFAoUF5eDsATwJTvAfSZjz76CAAwYsQIJnyiY6menhxTQnPQPJOjK0VRRFVVFSTJUyrRv39/Jr5Tq9UwmUx4/vnnea4lJibilVdeQXJyslci6++V/9NBFQRhLoAWSZK++Yd/5frfu1oQhIuCIFz8V37vDfFIT08Pt5mhqJXBYIDBYIC/vz/i4uKYPpwcbAjAAAAgAElEQVR6lBmNRrS2tsJqtTIFPk16QfAUVZNiIwgwNS8nGAgZcAC8ImNyPL8gCKiurmYIkFqthkqlQlNTEwwGA+rr67l2qaGhgQ0cQRCY0MJgMCAmJgYOhwMpKSnYvXs3srOzER4ejrq6Om5vcEP+/SJX6gQVpPtFmwgpu6VLl+Lpp59GfHw8OwHUzJ1EvinIo3m0KVBNxA3594ogCPjmm29w9OhRWCwW9Pb2Ijo6GtXV1YiLi8OPf/xjRjH0Fdog5Q4j3VeVSsUG6JgxY7hJOx23f/9+zJw5E59++ikAcC1Xbm4uBg0a5MVkm5+fD5fLxdBeel2SJJSUlGDo0KFMmhUeHg61Ws1tAKKiohAQEICWlhYv45POnZxoubOTl5fHsL7nn38eP/7xj6FUKrF48WI88MADaG9vh8lkQmNjI8xmM8LDwyFJEs6cOYOf/exnsFgsnHUNDQ2Fr68vwyzp3MkAI4fnzTffRGNjI1555RWGh5LxIjc6qU0YORJut5uDBy6XC/fdd58XgzDVPFKQkUij5Ou3p6eH17AkSQgPD4fZbMaRI0fQ1dWFxsZGdHZ2orS0lMlAKPLe29uLzs5OOBwO7N69G88++ywcDgdaWloQERGBrq6u77D7/juE2EkDAwNx+fJlHD9+nAOZZWVlAID4+Hh0dnZyhlSSJC9CJGqxo1AoGB5M515SUsIwYBpDrVbrRaLT0tKCqqoq9O/fnzMKbrcbJSUlkCQJt9xyC5qamgAAp0+fxvLly1FWVgaDwcDB3/T0dEyaNAlz5sxBYmIi3G43zp07x1Dr2tpaREdH4+GHH+ba2BUrVmDnzp2Ii4vD4cOHMX78eOzfv5+dprCwMO5NDXiCIsR27e/vj4yMDAwbNoydWEEQMGPGDCgUChw+fNgLlrp48WKUlpZi5MiR+NOf/gSlUonly5fjgw8+4DpXCqbExcWhoKAAgiB4sXnL9QCxaB85coTr3nQ6HQ4ePIhhw4YhKysLO3fuxMiRI5Geno5x48Zh27ZtKCsrw913341HHnkEaWlpmD59Op577jkMGjQITz75JDsCFosFH3/8MRwOB2cu6fza29uxceNGzrKOGzcOgwcPRnNzM6xWK7KzszkbHRsbi08++QR5eXm4fPkyE2PRvNu8eTMyMjLgcDjw2GOPsc1FzoFCoUBzczPKyso4iPR90tnZyURexcXFDB2WZzpp/OQ6WG7H9f2s/LkkSV5zjuY3ITPIaRNFEffeey+6u7vhdnsI4LRaLRQKBerr6yGKIoYOHcpwbmJ/DQ4OxsWLF7n9i0LhaXXU3NzMc5Lqcwm9Jw94UuvBzs5OOJ1OGAwG3n8kSeJA0xdffMHnKYoiJkyYgObmZqSnp3O/VEJQEIs2kecBYEgz2S7yFjf0W3JSutLSUvj7+zOM32w287w2Go1Yvnw5rFYrenp6mJhKkiTU1NRwx4oXXngBLpeLA7BkV588eRI6nQ4rVqzw4nygsjqFQgGDwcB1xHR+JpOJkUZutxsLFiyAKIpe9e7EHUBlfAqFgsnLaNxpX21qaoLdbsfu3btRXV0Nl8uFhIQEuN1u5OXloaenh8dbofB01lixYsU/pd//lgzqRADzBUGoAvApPNDeNwAYBEEgqzIaQP23/9cDiAGAb98PAGDs+6WSJP1RkqTRkiSN/ofP/oZ8
r5CD1redB0X2WlpavCLnFNmXO5UEzRAEAY2NjdBoNDCbzYiMjOSsAtV60obct+ZUnhkhZUMGT0lJCfbv3w+73Q69Xg+n08kRufz8fISFhSE1NZVrYVUqFfz8/DB58mScPXsWZWVlqK2txYULF2CxWFBTU4O9e/diw4YN/7Fx//9RVq5c6RVto41BruDodX9/f1RUVOBnP/sZzz1itZTDfAEPy6YgCFwP1trayux/N+SHkeTkZNx8883o6enB2LFjUVBQgClTpsDlciE4OBharZbve9+sqdxIIqH3KeNEMCP6fElJCerq6hh2SXVx1NeY2H+VSiXa2trQ09MDnU7nlQEDwPWBxDB75513cp9du93OGSd58EuhUHD9Jukr+i5BENDQ0ICvv/4aZrMZfn5+2LVrF6xWK5RKJYYMGYLq6mqoVCqcO3fOq68f4KmhstvtyMnJ4T6SFDgkeLB83KhuEvAYQM8//zz8/f2xefNmNiLIAbXZbEwsFhUVxXWEcvjf8uXLvfSx2+3m36DPUAASANcPUasDqsGj3o9lZWVwOp0wmUyoqalhZklCPpCQYdvd3Q2dTodBgwahu7sbjY2N0Ov1aG9v90Jf/CtEHlAgIWdfpVJBr9cjNzcXNpuNW0+EhoZ67X/yGl2dTgcfHx8MGzYMqampGDNmDDMea7VamEwmdkhobGtqahASEsLjaTQa4ePjg/DwcO7zSEYzOQP+/v7o378/jwcx7U6cOJHnhCAICAoKwtdff43U1FQkJCQgKysL58+fh8vlQkpKCmw2G7Zv347p06dDFEVMnz4dCQkJUKvVyMnJwdKlS/Hee+8xIkmj0XCvVtLjkydPxpgxYzhgMWvWLBw8eJDny1133QWFQoGPPvqIj1OpVJg7dy4b8du3b4fb7cb8+fNx4cIFKBQKrFq1ysvZIPQCZdjJ6affaWtrg06nQ01NDRISEhhGu2vXLsTExCA/Px9vv/02UlNTUVhYiGXLlnEN4r59+7Bt2zZUVVXB19cXH374IURRxJNPPgmDwQCVSoWysjJs2rQJKpUK69at497vLpcLbW1teOGFF6BQKLBgwQJERERwYMjhcCArKwubN29GcXEx7HY7tm3bxpwXBMknJ7WgoAAFBQVwOp3YvHkzo8BsNhva2tpYH2RlZTHL9PVEp9OxcwsAISEhzDosZ/L+Pj0sr62nz8r1Aj2XZ/lJz1Cd6oEDB7immAKNdrsdP/nJT9ixczgcGD58OGw2G1pbWzFgwAAIgoCRI0cyPJdsgsDAQEZniKKIGTNmMDqORBAEJnhSKpXcOikiIoIdYnquUCi4RyldD/XrDQ4OhkajwY4dOyAInvZ3NTU1vNYJRSJPppBjKken0HiTU9bd3Y2KigrExcXB5XIhLy8P3d3dmDBhArdI2rlzJyMWsrOz4XK5sH37dtx5551obm4G4CEADAoKwtKlS/m3zGYzSktL4XK5MHr0aC8dLUkSdu/eDYfDgblz5/JY0b5js9nwwgsvQBRFDBkyBHa7HWq1mvdKQfC0U6IsqiiKeOSRR7xsdfkcouRAYGAg61VCQubm5mLjxo187E033YQPP/yQWz79I/J/OqiSJD0pSVK0JEnxAO4EcFySpOUAMgFQ86Z7Aez/9v8D3z7Ht+8fl/7dIdIbcl0hqAEAhjfJ4Wx9YTe0SQDXoG6dnZ0APBOSYDAUAfTz82NCJrVaDb1e77WISZnLJ7ocikJGj8ViwcWLF5GWlgabzYbExERER0ejsrISbW1tUKlUMBqNKC8vh91uR3FxMQwGA0pKSjB27FhMnDgRU6ZMwfTp0/GnP/3pBxvfG/JdkUN7v+++BwUFITs7mzch6rEYGBjoVTcjiiLMZjN8fHy4RcEN+eHk/PnzqK+vh1qtxsSJE9HS0sIkZBQ8uPXWW5nFta/RIxe5sQCAiTXkjpzD4cCpU6ewZMkS/PnPf4ZSqcSgQYOgVCqRkZGBxYsX8/FKpRLHjx+HzWbDwoULvYwwtVqN/fv3IykpCRUVFew0KRQKHDx4kEsfgGvOpyRJKC8v5158VILw4IMP8jVTrWtHRwdGjRqF8ePHo7u7G0888QTXAwrCtdrPhQsXQhA8NXWDBg3iusyOjg74+Pigs7MTkyZNui48j66HsnKCIOCPf/wjRFFESEgItzzp168fIiMjERkZiejoaK9xIBKcO++8kwM/9DtUQ+lwOBi+SQYicRBYrVZMnz4dfn5+iI2NxeXLl1FRUcFMnE6nE62trcjJyWHiGMr+ygOTZrMZKpUK+/btw9WrV9HT04NTp04xkQY5JfJo/ffJ/2VKXM857RtsoNeISfTQoUPIzMzE4MGDuf0PZS8ooCtvu0HGbHh4OMaMGYNbbrmFswlUa0fZP2LFlyQJLS0tKC8vx/Tp01FUVMTflZ+fD0nyEHjRvM3IyMBDDz2EY8eOQRRFhIeHMzmRKHrYc4l8hqCeDocDW7duxdq1a1FYWIgXX3wRERERiIuLw1dffYWZM2eiqqoKgiBg3rx5qK+vh1KpxKJFi6BQKFBRUcHfHxUVhatXr2LOnDnsvC5YsACZmZnshC9btoyPIydHr9ejX79+mDRpEtRqNf785z9zqQ5d86pVq/jeKBQKri2k8h5CbpGzTJ0C3nvvPdxzzz0MtTx79iyWLFmCxsZGtLS0YNGiRThw4ABnUu12O/bt24eMjAzU1tbi4sWLeP311yFJEpYvX87OaGdnJ9auXQsA2Lx5M9ra2lBSUoLq6mpcvnwZmzZtQltbG2bNmoW5c+cyGRChLw4fPoz33nsPbrcba9aswahRo5CSkoJly5ZxJowywZs3b4bT6cT999+PxMREZGRkIDMzE42NjTAav5PH+Y5QH86uri7MnDkTo0ePZtKukpISZGdn49y5czx+cr3S9/GviTzYSLaev78/B8O+/PJLHDx4kLP+9Ojn54fDhw/j5ptv5hKrZ599Fj09PcjLy+MkREtLCxwOB7788kuIoojc3FyvNSpJErKzs71siUWLFqGlpYWDCyqVCiNHjoTJZOJjo6OjWf82NDR4XU96ejq39SGhZA39zooVK/jzlIUkh52C6X33OeJhqKiowC233AK1Wo2rV6+ioqICvr6+EEWRx5Bs5RMnTiAvLw96vR4OhwOrV69mGDMF9ei6AWDnzp3cG5ngy4TwKS0t5bptCrCSjurp6YFSqcSePXsgCAKef/55iKLI6BBCrhF0HwCKi4vxwAMPeI0DJbOIdJD2LEEQmMhNFEUUFxdjw4YNHBCcN28eLl68yLW4f6/8MywFjwP4lSAIZfDUmL737evvAQj+9vVfAf+Pve8Oj7LM2r/nnZpkZtJ7QhJaIk1DBwtFVEAkCArY0FVWLIuLsrq4tsvGrmAvKyJgobjiggICKiAERGqABJJACAmk92RmMn3mfX9/jOfwzIC7q99+n7s/c64rV5Kpz/s8532eU+5zHyz4H3xHl/wPhQx+nU6HyMhIPmjJkJAkiVsMUGSN2BvPnTuHlpYWdlqJsdPhcCAyMhIRERGIjo5GZ2cn13yI0CfgfORahKKJz7W
1teHLL79ERkYGQ3vDw8NhMpkwa9YslJaWor6+HiaTCT179kRxcTHGjh2LZcuWIScnB6mpqejWrRtGjBiBV1555Reb51+7zJo16wLHlDYtclopspeXl4eNGzfiN7/5DRsb1COThA6c6urqrnrTX0j69euHiooKdu7Cw8O5LlSlUuHkyZO48sor0atXL26fQgeaWC9MIkboaR+QJIn72S1btozJY/x+PzMXNjU1ITExMciwIAQIlTCI30HEL0RANGvWLH78mmuuYbQIOamKEqgjvOSSS3hfBALZCYJfKYoSVJ8PBPbWxsZG3js1Gg2OHTsGl8vF/SZVKhV27tyJxMRE7Ny5EyaTCSdOnEBKSgpefvllTJs2jSFtIquvCLGi+Tx06BAWL16M5557DmPGjEFZWRn27t2L7777DgcOHMDRo0cBgPf1jIwM/Pa3v0V0dDTv3SIhiCzLXCdK10yZEcoYf/zxx/jwww/R0dHBvQ7JSKZ79PTp02hvb+c5EduQkLHp8XjYkOzduzfcbjc2bNiA3Nxcro0iY4n2DFFfSMQMRqhczDklEaF54meJQdlz587BaDSyg07ZQ7HejGpzIyMjOZtDzn5YWBi6d++Ofv36ITs7Gz6fD+np6dwOqbm5GWazGevXr+e62CuvvBItLS0MO50yZQoKCgo4S0b1f5deeik0Gg0OHz7MYyPG2v79+7Nzt379emg0GmRkZHBGHQBmzJiB7du3Q5ZlrF+/HsnJyUzYpCiBFh5HjhzhEhq1Wo1Ro0Zh48aNkGUZX331FdcjE6RSo9Hg2muvxf79+zkrrFarkZKSgn379mHcuHFs1KakpKCkpISzpXPmzGG4OfWnNBgM8Hq9rJti0MRqtcLr9eLNN99Ebm4u+vfvj9jYWKxfvx4WiwVvvPEGvvjiC8yePRtGoxFjx47FK6+8Ar/fj+XLl8PpdCIhIQHDhw/Hvn37kJCQgGeeeYb3IqfTiYULFwIAli9fHpRJ3b17Nz766CNotVrMmDEDvXr1Qvfu3ZGUlMS9UltbW7F48WJIksQwzPj4eM5qAeDg15QpU3DjjTdyX8yfIlRCQzwOHo8Hn3/+OTZu3Ai73c61jAcOHAjKxIrM96EEQj8moQgYWZZhNpsZOUM6SPvWmTNncNNNN8FutyM8PJwd+Y6ODjz33HPIzc2FJEm444470N7ejqysLFgsFr4HaVyyHOhdqlarUVdXF4TIampqQt++faFSqVBcXAyfz4fevXuzs0ZBLioZE1EBMTExOHDgAPr06QO9Xs99R8ePH89nAXWpoOunBI3BYOB7TDy/iHmart/v9+Paa6+Fz+fDwYMH0djYiMzMTCxevBgulwv33XcfVKpA/9Svv/4aLpcL/fr1AwCMHz8eDQ0N8Pl86N69O8aMGcPrRggXrVaLzMxMHisFpnbt2gUA3CJJXGfikADAZKcExyZpbm4GEMjQjx49GuPHj0dMTExQsgkI7JkUjKS6WkLZyLKMjo6OoOyzTqdDVlbWz+5N/5McVEVRdimKMumHvysURRmqKEpPRVFuVhTF/cPjrh/+7/nD8xU/a2Rd8m+RM2fOoLW1FbW1tTAYDFwzIkIWqABexNjLcoBlz+l0MrmSwWCAxWJBeHg4b/jEcEgwD4JyhUIDRGgJRWRI+aurq/HNN98gNzcXanWgH9iePXvgcrmYDfj06dPYt28fF48vX74cOTk5XI9xww03/DIT3CVBIm5OojEpZlPT09PR1NSEsWPHQq/Xo6GhgdsuJCcnIzw8HHV1dRyV7ZJfRlatWsWMi8XFxbBarYiLi4Ner0dSUhK3IpgxYwaSk5M5gyYaFKKjFSrUB3fTpk2wWq0cQOvs7ERycjI8Hg80Gg22bNnCLIQEudq8eTM8Hg+mT58O4HxzcWoWP3XqVFRWVmLAgAEMxWpra0NCQgJ69+59gRNEzI8+nw9utxsajYbZfcl5IzIMIqfYvn07911evXo1ampq0NraimeffRYDBgyAogRInIgohGCjI0eO5HqhJ598Emq1mhu6d3Z2Arh4Da/P50NZWRkkScKCBQtgMpkQFxfH2dFBgwZxqxObzYZ+/foxSybt9+R0AWAyFI/Hwz1MqX7SarXC4XCgpaWFmTs1Gg3Kysqg0+k4CLl///4gSKZo1NJvYuw9fvw4Z5q/+eYbaLVazmiJwSkRQUH6E0rCFqpP/8g5FYMKlFGiGl3xuyhTSlBfq9UalOGjUhT6ftJz+p+Ik4jt3mAwICIiAikpKejRoweysrLgcrm4R+GBAwfQ0NAAvz/Qe1JRAiQlV111FbKzs7F161bMmTMHH330Efx+Py677DK0tbUxfHHgwIE8b9S66NFHH0VLSwsWLFiAjo4OHD9+HJMmTYLX68XAgQNx+eWXQ5IkbNu2DePGjcMXX3wBRVGYMIb6K9Ka3HrrrYiNjUV9fT3Ky8sxZcoUnDhxgmGCFPzYvHlz0HrdddddKC8vx4wZM7Bx40Z4vV5Mnz4dH374ITvjc+bM4e+i9SHSJKrppXub9FRRFOzcuRMDBgzgoGavXr1w9uxZFBQUQFEUZGZmwmw249prr8XHH3+MM2fOYMWKFUhOTkZTUxOqqqqwevVqaLVaDBkyBOfOnUNFRQUKCgrw/PPPw+/3Y9OmTRyYUBQFJ06cwPPPP49Dhw7hz3/+M+bPnw+tVguNRgOdTge3242EhAS89tprAM5n6lNSUnDbbbdh48aN2LRpE/bs2fOjevqvCEH6bTYbvvjiCyZVIsRDZ2cnB3y0Wi0KCgqwb9++i2blQkVch9DXkL6LjxPcWlECxGlFRUXYsGED0tLSYDQaUV1dzYGFhoYGrF69mssACgsLERsby3sDcY5QAK1bt25ch0rfT2UlTU1NMBqNKCsrg1odYGavra3la77pppu4fISyyHRNer2e996jR48yIoVg4R6PB0lJSXytsbGxDL8lVEWog68oChOmrVixglmZiawzKysLDocDdrsd1113XRDxI7Xjos+5++67sXLlSsiyjL59+yIlJQVAYA99+eWXIUkS7rnnHiZ7AwK2emFhIa8vsYyT0JwSq/S8efOgKEpQn2SVKkBMZjQakZSUBL/fj2XLlgW1oaJ7lXgEaN+gxymTvHfvXiYgkyQJ/fv3/9kIuH8/z3uX/EcKQRUIbkUiyzIbTqISAoGDPTExEZ2dnaiurmYlFA07tVqNxMTEIIfT5XJxtBAAw9TEyE5om4GGhga8++67iImJweTJk2E2m7F161Zs2LABzc3N6N+/P9LS0tDW1saHuSRJuPTSS4OilF3yywnBY0QHhYTWmjbLPn36oKCgAPfffz8/TmzTRFrQJb+sWCwWlJWVobm5GTExMejTpw87Ku3t7UhLS2Njvnfv3qivr+e9QYy8itHXUIgl1Ul++OGHmDZtGk6ePAmfz4epU6dCq9VixYoVmD59OtdoWq1WfPTRR5BlGRMmTAjKDBBBR+/evVFVVQVZDrR1AQJZvqFDhwIIkC4RtIqMsoyMDDZIyamj16vVarzyyitcq08tPmRZxtChQ/H666+jpqYGNpsNc+
bM4eg0Eeo4nU6uOaO2KyNHjkRbWxuys7MRFRXF2Vq9Xh+0BqIRQv/PmzcPALBgwQKEhYXBbDbzZ1N2TZZl9O7dm/d0glCTM2mz2bhfNPU+JCfWYrGgpaUFtbW1aG5uhtPpxPDhw/l9+fn5UKlU7LyGrquYPRfLOXw+HzweD/r06YMVK1bAYrHgpptugtFoRHNzM2pra4PeR7ok1uKKdWEk/8g5BQJ1XURaRfNIWToKQJATTHopyzL3hyXDi65TvGZR1+n6Q5FCVM8XFRWF8PBwZu10OBwoKCiALMuor69Hfn4+vv32Wxw8eJBbUFA5jUajQWJiIiRJwp49e7gtzh133IGePXvi+PHjeOCBB9DU1IRdu3bBZrPh1VdfxRdffIHW1lYUFRUhPj4eGzduxI033oj29nacPXsW06ZNw6FDhzizpSgKO6l0DR6PBzNnzsSRI0eg0Whw6623Yvny5XC5XJAkCbNnzwYAfPzxx0FrNGrUKKxZswaZmZlYtmwZ1Go17rnnHqxatYpRWvfeey8UJdCLl+CI4eHhsNvtiI2NDVofr9fLvRw/+OADjBs3jmG2WVlZKCkpwXPPPYeWlhZER0cjKysLubm5TLY2e/ZsuFwu2Gw2VFRU4LXXXsOkSZOwdu1aOJ1OeL1e7Nu3D4sWLYJOp8PWrVsRHx+PjIwMREZGwmQy4fvvv0dHRwfi4+OxcOFC1nViT77++uvxyiuvcIuPvLw8zJw58x/q508RupeB4HtL1GUq5WptbWW28vLycuzevZtrXcV9RbTLLpZdDXVMKWtGCQ1C5xEKhJifKTgIAIWFhZxdq6mpgcFgYEZ/RVFw/fXXs5OpKIHewz6fj+tD6fu7d++OqqoqJCUlwW63Q6fTITU1FQcOHODX0GeKCBS6X1NSUvDtt9+if//+XF6k1WoZDaFWqzF8+HCGtCqKwtBrSuSYzeYLzjiR5drn8+GSSy6BVqtlmPLEiROxbt06AOA67cLCQnz66aes/zqdDj6fD3V1ddBoNDCbzbjpppvY1gWAt99+m+G4YsbZ4/EwAuCRRx7h7DVJe3s77HY7SktL4fV6MWfOHLbnSQg9sXjxYv7c7Oxs3otFNCaV+1DSS6VScUZVURS27ej/n3sPdDmovxKx2WxwOp1MekTKRjUSokFAG1V0dDQ3ZCc4B20wVBcqbjJ0qIX26gPOG6oUCaNx0CZAcIlt27Zhy5YtmDBhAsLDw/H000+joKAAGo2Gexg2NDSgqKgIJ0+exIQJE36pKe2SHxFad8qUh9ahAsCll16K5cuXo0ePHhzMMJlM2Llz5y827i4JltTUVMTExKCzsxOdnZ3M9AeczxCQzJw5kw0kMtzpnheNHjGiTf9TXdJ3333HGRHqz0ywMjJcyKAHgOTkZDZeyEisrKzkJuh9+/ZlFujKykpkZmby3ic6fNSsnQwMRVHQq1cvyHKAmItqmYjRlt6bmJjIDcwpi5CQkMBBQILFSpKEbt26cQSfnKPKykqYTCZ4PJ4gQ4wM8tBaMdonCWY5ePBgrFmzBjqdjus5qf5x+PDhcDqdQeOl+0+WZY7AKz9ALMkA8Xq9sFqtsFgs3IuwtbUVJpMJ2dnZ8Pv9sFgsKCwsZGZX0XkTDTciDSIHj1haU1JSkJSUhN69ezMKZ+TIkezAitlR0QgT9w86Y/6ZcwoE+lt6PB52gltbW/mcE/WRnGjSOwBBZEl0fcTDIKKDxExE6JqJv2m+lB9gjdHR0TCbzYiKioLL5YLL5WLiLkVRkJ+fj8jISOzYsYPhvj6fD5WVlTzmLVu2QKfTIT8/HzNnzsTGjRvx6quv4t5774UkSVi3bh0HkwYOHIgtW7ZgyJAh2LNnD/x+P/capzZgFosFZ86c4WuZOnUqtm7dCr/fj/3790OSJEycOJHbyfj9fgwfPhwAGAFA+jB8+HD07t0bOp0ODQ0N3J7kyy+/5CADQawVJdBvkTJhiqIwGQvZJBQEU6lU2Lt3L7e7o3vrzJkzeOutt9iQ/uabb2C32/Hss8/C5/OhT58++Oqrr1BQUICSkhIcPHgQdrsdf/rTn3gdT5w4gSVLlsDpdOK5557jvaWpqQkNDQ1YtmwZOxPXX3897xtnz55FVlYW8vPz/9ftEtHxIiEnle4jADYosnEAACAASURBVEH6bDAY0NDQgNLSUg4Gi8kGv9/Pjs3FkBChqAWqjQfOI1soUEYBLwoo2O12WK1WlJSUwOPxwGw2o7KyktEthFAgW4H63JNTS/e+yWSCz+dDcnIyw21p3cip9Pv9uOmmmyBJEjZv3szjo33Y7XZzX1GCcqekpOCrr76CoigcjBCvjf6mQGloHSuRBcmyjIKCAiQmJrJtTc+1tbVBo9GgV69ezJTc1NTEDj4hXKZOncrs3mFhYcwGTvXSQAApIa4RZchpvycSKhIKbFC5VGxsLM+zuK4i6RMAPP7448wNQnsxfS/tEWK3j9jYWP7e1tbWIEj0z5EuB/VXJAUFBQxRIuIQWZYZSkvpfDFyKtamulwu3HvvvYiPj4dareboS2RkJNLT0/kQIWOSesGRMUvfJxqJdPNTZPrMmTOYOXMmwx727NmD+Ph4PPLIIzCZTJg4cSK6deuGiooKLFjQVd78nya33HILgGBIrwgPIt3r378/WlpasH37dmg0GuzduxdHjhz5JYfeJSFiNBoRGRkJr9eLpKQkNDc3s0Egtishw/6yyy5DZ2dnkBEuRqLFehrxOwi+RwQW48ePh06nw/vvv4+77rqLHbbw8HCcO3eOSR7ocdIraqOxcuVKqFQqriktLS1l6n6VSoVPPvmE4ZsqlYrH4Ha7uc6wsLAQCxYswKuvvor333+fswMAUFVVBbvdjksvvRTff/89H77Z2dmQZRmvvfYatm7dit27d7Px1b17d/h8PpSUlOAPf/gDVKpAo/oTJ05wP0WCJxMxj+jYi0Efn8+HhQsXco+9kSNHIiIiAgC4xnPIkCFBwQISuk4xeED3qs1m497YZrMZPXr0QFpaGnJycrhVRP/+/aHValFUVIS2trYgI5XGKUlSEBzO7Xaz8+z1elFYWIjGxkZcccUVkCSJHfbBgwdzJiYUahga5BAzqP9MsrKyWJcJYtzQ0MDkQqS/dB4ReZJ4BhLDseiMiwabyOtwMeeBXke6Gpp1lSQJERER3AqO4MIGg4Fh70ePHoXb7YbD4cCJEyfw5ZdfYuvWrejduzdiYmIY6RAXF4cDBw7g+PHjKCoqQkFBAf72t79h06ZNiIyM5F61BOmeOXMmEyqOHz8eKpUKhw4dYn2XZRnXXnst+vbty5wQ8fHxSE5O5iBIv3794PP5sHbtWp4XevzMmTO45ZZbsGXLFrhcLkydOhUWiwXbt28HAM6iqtVqZGVlwe/3s/NAPUIBcLarvb0dGo0GjY2NaGtr43rFzMxMuFwutLW14bnnnoPBYMBtt90Go9GI4uJi/PnPf0a/fv3w7rvvQpIkNDc3Y8eOHTh06BByc3ORlpaG6OhomEwmNDQ0YOHChUhKSmIYutPphM/nQ0tLCxYtWgRZljFo0CAcO
HAABw8exN/+9jfuV/y/JaSntC7kHHs8HkapkS7SGqhUKlitVrhcLgwePBhDhgxBz549OfNHmVDar2lvED9HDDqG3u+kwySU/SOns7W1FQMHDuRa0fz8fGRkZKC6uhp+v58h4KFtXES7gfaVhIQE7sPrdrvZYQsPD8eBAwfY4Y6IiIDH42FuBBontbfZu3cvZFnGsmXLoCgKsrKy0NLSwtdw+eWX8/URMy+NAQAH3cT9g8Z//Phxbn8GABs3bsT69etZr/r164fhw4fD6/WiuLgY586dYzu7rq4OcXFxTIpkMBj4DAQC+2BBQUFQzS2tl9frxYsvvghZlvH000/zXNKctLe3o6ysjPX4/vvvh0qlQnx8fFC9qizLrPMxMTFYsWIF6xnpBRAI/FksFm6ZRmdpdnY27+E33HDDBUHpnyJdDuqvTKjRMjmoAHgjIeIL8ZAlQgyv14uioiI0NDQgKSkJ1dXV0Ol03OZBp9MFQX3pe4iUiSjQaRMVI1CiIdvU1ISvv/6aCRgICvLII4/g8ccfhyQFmBWXL1/+Y5fYJb+wiDAv0jXxb3pNRkYGduzYgQMHDvzCI+6SiwkxBg4aNAgej4d72RH5CpHJ0OFzxx13cC0OiWj4i7BBMSJNEEYA+N3vfhfUS1JEWqxdu5br0aj2lMRms8HhcLADS1kbv9+P4uJiZtElw5f2IMogkmFHLV9o/6OaLwqupaamIjk5GTNmzIDVasW6devQ0NAAWZYxZswYbNq0iXWfDIPY2Fh88cUXCAsL47YmKpUqqM+fLMtIT09H3759YTAYGG4V2tqA5lar1eLtt9/Gzp078eijj3L7gYqKCkyYMCEoEyuuhcfjgc1m472YnEan04mGhga0tbUhOjoakiRxppFYlqkeOSwsjNdTrCGktRGdS4/Hw/NOUfza2lpYLBb+XJfLxbDM9vZ2zh78M/lXsqdA4MyLj49HdHQ0IiIiOMBqt9vR0dHBGXARykaBAAqUisgjj8cDt9vNNdckoTDnUGNeNGjF9QzN5oqZG4Jom0wmHj+hlqjW1e8P9IaWZRl///vf2VkbMmQIcnJy4Pf7ERUVhYMHD2L79u1IS0tDeXk5pk2bhq1btyIxMRG1tbUMT7zlllvg8/kYkggEHICCggKMGTMGX3/9Nev79u3bOZtNLWTef//9oPv86quvxqpVqzBgwACsWLECGo0Gt9xyCwYMGIDvv/8efr8f8+bNY+OXsn+UOSLdEoNR1Iu2ubmZAztUt1dRUcHESTExMbjvvvvgdDqxd+9eLF26FNHR0bjssssgSYGWH6+//jo+++wzvPHGG8jLy4NOp0NzczO8Xi/WrVsHrVaL1157jZ0oSZKwY8cOTJo0CXl5ef+SDv67RKVScZ082WxkW5GTr9PpoNfrGfEmSRKuuuoq3HHHHcjKyoLZbIbJZEJ4eDhsNhsKCgrQ2Nh4gbNJ30VzThKaPRTHBpwv49Hr9TCbzUhLS+MECJGM1dXVwWKx4PLLL4fP54NWq0V8fDw2bdrEe8XEiROh1WqxYcMG/g7a2wmqvGLFCqhUKkybNo15BOiHMpo2my0oODRlyhTU19fDaDQyORNxKBAhYHx8PJ8DwPnMKZ0PxJkBnGeCJ2ff4/HA4XAwk7VarcbgwYORk5ODzz77DH6/H/Hx8Rg8eDDsdjuWLFnCZQX0XVdeeSWjEYxGI6688kpem61btwIAnn32WQ6UkdNJAT+Xy8WBP3FNZVnG0qVL4fef7/dMgR/SlerqakiShI6ODt4H586dGxSgo/2NkBwajQYRERHQaDS48847g87QF198kTOyP1W6HNRfmZSUlHANAQDOjFIEiCLeBPfQ6XTQarV8GL744ot48sknkZqaGqSwMTEx/H5SdJvNxsQn9F10CNEYaOMQsyFFRUXYvHkzoqOjMWzYMHz22WeIiorC448/ziQFXfKfK9TDKzRrKtahqlQqbNiwAbt37/6FR9slPyYxMTGYO3cukpKSkJCQwLA5AJzRJJGkQMuoQ4cOobKyMsgJFSFiIlssADZG2tramMLfYDDgzTffxOzZs/m9jY2NsFqtAIAJEyaw4U4HbHFxMfr27YvKykpIkoTBgwPttc+ePcvRXZVKxS0zRGeBMo1iTSwQMDyonpQaw9fV1cFkMmHLli3YunUrYmJiEB0djYceeghqtRplZWV8MJMD6vf7YTabsWHDBqSkpKC5uRl5eXlcJwsEnK3HHnuMmSVFx0/M1JHQfrl69WpIkoTHHnsMHo8HN9988wUZFtprxXo+MjypfYLT6eRgo0ajgcFgQHJyMsLCwtjYSEhI4H6gtL4Xc8IoCEXGnjh+ajmyfft2vP/++6wLBE02GAzIzc29AN4cKn/84x//JR0GAkal3W5HZ2cnWlpamKSEnKG2tjbU1dUxkoiCAzQ2ynC0tLRApVKhuroaR44cwaFDh9DU1AS73R5EEEYi6r04XyKSCMAFj4vGZOi6i0JBApPJhNjYWM64UgaYINUUhMjOzkZycjLa2trQ2dmJTz/9FJ2dndi8eTPGjx+P1atXs+7dc889kCQJGzduZB28//77sWXLFmRmZuLjjz+GwWDA9OnTsWrVKr6WMWPGQK1W44MPPuAsuizLePjhh3H48GFkZ2fj9ddfZ3Kb48ePcyBpxIgRfF4QBF3cE0iPyOhvaGjgYE5nZydGjBgBv9+PIUOG4MSJE9i6dStee+01xMXF4fPPP2dn59VXX8UTTzyBOXPmoFu3boiPj0dJSQkOHz6MyZMnIz09HRqNBjabDQcPHsSiRYsgSRJOnTqFEydOoLCw8F/WvX+3iPWn9JscDAqiiAEQCpIR5JOg+9nZ2SgtLYXD4YBer0d9fT0KCwtRWFjITgnpKO3Voh6G6nro87QP0HoSjJ0QHA6HAwC4TKCjowPdu3fnPRMAO4nEXkuPZ2RkwOVycfss0lmbzcZ7j8/nQ48ePaDT6VBTU8N2qVarRUxMDNdVa7VaFBYWQqVSIS8vj5nQAQTBZEX7lO5LgqbTXk01sbIcYCG2Wq1MIrpt2zY0NTWhtbUVXq8XkydPRv/+/TFs2DDU19ejsbERiYmJ8Hg8kOVA7eeiRYvgcDgQERGB7t2783UZDAbk5+fD7XZj3LhxAMBnsSzLWLRoERRFwVNPPQWTyRSkN1arlfcrr9eLBx98EB6Ph6HT4mvfe+89Dt5OmDCBH6egCI2nvr4eanWgN7fX60VmZibuvPNODiodPnwYs2bNwtKlS/+5godIl4P6KxRyCqiwu7W1lY0XOgxaWlq4XkqtVsPpdPKN+cQTT+D3v/893yx2ux1utxtGoxHx8fGcDZUkiTMvlLGlhskizE/MsJDRUF5ezq1uLr30UqbQp35lXfKfLaKDSpFGWuMbbrihi3X5v0DIuCDnIZREg9aTDMn58+dDo9Ggra0NwIUOVWjdqmiom81m+Hw+HDt2DEuXLmUnlAyuQ4cOca1Qeno6fwZFrM+dO8d1OPfccw9kOUDecfDgQT7E29vb2fCljAMZGUT4QtF2EV5GgTSdToc777wTSUlJsNlsCAsL
Y9ZpYqQ8d+4cqqqqkJKSgkcffRQvvPACO6k6nQ7ff/89Fi5ciD59+vAcEaSxoqICGo2G913qyXkxEbMc8+fPh6IEyJPI2SOnhK6F4HBerxednZ2cHWxubuY2Menp6UhOTmZHh67fYDBwxi4sLIwJ9wDwtYl1rqJuiM6pLAfa2pAhlp2djUOHDgEIBEOoDU1FRQXXSF1MfopzSmMEAqRfkiQx8yhlQ71eL9rb27mfpsPhgMvlQkNDA8PcJEniQCtBhilrQjBaavmzb98+NDc3Q6VScdlMqJMp6qC4lhdb54s5A/R6cmxFNJSYvTQYDIiKikJiYiIiIiIYrUA/AFBeXo4PPvgAYWFhWLt2LT755BOsWbMGQIDdmjLdKpUK11xzDbKzs6HVarF06VKoVCrMnj0bf/vb36Aogdptqt/+6KOPeEw+nw833HAD90x89dVXYTAYcP/99+P111+HVqvF0KFD0b9/fwAIKiNQq9VsRIuoAJ/Ph6amJnZe//rXv6KsrIxrX1UqFfLz87mONC8vD2FhYTh79iz27duH8ePH47HHHoNKFaiR/Pzzz7F+/XrMnTsXY8aMQUlJCU6cOIEdO3bwHvJLixiwor2Xzlh6juaHHEStVouvvvoKW7ZswZgxYzB48GAYjUbcd999eOCBB3Dfffdh2rRpXA995MgRdsJ/jE8kFAUX+rw4XvHv8PBwmM1mmM1mbt3S3t6OTZs24dSpU/B4PCgoKABwvobdbrdfELjp27cvZ2krKiqY0FMk37ziiiugKIF+uWJJgCRJmDlzJk6ePAkAOHbsGJN91tXVcTbxN7/5TRB/BvUrFudXHJeiKLBYLACAU6dOMXEWcbwQS+6iRYvg9/sxbtw4JCQkYPbs2Xjrrbeg0WgwduxY7N69GypVgGRs9erV8Pv9SEhIQFJSEoCA3X748GHuT07ZXQCcST158iQURcHkyZOD9gZFCbSJXLJkCdRqNcxmMy655BJuk0XXQUikl156CVqtFk6nEy+88ELQmU8Z5/r6ehw/fhwqlQpJSUnQ6XR8r4tIzYcffvif6neodDmov2Kh2iDCs5NBRD2OgEAPK0rPq9VqXHHFFaioqIDJZIJer0d1dTWzm4WHh0Ov1yMmJoaZgonwIS4uDgAugPqKkSmCK8iyjLa2NixcuBCRkZEYOHAgbr755p/dS6lL/u9l6tSpbKCKB2eXY/rfI7RmYWFhQeQHoqOq0Wjw97//HS+88EIQguKf1Z2EZlepB+K+ffvQ2trKjMGKoqC8vJwN/kmTJgU5umq1GitXrsSsWbNQUFCA3NxcRoOsXbsWN998M3w+H3Q6HTOjktNL5A6ig0D/i1l/yo6NHz8eu3fvhtlsRq9evTBu3DiUl5cjMzMTbrcbb7zxBjQaDdLT0/Hb3/4WKSkpDF2k7AVds2h8U4uPs2fPco9DIqWjPqWiYRQqLS0tePHFF9GrVy9cffXVDAGUJAmdnZ1wOp1cXxUaPKioqOA2KOTA0NqS80P3LzmtRKokGsdEBiIaJCIcjIwjRVFw8OBBLFiwALW1tdDr9VizZg2sVisuu+wydlSJvyD0mn+qc0rjIKQQEYA4nU5kZGSwARwXF8fnDxnftP4ajQYOhwMGg4Hh4cT0XF5ezmdkZ2cnX/upU6dw4MAB7Nu3D7t37+YAr5hhDg2AXCxTGgoHpveFZrlCYZji68T3i3BZvV4Pk8mElJQUzsDRWU41i263Gx9++CFWr16NVatW4cCBA9i2bRuuuuoqOBwObi9DhFqSJGHs2LEwmUxwOp1MpKRSqZD5A0nZ9ddfD41Gg5deegmSJGHu3LlYsmQJJEliB5bWjeqlw8LC4HA4kJCQwIEo0kNicM3JyWF26oceegjffPMN3G43vvvuOzz//PO4++678cc//hFqtRpr167F+vXrkZKSgj//+c98Ty5btgyTJk3CW2+99ZP17P9CyIkQ9+LQv8kpDX2M2gXR/zTPERERyM7Oxpw5czBv3jzMmzcP0dHRKC8vx7FjxxjuLN6LYhAEuHh/YfF5Mfso7t+kf8RBEhERgdLSUnz++edMIER9ckXJycnBuXPnuKac1p8YmylwSS136L4GAoHRyMhIFBUVYciQIXC5XLx/EWSebFO3282OKAUzyXGmv8X5pMCoWFpCqKPq6mqcPXuWA0dUo93W1oahQ4fyGKurq3Hs2DGoVCo0NzezI05EZDSPRUVFkGUZzz33HNvPtBZffPEFZFlGTk4OZ1dpLQhqfeLECfh8Ptx0001wuVycjaZroTZkO3fuhEqlwmWXXYaePXsCAO9hFOSjc0qlCvRHl2UZ77zzDjPUh5Ix/avS5aD+SoVw5k6nk51IOgzcbjcsFksQ1EmtVmPBggW44YYb8Nvf/haLFy/GggULkJSUxNFYq9WK2NhYNlQJvtfR0cGGrkgyIUb4Q40ZlSpQx1pZWYlHH330F5ypLvmfSFfW9L9XJk6cGOSIAsGGiCRJTJSTnp7OBDoAOIIrimjUiAYMZeAiIiKg1WpxySWXsFOk0+mwe/dueL1ezJ49O4itEAA2bNgASQqwE9KBDwT2q87OToSHh3M9zdmzZ/kaqJ8pBebCw8PR0dERRBhCdUbEbpqUlIS0tDTs2bMHmT/0WkxMTMTtt9+OsrIy1NTUID4+ng/tkydPIjIyEo2NjbDZbFxbR/0xAXB9T3h4OPLz89Hc3Iz4+HiuJyLGWNFRIaE5pBpEvV6Pbt26MfkN1c9SlpNILmheS0tLkZGRwddM7WbIYRLr/siRMplM0Gg06NOnTxAEVYR8iUYprbHohBGszmQyISEhATfffDPOnDmDqqoqlJaWQqvVsjMpys9xTmm+ifCHnFJyWEU4NBmgGo2G2UPJIaB5I+M1MzOTCZ2o1RKVxVAQ1mAw4JprruE2aEeOHOG+lPv378fu3btRVFTEOn0xGDCdy/SZ9Jw4xySh8E8x2yY+JuoNPSZmmAwGA0wmEyOiaF1dLhfDA3ft2gWdTodTp05h+fLlUJRA7emyZcuwe/du3Hzzzejs7OSgC13bsGHD8Pnnn2PGjBnQaDR48803oSiB/o9//etf8dFHH+Hee+8NciqIG4MCNrGxsZwxS0xMRFRUFJqamqBWq9G3b1+UlJTghRdegCzLWLVqFeLi4lBXV4c1a9Zg0KBBMBqNkCQJ+fn5WL58OaZOnYri4mKcOHHiZ+nX/6WEBmzEYKHoKIkOKP0mUrCLfQ4Rj/l8PpjNZsydOxdPPfUUfD4fTp06FYQKoHUJPRMoACP+T0L6JjrM4jiIb4CCa52dnfj666/5fKDkBMF0zWYztm3bhrS0NA50paWlweFwBO01V111FVQqFT777LOg+aJr6N69O9RqNaqqqqAoCrKzs7lOWlEU3HjjjXwdycnJ3BOZ7GSxzpOCsvTZRCBEjzscDvTu3RtqdaAnqcFgQEpKCqxWK7KysvDUU0/B6/Xid7/7HU6dOsVIlu3btwchEoEA4mfXrl0oKSmB3+9nW50CbpIUaNvn9/vx9NNPB9UoAwGCo23btvGaP/XUU7wOdK9Sr1OqE3e
73UxMKiYdyDGnXqxbt27l+/aKK66AXq+HRqPhoONPkS4H9Vcs1HMQOA8logwIGQgUOV+4cCFTb0dGRiInJwdvv/02IiMjce7cuaAaAaPRiJiYmKDPbWtrQ1RUFGdKqC6KaL5pYxOzqQ6HA8uWLfvF5qdL/meSl5fXlTX9L5eJEyfy32TEiAbI559/zo/HxMRgyJAhGDFiRFBJABDsrISS99DzQKC+paamhp0jsSduqFFDdWh33XUX9u7dy5AirVaLb775hrOJGo0Gn376KY+5urqao8WUqSSHkA5xioxTcC0zMxN2u53bA4gRYXJG7r77bmY6p5Y3OTk5MBqNiI6Oht/vZxgrzUWPHj3g8XiwdOnSIMilTqdjGn8yAC4WgRbn7Te/+Q20Wi2mTp3KDqpGo0FLSwvXDRG5T1lZGddHkdNIxo8I6STDUtyjCQYMnEfE0LlBcyzW8YpQVprzrVu3Ii8vDw6HA06nE7GxsXjmmWcwevRoyLIc1K5EpVL9bOeU9ITmSJICpEx0xtntdsTHx6OtrY2ziuK1V1dXAwDrvcFgQGNjI2RZ5no2sb4NCKCGiL1UrVbjq6++wr59+7jVGxEPUuaxpKQEu3btwt69e7F//34eHzmjolEv6pzoaIZmSkMdXfG5H/tb/HzxtyQFWJkJnkk9IyVJQlRUFF871TMXFhZi+fLlzF/xzjvvYOnSpXjrrbfw8ccfIy8vDytXrsTvfvc7eL1eNDU14cCBA3C73XjyySeh1+sxd+5cuN1u1u/S0lIeh8/nQ1xcHDo6OtDR0YFLLrkEqampDG9MSkpCU1MT/vCHPyA1NRUfffQRvF4v9uzZg8WLF+Pw4cM4ceIESktLsX79+p+tV7+EiBlTcY0oaCc6rCKShRIGtK/Rc+JrxMclSeJgU3h4OHQ6HSwWCwwGA0P8xTr+H4P7imeFuI+Fli7QmCl4Fx4eDpfLBY/HA5fLhaioKOzduzfo/PH7/Rg4cCDrLX2m1Wrla+nRowdkWUZra2tQwItgwuvWrYOiKPj73/8OSZKQk5PD9ZmSJCErK4shtJIkoa6ujudbzJaK1y4iZbZv3w6v1wutVguDwYDjx48jLy8PbW1tcLlcSElJQVZWFlwuF7p164aGhgaYzWaUlZWhsLAQV1xxBTo6OqDX65GdnY3rrruO99eOjg7s2rULiqLg5Zdf5uvz+Xyw2+1Ys2YNJ4UyMzOD0ECUmCoqKgral8nGp8fOnDnDe5harUZMTAxuvfVWREREICIi4gJ9qayshNfrRX5+PoBA+zlC5Yht6v5lff/J7+iS/6+EbvjExMQgBkuK6BqNRjzxxBN885Ph2L9/f7S2tuLZZ59FamoqampqoNPpUF9fH9TKJjSKR4Yh3WRiRhUIrs2hGoEu+e+VLuf0v19EJxU4n01VqVSYMWMGZ6VEWNnAgQMv+JxQKJTorJKBTDXvRHe/c+dOKIqCPn36MISUDk8yYE+dOgW1Wo2rr74aKpUKp0+fRk1NDbKysgCADX4ypgjiRZlOtVrNpB1iZpeMEJPJhJycHBQUFKCuro5rdk6dOoX09HTu5efxeJCeno5u3brB4/HgyJEjsNlsuOSSS+ByuaDRaGAymYIyQ5TBLC8vD5qnxsZGzlYRB0CoiHBk+kzq5XfJJZdwEJA+g7J77e3t3EaFHA/ai2ltaY0IBSMS5Wk0Guh0OoZTixlSclxEJ5U+Q8ycUB9JqnEsKSnBDTfcgHPnzuG2224LajT/2GOP/TzFFfTV7XazA+Xz+RAREQFFCbSRIVZlcjRtNhvPb319fRDDvCzLDJmjaxVhl2LdtCzL+Oqrr+B0Ovm15LCTEdnR0QGr1QqdTscZ15SUFOj1etTU1ODYsWOoqKgI6k8pwq5prcTvJp0Q9Ul0ZsT/6f10vaEirpn4PGWO6TEKVBA5EzHFulwuWCwWNowtFgu2bduGsLAwLFmyBAaDAWvWrEFJSQlyc3PxzjvvQKvVQqvVYtasWTzejIyMoFYkKpUKERERkCSJ9TkhIQFarRZGoxEJCQmwWq14+eWXodFo8Je//AWnT5/m1jb/rSJmS0NrvOnxUAeWnFaVSoXt27fzXhHKCUDvJ/F4PNBqtRg5ciTy8vIwdOhQzq6LmUJaD3p/qB6JKIyLBdnoNeI1UDBIpVJxUK2qqgpLly7FoUOHoNPpEB4ejoaGBgDAunXroFKpMGvWLJw7d46/LyIigqHwoZnfmJgYDjrSuUNkedT72u/3c8Y9FKJPz4ciPeh8oT1j4sSJPNdWq5XbIr300kvw+XwoLi4GAGRmZiI/Px9erxfTp09HXV0dZ/XLy8uhKAoTyNHeZbVamT2d+BXEc3bx4sVQFIXZ8cXnHA4Htm3bxpnx++67jwMS4j5msVi4fypllVNTU5GYmIi7774bV111FbRaXVQE1QAAIABJREFULbKyspjI8NChQ7xH3XnnnUGlHz9FuhzUX7mIxCFkxNDN/Kc//Ql//OMfg4wW0XgLCwvD+vXr0aNHDyQkJHAaX5IkxMfHc7aBjJO2tjaOtIpsnkQkIcLD6Kbtki7pkl9eQp1UuqfDwsJw+eWX48Ybb0T37t2DnE9y+oDzJDmioSzCKymCS3V+q1evRnFxMWRZhtPpxLhx44JgiDqdDh9//DHuvvtufPfdd8jOzuZSg127duGKK65g44H6otK40tLS2MgnllAKzImGFBkjVMbQo0cP7N+/H0OGDEFRURFKSkpwzz33MOu5ShXoA+d2u1FcXIy8vDzY7XZUVVVxqxOCI9JYLr/8cjQ0NHAEmjKHXq8Xa9euxTPPPBNU/3kxER2STz/9FEePHsWoUaOYMVhRFK4nbGxsRG1tLWcqzGYzrwEFH0UnXTToxJY7BJ0mxwEAO16iiHwDNFZyaD/77DMYjUa0t7fj1ltvhdFoxPz587FmzRp8/fXXP2rM/lQhYii9Xs/ZEZPJxEgeWZYRGRkZZHTTuURkLgA4IEJ9SePj4zlLSIghcqDIISNWTjKSyUEm45+c45EjR2Lq1KkICwuDoihcdkP1a3V1dTh16hRKSkpw/PhxFBcXo7S0NKhulnRczGjRb/E5kfgmNCsb+lpRt0RSGLH+NfS+AYDOzk54vV5ERkYiKioqiGXW6/Vy9pp6KVosFhw9ehQRERFYvHgxXn31VaxYsQKjR49mvaE6RJ1OB7vdDo1Gg8jISM7mt7e3IywsDHFxcdxOaePGjRg1ahRuu+22f4su/dIi6igJOar0HAVaQrOp5DzQ54hOJbWZER1fvV6PvLw8xMbGBmU+DQYDamtrcfDgQRw6dCjo/gaCs6Wi/ol8I+I4xN9AcJBQp9MhJSUFAJgjpbi4mPf0s2fPIj09HXV1dew8nz59msnu3G43oqOjodFouMSD5qNbt26QZRm33347tFotNm/eDLVajdmzZ+PLL79kfe/Zsyc7W5mZmZxdpVKK6OjoICQJAA4Ktra2QqPRYOLEiVCr1cjIyMDTTz+NiooK+P1+hIWF4a677kJiYiJsNhsqKiogSRJGjBiBPXv2YO/evVi3bh2TlkVERGDQoE
E8HxqNBitXroTb7ca8efOC9IP4B6hkguaZ1pFq5rds2QK/34+4uDjmkqF1AMBnyMqVK/kzbr75ZuTm5qKpqQnJycmYOXMmkpKSkJuby04zsYLn5ORwr+OfKl0OapcAAPei0ul0cDqdeOmll4Ki/UDwpiZJASa0bdu24fe//z1MJhPXtTY0NHDGgorBabO02WxITEwEgCDDhuBIfr+fozVd0iVd8p8jEydOvADyKzqqI0aMwN13343o6GioVCqOIpOIRkjovgIEDBu73c51O99++y2cTifmzZsXBGNUqVTYsWMHhgwZgg8//BBerxc9evSAWq1GTU0NYmJi0KtXLwDgrC5JeXk5M7DS/kM1nmSEEQGL1WqFwWDAlVdeiY0bNyIsLIxr5+rq6jB16lR4vV6ObttsNnaUieFw3LhxUBQF6enpmD9/PiRJQnp6OnQ6HW677Tbs2rULf/nLX9hwIJKO9957D4qiYNmyZew4OByOC4w7sQ6UjKh33nkHOp0OU6dOhdPp5PpXh8PBCJekpCQmrqPPpMyAmJWhvZkcWGJSdTqdzOqbkpISZGjSeEPZfWmM9FNSUoL8/HxYLBbU1tYiNzcXy5cvhyzLuOyyy6BSqf4t/ANiPS8xHDscDnR2diIsLIzrTQkGSdkFSQpwNJw+fRoAkJCQgIqKCs6403lIxh/BXcnRp/IV0i1iryfDW5zn77//HkeOHEFubi6GDRuGCRMm4IEHHsBjjz2Ghx56CA8++CAeeeQRLFiwAE888QQcDgfa29tx5MgRDup4PB524kSnRQwa0HqI2S9aJ7E2T3R6xYycmLkVnRYxABEKSabvoHHo9XqEhYUhPDwcRqMRcXFx0Ov1HLy2Wq1oa2uD3+/Hhg0b0N7eDrVajcjISM6WmkwmdkKIOZxqgvPz81FaWsr9Uf9/ktCaU9GZI+fkYq8TRa1Wc1by5MmTOHnyJGJiYoLWi/622+1Qq9VITk7GiBEjMG7cOEyaNAlDhgyB0Whkh/DQoUNMwAQgqA5U1D9xrGJwQ9QfEvE1kiTBZDJxzafFYoHf70dzczM7e42NjZxRp7FIksQtEY8ePRpEUAYAt912G44cOQKv18stvgjNcPLkSUiShNGjR/N9K8sBUiYxUAOcJ06iuaNgiaIoWL16NQ4fPsw1mFFRUUxA9O6773LGdcSIEbjjjjtw7NgxyLKMq666Cunp6Xx/rlmzBoqiYPz48VxWYrfbmcE+MTGRv1OsVX/77bcBAIsWLQrSGUkKcDO0trYyid5zzz0HWZZhNBqRlZWFKVOmYNiwYQCA2tpaLm8YPHgw0tLS0LdvXyQnJ/O+19LSwkRy9fX1cDqdUKlU6Nu3L/r27fvT9f0nv6NL/r+UxsZGhns9+eSTQdEu8bATIzQajQaTJ0/GE088gTfffBOJiYmQJInrs1JSUjhCHFrXkJSUxDAFKjhXFAWnTp36paeiS7qkS/6BiE4q3dv0t6IomDFjBrRaLaxWK9fjiK8HLuyZB5w3qoh9kWCoBL8SnYjS0lLk5ubC4/EgMTERSUlJ0Ov12Lx5M26++Wb+Lmp1Icsy2tvb0bdvXzYsaC+ibBU5pyL8beTIkYz8IKOAHI+srKygDMWOHTvQr18/NDQ0MCy0srISU6ZMwe9//3u88soraGtrQ0VFBRRFwZtvvonPPvsMycnJ0Gq1DEO95ZZboFKpmOSpT58+PA+hUGTRySGjBACeeeYZGAwGLs9wuVw4cuQIhg8fjv79+3PLFHq9Wq1mY4sMIpVKxdwC9Hpia6V+m1FRUdzjkDKDtLbk7NF8i61NgPM99FwuF6qqqtDW1oaEhATk5uYiMTHx30aOR2QfUVFRcDgcDD11u91c6yYiiMhB1Wq1SEhICIKhR0REQJZlno+EhATExcXBarUyHJBQQ3TdVJNKAYiL1fzJsoyGhga0tbVdUGdI80b3hyRJeOCBBzBs2DBMmjQJ4eHhiIuLQ1lZGRobG1FSUsKkP8ePH0dpaSkaGxuD5kQMHIiQRfFxMTNGehIKZxQdWLHumITmNZRJNXQfIP2VJAmxsbGIiopCZGQkoqOjERMTA5fLxdnr48ePAwAz+yqKgqKiIuTn53Pt269NxP1UDDKE2m6051FwqX///li3bh0qKyu5Dp0yrbIsY9SoURg2bBh69uzJSAKVKkB29fjjj+OZZ57BddddB7PZDIvFgrNnz6K0tDQokQEgKMAoBr7oOTHAFaobofcCQcjDwsKCkDEbNmzAJ598ApPJxDXLkiRh7NixUKlUaGho4L6epPexsbEoLCzEpEmTGNXg8/lw//334+DBg0FIGhobBaLEeyciIiLoGjweDwe+FEXBoEGDLiA6Ky4uRm1tLRRFQVhYGHQ6HaqqqrBlyxZoNBrMnz8farUaubm5sNvtOHv2LO8hmT8wYtMes2fPHvj9fsydOzcIPdHR0QGLxYKmpia43W5MnjyZdYOyqLW1tfjoo4/4jBs9ejSampowfPhwyLKMpKQk/nvlypWMEunTpw8jIU6fPo3Ozk7mWygrK4NarcZ7773H47nnnnswbdq0n6TXXQ5ql7BUVlZi3rx5QQdkaDQ0NNqVkZEBu92ORx99FHfeeSczmzkcDq4ZM5vNQRFZgj0Q1Jdu3MrKyl/y8rukS7rkX5TQbKrI6Oj3+3H//ffDarWiurqanalQ0pZQuBe9TqvVcobqkUceCerzRgRIPXv2xLJlyyDLMm677TYoioJPPvkEo0aNYoOlvLyciX8AMEkDwYYjIiIuqCEiJ8Pj8UCj0SAlJQUVFRWYP38+qqqqoFarsXHjRtx+++3cU1GtVqOurg5/+MMfAIAj+ZIkoaSkBP369WNjR6/Xw2g0sqFIRoJarUZ2djY7G4qi4I033kBZWRmsVisSEhKCYFpkHIn1XqIhV1lZibfffhsxMTFYtGgR6urqMGrUKBiNRnYYAAQFF8Q9nhwiCkhSRpjGTWgbg8GA2NhYZP7Aaix+DtUM0nlBjr1oLDc3N2PTpk2oqamBxWJBW1sbSktLeS7/HUKOpc1m4zrJ1tZWrv2MjY3l3oekL8nJyXC73QxvIwM0MjISdrsd4eHhsNlsQTBYKo+hmtRQFmQaC82pmB0iSN6OHTs4G0G/xfUlvcrMzET37t2hKAoGDBiAsWPH4qmnnsKf/vQnPPPMM3jqqafwzDPPYOHChZg2bRomTpyInJwcHD16FPv370dBQQGOHDkSlD0V54scSxJaR4Irh76WdOli1yu+luYktCY51FEl3aZADQUV1Go1Mn+o1du7dy8OHTrEfd1/DRLq+InzLd57InMy/RBBksVigaIoaGlpgdVqxe233w6Hw4HCwkIUFxdzdp/0jdZBXCvSQ7/fj6FDh+KJJ57AI488gtGjR2P69OkoLy/H6dOngzLqoeOntRZFDLL9o+sHzutkWFgYoqKi2CFzuVxoa2vDsmXL0NTUBL/fj/j4eKhUKqxduzbIpiXJyMiASqXi0jJZltlRV6vVmDt3Ls+nVqtlJ1ytDrDFi2VrNG90xqhUKnz99dfIzMxkltywsDAeBzEME
0pgzpw5OHPmDGRZxvTp07l3cUxMDPbt2wdFUTBz5kwOVrpcLuzfv5+TQyrVeWZ8ureJEXnkyJEYOnQo1Go1mpqa0NbWxucVsQWPGzeOkUEiRwOhSsrKyiDLMhISEiBJATbs4uJiGAwG1NfXo6SkhFFEiqJgy5YtbBdcccUVP0nfuxzULgkS8eal6A89Lho1dNAAASKcs2fPYsiQIdwzTqPRMDsjkVKQEaBWB3qtJSUl8QbY1NT0i11zl3RJl/w8uRjLLxn6gwcPRkxMTBAMLVQIKiUeqGJmcMeOHUGvb25uRnl5OXJyciDLMmbNmsXZ1tbWVob2KoqCmpqaoCg8kbdRDRFBikWHmdrNyLKMHj16IDw8HMnJyXjllVcwYsQIOBwOGI1GpKamcpaYGqebzeagTFFlZSViYmLg9/tRW1vL7JdRUVEMqdLr9fB4PGhsbERhYSEcDgdiY2OhVquRlpaGXr16wWQyMSRSp9NxnSrNOYk4x9RTlq5ty5YtnL0j5yBURMZPAEy8QbWXoY4TkSXR/9HR0QwBJqOWWJJpnCKUlMRiseDrr7/Grl278NZbb+GNN97454r3E+TMmTMwmUywWq2w2+04c+YMrFYr9Ho9mpqa+LwjeGxdXR0sFguTQ8myzFlvINAbHDjvJJjNZqSkpEBRFFitVjQ3NyM8PBzh4eFBhrmYBRWdTXpOUQKkKp999llQ5pLuD9FBIMbNUaNGcY0d6R7NMdULd+vWDT179kRrayv8fj/MZjOGDBmCIUOG8NrU1taiqKgIx44dQ11dHZfbiAFk8b4MlVCYb+g9L9a0kmMjOrei4yKiAhRF4cASOdcHDx78d6nGf50Q9weAC2y0UOdU3BuA8/sD1ZtGR0dzPXpMTAx3XxAzpeLv0PUiHaagj9FoxKBBgxAbG4vp06fDZrPhyJEjOHz4MNdki+gUcQ8i/RH3n9DvpP9D54MCfYTsCA8P51rvL774AkuWLOHXEakSfQ6VEyxbtgxqtRrFxcXcemXMmDFoaGhg3aV59/v9SE9P5zlWqVR8BtG80BoQuR1lGaOjowGASfrKyspQXl7O6MWsrCzo9XqsWrUKWq0WvXv3hslkwsCBAxEdHY1vv/0WGo0GRqMRiYmJQe29iouL4fV6cd9996GlpQU1NTU4ceIE8vPz0dDQwEmjcePGMZyZHisvL8eBAweg0+mYib65uRmnT59mFFRcXBx8Ph82bNjAc05teoiJODw8HN27d4fBYODgQElJCRoaGuD3+5GZmflT1L3LQe2SYJkzZ04QOxyAC/4WNyeVSoXU1FQMHz4cTz/9NB588EE2gmjDSU1NBQC+gQGwEUgkGV3SJV3y3ynkpIbCtG699VYYDIaLwnzJGaV9JBRmGB4ejs7OTs4gkrPwwQcfoEePHtizZw9cLhcSEhKgKAp2796N4cOH8+d4PB6Ul5ezcUF97ihjajAYgmo6xUAcRcmvvvpqbN68GQcPHkRdXR28Xi/q6+uRmZnJ7Q88Hg+efvpppKWlwe124/PPP2cjsbGxkXuc7t27l6+dDBtyKvx+PzQaDeLj45GXlwe/34/Ozk4cO3YMBw8exM6dO+H3+7lWljLMoY4LXYdofD3wwAPw+wMtu7KysoIIocTXktMjfg7V4hKPgE6nY4eGah4lSeL2I8nJyUhNTWXCICLSEfkNyGkV+QckSYLRaERHRwfP079biIiH5szv98NkMvH1ENkREZdoNBrOiGq1WjidTiQkJOCmm27iwCqdccQmS9lXql+WJCkI+iciCUS4Lhm5VH9JmWZyMM+dO4evvvoKer0ejY2NzAjdu3fvoHpB0l86ryMiItDZ2YmqqiocP34cZ86cwZQpU3DllVciJiYG8fHxCAsLQ0ZGBvr06YPLLrsMvXr1gsFggM1mQ2trKzo7O1FXV4fW1lZm1g51fOh7Q4MedG+LzpKos+K4RbiimJHdu3cvdu/e/auF7oYKZcdEJ16cZ9pbRTSE+LzNZkNMTAzrdUpKClpaWrglktVqZf30eDwcwKM1FOHaNAa9Xo/q6momIjKZTFCr1ZgyZQq/rri4GAcPHuQ9QxwbXQddH4AgHRGvne4hEc0hXq8sB5jZqc6ZHq+trb0guEL7dO/evWG325GTk4PGxkbe7wYMGIB9+/ZBq9XCZrNhwIABPA+SJDGihaDR1A+ZxkFZaioHiIiIwNChQ3lfSE1Nhcvlgs1mw+nTp9nZf/jhh6HT6djJHj58OFwuFyd+9uzZA0VR0LNnT/j9ftjtdhw9epShwT179mQiMtqzi4qK8OGHH0KtDrSKoTIamsf6+np4vV58++23AIDRo0fj5MmTOHr0KDZt2oR169ahtbUVHo8HKpUKL730EhRFQXx8PAYPHsxMxzabDVVVVbBarXC5XGhubgYArFy5koNnP0W6HNQuuUDuueceALjgYBGNGRJ6nqJNCQkJMJvNTGZAzaH1ej3fqPS5hH/vki7pkv9umThx4gX7gsfjwejRo9HZ2RnkNIlZlFCh11G2CTjfr5lKA/r06cN97ajHXFFRERPrhIWFYefOnQDOGzVZWVnsHNGeRQ6IiBghUgeCSnV0dKCzs5Nb1pSWluKaa66BVqtFS0sLlixZgpSUFIwdOxYAkJKSwuyN1dXVUKlUsNlszMKr1+sZKhVau6tWq3HppZdCq9XiySefZJbdyMhItLW18djpoBedkouJogRq/O+77z4YDAbMmTMHdrs9CGIpsuyGOrqiIUzr6Xa72UihejB6PZGmmM3mC/pgU/2u6JjQ97lcrqCs8P+GnDx5kg02ctCJaZPWnQws6lVK47XZbKwHgwYNgqIoXCtK/VPJcQvNDIvXK7KainMT6mTQGAwGA7f1qa+vR0VFBWpqapgIRRSx1hA478iEhYWhqakJ8fHxyM7ORlpaGgdH2tvbkZWVhYyMDOTl5eHWW2/Fgw8+iEcffRRz587FU089hfnz5+PZZ5/Fk08+iaamJs5inj59mr9DvB5xDKHP0bgulpETYaN79uzhny4JlotlE0n/gOAARej/ItyXdM7hcLC+SZKEuLg4tLa2oqysDJWVlSgrK2MUxY9JYWEh3G43CgoK+J4pLy9HfHw8l3wNHjwYgwcP5lp7co5C9ZhE1JF/BPml/Tt0PiirCQDR0dHM3q1Wq/Huu+9ix44djI4wm83weDwYO3YsQ3MlKcB1UF1dzcFRsUeoCIuns0wscwEQxL8CBFim33nnHR670WiEogRKAwimfvToUeh0OpjNZixcuBBarRapqamor6/ntmPl5eXQaDS48sorUVpaiqamJkYQ1dXVwWazoV+/fkFETjabDTabjXkCpkyZwizmiqJwUG3Pnj0wGAxwu90YOXIkevTowXuERqMJak9G9/rkyZNx//33o6WlBbGxscjIyEBCQgKX+tHetGLFih9dxx+TLge1S35UKEIcajReDMMPAOPGjcMrr7yChx56iKFeBoMBXq+XoQ0iLIWaoHdJl3TJf79cjOX3uuuuQ2xs7EVJVkKhfKFZVGoLcvjwYRiNRixZsgQPP/ww9u/fD0VR
8Oijj0JRFOzbtw+XXnopv++9995jCJPf72fmVeA8AZDT6Qz6PpUqwKxKWTBCfRCZ0oQJE5ggSK1Wo7y8HC+++CJqa2vRs2dPJCQkwGazcX3pt99+y/08n3/+eX4ffYcIlxRhatRWxO/3syNFPTOBAFyVAn5UuxrqXIZC8yRJwubNm+Hz+RjhQushZl2A87XENK7QWjFJkjjbSOPX6XRct6bX65GRkYHU1FQeGxlCBI+mufD5fJzl6Ozs5Oj9/5ZQpoAcN1pryk673W5ERkYiJSWFa9rIgaXWGqWlpUhPT+f6Uxp/XFwc0tPTg4IddFaKbdrE35RBF3/I+aU2F4RAuv766+Hz+dCzZ09uv+TxeHDq1Ck4nU6GHYvfTwGEYcOGITk5Gddeey169OgBlUqF8ePHY/Lkycy4XVxcDJVKhZqaGqxdu5bhwOJ5/eabb0KtViM8PBwmkwl2u51r6vx+Pzo6OoLqxUX9o8x9aAaF9IsIjn5N9aQ/R9RqNffwJaG9hMhrgPPZa1Eo47hz507s2rULsixj9erVsFqt6NevHwwGA3Q6HaKjo+HxeJCUlISqqipYLBZ2tsS9hvSzb9++6NGjB5Ndrlq1CiaTCSUlJRg9ejSGDx+OqKgoxMXFwWw2c6mCoihc/0ifLeqP6JiKrLSi0xca5KCxiY6rqKcWiwWyLKO0tBRvv/023nnnHTidThiNRuzbtw8ulwsvvfT/2jvz+Kqqa49/970ZbuY5YUggQMJkmBGEMpV5rIqiUK1VeIB9oFattlWrteW12Nfqo0+LT59Vsa0+lSqKiiAIqMyCCRkIBMlIJjInN3c+749kb88NoZVWSaj7+/nkc0/Onc45d91999rrt9Z6TPVpTkhI4IUXXuC3v/0tZ8+e9ZMiy3FDCKF6AnemZpFjTmtrK2PGjFGpJnJcCQgI4PPPPycwMJBp06bRp08fVT35yJEjhISEkJ2dze7du/n000/JzMxUxQEfeOABpbg4deoUzz//PEFBQdx9991KZSMXICsrK9m1axdWq5Xly5crZYvNZiM5OVlFlGU7npUrVwJt49SCBQuIjIz0axP52GOPKYWMXEiTv9uBgYEqlUCm+dTX16vfty+LdlA1nbJixQo/x9Sct9TxB1gOMCNHjqS2tpbGxkYSEhIoLS1VqyiGYZCUlKRa13SsKqjRaP41MDupXq+XRx99VEl9zHQswNHR0ZJ5dEIINmzYwHe+8x3+67/+i+rqapYsWYLT6cTn83H48GEyMjKU/FU6D3KckkXaZLQVUJVbgfMiWxMmTGDChAn89a9/xe12M2HCBHw+H2+++SZ33XUX4eHhbNmyhXPnzjFkyBBuv/12AgMDVUEdmZeZlpaGxWJRhaLkpDEwMJAnnnjCz3kJDg4mJiYGn8/Hhg0b1IRHTi5kj+k///nPKrorpb7maKoZs4z3tdde409/+hO9evVi+fLlfhEGQBVQkZMdeV3M+aQdIxXmwjnyvGJjY4mOjiY8PJzBgwergk/ydZxOp3LqzPUKvu4IKqBaOwQHBysJo5zwBwYG0tzc7BctbWlpUc+RLS02bdrE/fffryrWy2I+MrIQFhbmV2DK4XCo3GR57czXXX625msqHXZZ0T4oKIhz585RVVWF3W4nNjaW8vJy/vjHP+Lz+di/f79q6dQxYuZwOJTkUn7Oc+fOxTAMQkNDOXToEJmZmSQkJHD06FF69OjB0qVL6dmzp3ot8yLGypUrGT9+PAMHDiQxMRGbzUafPn2IjIxUkara2lrV/qO8vJyzZ88C/otT2in95+hsYcrn8/nZkrQzOe5IOzIMQ8lZx40bx4gRI9izZw9btmyhuLiYEydO4PP5KCkpIS0tjczMTGXn0sk11yORLVZ69+5Nr169WLlyJVOnTmXSpEmEh4cTGhqqJP7BwcGcPXuWqqoqSkpKqK+v5+jRoxw7dswvKmlWeZjnmOYWVhfCHEk1z1uDg4NVT14pgXW73aq11alTp5g+fboaj3w+H6dPn6a6uppbb72VuXPnMnjwYKUEBJSDKh1BOYabnWYZcTQMg4qKCgoLC1XUdcyYMVRVVZGVlUVWVhZer5fdu3eze/ducnNzeeutt7BYLPzqV7/yk/y/8sortLa2MmnSJLX4I6uFy6r0S5YsUYogi6Wtpczhw4eBtrHwN7/5Df3796dPnz7q85ELDFK2LNUTo0ePVn3Og4KClE8g1VFOp1PltMfExGAYhio8BagFv8cee+yi7Dzg7z9E801l+fLlbNq06TzpjvnH1ozVauUnP/kJDzzwABs3buS+++5TP97nzp0jISGBgIAASktLL/m5aDSaS4d0Ut977z1cLpfq5wbny9TMUkfz/VI2Kh2bESNGqIqAAwYMwO12k5OTo9pSGIbBBx98oJxQj8dDUVERQ4YM8WunIqNN5vePjo6mubkZi8XCgAEDqKurU/0XU1NT/XJTm5qa2L17t5JixcbGqnYiFktblcO77roLr9dLfn4+aWlpQFtbjMWLFzNgwADWr1+vVqJl5HTQoEGsWLGC+Ph49X4yHzc1NZWlS5fi8XhISUkhLy9PTVItli/ad5mvo5Q0y/07d+5kxYoVaqJnnlzKnCT5PHPuqHQwO4vUmiMrUgYshFDy5IiICOLj4ykpKVHOnlxxd7vdneYof53Ex8cr59wc4ZXXRO43Y540FxYWEhWOOXB/AAAgAElEQVQVpZxrm82mqswGBwcTGxvL2bNn1TWXeb3mBRJzVNpcOMjMuXPnOHjwoHIAy8vLSUhIoLa2lvDwcJxOJ5MmTVKT3KNHjzJmzBglNbRY2lrlhIaGUl1dTUlJCaNHj1aOBrR9F2bOnMnHH3/M559/zpgxY8jJyWHMmDFq4cThcKjiMNK2zdHyG2+8UdkYfBHFMr+PxGq1Mm/evK/4E/3mYVY0mKWl8j4ZPDDLvs1jrs/no6ysjOHDhzN69Gh8Ph8ZGRk0NjZSU1NDZGQkAMnJyUrGLhUH0p7NY4GUkPbu3dtvMaNHjx5kZWUxdOhQtm3bxm233UZraysZGRmkpKRgtVo5ceIEO3bswOPxkJ2dTUBAAOnp6cp+OtqVPGeJfC9z5Wfz+GduESVfxzAMZdMyH15ez/379xMUFMSTTz7JNddco4pIyQjsvHnz+MMf/oDValVjhfm7LsfLjjJgc1/Y6upqrFYriYmJtLS0qLZkzz//PI8//jg7d+5k7ty5NDQ0kJeXR0tLi3L6AgICaGlp4cyZMwQFBdHa2soDDzzAunXrsFgs5OXl8cQTT/DjH/+YFStWMHnyZPr27atyjt1uNy+++CLf//73GTZsmJ+KxWwfO3bsYNq0aeocLRYLY8eOVTnwYWFhNDQ08D//8z/ce++9eL1err/+etavX4/dbqdnz55A25jR3NzM6dOnEUKQlJR0UbauI6iav8ktt9zil7tgdk7NOVRy2+PxcOWVV/LYY4+xdu1alRQfFhaGx+NRPcw0Gs2/PvPmzVNyXDkRMDs75mrgHaOocls6EL/+9a/VSq1sYbJjxw5uvfVWVVU2Ly9PrWZ7vV6GDBkCoBx
Ic96TuYKwfG9ZxfLUqVMAqlz+li1bmDJlCk6nkxtvvFE5HLI40tatWyktLeU///M/yc/PV8WFnnvuOfV+UjYnV57lJFJOMDdv3qzyoszyUqvVqiJa+/fvV/n9cmW8qanJL1Igr6U8P/P5yv6oP/vZz4iJicHpdNLU1KTkxB1lp/Lay89KvlZ4eDijRo3ipptuYvXq1dx2222q36CUvgYGBqqKx8OHD6d///7KeQ0LCyM6OvqSOqeAktuZixM5nU6VS2qOTAUGBhIWFkb//v1ZtmwZ69at49vf/jb33nsvK1euVFFn+ScXSeS1kzYpF0A69oCVjqn5M5KTQVnY6uDBgxw8eJDU1FQ+/fRThg4dyscff6zqPZSXl5OTk8PgwYMB1OKM0+nEYrFw/PhxFaU+ceKE2i/PMzExkVmzZpGcnKzySmV+N0BoaKhfBOuWW27B4/Fw0003cd111/k5C+bCX/BF/uPChQtZuHChdk6/AiwWi1/et/wcJZ0pUTouALpcLtW7NzQ0lIKCAhW1DwwMJD4+nuTkZIqKijh16hRjx45VuYShoaHAF+oJq9VKZWUlcXFx6vjMzvHcuXPp06cPq1evpqCggOrqaqWsc7vdpKens3btWu677z5+9rOfkZCQQEtLC0VFRZ2eu/nWfG6dFeAxy52lw9jxMTKvUo6nskiZYRhs2bKFoUOHMmvWLHJycvB4PAQFBdHS0uJn97LYnnSQpTMZHx+vpPjHjx8nIiICgObmZr8c7hEjRiCEoKSkhE2bNqmWLj5fW9uc3//+9wQGBrJjxw51zTMzM/n4448JDg7mqquuIikpiV69ehETE4PFYmHjxo14PB4efPBBDMNQ55eens65c+dUkafvfe97atFB2sygQYPIyspS45pcrExISGDMmDHq+y2luxs3blTR15UrV6oIs91up7S0lPz8fBobG5Xi6WLQDqrm73LLLbec92Nqzh8zrzBbLBauu+46iouLGTp0KOHh4ZSUlHD48OGvrUKjRqPpvixcuJCTJ0+ydu1avxV2s/RTbssxBvwre8oojc1mY8qUKVitVv77v/+b5cuXq+d//vnnfhOV+vp69Xput5uIiAiampr8JnQej4fQ0FAaGhqwWCzMmDGDv/zlL+Tm5uJwOFi9ejVZWVk0NzczZcoUVQ3S7XYzduxYVq1ahcViYd++fWzYsIGmpiYiIiLUe8ybN0+Ni7JdTHZ2tsrFkTLflpaW81b2pbTUYrEwbdo0NWkyDIM+ffr4OTUycikxj9PmSN7p06e55557cDgcrFmzxq/CrnRuZIVic+sS+RpBQUFKShgXF6fk08nJyYwaNYrf/va33HzzzSonSbbHGThwIAMGDGDChAlERETwzjvvdFmBPCk1NOcDy2ufnJzM4sWL+cUvfsG6det44IEHWL58OcOGDUMIwahRo6itrWXo0KH4fD6Ki4sJCgqivLwci8VCbGysihLI6yYj6+AvJzdLsM1yWikTr6mpUcVpoqKiWLlyJaWlpaSlpanvhdvtJikpSfX0NS/qyF6niYmJHD16VKkR5Ps4nU6OHz/Om2++SVJSEkK09WQ8fvw4TU1NSopo/gO49dZbAfwccXPUWeaid8xJ1/zzmBevzPOujuoTud98v3nhr1evXuzfv5/m5mbGjRuHw+FQ9l1dXc2ZM2cIDg4mLi7Ob3yCL1IvgoKCKCwsxO12q4qvZsxpAdnZ2Xi9XrKzs1Wk3uVyUVZWxsmTJ2loaCAmJoYHHniA++67j4ceekjVLZGLR+aqvR3P0fyb0fFPFpcz/0nM5ySEICIiApvNpqKNFouF3bt3c/ToUQ4dOqQWaeSYLqX/8r1lbQO50JOcnExrays9evRQ75WYmAi0RWsnT57MypUrcTqd1NXVcebMGQC2bt2qvnPnzp1j165d2O12IiMj6dGjB1FRUezdu5dPPvkEh8PBnDlzVGpJdXU1NTU1AKr3qDymwsJCGhsbefnll7FYLIwePZrVq1f7XYPAwEACAwNVoUF5H8CwYcNUIUDp9Mo+rOaFuLKyMs6cOUN2djYNDQ1UVVVRXFzMvn37vqypA1riq/mSmAs9mAc+8wqe3Pb5fFx11VU88sgj3HnnnVx77bVddtwajabrWbNmDQAbN270k0CBfxXKzlZYQ0NDqampIT4+njVr1mCxWKipqaGhoQGbzYbD4SAwMJA333xTvZbValXFmaRMSbY9Ma+oy6ihx+MhOTmZd955R/WCS21vJyPlvx6Ph927d/tJldxuN8XFxUqKO2rUKO644w58vrYedwcOHFDH9MMf/hCfz8f//d//+VV8tFqtqu+mdAClEyFlozKyGhwcTG1tLQBJSUmqRURCQgIej0dFNjo6q/LW4/H4raAPGzaMvLw8WltbVVTaXKGxsyisLO5RXl5OREQEgwYNUufvdDoZNmwY69atIzIykoKCAvr166fkxEFBQXz44YeMHDny6zCzL4WUu06ZMoXBgweriabErBIyOwGy4qcQgrfeeovRo0eza9cuFRUGVC6qnBCar11sbCw1NTXq8zXLcc1Oqvn3tbW1VfVujIqKIi0tjZiYGNVXsE+fPmRkZKjP3mq14nA4KCoqUsWojhw5QlJSEj6fj7179zJgwACam5ux2WwcOnSIFStW4PV62bNnD3379qW4uJjKykqmTZtGXV0dAQEBREZG+qkeOqId0UtHR4fUbC9mp9Cckyr3Wa1WnE4nL730EkuWLOGTTz5h0aJFjBw5ktLSUhwOB3l5eURGRnLttdeqgj5yoUTas9PppKysjCeffJI77riDvLw8evXqxbFjx5SUXC6uSZs5efIksbGxFBUVYbVaOXv2LOnp6QQGBlJUVKSq5AohCA8P54YbbiAgIIDbbruNwsJCVSHWfA0uNNaZt82P6+iQXqiSsBBCVcuWzviePXs4cOAAo0ePxm63q/sTEhLUgqOsThwWFqbysQcNGkRVVRW1tbVERkaSmJhIRUWFUlGkpaWRmppKRUUFtbW1fPDBB8yaNYsrrriCmpoa7HY7+/btY8qUKTz//PP86Ec/UrLqAwcOMH78eJYsWcKHH36oPt+AgAA2b97M4sWLmT9/PgcOHFDR7uDgYIqKimhtbSUoKEgVTpNKopSUFNLT03nrrbfUYrD83fR6vQwYMICKigpeffXVTu3z6quv/gct+3y0g6r5UixbtoyXX37Zb6CD8/twyR+vRYsW8fDDD2vnVKPRKD788ENmzJihnMULIR0COWGRjc2feuopsrKyGDJkCFarlSeeeIKWlhZ69OjhN1FpaWlRuaFOp5OwsDBVMKLj5E7mXs6ZM4eioiLlhEpJ1vHjx/nlL3+Jz+fjtddew2KxkJaWxr/9278BsHbtWgYNGsSoUaOIiorC4XDw+eefq8qo0tGUTmbH4lBmyaV5ZV42f4+JiVFOh2G09fSTzndYWJgq2COdFIm5Wq/5vL1eL7feeitPP/0011xzDSEhIezcuZOwsDAlz3I4HKrJvFm2KiedsmhTQECAin4I0ZaTWlFRQVpaGi6Xi759+6pCPXJCtmDBgq
<base64-encoded PNG data elided: matplotlib image grid produced by the RandomAffine demo cell whose source follows>AAAAElFTkS
uQmCC\n", - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "transformed_images = [None]*5\n", - "to_tensor = transforms.ToTensor()\n", - "for i in range(5):\n", - " t = transforms.RandomAffine(degrees=45, translate=(0.2, 0.2), scale=(0.7, 1.2), shear=10, fillcolor=255)\n", - " transformed_images[i] = to_tensor(t(pil_img))\n", - "plt.figure(figsize=(16, 16))\n", - "show(tutils.make_grid(transformed_images))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.5.2" - } - }, - "nbformat": 4, - "nbformat_minor": 1 -} diff --git a/test/smoke_test.py b/test/smoke_test.py index c3a4bdd19d6..38f0054e6b6 100644 --- a/test/smoke_test.py +++ b/test/smoke_test.py @@ -1,4 +1,146 @@ +"""Run smoke tests""" + +import os +import sys +from pathlib import Path + import torch import torchvision -import torchvision.datasets as dset -import torchvision.transforms +from torchvision.io import decode_avif, decode_heic, decode_image, decode_jpeg, read_file +from torchvision.models import resnet50, ResNet50_Weights + + +SCRIPT_DIR = Path(__file__).parent + + +def smoke_test_torchvision() -> None: + print( + "Is torchvision usable?", + all(x is not None for x in [torch.ops.image.decode_png, torch.ops.torchvision.roi_align]), + ) + + +def smoke_test_torchvision_read_decode() -> None: + img_jpg = decode_image(str(SCRIPT_DIR / "assets" / "encode_jpeg" / "grace_hopper_517x606.jpg")) + if img_jpg.shape != (3, 606, 517): + raise RuntimeError(f"Unexpected shape of img_jpg: {img_jpg.shape}") + + img_png = decode_image(str(SCRIPT_DIR / "assets" / "interlaced_png" / "wizard_low.png")) + if img_png.shape != (4, 471, 354): + raise RuntimeError(f"Unexpected shape of img_png: {img_png.shape}") + + img_webp = decode_image(str(SCRIPT_DIR / "assets/fakedata/logos/rgb_pytorch.webp")) + if img_webp.shape != (3, 100, 100): + raise RuntimeError(f"Unexpected shape of img_webp: {img_webp.shape}") + + if sys.platform == "linux": + pass + # TODO: Fix/uncomment below (the TODO below is mostly accurate but we're + # still observing some failures on some CUDA jobs. Most are working.) + # if torch.cuda.is_available(): + # # TODO: For whatever reason this only passes on the runners that + # # support CUDA. + # # Strangely, on the CPU runners where this fails, the AVIF/HEIC + # # tests (ran with pytest) are passing. This is likely related to a + # # libcxx symbol thing, and the proper libstdc++.so get loaded only + # # with pytest? Ugh. 
+ # img_avif = decode_avif(read_file(str(SCRIPT_DIR / "assets/fakedata/logos/rgb_pytorch.avif"))) + # if img_avif.shape != (3, 100, 100): + # raise RuntimeError(f"Unexpected shape of img_avif: {img_avif.shape}") + + # img_heic = decode_heic( + # read_file(str(SCRIPT_DIR / "assets/fakedata/logos/rgb_pytorch_incorrectly_encoded_but_who_cares.heic")) + # ) + # if img_heic.shape != (3, 100, 100): + # raise RuntimeError(f"Unexpected shape of img_heic: {img_heic.shape}") + else: + try: + decode_avif(str(SCRIPT_DIR / "assets/fakedata/logos/rgb_pytorch.avif")) + except RuntimeError as e: + assert "torchvision-extra-decoders" in str(e) + + try: + decode_heic(str(SCRIPT_DIR / "assets/fakedata/logos/rgb_pytorch_incorrectly_encoded_but_who_cares.heic")) + except RuntimeError as e: + assert "torchvision-extra-decoders" in str(e) + + +def smoke_test_torchvision_decode_jpeg(device: str = "cpu"): + img_jpg_data = read_file(str(SCRIPT_DIR / "assets" / "encode_jpeg" / "grace_hopper_517x606.jpg")) + img_jpg = decode_jpeg(img_jpg_data, device=device) + if img_jpg.shape != (3, 606, 517): + raise RuntimeError(f"Unexpected shape of img_jpg: {img_jpg.shape}") + + +def smoke_test_compile() -> None: + try: + model = resnet50().cuda() + model = torch.compile(model) + x = torch.randn(1, 3, 224, 224, device="cuda") + out = model(x) + print(f"torch.compile model output: {out.shape}") + except RuntimeError: + if sys.platform == "win32": + print("Successfully caught torch.compile RuntimeError on win") + else: + raise + + +def smoke_test_torchvision_resnet50_classify(device: str = "cpu") -> None: + img = decode_image(str(SCRIPT_DIR / ".." / "gallery" / "assets" / "dog2.jpg")).to(device) + + # Step 1: Initialize model with the best available weights + weights = ResNet50_Weights.DEFAULT + model = resnet50(weights=weights, progress=False).to(device) + model.eval() + + # Step 2: Initialize the inference transforms + preprocess = weights.transforms(antialias=(device != "mps")) # antialias not supported on MPS + + # Step 3: Apply inference preprocessing transforms + batch = preprocess(img).unsqueeze(0) + + # Step 4: Use the model and print the predicted category + prediction = model(batch).squeeze(0).softmax(0) + class_id = prediction.argmax().item() + score = prediction[class_id].item() + category_name = weights.meta["categories"][class_id] + expected_category = "German shepherd" + print(f"{category_name} ({device}): {100 * score:.1f}%") + if category_name != expected_category: + raise RuntimeError(f"Failed ResNet50 classify {category_name} Expected: {expected_category}") + + +def main() -> None: + print(f"torchvision: {torchvision.__version__}") + print(f"torch.cuda.is_available: {torch.cuda.is_available()}") + + print(f"{torch.ops.image._jpeg_version() = }") + if not torch.ops.image._is_compiled_against_turbo(): + msg = "Torchvision wasn't compiled against libjpeg-turbo" + if os.getenv("IS_M1_CONDA_BUILD_JOB") == "1": + # When building the conda package on M1, it's difficult to enforce + # that we build against turbo due to interactions with the libwebp + # package. So we just accept it, instead of raising an error. 
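+ # On M1 conda builds we therefore only warn; every other build fails hard below.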
+ print(msg) + else: + raise ValueError(msg) + + smoke_test_torchvision() + smoke_test_torchvision_read_decode() + smoke_test_torchvision_resnet50_classify() + smoke_test_torchvision_decode_jpeg() + if torch.cuda.is_available(): + smoke_test_torchvision_decode_jpeg("cuda") + smoke_test_torchvision_resnet50_classify("cuda") + + # TODO: remove once pytorch/pytorch#110436 is resolved + if sys.version_info < (3, 12, 0): + smoke_test_compile() + + if torch.backends.mps.is_available(): + smoke_test_torchvision_resnet50_classify("mps") + + +if __name__ == "__main__": + main() diff --git a/test/test_architecture_ops.py b/test/test_architecture_ops.py new file mode 100644 index 00000000000..32ad1a32f89 --- /dev/null +++ b/test/test_architecture_ops.py @@ -0,0 +1,46 @@ +import unittest + +import pytest +import torch + +from torchvision.models.maxvit import SwapAxes, WindowDepartition, WindowPartition + + +class MaxvitTester(unittest.TestCase): + def test_maxvit_window_partition(self): + input_shape = (1, 3, 224, 224) + partition_size = 7 + n_partitions = input_shape[3] // partition_size + + x = torch.randn(input_shape) + + partition = WindowPartition() + departition = WindowDepartition() + + x_hat = partition(x, partition_size) + x_hat = departition(x_hat, partition_size, n_partitions, n_partitions) + + torch.testing.assert_close(x, x_hat) + + def test_maxvit_grid_partition(self): + input_shape = (1, 3, 224, 224) + partition_size = 7 + n_partitions = input_shape[3] // partition_size + + x = torch.randn(input_shape) + pre_swap = SwapAxes(-2, -3) + post_swap = SwapAxes(-2, -3) + + partition = WindowPartition() + departition = WindowDepartition() + + x_hat = partition(x, n_partitions) + x_hat = pre_swap(x_hat) + x_hat = post_swap(x_hat) + x_hat = departition(x_hat, n_partitions, partition_size, partition_size) + + torch.testing.assert_close(x, x_hat) + + +if __name__ == "__main__": + pytest.main([__file__]) diff --git a/test/test_backbone_utils.py b/test/test_backbone_utils.py index 41d54514568..c64e27f14ac 100644 --- a/test/test_backbone_utils.py +++ b/test/test_backbone_utils.py @@ -1,25 +1,336 @@ -import unittest - +import random +from copy import deepcopy +from itertools import chain +from typing import Mapping, Sequence +import pytest import torch -from torchvision.models.detection.backbone_utils import resnet_fpn_backbone - - -class ResnetFPNBackboneTester(unittest.TestCase): - @classmethod - def setUpClass(cls): - cls.dtype = torch.float32 - - def test_resnet18_fpn_backbone(self): - device = torch.device('cpu') - x = torch.rand(1, 3, 300, 300, dtype=self.dtype, device=device) - resnet18_fpn = resnet_fpn_backbone(backbone_name='resnet18', pretrained=False) - y = resnet18_fpn(x) - self.assertEqual(list(y.keys()), [0, 1, 2, 3, 'pool']) - - def test_resnet50_fpn_backbone(self): - device = torch.device('cpu') - x = torch.rand(1, 3, 300, 300, dtype=self.dtype, device=device) - resnet50_fpn = resnet_fpn_backbone(backbone_name='resnet50', pretrained=False) - y = resnet50_fpn(x) - self.assertEqual(list(y.keys()), [0, 1, 2, 3, 'pool']) +from common_utils import set_rng_seed +from torchvision import models +from torchvision.models._utils import IntermediateLayerGetter +from torchvision.models.detection.backbone_utils import BackboneWithFPN, mobilenet_backbone, resnet_fpn_backbone +from torchvision.models.feature_extraction import create_feature_extractor, get_graph_node_names + + +@pytest.mark.parametrize("backbone_name", ("resnet18", "resnet50")) +def test_resnet_fpn_backbone(backbone_name): + x = 
torch.rand(1, 3, 300, 300, dtype=torch.float32, device="cpu") + model = resnet_fpn_backbone(backbone_name=backbone_name, weights=None) + assert isinstance(model, BackboneWithFPN) + y = model(x) + assert list(y.keys()) == ["0", "1", "2", "3", "pool"] + + with pytest.raises(ValueError, match=r"Trainable layers should be in the range"): + resnet_fpn_backbone(backbone_name=backbone_name, weights=None, trainable_layers=6) + with pytest.raises(ValueError, match=r"Each returned layer should be in the range"): + resnet_fpn_backbone(backbone_name=backbone_name, weights=None, returned_layers=[0, 1, 2, 3]) + with pytest.raises(ValueError, match=r"Each returned layer should be in the range"): + resnet_fpn_backbone(backbone_name=backbone_name, weights=None, returned_layers=[2, 3, 4, 5]) + + +@pytest.mark.parametrize("backbone_name", ("mobilenet_v2", "mobilenet_v3_large", "mobilenet_v3_small")) +def test_mobilenet_backbone(backbone_name): + with pytest.raises(ValueError, match=r"Trainable layers should be in the range"): + mobilenet_backbone(backbone_name=backbone_name, weights=None, fpn=False, trainable_layers=-1) + with pytest.raises(ValueError, match=r"Each returned layer should be in the range"): + mobilenet_backbone(backbone_name=backbone_name, weights=None, fpn=True, returned_layers=[-1, 0, 1, 2]) + with pytest.raises(ValueError, match=r"Each returned layer should be in the range"): + mobilenet_backbone(backbone_name=backbone_name, weights=None, fpn=True, returned_layers=[3, 4, 5, 6]) + model_fpn = mobilenet_backbone(backbone_name=backbone_name, weights=None, fpn=True) + assert isinstance(model_fpn, BackboneWithFPN) + model = mobilenet_backbone(backbone_name=backbone_name, weights=None, fpn=False) + assert isinstance(model, torch.nn.Sequential) + + +# Needed by TestFxFeatureExtraction.test_leaf_module_and_function +def leaf_function(x): + return int(x) + + +# Needed by TestFXFeatureExtraction. Checking that node naming conventions +# are respected. 
Particularly the index postfix of repeated node names +class TestSubModule(torch.nn.Module): + def __init__(self): + super().__init__() + self.relu = torch.nn.ReLU() + + def forward(self, x): + x = x + 1 + x = x + 1 + x = self.relu(x) + x = self.relu(x) + return x + + +class TestModule(torch.nn.Module): + def __init__(self): + super().__init__() + self.submodule = TestSubModule() + self.relu = torch.nn.ReLU() + + def forward(self, x): + x = self.submodule(x) + x = x + 1 + x = x + 1 + x = self.relu(x) + x = self.relu(x) + return x + + +test_module_nodes = [ + "x", + "submodule.add", + "submodule.add_1", + "submodule.relu", + "submodule.relu_1", + "add", + "add_1", + "relu", + "relu_1", +] + + +class TestFxFeatureExtraction: + inp = torch.rand(1, 3, 224, 224, dtype=torch.float32, device="cpu") + model_defaults = {"num_classes": 1} + leaf_modules = [] + + def _create_feature_extractor(self, *args, **kwargs): + """ + Apply leaf modules + """ + tracer_kwargs = {} + if "tracer_kwargs" not in kwargs: + tracer_kwargs = {"leaf_modules": self.leaf_modules} + else: + tracer_kwargs = kwargs.pop("tracer_kwargs") + return create_feature_extractor(*args, **kwargs, tracer_kwargs=tracer_kwargs, suppress_diff_warning=True) + + def _get_return_nodes(self, model): + set_rng_seed(0) + exclude_nodes_filter = [ + "getitem", + "floordiv", + "size", + "chunk", + "_assert", + "eq", + "dim", + "getattr", + ] + train_nodes, eval_nodes = get_graph_node_names( + model, tracer_kwargs={"leaf_modules": self.leaf_modules}, suppress_diff_warning=True + ) + # Get rid of any nodes that don't return tensors as they cause issues + # when testing backward pass. + train_nodes = [n for n in train_nodes if not any(x in n for x in exclude_nodes_filter)] + eval_nodes = [n for n in eval_nodes if not any(x in n for x in exclude_nodes_filter)] + return random.sample(train_nodes, 10), random.sample(eval_nodes, 10) + + @pytest.mark.parametrize("model_name", models.list_models(models)) + def test_build_fx_feature_extractor(self, model_name): + set_rng_seed(0) + model = models.get_model(model_name, **self.model_defaults).eval() + train_return_nodes, eval_return_nodes = self._get_return_nodes(model) + # Check that it works with both a list and dict for return nodes + self._create_feature_extractor( + model, train_return_nodes={v: v for v in train_return_nodes}, eval_return_nodes=eval_return_nodes + ) + self._create_feature_extractor( + model, train_return_nodes=train_return_nodes, eval_return_nodes=eval_return_nodes + ) + # Check must specify return nodes + with pytest.raises(ValueError): + self._create_feature_extractor(model) + # Check return_nodes and train_return_nodes / eval_return nodes + # mutual exclusivity + with pytest.raises(ValueError): + self._create_feature_extractor( + model, return_nodes=train_return_nodes, train_return_nodes=train_return_nodes + ) + # Check train_return_nodes / eval_return nodes must both be specified + with pytest.raises(ValueError): + self._create_feature_extractor(model, train_return_nodes=train_return_nodes) + # Check invalid node name raises ValueError + with pytest.raises(ValueError): + # First just double check that this node really doesn't exist + if not any(n.startswith("l") or n.startswith("l.") for n in chain(train_return_nodes, eval_return_nodes)): + self._create_feature_extractor(model, train_return_nodes=["l"], eval_return_nodes=["l"]) + else: # otherwise skip this check + raise ValueError + + def test_node_name_conventions(self): + model = TestModule() + train_nodes, _ = 
get_graph_node_names(model) + assert all(a == b for a, b in zip(train_nodes, test_module_nodes)) + + @pytest.mark.parametrize("model_name", models.list_models(models)) + def test_forward_backward(self, model_name): + model = models.get_model(model_name, **self.model_defaults).train() + train_return_nodes, eval_return_nodes = self._get_return_nodes(model) + model = self._create_feature_extractor( + model, train_return_nodes=train_return_nodes, eval_return_nodes=eval_return_nodes + ) + out = model(self.inp) + out_agg = 0 + for node_out in out.values(): + if isinstance(node_out, Sequence): + out_agg += sum(o.float().mean() for o in node_out if o is not None) + elif isinstance(node_out, Mapping): + out_agg += sum(o.float().mean() for o in node_out.values() if o is not None) + else: + # Assume that the only other alternative at this point is a Tensor + out_agg += node_out.float().mean() + out_agg.backward() + + def test_feature_extraction_methods_equivalence(self): + model = models.resnet18(**self.model_defaults).eval() + return_layers = {"layer1": "layer1", "layer2": "layer2", "layer3": "layer3", "layer4": "layer4"} + + ilg_model = IntermediateLayerGetter(model, return_layers).eval() + fx_model = self._create_feature_extractor(model, return_layers) + + # Check that we have same parameters + for (n1, p1), (n2, p2) in zip(ilg_model.named_parameters(), fx_model.named_parameters()): + assert n1 == n2 + assert p1.equal(p2) + + # And that outputs match + with torch.no_grad(): + ilg_out = ilg_model(self.inp) + fgn_out = fx_model(self.inp) + assert all(k1 == k2 for k1, k2 in zip(ilg_out.keys(), fgn_out.keys())) + for k in ilg_out.keys(): + assert ilg_out[k].equal(fgn_out[k]) + + @pytest.mark.parametrize("model_name", models.list_models(models)) + def test_jit_forward_backward(self, model_name): + set_rng_seed(0) + model = models.get_model(model_name, **self.model_defaults).train() + train_return_nodes, eval_return_nodes = self._get_return_nodes(model) + model = self._create_feature_extractor( + model, train_return_nodes=train_return_nodes, eval_return_nodes=eval_return_nodes + ) + model = torch.jit.script(model) + fgn_out = model(self.inp) + out_agg = 0 + for node_out in fgn_out.values(): + if isinstance(node_out, Sequence): + out_agg += sum(o.float().mean() for o in node_out if o is not None) + elif isinstance(node_out, Mapping): + out_agg += sum(o.float().mean() for o in node_out.values() if o is not None) + else: + # Assume that the only other alternative at this point is a Tensor + out_agg += node_out.float().mean() + out_agg.backward() + + def test_train_eval(self): + class TestModel(torch.nn.Module): + def __init__(self): + super().__init__() + self.dropout = torch.nn.Dropout(p=1.0) + + def forward(self, x): + x = x.float().mean() + x = self.dropout(x) # dropout + if self.training: + x += 100 # add + else: + x *= 0 # mul + x -= 0 # sub + return x + + model = TestModel() + + train_return_nodes = ["dropout", "add", "sub"] + eval_return_nodes = ["dropout", "mul", "sub"] + + def checks(model, mode): + with torch.no_grad(): + out = model(torch.ones(10, 10)) + if mode == "train": + # Check that dropout is respected + assert out["dropout"].item() == 0 + # Check that control flow dependent on training_mode is respected + assert out["sub"].item() == 100 + assert "add" in out + assert "mul" not in out + elif mode == "eval": + # Check that dropout is respected + assert out["dropout"].item() == 1 + # Check that control flow dependent on training_mode is respected + assert out["sub"].item() == 0 + assert 
"mul" in out + assert "add" not in out + + # Starting from train mode + model.train() + fx_model = self._create_feature_extractor( + model, train_return_nodes=train_return_nodes, eval_return_nodes=eval_return_nodes + ) + # Check that the models stay in their original training state + assert model.training + assert fx_model.training + # Check outputs + checks(fx_model, "train") + # Check outputs after switching to eval mode + fx_model.eval() + checks(fx_model, "eval") + + # Starting from eval mode + model.eval() + fx_model = self._create_feature_extractor( + model, train_return_nodes=train_return_nodes, eval_return_nodes=eval_return_nodes + ) + # Check that the models stay in their original training state + assert not model.training + assert not fx_model.training + # Check outputs + checks(fx_model, "eval") + # Check outputs after switching to train mode + fx_model.train() + checks(fx_model, "train") + + def test_leaf_module_and_function(self): + class LeafModule(torch.nn.Module): + def forward(self, x): + # This would raise a TypeError if it were not in a leaf module + int(x.shape[0]) + return torch.nn.functional.relu(x + 4) + + class TestModule(torch.nn.Module): + def __init__(self): + super().__init__() + self.conv = torch.nn.Conv2d(3, 1, 3) + self.leaf_module = LeafModule() + + def forward(self, x): + leaf_function(x.shape[0]) + x = self.conv(x) + return self.leaf_module(x) + + model = self._create_feature_extractor( + TestModule(), + return_nodes=["leaf_module"], + tracer_kwargs={"leaf_modules": [LeafModule], "autowrap_functions": [leaf_function]}, + ).train() + + # Check that LeafModule is not in the list of nodes + assert "relu" not in [str(n) for n in model.graph.nodes] + assert "leaf_module" in [str(n) for n in model.graph.nodes] + + # Check forward + out = model(self.inp) + # And backward + out["leaf_module"].float().mean().backward() + + def test_deepcopy(self): + # Non-regression test for https://github.com/pytorch/vision/issues/8634 + model = models.efficientnet_b3(weights=None) + extractor = create_feature_extractor(model=model, return_nodes={"classifier.0": "out"}) + + extractor.eval() + extractor.train() + extractor = deepcopy(extractor) + extractor.eval() + extractor.train() diff --git a/test/test_cpp_models.py b/test/test_cpp_models.py deleted file mode 100644 index b6654a0278d..00000000000 --- a/test/test_cpp_models.py +++ /dev/null @@ -1,150 +0,0 @@ -import torch -import os -import unittest -from torchvision import models, transforms -import sys - -from PIL import Image -import torchvision.transforms.functional as F - -try: - from torchvision import _C_tests -except ImportError: - _C_tests = None - - -def process_model(model, tensor, func, name): - model.eval() - traced_script_module = torch.jit.trace(model, tensor) - traced_script_module.save("model.pt") - - py_output = model.forward(tensor) - cpp_output = func("model.pt", tensor) - - assert torch.allclose(py_output, cpp_output), 'Output mismatch of ' + name + ' models' - - -def read_image1(): - image_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'assets', 'grace_hopper_517x606.jpg') - image = Image.open(image_path) - image = image.resize((224, 224)) - x = F.to_tensor(image) - return x.view(1, 3, 224, 224) - - -def read_image2(): - image_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'assets', 'grace_hopper_517x606.jpg') - image = Image.open(image_path) - image = image.resize((299, 299)) - x = F.to_tensor(image) - x = x.view(1, 3, 299, 299) - return torch.cat([x, x], 0) - - 
-@unittest.skipIf( - sys.platform == "darwin" or True, - "C++ models are broken on OS X at the moment, and there's a BC breakage on master; " - "see https://github.com/pytorch/vision/issues/1191") -class Tester(unittest.TestCase): - pretrained = False - image = read_image1() - - def test_alexnet(self): - process_model(models.alexnet(self.pretrained), self.image, _C_tests.forward_alexnet, 'Alexnet') - - def test_vgg11(self): - process_model(models.vgg11(self.pretrained), self.image, _C_tests.forward_vgg11, 'VGG11') - - def test_vgg13(self): - process_model(models.vgg13(self.pretrained), self.image, _C_tests.forward_vgg13, 'VGG13') - - def test_vgg16(self): - process_model(models.vgg16(self.pretrained), self.image, _C_tests.forward_vgg16, 'VGG16') - - def test_vgg19(self): - process_model(models.vgg19(self.pretrained), self.image, _C_tests.forward_vgg19, 'VGG19') - - def test_vgg11_bn(self): - process_model(models.vgg11_bn(self.pretrained), self.image, _C_tests.forward_vgg11bn, 'VGG11BN') - - def test_vgg13_bn(self): - process_model(models.vgg13_bn(self.pretrained), self.image, _C_tests.forward_vgg13bn, 'VGG13BN') - - def test_vgg16_bn(self): - process_model(models.vgg16_bn(self.pretrained), self.image, _C_tests.forward_vgg16bn, 'VGG16BN') - - def test_vgg19_bn(self): - process_model(models.vgg19_bn(self.pretrained), self.image, _C_tests.forward_vgg19bn, 'VGG19BN') - - def test_resnet18(self): - process_model(models.resnet18(self.pretrained), self.image, _C_tests.forward_resnet18, 'Resnet18') - - def test_resnet34(self): - process_model(models.resnet34(self.pretrained), self.image, _C_tests.forward_resnet34, 'Resnet34') - - def test_resnet50(self): - process_model(models.resnet50(self.pretrained), self.image, _C_tests.forward_resnet50, 'Resnet50') - - def test_resnet101(self): - process_model(models.resnet101(self.pretrained), self.image, _C_tests.forward_resnet101, 'Resnet101') - - def test_resnet152(self): - process_model(models.resnet152(self.pretrained), self.image, _C_tests.forward_resnet152, 'Resnet152') - - def test_resnext50_32x4d(self): - process_model(models.resnext50_32x4d(), self.image, _C_tests.forward_resnext50_32x4d, 'ResNext50_32x4d') - - def test_resnext101_32x8d(self): - process_model(models.resnext101_32x8d(), self.image, _C_tests.forward_resnext101_32x8d, 'ResNext101_32x8d') - - def test_wide_resnet50_2(self): - process_model(models.wide_resnet50_2(), self.image, _C_tests.forward_wide_resnet50_2, 'WideResNet50_2') - - def test_wide_resnet101_2(self): - process_model(models.wide_resnet101_2(), self.image, _C_tests.forward_wide_resnet101_2, 'WideResNet101_2') - - def test_squeezenet1_0(self): - process_model(models.squeezenet1_0(self.pretrained), self.image, - _C_tests.forward_squeezenet1_0, 'Squeezenet1.0') - - def test_squeezenet1_1(self): - process_model(models.squeezenet1_1(self.pretrained), self.image, - _C_tests.forward_squeezenet1_1, 'Squeezenet1.1') - - def test_densenet121(self): - process_model(models.densenet121(self.pretrained), self.image, _C_tests.forward_densenet121, 'Densenet121') - - def test_densenet169(self): - process_model(models.densenet169(self.pretrained), self.image, _C_tests.forward_densenet169, 'Densenet169') - - def test_densenet201(self): - process_model(models.densenet201(self.pretrained), self.image, _C_tests.forward_densenet201, 'Densenet201') - - def test_densenet161(self): - process_model(models.densenet161(self.pretrained), self.image, _C_tests.forward_densenet161, 'Densenet161') - - def test_mobilenet_v2(self): - 
process_model(models.mobilenet_v2(self.pretrained), self.image, _C_tests.forward_mobilenetv2, 'MobileNet') - - def test_googlenet(self): - process_model(models.googlenet(self.pretrained), self.image, _C_tests.forward_googlenet, 'GoogLeNet') - - def test_mnasnet0_5(self): - process_model(models.mnasnet0_5(self.pretrained), self.image, _C_tests.forward_mnasnet0_5, 'MNASNet0_5') - - def test_mnasnet0_75(self): - process_model(models.mnasnet0_75(self.pretrained), self.image, _C_tests.forward_mnasnet0_75, 'MNASNet0_75') - - def test_mnasnet1_0(self): - process_model(models.mnasnet1_0(self.pretrained), self.image, _C_tests.forward_mnasnet1_0, 'MNASNet1_0') - - def test_mnasnet1_3(self): - process_model(models.mnasnet1_3(self.pretrained), self.image, _C_tests.forward_mnasnet1_3, 'MNASNet1_3') - - def test_inception_v3(self): - self.image = read_image2() - process_model(models.inception_v3(self.pretrained), self.image, _C_tests.forward_inceptionv3, 'Inceptionv3') - - -if __name__ == '__main__': - unittest.main() diff --git a/test/test_datasets.py b/test/test_datasets.py index 2410f18de09..7e91571744a 100644 --- a/test/test_datasets.py +++ b/test/test_datasets.py @@ -1,215 +1,3549 @@ -import sys +import bz2 +import contextlib +import csv +import io +import itertools +import json import os +import pathlib +import pickle +import random +import re +import shutil +import string import unittest -import mock +import xml.etree.ElementTree as ET +import zipfile +from typing import Callable, Tuple, Union + +import datasets_utils import numpy as np import PIL -from PIL import Image -from torch._utils_internal import get_file_path_2 -import torchvision -from common_utils import get_tmp_dir -from fakedata_generation import mnist_root, cifar_root, imagenet_root, \ - cityscapes_root, svhn_root - - -try: - import scipy - HAS_SCIPY = True -except ImportError: - HAS_SCIPY = False - - -class Tester(unittest.TestCase): - def generic_classification_dataset_test(self, dataset, num_images=1): - self.assertEqual(len(dataset), num_images) - img, target = dataset[0] - self.assertTrue(isinstance(img, PIL.Image.Image)) - self.assertTrue(isinstance(target, int)) - - def generic_segmentation_dataset_test(self, dataset, num_images=1): - self.assertEqual(len(dataset), num_images) - img, target = dataset[0] - self.assertTrue(isinstance(img, PIL.Image.Image)) - self.assertTrue(isinstance(target, PIL.Image.Image)) - - def test_imagefolder(self): - # TODO: create the fake data on-the-fly - FAKEDATA_DIR = get_file_path_2( - os.path.dirname(os.path.abspath(__file__)), 'assets', 'fakedata') - - with get_tmp_dir(src=os.path.join(FAKEDATA_DIR, 'imagefolder')) as root: - classes = sorted(['a', 'b']) - class_a_image_files = [os.path.join(root, 'a', file) - for file in ('a1.png', 'a2.png', 'a3.png')] - class_b_image_files = [os.path.join(root, 'b', file) - for file in ('b1.png', 'b2.png', 'b3.png', 'b4.png')] - dataset = torchvision.datasets.ImageFolder(root, loader=lambda x: x) - - # test if all classes are present - self.assertEqual(classes, sorted(dataset.classes)) - - # test if combination of classes and class_to_index functions correctly - for cls in classes: - self.assertEqual(cls, dataset.classes[dataset.class_to_idx[cls]]) - - # test if all images were detected correctly - class_a_idx = dataset.class_to_idx['a'] - class_b_idx = dataset.class_to_idx['b'] - imgs_a = [(img_file, class_a_idx) for img_file in class_a_image_files] - imgs_b = [(img_file, class_b_idx) for img_file in class_b_image_files] - imgs = sorted(imgs_a + imgs_b) - 
self.assertEqual(imgs, dataset.imgs) - - # test if the datasets outputs all images correctly - outputs = sorted([dataset[i] for i in range(len(dataset))]) - self.assertEqual(imgs, outputs) - - # redo all tests with specified valid image files - dataset = torchvision.datasets.ImageFolder(root, loader=lambda x: x, - is_valid_file=lambda x: '3' in x) - self.assertEqual(classes, sorted(dataset.classes)) - - class_a_idx = dataset.class_to_idx['a'] - class_b_idx = dataset.class_to_idx['b'] - imgs_a = [(img_file, class_a_idx) for img_file in class_a_image_files - if '3' in img_file] - imgs_b = [(img_file, class_b_idx) for img_file in class_b_image_files - if '3' in img_file] - imgs = sorted(imgs_a + imgs_b) - self.assertEqual(imgs, dataset.imgs) - - outputs = sorted([dataset[i] for i in range(len(dataset))]) - self.assertEqual(imgs, outputs) - - @mock.patch('torchvision.datasets.mnist.download_and_extract_archive') - def test_mnist(self, mock_download_extract): - num_examples = 30 - with mnist_root(num_examples, "MNIST") as root: - dataset = torchvision.datasets.MNIST(root, download=True) - self.generic_classification_dataset_test(dataset, num_images=num_examples) - img, target = dataset[0] - self.assertEqual(dataset.class_to_idx[dataset.classes[0]], target) - - @mock.patch('torchvision.datasets.mnist.download_and_extract_archive') - def test_kmnist(self, mock_download_extract): - num_examples = 30 - with mnist_root(num_examples, "KMNIST") as root: - dataset = torchvision.datasets.KMNIST(root, download=True) - self.generic_classification_dataset_test(dataset, num_images=num_examples) - img, target = dataset[0] - self.assertEqual(dataset.class_to_idx[dataset.classes[0]], target) - - @mock.patch('torchvision.datasets.mnist.download_and_extract_archive') - def test_fashionmnist(self, mock_download_extract): - num_examples = 30 - with mnist_root(num_examples, "FashionMNIST") as root: - dataset = torchvision.datasets.FashionMNIST(root, download=True) - self.generic_classification_dataset_test(dataset, num_images=num_examples) - img, target = dataset[0] - self.assertEqual(dataset.class_to_idx[dataset.classes[0]], target) - - @mock.patch('torchvision.datasets.imagenet._verify_archive') - @unittest.skipIf(not HAS_SCIPY, "scipy unavailable") - def test_imagenet(self, mock_verify): - with imagenet_root() as root: - dataset = torchvision.datasets.ImageNet(root, split='train') - self.generic_classification_dataset_test(dataset) - - dataset = torchvision.datasets.ImageNet(root, split='val') - self.generic_classification_dataset_test(dataset) - - @mock.patch('torchvision.datasets.cifar.check_integrity') - @mock.patch('torchvision.datasets.cifar.CIFAR10._check_integrity') - def test_cifar10(self, mock_ext_check, mock_int_check): - mock_ext_check.return_value = True - mock_int_check.return_value = True - with cifar_root('CIFAR10') as root: - dataset = torchvision.datasets.CIFAR10(root, train=True, download=True) - self.generic_classification_dataset_test(dataset, num_images=5) - img, target = dataset[0] - self.assertEqual(dataset.class_to_idx[dataset.classes[0]], target) - - dataset = torchvision.datasets.CIFAR10(root, train=False, download=True) - self.generic_classification_dataset_test(dataset) - img, target = dataset[0] - self.assertEqual(dataset.class_to_idx[dataset.classes[0]], target) - - @mock.patch('torchvision.datasets.cifar.check_integrity') - @mock.patch('torchvision.datasets.cifar.CIFAR10._check_integrity') - def test_cifar100(self, mock_ext_check, mock_int_check): - mock_ext_check.return_value = 
True - mock_int_check.return_value = True - with cifar_root('CIFAR100') as root: - dataset = torchvision.datasets.CIFAR100(root, train=True, download=True) - self.generic_classification_dataset_test(dataset) - img, target = dataset[0] - self.assertEqual(dataset.class_to_idx[dataset.classes[0]], target) - - dataset = torchvision.datasets.CIFAR100(root, train=False, download=True) - self.generic_classification_dataset_test(dataset) - img, target = dataset[0] - self.assertEqual(dataset.class_to_idx[dataset.classes[0]], target) - - @unittest.skipIf('win' in sys.platform, 'temporarily disabled on Windows') - def test_cityscapes(self): - with cityscapes_root() as root: - - for mode in ['coarse', 'fine']: - - if mode == 'coarse': - splits = ['train', 'train_extra', 'val'] - else: - splits = ['train', 'val', 'test'] - - for split in splits: - for target_type in ['semantic', 'instance']: - dataset = torchvision.datasets.Cityscapes(root, split=split, - target_type=target_type, mode=mode) - self.generic_segmentation_dataset_test(dataset, num_images=2) - - color_dataset = torchvision.datasets.Cityscapes(root, split=split, - target_type='color', mode=mode) - color_img, color_target = color_dataset[0] - self.assertTrue(isinstance(color_img, PIL.Image.Image)) - self.assertTrue(np.array(color_target).shape[2] == 4) - - polygon_dataset = torchvision.datasets.Cityscapes(root, split=split, - target_type='polygon', mode=mode) - polygon_img, polygon_target = polygon_dataset[0] - self.assertTrue(isinstance(polygon_img, PIL.Image.Image)) - self.assertTrue(isinstance(polygon_target, dict)) - self.assertTrue(isinstance(polygon_target['imgHeight'], int)) - self.assertTrue(isinstance(polygon_target['objects'], list)) - - # Test multiple target types - targets_combo = ['semantic', 'polygon', 'color'] - multiple_types_dataset = torchvision.datasets.Cityscapes(root, split=split, - target_type=targets_combo, - mode=mode) - output = multiple_types_dataset[0] - self.assertTrue(isinstance(output, tuple)) - self.assertTrue(len(output) == 2) - self.assertTrue(isinstance(output[0], PIL.Image.Image)) - self.assertTrue(isinstance(output[1], tuple)) - self.assertTrue(len(output[1]) == 3) - self.assertTrue(isinstance(output[1][0], PIL.Image.Image)) # semantic - self.assertTrue(isinstance(output[1][1], dict)) # polygon - self.assertTrue(isinstance(output[1][2], PIL.Image.Image)) # color - - @mock.patch('torchvision.datasets.SVHN._check_integrity') - @unittest.skipIf(not HAS_SCIPY, "scipy unavailable") - def test_svhn(self, mock_check): - mock_check.return_value = True - with svhn_root() as root: - dataset = torchvision.datasets.SVHN(root, split="train") - self.generic_classification_dataset_test(dataset, num_images=2) - - dataset = torchvision.datasets.SVHN(root, split="test") - self.generic_classification_dataset_test(dataset, num_images=2) - - dataset = torchvision.datasets.SVHN(root, split="extra") - self.generic_classification_dataset_test(dataset, num_images=2) - - -if __name__ == '__main__': +import pytest +import torch +import torch.nn.functional as F +from common_utils import combinations_grid +from torchvision import datasets +from torchvision.transforms import v2 + + +class STL10TestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.STL10 + ADDITIONAL_CONFIGS = combinations_grid(split=("train", "test", "unlabeled", "train+unlabeled")) + + @staticmethod + def _make_binary_file(num_elements, root, name): + file_name = os.path.join(root, name) + np.zeros(num_elements, dtype=np.uint8).tofile(file_name) + + 
@staticmethod + def _make_image_file(num_images, root, name, num_channels=3, height=96, width=96): + STL10TestCase._make_binary_file(num_images * num_channels * height * width, root, name) + + @staticmethod + def _make_label_file(num_images, root, name): + STL10TestCase._make_binary_file(num_images, root, name) + + @staticmethod + def _make_class_names_file(root, name="class_names.txt"): + with open(os.path.join(root, name), "w") as fh: + for cname in ("airplane", "bird"): + fh.write(f"{cname}\n") + + @staticmethod + def _make_fold_indices_file(root): + num_folds = 10 + offset = 0 + with open(os.path.join(root, "fold_indices.txt"), "w") as fh: + for fold in range(num_folds): + line = " ".join([str(idx) for idx in range(offset, offset + fold + 1)]) + fh.write(f"{line}\n") + offset += fold + 1 + + return tuple(range(1, num_folds + 1)) + + @staticmethod + def _make_train_files(root, num_unlabeled_images=1): + num_images_in_fold = STL10TestCase._make_fold_indices_file(root) + num_train_images = sum(num_images_in_fold) + + STL10TestCase._make_image_file(num_train_images, root, "train_X.bin") + STL10TestCase._make_label_file(num_train_images, root, "train_y.bin") + STL10TestCase._make_image_file(1, root, "unlabeled_X.bin") + + return dict(train=num_train_images, unlabeled=num_unlabeled_images) + + @staticmethod + def _make_test_files(root, num_images=2): + STL10TestCase._make_image_file(num_images, root, "test_X.bin") + STL10TestCase._make_label_file(num_images, root, "test_y.bin") + + return dict(test=num_images) + + def inject_fake_data(self, tmpdir, config): + root_folder = os.path.join(tmpdir, "stl10_binary") + os.mkdir(root_folder) + + num_images_in_split = self._make_train_files(root_folder) + num_images_in_split.update(self._make_test_files(root_folder)) + self._make_class_names_file(root_folder) + + return sum(num_images_in_split[part] for part in config["split"].split("+")) + + def test_folds(self): + for fold in range(10): + with self.create_dataset(split="train", folds=fold) as (dataset, _): + assert len(dataset) == fold + 1 + + def test_unlabeled(self): + with self.create_dataset(split="unlabeled") as (dataset, _): + labels = [dataset[idx][1] for idx in range(len(dataset))] + assert all(label == -1 for label in labels) + + def test_invalid_folds1(self): + with pytest.raises(ValueError): + with self.create_dataset(folds=10): + pass + + def test_invalid_folds2(self): + with pytest.raises(ValueError): + with self.create_dataset(folds="0"): + pass + + +class Caltech101TestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.Caltech101 + FEATURE_TYPES = (PIL.Image.Image, (int, np.ndarray, tuple)) + + ADDITIONAL_CONFIGS = combinations_grid(target_type=("category", "annotation", ["category", "annotation"])) + REQUIRED_PACKAGES = ("scipy",) + + def inject_fake_data(self, tmpdir, config): + root = pathlib.Path(tmpdir) / "caltech101" + images = root / "101_ObjectCategories" + annotations = root / "Annotations" + + categories = (("Faces", "Faces_2"), ("helicopter", "helicopter"), ("ying_yang", "ying_yang")) + num_images_per_category = 2 + + for image_category, annotation_category in categories: + datasets_utils.create_image_folder( + root=images, + name=image_category, + file_name_fn=lambda idx: f"image_{idx + 1:04d}.jpg", + num_examples=num_images_per_category, + ) + self._create_annotation_folder( + root=annotations, + name=annotation_category, + file_name_fn=lambda idx: f"annotation_{idx + 1:04d}.mat", + num_examples=num_images_per_category, + ) + + # This is included in the 
original archive, but is removed by the dataset. Thus, an empty directory suffices. + os.makedirs(images / "BACKGROUND_Google") + + return num_images_per_category * len(categories) + + def _create_annotation_folder(self, root, name, file_name_fn, num_examples): + root = pathlib.Path(root) / name + os.makedirs(root) + + for idx in range(num_examples): + self._create_annotation_file(root, file_name_fn(idx)) + + def _create_annotation_file(self, root, name): + mdict = dict(obj_contour=torch.rand((2, torch.randint(3, 6, size=())), dtype=torch.float64).numpy()) + datasets_utils.lazy_importer.scipy.io.savemat(str(pathlib.Path(root) / name), mdict) + + def test_combined_targets(self): + target_types = ["category", "annotation"] + + individual_targets = [] + for target_type in target_types: + with self.create_dataset(target_type=target_type) as (dataset, _): + _, target = dataset[0] + individual_targets.append(target) + + with self.create_dataset(target_type=target_types) as (dataset, _): + _, combined_targets = dataset[0] + + actual = len(individual_targets) + expected = len(combined_targets) + assert ( + actual == expected + ), "The number of the returned combined targets does not match the the number targets if requested " + f"individually: {actual} != {expected}", + + for target_type, combined_target, individual_target in zip(target_types, combined_targets, individual_targets): + with self.subTest(target_type=target_type): + actual = type(combined_target) + expected = type(individual_target) + assert ( + actual is expected + ), "Type of the combined target does not match the type of the corresponding individual target: " + f"{actual} is not {expected}", + + def test_transforms_v2_wrapper_spawn(self): + expected_size = (123, 321) + with self.create_dataset(target_type="category", transform=v2.Resize(size=expected_size)) as (dataset, _): + datasets_utils.check_transforms_v2_wrapper_spawn(dataset, expected_size=expected_size) + + +class Caltech256TestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.Caltech256 + + def inject_fake_data(self, tmpdir, config): + tmpdir = pathlib.Path(tmpdir) / "caltech256" / "256_ObjectCategories" + + categories = ((1, "ak47"), (2, "american-flag"), (3, "backpack")) + num_images_per_category = 2 + + for idx, category in categories: + datasets_utils.create_image_folder( + tmpdir, + name=f"{idx:03d}.{category}", + file_name_fn=lambda image_idx: f"{idx:03d}_{image_idx + 1:04d}.jpg", + num_examples=num_images_per_category, + ) + + return num_images_per_category * len(categories) + + +class WIDERFaceTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.WIDERFace + FEATURE_TYPES = (PIL.Image.Image, (dict, type(None))) # test split returns None as target + ADDITIONAL_CONFIGS = combinations_grid(split=("train", "val", "test")) + + def inject_fake_data(self, tmpdir, config): + widerface_dir = pathlib.Path(tmpdir) / "widerface" + annotations_dir = widerface_dir / "wider_face_split" + os.makedirs(annotations_dir) + + split_to_idx = split_to_num_examples = { + "train": 1, + "val": 2, + "test": 3, + } + + # We need to create all folders regardless of the split in config + for split in ("train", "val", "test"): + split_idx = split_to_idx[split] + num_examples = split_to_num_examples[split] + + datasets_utils.create_image_folder( + root=tmpdir, + name=widerface_dir / f"WIDER_{split}" / "images" / "0--Parade", + file_name_fn=lambda image_idx: f"0_Parade_marchingband_1_{split_idx + image_idx}.jpg", + num_examples=num_examples, + ) + + 
annotation_file_name = { + "train": annotations_dir / "wider_face_train_bbx_gt.txt", + "val": annotations_dir / "wider_face_val_bbx_gt.txt", + "test": annotations_dir / "wider_face_test_filelist.txt", + }[split] + + annotation_content = { + "train": "".join( + f"0--Parade/0_Parade_marchingband_1_{split_idx + image_idx}.jpg\n1\n449 330 122 149 0 0 0 0 0 0\n" + for image_idx in range(num_examples) + ), + "val": "".join( + f"0--Parade/0_Parade_marchingband_1_{split_idx + image_idx}.jpg\n1\n501 160 285 443 0 0 0 0 0 0\n" + for image_idx in range(num_examples) + ), + "test": "".join( + f"0--Parade/0_Parade_marchingband_1_{split_idx + image_idx}.jpg\n" + for image_idx in range(num_examples) + ), + }[split] + + with open(annotation_file_name, "w") as annotation_file: + annotation_file.write(annotation_content) + + return split_to_num_examples[config["split"]] + + def test_transforms_v2_wrapper_spawn(self): + expected_size = (123, 321) + with self.create_dataset(transform=v2.Resize(size=expected_size)) as (dataset, _): + datasets_utils.check_transforms_v2_wrapper_spawn(dataset, expected_size=expected_size) + + +class CityScapesTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.Cityscapes + TARGET_TYPES = ( + "instance", + "semantic", + "polygon", + "color", + ) + ADDITIONAL_CONFIGS = ( + *combinations_grid(mode=("fine",), split=("train", "test", "val"), target_type=TARGET_TYPES), + *combinations_grid( + mode=("coarse",), + split=("train", "train_extra", "val"), + target_type=TARGET_TYPES, + ), + ) + FEATURE_TYPES = (PIL.Image.Image, (dict, PIL.Image.Image)) + + def inject_fake_data(self, tmpdir, config): + + tmpdir = pathlib.Path(tmpdir) + + mode_to_splits = { + "Coarse": ["train", "train_extra", "val"], + "Fine": ["train", "test", "val"], + } + + if config["split"] == "train": # just for coverage of the number of samples + cities = ["bochum", "bremen"] + else: + cities = ["bochum"] + + polygon_target = { + "imgHeight": 1024, + "imgWidth": 2048, + "objects": [ + { + "label": "sky", + "polygon": [ + [1241, 0], + [1234, 156], + [1478, 197], + [1611, 172], + [1606, 0], + ], + }, + { + "label": "road", + "polygon": [ + [0, 448], + [1331, 274], + [1473, 265], + [2047, 605], + [2047, 1023], + [0, 1023], + ], + }, + ], + } + + for mode in ["Coarse", "Fine"]: + gt_dir = tmpdir / f"gt{mode}" + for split in mode_to_splits[mode]: + for city in cities: + + def make_image(name, size=10): + datasets_utils.create_image_folder( + root=gt_dir / split, + name=city, + file_name_fn=lambda _: name, + size=size, + num_examples=1, + ) + + make_image(f"{city}_000000_000000_gt{mode}_instanceIds.png") + make_image(f"{city}_000000_000000_gt{mode}_labelIds.png") + make_image(f"{city}_000000_000000_gt{mode}_color.png", size=(4, 10, 10)) + + polygon_target_name = gt_dir / split / city / f"{city}_000000_000000_gt{mode}_polygons.json" + with open(polygon_target_name, "w") as outfile: + json.dump(polygon_target, outfile) + + # Create leftImg8bit folder + for split in ["test", "train_extra", "train", "val"]: + for city in cities: + datasets_utils.create_image_folder( + root=tmpdir / "leftImg8bit" / split, + name=city, + file_name_fn=lambda _: f"{city}_000000_000000_leftImg8bit.png", + num_examples=1, + ) + + info = {"num_examples": len(cities)} + if config["target_type"] == "polygon": + info["expected_polygon_target"] = polygon_target + return info + + def test_combined_targets(self): + target_types = ["semantic", "polygon", "color"] + + with self.create_dataset(target_type=target_types) as (dataset, _): + 
output = dataset[0] + assert isinstance(output, tuple) + assert len(output) == 2 + assert isinstance(output[0], PIL.Image.Image) + assert isinstance(output[1], tuple) + assert len(output[1]) == 3 + assert isinstance(output[1][0], PIL.Image.Image) # semantic + assert isinstance(output[1][1], dict) # polygon + assert isinstance(output[1][2], PIL.Image.Image) # color + + def test_feature_types_target_color(self): + with self.create_dataset(target_type="color") as (dataset, _): + color_img, color_target = dataset[0] + assert isinstance(color_img, PIL.Image.Image) + assert np.array(color_target).shape[2] == 4 + + def test_feature_types_target_polygon(self): + with self.create_dataset(target_type="polygon") as (dataset, info): + polygon_img, polygon_target = dataset[0] + assert isinstance(polygon_img, PIL.Image.Image) + (polygon_target, info["expected_polygon_target"]) + + def test_transforms_v2_wrapper_spawn(self): + expected_size = (123, 321) + for target_type in ["instance", "semantic", ["instance", "semantic"]]: + with self.create_dataset(target_type=target_type, transform=v2.Resize(size=expected_size)) as (dataset, _): + datasets_utils.check_transforms_v2_wrapper_spawn(dataset, expected_size=expected_size) + + +class ImageNetTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.ImageNet + REQUIRED_PACKAGES = ("scipy",) + ADDITIONAL_CONFIGS = combinations_grid(split=("train", "val")) + + def inject_fake_data(self, tmpdir, config): + tmpdir = pathlib.Path(tmpdir) + + wnid = "n01234567" + if config["split"] == "train": + num_examples = 3 + datasets_utils.create_image_folder( + root=tmpdir, + name=tmpdir / "train" / wnid / wnid, + file_name_fn=lambda image_idx: f"{wnid}_{image_idx}.JPEG", + num_examples=num_examples, + ) + else: + num_examples = 1 + datasets_utils.create_image_folder( + root=tmpdir, + name=tmpdir / "val" / wnid, + file_name_fn=lambda image_ifx: "ILSVRC2012_val_0000000{image_idx}.JPEG", + num_examples=num_examples, + ) + + wnid_to_classes = {wnid: [1]} + torch.save((wnid_to_classes, None), tmpdir / "meta.bin") + return num_examples + + def test_transforms_v2_wrapper_spawn(self): + expected_size = (123, 321) + with self.create_dataset(transform=v2.Resize(size=expected_size)) as (dataset, _): + datasets_utils.check_transforms_v2_wrapper_spawn(dataset, expected_size=expected_size) + + +class CIFAR10TestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.CIFAR10 + ADDITIONAL_CONFIGS = combinations_grid(train=(True, False)) + + _VERSION_CONFIG = dict( + base_folder="cifar-10-batches-py", + train_files=tuple(f"data_batch_{idx}" for idx in range(1, 6)), + test_files=("test_batch",), + labels_key="labels", + meta_file="batches.meta", + num_categories=10, + categories_key="label_names", + ) + + def inject_fake_data(self, tmpdir, config): + tmpdir = pathlib.Path(tmpdir) / self._VERSION_CONFIG["base_folder"] + os.makedirs(tmpdir) + + num_images_per_file = 1 + for name in itertools.chain(self._VERSION_CONFIG["train_files"], self._VERSION_CONFIG["test_files"]): + self._create_batch_file(tmpdir, name, num_images_per_file) + + categories = self._create_meta_file(tmpdir) + + return dict( + num_examples=num_images_per_file + * len(self._VERSION_CONFIG["train_files"] if config["train"] else self._VERSION_CONFIG["test_files"]), + categories=categories, + ) + + def _create_batch_file(self, root, name, num_images): + np_rng = np.random.RandomState(0) + data = datasets_utils.create_image_or_video_tensor((num_images, 32 * 32 * 3)) + labels = np_rng.randint(0, 
self._VERSION_CONFIG["num_categories"], size=num_images).tolist() + self._create_binary_file(root, name, {"data": data, self._VERSION_CONFIG["labels_key"]: labels}) + + def _create_meta_file(self, root): + categories = [ + f"{idx:0{len(str(self._VERSION_CONFIG['num_categories'] - 1))}d}" + for idx in range(self._VERSION_CONFIG["num_categories"]) + ] + self._create_binary_file( + root, self._VERSION_CONFIG["meta_file"], {self._VERSION_CONFIG["categories_key"]: categories} + ) + return categories + + def _create_binary_file(self, root, name, content): + with open(pathlib.Path(root) / name, "wb") as fh: + pickle.dump(content, fh) + + def test_class_to_idx(self): + with self.create_dataset() as (dataset, info): + expected = {category: label for label, category in enumerate(info["categories"])} + actual = dataset.class_to_idx + assert actual == expected + + +class CIFAR100(CIFAR10TestCase): + DATASET_CLASS = datasets.CIFAR100 + + _VERSION_CONFIG = dict( + base_folder="cifar-100-python", + train_files=("train",), + test_files=("test",), + labels_key="fine_labels", + meta_file="meta", + num_categories=100, + categories_key="fine_label_names", + ) + + +class CelebATestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.CelebA + FEATURE_TYPES = (PIL.Image.Image, (torch.Tensor, int, tuple, type(None))) + + ADDITIONAL_CONFIGS = combinations_grid( + split=("train", "valid", "test", "all"), + target_type=("attr", "identity", "bbox", "landmarks", ["attr", "identity"]), + ) + + _SPLIT_TO_IDX = dict(train=0, valid=1, test=2) + + def inject_fake_data(self, tmpdir, config): + base_folder = pathlib.Path(tmpdir) / "celeba" + os.makedirs(base_folder) + + num_images, num_images_per_split = self._create_split_txt(base_folder) + + datasets_utils.create_image_folder( + base_folder, "img_align_celeba", lambda idx: f"{idx + 1:06d}.jpg", num_images + ) + attr_names = self._create_attr_txt(base_folder, num_images) + self._create_identity_txt(base_folder, num_images) + self._create_bbox_txt(base_folder, num_images) + self._create_landmarks_txt(base_folder, num_images) + + return dict(num_examples=num_images_per_split[config["split"]], attr_names=attr_names) + + def _create_split_txt(self, root): + num_images_per_split = dict(train=4, valid=3, test=2) + + data = [ + [self._SPLIT_TO_IDX[split]] for split, num_images in num_images_per_split.items() for _ in range(num_images) + ] + self._create_txt(root, "list_eval_partition.txt", data) + + num_images_per_split["all"] = num_images = sum(num_images_per_split.values()) + return num_images, num_images_per_split + + def _create_attr_txt(self, root, num_images): + header = ("5_o_Clock_Shadow", "Young") + data = torch.rand((num_images, len(header))).ge(0.5).int().mul(2).sub(1).tolist() + self._create_txt(root, "list_attr_celeba.txt", data, header=header, add_num_examples=True) + return header + + def _create_identity_txt(self, root, num_images): + data = torch.randint(1, 4, size=(num_images, 1)).tolist() + self._create_txt(root, "identity_CelebA.txt", data) + + def _create_bbox_txt(self, root, num_images): + header = ("x_1", "y_1", "width", "height") + data = torch.randint(10, size=(num_images, len(header))).tolist() + self._create_txt( + root, "list_bbox_celeba.txt", data, header=header, add_num_examples=True, add_image_id_to_header=True + ) + + def _create_landmarks_txt(self, root, num_images): + header = ("lefteye_x", "rightmouth_y") + data = torch.randint(10, size=(num_images, len(header))).tolist() + self._create_txt(root, 
"list_landmarks_align_celeba.txt", data, header=header, add_num_examples=True) + + def _create_txt(self, root, name, data, header=None, add_num_examples=False, add_image_id_to_header=False): + with open(pathlib.Path(root) / name, "w") as fh: + if add_num_examples: + fh.write(f"{len(data)}\n") + + if header: + if add_image_id_to_header: + header = ("image_id", *header) + fh.write(f"{' '.join(header)}\n") + + for idx, line in enumerate(data, 1): + fh.write(f"{' '.join((f'{idx:06d}.jpg', *[str(value) for value in line]))}\n") + + def test_combined_targets(self): + target_types = ["attr", "identity", "bbox", "landmarks"] + + individual_targets = [] + for target_type in target_types: + with self.create_dataset(target_type=target_type) as (dataset, _): + _, target = dataset[0] + individual_targets.append(target) + + with self.create_dataset(target_type=target_types) as (dataset, _): + _, combined_targets = dataset[0] + + actual = len(individual_targets) + expected = len(combined_targets) + assert ( + actual == expected + ), "The number of the returned combined targets does not match the the number targets if requested " + f"individually: {actual} != {expected}", + + for target_type, combined_target, individual_target in zip(target_types, combined_targets, individual_targets): + with self.subTest(target_type=target_type): + actual = type(combined_target) + expected = type(individual_target) + assert ( + actual is expected + ), "Type of the combined target does not match the type of the corresponding individual target: " + f"{actual} is not {expected}", + + def test_no_target(self): + with self.create_dataset(target_type=[]) as (dataset, _): + _, target = dataset[0] + + assert target is None + + def test_attr_names(self): + with self.create_dataset() as (dataset, info): + assert tuple(dataset.attr_names) == info["attr_names"] + + def test_images_names_split(self): + with self.create_dataset(split="all") as (dataset, _): + all_imgs_names = set(dataset.filename) + + merged_imgs_names = set() + for split in ["train", "valid", "test"]: + with self.create_dataset(split=split) as (dataset, _): + merged_imgs_names.update(dataset.filename) + + assert merged_imgs_names == all_imgs_names + + def test_transforms_v2_wrapper_spawn(self): + expected_size = (123, 321) + for target_type in ["identity", "bbox", ["identity", "bbox"]]: + with self.create_dataset(target_type=target_type, transform=v2.Resize(size=expected_size)) as (dataset, _): + datasets_utils.check_transforms_v2_wrapper_spawn(dataset, expected_size=expected_size) + + +class VOCSegmentationTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.VOCSegmentation + FEATURE_TYPES = (PIL.Image.Image, PIL.Image.Image) + + ADDITIONAL_CONFIGS = ( + *combinations_grid(year=[f"20{year:02d}" for year in range(7, 13)], image_set=("train", "val", "trainval")), + dict(year="2007", image_set="test"), + ) + + def inject_fake_data(self, tmpdir, config): + year, is_test_set = config["year"], config["image_set"] == "test" + image_set = config["image_set"] + + base_dir = pathlib.Path(tmpdir) + if year == "2011": + base_dir /= "TrainVal" + base_dir = base_dir / "VOCdevkit" / f"VOC{year}" + os.makedirs(base_dir) + + num_images, num_images_per_image_set = self._create_image_set_files(base_dir, "ImageSets", is_test_set) + datasets_utils.create_image_folder(base_dir, "JPEGImages", lambda idx: f"{idx:06d}.jpg", num_images) + + datasets_utils.create_image_folder(base_dir, "SegmentationClass", lambda idx: f"{idx:06d}.png", num_images) + annotation = 
self._create_annotation_files(base_dir, "Annotations", num_images) + + return dict(num_examples=num_images_per_image_set[image_set], annotation=annotation) + + def _create_image_set_files(self, root, name, is_test_set): + root = pathlib.Path(root) / name + src = pathlib.Path(root) / "Main" + os.makedirs(src, exist_ok=True) + + idcs = dict(train=(0, 1, 2), val=(3, 4), test=(5,)) + idcs["trainval"] = (*idcs["train"], *idcs["val"]) + + for image_set in ("test",) if is_test_set else ("train", "val", "trainval"): + self._create_image_set_file(src, image_set, idcs[image_set]) + + shutil.copytree(src, root / "Segmentation") + + num_images = max(itertools.chain(*idcs.values())) + 1 + num_images_per_image_set = {image_set: len(idcs_) for image_set, idcs_ in idcs.items()} + return num_images, num_images_per_image_set + + def _create_image_set_file(self, root, image_set, idcs): + with open(pathlib.Path(root) / f"{image_set}.txt", "w") as fh: + fh.writelines([f"{idx:06d}\n" for idx in idcs]) + + def _create_annotation_files(self, root, name, num_images): + root = pathlib.Path(root) / name + os.makedirs(root) + + for idx in range(num_images): + annotation = self._create_annotation_file(root, f"{idx:06d}.xml") + + return annotation + + def _create_annotation_file(self, root, name): + def add_child(parent, name, text=None): + child = ET.SubElement(parent, name) + child.text = text + return child + + def add_name(obj, name="dog"): + add_child(obj, "name", name) + return name + + def add_bndbox(obj, bndbox=None): + if bndbox is None: + bndbox = {"xmin": "1", "xmax": "2", "ymin": "3", "ymax": "4"} + + obj = add_child(obj, "bndbox") + for name, text in bndbox.items(): + add_child(obj, name, text) + + return bndbox + + annotation = ET.Element("annotation") + obj = add_child(annotation, "object") + data = dict(name=add_name(obj), bndbox=add_bndbox(obj)) + + with open(pathlib.Path(root) / name, "wb") as fh: + fh.write(ET.tostring(annotation)) + + return data + + def test_transforms_v2_wrapper_spawn(self): + expected_size = (123, 321) + with self.create_dataset(transform=v2.Resize(size=expected_size)) as (dataset, _): + datasets_utils.check_transforms_v2_wrapper_spawn(dataset, expected_size=expected_size) + + +class VOCDetectionTestCase(VOCSegmentationTestCase): + DATASET_CLASS = datasets.VOCDetection + FEATURE_TYPES = (PIL.Image.Image, dict) + + def test_annotations(self): + with self.create_dataset() as (dataset, info): + _, target = dataset[0] + + assert "annotation" in target + annotation = target["annotation"] + + assert "object" in annotation + objects = annotation["object"] + + assert len(objects) == 1 + object = objects[0] + + assert object == info["annotation"] + + def test_transforms_v2_wrapper_spawn(self): + expected_size = (123, 321) + with self.create_dataset(transform=v2.Resize(size=expected_size)) as (dataset, _): + datasets_utils.check_transforms_v2_wrapper_spawn(dataset, expected_size=expected_size) + + +class CocoDetectionTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.CocoDetection + FEATURE_TYPES = (PIL.Image.Image, list) + + REQUIRED_PACKAGES = ("pycocotools",) + + _IMAGE_FOLDER = "images" + _ANNOTATIONS_FOLDER = "annotations" + _ANNOTATIONS_FILE = "annotations.json" + + def dataset_args(self, tmpdir, config): + tmpdir = pathlib.Path(tmpdir) + root = tmpdir / self._IMAGE_FOLDER + annotation_file = tmpdir / self._ANNOTATIONS_FOLDER / self._ANNOTATIONS_FILE + return root, annotation_file + + def inject_fake_data(self, tmpdir, config): + tmpdir = pathlib.Path(tmpdir) + 
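+        # Fixture sketch: an image folder plus a COCO-style JSON index whose "images" and
+        # "annotations" lists are linked by image id, e.g.
+        #   {"images": [{"file_name": "000000000000.jpg", "id": 0}, ...],
+        #    "annotations": [{"image_id": 0, "id": 0, "bbox": [...], ...}, ...]}
+        # which is all pycocotools needs to build its index for these tests.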
+ num_images = 3 + num_annotations_per_image = 2 + + files = datasets_utils.create_image_folder( + tmpdir, name=self._IMAGE_FOLDER, file_name_fn=lambda idx: f"{idx:012d}.jpg", num_examples=num_images + ) + file_names = [file.relative_to(tmpdir / self._IMAGE_FOLDER) for file in files] + + annotation_folder = tmpdir / self._ANNOTATIONS_FOLDER + os.makedirs(annotation_folder) + + segmentation_kind = config.pop("segmentation_kind", "list") + info = self._create_annotation_file( + annotation_folder, + self._ANNOTATIONS_FILE, + file_names, + num_annotations_per_image, + segmentation_kind=segmentation_kind, + ) + + info["num_examples"] = num_images + return info + + def _create_annotation_file(self, root, name, file_names, num_annotations_per_image, segmentation_kind="list"): + image_ids = [int(file_name.stem) for file_name in file_names] + images = [dict(file_name=str(file_name), id=id) for file_name, id in zip(file_names, image_ids)] + + annotations, info = self._create_annotations(image_ids, num_annotations_per_image, segmentation_kind) + self._create_json(root, name, dict(images=images, annotations=annotations)) + + return info + + def _create_annotations(self, image_ids, num_annotations_per_image, segmentation_kind="list"): + annotations = [] + annotion_id = 0 + + for image_id in itertools.islice(itertools.cycle(image_ids), len(image_ids) * num_annotations_per_image): + segmentation = { + "list": [torch.rand(8).tolist()], + "rle": {"size": [10, 10], "counts": [1]}, + "rle_encoded": {"size": [2400, 2400], "counts": "PQRQ2[1\\Y2f0gNVNRhMg2"}, + "bad": 123, + }[segmentation_kind] + + annotations.append( + dict( + image_id=image_id, + id=annotion_id, + bbox=torch.rand(4).tolist(), + segmentation=segmentation, + category_id=int(torch.randint(91, ())), + area=float(torch.rand(1)), + iscrowd=int(torch.randint(2, size=(1,))), + ) + ) + annotion_id += 1 + return annotations, dict() + + def _create_json(self, root, name, content): + file = pathlib.Path(root) / name + with open(file, "w") as fh: + json.dump(content, fh) + return file + + def test_transforms_v2_wrapper_spawn(self): + expected_size = (123, 321) + with self.create_dataset(transform=v2.Resize(size=expected_size)) as (dataset, _): + datasets_utils.check_transforms_v2_wrapper_spawn(dataset, expected_size=expected_size) + + def test_slice_error(self): + with self.create_dataset() as (dataset, _): + with pytest.raises(ValueError, match="Index must be of type integer"): + dataset[:2] + + def test_segmentation_kind(self): + if isinstance(self, CocoCaptionsTestCase): + return + + for segmentation_kind in ("list", "rle", "rle_encoded"): + config = {"segmentation_kind": segmentation_kind} + with self.create_dataset(config) as (dataset, _): + dataset = datasets.wrap_dataset_for_transforms_v2(dataset, target_keys="all") + list(dataset) + + config = {"segmentation_kind": "bad"} + with self.create_dataset(config) as (dataset, _): + dataset = datasets.wrap_dataset_for_transforms_v2(dataset, target_keys="all") + with pytest.raises(ValueError, match="COCO segmentation expected to be a dict or a list"): + list(dataset) + + +class CocoCaptionsTestCase(CocoDetectionTestCase): + DATASET_CLASS = datasets.CocoCaptions + + def _create_annotations(self, image_ids, num_annotations_per_image, segmentation_kind="list"): + captions = [str(idx) for idx in range(num_annotations_per_image)] + annotations = combinations_grid(image_id=image_ids, caption=captions) + for id, annotation in enumerate(annotations): + annotation["id"] = id + return annotations, 
dict(captions=captions) + + def test_captions(self): + with self.create_dataset() as (dataset, info): + _, captions = dataset[0] + assert tuple(captions) == tuple(info["captions"]) + + def test_transforms_v2_wrapper_spawn(self): + # We need to define this method, because otherwise the test from the super class will + # be run + pytest.skip("CocoCaptions is currently not supported by the v2 wrapper.") + + +class UCF101TestCase(datasets_utils.VideoDatasetTestCase): + DATASET_CLASS = datasets.UCF101 + + ADDITIONAL_CONFIGS = combinations_grid(fold=(1, 2, 3), train=(True, False)) + + _VIDEO_FOLDER = "videos" + _ANNOTATIONS_FOLDER = "annotations" + + def dataset_args(self, tmpdir, config): + tmpdir = pathlib.Path(tmpdir) + root = tmpdir / self._VIDEO_FOLDER + annotation_path = tmpdir / self._ANNOTATIONS_FOLDER + return root, annotation_path + + def inject_fake_data(self, tmpdir, config): + tmpdir = pathlib.Path(tmpdir) + + video_folder = tmpdir / self._VIDEO_FOLDER + os.makedirs(video_folder) + video_files = self._create_videos(video_folder) + + annotations_folder = tmpdir / self._ANNOTATIONS_FOLDER + os.makedirs(annotations_folder) + num_examples = self._create_annotation_files(annotations_folder, video_files, config["fold"], config["train"]) + + return num_examples + + def _create_videos(self, root, num_examples_per_class=3): + def file_name_fn(cls, idx, clips_per_group=2): + return f"v_{cls}_g{(idx // clips_per_group) + 1:02d}_c{(idx % clips_per_group) + 1:02d}.avi" + + video_files = [ + datasets_utils.create_video_folder(root, cls, lambda idx: file_name_fn(cls, idx), num_examples_per_class) + for cls in ("ApplyEyeMakeup", "YoYo") + ] + return [path.relative_to(root) for path in itertools.chain(*video_files)] + + def _create_annotation_files(self, root, video_files, fold, train): + current_videos = random.sample(video_files, random.randrange(1, len(video_files) - 1)) + current_annotation = self._annotation_file_name(fold, train) + self._create_annotation_file(root, current_annotation, current_videos) + + other_videos = set(video_files) - set(current_videos) + other_annotations = [ + self._annotation_file_name(fold, train) for fold, train in itertools.product((1, 2, 3), (True, False)) + ] + other_annotations.remove(current_annotation) + for name in other_annotations: + self._create_annotation_file(root, name, other_videos) + + return len(current_videos) + + def _annotation_file_name(self, fold, train): + return f"{'train' if train else 'test'}list{fold:02d}.txt" + + def _create_annotation_file(self, root, name, video_files): + with open(pathlib.Path(root) / name, "w") as fh: + fh.writelines(f"{str(file).replace(os.sep, '/')}\n" for file in sorted(video_files)) + + +class LSUNTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.LSUN + + REQUIRED_PACKAGES = ("lmdb",) + ADDITIONAL_CONFIGS = combinations_grid(classes=("train", "test", "val", ["bedroom_train", "church_outdoor_train"])) + + _CATEGORIES = ( + "bedroom", + "bridge", + "church_outdoor", + "classroom", + "conference_room", + "dining_room", + "kitchen", + "living_room", + "restaurant", + "tower", + ) + + def inject_fake_data(self, tmpdir, config): + root = pathlib.Path(tmpdir) + + num_images = 0 + for cls in self._parse_classes(config["classes"]): + num_images += self._create_lmdb(root, cls) + + return num_images + + @contextlib.contextmanager + def create_dataset(self, *args, **kwargs): + with super().create_dataset(*args, **kwargs) as output: + yield output + # Currently datasets.LSUN caches the keys in the current 
directory rather than in the root directory. Thus, + # this creates a number of _cache_* files in the current directory that will not be removed together + # with the temporary directory + for file in os.listdir(os.getcwd()): + if file.startswith("_cache_"): + try: + os.remove(file) + except FileNotFoundError: + # When the same test is run in parallel (in fb internal tests), a thread may remove another + # thread's file. We should be able to remove the try/except when + # https://github.com/pytorch/vision/issues/825 is fixed. + pass + + def _parse_classes(self, classes): + if not isinstance(classes, str): + return classes + + split = classes + if split == "test": + return [split] + + return [f"{category}_{split}" for category in self._CATEGORIES] + + def _create_lmdb(self, root, cls): + lmdb = datasets_utils.lazy_importer.lmdb + hexdigits_lowercase = string.digits + string.ascii_lowercase[:6] + + folder = f"{cls}_lmdb" + + num_images = torch.randint(1, 4, size=()).item() + format = "png" + files = datasets_utils.create_image_folder(root, folder, lambda idx: f"{idx}.{format}", num_images) + + with lmdb.open(str(root / folder)) as env, env.begin(write=True) as txn: + for file in files: + key = "".join(random.choice(hexdigits_lowercase) for _ in range(40)).encode() + + buffer = io.BytesIO() + PIL.Image.open(file).save(buffer, format) + buffer.seek(0) + value = buffer.read() + + txn.put(key, value) + + os.remove(file) + + return num_images + + def test_not_found_or_corrupted(self): + # LSUN does not raise built-in exception, but a custom one. It is expressive enough to not 'cast' it to + # RuntimeError or FileNotFoundError that are normally checked by this test. + with pytest.raises(datasets_utils.lazy_importer.lmdb.Error): + super().test_not_found_or_corrupted() + + +class KineticsTestCase(datasets_utils.VideoDatasetTestCase): + DATASET_CLASS = datasets.Kinetics + ADDITIONAL_CONFIGS = combinations_grid(split=("train", "val"), num_classes=("400", "600", "700")) + + def inject_fake_data(self, tmpdir, config): + classes = ("Abseiling", "Zumba") + num_videos_per_class = 2 + tmpdir = pathlib.Path(tmpdir) / config["split"] + digits = string.ascii_letters + string.digits + "-_" + for cls in classes: + datasets_utils.create_video_folder( + tmpdir, + cls, + lambda _: f"{datasets_utils.create_random_string(11, digits)}.mp4", + num_videos_per_class, + ) + return num_videos_per_class * len(classes) + + @pytest.mark.xfail(reason="FIXME") + def test_transforms_v2_wrapper_spawn(self): + expected_size = (123, 321) + with self.create_dataset(output_format="TCHW", transform=v2.Resize(size=expected_size)) as (dataset, _): + datasets_utils.check_transforms_v2_wrapper_spawn(dataset, expected_size=expected_size) + + +class HMDB51TestCase(datasets_utils.VideoDatasetTestCase): + DATASET_CLASS = datasets.HMDB51 + + ADDITIONAL_CONFIGS = combinations_grid(fold=(1, 2, 3), train=(True, False)) + + _VIDEO_FOLDER = "videos" + _SPLITS_FOLDER = "splits" + _CLASSES = ("brush_hair", "wave") + + def dataset_args(self, tmpdir, config): + tmpdir = pathlib.Path(tmpdir) + root = tmpdir / self._VIDEO_FOLDER + annotation_path = tmpdir / self._SPLITS_FOLDER + return root, annotation_path + + def inject_fake_data(self, tmpdir, config): + tmpdir = pathlib.Path(tmpdir) + + video_folder = tmpdir / self._VIDEO_FOLDER + os.makedirs(video_folder) + video_files = self._create_videos(video_folder) + + splits_folder = tmpdir / self._SPLITS_FOLDER + os.makedirs(splits_folder) + num_examples = self._create_split_files(splits_folder, video_files, 
config["fold"], config["train"]) + + return num_examples + + def _create_videos(self, root, num_examples_per_class=3): + def file_name_fn(cls, idx, clips_per_group=2): + return f"{cls}_{(idx // clips_per_group) + 1:d}_{(idx % clips_per_group) + 1:d}.avi" + + return [ + ( + cls, + datasets_utils.create_video_folder( + root, + cls, + lambda idx: file_name_fn(cls, idx), + num_examples_per_class, + ), + ) + for cls in self._CLASSES + ] + + def _create_split_files(self, root, video_files, fold, train): + num_videos = num_train_videos = 0 + + for cls, videos in video_files: + num_videos += len(videos) + + train_videos = set(random.sample(videos, random.randrange(1, len(videos) - 1))) + num_train_videos += len(train_videos) + + with open(pathlib.Path(root) / f"{cls}_test_split{fold}.txt", "w") as fh: + fh.writelines(f"{file.name} {1 if file in train_videos else 2}\n" for file in videos) + + return num_train_videos if train else (num_videos - num_train_videos) + + +class OmniglotTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.Omniglot + + ADDITIONAL_CONFIGS = combinations_grid(background=(True, False)) + + def inject_fake_data(self, tmpdir, config): + target_folder = ( + pathlib.Path(tmpdir) / "omniglot-py" / f"images_{'background' if config['background'] else 'evaluation'}" + ) + os.makedirs(target_folder) + + num_images = 0 + for name in ("Alphabet_of_the_Magi", "Tifinagh"): + num_images += self._create_alphabet_folder(target_folder, name) + + return num_images + + def _create_alphabet_folder(self, root, name): + num_images_total = 0 + for idx in range(torch.randint(1, 4, size=()).item()): + num_images = torch.randint(1, 4, size=()).item() + num_images_total += num_images + + datasets_utils.create_image_folder( + root / name, f"character{idx:02d}", lambda image_idx: f"{image_idx:02d}.png", num_images + ) + + return num_images_total + + +class SBUTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.SBU + FEATURE_TYPES = (PIL.Image.Image, str) + + def inject_fake_data(self, tmpdir, config): + num_images = 3 + + dataset_folder = pathlib.Path(tmpdir) / "dataset" + images = datasets_utils.create_image_folder(tmpdir, "dataset", self._create_file_name, num_images) + + self._create_urls_txt(dataset_folder, images) + self._create_captions_txt(dataset_folder, num_images) + + return num_images + + def _create_file_name(self, idx): + part1 = datasets_utils.create_random_string(10, string.digits) + part2 = datasets_utils.create_random_string(10, string.ascii_lowercase, string.digits[:6]) + return f"{part1}_{part2}.jpg" + + def _create_urls_txt(self, root, images): + with open(root / "SBU_captioned_photo_dataset_urls.txt", "w") as fh: + for image in images: + fh.write( + f"http://static.flickr.com/{datasets_utils.create_random_string(4, string.digits)}/{image.name}\n" + ) + + def _create_captions_txt(self, root, num_images): + with open(root / "SBU_captioned_photo_dataset_captions.txt", "w") as fh: + for _ in range(num_images): + fh.write(f"{datasets_utils.create_random_string(10)}\n") + + +class SEMEIONTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.SEMEION + + def inject_fake_data(self, tmpdir, config): + num_images = 3 + + images = torch.rand(num_images, 256) + labels = F.one_hot(torch.randint(10, size=(num_images,))) + with open(pathlib.Path(tmpdir) / "semeion.data", "w") as fh: + for image, one_hot_labels in zip(images, labels): + image_columns = " ".join([f"{pixel.item():.4f}" for pixel in image]) + labels_columns = " 
".join([str(label.item()) for label in one_hot_labels]) + fh.write(f"{image_columns} {labels_columns}\n") + + return num_images + + +class USPSTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.USPS + + ADDITIONAL_CONFIGS = combinations_grid(train=(True, False)) + + def inject_fake_data(self, tmpdir, config): + num_images = 2 if config["train"] else 1 + + images = torch.rand(num_images, 256) * 2 - 1 + labels = torch.randint(1, 11, size=(num_images,)) + + with bz2.open(pathlib.Path(tmpdir) / f"usps{'.t' if not config['train'] else ''}.bz2", "w") as fh: + for image, label in zip(images, labels): + line = " ".join((str(label.item()), *[f"{idx}:{pixel:.6f}" for idx, pixel in enumerate(image, 1)])) + fh.write(f"{line}\n".encode()) + + return num_images + + +class SBDatasetTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.SBDataset + FEATURE_TYPES = (PIL.Image.Image, (np.ndarray, PIL.Image.Image)) + + REQUIRED_PACKAGES = ("scipy.io", "scipy.sparse") + + ADDITIONAL_CONFIGS = combinations_grid( + image_set=("train", "val", "train_noval"), mode=("boundaries", "segmentation") + ) + + _NUM_CLASSES = 20 + + def inject_fake_data(self, tmpdir, config): + num_images, num_images_per_image_set = self._create_split_files(tmpdir) + + sizes = self._create_target_folder(tmpdir, "cls", num_images) + + datasets_utils.create_image_folder( + tmpdir, "img", lambda idx: f"{self._file_stem(idx)}.jpg", num_images, size=lambda idx: sizes[idx] + ) + + return num_images_per_image_set[config["image_set"]] + + def _create_split_files(self, root): + root = pathlib.Path(root) + + splits = dict(train=(0, 1, 2), train_noval=(0, 2), val=(3,)) + + for split, idcs in splits.items(): + self._create_split_file(root, split, idcs) + + num_images = max(itertools.chain(*splits.values())) + 1 + num_images_per_split = {split: len(idcs) for split, idcs in splits.items()} + return num_images, num_images_per_split + + def _create_split_file(self, root, name, idcs): + with open(root / f"{name}.txt", "w") as fh: + fh.writelines(f"{self._file_stem(idx)}\n" for idx in idcs) + + def _create_target_folder(self, root, name, num_images): + io = datasets_utils.lazy_importer.scipy.io + + target_folder = pathlib.Path(root) / name + os.makedirs(target_folder) + + sizes = [torch.randint(1, 4, size=(2,)).tolist() for _ in range(num_images)] + for idx, size in enumerate(sizes): + content = dict( + GTcls=dict(Boundaries=self._create_boundaries(size), Segmentation=self._create_segmentation(size)) + ) + io.savemat(target_folder / f"{self._file_stem(idx)}.mat", content) + + return sizes + + def _create_boundaries(self, size): + sparse = datasets_utils.lazy_importer.scipy.sparse + return [ + [sparse.csc_matrix(torch.randint(0, 2, size=size, dtype=torch.uint8).numpy())] + for _ in range(self._NUM_CLASSES) + ] + + def _create_segmentation(self, size): + return torch.randint(0, self._NUM_CLASSES + 1, size=size, dtype=torch.uint8).numpy() + + def _file_stem(self, idx): + return f"2008_{idx:06d}" + + def test_transforms_v2_wrapper_spawn(self): + expected_size = (123, 321) + with self.create_dataset(mode="segmentation", transforms=v2.Resize(size=expected_size)) as (dataset, _): + datasets_utils.check_transforms_v2_wrapper_spawn(dataset, expected_size=expected_size) + + +class FakeDataTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.FakeData + FEATURE_TYPES = (PIL.Image.Image, int) + + def dataset_args(self, tmpdir, config): + return () + + def inject_fake_data(self, tmpdir, config): + return 
config["size"] + + def test_not_found_or_corrupted(self): + self.skipTest("The data is generated at creation and thus cannot be non-existent or corrupted.") + + +class PhotoTourTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.PhotoTour + + # The PhotoTour dataset returns examples with different features with respect to the 'train' parameter. Thus, + # we overwrite 'FEATURE_TYPES' with a dummy value to satisfy the initial checks of the base class. Furthermore, we + # overwrite the 'test_feature_types()' method to select the correct feature types before the test is run. + FEATURE_TYPES = () + _TRAIN_FEATURE_TYPES = (torch.Tensor,) + _TEST_FEATURE_TYPES = (torch.Tensor, torch.Tensor, torch.Tensor) + + combinations_grid(train=(True, False)) + + _NAME = "liberty" + + def dataset_args(self, tmpdir, config): + return tmpdir, self._NAME + + def inject_fake_data(self, tmpdir, config): + tmpdir = pathlib.Path(tmpdir) + + # In contrast to the original data, the fake images injected here comprise only a single patch. Thus, + # num_images == num_patches. + num_patches = 5 + + image_files = self._create_images(tmpdir, self._NAME, num_patches) + point_ids, info_file = self._create_info_file(tmpdir / self._NAME, num_patches) + num_matches, matches_file = self._create_matches_file(tmpdir / self._NAME, num_patches, point_ids) + + self._create_archive(tmpdir, self._NAME, *image_files, info_file, matches_file) + + return num_patches if config["train"] else num_matches + + def _create_images(self, root, name, num_images): + # The images in the PhotoTour dataset comprises of multiple grayscale patches of 64 x 64 pixels. Thus, the + # smallest fake image is 64 x 64 pixels and comprises a single patch. + return datasets_utils.create_image_folder( + root, name, lambda idx: f"patches{idx:04d}.bmp", num_images, size=(1, 64, 64) + ) + + def _create_info_file(self, root, num_images): + point_ids = torch.randint(num_images, size=(num_images,)).tolist() + + file = root / "info.txt" + with open(file, "w") as fh: + fh.writelines([f"{point_id} 0\n" for point_id in point_ids]) + + return point_ids, file + + def _create_matches_file(self, root, num_patches, point_ids): + lines = [ + f"{patch_id1} {point_ids[patch_id1]} 0 {patch_id2} {point_ids[patch_id2]} 0\n" + for patch_id1, patch_id2 in itertools.combinations(range(num_patches), 2) + ] + + file = root / "m50_100000_100000_0.txt" + with open(file, "w") as fh: + fh.writelines(lines) + + return len(lines), file + + def _create_archive(self, root, name, *files): + archive = root / f"{name}.zip" + with zipfile.ZipFile(archive, "w") as zip: + for file in files: + zip.write(file, arcname=file.relative_to(root)) + + return archive + + @datasets_utils.test_all_configs + def test_feature_types(self, config): + feature_types = self.FEATURE_TYPES + self.FEATURE_TYPES = self._TRAIN_FEATURE_TYPES if config["train"] else self._TEST_FEATURE_TYPES + try: + super().test_feature_types.__wrapped__(self, config) + finally: + self.FEATURE_TYPES = feature_types + + +class Flickr8kTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.Flickr8k + + FEATURE_TYPES = (PIL.Image.Image, list) + + _IMAGES_FOLDER = "images" + _ANNOTATIONS_FILE = "captions.html" + + def dataset_args(self, tmpdir, config): + tmpdir = pathlib.Path(tmpdir) + root = tmpdir / self._IMAGES_FOLDER + ann_file = tmpdir / self._ANNOTATIONS_FILE + return str(root), str(ann_file) + + def inject_fake_data(self, tmpdir, config): + num_images = 3 + num_captions_per_image = 3 + + tmpdir = 
+        images = self._create_images(tmpdir, self._IMAGES_FOLDER, num_images)
+        self._create_annotations_file(tmpdir, self._ANNOTATIONS_FILE, images, num_captions_per_image)
+
+        return dict(num_examples=num_images, captions=self._create_captions(num_captions_per_image))
+
+    def _create_images(self, root, name, num_images):
+        return datasets_utils.create_image_folder(root, name, self._image_file_name, num_images)
+
+    def _image_file_name(self, idx):
+        id = datasets_utils.create_random_string(10, string.digits)
+        checksum = datasets_utils.create_random_string(10, string.digits, string.ascii_lowercase[:6])
+        size = datasets_utils.create_random_string(1, "qwcko")
+        return f"{id}_{checksum}_{size}.jpg"
+
+    def _create_annotations_file(self, root, name, images, num_captions_per_image):
+        with open(root / name, "w") as fh:
+            fh.write("<html>")
+            for image in (None, *images):
+                self._add_image(fh, image, num_captions_per_image)
+            fh.write("</html>")
+
+    def _add_image(self, fh, image, num_captions_per_image):
+        fh.write("<table>")
+        self._add_image_header(fh, image)
+        fh.write("<ul>")
+        self._add_image_captions(fh, num_captions_per_image)
+        fh.write("</table>")
+
+    def _add_image_header(self, fh, image=None):
+        if image:
+            url = f"http://www.flickr.com/photos/user/{image.name.split('_')[0]}/"
+            data = f'<a href="{url}">{url}</a>'
+        else:
+            data = "Image Not Found"
+        fh.write(f"<tr>{data}</tr>")
+
+    def _add_image_captions(self, fh, num_captions_per_image):
+        for caption in self._create_captions(num_captions_per_image):
+            fh.write(f"<li>{caption}</li>")
+
+    def _create_captions(self, num_captions_per_image):
+        return [str(idx) for idx in range(num_captions_per_image)]
+
+    def test_captions(self):
+        with self.create_dataset() as (dataset, info):
+            _, captions = dataset[0]
+            assert len(captions) == len(info["captions"])
+            assert all([a == b for a, b in zip(captions, info["captions"])])
+
+
+class Flickr30kTestCase(Flickr8kTestCase):
+    DATASET_CLASS = datasets.Flickr30k
+
+    FEATURE_TYPES = (PIL.Image.Image, list)
+
+    _ANNOTATIONS_FILE = "captions.token"
+
+    def _image_file_name(self, idx):
+        return f"{idx}.jpg"
+
+    def _create_annotations_file(self, root, name, images, num_captions_per_image):
+        with open(root / name, "w") as fh:
+            for image, (idx, caption) in itertools.product(
+                images, enumerate(self._create_captions(num_captions_per_image))
+            ):
+                fh.write(f"{image.name}#{idx}\t{caption}\n")
+
+
+class MNISTTestCase(datasets_utils.ImageDatasetTestCase):
+    DATASET_CLASS = datasets.MNIST
+
+    ADDITIONAL_CONFIGS = combinations_grid(train=(True, False))
+
+    _MAGIC_DTYPES = {
+        torch.uint8: 8,
+        torch.int8: 9,
+        torch.int16: 11,
+        torch.int32: 12,
+        torch.float32: 13,
+        torch.float64: 14,
+    }
+
+    _IMAGES_SIZE = (28, 28)
+    _IMAGES_DTYPE = torch.uint8
+
+    _LABELS_SIZE = ()
+    _LABELS_DTYPE = torch.uint8
+
+    def inject_fake_data(self, tmpdir, config):
+        raw_dir = pathlib.Path(tmpdir) / self.DATASET_CLASS.__name__ / "raw"
+        os.makedirs(raw_dir, exist_ok=True)
+
+        num_images = self._num_images(config)
+        self._create_binary_file(
+            raw_dir, self._images_file(config), (num_images, *self._IMAGES_SIZE), self._IMAGES_DTYPE
+        )
+        self._create_binary_file(
+            raw_dir, self._labels_file(config), (num_images, *self._LABELS_SIZE), self._LABELS_DTYPE
+        )
+        return num_images
+
+    def _num_images(self, config):
+        return 2 if config["train"] else 1
+
+    def _images_file(self, config):
+        return f"{self._prefix(config)}-images-idx3-ubyte"
+
+    def _labels_file(self, config):
+        return f"{self._prefix(config)}-labels-idx1-ubyte"
+
+    def _prefix(self, config):
+        return "train" if config["train"] else "t10k"
+
+    def _create_binary_file(self, root, filename, size, dtype):
+        with open(pathlib.Path(root) / filename, "wb") as fh:
+            for meta in (self._magic(dtype, len(size)), *size):
+                fh.write(self._encode(meta))
+
+            # If ever an MNIST variant is added that uses floating point data, this should be adapted.
+ data = torch.randint(0, torch.iinfo(dtype).max + 1, size, dtype=dtype) + fh.write(data.numpy().tobytes()) + + def _magic(self, dtype, dims): + return self._MAGIC_DTYPES[dtype] * 256 + dims + + def _encode(self, v): + return torch.tensor(v, dtype=torch.int32).numpy().tobytes()[::-1] + + +class FashionMNISTTestCase(MNISTTestCase): + DATASET_CLASS = datasets.FashionMNIST + + +class KMNISTTestCase(MNISTTestCase): + DATASET_CLASS = datasets.KMNIST + + +class EMNISTTestCase(MNISTTestCase): + DATASET_CLASS = datasets.EMNIST + + DEFAULT_CONFIG = dict(split="byclass") + ADDITIONAL_CONFIGS = combinations_grid( + split=("byclass", "bymerge", "balanced", "letters", "digits", "mnist"), train=(True, False) + ) + + def _prefix(self, config): + return f"emnist-{config['split']}-{'train' if config['train'] else 'test'}" + + +class QMNISTTestCase(MNISTTestCase): + DATASET_CLASS = datasets.QMNIST + + ADDITIONAL_CONFIGS = combinations_grid(what=("train", "test", "test10k", "nist")) + + _LABELS_SIZE = (8,) + _LABELS_DTYPE = torch.int32 + + def _num_images(self, config): + if config["what"] == "nist": + return 3 + elif config["what"] == "train": + return 2 + elif config["what"] == "test50k": + # The split 'test50k' is defined as the last 50k images beginning at index 10000. Thus, we need to create + # more than 10000 images for the dataset to not be empty. Since this takes significantly longer than the + # creation of all other splits, this is excluded from the 'ADDITIONAL_CONFIGS' and is tested only once in + # 'test_num_examples_test50k'. + return 10001 + else: + return 1 + + def _labels_file(self, config): + return f"{self._prefix(config)}-labels-idx2-int" + + def _prefix(self, config): + if config["what"] == "nist": + return "xnist" + + if config["what"] is None: + what = "train" if config["train"] else "test" + elif config["what"].startswith("test"): + what = "test" + else: + what = config["what"] + + return f"qmnist-{what}" + + def test_num_examples_test50k(self): + with self.create_dataset(what="test50k") as (dataset, info): + # Since the split 'test50k' selects all images beginning from the index 10000, we subtract the number of + # created examples by this. + assert len(dataset) == info["num_examples"] - 10000 + + +class MovingMNISTTestCase(datasets_utils.DatasetTestCase): + DATASET_CLASS = datasets.MovingMNIST + FEATURE_TYPES = (torch.Tensor,) + + ADDITIONAL_CONFIGS = combinations_grid(split=(None, "train", "test"), split_ratio=(10, 1, 19)) + + _NUM_FRAMES = 20 + + def inject_fake_data(self, tmpdir, config): + base_folder = os.path.join(tmpdir, self.DATASET_CLASS.__name__) + os.makedirs(base_folder, exist_ok=True) + num_samples = 5 + data = np.concatenate( + [ + np.zeros((config["split_ratio"], num_samples, 64, 64)), + np.ones((self._NUM_FRAMES - config["split_ratio"], num_samples, 64, 64)), + ] + ) + np.save(os.path.join(base_folder, "mnist_test_seq.npy"), data) + return num_samples + + @datasets_utils.test_all_configs + def test_split(self, config): + with self.create_dataset(config) as (dataset, _): + if config["split"] == "train": + assert (dataset.data == 0).all() + elif config["split"] == "test": + assert (dataset.data == 1).all() + else: + assert dataset.data.size()[1] == self._NUM_FRAMES + + +class DatasetFolderTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.DatasetFolder + + _EXTENSIONS = ("jpg", "png") + + # DatasetFolder has two mutually exclusive parameters: 'extensions' and 'is_valid_file'. One of both is required. 
+ # We only iterate over different 'extensions' here and handle the tests for 'is_valid_file' in the + # 'test_is_valid_file()' method. + DEFAULT_CONFIG = dict(extensions=_EXTENSIONS) + ADDITIONAL_CONFIGS = combinations_grid(extensions=[(ext,) for ext in _EXTENSIONS]) + + def dataset_args(self, tmpdir, config): + return tmpdir, datasets.folder.pil_loader + + def inject_fake_data(self, tmpdir, config): + extensions = config["extensions"] or self._is_valid_file_to_extensions(config["is_valid_file"]) + + num_examples_total = 0 + classes = [] + for ext, cls in zip(self._EXTENSIONS, string.ascii_letters): + if ext not in extensions: + continue + + num_examples = torch.randint(1, 3, size=()).item() + datasets_utils.create_image_folder(tmpdir, cls, lambda idx: self._file_name_fn(cls, ext, idx), num_examples) + + num_examples_total += num_examples + classes.append(cls) + + if config.pop("make_empty_class", False): + os.makedirs(pathlib.Path(tmpdir) / "empty_class") + classes.append("empty_class") + + return dict(num_examples=num_examples_total, classes=classes) + + def _file_name_fn(self, cls, ext, idx): + return f"{cls}_{idx}.{ext}" + + def _is_valid_file_to_extensions(self, is_valid_file): + return {ext for ext in self._EXTENSIONS if is_valid_file(f"foo.{ext}")} + + @datasets_utils.test_all_configs + def test_is_valid_file(self, config): + extensions = config.pop("extensions") + # We need to explicitly pass extensions=None here or otherwise it would be filled by the value from the + # DEFAULT_CONFIG. + with self.create_dataset( + config, extensions=None, is_valid_file=lambda file: pathlib.Path(file).suffix[1:] in extensions + ) as (dataset, info): + assert len(dataset) == info["num_examples"] + + @datasets_utils.test_all_configs + def test_classes(self, config): + with self.create_dataset(config) as (dataset, info): + assert len(dataset.classes) == len(info["classes"]) + assert all([a == b for a, b in zip(dataset.classes, info["classes"])]) + + def test_allow_empty(self): + config = { + "extensions": self._EXTENSIONS, + "make_empty_class": True, + } + + config["allow_empty"] = True + with self.create_dataset(config) as (dataset, info): + assert "empty_class" in dataset.classes + assert len(dataset.classes) == len(info["classes"]) + assert all([a == b for a, b in zip(dataset.classes, info["classes"])]) + + config["allow_empty"] = False + with pytest.raises(FileNotFoundError, match="Found no valid file"): + with self.create_dataset(config) as (dataset, info): + pass + + +class ImageFolderTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.ImageFolder + + def inject_fake_data(self, tmpdir, config): + num_examples_total = 0 + classes = ("a", "b") + for cls in classes: + num_examples = torch.randint(1, 3, size=()).item() + num_examples_total += num_examples + + datasets_utils.create_image_folder(tmpdir, cls, lambda idx: f"{cls}_{idx}.png", num_examples) + + return dict(num_examples=num_examples_total, classes=classes) + + @datasets_utils.test_all_configs + def test_classes(self, config): + with self.create_dataset(config) as (dataset, info): + assert len(dataset.classes) == len(info["classes"]) + assert all([a == b for a, b in zip(dataset.classes, info["classes"])]) + + +class KittiTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.Kitti + FEATURE_TYPES = (PIL.Image.Image, (list, type(None))) # test split returns None as target + ADDITIONAL_CONFIGS = combinations_grid(train=(True, False)) + + def inject_fake_data(self, tmpdir, config): + kitti_dir = 
os.path.join(tmpdir, "Kitti", "raw") + os.makedirs(kitti_dir) + + split_to_num_examples = { + True: 1, + False: 2, + } + + # We need to create all folders(training and testing). + for is_training in (True, False): + num_examples = split_to_num_examples[is_training] + + datasets_utils.create_image_folder( + root=kitti_dir, + name=os.path.join("training" if is_training else "testing", "image_2"), + file_name_fn=lambda image_idx: f"{image_idx:06d}.png", + num_examples=num_examples, + ) + if is_training: + for image_idx in range(num_examples): + target_file_dir = os.path.join(kitti_dir, "training", "label_2") + os.makedirs(target_file_dir) + target_file_name = os.path.join(target_file_dir, f"{image_idx:06d}.txt") + target_contents = "Pedestrian 0.00 0 -0.20 712.40 143.00 810.73 307.92 1.89 0.48 1.20 1.84 1.47 8.41 0.01\n" # noqa + with open(target_file_name, "w") as target_file: + target_file.write(target_contents) + + return split_to_num_examples[config["train"]] + + def test_transforms_v2_wrapper_spawn(self): + expected_size = (123, 321) + with self.create_dataset(transform=v2.Resize(size=expected_size)) as (dataset, _): + datasets_utils.check_transforms_v2_wrapper_spawn(dataset, expected_size=expected_size) + + +class SvhnTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.SVHN + REQUIRED_PACKAGES = ("scipy",) + ADDITIONAL_CONFIGS = combinations_grid(split=("train", "test", "extra")) + + def inject_fake_data(self, tmpdir, config): + import scipy.io as sio + + split = config["split"] + num_examples = { + "train": 2, + "test": 3, + "extra": 4, + }.get(split) + + file = f"{split}_32x32.mat" + images = np.zeros((32, 32, 3, num_examples), dtype=np.uint8) + targets = np.zeros((num_examples,), dtype=np.uint8) + sio.savemat(os.path.join(tmpdir, file), {"X": images, "y": targets}) + return num_examples + + +class Places365TestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.Places365 + ADDITIONAL_CONFIGS = combinations_grid( + split=("train-standard", "train-challenge", "val"), + small=(False, True), + ) + _CATEGORIES = "categories_places365.txt" + # {split: file} + _FILE_LISTS = { + "train-standard": "places365_train_standard.txt", + "train-challenge": "places365_train_challenge.txt", + "val": "places365_val.txt", + } + # {(split, small): folder_name} + _IMAGES = { + ("train-standard", False): "data_large_standard", + ("train-challenge", False): "data_large_challenge", + ("val", False): "val_large", + ("train-standard", True): "data_256_standard", + ("train-challenge", True): "data_256_challenge", + ("val", True): "val_256", + } + # (class, idx) + _CATEGORIES_CONTENT = ( + ("/a/airfield", 0), + ("/a/apartment_building/outdoor", 8), + ("/b/badlands", 30), + ) + # (file, idx) + _FILE_LIST_CONTENT = ( + ("Places365_val_00000001.png", 0), + *((f"{category}/Places365_train_00000001.png", idx) for category, idx in _CATEGORIES_CONTENT), + ) + + @staticmethod + def _make_txt(root, name, seq): + file = os.path.join(root, name) + with open(file, "w") as fh: + for text, idx in seq: + fh.write(f"{text} {idx}\n") + + @staticmethod + def _make_categories_txt(root, name): + Places365TestCase._make_txt(root, name, Places365TestCase._CATEGORIES_CONTENT) + + @staticmethod + def _make_file_list_txt(root, name): + Places365TestCase._make_txt(root, name, Places365TestCase._FILE_LIST_CONTENT) + + @staticmethod + def _make_image(file_name, size): + os.makedirs(os.path.dirname(file_name), exist_ok=True) + PIL.Image.fromarray(np.zeros((*size, 3), dtype=np.uint8)).save(file_name) + 
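+    # The two helpers below mirror the layout of the real Places365 release: a devkit
+    # holding the category list and a per-split file list, and an image archive whose
+    # folder name is looked up from _IMAGES by the (split, small) combination.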
+ @staticmethod + def _make_devkit_archive(root, split): + Places365TestCase._make_categories_txt(root, Places365TestCase._CATEGORIES) + Places365TestCase._make_file_list_txt(root, Places365TestCase._FILE_LISTS[split]) + + @staticmethod + def _make_images_archive(root, split, small): + folder_name = Places365TestCase._IMAGES[(split, small)] + image_size = (256, 256) if small else (512, random.randint(512, 1024)) + files, idcs = zip(*Places365TestCase._FILE_LIST_CONTENT) + images = [f.lstrip("/").replace("/", os.sep) for f in files] + for image in images: + Places365TestCase._make_image(os.path.join(root, folder_name, image), image_size) + + return [(os.path.join(root, folder_name, image), idx) for image, idx in zip(images, idcs)] + + def inject_fake_data(self, tmpdir, config): + self._make_devkit_archive(tmpdir, config["split"]) + return len(self._make_images_archive(tmpdir, config["split"], config["small"])) + + def test_classes(self): + classes = list(map(lambda x: x[0], self._CATEGORIES_CONTENT)) + with self.create_dataset() as (dataset, _): + assert dataset.classes == classes + + def test_class_to_idx(self): + class_to_idx = dict(self._CATEGORIES_CONTENT) + with self.create_dataset() as (dataset, _): + assert dataset.class_to_idx == class_to_idx + + +class INaturalistTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.INaturalist + FEATURE_TYPES = (PIL.Image.Image, (int, tuple)) + + ADDITIONAL_CONFIGS = combinations_grid( + target_type=("kingdom", "full", "genus", ["kingdom", "phylum", "class", "order", "family", "genus", "full"]), + version=("2021_train",), + ) + + def inject_fake_data(self, tmpdir, config): + categories = [ + "00000_Akingdom_0phylum_Aclass_Aorder_Afamily_Agenus_Aspecies", + "00001_Akingdom_1phylum_Aclass_Border_Afamily_Bgenus_Aspecies", + "00002_Akingdom_2phylum_Cclass_Corder_Cfamily_Cgenus_Cspecies", + ] + + num_images_per_category = 3 + for category in categories: + datasets_utils.create_image_folder( + root=os.path.join(tmpdir, config["version"]), + name=category, + file_name_fn=lambda idx: f"image_{idx + 1:04d}.jpg", + num_examples=num_images_per_category, + ) + + return num_images_per_category * len(categories) + + def test_targets(self): + target_types = ["kingdom", "phylum", "class", "order", "family", "genus", "full"] + + with self.create_dataset(target_type=target_types, version="2021_valid") as (dataset, _): + items = [d[1] for d in dataset] + for i, item in enumerate(items): + assert dataset.category_name("kingdom", item[0]) == "Akingdom" + assert dataset.category_name("phylum", item[1]) == f"{i // 3}phylum" + assert item[6] == i // 3 + + +class LFWPeopleTestCase(datasets_utils.DatasetTestCase): + DATASET_CLASS = datasets.LFWPeople + FEATURE_TYPES = (PIL.Image.Image, int) + ADDITIONAL_CONFIGS = combinations_grid( + split=("10fold", "train", "test"), image_set=("original", "funneled", "deepfunneled") + ) + _IMAGES_DIR = {"original": "lfw", "funneled": "lfw_funneled", "deepfunneled": "lfw-deepfunneled"} + _file_id = {"10fold": "", "train": "DevTrain", "test": "DevTest"} + + def inject_fake_data(self, tmpdir, config): + tmpdir = pathlib.Path(tmpdir) / "lfw-py" + os.makedirs(tmpdir, exist_ok=True) + return dict( + num_examples=self._create_images_dir(tmpdir, self._IMAGES_DIR[config["image_set"]], config["split"]), + split=config["split"], + ) + + def _create_images_dir(self, root, idir, split): + idir = os.path.join(root, idir) + os.makedirs(idir, exist_ok=True) + n, flines = (10, ["10\n"]) if split == "10fold" else (1, []) + num_examples 
= 0 + names = [] + for _ in range(n): + num_people = random.randint(2, 5) + flines.append(f"{num_people}\n") + for i in range(num_people): + name = self._create_random_id() + no = random.randint(1, 10) + flines.append(f"{name}\t{no}\n") + names.append(f"{name}\t{no}\n") + datasets_utils.create_image_folder(idir, name, lambda n: f"{name}_{n+1:04d}.jpg", no, 250) + num_examples += no + with open(pathlib.Path(root) / f"people{self._file_id[split]}.txt", "w") as f: + f.writelines(flines) + with open(pathlib.Path(root) / "lfw-names.txt", "w") as f: + f.writelines(sorted(names)) + + return num_examples + + def _create_random_id(self): + part1 = datasets_utils.create_random_string(random.randint(5, 7)) + part2 = datasets_utils.create_random_string(random.randint(4, 7)) + return f"{part1}_{part2}" + + +class LFWPairsTestCase(LFWPeopleTestCase): + DATASET_CLASS = datasets.LFWPairs + FEATURE_TYPES = (PIL.Image.Image, PIL.Image.Image, int) + + def _create_images_dir(self, root, idir, split): + idir = os.path.join(root, idir) + os.makedirs(idir, exist_ok=True) + num_pairs = 7 # effectively 7*2*n = 14*n + n, self.flines = (10, [f"10\t{num_pairs}"]) if split == "10fold" else (1, [str(num_pairs)]) + for _ in range(n): + self._inject_pairs(idir, num_pairs, True) + self._inject_pairs(idir, num_pairs, False) + with open(pathlib.Path(root) / f"pairs{self._file_id[split]}.txt", "w") as f: + f.writelines(self.flines) + + return num_pairs * 2 * n + + def _inject_pairs(self, root, num_pairs, same): + for i in range(num_pairs): + name1 = self._create_random_id() + name2 = name1 if same else self._create_random_id() + no1, no2 = random.randint(1, 100), random.randint(1, 100) + if same: + self.flines.append(f"\n{name1}\t{no1}\t{no2}") + else: + self.flines.append(f"\n{name1}\t{no1}\t{name2}\t{no2}") + + datasets_utils.create_image_folder(root, name1, lambda _: f"{name1}_{no1:04d}.jpg", 1, 250) + datasets_utils.create_image_folder(root, name2, lambda _: f"{name2}_{no2:04d}.jpg", 1, 250) + + +class SintelTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.Sintel + ADDITIONAL_CONFIGS = combinations_grid(split=("train", "test"), pass_name=("clean", "final", "both")) + FEATURE_TYPES = (PIL.Image.Image, PIL.Image.Image, (np.ndarray, type(None))) + + FLOW_H, FLOW_W = 3, 4 + + def inject_fake_data(self, tmpdir, config): + root = pathlib.Path(tmpdir) / "Sintel" + + num_images_per_scene = 3 if config["split"] == "train" else 4 + num_scenes = 2 + + for split_dir in ("training", "test"): + for pass_name in ("clean", "final"): + image_root = root / split_dir / pass_name + + for scene_id in range(num_scenes): + scene_dir = image_root / f"scene_{scene_id}" + datasets_utils.create_image_folder( + image_root, + name=str(scene_dir), + file_name_fn=lambda image_idx: f"frame_000{image_idx}.png", + num_examples=num_images_per_scene, + ) + + flow_root = root / "training" / "flow" + for scene_id in range(num_scenes): + scene_dir = flow_root / f"scene_{scene_id}" + os.makedirs(scene_dir) + for i in range(num_images_per_scene - 1): + file_name = str(scene_dir / f"frame_000{i}.flo") + datasets_utils.make_fake_flo_file(h=self.FLOW_H, w=self.FLOW_W, file_name=file_name) + + # with e.g. num_images_per_scene = 3, for a single scene with have 3 images + # which are frame_0000, frame_0001 and frame_0002 + # They will be consecutively paired as (frame_0000, frame_0001), (frame_0001, frame_0002), + # that is 3 - 1 = 2 examples. 
Hence the formula below + num_passes = 2 if config["pass_name"] == "both" else 1 + num_examples = (num_images_per_scene - 1) * num_scenes * num_passes + return num_examples + + def test_flow(self): + # Make sure flow exists for train split, and make sure there are as many flow values as (pairs of) images + h, w = self.FLOW_H, self.FLOW_W + expected_flow = np.arange(2 * h * w).reshape(h, w, 2).transpose(2, 0, 1) + with self.create_dataset(split="train") as (dataset, _): + assert dataset._flow_list and len(dataset._flow_list) == len(dataset._image_list) + for _, _, flow in dataset: + assert flow.shape == (2, h, w) + np.testing.assert_allclose(flow, expected_flow) + + # Make sure flow is always None for test split + with self.create_dataset(split="test") as (dataset, _): + assert dataset._image_list and not dataset._flow_list + for _, _, flow in dataset: + assert flow is None + + def test_bad_input(self): + with pytest.raises(ValueError, match="Unknown value 'bad' for argument split"): + with self.create_dataset(split="bad"): + pass + + with pytest.raises(ValueError, match="Unknown value 'bad' for argument pass_name"): + with self.create_dataset(pass_name="bad"): + pass + + +class KittiFlowTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.KittiFlow + ADDITIONAL_CONFIGS = combinations_grid(split=("train", "test")) + FEATURE_TYPES = (PIL.Image.Image, PIL.Image.Image, (np.ndarray, type(None)), (np.ndarray, type(None))) + + def inject_fake_data(self, tmpdir, config): + root = pathlib.Path(tmpdir) / "KittiFlow" + + num_examples = 2 if config["split"] == "train" else 3 + for split_dir in ("training", "testing"): + + datasets_utils.create_image_folder( + root / split_dir, + name="image_2", + file_name_fn=lambda image_idx: f"{image_idx}_10.png", + num_examples=num_examples, + ) + datasets_utils.create_image_folder( + root / split_dir, + name="image_2", + file_name_fn=lambda image_idx: f"{image_idx}_11.png", + num_examples=num_examples, + ) + + # For kitti the ground truth flows are encoded as 16-bits pngs. + # create_image_folder() will actually create 8-bits pngs, but it doesn't + # matter much: the flow reader will still be able to read the files, it + # will just be garbage flow value - but we don't care about that here. 
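+        # (For reference: in the real 16-bit encoding each channel stores value * 64 + 2**15,
+        # i.e. flow = (png_value - 2**15) / 64, and ground truth exists for the training
+        # split only, which is why "flow_occ" is created under "training" here.)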
+ datasets_utils.create_image_folder( + root / "training", + name="flow_occ", + file_name_fn=lambda image_idx: f"{image_idx}_10.png", + num_examples=num_examples, + ) + + return num_examples + + def test_flow_and_valid(self): + # Make sure flow exists for train split, and make sure there are as many flow values as (pairs of) images + # Also assert flow and valid are of the expected shape + with self.create_dataset(split="train") as (dataset, _): + assert dataset._flow_list and len(dataset._flow_list) == len(dataset._image_list) + for _, _, flow, valid in dataset: + two, h, w = flow.shape + assert two == 2 + assert valid.shape == (h, w) + + # Make sure flow and valid are always None for test split + with self.create_dataset(split="test") as (dataset, _): + assert dataset._image_list and not dataset._flow_list + for _, _, flow, valid in dataset: + assert flow is None + assert valid is None + + def test_bad_input(self): + with pytest.raises(ValueError, match="Unknown value 'bad' for argument split"): + with self.create_dataset(split="bad"): + pass + + +class FlyingChairsTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.FlyingChairs + ADDITIONAL_CONFIGS = combinations_grid(split=("train", "val")) + FEATURE_TYPES = (PIL.Image.Image, PIL.Image.Image, (np.ndarray, type(None))) + + FLOW_H, FLOW_W = 3, 4 + + def _make_split_file(self, root, num_examples): + # We create a fake split file here, but users are asked to download the real one from the authors website + split_ids = [1] * num_examples["train"] + [2] * num_examples["val"] + random.shuffle(split_ids) + with open(str(root / "FlyingChairs_train_val.txt"), "w+") as split_file: + for split_id in split_ids: + split_file.write(f"{split_id}\n") + + def inject_fake_data(self, tmpdir, config): + root = pathlib.Path(tmpdir) / "FlyingChairs" + + num_examples = {"train": 5, "val": 3} + num_examples_total = sum(num_examples.values()) + + datasets_utils.create_image_folder( # img1 + root, + name="data", + file_name_fn=lambda image_idx: f"00{image_idx}_img1.ppm", + num_examples=num_examples_total, + ) + datasets_utils.create_image_folder( # img2 + root, + name="data", + file_name_fn=lambda image_idx: f"00{image_idx}_img2.ppm", + num_examples=num_examples_total, + ) + for i in range(num_examples_total): + file_name = str(root / "data" / f"00{i}_flow.flo") + datasets_utils.make_fake_flo_file(h=self.FLOW_H, w=self.FLOW_W, file_name=file_name) + + self._make_split_file(root, num_examples) + + return num_examples[config["split"]] + + @datasets_utils.test_all_configs + def test_flow(self, config): + # Make sure flow always exists, and make sure there are as many flow values as (pairs of) images + # Also make sure the flow is properly decoded + + h, w = self.FLOW_H, self.FLOW_W + expected_flow = np.arange(2 * h * w).reshape(h, w, 2).transpose(2, 0, 1) + with self.create_dataset(config=config) as (dataset, _): + assert dataset._flow_list and len(dataset._flow_list) == len(dataset._image_list) + for _, _, flow in dataset: + assert flow.shape == (2, h, w) + np.testing.assert_allclose(flow, expected_flow) + + +class FlyingThings3DTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.FlyingThings3D + ADDITIONAL_CONFIGS = combinations_grid( + split=("train", "test"), pass_name=("clean", "final", "both"), camera=("left", "right", "both") + ) + FEATURE_TYPES = (PIL.Image.Image, PIL.Image.Image, (np.ndarray, type(None))) + + FLOW_H, FLOW_W = 3, 4 + + def inject_fake_data(self, tmpdir, config): + root = pathlib.Path(tmpdir) / 
"FlyingThings3D" + + num_images_per_camera = 3 if config["split"] == "train" else 4 + passes = ("frames_cleanpass", "frames_finalpass") + splits = ("TRAIN", "TEST") + letters = ("A", "B", "C") + subfolders = ("0000", "0001") + cameras = ("left", "right") + for pass_name, split, letter, subfolder, camera in itertools.product( + passes, splits, letters, subfolders, cameras + ): + current_folder = root / pass_name / split / letter / subfolder + datasets_utils.create_image_folder( + current_folder, + name=camera, + file_name_fn=lambda image_idx: f"00{image_idx}.png", + num_examples=num_images_per_camera, + ) + + directions = ("into_future", "into_past") + for split, letter, subfolder, direction, camera in itertools.product( + splits, letters, subfolders, directions, cameras + ): + current_folder = root / "optical_flow" / split / letter / subfolder / direction / camera + os.makedirs(str(current_folder), exist_ok=True) + for i in range(num_images_per_camera): + datasets_utils.make_fake_pfm_file(self.FLOW_H, self.FLOW_W, file_name=str(current_folder / f"{i}.pfm")) + + num_cameras = 2 if config["camera"] == "both" else 1 + num_passes = 2 if config["pass_name"] == "both" else 1 + num_examples = ( + (num_images_per_camera - 1) * num_cameras * len(subfolders) * len(letters) * len(splits) * num_passes + ) + return num_examples + + @datasets_utils.test_all_configs + def test_flow(self, config): + h, w = self.FLOW_H, self.FLOW_W + expected_flow = np.arange(3 * h * w).reshape(h, w, 3).transpose(2, 0, 1) + expected_flow = np.flip(expected_flow, axis=1) + expected_flow = expected_flow[:2, :, :] + + with self.create_dataset(config=config) as (dataset, _): + assert dataset._flow_list and len(dataset._flow_list) == len(dataset._image_list) + for _, _, flow in dataset: + assert flow.shape == (2, self.FLOW_H, self.FLOW_W) + np.testing.assert_allclose(flow, expected_flow) + + def test_bad_input(self): + with pytest.raises(ValueError, match="Unknown value 'bad' for argument split"): + with self.create_dataset(split="bad"): + pass + + with pytest.raises(ValueError, match="Unknown value 'bad' for argument pass_name"): + with self.create_dataset(pass_name="bad"): + pass + + with pytest.raises(ValueError, match="Unknown value 'bad' for argument camera"): + with self.create_dataset(camera="bad"): + pass + + +class HD1KTestCase(KittiFlowTestCase): + DATASET_CLASS = datasets.HD1K + + def inject_fake_data(self, tmpdir, config): + root = pathlib.Path(tmpdir) / "hd1k" + + num_sequences = 4 if config["split"] == "train" else 3 + num_examples_per_train_sequence = 3 + + for seq_idx in range(num_sequences): + # Training data + datasets_utils.create_image_folder( + root / "hd1k_input", + name="image_2", + file_name_fn=lambda image_idx: f"{seq_idx:06d}_{image_idx}.png", + num_examples=num_examples_per_train_sequence, + ) + datasets_utils.create_image_folder( + root / "hd1k_flow_gt", + name="flow_occ", + file_name_fn=lambda image_idx: f"{seq_idx:06d}_{image_idx}.png", + num_examples=num_examples_per_train_sequence, + ) + + # Test data + datasets_utils.create_image_folder( + root / "hd1k_challenge", + name="image_2", + file_name_fn=lambda _: f"{seq_idx:06d}_10.png", + num_examples=1, + ) + datasets_utils.create_image_folder( + root / "hd1k_challenge", + name="image_2", + file_name_fn=lambda _: f"{seq_idx:06d}_11.png", + num_examples=1, + ) + + num_examples_per_sequence = num_examples_per_train_sequence if config["split"] == "train" else 2 + return num_sequences * (num_examples_per_sequence - 1) + + +class 
EuroSATTestCase(datasets_utils.ImageDatasetTestCase):
+    DATASET_CLASS = datasets.EuroSAT
+    FEATURE_TYPES = (PIL.Image.Image, int)
+
+    def inject_fake_data(self, tmpdir, config):
+        data_folder = os.path.join(tmpdir, "eurosat", "2750")
+        os.makedirs(data_folder)
+
+        num_examples_per_class = 3
+        classes = ("AnnualCrop", "Forest")
+        for cls in classes:
+            datasets_utils.create_image_folder(
+                root=data_folder,
+                name=cls,
+                file_name_fn=lambda idx: f"{cls}_{idx}.jpg",
+                num_examples=num_examples_per_class,
+            )
+
+        return len(classes) * num_examples_per_class
+
+
+class Food101TestCase(datasets_utils.ImageDatasetTestCase):
+    DATASET_CLASS = datasets.Food101
+    FEATURE_TYPES = (PIL.Image.Image, int)
+
+    ADDITIONAL_CONFIGS = combinations_grid(split=("train", "test"))
+
+    def inject_fake_data(self, tmpdir: str, config):
+        root_folder = pathlib.Path(tmpdir) / "food-101"
+        image_folder = root_folder / "images"
+        meta_folder = root_folder / "meta"
+
+        image_folder.mkdir(parents=True)
+        meta_folder.mkdir()
+
+        num_images_per_class = 5
+
+        metadata = {}
+        n_samples_per_class = 3 if config["split"] == "train" else 2
+        sampled_classes = ("apple_pie", "crab_cakes", "gyoza")
+        for cls in sampled_classes:
+            im_fnames = datasets_utils.create_image_folder(
+                image_folder,
+                cls,
+                file_name_fn=lambda idx: f"{idx}.jpg",
+                num_examples=num_images_per_class,
+            )
+            metadata[cls] = [
+                "/".join(fname.relative_to(image_folder).with_suffix("").parts)
+                for fname in random.choices(im_fnames, k=n_samples_per_class)
+            ]
+
+        with open(meta_folder / f"{config['split']}.json", "w") as file:
+            file.write(json.dumps(metadata))
+
+        return len(sampled_classes) * n_samples_per_class
+
+
+class FGVCAircraftTestCase(datasets_utils.ImageDatasetTestCase):
+    DATASET_CLASS = datasets.FGVCAircraft
+    ADDITIONAL_CONFIGS = combinations_grid(
+        split=("train", "val", "trainval", "test"), annotation_level=("variant", "family", "manufacturer")
+    )
+
+    def inject_fake_data(self, tmpdir: str, config):
+        split = config["split"]
+        annotation_level = config["annotation_level"]
+        annotation_level_to_file = {
+            "variant": "variants.txt",
+            "family": "families.txt",
+            "manufacturer": "manufacturers.txt",
+        }
+
+        root_folder = pathlib.Path(tmpdir) / "fgvc-aircraft-2013b"
+        data_folder = root_folder / "data"
+
+        classes = ["707-320", "Hawk T1", "Tornado"]
+        num_images_per_class = 5
+
+        datasets_utils.create_image_folder(
+            data_folder,
+            "images",
+            file_name_fn=lambda idx: f"{idx}.jpg",
+            num_examples=num_images_per_class * len(classes),
+        )
+
+        annotation_file = data_folder / annotation_level_to_file[annotation_level]
+        with open(annotation_file, "w") as file:
+            file.write("\n".join(classes))
+
+        num_samples_per_class = 4 if split == "trainval" else 2
+        images_classes = []
+        for i in range(len(classes)):
+            images_classes.extend(
+                [
+                    f"{idx} {classes[i]}"
+                    for idx in random.sample(
+                        range(i * num_images_per_class, (i + 1) * num_images_per_class), num_samples_per_class
+                    )
+                ]
+            )
+
+        images_annotation_file = data_folder / f"images_{annotation_level}_{split}.txt"
+        with open(images_annotation_file, "w") as file:
+            file.write("\n".join(images_classes))
+
+        return len(classes) * num_samples_per_class
+
+
+class SUN397TestCase(datasets_utils.ImageDatasetTestCase):
+    DATASET_CLASS = datasets.SUN397
+
+    def inject_fake_data(self, tmpdir: str, config):
+        data_dir = pathlib.Path(tmpdir) / "SUN397"
+        data_dir.mkdir()
+
+        num_images_per_class = 5
+        sampled_classes = ("abbey", "airplane_cabin", "airport_terminal")
+        im_paths = []
+
+        for cls in sampled_classes:
+            image_folder = data_dir / cls[0]
+            im_paths.extend(
+                datasets_utils.create_image_folder(
+                    image_folder,
+                    cls,
+                    file_name_fn=lambda idx: f"sun_{idx}.jpg",
+                    num_examples=num_images_per_class,
+                )
+            )
+
+        with open(data_dir / "ClassName.txt", "w") as file:
+            file.write("\n".join(f"/{cls[0]}/{cls}" for cls in sampled_classes))
+
+        num_samples = len(im_paths)
+
+        return num_samples
+
+
+class DTDTestCase(datasets_utils.ImageDatasetTestCase):
+    DATASET_CLASS = datasets.DTD
+    FEATURE_TYPES = (PIL.Image.Image, int)
+
+    ADDITIONAL_CONFIGS = combinations_grid(
+        split=("train", "test", "val"),
+        # There is no need to test the whole matrix here, since each fold is treated exactly the same
+        partition=(1, 5, 10),
+    )
+
+    def inject_fake_data(self, tmpdir: str, config):
+        data_folder = pathlib.Path(tmpdir) / "dtd" / "dtd"
+
+        num_images_per_class = 3
+        image_folder = data_folder / "images"
+        image_files = []
+        for cls in ("banded", "marbled", "zigzagged"):
+            image_files.extend(
+                datasets_utils.create_image_folder(
+                    image_folder,
+                    cls,
+                    file_name_fn=lambda idx: f"{cls}_{idx:04d}.jpg",
+                    num_examples=num_images_per_class,
+                )
+            )
+
+        meta_folder = data_folder / "labels"
+        meta_folder.mkdir()
+        image_ids = [str(path.relative_to(path.parents[1])).replace(os.sep, "/") for path in image_files]
+        image_ids_in_config = random.choices(image_ids, k=len(image_files) // 2)
+        with open(meta_folder / f"{config['split']}{config['partition']}.txt", "w") as file:
+            file.write("\n".join(image_ids_in_config) + "\n")
+
+        return len(image_ids_in_config)
+
+
+class FER2013TestCase(datasets_utils.ImageDatasetTestCase):
+    DATASET_CLASS = datasets.FER2013
+    ADDITIONAL_CONFIGS = combinations_grid(split=("train", "test"))
+
+    FEATURE_TYPES = (PIL.Image.Image, (int, type(None)))
+
+    def inject_fake_data(self, tmpdir, config):
+        base_folder = os.path.join(tmpdir, "fer2013")
+        os.makedirs(base_folder)
+
+        use_icml = config.pop("use_icml", False)
+        use_fer = config.pop("use_fer", False)
+
+        num_samples = 5
+
+        if use_icml or use_fer:
+            pixels_key, usage_key = (" pixels", " Usage") if use_icml else ("pixels", "Usage")
+            fieldnames = ("emotion", usage_key, pixels_key) if use_icml else ("emotion", pixels_key, usage_key)
+            filename = "icml_face_data.csv" if use_icml else "fer2013.csv"
+            with open(os.path.join(base_folder, filename), "w", newline="") as file:
+                writer = csv.DictWriter(
+                    file,
+                    fieldnames=fieldnames,
+                    quoting=csv.QUOTE_NONNUMERIC,
+                    quotechar='"',
+                )
+                writer.writeheader()
+                for i in range(num_samples):
+                    row = {
+                        "emotion": str(int(torch.randint(0, 7, ()))),
+                        usage_key: "Training" if i % 2 else "PublicTest",
+                        pixels_key: " ".join(
+                            str(pixel)
+                            for pixel in datasets_utils.create_image_or_video_tensor((48, 48)).view(-1).tolist()
+                        ),
+                    }
+
+                    writer.writerow(row)
+        else:
+            with open(os.path.join(base_folder, f"{config['split']}.csv"), "w", newline="") as file:
+                writer = csv.DictWriter(
+                    file,
+                    fieldnames=("emotion", "pixels") if config["split"] == "train" else ("pixels",),
+                    quoting=csv.QUOTE_NONNUMERIC,
+                    quotechar='"',
+                )
+                writer.writeheader()
+                for _ in range(num_samples):
+                    row = dict(
+                        pixels=" ".join(
+                            str(pixel)
+                            for pixel in datasets_utils.create_image_or_video_tensor((48, 48)).view(-1).tolist()
+                        )
+                    )
+                    if config["split"] == "train":
+                        row["emotion"] = str(int(torch.randint(0, 7, ())))
+
+                    writer.writerow(row)
+
+        return num_samples
+
+    def test_icml_file(self):
+        config = {"split": "test"}
+        with self.create_dataset(config=config) as (dataset, _):
+            
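+            # The basic {split}.csv files written above only carry an "emotion"
+            # column for the train split, so every target in the test split is None.
+            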
assert all(s[1] is None for s in dataset) + + for split in ("train", "test"): + for d in ({"use_icml": True}, {"use_fer": True}): + config = {"split": split, **d} + with self.create_dataset(config=config) as (dataset, _): + assert all(s[1] is not None for s in dataset) + + +class GTSRBTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.GTSRB + FEATURE_TYPES = (PIL.Image.Image, int) + + ADDITIONAL_CONFIGS = combinations_grid(split=("train", "test")) + + def inject_fake_data(self, tmpdir: str, config): + root_folder = os.path.join(tmpdir, "gtsrb") + os.makedirs(root_folder, exist_ok=True) + + # Train data + train_folder = os.path.join(root_folder, "GTSRB", "Training") + os.makedirs(train_folder, exist_ok=True) + + num_examples = 3 if config["split"] == "train" else 4 + classes = ("00000", "00042", "00012") + for class_idx in classes: + datasets_utils.create_image_folder( + train_folder, + name=class_idx, + file_name_fn=lambda image_idx: f"{class_idx}_{image_idx:05d}.ppm", + num_examples=num_examples, + ) + + total_number_of_examples = num_examples * len(classes) + # Test data + test_folder = os.path.join(root_folder, "GTSRB", "Final_Test", "Images") + os.makedirs(test_folder, exist_ok=True) + + with open(os.path.join(root_folder, "GT-final_test.csv"), "w") as csv_file: + csv_file.write("Filename;Width;Height;Roi.X1;Roi.Y1;Roi.X2;Roi.Y2;ClassId\n") + + for _ in range(total_number_of_examples): + image_file = datasets_utils.create_random_string(5, string.digits) + ".ppm" + datasets_utils.create_image_file(test_folder, image_file) + row = [ + image_file, + torch.randint(1, 100, size=()).item(), + torch.randint(1, 100, size=()).item(), + torch.randint(1, 100, size=()).item(), + torch.randint(1, 100, size=()).item(), + torch.randint(1, 100, size=()).item(), + torch.randint(1, 100, size=()).item(), + torch.randint(0, 43, size=()).item(), + ] + csv_file.write(";".join(map(str, row)) + "\n") + + return total_number_of_examples + + +class CLEVRClassificationTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.CLEVRClassification + FEATURE_TYPES = (PIL.Image.Image, (int, type(None))) + + ADDITIONAL_CONFIGS = combinations_grid(split=("train", "val", "test")) + + def inject_fake_data(self, tmpdir, config): + data_folder = pathlib.Path(tmpdir) / "clevr" / "CLEVR_v1.0" + + images_folder = data_folder / "images" + image_files = datasets_utils.create_image_folder( + images_folder, config["split"], lambda idx: f"CLEVR_{config['split']}_{idx:06d}.png", num_examples=5 + ) + + scenes_folder = data_folder / "scenes" + scenes_folder.mkdir() + if config["split"] != "test": + with open(scenes_folder / f"CLEVR_{config['split']}_scenes.json", "w") as file: + json.dump( + dict( + info=dict(), + scenes=[ + dict(image_filename=image_file.name, objects=[dict()] * int(torch.randint(10, ()))) + for image_file in image_files + ], + ), + file, + ) + + return len(image_files) + + +class OxfordIIITPetTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.OxfordIIITPet + FEATURE_TYPES = (PIL.Image.Image, (int, PIL.Image.Image, tuple, type(None))) + + ADDITIONAL_CONFIGS = combinations_grid( + split=("trainval", "test"), + target_types=("category", "binary-category", "segmentation", ["category", "segmentation"], []), + ) + + def inject_fake_data(self, tmpdir, config): + base_folder = os.path.join(tmpdir, "oxford-iiit-pet") + + classification_anns_meta = ( + dict(cls="Abyssinian", label=0, species="cat"), + dict(cls="Keeshond", label=18, species="dog"), + 
dict(cls="Yorkshire Terrier", label=37, species="dog"), + ) + split_and_classification_anns = [ + self._meta_to_split_and_classification_ann(meta, idx) + for meta, idx in itertools.product(classification_anns_meta, (1, 2, 10)) + ] + image_ids, *_ = zip(*split_and_classification_anns) + + image_files = datasets_utils.create_image_folder( + base_folder, "images", file_name_fn=lambda idx: f"{image_ids[idx]}.jpg", num_examples=len(image_ids) + ) + + anns_folder = os.path.join(base_folder, "annotations") + os.makedirs(anns_folder) + split_and_classification_anns_in_split = random.choices(split_and_classification_anns, k=len(image_ids) // 2) + with open(os.path.join(anns_folder, f"{config['split']}.txt"), "w", newline="") as file: + writer = csv.writer(file, delimiter=" ") + for split_and_classification_ann in split_and_classification_anns_in_split: + writer.writerow(split_and_classification_ann) + + segmentation_files = datasets_utils.create_image_folder( + anns_folder, "trimaps", file_name_fn=lambda idx: f"{image_ids[idx]}.png", num_examples=len(image_ids) + ) + + # The dataset has some rogue files + for path in image_files[:2]: + path.with_suffix(".mat").touch() + for path in segmentation_files: + path.with_name(f".{path.name}").touch() + + return len(split_and_classification_anns_in_split) + + def _meta_to_split_and_classification_ann(self, meta, idx): + image_id = "_".join( + [ + *[(str.title if meta["species"] == "cat" else str.lower)(part) for part in meta["cls"].split()], + str(idx), + ] + ) + class_id = str(meta["label"] + 1) + species = "1" if meta["species"] == "cat" else "2" + breed_id = "-1" + return (image_id, class_id, species, breed_id) + + def test_transforms_v2_wrapper_spawn(self): + expected_size = (123, 321) + with self.create_dataset(transform=v2.Resize(size=expected_size)) as (dataset, _): + datasets_utils.check_transforms_v2_wrapper_spawn(dataset, expected_size=expected_size) + + +class StanfordCarsTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.StanfordCars + REQUIRED_PACKAGES = ("scipy",) + ADDITIONAL_CONFIGS = combinations_grid(split=("train", "test")) + + def inject_fake_data(self, tmpdir, config): + import scipy.io as io + from numpy.core.records import fromarrays + + num_examples = {"train": 5, "test": 7}[config["split"]] + num_classes = 3 + base_folder = pathlib.Path(tmpdir) / "stanford_cars" + + devkit = base_folder / "devkit" + devkit.mkdir(parents=True) + + if config["split"] == "train": + images_folder_name = "cars_train" + annotations_mat_path = devkit / "cars_train_annos.mat" + else: + images_folder_name = "cars_test" + annotations_mat_path = base_folder / "cars_test_annos_withlabels.mat" + + datasets_utils.create_image_folder( + root=base_folder, + name=images_folder_name, + file_name_fn=lambda image_index: f"{image_index:5d}.jpg", + num_examples=num_examples, + ) + + classes = np.random.randint(1, num_classes + 1, num_examples, dtype=np.uint8) + fnames = [f"{i:5d}.jpg" for i in range(num_examples)] + rec_array = fromarrays( + [classes, fnames], + names=["class", "fname"], + ) + io.savemat(annotations_mat_path, {"annotations": rec_array}) + + random_class_names = ["random_name"] * num_classes + io.savemat(devkit / "cars_meta.mat", {"class_names": random_class_names}) + + return num_examples + + +class Country211TestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.Country211 + + ADDITIONAL_CONFIGS = combinations_grid(split=("train", "valid", "test")) + + def inject_fake_data(self, tmpdir: str, config): + 
split_folder = pathlib.Path(tmpdir) / "country211" / config["split"] + split_folder.mkdir(parents=True, exist_ok=True) + + num_examples = { + "train": 3, + "valid": 4, + "test": 5, + }[config["split"]] + + classes = ("AD", "BS", "GR") + for cls in classes: + datasets_utils.create_image_folder( + split_folder, + name=cls, + file_name_fn=lambda idx: f"{idx}.jpg", + num_examples=num_examples, + ) + + return num_examples * len(classes) + + +class Flowers102TestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.Flowers102 + + ADDITIONAL_CONFIGS = combinations_grid(split=("train", "val", "test")) + REQUIRED_PACKAGES = ("scipy",) + + def inject_fake_data(self, tmpdir: str, config): + base_folder = pathlib.Path(tmpdir) / "flowers-102" + + num_classes = 3 + num_images_per_split = dict(train=5, val=4, test=3) + num_images_total = sum(num_images_per_split.values()) + datasets_utils.create_image_folder( + base_folder, + "jpg", + file_name_fn=lambda idx: f"image_{idx + 1:05d}.jpg", + num_examples=num_images_total, + ) + + label_dict = dict( + labels=np.random.randint(1, num_classes + 1, size=(1, num_images_total), dtype=np.uint8), + ) + datasets_utils.lazy_importer.scipy.io.savemat(str(base_folder / "imagelabels.mat"), label_dict) + + setid_mat = np.arange(1, num_images_total + 1, dtype=np.uint16) + np.random.shuffle(setid_mat) + setid_dict = dict( + trnid=setid_mat[: num_images_per_split["train"]].reshape(1, -1), + valid=setid_mat[num_images_per_split["train"] : -num_images_per_split["test"]].reshape(1, -1), + tstid=setid_mat[-num_images_per_split["test"] :].reshape(1, -1), + ) + datasets_utils.lazy_importer.scipy.io.savemat(str(base_folder / "setid.mat"), setid_dict) + + return num_images_per_split[config["split"]] + + +class PCAMTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.PCAM + + ADDITIONAL_CONFIGS = combinations_grid(split=("train", "val", "test")) + REQUIRED_PACKAGES = ("h5py",) + + def inject_fake_data(self, tmpdir: str, config): + base_folder = pathlib.Path(tmpdir) / "pcam" + base_folder.mkdir() + + num_images = {"train": 2, "test": 3, "val": 4}[config["split"]] + + images_file = datasets.PCAM._FILES[config["split"]]["images"][0] + with datasets_utils.lazy_importer.h5py.File(str(base_folder / images_file), "w") as f: + f["x"] = np.random.randint(0, 256, size=(num_images, 10, 10, 3), dtype=np.uint8) + + targets_file = datasets.PCAM._FILES[config["split"]]["targets"][0] + with datasets_utils.lazy_importer.h5py.File(str(base_folder / targets_file), "w") as f: + f["y"] = np.random.randint(0, 2, size=(num_images, 1, 1, 1), dtype=np.uint8) + + return num_images + + +class RenderedSST2TestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.RenderedSST2 + ADDITIONAL_CONFIGS = combinations_grid(split=("train", "val", "test")) + SPLIT_TO_FOLDER = {"train": "train", "val": "valid", "test": "test"} + + def inject_fake_data(self, tmpdir: str, config): + root_folder = pathlib.Path(tmpdir) / "rendered-sst2" + image_folder = root_folder / self.SPLIT_TO_FOLDER[config["split"]] + + num_images_per_class = {"train": 5, "test": 6, "val": 7} + sampled_classes = ["positive", "negative"] + for cls in sampled_classes: + datasets_utils.create_image_folder( + image_folder, + cls, + file_name_fn=lambda idx: f"{idx}.png", + num_examples=num_images_per_class[config["split"]], + ) + + return len(sampled_classes) * num_images_per_class[config["split"]] + + +class Kitti2012StereoTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = 
datasets.Kitti2012Stereo + ADDITIONAL_CONFIGS = combinations_grid(split=("train", "test")) + FEATURE_TYPES = (PIL.Image.Image, PIL.Image.Image, (np.ndarray, type(None)), (np.ndarray, type(None))) + + def inject_fake_data(self, tmpdir, config): + kitti_dir = pathlib.Path(tmpdir) / "Kitti2012" + os.makedirs(kitti_dir, exist_ok=True) + + split_dir = kitti_dir / (config["split"] + "ing") + os.makedirs(split_dir, exist_ok=True) + + num_examples = {"train": 4, "test": 3}.get(config["split"], 0) + + datasets_utils.create_image_folder( + root=split_dir, + name="colored_0", + file_name_fn=lambda i: f"{i:06d}_10.png", + num_examples=num_examples, + size=(3, 100, 200), + ) + datasets_utils.create_image_folder( + root=split_dir, + name="colored_1", + file_name_fn=lambda i: f"{i:06d}_10.png", + num_examples=num_examples, + size=(3, 100, 200), + ) + + if config["split"] == "train": + datasets_utils.create_image_folder( + root=split_dir, + name="disp_noc", + file_name_fn=lambda i: f"{i:06d}.png", + num_examples=num_examples, + # Kitti2012 uses a single channel image for disparities + size=(1, 100, 200), + ) + + return num_examples + + def test_train_splits(self): + for split in ["train"]: + with self.create_dataset(split=split) as (dataset, _): + for left, right, disparity, mask in dataset: + assert mask is None + datasets_utils.shape_test_for_stereo(left, right, disparity) + + def test_test_split(self): + for split in ["test"]: + with self.create_dataset(split=split) as (dataset, _): + for left, right, disparity, mask in dataset: + assert mask is None + assert disparity is None + datasets_utils.shape_test_for_stereo(left, right) + + def test_bad_input(self): + with pytest.raises(ValueError, match="Unknown value 'bad' for argument split"): + with self.create_dataset(split="bad"): + pass + + +class Kitti2015StereoTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.Kitti2015Stereo + ADDITIONAL_CONFIGS = combinations_grid(split=("train", "test")) + FEATURE_TYPES = (PIL.Image.Image, PIL.Image.Image, (np.ndarray, type(None)), (np.ndarray, type(None))) + + def inject_fake_data(self, tmpdir, config): + kitti_dir = pathlib.Path(tmpdir) / "Kitti2015" + os.makedirs(kitti_dir, exist_ok=True) + + split_dir = kitti_dir / (config["split"] + "ing") + os.makedirs(split_dir, exist_ok=True) + + num_examples = {"train": 4, "test": 6}.get(config["split"], 0) + + datasets_utils.create_image_folder( + root=split_dir, + name="image_2", + file_name_fn=lambda i: f"{i:06d}_10.png", + num_examples=num_examples, + size=(3, 100, 200), + ) + datasets_utils.create_image_folder( + root=split_dir, + name="image_3", + file_name_fn=lambda i: f"{i:06d}_10.png", + num_examples=num_examples, + size=(3, 100, 200), + ) + + if config["split"] == "train": + datasets_utils.create_image_folder( + root=split_dir, + name="disp_occ_0", + file_name_fn=lambda i: f"{i:06d}.png", + num_examples=num_examples, + # Kitti2015 uses a single channel image for disparities + size=(1, 100, 200), + ) + + datasets_utils.create_image_folder( + root=split_dir, + name="disp_occ_1", + file_name_fn=lambda i: f"{i:06d}.png", + num_examples=num_examples, + # Kitti2015 uses a single channel image for disparities + size=(1, 100, 200), + ) + + return num_examples + + def test_train_splits(self): + for split in ["train"]: + with self.create_dataset(split=split) as (dataset, _): + for left, right, disparity, mask in dataset: + assert mask is None + datasets_utils.shape_test_for_stereo(left, right, disparity) + + def test_test_split(self): + for split in 
["test"]: + with self.create_dataset(split=split) as (dataset, _): + for left, right, disparity, mask in dataset: + assert mask is None + assert disparity is None + datasets_utils.shape_test_for_stereo(left, right) + + def test_bad_input(self): + with pytest.raises(ValueError, match="Unknown value 'bad' for argument split"): + with self.create_dataset(split="bad"): + pass + + +class CarlaStereoTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.CarlaStereo + FEATURE_TYPES = (PIL.Image.Image, PIL.Image.Image, (np.ndarray, None)) + + @staticmethod + def _create_scene_folders(num_examples: int, root_dir: Union[str, pathlib.Path]): + # make the root_dir if it does not exits + os.makedirs(root_dir, exist_ok=True) + + for i in range(num_examples): + scene_dir = pathlib.Path(root_dir) / f"scene_{i}" + os.makedirs(scene_dir, exist_ok=True) + # populate with left right images + datasets_utils.create_image_file(root=scene_dir, name="im0.png", size=(100, 100)) + datasets_utils.create_image_file(root=scene_dir, name="im1.png", size=(100, 100)) + datasets_utils.make_fake_pfm_file(100, 100, file_name=str(scene_dir / "disp0GT.pfm")) + datasets_utils.make_fake_pfm_file(100, 100, file_name=str(scene_dir / "disp1GT.pfm")) + + def inject_fake_data(self, tmpdir, config): + carla_dir = pathlib.Path(tmpdir) / "carla-highres" + os.makedirs(carla_dir, exist_ok=True) + + split_dir = pathlib.Path(carla_dir) / "trainingF" + os.makedirs(split_dir, exist_ok=True) + + num_examples = 6 + self._create_scene_folders(num_examples=num_examples, root_dir=split_dir) + + return num_examples + + def test_train_splits(self): + with self.create_dataset() as (dataset, _): + for left, right, disparity in dataset: + datasets_utils.shape_test_for_stereo(left, right, disparity) + + +class CREStereoTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.CREStereo + FEATURE_TYPES = (PIL.Image.Image, PIL.Image.Image, np.ndarray, type(None)) + + def inject_fake_data(self, tmpdir, config): + crestereo_dir = pathlib.Path(tmpdir) / "CREStereo" + os.makedirs(crestereo_dir, exist_ok=True) + + examples = {"tree": 2, "shapenet": 3, "reflective": 6, "hole": 5} + + for category_name in ["shapenet", "reflective", "tree", "hole"]: + split_dir = crestereo_dir / category_name + os.makedirs(split_dir, exist_ok=True) + num_examples = examples[category_name] + + for idx in range(num_examples): + datasets_utils.create_image_file(root=split_dir, name=f"{idx}_left.jpg", size=(100, 100)) + datasets_utils.create_image_file(root=split_dir, name=f"{idx}_right.jpg", size=(100, 100)) + # these are going to end up being gray scale images + datasets_utils.create_image_file(root=split_dir, name=f"{idx}_left.disp.png", size=(1, 100, 100)) + datasets_utils.create_image_file(root=split_dir, name=f"{idx}_right.disp.png", size=(1, 100, 100)) + + return sum(examples.values()) + + def test_splits(self): + with self.create_dataset() as (dataset, _): + for left, right, disparity, mask in dataset: + assert mask is None + datasets_utils.shape_test_for_stereo(left, right, disparity) + + +class FallingThingsStereoTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.FallingThingsStereo + ADDITIONAL_CONFIGS = combinations_grid(variant=("single", "mixed", "both")) + FEATURE_TYPES = (PIL.Image.Image, PIL.Image.Image, (np.ndarray, type(None))) + + @staticmethod + def _make_dummy_depth_map(root: str, name: str, size: Tuple[int, int]): + file = pathlib.Path(root) / name + image = np.ones((size[0], size[1]), dtype=np.uint8) + 
PIL.Image.fromarray(image).save(file) + + @staticmethod + def _make_scene_folder(root: str, scene_name: str, size: Tuple[int, int]) -> None: + root = pathlib.Path(root) / scene_name + os.makedirs(root, exist_ok=True) + # jpg images + datasets_utils.create_image_file(root, "image1.left.jpg", size=(3, size[1], size[0])) + datasets_utils.create_image_file(root, "image1.right.jpg", size=(3, size[1], size[0])) + # single channel depth maps + FallingThingsStereoTestCase._make_dummy_depth_map(root, "image1.left.depth.png", size=(size[0], size[1])) + FallingThingsStereoTestCase._make_dummy_depth_map(root, "image1.right.depth.png", size=(size[0], size[1])) + # camera settings json. Minimal example for _read_disparity function testing + settings_json = {"camera_settings": [{"intrinsic_settings": {"fx": 1}}]} + with open(root / "_camera_settings.json", "w") as f: + json.dump(settings_json, f) + + def inject_fake_data(self, tmpdir, config): + fallingthings_dir = pathlib.Path(tmpdir) / "FallingThings" + os.makedirs(fallingthings_dir, exist_ok=True) + + num_examples = {"single": 2, "mixed": 3, "both": 4}.get(config["variant"], 0) + + variants = { + "single": ["single"], + "mixed": ["mixed"], + "both": ["single", "mixed"], + }.get(config["variant"], []) + + variant_dir_prefixes = { + "single": 1, + "mixed": 0, + } + + for variant_name in variants: + variant_dir = pathlib.Path(fallingthings_dir) / variant_name + os.makedirs(variant_dir, exist_ok=True) + + for i in range(variant_dir_prefixes[variant_name]): + variant_dir = variant_dir / f"{i:02d}" + os.makedirs(variant_dir, exist_ok=True) + + for i in range(num_examples): + self._make_scene_folder( + root=variant_dir, + scene_name=f"scene_{i:06d}", + size=(100, 200), + ) + + if config["variant"] == "both": + num_examples *= 2 + return num_examples + + def test_splits(self): + for variant_name in ["single", "mixed"]: + with self.create_dataset(variant=variant_name) as (dataset, _): + for left, right, disparity in dataset: + datasets_utils.shape_test_for_stereo(left, right, disparity) + + def test_bad_input(self): + with pytest.raises(ValueError, match="Unknown value 'bad' for argument variant"): + with self.create_dataset(variant="bad"): + pass + + +class SceneFlowStereoTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.SceneFlowStereo + ADDITIONAL_CONFIGS = combinations_grid( + variant=("FlyingThings3D", "Driving", "Monkaa"), pass_name=("clean", "final", "both") + ) + FEATURE_TYPES = (PIL.Image.Image, PIL.Image.Image, (np.ndarray, type(None))) + + @staticmethod + def _create_pfm_folder( + root: str, name: str, file_name_fn: Callable[..., str], num_examples: int, size: Tuple[int, int] + ) -> None: + root = pathlib.Path(root) / name + os.makedirs(root, exist_ok=True) + + for i in range(num_examples): + datasets_utils.make_fake_pfm_file(size[0], size[1], root / file_name_fn(i)) + + def inject_fake_data(self, tmpdir, config): + scene_flow_dir = pathlib.Path(tmpdir) / "SceneFlow" + os.makedirs(scene_flow_dir, exist_ok=True) + + variant_dir = scene_flow_dir / config["variant"] + variant_dir_prefixes = { + "Monkaa": 0, + "Driving": 2, + "FlyingThings3D": 2, + } + os.makedirs(variant_dir, exist_ok=True) + + num_examples = {"FlyingThings3D": 4, "Driving": 6, "Monkaa": 5}.get(config["variant"], 0) + + passes = { + "clean": ["frames_cleanpass"], + "final": ["frames_finalpass"], + "both": ["frames_cleanpass", "frames_finalpass"], + }.get(config["pass_name"], []) + + for pass_dir_name in passes: + # create pass directories + pass_dir = 
variant_dir / pass_dir_name + disp_dir = variant_dir / "disparity" + os.makedirs(pass_dir, exist_ok=True) + os.makedirs(disp_dir, exist_ok=True) + + for i in range(variant_dir_prefixes.get(config["variant"], 0)): + pass_dir = pass_dir / str(i) + disp_dir = disp_dir / str(i) + os.makedirs(pass_dir, exist_ok=True) + os.makedirs(disp_dir, exist_ok=True) + + for direction in ["left", "right"]: + for scene_idx in range(num_examples): + os.makedirs(pass_dir / f"scene_{scene_idx:06d}", exist_ok=True) + datasets_utils.create_image_folder( + root=pass_dir / f"scene_{scene_idx:06d}", + name=direction, + file_name_fn=lambda i: f"{i:06d}.png", + num_examples=1, + size=(3, 200, 100), + ) + + os.makedirs(disp_dir / f"scene_{scene_idx:06d}", exist_ok=True) + self._create_pfm_folder( + root=disp_dir / f"scene_{scene_idx:06d}", + name=direction, + file_name_fn=lambda i: f"{i:06d}.pfm", + num_examples=1, + size=(100, 200), + ) + + if config["pass_name"] == "both": + num_examples *= 2 + return num_examples + + def test_splits(self): + for variant_name, pass_name in itertools.product(["FlyingThings3D", "Driving", "Monkaa"], ["clean", "final"]): + with self.create_dataset(variant=variant_name, pass_name=pass_name) as (dataset, _): + for left, right, disparity in dataset: + datasets_utils.shape_test_for_stereo(left, right, disparity) + + def test_bad_input(self): + with pytest.raises(ValueError, match="Unknown value 'bad' for argument variant"): + with self.create_dataset(variant="bad"): + pass + + +class InStereo2k(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.InStereo2k + FEATURE_TYPES = (PIL.Image.Image, PIL.Image.Image, (np.ndarray, type(None))) + ADDITIONAL_CONFIGS = combinations_grid(split=("train", "test")) + + @staticmethod + def _make_scene_folder(root: str, name: str, size: Tuple[int, int]): + root = pathlib.Path(root) / name + os.makedirs(root, exist_ok=True) + + datasets_utils.create_image_file(root=root, name="left.png", size=(3, size[0], size[1])) + datasets_utils.create_image_file(root=root, name="right.png", size=(3, size[0], size[1])) + datasets_utils.create_image_file(root=root, name="left_disp.png", size=(1, size[0], size[1])) + datasets_utils.create_image_file(root=root, name="right_disp.png", size=(1, size[0], size[1])) + + def inject_fake_data(self, tmpdir, config): + in_stereo_dir = pathlib.Path(tmpdir) / "InStereo2k" + os.makedirs(in_stereo_dir, exist_ok=True) + + split_dir = pathlib.Path(in_stereo_dir) / config["split"] + os.makedirs(split_dir, exist_ok=True) + + num_examples = {"train": 4, "test": 5}.get(config["split"], 0) + + for i in range(num_examples): + self._make_scene_folder(split_dir, f"scene_{i:06d}", (100, 200)) + + return num_examples + + def test_splits(self): + for split_name in ["train", "test"]: + with self.create_dataset(split=split_name) as (dataset, _): + for left, right, disparity in dataset: + datasets_utils.shape_test_for_stereo(left, right, disparity) + + def test_bad_input(self): + with pytest.raises( + ValueError, match="Unknown value 'bad' for argument split. Valid values are {'train', 'test'}." 
+        ):
+            with self.create_dataset(split="bad"):
+                pass
+
+
+class SintelStereoTestCase(datasets_utils.ImageDatasetTestCase):
+    DATASET_CLASS = datasets.SintelStereo
+    ADDITIONAL_CONFIGS = combinations_grid(pass_name=("final", "clean", "both"))
+    FEATURE_TYPES = (PIL.Image.Image, PIL.Image.Image, (np.ndarray, type(None)), (np.ndarray, type(None)))
+
+    def inject_fake_data(self, tmpdir, config):
+        sintel_dir = pathlib.Path(tmpdir) / "Sintel"
+        os.makedirs(sintel_dir, exist_ok=True)
+
+        split_dir = pathlib.Path(sintel_dir) / "training"
+        os.makedirs(split_dir, exist_ok=True)
+
+        # a single setting, since there are no splits
+        num_examples = {"final": 2, "clean": 3}
+        pass_names = {
+            "final": ["final"],
+            "clean": ["clean"],
+            "both": ["final", "clean"],
+        }.get(config["pass_name"], [])
+
+        for p in pass_names:
+            for view in [f"{p}_left", f"{p}_right"]:
+                root = split_dir / view
+                os.makedirs(root, exist_ok=True)
+
+                datasets_utils.create_image_folder(
+                    root=root,
+                    name="scene1",
+                    file_name_fn=lambda i: f"{i:06d}.png",
+                    num_examples=num_examples[p],
+                    size=(3, 100, 200),
+                )
+
+        datasets_utils.create_image_folder(
+            root=split_dir / "occlusions",
+            name="scene1",
+            file_name_fn=lambda i: f"{i:06d}.png",
+            num_examples=max(num_examples.values()),
+            size=(1, 100, 200),
+        )
+
+        datasets_utils.create_image_folder(
+            root=split_dir / "outofframe",
+            name="scene1",
+            file_name_fn=lambda i: f"{i:06d}.png",
+            num_examples=max(num_examples.values()),
+            size=(1, 100, 200),
+        )
+
+        datasets_utils.create_image_folder(
+            root=split_dir / "disparities",
+            name="scene1",
+            file_name_fn=lambda i: f"{i:06d}.png",
+            num_examples=max(num_examples.values()),
+            size=(3, 100, 200),
+        )
+
+        if config["pass_name"] == "both":
+            num_examples = sum(num_examples.values())
+        else:
+            num_examples = num_examples.get(config["pass_name"], 0)
+
+        return num_examples
+
+    def test_splits(self):
+        for pass_name in ["final", "clean", "both"]:
+            with self.create_dataset(pass_name=pass_name) as (dataset, _):
+                for left, right, disparity, valid_mask in dataset:
+                    datasets_utils.shape_test_for_stereo(left, right, disparity, valid_mask)
+
+    def test_bad_input(self):
+        with pytest.raises(ValueError, match="Unknown value 'bad' for argument pass_name"):
+            with self.create_dataset(pass_name="bad"):
+                pass
+
+
+class ETH3DStereoTestCase(datasets_utils.ImageDatasetTestCase):
+    DATASET_CLASS = datasets.ETH3DStereo
+    ADDITIONAL_CONFIGS = combinations_grid(split=("train", "test"))
+    FEATURE_TYPES = (PIL.Image.Image, PIL.Image.Image, (np.ndarray, type(None)), (np.ndarray, type(None)))
+
+    @staticmethod
+    def _create_scene_folder(num_examples: int, root_dir: str):
+        # make the root_dir if it does not exist
+        root_dir = pathlib.Path(root_dir)
+        os.makedirs(root_dir, exist_ok=True)
+
+        for i in range(num_examples):
+            scene_dir = root_dir / f"scene_{i}"
+            os.makedirs(scene_dir, exist_ok=True)
+            # populate with left right images
+            datasets_utils.create_image_file(root=scene_dir, name="im0.png", size=(100, 100))
+            datasets_utils.create_image_file(root=scene_dir, name="im1.png", size=(100, 100))
+
+    @staticmethod
+    def _create_annotation_folder(num_examples: int, root_dir: str):
+        # make the root_dir if it does not exist
+        root_dir = pathlib.Path(root_dir)
+        os.makedirs(root_dir, exist_ok=True)
+
+        # create scene directories
+        for i in range(num_examples):
+            scene_dir = root_dir / f"scene_{i}"
+            os.makedirs(scene_dir, exist_ok=True)
+            # populate with a random png file for occlusion mask, and a pfm file for disparity
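+            # (PFM is a small float-image format - a text header followed by raw
+            # floats - which is why make_fake_pfm_file is used instead of an image
+            # helper.)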
+            datasets_utils.create_image_file(root=scene_dir, name="mask0nocc.png", size=(1, 100, 100))
+
+            pfm_path = scene_dir / "disp0GT.pfm"
+            datasets_utils.make_fake_pfm_file(h=100, w=100, file_name=pfm_path)
+
+    def inject_fake_data(self, tmpdir, config):
+        eth3d_dir = pathlib.Path(tmpdir) / "ETH3D"
+
+        num_examples = 2 if config["split"] == "train" else 3
+
+        split_name = "two_view_training" if config["split"] == "train" else "two_view_test"
+        split_dir = eth3d_dir / split_name
+        self._create_scene_folder(num_examples, split_dir)
+
+        if config["split"] == "train":
+            annot_dir = eth3d_dir / "two_view_training_gt"
+            self._create_annotation_folder(num_examples, annot_dir)
+
+        return num_examples
+
+    def test_training_splits(self):
+        with self.create_dataset(split="train") as (dataset, _):
+            for left, right, disparity, valid_mask in dataset:
+                datasets_utils.shape_test_for_stereo(left, right, disparity, valid_mask)
+
+    def test_testing_splits(self):
+        with self.create_dataset(split="test") as (dataset, _):
+            assert all(d == (None, None) for d in dataset._disparities)
+            for left, right, disparity, valid_mask in dataset:
+                assert valid_mask is None
+                datasets_utils.shape_test_for_stereo(left, right, disparity)
+
+    def test_bad_input(self):
+        with pytest.raises(ValueError, match="Unknown value 'bad' for argument split"):
+            with self.create_dataset(split="bad"):
+                pass
+
+
+class Middlebury2014StereoTestCase(datasets_utils.ImageDatasetTestCase):
+    DATASET_CLASS = datasets.Middlebury2014Stereo
+    ADDITIONAL_CONFIGS = combinations_grid(
+        split=("train", "additional"),
+        calibration=("perfect", "imperfect", "both"),
+        use_ambient_views=(True, False),
+    )
+    FEATURE_TYPES = (PIL.Image.Image, PIL.Image.Image, (np.ndarray, type(None)), (np.ndarray, type(None)))
+
+    @staticmethod
+    def _make_scene_folder(root_dir: str, scene_name: str, split: str) -> None:
+        calibrations = [""] if split == "test" else ["-perfect", "-imperfect"]
+        root_dir = pathlib.Path(root_dir)
+
+        for c in calibrations:
+            scene_dir = root_dir / f"{scene_name}{c}"
+            os.makedirs(scene_dir, exist_ok=True)
+            # make normal images first
+            datasets_utils.create_image_file(root=scene_dir, name="im0.png", size=(3, 100, 100))
+            datasets_utils.create_image_file(root=scene_dir, name="im1.png", size=(3, 100, 100))
+            datasets_utils.create_image_file(root=scene_dir, name="im1E.png", size=(3, 100, 100))
+            datasets_utils.create_image_file(root=scene_dir, name="im1L.png", size=(3, 100, 100))
+            # the disparity maps are stored as pfm files
+            datasets_utils.make_fake_pfm_file(h=100, w=100, file_name=scene_dir / "disp0.pfm")
+            datasets_utils.make_fake_pfm_file(h=100, w=100, file_name=scene_dir / "disp1.pfm")
+
+    def inject_fake_data(self, tmpdir, config):
+        split_scene_map = {
+            "train": ["Adirondack", "Jadeplant", "Motorcycle", "Piano"],
+            "additional": ["Backpack", "Bicycle1", "Cable", "Classroom1"],
+            "test": ["Plants", "Classroom2E", "Classroom2", "Australia"],
+        }
+
+        middlebury_dir = pathlib.Path(tmpdir, "Middlebury2014")
+        os.makedirs(middlebury_dir, exist_ok=True)
+
+        split_dir = middlebury_dir / config["split"]
+        os.makedirs(split_dir, exist_ok=True)
+
+        num_examples = {"train": 2, "additional": 3, "test": 4}.get(config["split"], 0)
+        for idx in range(num_examples):
+            scene_name = split_scene_map[config["split"]][idx]
+            self._make_scene_folder(root_dir=split_dir, scene_name=scene_name, split=config["split"])
+
+        if config["calibration"] == "both":
+            num_examples *= 2
+        return num_examples
+
+    def test_train_splits(self):
+        for split, 
calibration in itertools.product(["train", "additional"], ["perfect", "imperfect", "both"]): + with self.create_dataset(split=split, calibration=calibration) as (dataset, _): + for left, right, disparity, mask in dataset: + datasets_utils.shape_test_for_stereo(left, right, disparity, mask) + + def test_test_split(self): + for split in ["test"]: + with self.create_dataset(split=split, calibration=None) as (dataset, _): + for left, right, disparity, mask in dataset: + datasets_utils.shape_test_for_stereo(left, right) + + def test_augmented_view_usage(self): + with self.create_dataset(split="train", use_ambient_views=True) as (dataset, _): + for left, right, disparity, mask in dataset: + datasets_utils.shape_test_for_stereo(left, right, disparity, mask) + + def test_value_err_train(self): + # train set invalid + split = "train" + calibration = None + with pytest.raises( + ValueError, + match=f"Split '{split}' has calibration settings, however None was provided as an argument." + f"\nSetting calibration to 'perfect' for split '{split}'. Available calibration settings are: 'perfect', 'imperfect', 'both'.", + ): + with self.create_dataset(split=split, calibration=calibration): + pass + + def test_value_err_test(self): + # test set invalid + split = "test" + calibration = "perfect" + with pytest.raises( + ValueError, match="Split 'test' has only no calibration settings, please set `calibration=None`." + ): + with self.create_dataset(split=split, calibration=calibration): + pass + + def test_bad_input(self): + with pytest.raises(ValueError, match="Unknown value 'bad' for argument split"): + with self.create_dataset(split="bad"): + pass + + +class ImagenetteTestCase(datasets_utils.ImageDatasetTestCase): + DATASET_CLASS = datasets.Imagenette + ADDITIONAL_CONFIGS = combinations_grid(split=["train", "val"], size=["full", "320px", "160px"]) + + _WNIDS = [ + "n01440764", + "n02102040", + "n02979186", + "n03000684", + "n03028079", + "n03394916", + "n03417042", + "n03425413", + "n03445777", + "n03888257", + ] + + def inject_fake_data(self, tmpdir, config): + archive_root = "imagenette2" + if config["size"] != "full": + archive_root += f"-{config['size'].replace('px', '')}" + image_root = pathlib.Path(tmpdir) / archive_root / config["split"] + + num_images_per_class = 3 + for wnid in self._WNIDS: + datasets_utils.create_image_folder( + root=image_root, + name=wnid, + file_name_fn=lambda idx: f"{wnid}_{idx}.JPEG", + num_examples=num_images_per_class, + ) + + return num_images_per_class * len(self._WNIDS) + + +class TestDatasetWrapper: + def test_unknown_type(self): + unknown_object = object() + with pytest.raises( + TypeError, match=re.escape("is meant for subclasses of `torchvision.datasets.VisionDataset`") + ): + datasets.wrap_dataset_for_transforms_v2(unknown_object) + + def test_unknown_dataset(self): + class MyVisionDataset(datasets.VisionDataset): + pass + + dataset = MyVisionDataset("root") + + with pytest.raises(TypeError, match="No wrapper exist"): + datasets.wrap_dataset_for_transforms_v2(dataset) + + def test_missing_wrapper(self): + dataset = datasets.FakeData() + + with pytest.raises(TypeError, match="please open an issue"): + datasets.wrap_dataset_for_transforms_v2(dataset) + + def test_subclass(self, mocker): + from torchvision import tv_tensors + + sentinel = object() + mocker.patch.dict( + tv_tensors._dataset_wrapper.WRAPPER_FACTORIES, + clear=False, + values={datasets.FakeData: lambda dataset, target_keys: lambda idx, sample: sentinel}, + ) + + class MyFakeData(datasets.FakeData): + pass + 
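+        # Wrapping a subclass should fall back to the factory registered for its
+        # parent class - hence the sentinel patched in for FakeData above.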
+ dataset = MyFakeData() + wrapped_dataset = datasets.wrap_dataset_for_transforms_v2(dataset) + + assert wrapped_dataset[0] is sentinel + + +if __name__ == "__main__": unittest.main() diff --git a/test/test_datasets_download.py b/test/test_datasets_download.py new file mode 100644 index 00000000000..856a02b9d44 --- /dev/null +++ b/test/test_datasets_download.py @@ -0,0 +1,388 @@ +import contextlib +import itertools +import shutil +import tempfile +import time +import traceback +import unittest.mock +import warnings +from datetime import datetime +from os import path +from urllib.error import HTTPError, URLError +from urllib.parse import urlparse +from urllib.request import Request, urlopen + +import pytest +from torchvision import datasets +from torchvision.datasets.utils import _get_redirect_url, USER_AGENT + + +def limit_requests_per_time(min_secs_between_requests=2.0): + last_requests = {} + + def outer_wrapper(fn): + def inner_wrapper(request, *args, **kwargs): + url = request.full_url if isinstance(request, Request) else request + + netloc = urlparse(url).netloc + last_request = last_requests.get(netloc) + if last_request is not None: + elapsed_secs = (datetime.now() - last_request).total_seconds() + delta = min_secs_between_requests - elapsed_secs + if delta > 0: + time.sleep(delta) + + response = fn(request, *args, **kwargs) + last_requests[netloc] = datetime.now() + + return response + + return inner_wrapper + + return outer_wrapper + + +urlopen = limit_requests_per_time()(urlopen) + + +def resolve_redirects(max_hops=3): + def outer_wrapper(fn): + def inner_wrapper(request, *args, **kwargs): + initial_url = request.full_url if isinstance(request, Request) else request + url = _get_redirect_url(initial_url, max_hops=max_hops) + + if url == initial_url: + return fn(request, *args, **kwargs) + + warnings.warn(f"The URL {initial_url} ultimately redirects to {url}.") + + if not isinstance(request, Request): + return fn(url, *args, **kwargs) + + request_attrs = { + attr: getattr(request, attr) for attr in ("data", "headers", "origin_req_host", "unverifiable") + } + # the 'method' attribute does only exist if the request was created with it + if hasattr(request, "method"): + request_attrs["method"] = request.method + + return fn(Request(url, **request_attrs), *args, **kwargs) + + return inner_wrapper + + return outer_wrapper + + +urlopen = resolve_redirects()(urlopen) + + +@contextlib.contextmanager +def log_download_attempts( + urls, + *, + dataset_module, +): + def maybe_add_mock(*, module, name, stack, lst=None): + patcher = unittest.mock.patch(f"torchvision.datasets.{module}.{name}") + + try: + mock = stack.enter_context(patcher) + except AttributeError: + return + + if lst is not None: + lst.append(mock) + + with contextlib.ExitStack() as stack: + download_url_mocks = [] + download_file_from_google_drive_mocks = [] + for module in [dataset_module, "utils"]: + maybe_add_mock(module=module, name="download_url", stack=stack, lst=download_url_mocks) + maybe_add_mock( + module=module, + name="download_file_from_google_drive", + stack=stack, + lst=download_file_from_google_drive_mocks, + ) + maybe_add_mock(module=module, name="extract_archive", stack=stack) + + try: + yield + finally: + for download_url_mock in download_url_mocks: + for args, kwargs in download_url_mock.call_args_list: + urls.append(args[0] if args else kwargs["url"]) + + for download_file_from_google_drive_mock in download_file_from_google_drive_mocks: + for args, kwargs in 
download_file_from_google_drive_mock.call_args_list:
+                file_id = args[0] if args else kwargs["file_id"]
+                urls.append(f"https://drive.google.com/file/d/{file_id}")
+
+
+def retry(fn, times=1, wait=5.0):
+    tbs = []
+    for _ in range(times + 1):
+        try:
+            return fn()
+        except AssertionError as error:
+            tbs.append("".join(traceback.format_exception(type(error), error, error.__traceback__)))
+            time.sleep(wait)
+    else:
+        raise AssertionError(
+            "\n".join(
+                (
+                    "\n",
+                    *[f"{'_' * 40} {idx:2d} {'_' * 40}\n\n{tb}" for idx, tb in enumerate(tbs, 1)],
+                    (
+                        f"Assertion failed {times + 1} times with {wait:.1f} seconds intermediate wait time. "
+                        f"You can find the full tracebacks above."
+                    ),
+                )
+            )
+        )
+
+
+@contextlib.contextmanager
+def assert_server_response_ok():
+    try:
+        yield
+    except HTTPError as error:
+        raise AssertionError(f"The server returned {error.code}: {error.reason}.") from error
+    except URLError as error:
+        raise AssertionError(
+            "Connection not possible due to SSL." if "SSL" in str(error) else "The request timed out."
+        ) from error
+    except RecursionError as error:
+        raise AssertionError(str(error)) from error
+
+
+def assert_url_is_accessible(url, timeout=5.0):
+    request = Request(url, headers={"User-Agent": USER_AGENT}, method="HEAD")
+    with assert_server_response_ok():
+        urlopen(request, timeout=timeout)
+
+
+def collect_urls(dataset_cls, *args, **kwargs):
+    urls = []
+    with contextlib.suppress(Exception), log_download_attempts(
+        urls, dataset_module=dataset_cls.__module__.split(".")[-1]
+    ):
+        dataset_cls(*args, **kwargs)
+
+    return [(url, f"{dataset_cls.__name__}, {url}") for url in urls]
+
+
+# This is a workaround since fixtures, such as the built-in tmpdir, can only be used within a test but not within a
+# parametrization. Thus, we use a single root directory for all datasets and remove it when all download tests are run.
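+# (Parametrization happens at collection time, before any fixture can run, so the
+# directory has to exist at import time; the module-scoped fixture below merely
+# takes care of the cleanup.)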
+ROOT = tempfile.mkdtemp() + + +@pytest.fixture(scope="module", autouse=True) +def root(): + yield ROOT + shutil.rmtree(ROOT) + + +def places365(): + return itertools.chain.from_iterable( + [ + collect_urls( + datasets.Places365, + ROOT, + split=split, + small=small, + download=True, + ) + for split, small in itertools.product(("train-standard", "train-challenge", "val"), (False, True)) + ] + ) + + +def caltech101(): + return collect_urls(datasets.Caltech101, ROOT, download=True) + + +def caltech256(): + return collect_urls(datasets.Caltech256, ROOT, download=True) + + +def cifar10(): + return collect_urls(datasets.CIFAR10, ROOT, download=True) + + +def cifar100(): + return collect_urls(datasets.CIFAR100, ROOT, download=True) + + +def voc(): + # TODO: Also test the "2007-test" key + return itertools.chain.from_iterable( + [ + collect_urls(datasets.VOCSegmentation, ROOT, year=year, download=True) + for year in ("2007", "2008", "2009", "2010", "2011", "2012") + ] + ) + + +def mnist(): + with unittest.mock.patch.object(datasets.MNIST, "mirrors", datasets.MNIST.mirrors[-1:]): + return collect_urls(datasets.MNIST, ROOT, download=True) + + +def fashion_mnist(): + return collect_urls(datasets.FashionMNIST, ROOT, download=True) + + +def kmnist(): + return collect_urls(datasets.KMNIST, ROOT, download=True) + + +def emnist(): + # the 'split' argument can be any valid one, since everything is downloaded anyway + return collect_urls(datasets.EMNIST, ROOT, split="byclass", download=True) + + +def qmnist(): + return itertools.chain.from_iterable( + [collect_urls(datasets.QMNIST, ROOT, what=what, download=True) for what in ("train", "test", "nist")] + ) + + +def moving_mnist(): + return collect_urls(datasets.MovingMNIST, ROOT, download=True) + + +def omniglot(): + return itertools.chain.from_iterable( + [collect_urls(datasets.Omniglot, ROOT, background=background, download=True) for background in (True, False)] + ) + + +def phototour(): + return itertools.chain.from_iterable( + [ + collect_urls(datasets.PhotoTour, ROOT, name=name, download=True) + # The names postfixed with '_harris' point to the domain 'matthewalunbrown.com'. For some reason all + # requests timeout from within CI. They are disabled until this is resolved. 
+ for name in ("notredame", "yosemite", "liberty") # "notredame_harris", "yosemite_harris", "liberty_harris" + ] + ) + + +def sbdataset(): + return collect_urls(datasets.SBDataset, ROOT, download=True) + + +def sbu(): + return collect_urls(datasets.SBU, ROOT, download=True) + + +def semeion(): + return collect_urls(datasets.SEMEION, ROOT, download=True) + + +def stl10(): + return collect_urls(datasets.STL10, ROOT, download=True) + + +def svhn(): + return itertools.chain.from_iterable( + [collect_urls(datasets.SVHN, ROOT, split=split, download=True) for split in ("train", "test", "extra")] + ) + + +def usps(): + return itertools.chain.from_iterable( + [collect_urls(datasets.USPS, ROOT, train=train, download=True) for train in (True, False)] + ) + + +def celeba(): + return collect_urls(datasets.CelebA, ROOT, download=True) + + +def widerface(): + return collect_urls(datasets.WIDERFace, ROOT, download=True) + + +def kinetics(): + return itertools.chain.from_iterable( + [ + collect_urls( + datasets.Kinetics, + path.join(ROOT, f"Kinetics{num_classes}"), + frames_per_clip=1, + num_classes=num_classes, + split=split, + download=True, + ) + for num_classes, split in itertools.product(("400", "600", "700"), ("train", "val")) + ] + ) + + +def kitti(): + return itertools.chain.from_iterable( + [collect_urls(datasets.Kitti, ROOT, train=train, download=True) for train in (True, False)] + ) + + +def url_parametrization(*dataset_urls_and_ids_fns): + return pytest.mark.parametrize( + "url", + [ + pytest.param(url, id=id) + for dataset_urls_and_ids_fn in dataset_urls_and_ids_fns + for url, id in sorted(set(dataset_urls_and_ids_fn())) + ], + ) + + +@url_parametrization( + caltech101, + caltech256, + cifar10, + cifar100, + # The VOC download server is unstable. See https://github.com/pytorch/vision/issues/2953 for details. + # voc, + mnist, + fashion_mnist, + kmnist, + emnist, + qmnist, + omniglot, + phototour, + sbdataset, + semeion, + stl10, + svhn, + usps, + celeba, + widerface, + kinetics, + kitti, + places365, + sbu, +) +def test_url_is_accessible(url): + """ + If you see this test failing, find the offending dataset in the parametrization and move it to + ``test_url_is_not_accessible`` and link an issue detailing the problem. + """ + retry(lambda: assert_url_is_accessible(url)) + + +# TODO: if e.g. caltech101 starts failing, remove the pytest.mark.parametrize below and use +# @url_parametrization(caltech101) +@pytest.mark.parametrize("url", ("http://url_that_doesnt_exist.com",)) # here until we actually have a failing dataset +@pytest.mark.xfail +def test_url_is_not_accessible(url): + """ + As the name implies, this test is the 'inverse' of ``test_url_is_accessible``. Since the download servers are + beyond our control, some files might not be accessible for longer stretches of time. Still, we want to know if they + come back up, or if we need to remove the download functionality of the dataset for good. + + If you see this test failing, find the offending dataset in the parametrization and move it to + ``test_url_is_accessible``. 
+ """ + assert_url_is_accessible(url) diff --git a/test/test_datasets_samplers.py b/test/test_datasets_samplers.py index 90f3f3806aa..9e3826b2c13 100644 --- a/test/test_datasets_samplers.py +++ b/test/test_datasets_samplers.py @@ -1,117 +1,86 @@ -import contextlib -import sys -import os +import pytest import torch -import unittest - +from common_utils import assert_equal, get_list_of_videos from torchvision import io -from torchvision.datasets.samplers import ( - DistributedSampler, - RandomClipSampler, - UniformClipSampler, -) -from torchvision.datasets.video_utils import VideoClips, unfold -from torchvision import get_video_backend - -from common_utils import get_tmp_dir - - -@contextlib.contextmanager -def get_list_of_videos(num_videos=5, sizes=None, fps=None): - with get_tmp_dir() as tmp_dir: - names = [] - for i in range(num_videos): - if sizes is None: - size = 5 * (i + 1) - else: - size = sizes[i] - if fps is None: - f = 5 - else: - f = fps[i] - data = torch.randint(0, 255, (size, 300, 400, 3), dtype=torch.uint8) - name = os.path.join(tmp_dir, "{}.mp4".format(i)) - names.append(name) - io.write_video(name, data, fps=f) - - yield names +from torchvision.datasets.samplers import DistributedSampler, RandomClipSampler, UniformClipSampler +from torchvision.datasets.video_utils import VideoClips -@unittest.skipIf(not io.video._av_available(), "this test requires av") -class Tester(unittest.TestCase): - def test_random_clip_sampler(self): - with get_list_of_videos(num_videos=3, sizes=[25, 25, 25]) as video_list: - video_clips = VideoClips(video_list, 5, 5) - sampler = RandomClipSampler(video_clips, 3) - self.assertEqual(len(sampler), 3 * 3) - indices = torch.tensor(list(iter(sampler))) - videos = indices // 5 - v_idxs, count = torch.unique(videos, return_counts=True) - self.assertTrue(v_idxs.equal(torch.tensor([0, 1, 2]))) - self.assertTrue(count.equal(torch.tensor([3, 3, 3]))) +@pytest.mark.skipif(not io.video._av_available(), reason="this test requires av") +class TestDatasetsSamplers: + def test_random_clip_sampler(self, tmpdir): + video_list = get_list_of_videos(tmpdir, num_videos=3, sizes=[25, 25, 25]) + video_clips = VideoClips(video_list, 5, 5) + sampler = RandomClipSampler(video_clips, 3) + assert len(sampler) == 3 * 3 + indices = torch.tensor(list(iter(sampler))) + videos = torch.div(indices, 5, rounding_mode="floor") + v_idxs, count = torch.unique(videos, return_counts=True) + assert_equal(v_idxs, torch.tensor([0, 1, 2])) + assert_equal(count, torch.tensor([3, 3, 3])) - def test_random_clip_sampler_unequal(self): - with get_list_of_videos(num_videos=3, sizes=[10, 25, 25]) as video_list: - video_clips = VideoClips(video_list, 5, 5) - sampler = RandomClipSampler(video_clips, 3) - self.assertEqual(len(sampler), 2 + 3 + 3) - indices = list(iter(sampler)) - self.assertIn(0, indices) - self.assertIn(1, indices) - # remove elements of the first video, to simplify testing - indices.remove(0) - indices.remove(1) - indices = torch.tensor(indices) - 2 - videos = indices // 5 - v_idxs, count = torch.unique(videos, return_counts=True) - self.assertTrue(v_idxs.equal(torch.tensor([0, 1]))) - self.assertTrue(count.equal(torch.tensor([3, 3]))) + def test_random_clip_sampler_unequal(self, tmpdir): + video_list = get_list_of_videos(tmpdir, num_videos=3, sizes=[10, 25, 25]) + video_clips = VideoClips(video_list, 5, 5) + sampler = RandomClipSampler(video_clips, 3) + assert len(sampler) == 2 + 3 + 3 + indices = list(iter(sampler)) + assert 0 in indices + assert 1 in indices + # remove elements of the 
first video, to simplify testing + indices.remove(0) + indices.remove(1) + indices = torch.tensor(indices) - 2 + videos = torch.div(indices, 5, rounding_mode="floor") + v_idxs, count = torch.unique(videos, return_counts=True) + assert_equal(v_idxs, torch.tensor([0, 1])) + assert_equal(count, torch.tensor([3, 3])) - def test_uniform_clip_sampler(self): - with get_list_of_videos(num_videos=3, sizes=[25, 25, 25]) as video_list: - video_clips = VideoClips(video_list, 5, 5) - sampler = UniformClipSampler(video_clips, 3) - self.assertEqual(len(sampler), 3 * 3) - indices = torch.tensor(list(iter(sampler))) - videos = indices // 5 - v_idxs, count = torch.unique(videos, return_counts=True) - self.assertTrue(v_idxs.equal(torch.tensor([0, 1, 2]))) - self.assertTrue(count.equal(torch.tensor([3, 3, 3]))) - self.assertTrue(indices.equal(torch.tensor([0, 2, 4, 5, 7, 9, 10, 12, 14]))) + def test_uniform_clip_sampler(self, tmpdir): + video_list = get_list_of_videos(tmpdir, num_videos=3, sizes=[25, 25, 25]) + video_clips = VideoClips(video_list, 5, 5) + sampler = UniformClipSampler(video_clips, 3) + assert len(sampler) == 3 * 3 + indices = torch.tensor(list(iter(sampler))) + videos = torch.div(indices, 5, rounding_mode="floor") + v_idxs, count = torch.unique(videos, return_counts=True) + assert_equal(v_idxs, torch.tensor([0, 1, 2])) + assert_equal(count, torch.tensor([3, 3, 3])) + assert_equal(indices, torch.tensor([0, 2, 4, 5, 7, 9, 10, 12, 14])) - def test_uniform_clip_sampler_insufficient_clips(self): - with get_list_of_videos(num_videos=3, sizes=[10, 25, 25]) as video_list: - video_clips = VideoClips(video_list, 5, 5) - sampler = UniformClipSampler(video_clips, 3) - self.assertEqual(len(sampler), 3 * 3) - indices = torch.tensor(list(iter(sampler))) - self.assertTrue(indices.equal(torch.tensor([0, 0, 1, 2, 4, 6, 7, 9, 11]))) + def test_uniform_clip_sampler_insufficient_clips(self, tmpdir): + video_list = get_list_of_videos(tmpdir, num_videos=3, sizes=[10, 25, 25]) + video_clips = VideoClips(video_list, 5, 5) + sampler = UniformClipSampler(video_clips, 3) + assert len(sampler) == 3 * 3 + indices = torch.tensor(list(iter(sampler))) + assert_equal(indices, torch.tensor([0, 0, 1, 2, 4, 6, 7, 9, 11])) - def test_distributed_sampler_and_uniform_clip_sampler(self): - with get_list_of_videos(num_videos=3, sizes=[25, 25, 25]) as video_list: - video_clips = VideoClips(video_list, 5, 5) - clip_sampler = UniformClipSampler(video_clips, 3) + def test_distributed_sampler_and_uniform_clip_sampler(self, tmpdir): + video_list = get_list_of_videos(tmpdir, num_videos=3, sizes=[25, 25, 25]) + video_clips = VideoClips(video_list, 5, 5) + clip_sampler = UniformClipSampler(video_clips, 3) - distributed_sampler_rank0 = DistributedSampler( - clip_sampler, - num_replicas=2, - rank=0, - group_size=3, - ) - indices = torch.tensor(list(iter(distributed_sampler_rank0))) - self.assertEqual(len(distributed_sampler_rank0), 6) - self.assertTrue(indices.equal(torch.tensor([0, 2, 4, 10, 12, 14]))) + distributed_sampler_rank0 = DistributedSampler( + clip_sampler, + num_replicas=2, + rank=0, + group_size=3, + ) + indices = torch.tensor(list(iter(distributed_sampler_rank0))) + assert len(distributed_sampler_rank0) == 6 + assert_equal(indices, torch.tensor([0, 2, 4, 10, 12, 14])) - distributed_sampler_rank1 = DistributedSampler( - clip_sampler, - num_replicas=2, - rank=1, - group_size=3, - ) - indices = torch.tensor(list(iter(distributed_sampler_rank1))) - self.assertEqual(len(distributed_sampler_rank1), 6) - 
self.assertTrue(indices.equal(torch.tensor([5, 7, 9, 0, 2, 4]))) + distributed_sampler_rank1 = DistributedSampler( + clip_sampler, + num_replicas=2, + rank=1, + group_size=3, + ) + indices = torch.tensor(list(iter(distributed_sampler_rank1))) + assert len(distributed_sampler_rank1) == 6 + assert_equal(indices, torch.tensor([5, 7, 9, 0, 2, 4])) -if __name__ == '__main__': - unittest.main() +if __name__ == "__main__": + pytest.main([__file__]) diff --git a/test/test_datasets_transforms.py b/test/test_datasets_transforms.py deleted file mode 100644 index 6cffd4f76a9..00000000000 --- a/test/test_datasets_transforms.py +++ /dev/null @@ -1,72 +0,0 @@ -import os -import shutil -import contextlib -import tempfile -import unittest -from torchvision.datasets import ImageFolder - -FAKEDATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), - 'assets', 'fakedata') - - -@contextlib.contextmanager -def tmp_dir(src=None, **kwargs): - tmp_dir = tempfile.mkdtemp(**kwargs) - if src is not None: - os.rmdir(tmp_dir) - shutil.copytree(src, tmp_dir) - try: - yield tmp_dir - finally: - shutil.rmtree(tmp_dir) - - -def mock_transform(return_value, arg_list): - def mock(arg): - arg_list.append(arg) - return return_value - return mock - - -class Tester(unittest.TestCase): - def test_transform(self): - with tmp_dir(src=os.path.join(FAKEDATA_DIR, 'imagefolder')) as root: - class_a_image_files = [os.path.join(root, 'a', file) - for file in ('a1.png', 'a2.png', 'a3.png')] - class_b_image_files = [os.path.join(root, 'b', file) - for file in ('b1.png', 'b2.png', 'b3.png', 'b4.png')] - return_value = os.path.join(root, 'a', 'a1.png') - args = [] - transform = mock_transform(return_value, args) - dataset = ImageFolder(root, loader=lambda x: x, transform=transform) - - outputs = [dataset[i][0] for i in range(len(dataset))] - self.assertEqual([return_value] * len(outputs), outputs) - - imgs = sorted(class_a_image_files + class_b_image_files) - self.assertEqual(imgs, sorted(args)) - - def test_target_transform(self): - with tmp_dir(src=os.path.join(FAKEDATA_DIR, 'imagefolder')) as root: - class_a_image_files = [os.path.join(root, 'a', file) - for file in ('a1.png', 'a2.png', 'a3.png')] - class_b_image_files = [os.path.join(root, 'b', file) - for file in ('b1.png', 'b2.png', 'b3.png', 'b4.png')] - return_value = os.path.join(root, 'a', 'a1.png') - args = [] - target_transform = mock_transform(return_value, args) - dataset = ImageFolder(root, loader=lambda x: x, - target_transform=target_transform) - - outputs = [dataset[i][1] for i in range(len(dataset))] - self.assertEqual([return_value] * len(outputs), outputs) - - class_a_idx = dataset.class_to_idx['a'] - class_b_idx = dataset.class_to_idx['b'] - targets = sorted([class_a_idx] * len(class_a_image_files) + - [class_b_idx] * len(class_b_image_files)) - self.assertEqual(targets, sorted(args)) - - -if __name__ == '__main__': - unittest.main() diff --git a/test/test_datasets_utils.py b/test/test_datasets_utils.py index 14a53b75c54..500163dc7d1 100644 --- a/test/test_datasets_utils.py +++ b/test/test_datasets_utils.py @@ -1,140 +1,287 @@ +import contextlib +import gzip import os -import sys -import tempfile -import torchvision.datasets.utils as utils -import unittest -import zipfile +import pathlib +import re import tarfile -import gzip -import warnings -from torch._six import PY2 -from torch._utils_internal import get_file_path_2 +import zipfile -from common_utils import get_tmp_dir +import pytest +import torch +import torchvision.datasets.utils as utils +from 
common_utils import assert_equal +from torch._utils_internal import get_file_path_2 # @manual=fbcode//caffe2:utils_internal +from torchvision.datasets.folder import make_dataset +from torchvision.datasets.utils import _COMPRESSED_FILE_OPENERS -if sys.version_info < (3,): - from urllib2 import URLError -else: - from urllib.error import URLError +TEST_FILE = get_file_path_2( + os.path.dirname(os.path.abspath(__file__)), "assets", "encode_jpeg", "grace_hopper_517x606.jpg" +) -TEST_FILE = get_file_path_2( - os.path.dirname(os.path.abspath(__file__)), 'assets', 'grace_hopper_517x606.jpg') +def patch_url_redirection(mocker, redirect_url): + class Response: + def __init__(self, url): + self.url = url + + @contextlib.contextmanager + def patched_opener(*args, **kwargs): + yield Response(redirect_url) + + return mocker.patch("torchvision.datasets.utils.urllib.request.urlopen", side_effect=patched_opener) + + +class TestDatasetsUtils: + def test_get_redirect_url(self, mocker): + url = "https://url.org" + expected_redirect_url = "https://redirect.url.org" + + mock = patch_url_redirection(mocker, expected_redirect_url) + + actual = utils._get_redirect_url(url) + assert actual == expected_redirect_url + + assert mock.call_count == 2 + call_args_1, call_args_2 = mock.call_args_list + assert call_args_1[0][0].full_url == url + assert call_args_2[0][0].full_url == expected_redirect_url + + def test_get_redirect_url_max_hops_exceeded(self, mocker): + url = "https://url.org" + redirect_url = "https://redirect.url.org" + mock = patch_url_redirection(mocker, redirect_url) -class Tester(unittest.TestCase): + with pytest.raises(RecursionError): + utils._get_redirect_url(url, max_hops=0) - def test_check_md5(self): + assert mock.call_count == 1 + assert mock.call_args[0][0].full_url == url + + @pytest.mark.parametrize("use_pathlib", (True, False)) + def test_check_md5(self, use_pathlib): fpath = TEST_FILE - correct_md5 = '9c0bb82894bb3af7f7675ef2b3b6dcdc' - false_md5 = '' - self.assertTrue(utils.check_md5(fpath, correct_md5)) - self.assertFalse(utils.check_md5(fpath, false_md5)) + if use_pathlib: + fpath = pathlib.Path(fpath) + correct_md5 = "9c0bb82894bb3af7f7675ef2b3b6dcdc" + false_md5 = "" + assert utils.check_md5(fpath, correct_md5) + assert not utils.check_md5(fpath, false_md5) def test_check_integrity(self): existing_fpath = TEST_FILE - nonexisting_fpath = '' - correct_md5 = '9c0bb82894bb3af7f7675ef2b3b6dcdc' - false_md5 = '' - self.assertTrue(utils.check_integrity(existing_fpath, correct_md5)) - self.assertFalse(utils.check_integrity(existing_fpath, false_md5)) - self.assertTrue(utils.check_integrity(existing_fpath)) - self.assertFalse(utils.check_integrity(nonexisting_fpath)) - - @unittest.skipIf(PY2, "https://github.com/pytorch/vision/issues/1268") - def test_download_url(self): - with get_tmp_dir() as temp_dir: - url = "http://github.com/pytorch/vision/archive/master.zip" - try: - utils.download_url(url, temp_dir) - self.assertFalse(len(os.listdir(temp_dir)) == 0) - except URLError: - msg = "could not download test file '{}'".format(url) - warnings.warn(msg, RuntimeWarning) - raise unittest.SkipTest(msg) - - @unittest.skipIf(PY2, "https://github.com/pytorch/vision/issues/1268") - def test_download_url_retry_http(self): - with get_tmp_dir() as temp_dir: - url = "https://github.com/pytorch/vision/archive/master.zip" - try: - utils.download_url(url, temp_dir) - self.assertFalse(len(os.listdir(temp_dir)) == 0) - except URLError: - msg = "could not download test file '{}'".format(url) - warnings.warn(msg, 
RuntimeWarning) - raise unittest.SkipTest(msg) - - @unittest.skipIf(sys.version_info < (3,), "Python2 doesn't raise error") - def test_download_url_dont_exist(self): - with get_tmp_dir() as temp_dir: - url = "http://github.com/pytorch/vision/archive/this_doesnt_exist.zip" - with self.assertRaises(URLError): - utils.download_url(url, temp_dir) - - @unittest.skipIf('win' in sys.platform, 'temporarily disabled on Windows') - def test_extract_zip(self): - with get_tmp_dir() as temp_dir: - with tempfile.NamedTemporaryFile(suffix='.zip') as f: - with zipfile.ZipFile(f, 'w') as zf: - zf.writestr('file.tst', 'this is the content') - utils.extract_archive(f.name, temp_dir) - self.assertTrue(os.path.exists(os.path.join(temp_dir, 'file.tst'))) - with open(os.path.join(temp_dir, 'file.tst'), 'r') as nf: - data = nf.read() - self.assertEqual(data, 'this is the content') - - @unittest.skipIf('win' in sys.platform, 'temporarily disabled on Windows') - def test_extract_tar(self): - for ext, mode in zip(['.tar', '.tar.gz'], ['w', 'w:gz']): - with get_tmp_dir() as temp_dir: - with tempfile.NamedTemporaryFile() as bf: - bf.write("this is the content".encode()) - bf.seek(0) - with tempfile.NamedTemporaryFile(suffix=ext) as f: - with tarfile.open(f.name, mode=mode) as zf: - zf.add(bf.name, arcname='file.tst') - utils.extract_archive(f.name, temp_dir) - self.assertTrue(os.path.exists(os.path.join(temp_dir, 'file.tst'))) - with open(os.path.join(temp_dir, 'file.tst'), 'r') as nf: - data = nf.read() - self.assertEqual(data, 'this is the content') - - @unittest.skipIf('win' in sys.platform, 'temporarily disabled on Windows') - @unittest.skipIf(sys.version_info < (3,), "Extracting .tar.xz files is not supported under Python 2.x") - def test_extract_tar_xz(self): - for ext, mode in zip(['.tar.xz'], ['w:xz']): - with get_tmp_dir() as temp_dir: - with tempfile.NamedTemporaryFile() as bf: - bf.write("this is the content".encode()) - bf.seek(0) - with tempfile.NamedTemporaryFile(suffix=ext) as f: - with tarfile.open(f.name, mode=mode) as zf: - zf.add(bf.name, arcname='file.tst') - utils.extract_archive(f.name, temp_dir) - self.assertTrue(os.path.exists(os.path.join(temp_dir, 'file.tst'))) - with open(os.path.join(temp_dir, 'file.tst'), 'r') as nf: - data = nf.read() - self.assertEqual(data, 'this is the content') - - @unittest.skipIf('win' in sys.platform, 'temporarily disabled on Windows') - def test_extract_gzip(self): - with get_tmp_dir() as temp_dir: - with tempfile.NamedTemporaryFile(suffix='.gz') as f: - with gzip.GzipFile(f.name, 'wb') as zf: - zf.write('this is the content'.encode()) - utils.extract_archive(f.name, temp_dir) - f_name = os.path.join(temp_dir, os.path.splitext(os.path.basename(f.name))[0]) - self.assertTrue(os.path.exists(f_name)) - with open(os.path.join(f_name), 'r') as nf: - data = nf.read() - self.assertEqual(data, 'this is the content') + nonexisting_fpath = "" + correct_md5 = "9c0bb82894bb3af7f7675ef2b3b6dcdc" + false_md5 = "" + assert utils.check_integrity(existing_fpath, correct_md5) + assert not utils.check_integrity(existing_fpath, false_md5) + assert utils.check_integrity(existing_fpath) + assert not utils.check_integrity(nonexisting_fpath) + + def test_get_google_drive_file_id(self): + url = "https://drive.google.com/file/d/1GO-BHUYRuvzr1Gtp2_fqXRsr9TIeYbhV/view" + expected = "1GO-BHUYRuvzr1Gtp2_fqXRsr9TIeYbhV" + + actual = utils._get_google_drive_file_id(url) + assert actual == expected + + def test_get_google_drive_file_id_invalid_url(self): + url = 
"http://www.vision.caltech.edu/visipedia-data/CUB-200-2011/CUB_200_2011.tgz" + + assert utils._get_google_drive_file_id(url) is None + + @pytest.mark.parametrize( + "file, expected", + [ + ("foo.tar.bz2", (".tar.bz2", ".tar", ".bz2")), + ("foo.tar.xz", (".tar.xz", ".tar", ".xz")), + ("foo.tar", (".tar", ".tar", None)), + ("foo.tar.gz", (".tar.gz", ".tar", ".gz")), + ("foo.tbz", (".tbz", ".tar", ".bz2")), + ("foo.tbz2", (".tbz2", ".tar", ".bz2")), + ("foo.tgz", (".tgz", ".tar", ".gz")), + ("foo.bz2", (".bz2", None, ".bz2")), + ("foo.gz", (".gz", None, ".gz")), + ("foo.zip", (".zip", ".zip", None)), + ("foo.xz", (".xz", None, ".xz")), + ("foo.bar.tar.gz", (".tar.gz", ".tar", ".gz")), + ("foo.bar.gz", (".gz", None, ".gz")), + ("foo.bar.zip", (".zip", ".zip", None)), + ], + ) + def test_detect_file_type(self, file, expected): + assert utils._detect_file_type(file) == expected + + @pytest.mark.parametrize("file", ["foo", "foo.tar.baz", "foo.bar"]) + def test_detect_file_type_incompatible(self, file): + # tests detect file type for no extension, unknown compression and unknown partial extension + with pytest.raises(RuntimeError): + utils._detect_file_type(file) + + @pytest.mark.parametrize("extension", [".bz2", ".gz", ".xz"]) + @pytest.mark.parametrize("use_pathlib", (True, False)) + def test_decompress(self, extension, tmpdir, use_pathlib): + def create_compressed(root, content="this is the content"): + file = os.path.join(root, "file") + compressed = f"{file}{extension}" + compressed_file_opener = _COMPRESSED_FILE_OPENERS[extension] + + with compressed_file_opener(compressed, "wb") as fh: + fh.write(content.encode()) + + return compressed, file, content + + compressed, file, content = create_compressed(tmpdir) + if use_pathlib: + compressed = pathlib.Path(compressed) + + utils._decompress(compressed) + + assert os.path.exists(file) + + with open(file) as fh: + assert fh.read() == content + + def test_decompress_no_compression(self): + with pytest.raises(RuntimeError): + utils._decompress("foo.tar") + + @pytest.mark.parametrize("use_pathlib", (True, False)) + def test_decompress_remove_finished(self, tmpdir, use_pathlib): + def create_compressed(root, content="this is the content"): + file = os.path.join(root, "file") + compressed = f"{file}.gz" + + with gzip.open(compressed, "wb") as fh: + fh.write(content.encode()) + + return compressed, file, content + + compressed, file, content = create_compressed(tmpdir) + print(f"{type(compressed)=}") + if use_pathlib: + compressed = pathlib.Path(compressed) + tmpdir = pathlib.Path(tmpdir) + + extracted_dir = utils.extract_archive(compressed, tmpdir, remove_finished=True) + + assert not os.path.exists(compressed) + if use_pathlib: + assert isinstance(extracted_dir, pathlib.Path) + assert isinstance(compressed, pathlib.Path) + else: + assert isinstance(extracted_dir, str) + assert isinstance(compressed, str) + + @pytest.mark.parametrize("extension", [".gz", ".xz"]) + @pytest.mark.parametrize("remove_finished", [True, False]) + def test_extract_archive_defer_to_decompress(self, extension, remove_finished, mocker): + filename = "foo" + file = f"{filename}{extension}" + + mocked = mocker.patch("torchvision.datasets.utils._decompress") + utils.extract_archive(file, remove_finished=remove_finished) + + mocked.assert_called_once_with(file, filename, remove_finished=remove_finished) + + @pytest.mark.parametrize("use_pathlib", (True, False)) + def test_extract_zip(self, tmpdir, use_pathlib): + def create_archive(root, content="this is the content"): + file = 
os.path.join(root, "dst.txt") + archive = os.path.join(root, "archive.zip") + + with zipfile.ZipFile(archive, "w") as zf: + zf.writestr(os.path.basename(file), content) + + return archive, file, content + + if use_pathlib: + tmpdir = pathlib.Path(tmpdir) + archive, file, content = create_archive(tmpdir) + + utils.extract_archive(archive, tmpdir) + + assert os.path.exists(file) + + with open(file) as fh: + assert fh.read() == content + + @pytest.mark.parametrize( + "extension, mode", [(".tar", "w"), (".tar.gz", "w:gz"), (".tgz", "w:gz"), (".tar.xz", "w:xz")] + ) + @pytest.mark.parametrize("use_pathlib", (True, False)) + def test_extract_tar(self, extension, mode, tmpdir, use_pathlib): + def create_archive(root, extension, mode, content="this is the content"): + src = os.path.join(root, "src.txt") + dst = os.path.join(root, "dst.txt") + archive = os.path.join(root, f"archive{extension}") + + with open(src, "w") as fh: + fh.write(content) + + with tarfile.open(archive, mode=mode) as fh: + fh.add(src, arcname=os.path.basename(dst)) + + return archive, dst, content + + if use_pathlib: + tmpdir = pathlib.Path(tmpdir) + archive, file, content = create_archive(tmpdir, extension, mode) + + utils.extract_archive(archive, tmpdir) + + assert os.path.exists(file) + + with open(file) as fh: + assert fh.read() == content def test_verify_str_arg(self): - self.assertEqual("a", utils.verify_str_arg("a", "arg", ("a",))) - self.assertRaises(ValueError, utils.verify_str_arg, 0, ("a",), "arg") - self.assertRaises(ValueError, utils.verify_str_arg, "b", ("a",), "arg") + assert "a" == utils.verify_str_arg("a", "arg", ("a",)) + pytest.raises(ValueError, utils.verify_str_arg, 0, ("a",), "arg") + pytest.raises(ValueError, utils.verify_str_arg, "b", ("a",), "arg") + + @pytest.mark.parametrize( + ("dtype", "actual_hex", "expected_hex"), + [ + (torch.uint8, "01 23 45 67 89 AB CD EF", "01 23 45 67 89 AB CD EF"), + (torch.float16, "01 23 45 67 89 AB CD EF", "23 01 67 45 AB 89 EF CD"), + (torch.int32, "01 23 45 67 89 AB CD EF", "67 45 23 01 EF CD AB 89"), + (torch.float64, "01 23 45 67 89 AB CD EF", "EF CD AB 89 67 45 23 01"), + ], + ) + def test_flip_byte_order(self, dtype, actual_hex, expected_hex): + def to_tensor(hex): + return torch.frombuffer(bytes.fromhex(hex), dtype=dtype) + + assert_equal( + utils._flip_byte_order(to_tensor(actual_hex)), + to_tensor(expected_hex), + ) + + +@pytest.mark.parametrize( + ("kwargs", "expected_error_msg"), + [ + (dict(is_valid_file=lambda path: pathlib.Path(path).suffix in {".png", ".jpeg"}), "classes c"), + (dict(extensions=".png"), re.escape("classes b, c. Supported extensions are: .png")), + (dict(extensions=(".png", ".jpeg")), re.escape("classes c. 
Supported extensions are: .png, .jpeg")), + ], +) +def test_make_dataset_no_valid_files(tmpdir, kwargs, expected_error_msg): + tmpdir = pathlib.Path(tmpdir) + + (tmpdir / "a").mkdir() + (tmpdir / "a" / "a.png").touch() + + (tmpdir / "b").mkdir() + (tmpdir / "b" / "b.jpeg").touch() + + (tmpdir / "c").mkdir() + (tmpdir / "c" / "c.unknown").touch() + + with pytest.raises(FileNotFoundError, match=expected_error_msg): + make_dataset(str(tmpdir), **kwargs) -if __name__ == '__main__': - unittest.main() +if __name__ == "__main__": + pytest.main([__file__]) diff --git a/test/test_datasets_video_utils.py b/test/test_datasets_video_utils.py index 2488edc613d..51330911e50 100644 --- a/test/test_datasets_video_utils.py +++ b/test/test_datasets_video_utils.py @@ -1,101 +1,71 @@ -import contextlib -import sys -import os +import pytest import torch -import unittest - +from common_utils import assert_equal, get_list_of_videos from torchvision import io -from torchvision.datasets.video_utils import VideoClips, unfold - -from common_utils import get_tmp_dir - - -@contextlib.contextmanager -def get_list_of_videos(num_videos=5, sizes=None, fps=None): - with get_tmp_dir() as tmp_dir: - names = [] - for i in range(num_videos): - if sizes is None: - size = 5 * (i + 1) - else: - size = sizes[i] - if fps is None: - f = 5 - else: - f = fps[i] - data = torch.randint(0, 255, (size, 300, 400, 3), dtype=torch.uint8) - name = os.path.join(tmp_dir, "{}.mp4".format(i)) - names.append(name) - io.write_video(name, data, fps=f) +from torchvision.datasets.video_utils import unfold, VideoClips - yield names - - -class Tester(unittest.TestCase): +class TestVideo: def test_unfold(self): a = torch.arange(7) r = unfold(a, 3, 3, 1) - expected = torch.tensor([ - [0, 1, 2], - [3, 4, 5], - ]) - self.assertTrue(r.equal(expected)) + expected = torch.tensor( + [ + [0, 1, 2], + [3, 4, 5], + ] + ) + assert_equal(r, expected) r = unfold(a, 3, 2, 1) - expected = torch.tensor([ - [0, 1, 2], - [2, 3, 4], - [4, 5, 6] - ]) - self.assertTrue(r.equal(expected)) + expected = torch.tensor([[0, 1, 2], [2, 3, 4], [4, 5, 6]]) + assert_equal(r, expected) r = unfold(a, 3, 2, 2) - expected = torch.tensor([ - [0, 2, 4], - [2, 4, 6], - ]) - self.assertTrue(r.equal(expected)) - - @unittest.skipIf(not io.video._av_available(), "this test requires av") - @unittest.skipIf(sys.platform == 'win32', 'temporarily disabled on Windows') - def test_video_clips(self): - with get_list_of_videos(num_videos=3) as video_list: - video_clips = VideoClips(video_list, 5, 5) - self.assertEqual(video_clips.num_clips(), 1 + 2 + 3) - for i, (v_idx, c_idx) in enumerate([(0, 0), (1, 0), (1, 1), (2, 0), (2, 1), (2, 2)]): - video_idx, clip_idx = video_clips.get_clip_location(i) - self.assertEqual(video_idx, v_idx) - self.assertEqual(clip_idx, c_idx) - - video_clips = VideoClips(video_list, 6, 6) - self.assertEqual(video_clips.num_clips(), 0 + 1 + 2) - for i, (v_idx, c_idx) in enumerate([(1, 0), (2, 0), (2, 1)]): - video_idx, clip_idx = video_clips.get_clip_location(i) - self.assertEqual(video_idx, v_idx) - self.assertEqual(clip_idx, c_idx) - - video_clips = VideoClips(video_list, 6, 1) - self.assertEqual(video_clips.num_clips(), 0 + (10 - 6 + 1) + (15 - 6 + 1)) - for i, v_idx, c_idx in [(0, 1, 0), (4, 1, 4), (5, 2, 0), (6, 2, 1)]: - video_idx, clip_idx = video_clips.get_clip_location(i) - self.assertEqual(video_idx, v_idx) - self.assertEqual(clip_idx, c_idx) - - @unittest.skipIf(not io.video._av_available(), "this test requires av") - @unittest.skipIf(sys.platform == 'win32', 
'temporarily disabled on Windows') - def test_video_clips_custom_fps(self): - with get_list_of_videos(num_videos=3, sizes=[12, 12, 12], fps=[3, 4, 6]) as video_list: - num_frames = 4 - for fps in [1, 3, 4, 10]: - video_clips = VideoClips(video_list, num_frames, num_frames, fps) - for i in range(video_clips.num_clips()): - video, audio, info, video_idx = video_clips.get_clip(i) - self.assertEqual(video.shape[0], num_frames) - self.assertEqual(info["video_fps"], fps) - self.assertEqual(info, {"video_fps": fps}) - # TODO add tests checking that the content is right + expected = torch.tensor( + [ + [0, 2, 4], + [2, 4, 6], + ] + ) + assert_equal(r, expected) + + @pytest.mark.skipif(not io.video._av_available(), reason="this test requires av") + def test_video_clips(self, tmpdir): + video_list = get_list_of_videos(tmpdir, num_videos=3) + video_clips = VideoClips(video_list, 5, 5, num_workers=2) + assert video_clips.num_clips() == 1 + 2 + 3 + for i, (v_idx, c_idx) in enumerate([(0, 0), (1, 0), (1, 1), (2, 0), (2, 1), (2, 2)]): + video_idx, clip_idx = video_clips.get_clip_location(i) + assert video_idx == v_idx + assert clip_idx == c_idx + + video_clips = VideoClips(video_list, 6, 6) + assert video_clips.num_clips() == 0 + 1 + 2 + for i, (v_idx, c_idx) in enumerate([(1, 0), (2, 0), (2, 1)]): + video_idx, clip_idx = video_clips.get_clip_location(i) + assert video_idx == v_idx + assert clip_idx == c_idx + + video_clips = VideoClips(video_list, 6, 1) + assert video_clips.num_clips() == 0 + (10 - 6 + 1) + (15 - 6 + 1) + for i, v_idx, c_idx in [(0, 1, 0), (4, 1, 4), (5, 2, 0), (6, 2, 1)]: + video_idx, clip_idx = video_clips.get_clip_location(i) + assert video_idx == v_idx + assert clip_idx == c_idx + + @pytest.mark.skipif(not io.video._av_available(), reason="this test requires av") + def test_video_clips_custom_fps(self, tmpdir): + video_list = get_list_of_videos(tmpdir, num_videos=3, sizes=[12, 12, 12], fps=[3, 4, 6]) + num_frames = 4 + for fps in [1, 3, 4, 10]: + video_clips = VideoClips(video_list, num_frames, num_frames, fps) + for i in range(video_clips.num_clips()): + video, audio, info, video_idx = video_clips.get_clip(i) + assert video.shape[0] == num_frames + assert info["video_fps"] == fps + # TODO add tests checking that the content is right def test_compute_clips_for_video(self): video_pts = torch.arange(30) @@ -104,25 +74,32 @@ def test_compute_clips_for_video(self): orig_fps = 30 duration = float(len(video_pts)) / orig_fps new_fps = 13 - clips, idxs = VideoClips.compute_clips_for_video(video_pts, num_frames, num_frames, - orig_fps, new_fps) + clips, idxs = VideoClips.compute_clips_for_video(video_pts, num_frames, num_frames, orig_fps, new_fps) resampled_idxs = VideoClips._resample_video_idx(int(duration * new_fps), orig_fps, new_fps) - self.assertEqual(len(clips), 1) - self.assertTrue(clips.equal(idxs)) - self.assertTrue(idxs[0].equal(resampled_idxs)) + assert len(clips) == 1 + assert_equal(clips, idxs) + assert_equal(idxs[0], resampled_idxs) # case 2: all frames appear only once num_frames = 4 orig_fps = 30 duration = float(len(video_pts)) / orig_fps new_fps = 12 - clips, idxs = VideoClips.compute_clips_for_video(video_pts, num_frames, num_frames, - orig_fps, new_fps) + clips, idxs = VideoClips.compute_clips_for_video(video_pts, num_frames, num_frames, orig_fps, new_fps) resampled_idxs = VideoClips._resample_video_idx(int(duration * new_fps), orig_fps, new_fps) - self.assertEqual(len(clips), 3) - self.assertTrue(clips.equal(idxs)) - self.assertTrue(idxs.flatten().equal(resampled_idxs)) 
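# Context for the resampling check above: VideoClips._resample_video_idx maps
# clip-frame indices sampled at new_fps back onto frames recorded at orig_fps.
# A minimal standalone sketch of that logic, added here for illustration only
# (the function name and exact details are assumptions, not the torchvision
# source itself):
import torch

def resample_video_idx(num_frames, original_fps, new_fps):
    step = original_fps / new_fps
    if float(step).is_integer():
        # Integer step: a plain strided slice selects every step-th frame.
        return slice(None, None, int(step))
    idxs = torch.arange(num_frames, dtype=torch.float32) * step
    # Round down so every resampled index points at an existing frame.
    return idxs.floor().to(torch.int64)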
+ assert len(clips) == 3 + assert_equal(clips, idxs) + assert_equal(idxs.flatten(), resampled_idxs) + + # case 3: frames aren't enough for a clip + num_frames = 32 + orig_fps = 30 + new_fps = 13 + with pytest.warns(UserWarning): + clips, idxs = VideoClips.compute_clips_for_video(video_pts, num_frames, num_frames, orig_fps, new_fps) + assert len(clips) == 0 + assert len(idxs) == 0 -if __name__ == '__main__': - unittest.main() +if __name__ == "__main__": + pytest.main([__file__]) diff --git a/test/test_datasets_video_utils_opt.py b/test/test_datasets_video_utils_opt.py index f94af400838..5e6b19bfb95 100644 --- a/test/test_datasets_video_utils_opt.py +++ b/test/test_datasets_video_utils_opt.py @@ -1,11 +1,12 @@ import unittest -from torchvision import set_video_backend -import test_datasets_video_utils +import test_datasets_video_utils +from torchvision import set_video_backend # noqa: 401 -set_video_backend('video_reader') +# Disabling the video backend switching temporarily +# set_video_backend('video_reader') -if __name__ == '__main__': +if __name__ == "__main__": suite = unittest.TestLoader().loadTestsFromModule(test_datasets_video_utils) unittest.TextTestRunner(verbosity=1).run(suite) diff --git a/test/test_extended_models.py b/test/test_extended_models.py new file mode 100644 index 00000000000..0c918c0afd1 --- /dev/null +++ b/test/test_extended_models.py @@ -0,0 +1,503 @@ +import copy +import os +import pickle + +import pytest +import test_models as TM +import torch +from common_extended_utils import get_file_size_mb, get_ops +from torchvision import models +from torchvision.models import get_model_weights, Weights, WeightsEnum +from torchvision.models._utils import handle_legacy_interface +from torchvision.models.detection.backbone_utils import mobilenet_backbone, resnet_fpn_backbone + +run_if_test_with_extended = pytest.mark.skipif( + os.getenv("PYTORCH_TEST_WITH_EXTENDED", "0") != "1", + reason="Extended tests are disabled by default. 
Set PYTORCH_TEST_WITH_EXTENDED=1 to run them.", +) + + +@pytest.mark.parametrize( + "name, model_class", + [ + ("resnet50", models.ResNet), + ("retinanet_resnet50_fpn_v2", models.detection.RetinaNet), + ("raft_large", models.optical_flow.RAFT), + ("quantized_resnet50", models.quantization.QuantizableResNet), + ("lraspp_mobilenet_v3_large", models.segmentation.LRASPP), + ("mvit_v1_b", models.video.MViT), + ], +) +def test_get_model(name, model_class): + assert isinstance(models.get_model(name), model_class) + + +@pytest.mark.parametrize( + "name, model_fn", + [ + ("resnet50", models.resnet50), + ("retinanet_resnet50_fpn_v2", models.detection.retinanet_resnet50_fpn_v2), + ("raft_large", models.optical_flow.raft_large), + ("quantized_resnet50", models.quantization.resnet50), + ("lraspp_mobilenet_v3_large", models.segmentation.lraspp_mobilenet_v3_large), + ("mvit_v1_b", models.video.mvit_v1_b), + ], +) +def test_get_model_builder(name, model_fn): + assert models.get_model_builder(name) == model_fn + + +@pytest.mark.parametrize( + "name, weight", + [ + ("resnet50", models.ResNet50_Weights), + ("retinanet_resnet50_fpn_v2", models.detection.RetinaNet_ResNet50_FPN_V2_Weights), + ("raft_large", models.optical_flow.Raft_Large_Weights), + ("quantized_resnet50", models.quantization.ResNet50_QuantizedWeights), + ("lraspp_mobilenet_v3_large", models.segmentation.LRASPP_MobileNet_V3_Large_Weights), + ("mvit_v1_b", models.video.MViT_V1_B_Weights), + ], +) +def test_get_model_weights(name, weight): + assert models.get_model_weights(name) == weight + + +@pytest.mark.parametrize("copy_fn", [copy.copy, copy.deepcopy]) +@pytest.mark.parametrize( + "name", + [ + "resnet50", + "retinanet_resnet50_fpn_v2", + "raft_large", + "quantized_resnet50", + "lraspp_mobilenet_v3_large", + "mvit_v1_b", + ], +) +def test_weights_copyable(copy_fn, name): + for weights in list(models.get_model_weights(name)): + # It is somewhat surprising that (deep-)copying is an identity operation here, but this is the default behavior + # of enums: https://docs.python.org/3/howto/enum.html#enum-members-aka-instances + # Checking for equality, i.e. `==`, is sufficient (and even preferable) for our use case, should we need to drop + # support for the identity operation in the future. + assert copy_fn(weights) is weights + + +@pytest.mark.parametrize( + "name", + [ + "resnet50", + "retinanet_resnet50_fpn_v2", + "raft_large", + "quantized_resnet50", + "lraspp_mobilenet_v3_large", + "mvit_v1_b", + ], +) +def test_weights_deserializable(name): + for weights in list(models.get_model_weights(name)): + # It is somewhat surprising that deserialization is an identity operation here, but this is the default behavior + # of enums: https://docs.python.org/3/howto/enum.html#enum-members-aka-instances + # Checking for equality, i.e. `==`, is sufficient (and even preferable) for our use case, should we need to drop + # support for the identity operation in the future. 
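# A hedged aside on why the identity below holds: pickle stores an enum
# member's class and name, so unpickling resolves back to the one canonical
# member object. Tiny self-contained demonstration with a hypothetical Enum
# (not a torchvision WeightsEnum):
import pickle
from enum import Enum

class Flavor(Enum):  # hypothetical stand-in for a weights enum
    VANILLA = "vanilla"

# Round-tripping through pickle returns the very same object, not a copy.
assert pickle.loads(pickle.dumps(Flavor.VANILLA)) is Flavor.VANILLA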
+ assert pickle.loads(pickle.dumps(weights)) is weights + + +def get_models_from_module(module): + return [ + v.__name__ + for k, v in module.__dict__.items() + if callable(v) and k[0].islower() and k[0] != "_" and k not in models._api.__all__ + ] + + +@pytest.mark.parametrize( + "module", [models, models.detection, models.quantization, models.segmentation, models.video, models.optical_flow] +) +def test_list_models(module): + a = set(get_models_from_module(module)) + b = set(x.replace("quantized_", "") for x in models.list_models(module)) + + assert len(b) > 0 + assert a == b + + +@pytest.mark.parametrize( + "include_filters", + [ + None, + [], + (), + "", + "*resnet*", + ["*alexnet*"], + "*not-existing-model-for-test?", + ["*resnet*", "*alexnet*"], + ["*resnet*", "*alexnet*", "*not-existing-model-for-test?"], + ("*resnet*", "*alexnet*"), + set(["*resnet*", "*alexnet*"]), + ], +) +@pytest.mark.parametrize( + "exclude_filters", + [ + None, + [], + (), + "", + "*resnet*", + ["*alexnet*"], + ["*not-existing-model-for-test?"], + ["resnet34", "*not-existing-model-for-test?"], + ["resnet34", "*resnet1*"], + ("resnet34", "*resnet1*"), + set(["resnet34", "*resnet1*"]), + ], +) +def test_list_models_filters(include_filters, exclude_filters): + actual = set(models.list_models(models, include=include_filters, exclude=exclude_filters)) + classification_models = set(get_models_from_module(models)) + + if isinstance(include_filters, str): + include_filters = [include_filters] + if isinstance(exclude_filters, str): + exclude_filters = [exclude_filters] + + if include_filters: + expected = set() + for include_f in include_filters: + include_f = include_f.strip("*?") + expected = expected | set(x for x in classification_models if include_f in x) + else: + expected = classification_models + + if exclude_filters: + for exclude_f in exclude_filters: + exclude_f = exclude_f.strip("*?") + if exclude_f != "": + a_exclude = set(x for x in classification_models if exclude_f in x) + expected = expected - a_exclude + + assert expected == actual + + +@pytest.mark.parametrize( + "name, weight", + [ + ("ResNet50_Weights.IMAGENET1K_V1", models.ResNet50_Weights.IMAGENET1K_V1), + ("ResNet50_Weights.DEFAULT", models.ResNet50_Weights.IMAGENET1K_V2), + ( + "ResNet50_QuantizedWeights.DEFAULT", + models.quantization.ResNet50_QuantizedWeights.IMAGENET1K_FBGEMM_V2, + ), + ( + "ResNet50_QuantizedWeights.IMAGENET1K_FBGEMM_V1", + models.quantization.ResNet50_QuantizedWeights.IMAGENET1K_FBGEMM_V1, + ), + ], +) +def test_get_weight(name, weight): + assert models.get_weight(name) == weight + + +@pytest.mark.parametrize( + "model_fn", + TM.list_model_fns(models) + + TM.list_model_fns(models.detection) + + TM.list_model_fns(models.quantization) + + TM.list_model_fns(models.segmentation) + + TM.list_model_fns(models.video) + + TM.list_model_fns(models.optical_flow), +) +def test_naming_conventions(model_fn): + weights_enum = get_model_weights(model_fn) + assert weights_enum is not None + assert len(weights_enum) == 0 or hasattr(weights_enum, "DEFAULT") + + +detection_models_input_dims = { + "fasterrcnn_mobilenet_v3_large_320_fpn": (320, 320), + "fasterrcnn_mobilenet_v3_large_fpn": (800, 800), + "fasterrcnn_resnet50_fpn": (800, 800), + "fasterrcnn_resnet50_fpn_v2": (800, 800), + "fcos_resnet50_fpn": (800, 800), + "keypointrcnn_resnet50_fpn": (1333, 1333), + "maskrcnn_resnet50_fpn": (800, 800), + "maskrcnn_resnet50_fpn_v2": (800, 800), + "retinanet_resnet50_fpn": (800, 800), + "retinanet_resnet50_fpn_v2": (800, 800), + "ssd300_vgg16": 
(300, 300), + "ssdlite320_mobilenet_v3_large": (320, 320), +} + + +@pytest.mark.parametrize( + "model_fn", + TM.list_model_fns(models) + + TM.list_model_fns(models.detection) + + TM.list_model_fns(models.quantization) + + TM.list_model_fns(models.segmentation) + + TM.list_model_fns(models.video) + + TM.list_model_fns(models.optical_flow), +) +@run_if_test_with_extended +def test_schema_meta_validation(model_fn): + if model_fn.__name__ == "maskrcnn_resnet50_fpn_v2": + pytest.skip(reason="FIXME https://github.com/pytorch/vision/issues/7349") + + # list of all possible supported high-level fields for weights meta-data + permitted_fields = { + "backend", + "categories", + "keypoint_names", + "license", + "_metrics", + "min_size", + "min_temporal_size", + "num_params", + "recipe", + "unquantized", + "_docs", + "_ops", + "_file_size", + } + # mandatory fields for each computer vision task + classification_fields = {"categories", ("_metrics", "ImageNet-1K", "acc@1"), ("_metrics", "ImageNet-1K", "acc@5")} + defaults = { + "all": {"_metrics", "min_size", "num_params", "recipe", "_docs", "_file_size", "_ops"}, + "models": classification_fields, + "detection": {"categories", ("_metrics", "COCO-val2017", "box_map")}, + "quantization": classification_fields | {"backend", "unquantized"}, + "segmentation": { + "categories", + ("_metrics", "COCO-val2017-VOC-labels", "miou"), + ("_metrics", "COCO-val2017-VOC-labels", "pixel_acc"), + }, + "video": {"categories", ("_metrics", "Kinetics-400", "acc@1"), ("_metrics", "Kinetics-400", "acc@5")}, + "optical_flow": set(), + } + model_name = model_fn.__name__ + module_name = model_fn.__module__.split(".")[-2] + expected_fields = defaults["all"] | defaults[module_name] + + weights_enum = get_model_weights(model_fn) + if len(weights_enum) == 0: + pytest.skip(f"Model '{model_name}' doesn't have any pre-trained weights.") + + problematic_weights = {} + incorrect_meta = [] + bad_names = [] + for w in weights_enum: + actual_fields = set(w.meta.keys()) + actual_fields |= set( + ("_metrics", dataset, metric_key) + for dataset in w.meta.get("_metrics", {}).keys() + for metric_key in w.meta.get("_metrics", {}).get(dataset, {}).keys() + ) + missing_fields = expected_fields - actual_fields + unsupported_fields = set(w.meta.keys()) - permitted_fields + if missing_fields or unsupported_fields: + problematic_weights[w] = {"missing": missing_fields, "unsupported": unsupported_fields} + + if w == weights_enum.DEFAULT or any(w.meta[k] != weights_enum.DEFAULT.meta[k] for k in ["num_params", "_ops"]): + if module_name == "quantization": + # parameters() count doesn't work well with quantization, so we check against the non-quantized + unquantized_w = w.meta.get("unquantized") + if unquantized_w is not None: + if w.meta.get("num_params") != unquantized_w.meta.get("num_params"): + incorrect_meta.append((w, "num_params")) + + # the methodology for quantized ops count doesn't work as well, so we take unquantized FLOPs + # instead + if w.meta["_ops"] != unquantized_w.meta.get("_ops"): + incorrect_meta.append((w, "_ops")) + + else: + # loading the model and using it for parameter and ops verification + model = model_fn(weights=w) + + if w.meta.get("num_params") != sum(p.numel() for p in model.parameters()): + incorrect_meta.append((w, "num_params")) + + kwargs = {} + if model_name in detection_models_input_dims: + # detection models have non default height and width + height, width = detection_models_input_dims[model_name] + kwargs = {"height": height, "width": width} + + if not 
model_fn.__name__.startswith("vit"): + # FIXME: https://github.com/pytorch/vision/issues/7871 + calculated_ops = get_ops(model=model, weight=w, **kwargs) + if calculated_ops != w.meta["_ops"]: + incorrect_meta.append((w, "_ops")) + + if not w.name.isupper(): + bad_names.append(w) + + if get_file_size_mb(w) != w.meta.get("_file_size"): + incorrect_meta.append((w, "_file_size")) + + assert not problematic_weights + assert not incorrect_meta + assert not bad_names + + +@pytest.mark.parametrize( + "model_fn", + TM.list_model_fns(models) + + TM.list_model_fns(models.detection) + + TM.list_model_fns(models.quantization) + + TM.list_model_fns(models.segmentation) + + TM.list_model_fns(models.video) + + TM.list_model_fns(models.optical_flow), +) +@run_if_test_with_extended +def test_transforms_jit(model_fn): + model_name = model_fn.__name__ + weights_enum = get_model_weights(model_fn) + if len(weights_enum) == 0: + pytest.skip(f"Model '{model_name}' doesn't have any pre-trained weights.") + + defaults = { + "models": { + "input_shape": (1, 3, 224, 224), + }, + "detection": { + "input_shape": (3, 300, 300), + }, + "quantization": { + "input_shape": (1, 3, 224, 224), + }, + "segmentation": { + "input_shape": (1, 3, 520, 520), + }, + "video": { + "input_shape": (1, 3, 4, 112, 112), + }, + "optical_flow": { + "input_shape": (1, 3, 128, 128), + }, + } + module_name = model_fn.__module__.split(".")[-2] + + kwargs = {**defaults[module_name], **TM._model_params.get(model_name, {})} + input_shape = kwargs.pop("input_shape") + x = torch.rand(input_shape) + if module_name == "optical_flow": + args = (x, x) + else: + if module_name == "video": + x = x.permute(0, 2, 1, 3, 4) + args = (x,) + + problematic_weights = [] + for w in weights_enum: + transforms = w.transforms() + try: + TM._check_jit_scriptable(transforms, args) + except Exception: + problematic_weights.append(w) + + assert not problematic_weights + + +# With this filter, every unexpected warning will be turned into an error +@pytest.mark.filterwarnings("error") +class TestHandleLegacyInterface: + class ModelWeights(WeightsEnum): + Sentinel = Weights(url="https://pytorch.org", transforms=lambda x: x, meta=dict()) + + @pytest.mark.parametrize( + "kwargs", + [ + pytest.param(dict(), id="empty"), + pytest.param(dict(weights=None), id="None"), + pytest.param(dict(weights=ModelWeights.Sentinel), id="Weights"), + ], + ) + def test_no_warn(self, kwargs): + @handle_legacy_interface(weights=("pretrained", self.ModelWeights.Sentinel)) + def builder(*, weights=None): + pass + + builder(**kwargs) + + @pytest.mark.parametrize("pretrained", (True, False)) + def test_pretrained_pos(self, pretrained): + @handle_legacy_interface(weights=("pretrained", self.ModelWeights.Sentinel)) + def builder(*, weights=None): + pass + + with pytest.warns(UserWarning, match="positional"): + builder(pretrained) + + @pytest.mark.parametrize("pretrained", (True, False)) + def test_pretrained_kw(self, pretrained): + @handle_legacy_interface(weights=("pretrained", self.ModelWeights.Sentinel)) + def builder(*, weights=None): + pass + + with pytest.warns(UserWarning, match="deprecated"): + builder(pretrained) + + @pytest.mark.parametrize("pretrained", (True, False)) + @pytest.mark.parametrize("positional", (True, False)) + def test_equivalent_behavior_weights(self, pretrained, positional): + @handle_legacy_interface(weights=("pretrained", self.ModelWeights.Sentinel)) + def builder(*, weights=None): + pass + + args, kwargs = ((pretrained,), dict()) if positional else ((), 
dict(pretrained=pretrained)) + with pytest.warns(UserWarning, match=f"weights={self.ModelWeights.Sentinel if pretrained else None}"): + builder(*args, **kwargs) + + def test_multi_params(self): + weights_params = ("weights", "weights_other") + pretrained_params = [param.replace("weights", "pretrained") for param in weights_params] + + @handle_legacy_interface( + **{ + weights_param: (pretrained_param, self.ModelWeights.Sentinel) + for weights_param, pretrained_param in zip(weights_params, pretrained_params) + } + ) + def builder(*, weights=None, weights_other=None): + pass + + for pretrained_param in pretrained_params: + with pytest.warns(UserWarning, match="deprecated"): + builder(**{pretrained_param: True}) + + def test_default_callable(self): + @handle_legacy_interface( + weights=( + "pretrained", + lambda kwargs: self.ModelWeights.Sentinel if kwargs["flag"] else None, + ) + ) + def builder(*, weights=None, flag): + pass + + with pytest.warns(UserWarning, match="deprecated"): + builder(pretrained=True, flag=True) + + with pytest.raises(ValueError, match="weights"): + builder(pretrained=True, flag=False) + + @pytest.mark.parametrize( + "model_fn", + [fn for fn in TM.list_model_fns(models) if fn.__name__ not in {"vit_h_14", "regnet_y_128gf"}] + + TM.list_model_fns(models.detection) + + TM.list_model_fns(models.quantization) + + TM.list_model_fns(models.segmentation) + + TM.list_model_fns(models.video) + + TM.list_model_fns(models.optical_flow) + + [ + lambda pretrained: resnet_fpn_backbone(backbone_name="resnet50", pretrained=pretrained), + lambda pretrained: mobilenet_backbone(backbone_name="mobilenet_v2", fpn=False, pretrained=pretrained), + ], + ) + @run_if_test_with_extended + def test_pretrained_deprecation(self, model_fn): + with pytest.warns(UserWarning, match="deprecated"): + model_fn(pretrained=True) diff --git a/test/test_functional_tensor.py b/test/test_functional_tensor.py index e318420102b..b5352f18f21 100644 --- a/test/test_functional_tensor.py +++ b/test/test_functional_tensor.py @@ -1,81 +1,1282 @@ -from __future__ import division +import colorsys +import itertools +import math +import os +from functools import partial +from typing import Sequence + +import numpy as np +import PIL.Image +import pytest import torch -import torchvision.transforms as transforms -import torchvision.transforms.functional_tensor as F_t +import torchvision.transforms as T +import torchvision.transforms._functional_pil as F_pil +import torchvision.transforms._functional_tensor as F_t import torchvision.transforms.functional as F -import numpy as np -import unittest -import random - - -class Tester(unittest.TestCase): - - def test_vflip(self): - img_tensor = torch.randn(3, 16, 16) - vflipped_img = F_t.vflip(img_tensor) - vflipped_img_again = F_t.vflip(vflipped_img) - self.assertEqual(vflipped_img.shape, img_tensor.shape) - self.assertTrue(torch.equal(img_tensor, vflipped_img_again)) - - def test_hflip(self): - img_tensor = torch.randn(3, 16, 16) - hflipped_img = F_t.hflip(img_tensor) - hflipped_img_again = F_t.hflip(hflipped_img) - self.assertEqual(hflipped_img.shape, img_tensor.shape) - self.assertTrue(torch.equal(img_tensor, hflipped_img_again)) - - def test_crop(self): - img_tensor = torch.randint(0, 255, (3, 16, 16), dtype=torch.uint8) - top = random.randint(0, 15) - left = random.randint(0, 15) - height = random.randint(1, 16 - top) - width = random.randint(1, 16 - left) - img_cropped = F_t.crop(img_tensor, top, left, height, width) - img_PIL = transforms.ToPILImage()(img_tensor) - 
img_PIL_cropped = F.crop(img_PIL, top, left, height, width) - img_cropped_GT = transforms.ToTensor()(img_PIL_cropped) - - self.assertTrue(torch.equal(img_cropped, (img_cropped_GT * 255).to(torch.uint8)), - "functional_tensor crop not working") - - def test_adjustments(self): - fns = ((F.adjust_brightness, F_t.adjust_brightness), - (F.adjust_contrast, F_t.adjust_contrast), - (F.adjust_saturation, F_t.adjust_saturation)) - - for _ in range(20): - channels = 3 - dims = torch.randint(1, 50, (2,)) - shape = (channels, dims[0], dims[1]) - - if torch.randint(0, 2, (1,)) == 0: - img = torch.rand(*shape, dtype=torch.float) - else: - img = torch.randint(0, 256, shape, dtype=torch.uint8) - - factor = 3 * torch.rand(1) - for f, ft in fns: - - ft_img = ft(img, factor) - if not img.dtype.is_floating_point: - ft_img = ft_img.to(torch.float) / 255 - - img_pil = transforms.ToPILImage()(img) - f_img_pil = f(img_pil, factor) - f_img = transforms.ToTensor()(f_img_pil) - - # F uses uint8 and F_t uses float, so there is a small - # difference in values caused by (at most 5) truncations. - max_diff = (ft_img - f_img).abs().max() - self.assertLess(max_diff, 5 / 255 + 1e-5) - - def test_rgb_to_grayscale(self): - img_tensor = torch.randint(0, 255, (3, 16, 16), dtype=torch.uint8) - grayscale_tensor = F_t.rgb_to_grayscale(img_tensor).to(int) - grayscale_pil_img = torch.tensor(np.array(F.to_grayscale(F.to_pil_image(img_tensor)))).to(int) - max_diff = (grayscale_tensor - grayscale_pil_img).abs().max() - self.assertLess(max_diff, 1.0001) - - -if __name__ == '__main__': - unittest.main() +from common_utils import ( + _assert_approx_equal_tensor_to_pil, + _assert_equal_tensor_to_pil, + _create_data, + _create_data_batch, + _test_fn_on_batch, + assert_equal, + cpu_and_cuda, + needs_cuda, +) +from torchvision.transforms import InterpolationMode + +NEAREST, NEAREST_EXACT, BILINEAR, BICUBIC = ( + InterpolationMode.NEAREST, + InterpolationMode.NEAREST_EXACT, + InterpolationMode.BILINEAR, + InterpolationMode.BICUBIC, +) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize("fn", [F.get_image_size, F.get_image_num_channels, F.get_dimensions]) +def test_image_sizes(device, fn): + script_F = torch.jit.script(fn) + + img_tensor, pil_img = _create_data(16, 18, 3, device=device) + value_img = fn(img_tensor) + value_pil_img = fn(pil_img) + assert value_img == value_pil_img + + value_img_script = script_F(img_tensor) + assert value_img == value_img_script + + batch_tensors = _create_data_batch(16, 18, 3, num_samples=4, device=device) + value_img_batch = fn(batch_tensors) + assert value_img == value_img_batch + + +@needs_cuda +def test_scale_channel(): + """Make sure that _scale_channel gives the same results on CPU and GPU as + histc or bincount are used depending on the device. + """ + # TODO: when # https://github.com/pytorch/pytorch/issues/53194 is fixed, + # only use bincount and remove that test. 
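# Why the CPU (histc) and CUDA (bincount) paths are comparable at all: for
# integer pixel values in [0, 255], histc with 256 bins over [0, 255] and
# bincount with minlength=256 produce identical counts. A small sanity sketch,
# illustrative only and not part of the test suite:
import torch

vals = torch.randint(0, 256, (1_000,))
# histc needs a floating-point input; bin width is 255/256, and since
# v + v/255 < v + 1 for v < 255, each integer value lands in its own bin.
hist = torch.histc(vals.to(torch.float32), bins=256, min=0, max=255)
counts = torch.bincount(vals, minlength=256)
assert torch.equal(hist.to(torch.int64), counts)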
+ size = (1_000,) + img_chan = torch.randint(0, 256, size=size).to("cpu") + scaled_cpu = F_t._scale_channel(img_chan) + scaled_cuda = F_t._scale_channel(img_chan.to("cuda")) + assert_equal(scaled_cpu, scaled_cuda.to("cpu")) + + +class TestRotate: + + ALL_DTYPES = [None, torch.float32, torch.float64, torch.float16] + scripted_rotate = torch.jit.script(F.rotate) + IMG_W = 26 + + @pytest.mark.parametrize("device", cpu_and_cuda()) + @pytest.mark.parametrize("height, width", [(7, 33), (26, IMG_W), (32, IMG_W)]) + @pytest.mark.parametrize( + "center", + [ + None, + (int(IMG_W * 0.3), int(IMG_W * 0.4)), + [int(IMG_W * 0.5), int(IMG_W * 0.6)], + ], + ) + @pytest.mark.parametrize("dt", ALL_DTYPES) + @pytest.mark.parametrize("angle", range(-180, 180, 34)) + @pytest.mark.parametrize("expand", [True, False]) + @pytest.mark.parametrize( + "fill", + [ + None, + [0, 0, 0], + (1, 2, 3), + [255, 255, 255], + [ + 1, + ], + (2.0,), + ], + ) + @pytest.mark.parametrize("fn", [F.rotate, scripted_rotate]) + def test_rotate(self, device, height, width, center, dt, angle, expand, fill, fn): + tensor, pil_img = _create_data(height, width, device=device) + + if dt == torch.float16 and torch.device(device).type == "cpu": + # skip float16 on CPU case + return + + if dt is not None: + tensor = tensor.to(dtype=dt) + + f_pil = int(fill[0]) if fill is not None and len(fill) == 1 else fill + out_pil_img = F.rotate(pil_img, angle=angle, interpolation=NEAREST, expand=expand, center=center, fill=f_pil) + out_pil_tensor = torch.from_numpy(np.array(out_pil_img).transpose((2, 0, 1))) + + out_tensor = fn(tensor, angle=angle, interpolation=NEAREST, expand=expand, center=center, fill=fill).cpu() + + if out_tensor.dtype != torch.uint8: + out_tensor = out_tensor.to(torch.uint8) + + assert ( + out_tensor.shape == out_pil_tensor.shape + ), f"{(height, width, NEAREST, dt, angle, expand, center)}: {out_tensor.shape} vs {out_pil_tensor.shape}" + + num_diff_pixels = (out_tensor != out_pil_tensor).sum().item() / 3.0 + ratio_diff_pixels = num_diff_pixels / out_tensor.shape[-1] / out_tensor.shape[-2] + # Tolerance : less than 3% of different pixels + assert ratio_diff_pixels < 0.03, ( + f"{(height, width, NEAREST, dt, angle, expand, center, fill)}: " + f"{ratio_diff_pixels}\n{out_tensor[0, :7, :7]} vs \n" + f"{out_pil_tensor[0, :7, :7]}" + ) + + @pytest.mark.parametrize("device", cpu_and_cuda()) + @pytest.mark.parametrize("dt", ALL_DTYPES) + def test_rotate_batch(self, device, dt): + if dt == torch.float16 and device == "cpu": + # skip float16 on CPU case + return + + batch_tensors = _create_data_batch(26, 36, num_samples=4, device=device) + if dt is not None: + batch_tensors = batch_tensors.to(dtype=dt) + + center = (20, 22) + _test_fn_on_batch(batch_tensors, F.rotate, angle=32, interpolation=NEAREST, expand=True, center=center) + + def test_rotate_interpolation_type(self): + tensor, _ = _create_data(26, 26) + res1 = F.rotate(tensor, 45, interpolation=PIL.Image.BILINEAR) + res2 = F.rotate(tensor, 45, interpolation=BILINEAR) + assert_equal(res1, res2) + + +class TestAffine: + + ALL_DTYPES = [None, torch.float32, torch.float64, torch.float16] + scripted_affine = torch.jit.script(F.affine) + + @pytest.mark.parametrize("device", cpu_and_cuda()) + @pytest.mark.parametrize("height, width", [(26, 26), (32, 26)]) + @pytest.mark.parametrize("dt", ALL_DTYPES) + def test_identity_map(self, device, height, width, dt): + # Tests on square and rectangular images + tensor, pil_img = _create_data(height, width, device=device) + + if dt == torch.float16 and 
device == "cpu": + # skip float16 on CPU case + return + + if dt is not None: + tensor = tensor.to(dtype=dt) + + # 1) identity map + out_tensor = F.affine(tensor, angle=0, translate=[0, 0], scale=1.0, shear=[0.0, 0.0], interpolation=NEAREST) + + assert_equal(tensor, out_tensor, msg=f"{out_tensor[0, :5, :5]} vs {tensor[0, :5, :5]}") + out_tensor = self.scripted_affine( + tensor, angle=0, translate=[0, 0], scale=1.0, shear=[0.0, 0.0], interpolation=NEAREST + ) + assert_equal(tensor, out_tensor, msg=f"{out_tensor[0, :5, :5]} vs {tensor[0, :5, :5]}") + + @pytest.mark.parametrize("device", cpu_and_cuda()) + @pytest.mark.parametrize("height, width", [(26, 26)]) + @pytest.mark.parametrize("dt", ALL_DTYPES) + @pytest.mark.parametrize( + "angle, config", + [ + (90, {"k": 1, "dims": (-1, -2)}), + (45, None), + (30, None), + (-30, None), + (-45, None), + (-90, {"k": -1, "dims": (-1, -2)}), + (180, {"k": 2, "dims": (-1, -2)}), + ], + ) + @pytest.mark.parametrize("fn", [F.affine, scripted_affine]) + def test_square_rotations(self, device, height, width, dt, angle, config, fn): + # 2) Test rotation + tensor, pil_img = _create_data(height, width, device=device) + + if dt == torch.float16 and device == "cpu": + # skip float16 on CPU case + return + + if dt is not None: + tensor = tensor.to(dtype=dt) + + out_pil_img = F.affine( + pil_img, angle=angle, translate=[0, 0], scale=1.0, shear=[0.0, 0.0], interpolation=NEAREST + ) + out_pil_tensor = torch.from_numpy(np.array(out_pil_img).transpose((2, 0, 1))).to(device) + + out_tensor = fn(tensor, angle=angle, translate=[0, 0], scale=1.0, shear=[0.0, 0.0], interpolation=NEAREST) + if config is not None: + assert_equal(torch.rot90(tensor, **config), out_tensor) + + if out_tensor.dtype != torch.uint8: + out_tensor = out_tensor.to(torch.uint8) + + num_diff_pixels = (out_tensor != out_pil_tensor).sum().item() / 3.0 + ratio_diff_pixels = num_diff_pixels / out_tensor.shape[-1] / out_tensor.shape[-2] + # Tolerance : less than 6% of different pixels + assert ratio_diff_pixels < 0.06 + + @pytest.mark.parametrize("device", cpu_and_cuda()) + @pytest.mark.parametrize("height, width", [(32, 26)]) + @pytest.mark.parametrize("dt", ALL_DTYPES) + @pytest.mark.parametrize("angle", [90, 45, 15, -30, -60, -120]) + @pytest.mark.parametrize("fn", [F.affine, scripted_affine]) + @pytest.mark.parametrize("center", [None, [0, 0]]) + def test_rect_rotations(self, device, height, width, dt, angle, fn, center): + # Tests on rectangular images + tensor, pil_img = _create_data(height, width, device=device) + + if dt == torch.float16 and device == "cpu": + # skip float16 on CPU case + return + + if dt is not None: + tensor = tensor.to(dtype=dt) + + out_pil_img = F.affine( + pil_img, angle=angle, translate=[0, 0], scale=1.0, shear=[0.0, 0.0], interpolation=NEAREST, center=center + ) + out_pil_tensor = torch.from_numpy(np.array(out_pil_img).transpose((2, 0, 1))) + + out_tensor = fn( + tensor, angle=angle, translate=[0, 0], scale=1.0, shear=[0.0, 0.0], interpolation=NEAREST, center=center + ).cpu() + + if out_tensor.dtype != torch.uint8: + out_tensor = out_tensor.to(torch.uint8) + + num_diff_pixels = (out_tensor != out_pil_tensor).sum().item() / 3.0 + ratio_diff_pixels = num_diff_pixels / out_tensor.shape[-1] / out_tensor.shape[-2] + # Tolerance : less than 3% of different pixels + assert ratio_diff_pixels < 0.03 + + @pytest.mark.parametrize("device", cpu_and_cuda()) + @pytest.mark.parametrize("height, width", [(26, 26), (32, 26)]) + @pytest.mark.parametrize("dt", ALL_DTYPES) + 
@pytest.mark.parametrize("t", [[10, 12], (-12, -13)]) + @pytest.mark.parametrize("fn", [F.affine, scripted_affine]) + def test_translations(self, device, height, width, dt, t, fn): + # 3) Test translation + tensor, pil_img = _create_data(height, width, device=device) + + if dt == torch.float16 and device == "cpu": + # skip float16 on CPU case + return + + if dt is not None: + tensor = tensor.to(dtype=dt) + + out_pil_img = F.affine(pil_img, angle=0, translate=t, scale=1.0, shear=[0.0, 0.0], interpolation=NEAREST) + + out_tensor = fn(tensor, angle=0, translate=t, scale=1.0, shear=[0.0, 0.0], interpolation=NEAREST) + + if out_tensor.dtype != torch.uint8: + out_tensor = out_tensor.to(torch.uint8) + + _assert_equal_tensor_to_pil(out_tensor, out_pil_img) + + @pytest.mark.parametrize("device", cpu_and_cuda()) + @pytest.mark.parametrize("height, width", [(26, 26), (32, 26)]) + @pytest.mark.parametrize("dt", ALL_DTYPES) + @pytest.mark.parametrize( + "a, t, s, sh, f", + [ + (45.5, [5, 6], 1.0, [0.0, 0.0], None), + (33, (5, -4), 1.0, [0.0, 0.0], [0, 0, 0]), + (45, [-5, 4], 1.2, [0.0, 0.0], (1, 2, 3)), + (33, (-4, -8), 2.0, [0.0, 0.0], [255, 255, 255]), + (85, (10, -10), 0.7, [0.0, 0.0], [1]), + (0, [0, 0], 1.0, [35.0], (2.0,)), + (-25, [0, 0], 1.2, [0.0, 15.0], None), + (-45, [-10, 0], 0.7, [2.0, 5.0], None), + (-45, [-10, -10], 1.2, [4.0, 5.0], None), + (-90, [0, 0], 1.0, [0.0, 0.0], None), + ], + ) + @pytest.mark.parametrize("fn", [F.affine, scripted_affine]) + def test_all_ops(self, device, height, width, dt, a, t, s, sh, f, fn): + # 4) Test rotation + translation + scale + shear + tensor, pil_img = _create_data(height, width, device=device) + + if dt == torch.float16 and device == "cpu": + # skip float16 on CPU case + return + + if dt is not None: + tensor = tensor.to(dtype=dt) + + f_pil = int(f[0]) if f is not None and len(f) == 1 else f + out_pil_img = F.affine(pil_img, angle=a, translate=t, scale=s, shear=sh, interpolation=NEAREST, fill=f_pil) + out_pil_tensor = torch.from_numpy(np.array(out_pil_img).transpose((2, 0, 1))) + + out_tensor = fn(tensor, angle=a, translate=t, scale=s, shear=sh, interpolation=NEAREST, fill=f).cpu() + + if out_tensor.dtype != torch.uint8: + out_tensor = out_tensor.to(torch.uint8) + + num_diff_pixels = (out_tensor != out_pil_tensor).sum().item() / 3.0 + ratio_diff_pixels = num_diff_pixels / out_tensor.shape[-1] / out_tensor.shape[-2] + # Tolerance : less than 5% (cpu), 6% (cuda) of different pixels + tol = 0.06 if device == "cuda" else 0.05 + assert ratio_diff_pixels < tol + + @pytest.mark.parametrize("device", cpu_and_cuda()) + @pytest.mark.parametrize("dt", ALL_DTYPES) + def test_batches(self, device, dt): + if dt == torch.float16 and device == "cpu": + # skip float16 on CPU case + return + + batch_tensors = _create_data_batch(26, 36, num_samples=4, device=device) + if dt is not None: + batch_tensors = batch_tensors.to(dtype=dt) + + _test_fn_on_batch(batch_tensors, F.affine, angle=-43, translate=[-3, 4], scale=1.2, shear=[4.0, 5.0]) + + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_interpolation_type(self, device): + tensor, pil_img = _create_data(26, 26, device=device) + + res1 = F.affine(tensor, 45, translate=[0, 0], scale=1.0, shear=[0.0, 0.0], interpolation=PIL.Image.BILINEAR) + res2 = F.affine(tensor, 45, translate=[0, 0], scale=1.0, shear=[0.0, 0.0], interpolation=BILINEAR) + assert_equal(res1, res2) + + +def _get_data_dims_and_points_for_perspective(): + # Ideally we would parametrize independently over data dims and points, but + # we want to 
run tests on some points that also depend on the data dims.
+ # Pytest doesn't support covariant parametrization, so we do it somewhat manually here.
+
+ data_dims = [(26, 34), (26, 26)]
+ points = [
+ [[[0, 0], [33, 0], [33, 25], [0, 25]], [[3, 2], [32, 3], [30, 24], [2, 25]]],
+ [[[3, 2], [32, 3], [30, 24], [2, 25]], [[0, 0], [33, 0], [33, 25], [0, 25]]],
+ [[[3, 2], [32, 3], [30, 24], [2, 25]], [[5, 5], [30, 3], [33, 19], [4, 25]]],
+ ]
+
+ dims_and_points = list(itertools.product(data_dims, points))
+
+ # Up to here, we could just have used two @parametrize decorators.
+ # Down below is the covariant part, as the points depend on the data dims.
+
+ n = 10
+ for dim in data_dims:
+ dims_and_points += [(dim, T.RandomPerspective.get_params(dim[1], dim[0], i / n)) for i in range(n)]
+ return dims_and_points
+
+
+@pytest.mark.parametrize("device", cpu_and_cuda())
+@pytest.mark.parametrize("dims_and_points", _get_data_dims_and_points_for_perspective())
+@pytest.mark.parametrize("dt", [None, torch.float32, torch.float64, torch.float16])
+@pytest.mark.parametrize("fill", (None, [0, 0, 0], [1, 2, 3], [255, 255, 255], [1], (2.0,)))
+@pytest.mark.parametrize("fn", [F.perspective, torch.jit.script(F.perspective)])
+def test_perspective_pil_vs_tensor(device, dims_and_points, dt, fill, fn):
+
+ if dt == torch.float16 and device == "cpu":
+ # skip float16 on CPU case
+ return
+
+ data_dims, (spoints, epoints) = dims_and_points
+
+ tensor, pil_img = _create_data(*data_dims, device=device)
+ if dt is not None:
+ tensor = tensor.to(dtype=dt)
+
+ interpolation = NEAREST
+ fill_pil = int(fill[0]) if fill is not None and len(fill) == 1 else fill
+ out_pil_img = F.perspective(
+ pil_img, startpoints=spoints, endpoints=epoints, interpolation=interpolation, fill=fill_pil
+ )
+ out_pil_tensor = torch.from_numpy(np.array(out_pil_img).transpose((2, 0, 1)))
+ out_tensor = fn(tensor, startpoints=spoints, endpoints=epoints, interpolation=interpolation, fill=fill).cpu()
+
+ if out_tensor.dtype != torch.uint8:
+ out_tensor = out_tensor.to(torch.uint8)
+
+ num_diff_pixels = (out_tensor != out_pil_tensor).sum().item() / 3.0
+ ratio_diff_pixels = num_diff_pixels / out_tensor.shape[-1] / out_tensor.shape[-2]
+ # Tolerance: less than 5% of different pixels
+ assert ratio_diff_pixels < 0.05
+
+
+@pytest.mark.parametrize("device", cpu_and_cuda())
+@pytest.mark.parametrize("dims_and_points", _get_data_dims_and_points_for_perspective())
+@pytest.mark.parametrize("dt", [None, torch.float32, torch.float64, torch.float16])
+def test_perspective_batch(device, dims_and_points, dt):
+
+ if dt == torch.float16 and device == "cpu":
+ # skip float16 on CPU case
+ return
+
+ data_dims, (spoints, epoints) = dims_and_points
+
+ batch_tensors = _create_data_batch(*data_dims, num_samples=4, device=device)
+ if dt is not None:
+ batch_tensors = batch_tensors.to(dtype=dt)
+
+ # Ignore the equivalence between scripted and regular function on float16 cuda. The pixels at
+ # the border may be entirely different due to small rounding errors.
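+ # (Editorial note; an assumption about the common_utils helper, not verified
+ # here: a strictly negative scripted_fn_atol is treated as "skip the
+ # scripted-vs-eager closeness check", i.e. roughly
+ #   if scripted_fn_atol >= 0:
+ #       torch.testing.assert_close(out, scripted_out, rtol=0, atol=scripted_fn_atol)
+ # so -1 below acts as a readable sentinel rather than a real tolerance.)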
+ scripted_fn_atol = -1 if (dt == torch.float16 and device == "cuda") else 1e-8 + _test_fn_on_batch( + batch_tensors, + F.perspective, + scripted_fn_atol=scripted_fn_atol, + startpoints=spoints, + endpoints=epoints, + interpolation=NEAREST, + ) + + +def test_perspective_interpolation_type(): + spoints = [[0, 0], [33, 0], [33, 25], [0, 25]] + epoints = [[3, 2], [32, 3], [30, 24], [2, 25]] + tensor = torch.randint(0, 256, (3, 26, 26)) + + res1 = F.perspective(tensor, startpoints=spoints, endpoints=epoints, interpolation=PIL.Image.BILINEAR) + res2 = F.perspective(tensor, startpoints=spoints, endpoints=epoints, interpolation=BILINEAR) + assert_equal(res1, res2) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize("dt", [None, torch.float32, torch.float64, torch.float16]) +@pytest.mark.parametrize("size", [32, 26, [32], [32, 32], (32, 32), [26, 35]]) +@pytest.mark.parametrize("max_size", [None, 34, 40, 1000]) +@pytest.mark.parametrize("interpolation", [BILINEAR, BICUBIC, NEAREST, NEAREST_EXACT]) +def test_resize(device, dt, size, max_size, interpolation): + + if dt == torch.float16 and device == "cpu": + # skip float16 on CPU case + return + + if max_size is not None and isinstance(size, Sequence) and len(size) != 1: + return # unsupported + + torch.manual_seed(12) + script_fn = torch.jit.script(F.resize) + tensor, pil_img = _create_data(26, 36, device=device) + batch_tensors = _create_data_batch(16, 18, num_samples=4, device=device) + + if dt is not None: + # This is a trivial cast to float of uint8 data to test all cases + tensor = tensor.to(dt) + batch_tensors = batch_tensors.to(dt) + + resized_tensor = F.resize(tensor, size=size, interpolation=interpolation, max_size=max_size, antialias=True) + resized_pil_img = F.resize(pil_img, size=size, interpolation=interpolation, max_size=max_size, antialias=True) + + assert resized_tensor.size()[1:] == resized_pil_img.size[::-1] + + if interpolation != NEAREST: + # We can not check values if mode = NEAREST, as results are different + # E.g. resized_tensor = [[a, a, b, c, d, d, e, ...]] + # E.g. 
resized_pil_img = [[a, b, c, c, d, e, f, ...]]
+ resized_tensor_f = resized_tensor
+ # uint8 results are cast to float to compute a mean absolute error against the PIL image
+ if resized_tensor_f.dtype == torch.uint8:
+ resized_tensor_f = resized_tensor_f.to(torch.float)
+
+ # Pay attention to high tolerance for MAE
+ _assert_approx_equal_tensor_to_pil(resized_tensor_f, resized_pil_img, tol=3.0)
+
+ if isinstance(size, int):
+ script_size = [size]
+ else:
+ script_size = size
+
+ resize_result = script_fn(tensor, size=script_size, interpolation=interpolation, max_size=max_size, antialias=True)
+ assert_equal(resized_tensor, resize_result)
+
+ _test_fn_on_batch(
+ batch_tensors, F.resize, size=script_size, interpolation=interpolation, max_size=max_size, antialias=True
+ )
+
+
+@pytest.mark.parametrize("device", cpu_and_cuda())
+def test_resize_asserts(device):
+
+ tensor, pil_img = _create_data(26, 36, device=device)
+
+ res1 = F.resize(tensor, size=32, interpolation=PIL.Image.BILINEAR)
+ res2 = F.resize(tensor, size=32, interpolation=BILINEAR)
+ assert_equal(res1, res2)
+
+ for img in (tensor, pil_img):
+ exp_msg = "max_size should only be passed if size specifies the length of the smaller edge"
+ with pytest.raises(ValueError, match=exp_msg):
+ F.resize(img, size=(32, 34), max_size=35)
+ with pytest.raises(ValueError, match="max_size = 32 must be strictly greater"):
+ F.resize(img, size=32, max_size=32)
+
+
+@pytest.mark.parametrize("device", cpu_and_cuda())
+@pytest.mark.parametrize("dt", [None, torch.float32, torch.float64, torch.float16])
+@pytest.mark.parametrize("size", [[96, 72], [96, 420], [420, 72]])
+@pytest.mark.parametrize("interpolation", [BILINEAR, BICUBIC])
+def test_resize_antialias(device, dt, size, interpolation):
+
+ if dt == torch.float16 and device == "cpu":
+ # skip float16 on CPU case
+ return
+
+ torch.manual_seed(12)
+ script_fn = torch.jit.script(F.resize)
+ tensor, pil_img = _create_data(320, 290, device=device)
+
+ if dt is not None:
+ # This is a trivial cast of the uint8 data to float, to exercise all dtype cases
+ tensor = tensor.to(dt)
+
+ resized_tensor = F.resize(tensor, size=size, interpolation=interpolation, antialias=True)
+ resized_pil_img = F.resize(pil_img, size=size, interpolation=interpolation, antialias=True)
+
+ assert resized_tensor.size()[1:] == resized_pil_img.size[::-1]
+
+ resized_tensor_f = resized_tensor
+ # uint8 results are cast to float to compute a mean absolute error against the PIL image
+ if resized_tensor_f.dtype == torch.uint8:
+ resized_tensor_f = resized_tensor_f.to(torch.float)
+
+ _assert_approx_equal_tensor_to_pil(resized_tensor_f, resized_pil_img, tol=0.5, msg=f"{size}, {interpolation}, {dt}")
+
+ accepted_tol = 1.0 + 1e-5
+ if interpolation == BICUBIC:
+ # Raise the overall mean tolerance to make the tests pass.
+ # The high value is mostly required for test cases with
+ # downsampling and upsampling, where we cannot exactly
+ # match the PIL implementation.
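+ # (Editorial sketch, assuming _assert_approx_equal_tensor_to_pil reduces the
+ # absolute difference with `agg_method`; with agg_method="max" the check below
+ # is then roughly equivalent to:
+ #   pil_f = torch.from_numpy(np.array(resized_pil_img)).permute(2, 0, 1).float()
+ #   assert (resized_tensor_f.cpu() - pil_f).abs().max() <= accepted_tol
+ # )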
+ accepted_tol = 15.0
+
+ _assert_approx_equal_tensor_to_pil(
+ resized_tensor_f, resized_pil_img, tol=accepted_tol, agg_method="max", msg=f"{size}, {interpolation}, {dt}"
+ )
+
+ if isinstance(size, int):
+ script_size = [
+ size,
+ ]
+ else:
+ script_size = size
+
+ resize_result = script_fn(tensor, size=script_size, interpolation=interpolation, antialias=True)
+ assert_equal(resized_tensor, resize_result)
+
+
+def check_functional_vs_PIL_vs_scripted(
+ fn, fn_pil, fn_t, config, device, dtype, channels=3, tol=2.0 + 1e-10, agg_method="max"
+):
+
+ script_fn = torch.jit.script(fn)
+ torch.manual_seed(15)
+ tensor, pil_img = _create_data(26, 34, channels=channels, device=device)
+ batch_tensors = _create_data_batch(16, 18, num_samples=4, channels=channels, device=device)
+
+ if dtype is not None:
+ tensor = F.convert_image_dtype(tensor, dtype)
+ batch_tensors = F.convert_image_dtype(batch_tensors, dtype)
+
+ out_fn_t = fn_t(tensor, **config)
+ out_pil = fn_pil(pil_img, **config)
+ out_scripted = script_fn(tensor, **config)
+ assert out_fn_t.dtype == out_scripted.dtype
+ assert out_fn_t.size()[1:] == out_pil.size[::-1]
+
+ rgb_tensor = out_fn_t
+
+ if out_fn_t.dtype != torch.uint8:
+ rgb_tensor = F.convert_image_dtype(out_fn_t, torch.uint8)
+
+ # Check that max difference does not exceed 2 in [0, 255] range
+ # Exact matching is not possible due to the incompatibility between convert_image_dtype and PIL results
+ _assert_approx_equal_tensor_to_pil(rgb_tensor.float(), out_pil, tol=tol, agg_method=agg_method)
+
+ atol = 1e-6
+ if out_fn_t.dtype == torch.uint8 and "cuda" in torch.device(device).type:
+ atol = 1.0
+ assert out_fn_t.allclose(out_scripted, atol=atol)
+
+ # FIXME: fn will be scripted again in _test_fn_on_batch. We could avoid that.
+ _test_fn_on_batch(batch_tensors, fn, scripted_fn_atol=atol, **config)
+
+
+@pytest.mark.parametrize("device", cpu_and_cuda())
+@pytest.mark.parametrize("dtype", (None, torch.float32, torch.float64))
+@pytest.mark.parametrize("config", [{"brightness_factor": f} for f in (0.1, 0.5, 1.0, 1.34, 2.5)])
+@pytest.mark.parametrize("channels", [1, 3])
+def test_adjust_brightness(device, dtype, config, channels):
+ check_functional_vs_PIL_vs_scripted(
+ F.adjust_brightness,
+ F_pil.adjust_brightness,
+ F_t.adjust_brightness,
+ config,
+ device,
+ dtype,
+ channels,
+ )
+
+
+@pytest.mark.parametrize("device", cpu_and_cuda())
+@pytest.mark.parametrize("dtype", (None, torch.float32, torch.float64))
+@pytest.mark.parametrize("channels", [1, 3])
+def test_invert(device, dtype, channels):
+ check_functional_vs_PIL_vs_scripted(
+ F.invert, F_pil.invert, F_t.invert, {}, device, dtype, channels, tol=1.0, agg_method="max"
+ )
+
+
+@pytest.mark.parametrize("device", cpu_and_cuda())
+@pytest.mark.parametrize("config", [{"bits": bits} for bits in range(0, 8)])
+@pytest.mark.parametrize("channels", [1, 3])
+def test_posterize(device, config, channels):
+ check_functional_vs_PIL_vs_scripted(
+ F.posterize,
+ F_pil.posterize,
+ F_t.posterize,
+ config,
+ device,
+ dtype=None,
+ channels=channels,
+ tol=1.0,
+ agg_method="max",
+ )
+
+
+@pytest.mark.parametrize("device", cpu_and_cuda())
+@pytest.mark.parametrize("config", [{"threshold": threshold} for threshold in [0, 64, 128, 192, 255]])
+@pytest.mark.parametrize("channels", [1, 3])
+def test_solarize1(device, config, channels):
+ check_functional_vs_PIL_vs_scripted(
+ F.solarize,
+ F_pil.solarize,
+ F_t.solarize,
+ config,
+ device,
+ dtype=None,
+ channels=channels,
+ tol=1.0,
+ agg_method="max",
+ )
+
+
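+# (Editorial sketch, not from the original diff) test_solarize2 below maps float
+# thresholds in [0, 1] onto PIL's uint8 range via `255 * threshold`, and the
+# threshold-bound tests further down rely on the dtype-dependent upper bound of
+# image values. That bound can be computed with a hypothetical helper such as:
+#
+#   def _image_value_bound(dtype):
+#       return 1.0 if dtype.is_floating_point else torch.iinfo(dtype).max
+#
+#   _image_value_bound(torch.float32)  # 1.0
+#   _image_value_bound(torch.uint8)    # 255
+#   _image_value_bound(torch.int64)    # 2**63 - 1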
+@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize("dtype", (torch.float32, torch.float64)) +@pytest.mark.parametrize("config", [{"threshold": threshold} for threshold in [0.0, 0.25, 0.5, 0.75, 1.0]]) +@pytest.mark.parametrize("channels", [1, 3]) +def test_solarize2(device, dtype, config, channels): + check_functional_vs_PIL_vs_scripted( + F.solarize, + lambda img, threshold: F_pil.solarize(img, 255 * threshold), + F_t.solarize, + config, + device, + dtype, + channels, + tol=1.0, + agg_method="max", + ) + + +@pytest.mark.parametrize( + ("dtype", "threshold"), + [ + *[ + (dtype, threshold) + for dtype, threshold in itertools.product( + [torch.float32, torch.float16], + [0.0, 0.25, 0.5, 0.75, 1.0], + ) + ], + *[(torch.uint8, threshold) for threshold in [0, 64, 128, 192, 255]], + *[(torch.int64, threshold) for threshold in [0, 2**32, 2**63 - 1]], + ], +) +@pytest.mark.parametrize("device", cpu_and_cuda()) +def test_solarize_threshold_within_bound(threshold, dtype, device): + make_img = torch.rand if dtype.is_floating_point else partial(torch.randint, 0, torch.iinfo(dtype).max) + img = make_img((3, 12, 23), dtype=dtype, device=device) + F_t.solarize(img, threshold) + + +@pytest.mark.parametrize( + ("dtype", "threshold"), + [ + (torch.float32, 1.5), + (torch.float16, 1.5), + (torch.uint8, 260), + (torch.int64, 2**64), + ], +) +@pytest.mark.parametrize("device", cpu_and_cuda()) +def test_solarize_threshold_above_bound(threshold, dtype, device): + make_img = torch.rand if dtype.is_floating_point else partial(torch.randint, 0, torch.iinfo(dtype).max) + img = make_img((3, 12, 23), dtype=dtype, device=device) + with pytest.raises(TypeError, match="Threshold should be less than bound of img."): + F_t.solarize(img, threshold) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize("dtype", (None, torch.float32, torch.float64)) +@pytest.mark.parametrize("config", [{"sharpness_factor": f} for f in [0.2, 0.5, 1.0, 1.5, 2.0]]) +@pytest.mark.parametrize("channels", [1, 3]) +def test_adjust_sharpness(device, dtype, config, channels): + check_functional_vs_PIL_vs_scripted( + F.adjust_sharpness, + F_pil.adjust_sharpness, + F_t.adjust_sharpness, + config, + device, + dtype, + channels, + ) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize("dtype", (None, torch.float32, torch.float64)) +@pytest.mark.parametrize("channels", [1, 3]) +def test_autocontrast(device, dtype, channels): + check_functional_vs_PIL_vs_scripted( + F.autocontrast, F_pil.autocontrast, F_t.autocontrast, {}, device, dtype, channels, tol=1.0, agg_method="max" + ) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize("dtype", (None, torch.float32, torch.float64)) +@pytest.mark.parametrize("channels", [1, 3]) +def test_autocontrast_equal_minmax(device, dtype, channels): + a = _create_data_batch(32, 32, num_samples=1, channels=channels, device=device) + a = a / 2.0 + 0.3 + assert (F.autocontrast(a)[0] == F.autocontrast(a[0])).all() + + a[0, 0] = 0.7 + assert (F.autocontrast(a)[0] == F.autocontrast(a[0])).all() + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize("channels", [1, 3]) +def test_equalize(device, channels): + torch.use_deterministic_algorithms(False) + check_functional_vs_PIL_vs_scripted( + F.equalize, + F_pil.equalize, + F_t.equalize, + {}, + device, + dtype=None, + channels=channels, + tol=1.0, + agg_method="max", + ) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) 
+@pytest.mark.parametrize("dtype", (None, torch.float32, torch.float64)) +@pytest.mark.parametrize("config", [{"contrast_factor": f} for f in [0.2, 0.5, 1.0, 1.5, 2.0]]) +@pytest.mark.parametrize("channels", [1, 3]) +def test_adjust_contrast(device, dtype, config, channels): + check_functional_vs_PIL_vs_scripted( + F.adjust_contrast, F_pil.adjust_contrast, F_t.adjust_contrast, config, device, dtype, channels + ) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize("dtype", (None, torch.float32, torch.float64)) +@pytest.mark.parametrize("config", [{"saturation_factor": f} for f in [0.5, 0.75, 1.0, 1.5, 2.0]]) +@pytest.mark.parametrize("channels", [1, 3]) +def test_adjust_saturation(device, dtype, config, channels): + check_functional_vs_PIL_vs_scripted( + F.adjust_saturation, F_pil.adjust_saturation, F_t.adjust_saturation, config, device, dtype, channels + ) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize("dtype", (None, torch.float32, torch.float64)) +@pytest.mark.parametrize("config", [{"hue_factor": f} for f in [-0.45, -0.25, 0.0, 0.25, 0.45]]) +@pytest.mark.parametrize("channels", [1, 3]) +def test_adjust_hue(device, dtype, config, channels): + check_functional_vs_PIL_vs_scripted( + F.adjust_hue, F_pil.adjust_hue, F_t.adjust_hue, config, device, dtype, channels, tol=16.1, agg_method="max" + ) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize("dtype", (None, torch.float32, torch.float64)) +@pytest.mark.parametrize("config", [{"gamma": g1, "gain": g2} for g1, g2 in zip([0.8, 1.0, 1.2], [0.7, 1.0, 1.3])]) +@pytest.mark.parametrize("channels", [1, 3]) +def test_adjust_gamma(device, dtype, config, channels): + check_functional_vs_PIL_vs_scripted( + F.adjust_gamma, + F_pil.adjust_gamma, + F_t.adjust_gamma, + config, + device, + dtype, + channels, + ) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize("dt", [None, torch.float32, torch.float64, torch.float16]) +@pytest.mark.parametrize("pad", [2, [3], [0, 3], (3, 3), [4, 2, 4, 3]]) +@pytest.mark.parametrize( + "config", + [ + {"padding_mode": "constant", "fill": 0}, + {"padding_mode": "constant", "fill": 10}, + {"padding_mode": "constant", "fill": 20.2}, + {"padding_mode": "edge"}, + {"padding_mode": "reflect"}, + {"padding_mode": "symmetric"}, + ], +) +def test_pad(device, dt, pad, config): + script_fn = torch.jit.script(F.pad) + tensor, pil_img = _create_data(7, 8, device=device) + batch_tensors = _create_data_batch(16, 18, num_samples=4, device=device) + + if dt == torch.float16 and device == "cpu": + # skip float16 on CPU case + return + + if dt is not None: + # This is a trivial cast to float of uint8 data to test all cases + tensor = tensor.to(dt) + batch_tensors = batch_tensors.to(dt) + + pad_tensor = F_t.pad(tensor, pad, **config) + pad_pil_img = F_pil.pad(pil_img, pad, **config) + + pad_tensor_8b = pad_tensor + # we need to cast to uint8 to compare with PIL image + if pad_tensor_8b.dtype != torch.uint8: + pad_tensor_8b = pad_tensor_8b.to(torch.uint8) + + _assert_equal_tensor_to_pil(pad_tensor_8b, pad_pil_img, msg=f"{pad}, {config}") + + if isinstance(pad, int): + script_pad = [ + pad, + ] + else: + script_pad = pad + pad_tensor_script = script_fn(tensor, script_pad, **config) + assert_equal(pad_tensor, pad_tensor_script, msg=f"{pad}, {config}") + + _test_fn_on_batch(batch_tensors, F.pad, padding=script_pad, **config) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize("mode", 
[NEAREST, NEAREST_EXACT, BILINEAR, BICUBIC]) +def test_resized_crop(device, mode): + # test values of F.resized_crop in several cases: + # 1) resize to the same size, crop to the same size => should be identity + tensor, _ = _create_data(26, 36, device=device) + + out_tensor = F.resized_crop( + tensor, top=0, left=0, height=26, width=36, size=[26, 36], interpolation=mode, antialias=True + ) + assert_equal(tensor, out_tensor, msg=f"{out_tensor[0, :5, :5]} vs {tensor[0, :5, :5]}") + + # 2) resize by half and crop a TL corner + tensor, _ = _create_data(26, 36, device=device) + out_tensor = F.resized_crop(tensor, top=0, left=0, height=20, width=30, size=[10, 15], interpolation=NEAREST) + expected_out_tensor = tensor[:, :20:2, :30:2] + assert_equal( + expected_out_tensor, + out_tensor, + msg=f"{expected_out_tensor[0, :10, :10]} vs {out_tensor[0, :10, :10]}", + ) + + batch_tensors = _create_data_batch(26, 36, num_samples=4, device=device) + _test_fn_on_batch( + batch_tensors, + F.resized_crop, + top=1, + left=2, + height=20, + width=30, + size=[10, 15], + interpolation=NEAREST, + ) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize( + "func, args", + [ + (F_t.get_dimensions, ()), + (F_t.get_image_size, ()), + (F_t.get_image_num_channels, ()), + (F_t.vflip, ()), + (F_t.hflip, ()), + (F_t.crop, (1, 2, 4, 5)), + (F_t.adjust_brightness, (0.0,)), + (F_t.adjust_contrast, (1.0,)), + (F_t.adjust_hue, (-0.5,)), + (F_t.adjust_saturation, (2.0,)), + (F_t.pad, ([2], 2, "constant")), + (F_t.resize, ([10, 11],)), + (F_t.perspective, ([0.2])), + (F_t.gaussian_blur, ((2, 2), (0.7, 0.5))), + (F_t.invert, ()), + (F_t.posterize, (0,)), + (F_t.solarize, (0.3,)), + (F_t.adjust_sharpness, (0.3,)), + (F_t.autocontrast, ()), + (F_t.equalize, ()), + ], +) +def test_assert_image_tensor(device, func, args): + shape = (100,) + tensor = torch.rand(*shape, dtype=torch.float, device=device) + with pytest.raises(Exception, match=r"Tensor is not a torch image."): + func(tensor, *args) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +def test_vflip(device): + script_vflip = torch.jit.script(F.vflip) + + img_tensor, pil_img = _create_data(16, 18, device=device) + vflipped_img = F.vflip(img_tensor) + vflipped_pil_img = F.vflip(pil_img) + _assert_equal_tensor_to_pil(vflipped_img, vflipped_pil_img) + + # scriptable function test + vflipped_img_script = script_vflip(img_tensor) + assert_equal(vflipped_img, vflipped_img_script) + + batch_tensors = _create_data_batch(16, 18, num_samples=4, device=device) + _test_fn_on_batch(batch_tensors, F.vflip) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +def test_hflip(device): + script_hflip = torch.jit.script(F.hflip) + + img_tensor, pil_img = _create_data(16, 18, device=device) + hflipped_img = F.hflip(img_tensor) + hflipped_pil_img = F.hflip(pil_img) + _assert_equal_tensor_to_pil(hflipped_img, hflipped_pil_img) + + # scriptable function test + hflipped_img_script = script_hflip(img_tensor) + assert_equal(hflipped_img, hflipped_img_script) + + batch_tensors = _create_data_batch(16, 18, num_samples=4, device=device) + _test_fn_on_batch(batch_tensors, F.hflip) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize( + "top, left, height, width", + [ + (1, 2, 4, 5), # crop inside top-left corner + (2, 12, 3, 4), # crop inside top-right corner + (8, 3, 5, 6), # crop inside bottom-left corner + (8, 11, 4, 3), # crop inside bottom-right corner + (50, 50, 10, 10), # crop outside the image + (-50, -50, 10, 10), # crop outside 
the image + ], +) +def test_crop(device, top, left, height, width): + script_crop = torch.jit.script(F.crop) + + img_tensor, pil_img = _create_data(16, 18, device=device) + + pil_img_cropped = F.crop(pil_img, top, left, height, width) + + img_tensor_cropped = F.crop(img_tensor, top, left, height, width) + _assert_equal_tensor_to_pil(img_tensor_cropped, pil_img_cropped) + + img_tensor_cropped = script_crop(img_tensor, top, left, height, width) + _assert_equal_tensor_to_pil(img_tensor_cropped, pil_img_cropped) + + batch_tensors = _create_data_batch(16, 18, num_samples=4, device=device) + _test_fn_on_batch(batch_tensors, F.crop, top=top, left=left, height=height, width=width) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize("image_size", ("small", "large")) +@pytest.mark.parametrize("dt", [None, torch.float32, torch.float64, torch.float16]) +@pytest.mark.parametrize("ksize", [(3, 3), [3, 5], (23, 23)]) +@pytest.mark.parametrize("sigma", [[0.5, 0.5], (0.5, 0.5), (0.8, 0.8), (1.7, 1.7)]) +@pytest.mark.parametrize("fn", [F.gaussian_blur, torch.jit.script(F.gaussian_blur)]) +def test_gaussian_blur(device, image_size, dt, ksize, sigma, fn): + + # true_cv2_results = { + # # np_img = np.arange(3 * 10 * 12, dtype="uint8").reshape((10, 12, 3)) + # # cv2.GaussianBlur(np_img, ksize=(3, 3), sigmaX=0.8) + # "3_3_0.8": ... + # # cv2.GaussianBlur(np_img, ksize=(3, 3), sigmaX=0.5) + # "3_3_0.5": ... + # # cv2.GaussianBlur(np_img, ksize=(3, 5), sigmaX=0.8) + # "3_5_0.8": ... + # # cv2.GaussianBlur(np_img, ksize=(3, 5), sigmaX=0.5) + # "3_5_0.5": ... + # # np_img2 = np.arange(26 * 28, dtype="uint8").reshape((26, 28)) + # # cv2.GaussianBlur(np_img2, ksize=(23, 23), sigmaX=1.7) + # "23_23_1.7": ... + # } + p = os.path.join(os.path.dirname(os.path.abspath(__file__)), "assets", "gaussian_blur_opencv_results.pt") + + true_cv2_results = torch.load(p, weights_only=False) + + if image_size == "small": + tensor = ( + torch.from_numpy(np.arange(3 * 10 * 12, dtype="uint8").reshape((10, 12, 3))).permute(2, 0, 1).to(device) + ) + else: + tensor = torch.from_numpy(np.arange(26 * 28, dtype="uint8").reshape((1, 26, 28))).to(device) + + if dt == torch.float16 and device == "cpu": + # skip float16 on CPU case + return + + if dt is not None: + tensor = tensor.to(dtype=dt) + + _ksize = (ksize, ksize) if isinstance(ksize, int) else ksize + _sigma = sigma[0] if sigma is not None else None + shape = tensor.shape + gt_key = f"{shape[-2]}_{shape[-1]}_{shape[-3]}__{_ksize[0]}_{_ksize[1]}_{_sigma}" + if gt_key not in true_cv2_results: + return + + true_out = ( + torch.tensor(true_cv2_results[gt_key]).reshape(shape[-2], shape[-1], shape[-3]).permute(2, 0, 1).to(tensor) + ) + + out = fn(tensor, kernel_size=ksize, sigma=sigma) + torch.testing.assert_close(out, true_out, rtol=0.0, atol=1.0, msg=f"{ksize}, {sigma}") + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +def test_hsv2rgb(device): + scripted_fn = torch.jit.script(F_t._hsv2rgb) + shape = (3, 100, 150) + for _ in range(10): + hsv_img = torch.rand(*shape, dtype=torch.float, device=device) + rgb_img = F_t._hsv2rgb(hsv_img) + ft_img = rgb_img.permute(1, 2, 0).flatten(0, 1) + + ( + h, + s, + v, + ) = hsv_img.unbind(0) + h = h.flatten().cpu().numpy() + s = s.flatten().cpu().numpy() + v = v.flatten().cpu().numpy() + + rgb = [] + for h1, s1, v1 in zip(h, s, v): + rgb.append(colorsys.hsv_to_rgb(h1, s1, v1)) + colorsys_img = torch.tensor(rgb, dtype=torch.float32, device=device) + torch.testing.assert_close(ft_img, colorsys_img, rtol=0.0, atol=1e-5) + + 
s_rgb_img = scripted_fn(hsv_img) + torch.testing.assert_close(rgb_img, s_rgb_img) + + batch_tensors = _create_data_batch(120, 100, num_samples=4, device=device).float() + _test_fn_on_batch(batch_tensors, F_t._hsv2rgb) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +def test_rgb2hsv(device): + scripted_fn = torch.jit.script(F_t._rgb2hsv) + shape = (3, 150, 100) + for _ in range(10): + rgb_img = torch.rand(*shape, dtype=torch.float, device=device) + hsv_img = F_t._rgb2hsv(rgb_img) + ft_hsv_img = hsv_img.permute(1, 2, 0).flatten(0, 1) + + ( + r, + g, + b, + ) = rgb_img.unbind(dim=-3) + r = r.flatten().cpu().numpy() + g = g.flatten().cpu().numpy() + b = b.flatten().cpu().numpy() + + hsv = [] + for r1, g1, b1 in zip(r, g, b): + hsv.append(colorsys.rgb_to_hsv(r1, g1, b1)) + + colorsys_img = torch.tensor(hsv, dtype=torch.float32, device=device) + + ft_hsv_img_h, ft_hsv_img_sv = torch.split(ft_hsv_img, [1, 2], dim=1) + colorsys_img_h, colorsys_img_sv = torch.split(colorsys_img, [1, 2], dim=1) + + max_diff_h = ((colorsys_img_h * 2 * math.pi).sin() - (ft_hsv_img_h * 2 * math.pi).sin()).abs().max() + max_diff_sv = (colorsys_img_sv - ft_hsv_img_sv).abs().max() + max_diff = max(max_diff_h, max_diff_sv) + assert max_diff < 1e-5 + + s_hsv_img = scripted_fn(rgb_img) + torch.testing.assert_close(hsv_img, s_hsv_img, rtol=1e-5, atol=1e-7) + + batch_tensors = _create_data_batch(120, 100, num_samples=4, device=device).float() + _test_fn_on_batch(batch_tensors, F_t._rgb2hsv) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize("num_output_channels", (3, 1)) +def test_rgb_to_grayscale(device, num_output_channels): + script_rgb_to_grayscale = torch.jit.script(F.rgb_to_grayscale) + + img_tensor, pil_img = _create_data(32, 34, device=device) + + gray_pil_image = F.rgb_to_grayscale(pil_img, num_output_channels=num_output_channels) + gray_tensor = F.rgb_to_grayscale(img_tensor, num_output_channels=num_output_channels) + + _assert_approx_equal_tensor_to_pil(gray_tensor.float(), gray_pil_image, tol=1.0 + 1e-10, agg_method="max") + + s_gray_tensor = script_rgb_to_grayscale(img_tensor, num_output_channels=num_output_channels) + assert_equal(s_gray_tensor, gray_tensor) + + batch_tensors = _create_data_batch(16, 18, num_samples=4, device=device) + _test_fn_on_batch(batch_tensors, F.rgb_to_grayscale, num_output_channels=num_output_channels) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +def test_center_crop(device): + script_center_crop = torch.jit.script(F.center_crop) + + img_tensor, pil_img = _create_data(32, 34, device=device) + + cropped_pil_image = F.center_crop(pil_img, [10, 11]) + + cropped_tensor = F.center_crop(img_tensor, [10, 11]) + _assert_equal_tensor_to_pil(cropped_tensor, cropped_pil_image) + + cropped_tensor = script_center_crop(img_tensor, [10, 11]) + _assert_equal_tensor_to_pil(cropped_tensor, cropped_pil_image) + + batch_tensors = _create_data_batch(16, 18, num_samples=4, device=device) + _test_fn_on_batch(batch_tensors, F.center_crop, output_size=[10, 11]) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +def test_five_crop(device): + script_five_crop = torch.jit.script(F.five_crop) + + img_tensor, pil_img = _create_data(32, 34, device=device) + + cropped_pil_images = F.five_crop(pil_img, [10, 11]) + + cropped_tensors = F.five_crop(img_tensor, [10, 11]) + for i in range(5): + _assert_equal_tensor_to_pil(cropped_tensors[i], cropped_pil_images[i]) + + cropped_tensors = script_five_crop(img_tensor, [10, 11]) + for i in range(5): + 
_assert_equal_tensor_to_pil(cropped_tensors[i], cropped_pil_images[i]) + + batch_tensors = _create_data_batch(16, 18, num_samples=4, device=device) + tuple_transformed_batches = F.five_crop(batch_tensors, [10, 11]) + for i in range(len(batch_tensors)): + img_tensor = batch_tensors[i, ...] + tuple_transformed_imgs = F.five_crop(img_tensor, [10, 11]) + assert len(tuple_transformed_imgs) == len(tuple_transformed_batches) + + for j in range(len(tuple_transformed_imgs)): + true_transformed_img = tuple_transformed_imgs[j] + transformed_img = tuple_transformed_batches[j][i, ...] + assert_equal(true_transformed_img, transformed_img) + + # scriptable function test + s_tuple_transformed_batches = script_five_crop(batch_tensors, [10, 11]) + for transformed_batch, s_transformed_batch in zip(tuple_transformed_batches, s_tuple_transformed_batches): + assert_equal(transformed_batch, s_transformed_batch) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +def test_ten_crop(device): + script_ten_crop = torch.jit.script(F.ten_crop) + + img_tensor, pil_img = _create_data(32, 34, device=device) + + cropped_pil_images = F.ten_crop(pil_img, [10, 11]) + + cropped_tensors = F.ten_crop(img_tensor, [10, 11]) + for i in range(10): + _assert_equal_tensor_to_pil(cropped_tensors[i], cropped_pil_images[i]) + + cropped_tensors = script_ten_crop(img_tensor, [10, 11]) + for i in range(10): + _assert_equal_tensor_to_pil(cropped_tensors[i], cropped_pil_images[i]) + + batch_tensors = _create_data_batch(16, 18, num_samples=4, device=device) + tuple_transformed_batches = F.ten_crop(batch_tensors, [10, 11]) + for i in range(len(batch_tensors)): + img_tensor = batch_tensors[i, ...] + tuple_transformed_imgs = F.ten_crop(img_tensor, [10, 11]) + assert len(tuple_transformed_imgs) == len(tuple_transformed_batches) + + for j in range(len(tuple_transformed_imgs)): + true_transformed_img = tuple_transformed_imgs[j] + transformed_img = tuple_transformed_batches[j][i, ...] 
+ assert_equal(true_transformed_img, transformed_img)
+
+ # scriptable function test
+ s_tuple_transformed_batches = script_ten_crop(batch_tensors, [10, 11])
+ for transformed_batch, s_transformed_batch in zip(tuple_transformed_batches, s_tuple_transformed_batches):
+ assert_equal(transformed_batch, s_transformed_batch)
+
+
+def test_elastic_transform_asserts():
+ with pytest.raises(TypeError, match="Argument displacement should be a Tensor"):
+ _ = F.elastic_transform("abc", displacement=None)
+
+ with pytest.raises(TypeError, match="img should be PIL Image or Tensor"):
+ _ = F.elastic_transform("abc", displacement=torch.rand(1))
+
+ img_tensor = torch.rand(1, 3, 32, 24)
+ with pytest.raises(ValueError, match="Argument displacement shape should"):
+ _ = F.elastic_transform(img_tensor, displacement=torch.rand(1, 2))
+
+
+@pytest.mark.parametrize("device", cpu_and_cuda())
+@pytest.mark.parametrize("interpolation", [NEAREST, BILINEAR, BICUBIC])
+@pytest.mark.parametrize("dt", [None, torch.float32, torch.float64, torch.float16])
+@pytest.mark.parametrize(
+ "fill",
+ [None, [255, 255, 255], (2.0,)],
+)
+def test_elastic_transform_consistency(device, interpolation, dt, fill):
+ script_elastic_transform = torch.jit.script(F.elastic_transform)
+ img_tensor, _ = _create_data(32, 34, device=device)
+ # As there is no PIL implementation for elastic_transform,
+ # we do not run tensor vs. pillow comparison tests
+
+ if dt is not None:
+ img_tensor = img_tensor.to(dt)
+
+ displacement = T.ElasticTransform.get_params([1.5, 1.5], [2.0, 2.0], [32, 34])
+ kwargs = dict(
+ displacement=displacement,
+ interpolation=interpolation,
+ fill=fill,
+ )
+
+ out_tensor1 = F.elastic_transform(img_tensor, **kwargs)
+ out_tensor2 = script_elastic_transform(img_tensor, **kwargs)
+ assert_equal(out_tensor1, out_tensor2)
+
+ batch_tensors = _create_data_batch(16, 18, num_samples=4, device=device)
+ displacement = T.ElasticTransform.get_params([1.5, 1.5], [2.0, 2.0], [16, 18])
+ kwargs["displacement"] = displacement
+ if dt is not None:
+ batch_tensors = batch_tensors.to(dt)
+ _test_fn_on_batch(batch_tensors, F.elastic_transform, **kwargs)
+
+
+if __name__ == "__main__":
+ pytest.main([__file__])
diff --git a/test/test_hub.py b/test/test_hub.py
deleted file mode 100644
index 4ae9e51021b..00000000000
--- a/test/test_hub.py
+++ /dev/null
@@ -1,56 +0,0 @@
-import torch.hub as hub
-import tempfile
-import shutil
-import os
-import sys
-import unittest
-
-
-def sum_of_model_parameters(model):
- s = 0
- for p in model.parameters():
- s += p.sum()
- return s
-
-
-SUM_OF_PRETRAINED_RESNET18_PARAMS = -12703.99609375
-
-
-@unittest.skipIf('torchvision' in sys.modules,
- 'TestHub must start without torchvision imported')
-class TestHub(unittest.TestCase):
- # Only run this check ONCE before all tests start.
- # - If torchvision is imported before all tests start, e.g. we might find _C.so
- # which doesn't exist in downloaded zip but in the installed wheel.
- # - After the first test is run, torchvision is already in sys.modules due to
- # Python cache as we run all hub tests in the same python process.
- - def test_load_from_github(self): - hub_model = hub.load( - 'pytorch/vision', - 'resnet18', - pretrained=True, - progress=False) - self.assertEqual(sum_of_model_parameters(hub_model).item(), - SUM_OF_PRETRAINED_RESNET18_PARAMS) - - def test_set_dir(self): - temp_dir = tempfile.gettempdir() - hub.set_dir(temp_dir) - hub_model = hub.load( - 'pytorch/vision', - 'resnet18', - pretrained=True, - progress=False) - self.assertEqual(sum_of_model_parameters(hub_model).item(), - SUM_OF_PRETRAINED_RESNET18_PARAMS) - self.assertTrue(os.path.exists(temp_dir + '/pytorch_vision_master')) - shutil.rmtree(temp_dir + '/pytorch_vision_master') - - def test_list_entrypoints(self): - entry_lists = hub.list('pytorch/vision', force_reload=True) - self.assertIn('resnet18', entry_lists) - - -if __name__ == "__main__": - unittest.main() diff --git a/test/test_image.py b/test/test_image.py new file mode 100644 index 00000000000..b8e96773267 --- /dev/null +++ b/test/test_image.py @@ -0,0 +1,1060 @@ +import concurrent.futures +import glob +import io +import os +import re +import sys +from pathlib import Path + +import numpy as np +import pytest +import requests +import torch +import torchvision.transforms.v2.functional as F +from common_utils import assert_equal, cpu_and_cuda, IN_OSS_CI, needs_cuda +from PIL import __version__ as PILLOW_VERSION, Image, ImageOps, ImageSequence +from torchvision.io.image import ( + decode_avif, + decode_gif, + decode_heic, + decode_image, + decode_jpeg, + decode_png, + decode_webp, + encode_jpeg, + encode_png, + ImageReadMode, + read_file, + read_image, + write_file, + write_jpeg, + write_png, +) + +IMAGE_ROOT = os.path.join(os.path.dirname(os.path.abspath(__file__)), "assets") +FAKEDATA_DIR = os.path.join(IMAGE_ROOT, "fakedata") +IMAGE_DIR = os.path.join(FAKEDATA_DIR, "imagefolder") +DAMAGED_JPEG = os.path.join(IMAGE_ROOT, "damaged_jpeg") +DAMAGED_PNG = os.path.join(IMAGE_ROOT, "damaged_png") +ENCODE_JPEG = os.path.join(IMAGE_ROOT, "encode_jpeg") +INTERLACED_PNG = os.path.join(IMAGE_ROOT, "interlaced_png") +TOOSMALL_PNG = os.path.join(IMAGE_ROOT, "toosmall_png") +IS_WINDOWS = sys.platform in ("win32", "cygwin") +IS_MACOS = sys.platform == "darwin" +IS_LINUX = sys.platform == "linux" +PILLOW_VERSION = tuple(int(x) for x in PILLOW_VERSION.split(".")) +WEBP_TEST_IMAGES_DIR = os.environ.get("WEBP_TEST_IMAGES_DIR", "") +# See https://github.com/pytorch/vision/pull/8724#issuecomment-2503964558 +HEIC_AVIF_MESSAGE = "AVIF and HEIF only available on linux." + + +def _get_safe_image_name(name): + # Used when we need to change the pytest "id" for an "image path" parameter. + # If we don't, the test id (i.e. 
its name) will contain the whole path to the image, which is machine-specific, + # and this creates issues when the test is running in a different machine than where it was collected + # (typically, in fb internal infra) + return name.split(os.path.sep)[-1] + + +def get_images(directory, img_ext): + assert os.path.isdir(directory) + image_paths = glob.glob(directory + f"/**/*{img_ext}", recursive=True) + for path in image_paths: + if path.split(os.sep)[-2] not in ["damaged_jpeg", "jpeg_write"]: + yield path + + +def pil_read_image(img_path): + with Image.open(img_path) as img: + return torch.from_numpy(np.array(img)) + + +def normalize_dimensions(img_pil): + if len(img_pil.shape) == 3: + img_pil = img_pil.permute(2, 0, 1) + else: + img_pil = img_pil.unsqueeze(0) + return img_pil + + +@pytest.mark.parametrize( + "img_path", + [pytest.param(jpeg_path, id=_get_safe_image_name(jpeg_path)) for jpeg_path in get_images(IMAGE_ROOT, ".jpg")], +) +@pytest.mark.parametrize( + "pil_mode, mode", + [ + (None, ImageReadMode.UNCHANGED), + ("L", ImageReadMode.GRAY), + ("RGB", ImageReadMode.RGB), + ], +) +@pytest.mark.parametrize("scripted", (False, True)) +@pytest.mark.parametrize("decode_fun", (decode_jpeg, decode_image)) +def test_decode_jpeg(img_path, pil_mode, mode, scripted, decode_fun): + + with Image.open(img_path) as img: + is_cmyk = img.mode == "CMYK" + if pil_mode is not None: + img = img.convert(pil_mode) + img_pil = torch.from_numpy(np.array(img)) + if is_cmyk and mode == ImageReadMode.UNCHANGED: + # flip the colors to match libjpeg + img_pil = 255 - img_pil + + img_pil = normalize_dimensions(img_pil) + data = read_file(img_path) + if scripted: + decode_fun = torch.jit.script(decode_fun) + img_ljpeg = decode_fun(data, mode=mode) + + # Permit a small variation on pixel values to account for implementation + # differences between Pillow and LibJPEG. 
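+ # (Editorial note) Exact equality is not expected here: the two decode paths
+ # may use different decoder settings (e.g. IDCT method, chroma upsampling),
+ # so individual pixels can legitimately differ by a few values; hence the
+ # mean-absolute-difference bound below instead of assert_equal.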
+ abs_mean_diff = (img_ljpeg.type(torch.float32) - img_pil).abs().mean().item()
+ assert abs_mean_diff < 2
+
+
+@pytest.mark.parametrize("codec", ["png", "jpeg"])
+@pytest.mark.parametrize("orientation", [1, 2, 3, 4, 5, 6, 7, 8, 0])
+def test_decode_with_exif_orientation(tmpdir, codec, orientation):
+ fp = os.path.join(tmpdir, f"exif_oriented_{orientation}.{codec}")
+ t = torch.randint(0, 256, size=(3, 256, 257), dtype=torch.uint8)
+ im = F.to_pil_image(t)
+ exif = im.getexif()
+ exif[0x0112] = orientation # set exif orientation
+ im.save(fp, codec.upper(), exif=exif.tobytes())
+
+ data = read_file(fp)
+ output = decode_image(data, apply_exif_orientation=True)
+
+ pimg = Image.open(fp)
+ pimg = ImageOps.exif_transpose(pimg)
+
+ expected = F.pil_to_tensor(pimg)
+ torch.testing.assert_close(expected, output)
+
+
+@pytest.mark.parametrize("size", [65533, 1, 7, 10, 23, 33])
+def test_invalid_exif(tmpdir, size):
+ # Inspired by a PIL test:
+ # https://github.com/python-pillow/Pillow/blob/8f63748e50378424628155994efd7e0739a4d1d1/Tests/test_file_jpeg.py#L299
+ fp = os.path.join(tmpdir, "invalid_exif.jpg")
+ t = torch.randint(0, 256, size=(3, 256, 257), dtype=torch.uint8)
+ im = F.to_pil_image(t)
+ im.save(fp, "JPEG", exif=b"1" * size)
+
+ data = read_file(fp)
+ output = decode_image(data, apply_exif_orientation=True)
+
+ pimg = Image.open(fp)
+ pimg = ImageOps.exif_transpose(pimg)
+
+ expected = F.pil_to_tensor(pimg)
+ torch.testing.assert_close(expected, output)
+
+
+def test_decode_bad_huffman_images():
+ # sanity check: make sure we can decode the bad Huffman encoding
+ bad_huff = read_file(os.path.join(DAMAGED_JPEG, "bad_huffman.jpg"))
+ decode_jpeg(bad_huff)
+
+
+@pytest.mark.parametrize(
+ "img_path",
+ [
+ pytest.param(truncated_image, id=_get_safe_image_name(truncated_image))
+ for truncated_image in glob.glob(os.path.join(DAMAGED_JPEG, "corrupt*.jpg"))
+ ],
+)
+def test_damaged_corrupt_images(img_path):
+ # Truncated images should raise an exception
+ data = read_file(img_path)
+ if "corrupt34" in img_path:
+ match_message = "Image is incomplete or truncated"
+ else:
+ match_message = "Unsupported marker type"
+ with pytest.raises(RuntimeError, match=match_message):
+ decode_jpeg(data)
+
+
+@pytest.mark.parametrize(
+ "img_path",
+ [pytest.param(png_path, id=_get_safe_image_name(png_path)) for png_path in get_images(FAKEDATA_DIR, ".png")],
+)
+@pytest.mark.parametrize(
+ "pil_mode, mode",
+ [
+ (None, ImageReadMode.UNCHANGED),
+ ("L", ImageReadMode.GRAY),
+ ("LA", ImageReadMode.GRAY_ALPHA),
+ ("RGB", ImageReadMode.RGB),
+ ("RGBA", ImageReadMode.RGB_ALPHA),
+ ],
+)
+@pytest.mark.parametrize("scripted", (False, True))
+@pytest.mark.parametrize("decode_fun", (decode_png, decode_image))
+def test_decode_png(img_path, pil_mode, mode, scripted, decode_fun):
+
+ if scripted:
+ decode_fun = torch.jit.script(decode_fun)
+
+ with Image.open(img_path) as img:
+ if pil_mode is not None:
+ img = img.convert(pil_mode)
+ img_pil = torch.from_numpy(np.array(img))
+
+ img_pil = normalize_dimensions(img_pil)
+
+ if img_path.endswith("16.png"):
+ data = read_file(img_path)
+ img_lpng = decode_fun(data, mode=mode)
+ assert img_lpng.dtype == torch.uint16
+ # PIL converts 16-bit PNGs to uint8
+ img_lpng = F.to_dtype(img_lpng, torch.uint8, scale=True)
+ else:
+ data = read_file(img_path)
+ img_lpng = decode_fun(data, mode=mode)
+
+ tol = 0 if pil_mode is None else 1
+
+ if PILLOW_VERSION >= (8, 3) and pil_mode == "LA":
+ # Avoid checking the transparency channel until
+ # https://github.com/python-pillow/Pillow/issues/5593#issuecomment-878244910
+ # is fixed.
+ # TODO: remove once fix is released in PIL. Should be > 8.3.1.
+ img_lpng, img_pil = img_lpng[0], img_pil[0]
+
+ torch.testing.assert_close(img_lpng, img_pil, atol=tol, rtol=0)
+
+
+def test_decode_png_errors():
+ with pytest.raises(RuntimeError, match="Out of bound read in decode_png"):
+ decode_png(read_file(os.path.join(DAMAGED_PNG, "sigsegv.png")))
+ with pytest.raises(RuntimeError, match="Content is too small for png"):
+ decode_png(read_file(os.path.join(TOOSMALL_PNG, "heapbof.png")))
+
+
+@pytest.mark.parametrize(
+ "img_path",
+ [pytest.param(png_path, id=_get_safe_image_name(png_path)) for png_path in get_images(IMAGE_DIR, ".png")],
+)
+@pytest.mark.parametrize("scripted", (True, False))
+def test_encode_png(img_path, scripted):
+ pil_image = Image.open(img_path)
+ img_pil = torch.from_numpy(np.array(pil_image))
+ img_pil = img_pil.permute(2, 0, 1)
+ encode = torch.jit.script(encode_png) if scripted else encode_png
+ png_buf = encode(img_pil, compression_level=6)
+
+ rec_img = Image.open(io.BytesIO(bytes(png_buf.tolist())))
+ rec_img = torch.from_numpy(np.array(rec_img))
+ rec_img = rec_img.permute(2, 0, 1)
+
+ assert_equal(img_pil, rec_img)
+
+
+def test_encode_png_errors():
+ with pytest.raises(RuntimeError, match="Input tensor dtype should be uint8"):
+ encode_png(torch.empty((3, 100, 100), dtype=torch.float32))
+
+ with pytest.raises(RuntimeError, match="Compression level should be between 0 and 9"):
+ encode_png(torch.empty((3, 100, 100), dtype=torch.uint8), compression_level=-1)
+
+ with pytest.raises(RuntimeError, match="Compression level should be between 0 and 9"):
+ encode_png(torch.empty((3, 100, 100), dtype=torch.uint8), compression_level=10)
+
+ with pytest.raises(RuntimeError, match="The number of channels should be 1 or 3, got: 5"):
+ encode_png(torch.empty((5, 100, 100), dtype=torch.uint8))
+
+
+@pytest.mark.parametrize(
+ "img_path",
+ [pytest.param(png_path, id=_get_safe_image_name(png_path)) for png_path in get_images(IMAGE_DIR, ".png")],
+)
+@pytest.mark.parametrize("scripted", (True, False))
+def test_write_png(img_path, tmpdir, scripted):
+ pil_image = Image.open(img_path)
+ img_pil = torch.from_numpy(np.array(pil_image))
+ img_pil = img_pil.permute(2, 0, 1)
+
+ filename, _ = os.path.splitext(os.path.basename(img_path))
+ torch_png = os.path.join(tmpdir, f"{filename}_torch.png")
+ write = torch.jit.script(write_png) if scripted else write_png
+ write(img_pil, torch_png, compression_level=6)
+ saved_image = torch.from_numpy(np.array(Image.open(torch_png)))
+ saved_image = saved_image.permute(2, 0, 1)
+
+ assert_equal(img_pil, saved_image)
+
+
+def test_read_image():
+ # Just testing torchscript; the functionality is somewhat tested already in other tests.
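+ # (Editorial note) read_image is essentially read_file followed by
+ # decode_image, which picks a decoder based on the encoded data itself, so a
+ # single scripted entry point covers all supported formats. The check below
+ # only asserts that the eager and scripted paths agree on one jpeg.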
+ path = next(get_images(IMAGE_ROOT, ".jpg")) + out = read_image(path) + out_scripted = torch.jit.script(read_image)(path) + torch.testing.assert_close(out, out_scripted, atol=0, rtol=0) + + +@pytest.mark.parametrize("scripted", (True, False)) +def test_read_file(tmpdir, scripted): + fname, content = "test1.bin", b"TorchVision\211\n" + fpath = os.path.join(tmpdir, fname) + with open(fpath, "wb") as f: + f.write(content) + + fun = torch.jit.script(read_file) if scripted else read_file + data = fun(fpath) + expected = torch.tensor(list(content), dtype=torch.uint8) + os.unlink(fpath) + assert_equal(data, expected) + + with pytest.raises(RuntimeError, match="No such file or directory: 'tst'"): + read_file("tst") + + +def test_read_file_non_ascii(tmpdir): + fname, content = "日本語(Japanese).bin", b"TorchVision\211\n" + fpath = os.path.join(tmpdir, fname) + with open(fpath, "wb") as f: + f.write(content) + + data = read_file(fpath) + expected = torch.tensor(list(content), dtype=torch.uint8) + os.unlink(fpath) + assert_equal(data, expected) + + +@pytest.mark.parametrize("scripted", (True, False)) +def test_write_file(tmpdir, scripted): + fname, content = "test1.bin", b"TorchVision\211\n" + fpath = os.path.join(tmpdir, fname) + content_tensor = torch.tensor(list(content), dtype=torch.uint8) + write = torch.jit.script(write_file) if scripted else write_file + write(fpath, content_tensor) + + with open(fpath, "rb") as f: + saved_content = f.read() + os.unlink(fpath) + assert content == saved_content + + +def test_write_file_non_ascii(tmpdir): + fname, content = "日本語(Japanese).bin", b"TorchVision\211\n" + fpath = os.path.join(tmpdir, fname) + content_tensor = torch.tensor(list(content), dtype=torch.uint8) + write_file(fpath, content_tensor) + + with open(fpath, "rb") as f: + saved_content = f.read() + os.unlink(fpath) + assert content == saved_content + + +@pytest.mark.parametrize( + "shape", + [ + (27, 27), + (60, 60), + (105, 105), + ], +) +def test_read_1_bit_png(shape, tmpdir): + np_rng = np.random.RandomState(0) + image_path = os.path.join(tmpdir, f"test_{shape}.png") + pixels = np_rng.rand(*shape) > 0.5 + img = Image.fromarray(pixels) + img.save(image_path) + img1 = read_image(image_path) + img2 = normalize_dimensions(torch.as_tensor(pixels * 255, dtype=torch.uint8)) + assert_equal(img1, img2) + + +@pytest.mark.parametrize( + "shape", + [ + (27, 27), + (60, 60), + (105, 105), + ], +) +@pytest.mark.parametrize( + "mode", + [ + ImageReadMode.UNCHANGED, + ImageReadMode.GRAY, + ], +) +def test_read_1_bit_png_consistency(shape, mode, tmpdir): + np_rng = np.random.RandomState(0) + image_path = os.path.join(tmpdir, f"test_{shape}.png") + pixels = np_rng.rand(*shape) > 0.5 + img = Image.fromarray(pixels) + img.save(image_path) + img1 = read_image(image_path, mode) + img2 = read_image(image_path, mode) + assert_equal(img1, img2) + + +def test_read_interlaced_png(): + imgs = list(get_images(INTERLACED_PNG, ".png")) + with Image.open(imgs[0]) as im1, Image.open(imgs[1]) as im2: + assert not (im1.info.get("interlace") is im2.info.get("interlace")) + img1 = read_image(imgs[0]) + img2 = read_image(imgs[1]) + assert_equal(img1, img2) + + +@needs_cuda +@pytest.mark.parametrize("mode", [ImageReadMode.UNCHANGED, ImageReadMode.GRAY, ImageReadMode.RGB]) +@pytest.mark.parametrize("scripted", (False, True)) +def test_decode_jpegs_cuda(mode, scripted): + encoded_images = [] + for jpeg_path in get_images(IMAGE_ROOT, ".jpg"): + if "cmyk" in jpeg_path: + continue + encoded_image = read_file(jpeg_path) + 
encoded_images.append(encoded_image) + decoded_images_cpu = decode_jpeg(encoded_images, mode=mode) + decode_fn = torch.jit.script(decode_jpeg) if scripted else decode_jpeg + + # test multithreaded decoding + # in the current version we prevent this by using a lock but we still want to test it + num_workers = 10 + + with concurrent.futures.ThreadPoolExecutor(max_workers=num_workers) as executor: + futures = [executor.submit(decode_fn, encoded_images, mode, "cuda") for _ in range(num_workers)] + decoded_images_threaded = [future.result() for future in futures] + assert len(decoded_images_threaded) == num_workers + for decoded_images in decoded_images_threaded: + assert len(decoded_images) == len(encoded_images) + for decoded_image_cuda, decoded_image_cpu in zip(decoded_images, decoded_images_cpu): + assert decoded_image_cuda.shape == decoded_image_cpu.shape + assert decoded_image_cuda.dtype == decoded_image_cpu.dtype == torch.uint8 + assert (decoded_image_cuda.cpu().float() - decoded_image_cpu.cpu().float()).abs().mean() < 2 + + +@needs_cuda +def test_decode_image_cuda_raises(): + data = torch.randint(0, 127, size=(255,), device="cuda", dtype=torch.uint8) + with pytest.raises(RuntimeError): + decode_image(data) + + +@needs_cuda +def test_decode_jpeg_cuda_device_param(): + path = next(path for path in get_images(IMAGE_ROOT, ".jpg") if "cmyk" not in path) + data = read_file(path) + current_device = torch.cuda.current_device() + current_stream = torch.cuda.current_stream() + num_devices = torch.cuda.device_count() + devices = ["cuda", torch.device("cuda")] + [torch.device(f"cuda:{i}") for i in range(num_devices)] + results = [] + for device in devices: + results.append(decode_jpeg(data, device=device)) + assert len(results) == len(devices) + for result in results: + assert torch.all(result.cpu() == results[0].cpu()) + assert current_device == torch.cuda.current_device() + assert current_stream == torch.cuda.current_stream() + + +@needs_cuda +def test_decode_jpeg_cuda_errors(): + data = read_file(next(get_images(IMAGE_ROOT, ".jpg"))) + with pytest.raises(RuntimeError, match="Expected a non empty 1-dimensional tensor"): + decode_jpeg(data.reshape(-1, 1), device="cuda") + with pytest.raises(ValueError, match="must be tensors"): + decode_jpeg([1, 2, 3]) + with pytest.raises(ValueError, match="Input tensor must be a CPU tensor"): + decode_jpeg(data.to("cuda"), device="cuda") + with pytest.raises(RuntimeError, match="Expected a torch.uint8 tensor"): + decode_jpeg(data.to(torch.float), device="cuda") + with pytest.raises(RuntimeError, match="Expected the device parameter to be a cuda device"): + torch.ops.image.decode_jpegs_cuda([data], ImageReadMode.UNCHANGED.value, "cpu") + with pytest.raises(ValueError, match="Input tensor must be a CPU tensor"): + decode_jpeg( + torch.empty((100,), dtype=torch.uint8, device="cuda"), + ) + with pytest.raises(ValueError, match="Input list must contain tensors on CPU"): + decode_jpeg( + [ + torch.empty((100,), dtype=torch.uint8, device="cuda"), + torch.empty((100,), dtype=torch.uint8, device="cuda"), + ] + ) + + with pytest.raises(ValueError, match="Input list must contain tensors on CPU"): + decode_jpeg( + [ + torch.empty((100,), dtype=torch.uint8, device="cuda"), + torch.empty((100,), dtype=torch.uint8, device="cuda"), + ], + device="cuda", + ) + + with pytest.raises(ValueError, match="Input list must contain tensors on CPU"): + decode_jpeg( + [ + torch.empty((100,), dtype=torch.uint8, device="cpu"), + torch.empty((100,), dtype=torch.uint8, device="cuda"), + ], + 
device="cuda", + ) + + with pytest.raises(RuntimeError, match="Expected a torch.uint8 tensor"): + decode_jpeg( + [ + torch.empty((100,), dtype=torch.uint8), + torch.empty((100,), dtype=torch.float32), + ], + device="cuda", + ) + + with pytest.raises(RuntimeError, match="Expected a non empty 1-dimensional tensor"): + decode_jpeg( + [ + torch.empty((100,), dtype=torch.uint8), + torch.empty((1, 100), dtype=torch.uint8), + ], + device="cuda", + ) + + with pytest.raises(RuntimeError, match="Error while decoding JPEG images"): + decode_jpeg( + [ + torch.empty((100,), dtype=torch.uint8), + torch.empty((100,), dtype=torch.uint8), + ], + device="cuda", + ) + + with pytest.raises(ValueError, match="Input list must contain at least one element"): + decode_jpeg([], device="cuda") + + +def test_encode_jpeg_errors(): + + with pytest.raises(RuntimeError, match="Input tensor dtype should be uint8"): + encode_jpeg(torch.empty((3, 100, 100), dtype=torch.float32)) + + with pytest.raises(ValueError, match="Image quality should be a positive number between 1 and 100"): + encode_jpeg(torch.empty((3, 100, 100), dtype=torch.uint8), quality=-1) + + with pytest.raises(ValueError, match="Image quality should be a positive number between 1 and 100"): + encode_jpeg(torch.empty((3, 100, 100), dtype=torch.uint8), quality=101) + + with pytest.raises(RuntimeError, match="The number of channels should be 1 or 3, got: 5"): + encode_jpeg(torch.empty((5, 100, 100), dtype=torch.uint8)) + + with pytest.raises(RuntimeError, match="Input data should be a 3-dimensional tensor"): + encode_jpeg(torch.empty((1, 3, 100, 100), dtype=torch.uint8)) + + with pytest.raises(RuntimeError, match="Input data should be a 3-dimensional tensor"): + encode_jpeg(torch.empty((100, 100), dtype=torch.uint8)) + + +@pytest.mark.skipif(IS_MACOS, reason="https://github.com/pytorch/vision/issues/8031") +@pytest.mark.parametrize( + "img_path", + [pytest.param(jpeg_path, id=_get_safe_image_name(jpeg_path)) for jpeg_path in get_images(ENCODE_JPEG, ".jpg")], +) +@pytest.mark.parametrize("scripted", (True, False)) +def test_encode_jpeg(img_path, scripted): + img = read_image(img_path) + + pil_img = F.to_pil_image(img) + buf = io.BytesIO() + pil_img.save(buf, format="JPEG", quality=75) + + encoded_jpeg_pil = torch.frombuffer(buf.getvalue(), dtype=torch.uint8) + + encode = torch.jit.script(encode_jpeg) if scripted else encode_jpeg + for src_img in [img, img.contiguous()]: + encoded_jpeg_torch = encode(src_img, quality=75) + assert_equal(encoded_jpeg_torch, encoded_jpeg_pil) + + +@needs_cuda +def test_encode_jpeg_cuda_device_param(): + path = next(path for path in get_images(IMAGE_ROOT, ".jpg") if "cmyk" not in path) + + data = read_image(path) + + current_device = torch.cuda.current_device() + current_stream = torch.cuda.current_stream() + num_devices = torch.cuda.device_count() + devices = ["cuda", torch.device("cuda")] + [torch.device(f"cuda:{i}") for i in range(num_devices)] + results = [] + for device in devices: + results.append(encode_jpeg(data.to(device=device))) + assert len(results) == len(devices) + for result in results: + assert torch.all(result.cpu() == results[0].cpu()) + assert current_device == torch.cuda.current_device() + assert current_stream == torch.cuda.current_stream() + + +@needs_cuda +@pytest.mark.parametrize( + "img_path", + [pytest.param(jpeg_path, id=_get_safe_image_name(jpeg_path)) for jpeg_path in get_images(IMAGE_ROOT, ".jpg")], +) +@pytest.mark.parametrize("scripted", (False, True)) +@pytest.mark.parametrize("contiguous", (False, 
True)) +def test_encode_jpeg_cuda(img_path, scripted, contiguous): + decoded_image_tv = read_image(img_path) + encode_fn = torch.jit.script(encode_jpeg) if scripted else encode_jpeg + + if "cmyk" in img_path: + pytest.xfail("Encoding a CMYK jpeg isn't supported") + if decoded_image_tv.shape[0] == 1: + pytest.xfail("Decoding a grayscale jpeg isn't supported") + # For more detail as to why check out: https://github.com/NVIDIA/cuda-samples/issues/23#issuecomment-559283013 + if contiguous: + decoded_image_tv = decoded_image_tv[None].contiguous(memory_format=torch.contiguous_format)[0] + else: + decoded_image_tv = decoded_image_tv[None].contiguous(memory_format=torch.channels_last)[0] + encoded_jpeg_cuda_tv = encode_fn(decoded_image_tv.cuda(), quality=75) + decoded_jpeg_cuda_tv = decode_jpeg(encoded_jpeg_cuda_tv.cpu()) + + # the actual encoded bytestreams from libnvjpeg and libjpeg-turbo differ for the same quality + # instead, we re-decode the encoded image and compare to the original + abs_mean_diff = (decoded_jpeg_cuda_tv.float() - decoded_image_tv.float()).abs().mean().item() + assert abs_mean_diff < 3 + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize("scripted", (True, False)) +@pytest.mark.parametrize("contiguous", (True, False)) +def test_encode_jpegs_batch(scripted, contiguous, device): + if device == "cpu" and IS_MACOS: + pytest.skip("https://github.com/pytorch/vision/issues/8031") + decoded_images_tv = [] + for jpeg_path in get_images(IMAGE_ROOT, ".jpg"): + if "cmyk" in jpeg_path: + continue + decoded_image = read_image(jpeg_path) + if decoded_image.shape[0] == 1: + continue + if contiguous: + decoded_image = decoded_image[None].contiguous(memory_format=torch.contiguous_format)[0] + else: + decoded_image = decoded_image[None].contiguous(memory_format=torch.channels_last)[0] + decoded_images_tv.append(decoded_image) + + encode_fn = torch.jit.script(encode_jpeg) if scripted else encode_jpeg + + decoded_images_tv_device = [img.to(device=device) for img in decoded_images_tv] + encoded_jpegs_tv_device = encode_fn(decoded_images_tv_device, quality=75) + encoded_jpegs_tv_device = [decode_jpeg(img.cpu()) for img in encoded_jpegs_tv_device] + + for original, encoded_decoded in zip(decoded_images_tv, encoded_jpegs_tv_device): + c, h, w = original.shape + abs_mean_diff = (original.float() - encoded_decoded.float()).abs().mean().item() + assert abs_mean_diff < 3 + + # test multithreaded decoding + # in the current version we prevent this by using a lock but we still want to test it + num_workers = 10 + with concurrent.futures.ThreadPoolExecutor(max_workers=num_workers) as executor: + futures = [executor.submit(encode_fn, decoded_images_tv_device) for _ in range(num_workers)] + encoded_images_threaded = [future.result() for future in futures] + assert len(encoded_images_threaded) == num_workers + for encoded_images in encoded_images_threaded: + assert len(decoded_images_tv_device) == len(encoded_images) + for i, (encoded_image_cuda, decoded_image_tv) in enumerate(zip(encoded_images, decoded_images_tv_device)): + # make sure all the threads produce identical outputs + assert torch.all(encoded_image_cuda == encoded_images_threaded[0][i]) + + # make sure the outputs are identical or close enough to baseline + decoded_cuda_encoded_image = decode_jpeg(encoded_image_cuda.cpu()) + assert decoded_cuda_encoded_image.shape == decoded_image_tv.shape + assert decoded_cuda_encoded_image.dtype == decoded_image_tv.dtype + assert (decoded_cuda_encoded_image.cpu().float() - 
decoded_image_tv.cpu().float()).abs().mean() < 3 + + +@needs_cuda +def test_single_encode_jpeg_cuda_errors(): + with pytest.raises(RuntimeError, match="Input tensor dtype should be uint8"): + encode_jpeg(torch.empty((3, 100, 100), dtype=torch.float32, device="cuda")) + + with pytest.raises(RuntimeError, match="The number of channels should be 3, got: 5"): + encode_jpeg(torch.empty((5, 100, 100), dtype=torch.uint8, device="cuda")) + + with pytest.raises(RuntimeError, match="The number of channels should be 3, got: 1"): + encode_jpeg(torch.empty((1, 100, 100), dtype=torch.uint8, device="cuda")) + + with pytest.raises(RuntimeError, match="Input data should be a 3-dimensional tensor"): + encode_jpeg(torch.empty((1, 3, 100, 100), dtype=torch.uint8, device="cuda")) + + with pytest.raises(RuntimeError, match="Input data should be a 3-dimensional tensor"): + encode_jpeg(torch.empty((100, 100), dtype=torch.uint8, device="cuda")) + + +@needs_cuda +def test_batch_encode_jpegs_cuda_errors(): + with pytest.raises(RuntimeError, match="Input tensor dtype should be uint8"): + encode_jpeg( + [ + torch.empty((3, 100, 100), dtype=torch.uint8, device="cuda"), + torch.empty((3, 100, 100), dtype=torch.float32, device="cuda"), + ] + ) + + with pytest.raises(RuntimeError, match="The number of channels should be 3, got: 5"): + encode_jpeg( + [ + torch.empty((3, 100, 100), dtype=torch.uint8, device="cuda"), + torch.empty((5, 100, 100), dtype=torch.uint8, device="cuda"), + ] + ) + + with pytest.raises(RuntimeError, match="The number of channels should be 3, got: 1"): + encode_jpeg( + [ + torch.empty((3, 100, 100), dtype=torch.uint8, device="cuda"), + torch.empty((1, 100, 100), dtype=torch.uint8, device="cuda"), + ] + ) + + with pytest.raises(RuntimeError, match="Input data should be a 3-dimensional tensor"): + encode_jpeg( + [ + torch.empty((3, 100, 100), dtype=torch.uint8, device="cuda"), + torch.empty((1, 3, 100, 100), dtype=torch.uint8, device="cuda"), + ] + ) + + with pytest.raises(RuntimeError, match="Input data should be a 3-dimensional tensor"): + encode_jpeg( + [ + torch.empty((3, 100, 100), dtype=torch.uint8, device="cuda"), + torch.empty((100, 100), dtype=torch.uint8, device="cuda"), + ] + ) + + with pytest.raises(RuntimeError, match="Input tensor should be on CPU"): + encode_jpeg( + [ + torch.empty((3, 100, 100), dtype=torch.uint8, device="cpu"), + torch.empty((3, 100, 100), dtype=torch.uint8, device="cuda"), + ] + ) + + with pytest.raises( + RuntimeError, match="All input tensors must be on the same CUDA device when encoding with nvjpeg" + ): + encode_jpeg( + [ + torch.empty((3, 100, 100), dtype=torch.uint8, device="cuda"), + torch.empty((3, 100, 100), dtype=torch.uint8, device="cpu"), + ] + ) + + if torch.cuda.device_count() >= 2: + with pytest.raises( + RuntimeError, match="All input tensors must be on the same CUDA device when encoding with nvjpeg" + ): + encode_jpeg( + [ + torch.empty((3, 100, 100), dtype=torch.uint8, device="cuda:0"), + torch.empty((3, 100, 100), dtype=torch.uint8, device="cuda:1"), + ] + ) + + with pytest.raises(ValueError, match="encode_jpeg requires at least one input tensor when a list is passed"): + encode_jpeg([]) + + +@pytest.mark.skipif(IS_MACOS, reason="https://github.com/pytorch/vision/issues/8031") +@pytest.mark.parametrize( + "img_path", + [pytest.param(jpeg_path, id=_get_safe_image_name(jpeg_path)) for jpeg_path in get_images(ENCODE_JPEG, ".jpg")], +) +@pytest.mark.parametrize("scripted", (True, False)) +def test_write_jpeg(img_path, tmpdir, scripted): + tmpdir = 
Path(tmpdir) + img = read_image(img_path) + pil_img = F.to_pil_image(img) + + torch_jpeg = str(tmpdir / "torch.jpg") + pil_jpeg = str(tmpdir / "pil.jpg") + + write = torch.jit.script(write_jpeg) if scripted else write_jpeg + write(img, torch_jpeg, quality=75) + pil_img.save(pil_jpeg, quality=75) + + with open(torch_jpeg, "rb") as f: + torch_bytes = f.read() + + with open(pil_jpeg, "rb") as f: + pil_bytes = f.read() + + assert_equal(torch_bytes, pil_bytes) + + +def test_pathlib_support(tmpdir): + # Just make sure pathlib.Path is supported where relevant + + jpeg_path = Path(next(get_images(ENCODE_JPEG, ".jpg"))) + + read_file(jpeg_path) + read_image(jpeg_path) + + write_path = Path(tmpdir) / "whatever" + img = torch.randint(0, 10, size=(3, 4, 4), dtype=torch.uint8) + + write_file(write_path, data=img.flatten()) + write_jpeg(img, write_path) + write_png(img, write_path) + + +@pytest.mark.parametrize( + "name", ("gifgrid", "fire", "porsche", "treescap", "treescap-interlaced", "solid2", "x-trans", "earth") +) +@pytest.mark.parametrize("scripted", (True, False)) +def test_decode_gif(tmpdir, name, scripted): + # Using test images from GIFLIB + # https://sourceforge.net/p/giflib/code/ci/master/tree/pic/, we assert PIL + # and torchvision decoded outputs are equal. + # We're not testing against "welcome2" because PIL and GIFLIB disagree on what + # the background color should be (likely a difference in the way they handle + # transparency?) + # 'earth' image is from Wikipedia, licensed under CC BY-SA 3.0 + # https://creativecommons.org/licenses/by-sa/3.0/ + # it allows us to properly test for transparency, TOP-LEFT offsets, and + # disposal modes. + + path = tmpdir / f"{name}.gif" + if name == "earth": + if IN_OSS_CI: + # TODO: Fix this... one day. + pytest.skip("Skipping 'earth' test as it's flaky on OSS CI") + url = "https://upload.wikimedia.org/wikipedia/commons/2/2c/Rotating_earth_%28large%29.gif" + else: + url = f"https://sourceforge.net/p/giflib/code/ci/master/tree/pic/{name}.gif?format=raw" + with open(path, "wb") as f: + f.write(requests.get(url).content) + + encoded_bytes = read_file(path) + f = torch.jit.script(decode_gif) if scripted else decode_gif + tv_out = f(encoded_bytes) + if tv_out.ndim == 3: + tv_out = tv_out[None] + + assert tv_out.is_contiguous(memory_format=torch.channels_last) + + # For some reason, not using Image.open() as a CM causes "ResourceWarning: unclosed file" + with Image.open(path) as pil_img: + pil_seq = ImageSequence.Iterator(pil_img) + + for pil_frame, tv_frame in zip(pil_seq, tv_out): + pil_frame = F.pil_to_tensor(pil_frame.convert("RGB")) + torch.testing.assert_close(tv_frame, pil_frame, atol=0, rtol=0) + + +@pytest.mark.parametrize( + "decode_fun, match", + [ + (decode_png, "Content is not png"), + (decode_jpeg, "Not a JPEG file"), + (decode_gif, re.escape("DGifOpenFileName() failed - 103")), + (decode_webp, "WebPGetFeatures failed."), + pytest.param( + decode_avif, "BMFF parsing failed", marks=pytest.mark.skipif(not IS_LINUX, reason=HEIC_AVIF_MESSAGE) + ), + pytest.param( + decode_heic, + "Invalid input: No 'ftyp' box", + marks=pytest.mark.skipif(not IS_LINUX, reason=HEIC_AVIF_MESSAGE), + ), + ], +) +def test_decode_bad_encoded_data(decode_fun, match): + encoded_data = torch.randint(0, 256, (100,), dtype=torch.uint8) + with pytest.raises(RuntimeError, match="Input tensor must be 1-dimensional"): + decode_fun(encoded_data[None]) + with pytest.raises(RuntimeError, match="Input tensor must have uint8 data type"): + decode_fun(encoded_data.float()) + with 
pytest.raises(RuntimeError, match="Input tensor must be contiguous"): + decode_fun(encoded_data[::2]) + with pytest.raises(RuntimeError, match=match): + decode_fun(encoded_data) + + +@pytest.mark.parametrize("decode_fun", (decode_webp, decode_image)) +@pytest.mark.parametrize("scripted", (False, True)) +def test_decode_webp(decode_fun, scripted): + encoded_bytes = read_file(next(get_images(FAKEDATA_DIR, ".webp"))) + if scripted: + decode_fun = torch.jit.script(decode_fun) + img = decode_fun(encoded_bytes) + assert img.shape == (3, 100, 100) + assert img[None].is_contiguous(memory_format=torch.channels_last) + img += 123 # make sure image buffer wasn't freed by underlying decoding lib + + +# This test is skipped by default because it requires webp images that we're not +# including within the repo. The test images were downloaded manually from the +# different pages of https://developers.google.com/speed/webp/gallery +@pytest.mark.skipif(not WEBP_TEST_IMAGES_DIR, reason="WEBP_TEST_IMAGES_DIR is not set") +@pytest.mark.parametrize("decode_fun", (decode_webp, decode_image)) +@pytest.mark.parametrize("scripted", (False, True)) +@pytest.mark.parametrize( + "mode, pil_mode", + ( + # Note that converting an RGBA image to RGB leads to bad results because the + # transparent pixels aren't necessarily set to "black" or "white", they can be + # random stuff. This is consistent with PIL results. + (ImageReadMode.RGB, "RGB"), + (ImageReadMode.RGB_ALPHA, "RGBA"), + (ImageReadMode.UNCHANGED, None), + ), +) +@pytest.mark.parametrize("filename", Path(WEBP_TEST_IMAGES_DIR).glob("*.webp"), ids=lambda p: p.name) +def test_decode_webp_against_pil(decode_fun, scripted, mode, pil_mode, filename): + encoded_bytes = read_file(filename) + if scripted: + decode_fun = torch.jit.script(decode_fun) + img = decode_fun(encoded_bytes, mode=mode) + assert img[None].is_contiguous(memory_format=torch.channels_last) + + pil_img = Image.open(filename).convert(pil_mode) + from_pil = F.pil_to_tensor(pil_img) + assert_equal(img, from_pil) + img += 123 # make sure image buffer wasn't freed by underlying decoding lib + + +@pytest.mark.skipif(not IS_LINUX, reason=HEIC_AVIF_MESSAGE) +@pytest.mark.parametrize("decode_fun", (decode_avif,)) +def test_decode_avif(decode_fun): + encoded_bytes = read_file(next(get_images(FAKEDATA_DIR, ".avif"))) + img = decode_fun(encoded_bytes) + assert img.shape == (3, 100, 100) + assert img[None].is_contiguous(memory_format=torch.channels_last) + img += 123 # make sure image buffer wasn't freed by underlying decoding lib + + +# Note: decode_image fails because some of these files have a (valid) signature +# we don't recognize. We should probably use libmagic.... +@pytest.mark.skipif(not IS_LINUX, reason=HEIC_AVIF_MESSAGE) +@pytest.mark.parametrize("decode_fun", (decode_avif, decode_heic)) +@pytest.mark.parametrize( + "mode, pil_mode", + ( + (ImageReadMode.RGB, "RGB"), + (ImageReadMode.RGB_ALPHA, "RGBA"), + (ImageReadMode.UNCHANGED, None), + ), +) +@pytest.mark.parametrize( + "filename", Path("/home/nicolashug/dev/libavif/tests/data/").glob("*.avif"), ids=lambda p: p.name +) +def test_decode_avif_heic_against_pil(decode_fun, mode, pil_mode, filename): + if "reversed_dimg_order" in str(filename): + # Pillow properly decodes this one, but we don't (order of parts of the + # image is wrong). This is due to a bug that was recently fixed in + # libavif. 
Hopefully this test will end up passing soon with a new + # libavif version https://github.com/AOMediaCodec/libavif/issues/2311 + pytest.xfail() + import pillow_avif # noqa + + encoded_bytes = read_file(filename) + try: + img = decode_fun(encoded_bytes, mode=mode) + except RuntimeError as e: + if any( + s in str(e) + for s in ( + "BMFF parsing failed", + "avifDecoderParse failed: ", + "file contains more than one image", + "no 'ispe' property", + "'iref' has double references", + "Invalid image grid", + "decode_heif failed: Invalid input: No 'meta' box", + ) + ): + pytest.skip(reason="Expected failure, that's OK") + else: + raise e + assert img[None].is_contiguous(memory_format=torch.channels_last) + if mode == ImageReadMode.RGB: + assert img.shape[0] == 3 + if mode == ImageReadMode.RGB_ALPHA: + assert img.shape[0] == 4 + + if img.dtype == torch.uint16: + img = F.to_dtype(img, dtype=torch.uint8, scale=True) + try: + from_pil = F.pil_to_tensor(Image.open(filename).convert(pil_mode)) + except RuntimeError as e: + if any(s in str(e) for s in ("Invalid image grid", "Failed to decode image: Not implemented")): + pytest.skip(reason="PIL failure") + else: + raise e + + if True: + from torchvision.utils import make_grid + + g = make_grid([img, from_pil]) + F.to_pil_image(g).save((f"/home/nicolashug/out_images/{filename.name}.{pil_mode}.png")) + + is_decode_heic = getattr(decode_fun, "__name__", getattr(decode_fun, "name", None)) == "decode_heic" + if mode == ImageReadMode.RGB and not is_decode_heic: + # We don't compare torchvision's AVIF against PIL for RGB because + # results look pretty different on RGBA images (other images are fine). + # The torchvision result basically just ignores the alpha + # channel, resulting in transparent pixels looking dark. PIL seems to be + # using a sort of k-NN thing (take a look at the resulting images) + return + if filename.name == "sofa_grid1x5_420.avif" and is_decode_heic: + return + + torch.testing.assert_close(img, from_pil, rtol=0, atol=3) + + +@pytest.mark.skipif(not IS_LINUX, reason=HEIC_AVIF_MESSAGE) +@pytest.mark.parametrize("decode_fun", (decode_heic,)) +def test_decode_heic(decode_fun): + encoded_bytes = read_file(next(get_images(FAKEDATA_DIR, ".heic"))) + img = decode_fun(encoded_bytes) + assert img.shape == (3, 100, 100) + assert img[None].is_contiguous(memory_format=torch.channels_last) + img += 123 # make sure image buffer wasn't freed by underlying decoding lib + + +@pytest.mark.parametrize("input_type", ("Path", "str", "tensor")) +@pytest.mark.parametrize("scripted", (False, True)) +def test_decode_image_path(input_type, scripted): + # Check that decode_image can support not just tensors as input + path = next(get_images(IMAGE_ROOT, ".jpg")) + if input_type == "Path": + input = Path(path) + elif input_type == "str": + input = path + elif input_type == "tensor": + input = read_file(path) + else: + raise ValueError("Oops") + + if scripted and input_type == "Path": + pytest.xfail(reason="Can't pass a Path when scripting") + + decode_fun = torch.jit.script(decode_image) if scripted else decode_image + decode_fun(input) + + +def test_mode_str(): + # Make sure decode_image supports string modes. We just test decode_image, + # not all of the decoding functions, but they should all support that too. + # Torchscript fails when passing strings, which is expected. 
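+ # (An assumption, not verified against the decoder internals: the mode string + # seems to be matched case-insensitively against ImageReadMode member names, + # which is why "rGb" below behaves like "RGB".)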
+ path = next(get_images(IMAGE_ROOT, ".png")) + assert decode_image(path, mode="RGB").shape[0] == 3 + assert decode_image(path, mode="rGb").shape[0] == 3 + assert decode_image(path, mode="GRAY").shape[0] == 1 + assert decode_image(path, mode="RGBA").shape[0] == 4 + + +if __name__ == "__main__": + pytest.main([__file__]) diff --git a/test/test_internal_utils.py b/test/test_internal_utils.py new file mode 100644 index 00000000000..f5f8a040db9 --- /dev/null +++ b/test/test_internal_utils.py @@ -0,0 +1,17 @@ +import pytest +from torchvision._utils import sequence_to_str + + +@pytest.mark.parametrize( + ("seq", "separate_last", "expected"), + [ + ([], "", ""), + (["foo"], "", "'foo'"), + (["foo", "bar"], "", "'foo', 'bar'"), + (["foo", "bar"], "and ", "'foo' and 'bar'"), + (["foo", "bar", "baz"], "", "'foo', 'bar', 'baz'"), + (["foo", "bar", "baz"], "and ", "'foo', 'bar', and 'baz'"), + ], +) +def test_sequence_to_str(seq, separate_last, expected): + assert sequence_to_str(seq, separate_last=separate_last) == expected diff --git a/test/test_internet.py b/test/test_internet.py new file mode 100644 index 00000000000..34fc3d4aa08 --- /dev/null +++ b/test/test_internet.py @@ -0,0 +1,64 @@ +"""This file should contain all tests that need access to the internet (apart +from the ones in test_datasets_download.py) + +We want to bundle all internet-related tests in one file, so the file can be +cleanly ignored in FB internal test infra. +""" + +import os +import pathlib +from urllib.error import URLError + +import pytest +import torchvision.datasets.utils as utils + + +class TestDatasetUtils: + @pytest.mark.parametrize("use_pathlib", (True, False)) + def test_download_url(self, tmpdir, use_pathlib): + if use_pathlib: + tmpdir = pathlib.Path(tmpdir) + url = "http://github.com/pytorch/vision/archive/master.zip" + try: + utils.download_url(url, tmpdir) + assert len(os.listdir(tmpdir)) != 0 + except URLError: + pytest.skip(f"could not download test file '{url}'") + + @pytest.mark.parametrize("use_pathlib", (True, False)) + def test_download_url_retry_http(self, tmpdir, use_pathlib): + if use_pathlib: + tmpdir = pathlib.Path(tmpdir) + url = "https://github.com/pytorch/vision/archive/master.zip" + try: + utils.download_url(url, tmpdir) + assert len(os.listdir(tmpdir)) != 0 + except URLError: + pytest.skip(f"could not download test file '{url}'") + + @pytest.mark.parametrize("use_pathlib", (True, False)) + def test_download_url_dont_exist(self, tmpdir, use_pathlib): + if use_pathlib: + tmpdir = pathlib.Path(tmpdir) + url = "http://github.com/pytorch/vision/archive/this_doesnt_exist.zip" + with pytest.raises(URLError): + utils.download_url(url, tmpdir) + + @pytest.mark.parametrize("use_pathlib", (True, False)) + def test_download_url_dispatch_download_from_google_drive(self, mocker, tmpdir, use_pathlib): + if use_pathlib: + tmpdir = pathlib.Path(tmpdir) + url = "https://drive.google.com/file/d/1GO-BHUYRuvzr1Gtp2_fqXRsr9TIeYbhV/view" + + id = "1GO-BHUYRuvzr1Gtp2_fqXRsr9TIeYbhV" + filename = "filename" + md5 = "md5" + + mocked = mocker.patch("torchvision.datasets.utils.download_file_from_google_drive") + utils.download_url(url, tmpdir, filename, md5) + + mocked.assert_called_once_with(id, os.path.expanduser(tmpdir), filename, md5) + + +if __name__ == "__main__": + pytest.main([__file__]) diff --git a/test/test_io.py b/test/test_io.py index db292b73e0f..d2950ac9595 100644 --- a/test/test_io.py +++ b/test/test_io.py @@ -1,31 +1,29 @@ -import os import contextlib +import os +import sys import tempfile + +import 
pytest import torch -import torchvision.datasets.utils as utils import torchvision.io as io +from common_utils import assert_equal, cpu_and_cuda from torchvision import get_video_backend -import unittest -import sys -import warnings - -from common_utils import get_tmp_dir -if sys.version_info < (3,): - from urllib2 import URLError -else: - from urllib.error import URLError try: import av + # Do a version test too io.video._check_av_available() except ImportError: av = None +VIDEO_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "assets", "videos") + + def _create_video_frames(num_frames, height, width): - y, x = torch.meshgrid(torch.linspace(-2, 2, height), torch.linspace(-2, 2, width)) + y, x = torch.meshgrid(torch.linspace(-2, 2, height), torch.linspace(-2, 2, width), indexing="ij") data = [] for i in range(num_frames): xc = float(i) / num_frames @@ -43,30 +41,32 @@ def temp_video(num_frames, height, width, fps, lossless=False, video_codec=None, raise ValueError("video_codec can't be specified together with lossless") if options is not None: raise ValueError("options can't be specified together with lossless") - video_codec = 'libx264rgb' - options = {'crf': '0'} + video_codec = "libx264rgb" + options = {"crf": "0"} if video_codec is None: if get_video_backend() == "pyav": - video_codec = 'libx264' + video_codec = "libx264" else: # when video_codec is not set, we assume it is libx264rgb which accepts # RGB pixel formats as input instead of YUV - video_codec = 'libx264rgb' + video_codec = "libx264rgb" if options is None: options = {} data = _create_video_frames(num_frames, height, width) - with tempfile.NamedTemporaryFile(suffix='.mp4') as f: + with tempfile.NamedTemporaryFile(suffix=".mp4") as f: + f.close() io.write_video(f.name, data, fps=fps, video_codec=video_codec, options=options) yield f.name, data + os.unlink(f.name) -@unittest.skipIf(get_video_backend() != "pyav" and not io._HAS_VIDEO_OPT, - "video_reader backend not available") -@unittest.skipIf(av is None, "PyAV unavailable") -@unittest.skipIf(sys.platform == 'win32', 'temporarily disabled on Windows') -class Tester(unittest.TestCase): +@pytest.mark.skipif( + get_video_backend() != "pyav" and not io._HAS_CPU_VIDEO_DECODER, reason="video_reader backend not available" +) +@pytest.mark.skipif(av is None, reason="PyAV unavailable") +class TestVideo: # compression adds artifacts, thus we add a tolerance of # 6 in 0-255 range TOLERANCE = 6 @@ -74,24 +74,24 @@ class Tester(unittest.TestCase): def test_write_read_video(self): with temp_video(10, 300, 300, 5, lossless=True) as (f_name, data): lv, _, info = io.read_video(f_name) - self.assertTrue(data.equal(lv)) - self.assertEqual(info["video_fps"], 5) + assert_equal(data, lv) + assert info["video_fps"] == 5 - @unittest.skipIf(not io._HAS_VIDEO_OPT, "video_reader backend is not chosen") + @pytest.mark.skipif(not io._HAS_CPU_VIDEO_DECODER, reason="video_reader backend is not chosen") def test_probe_video_from_file(self): with temp_video(10, 300, 300, 5) as (f_name, data): video_info = io._probe_video_from_file(f_name) - self.assertAlmostEqual(video_info["video_duration"], 2, delta=0.1) - self.assertAlmostEqual(video_info["video_fps"], 5, delta=0.1) + assert pytest.approx(2, rel=0.0, abs=0.1) == video_info.video_duration + assert pytest.approx(5, rel=0.0, abs=0.1) == video_info.video_fps - @unittest.skipIf(not io._HAS_VIDEO_OPT, "video_reader backend is not chosen") + @pytest.mark.skipif(not io._HAS_CPU_VIDEO_DECODER, reason="video_reader backend is not chosen") def 
test_probe_video_from_memory(self): with temp_video(10, 300, 300, 5) as (f_name, data): with open(f_name, "rb") as fp: filebuffer = fp.read() video_info = io._probe_video_from_memory(filebuffer) - self.assertAlmostEqual(video_info["video_duration"], 2, delta=0.1) - self.assertAlmostEqual(video_info["video_fps"], 5, delta=0.1) + assert pytest.approx(2, rel=0.0, abs=0.1) == video_info.video_duration + assert pytest.approx(5, rel=0.0, abs=0.1) == video_info.video_fps def test_read_timestamps(self): with temp_video(10, 300, 300, 5) as (f_name, data): @@ -99,167 +99,194 @@ def test_read_timestamps(self): # note: not all formats/codecs provide accurate information for computing the # timestamps. For the format that we use here, this information is available, # so we use it as a baseline - container = av.open(f_name) - stream = container.streams[0] - pts_step = int(round(float(1 / (stream.average_rate * stream.time_base)))) - num_frames = int(round(float(stream.average_rate * stream.time_base * stream.duration))) - expected_pts = [i * pts_step for i in range(num_frames)] + with av.open(f_name) as container: + stream = container.streams[0] + pts_step = int(round(float(1 / (stream.average_rate * stream.time_base)))) + num_frames = int(round(float(stream.average_rate * stream.time_base * stream.duration))) + expected_pts = [i * pts_step for i in range(num_frames)] - self.assertEqual(pts, expected_pts) + assert pts == expected_pts - def test_read_partial_video(self): + @pytest.mark.parametrize("start", range(5)) + @pytest.mark.parametrize("offset", range(1, 4)) + def test_read_partial_video(self, start, offset): with temp_video(10, 300, 300, 5, lossless=True) as (f_name, data): pts, _ = io.read_video_timestamps(f_name) - for start in range(5): - for l in range(1, 4): - lv, _, _ = io.read_video(f_name, pts[start], pts[start + l - 1]) - s_data = data[start:(start + l)] - self.assertEqual(len(lv), l) - self.assertTrue(s_data.equal(lv)) + + lv, _, _ = io.read_video(f_name, pts[start], pts[start + offset - 1]) + s_data = data[start : (start + offset)] + assert len(lv) == offset + assert_equal(s_data, lv) if get_video_backend() == "pyav": # for "video_reader" backend, we don't decode the closest early frame # when the given start pts is not matching any frame pts lv, _, _ = io.read_video(f_name, pts[4] + 1, pts[7]) - self.assertEqual(len(lv), 4) - self.assertTrue(data[4:8].equal(lv)) + assert len(lv) == 4 + assert_equal(data[4:8], lv) - def test_read_partial_video_bframes(self): + @pytest.mark.parametrize("start", range(0, 80, 20)) + @pytest.mark.parametrize("offset", range(1, 4)) + def test_read_partial_video_bframes(self, start, offset): # do not use lossless encoding, to test the presence of B-frames - options = {'bframes': '16', 'keyint': '10', 'min-keyint': '4'} + options = {"bframes": "16", "keyint": "10", "min-keyint": "4"} with temp_video(100, 300, 300, 5, options=options) as (f_name, data): pts, _ = io.read_video_timestamps(f_name) - for start in range(0, 80, 20): - for l in range(1, 4): - lv, _, _ = io.read_video(f_name, pts[start], pts[start + l - 1]) - s_data = data[start:(start + l)] - self.assertEqual(len(lv), l) - self.assertTrue((s_data.float() - lv.float()).abs().max() < self.TOLERANCE) + + lv, _, _ = io.read_video(f_name, pts[start], pts[start + offset - 1]) + s_data = data[start : (start + offset)] + assert len(lv) == offset + assert_equal(s_data, lv, rtol=0.0, atol=self.TOLERANCE) lv, _, _ = io.read_video(f_name, pts[4] + 1, pts[7]) # TODO fix this - if get_video_backend() == 'pyav': - 
self.assertEqual(len(lv), 4) - self.assertTrue((data[4:8].float() - lv.float()).abs().max() < self.TOLERANCE) + if get_video_backend() == "pyav": + assert len(lv) == 4 + assert_equal(data[4:8], lv, rtol=0.0, atol=self.TOLERANCE) else: - self.assertEqual(len(lv), 3) - self.assertTrue((data[5:8].float() - lv.float()).abs().max() < self.TOLERANCE) + assert len(lv) == 3 + assert_equal(data[5:8], lv, rtol=0.0, atol=self.TOLERANCE) def test_read_packed_b_frames_divx_file(self): - with get_tmp_dir() as temp_dir: - name = "hmdb51_Turnk_r_Pippi_Michel_cartwheel_f_cm_np2_le_med_6.avi" - f_name = os.path.join(temp_dir, name) - url = "https://download.pytorch.org/vision_tests/io/" + name - try: - utils.download_url(url, temp_dir) - pts, fps = io.read_video_timestamps(f_name) - - self.assertEqual(pts, sorted(pts)) - self.assertEqual(fps, 30) - except URLError: - msg = "could not download test file '{}'".format(url) - warnings.warn(msg, RuntimeWarning) - raise unittest.SkipTest(msg) + name = "hmdb51_Turnk_r_Pippi_Michel_cartwheel_f_cm_np2_le_med_6.avi" + f_name = os.path.join(VIDEO_DIR, name) + pts, fps = io.read_video_timestamps(f_name) + + assert pts == sorted(pts) + assert fps == 30 def test_read_timestamps_from_packet(self): - with temp_video(10, 300, 300, 5, video_codec='mpeg4') as (f_name, data): + with temp_video(10, 300, 300, 5, video_codec="mpeg4") as (f_name, data): pts, _ = io.read_video_timestamps(f_name) # note: not all formats/codecs provide accurate information for computing the # timestamps. For the format that we use here, this information is available, # so we use it as a baseline - container = av.open(f_name) - stream = container.streams[0] - # make sure we went through the optimized codepath - self.assertIn(b'Lavc', stream.codec_context.extradata) - pts_step = int(round(float(1 / (stream.average_rate * stream.time_base)))) - num_frames = int(round(float(stream.average_rate * stream.time_base * stream.duration))) - expected_pts = [i * pts_step for i in range(num_frames)] + with av.open(f_name) as container: + stream = container.streams[0] + # make sure we went through the optimized codepath + assert b"Lavc" in stream.codec_context.extradata + pts_step = int(round(float(1 / (stream.average_rate * stream.time_base)))) + num_frames = int(round(float(stream.average_rate * stream.time_base * stream.duration))) + expected_pts = [i * pts_step for i in range(num_frames)] - self.assertEqual(pts, expected_pts) + assert pts == expected_pts def test_read_video_pts_unit_sec(self): with temp_video(10, 300, 300, 5, lossless=True) as (f_name, data): - lv, _, info = io.read_video(f_name, pts_unit='sec') + lv, _, info = io.read_video(f_name, pts_unit="sec") - self.assertTrue(data.equal(lv)) - self.assertEqual(info["video_fps"], 5) - self.assertEqual(info, {"video_fps": 5}) + assert_equal(data, lv) + assert info["video_fps"] == 5 + assert info == {"video_fps": 5} def test_read_timestamps_pts_unit_sec(self): with temp_video(10, 300, 300, 5) as (f_name, data): - pts, _ = io.read_video_timestamps(f_name, pts_unit='sec') + pts, _ = io.read_video_timestamps(f_name, pts_unit="sec") - container = av.open(f_name) - stream = container.streams[0] - pts_step = int(round(float(1 / (stream.average_rate * stream.time_base)))) - num_frames = int(round(float(stream.average_rate * stream.time_base * stream.duration))) - expected_pts = [i * pts_step * stream.time_base for i in range(num_frames)] + with av.open(f_name) as container: + stream = container.streams[0] + pts_step = int(round(float(1 / (stream.average_rate * 
stream.time_base)))) + num_frames = int(round(float(stream.average_rate * stream.time_base * stream.duration))) + expected_pts = [i * pts_step * stream.time_base for i in range(num_frames)] - self.assertEqual(pts, expected_pts) + assert pts == expected_pts - def test_read_partial_video_pts_unit_sec(self): + @pytest.mark.parametrize("start", range(5)) + @pytest.mark.parametrize("offset", range(1, 4)) + def test_read_partial_video_pts_unit_sec(self, start, offset): with temp_video(10, 300, 300, 5, lossless=True) as (f_name, data): - pts, _ = io.read_video_timestamps(f_name, pts_unit='sec') - - for start in range(5): - for l in range(1, 4): - lv, _, _ = io.read_video(f_name, pts[start], pts[start + l - 1], pts_unit='sec') - s_data = data[start:(start + l)] - self.assertEqual(len(lv), l) - self.assertTrue(s_data.equal(lv)) - - container = av.open(f_name) - stream = container.streams[0] - lv, _, _ = io.read_video(f_name, - int(pts[4] * (1.0 / stream.time_base) + 1) * stream.time_base, pts[7], - pts_unit='sec') + pts, _ = io.read_video_timestamps(f_name, pts_unit="sec") + + lv, _, _ = io.read_video(f_name, pts[start], pts[start + offset - 1], pts_unit="sec") + s_data = data[start : (start + offset)] + assert len(lv) == offset + assert_equal(s_data, lv) + + with av.open(f_name) as container: + stream = container.streams[0] + lv, _, _ = io.read_video( + f_name, int(pts[4] * (1.0 / stream.time_base) + 1) * stream.time_base, pts[7], pts_unit="sec" + ) if get_video_backend() == "pyav": # for "video_reader" backend, we don't decode the closest early frame # when the given start pts is not matching any frame pts - self.assertEqual(len(lv), 4) - self.assertTrue(data[4:8].equal(lv)) + assert len(lv) == 4 + assert_equal(data[4:8], lv) def test_read_video_corrupted_file(self): - with tempfile.NamedTemporaryFile(suffix='.mp4') as f: - f.write(b'This is not an mpg4 file') + with tempfile.NamedTemporaryFile(suffix=".mp4") as f: + f.write(b"This is not an mpg4 file") video, audio, info = io.read_video(f.name) - self.assertIsInstance(video, torch.Tensor) - self.assertIsInstance(audio, torch.Tensor) - self.assertEqual(video.numel(), 0) - self.assertEqual(audio.numel(), 0) - self.assertEqual(info, {}) + assert isinstance(video, torch.Tensor) + assert isinstance(audio, torch.Tensor) + assert video.numel() == 0 + assert audio.numel() == 0 + assert info == {} def test_read_video_timestamps_corrupted_file(self): - with tempfile.NamedTemporaryFile(suffix='.mp4') as f: - f.write(b'This is not an mpg4 file') + with tempfile.NamedTemporaryFile(suffix=".mp4") as f: + f.write(b"This is not an mpg4 file") video_pts, video_fps = io.read_video_timestamps(f.name) - self.assertEqual(video_pts, []) - self.assertIs(video_fps, None) + assert video_pts == [] + assert video_fps is None + @pytest.mark.skip(reason="Temporarily disabled due to new pyav") def test_read_video_partially_corrupted_file(self): with temp_video(5, 4, 4, 5, lossless=True) as (f_name, data): - with open(f_name, 'r+b') as f: + with open(f_name, "r+b") as f: size = os.path.getsize(f_name) bytes_to_overwrite = size // 10 # seek to the middle of the file f.seek(5 * bytes_to_overwrite) # corrupt 10% of the file from the middle - f.write(b'\xff' * bytes_to_overwrite) + f.write(b"\xff" * bytes_to_overwrite) # this exercises the container.decode assertion check - video, audio, info = io.read_video(f.name, pts_unit='sec') + video, audio, info = io.read_video(f.name, pts_unit="sec") # check that size is not equal to 5, but 3 # TODO fix this - if get_video_backend() == 
'pyav': - self.assertEqual(len(video), 3) + if get_video_backend() == "pyav": + assert len(video) == 3 else: - self.assertEqual(len(video), 4) + assert len(video) == 4 # but the valid decoded content is still correct - self.assertTrue(video[:3].equal(data[:3])) + assert_equal(video[:3], data[:3]) # and the last few frames are wrong - self.assertFalse(video.equal(data)) + with pytest.raises(AssertionError): + assert_equal(video, data) + + @pytest.mark.skipif(sys.platform == "win32", reason="temporarily disabled on Windows") + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_write_video_with_audio(self, device, tmpdir): + f_name = os.path.join(VIDEO_DIR, "R6llTwEh07w.mp4") + video_tensor, audio_tensor, info = io.read_video(f_name, pts_unit="sec") + + out_f_name = os.path.join(tmpdir, "testing.mp4") + io.video.write_video( + out_f_name, + video_tensor.to(device), + round(info["video_fps"]), + video_codec="libx264rgb", + options={"crf": "0"}, + audio_array=audio_tensor.to(device), + audio_fps=info["audio_fps"], + audio_codec="aac", + ) + + out_video_tensor, out_audio_tensor, out_info = io.read_video(out_f_name, pts_unit="sec") + + assert info["video_fps"] == out_info["video_fps"] + assert_equal(video_tensor, out_video_tensor) + + audio_stream = av.open(f_name).streams.audio[0] + out_audio_stream = av.open(out_f_name).streams.audio[0] + + assert info["audio_fps"] == out_info["audio_fps"] + assert audio_stream.rate == out_audio_stream.rate + assert pytest.approx(out_audio_stream.frames, rel=0.0, abs=1) == audio_stream.frames + assert audio_stream.frame_size == out_audio_stream.frame_size # TODO add tests for audio -if __name__ == '__main__': - unittest.main() +if __name__ == "__main__": + pytest.main([__file__]) diff --git a/test/test_io_opt.py b/test/test_io_opt.py index 1ad3dea8fa2..f4e3d305295 100644 --- a/test/test_io_opt.py +++ b/test/test_io_opt.py @@ -1,11 +1,13 @@ import unittest -from torchvision import set_video_backend + import test_io +from torchvision import set_video_backend # noqa: F401 -set_video_backend('video_reader') +# Disabling the video backend switching temporarily +# set_video_backend('video_reader') -if __name__ == '__main__': +if __name__ == "__main__": suite = unittest.TestLoader().loadTestsFromModule(test_io) unittest.TextTestRunner(verbosity=1).run(suite) diff --git a/test/test_models.cpp b/test/test_models.cpp deleted file mode 100644 index 092fc567ac2..00000000000 --- a/test/test_models.cpp +++ /dev/null @@ -1,209 +0,0 @@ -#include -#include -#include - -#include "../torchvision/csrc/models/models.h" - -using namespace vision::models; - -template <typename Model> -torch::Tensor forward_model(const std::string& input_path, torch::Tensor x) { - Model network; - torch::load(network, input_path); - network->eval(); - return network->forward(x); -} - -torch::Tensor forward_alexnet(const std::string& input_path, torch::Tensor x) { - return forward_model<AlexNet>(input_path, x); -} - -torch::Tensor forward_vgg11(const std::string& input_path, torch::Tensor x) { - return forward_model<VGG11>(input_path, x); -} -torch::Tensor forward_vgg13(const std::string& input_path, torch::Tensor x) { - return forward_model<VGG13>(input_path, x); -} -torch::Tensor forward_vgg16(const std::string& input_path, torch::Tensor x) { - return forward_model<VGG16>(input_path, x); -} -torch::Tensor forward_vgg19(const std::string& input_path, torch::Tensor x) { - return forward_model<VGG19>(input_path, x); -} - -torch::Tensor forward_vgg11bn(const std::string& input_path, torch::Tensor x) { - return forward_model<VGG11BN>(input_path, x); -}
-torch::Tensor forward_vgg13bn(const std::string& input_path, torch::Tensor x) { - return forward_model<VGG13BN>(input_path, x); -} -torch::Tensor forward_vgg16bn(const std::string& input_path, torch::Tensor x) { - return forward_model<VGG16BN>(input_path, x); -} -torch::Tensor forward_vgg19bn(const std::string& input_path, torch::Tensor x) { - return forward_model<VGG19BN>(input_path, x); -} - -torch::Tensor forward_resnet18(const std::string& input_path, torch::Tensor x) { - return forward_model<ResNet18>(input_path, x); -} -torch::Tensor forward_resnet34(const std::string& input_path, torch::Tensor x) { - return forward_model<ResNet34>(input_path, x); -} -torch::Tensor forward_resnet50(const std::string& input_path, torch::Tensor x) { - return forward_model<ResNet50>(input_path, x); -} -torch::Tensor forward_resnet101( - const std::string& input_path, - torch::Tensor x) { - return forward_model<ResNet101>(input_path, x); -} -torch::Tensor forward_resnet152( - const std::string& input_path, - torch::Tensor x) { - return forward_model<ResNet152>(input_path, x); -} -torch::Tensor forward_resnext50_32x4d( - const std::string& input_path, - torch::Tensor x) { - return forward_model<ResNext50_32x4d>(input_path, x); -} -torch::Tensor forward_resnext101_32x8d( - const std::string& input_path, - torch::Tensor x) { - return forward_model<ResNext101_32x8d>(input_path, x); -} -torch::Tensor forward_wide_resnet50_2( - const std::string& input_path, - torch::Tensor x) { - return forward_model<WideResNet50_2>(input_path, x); -} -torch::Tensor forward_wide_resnet101_2( - const std::string& input_path, - torch::Tensor x) { - return forward_model<WideResNet101_2>(input_path, x); -} - -torch::Tensor forward_squeezenet1_0( - const std::string& input_path, - torch::Tensor x) { - return forward_model<SqueezeNet1_0>(input_path, x); -} -torch::Tensor forward_squeezenet1_1( - const std::string& input_path, - torch::Tensor x) { - return forward_model<SqueezeNet1_1>(input_path, x); -} - -torch::Tensor forward_densenet121( - const std::string& input_path, - torch::Tensor x) { - return forward_model<DenseNet121>(input_path, x); -} -torch::Tensor forward_densenet169( - const std::string& input_path, - torch::Tensor x) { - return forward_model<DenseNet169>(input_path, x); -} -torch::Tensor forward_densenet201( - const std::string& input_path, - torch::Tensor x) { - return forward_model<DenseNet201>(input_path, x); -} -torch::Tensor forward_densenet161( - const std::string& input_path, - torch::Tensor x) { - return forward_model<DenseNet161>(input_path, x); -} - -torch::Tensor forward_mobilenetv2( - const std::string& input_path, - torch::Tensor x) { - return forward_model<MobileNetV2>(input_path, x); -} - -torch::Tensor forward_googlenet( - const std::string& input_path, - torch::Tensor x) { - GoogLeNet network; - torch::load(network, input_path); - network->eval(); - return network->forward(x).output; -} -torch::Tensor forward_inceptionv3( - const std::string& input_path, - torch::Tensor x) { - InceptionV3 network; - torch::load(network, input_path); - network->eval(); - return network->forward(x).output; -} - -torch::Tensor forward_mnasnet0_5(const std::string& input_path, torch::Tensor x) { - return forward_model<MNASNet0_5>(input_path, x); -} -torch::Tensor forward_mnasnet0_75(const std::string& input_path, torch::Tensor x) { - return forward_model<MNASNet0_75>(input_path, x); -} -torch::Tensor forward_mnasnet1_0(const std::string& input_path, torch::Tensor x) { - return forward_model<MNASNet1_0>(input_path, x); -} -torch::Tensor forward_mnasnet1_3(const std::string& input_path, torch::Tensor x) { - return forward_model<MNASNet1_3>(input_path, x); -} - -PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) { - m.def("forward_alexnet", &forward_alexnet, "forward_alexnet"); - - m.def("forward_vgg11", 
&forward_vgg11, "forward_vgg11"); - m.def("forward_vgg13", &forward_vgg13, "forward_vgg13"); - m.def("forward_vgg16", &forward_vgg16, "forward_vgg16"); - m.def("forward_vgg19", &forward_vgg19, "forward_vgg19"); - - m.def("forward_vgg11bn", &forward_vgg11bn, "forward_vgg11bn"); - m.def("forward_vgg13bn", &forward_vgg13bn, "forward_vgg13bn"); - m.def("forward_vgg16bn", &forward_vgg16bn, "forward_vgg16bn"); - m.def("forward_vgg19bn", &forward_vgg19bn, "forward_vgg19bn"); - - m.def("forward_resnet18", &forward_resnet18, "forward_resnet18"); - m.def("forward_resnet34", &forward_resnet34, "forward_resnet34"); - m.def("forward_resnet50", &forward_resnet50, "forward_resnet50"); - m.def("forward_resnet101", &forward_resnet101, "forward_resnet101"); - m.def("forward_resnet152", &forward_resnet152, "forward_resnet152"); - m.def( - "forward_resnext50_32x4d", - &forward_resnext50_32x4d, - "forward_resnext50_32x4d"); - m.def( - "forward_resnext101_32x8d", - &forward_resnext101_32x8d, - "forward_resnext101_32x8d"); - m.def( - "forward_wide_resnet50_2", - &forward_wide_resnet50_2, - "forward_wide_resnet50_2"); - m.def( - "forward_wide_resnet101_2", - &forward_wide_resnet101_2, - "forward_wide_resnet101_2"); - - m.def( - "forward_squeezenet1_0", &forward_squeezenet1_0, "forward_squeezenet1_0"); - m.def( - "forward_squeezenet1_1", &forward_squeezenet1_1, "forward_squeezenet1_1"); - - m.def("forward_densenet121", &forward_densenet121, "forward_densenet121"); - m.def("forward_densenet169", &forward_densenet169, "forward_densenet169"); - m.def("forward_densenet201", &forward_densenet201, "forward_densenet201"); - m.def("forward_densenet161", &forward_densenet161, "forward_densenet161"); - - m.def("forward_mobilenetv2", &forward_mobilenetv2, "forward_mobilenetv2"); - - m.def("forward_googlenet", &forward_googlenet, "forward_googlenet"); - m.def("forward_inceptionv3", &forward_inceptionv3, "forward_inceptionv3"); - - m.def("forward_mnasnet0_5", &forward_mnasnet0_5, "forward_mnasnet0_5"); - m.def("forward_mnasnet0_75", &forward_mnasnet0_75, "forward_mnasnet0_75"); - m.def("forward_mnasnet1_0", &forward_mnasnet1_0, "forward_mnasnet1_0"); - m.def("forward_mnasnet1_3", &forward_mnasnet1_3, "forward_mnasnet1_3"); -} diff --git a/test/test_models.py b/test/test_models.py index c70ef6830bf..202bbdbd0cd 100644 --- a/test/test_models.py +++ b/test/test_models.py @@ -1,118 +1,831 @@ -from common_utils import TestCase, map_nested_tensor_object +import contextlib +import functools +import operator +import os +import pkgutil +import platform +import sys +import warnings from collections import OrderedDict -from itertools import product +from tempfile import TemporaryDirectory +from typing import Any + +import pytest import torch -import numpy as np -from torchvision import models -import unittest -import traceback -import random +import torch.fx +import torch.nn as nn +from _utils_internal import get_relative_path +from common_utils import cpu_and_cuda, freeze_rng_state, map_nested_tensor_object, needs_cuda, set_rng_seed +from PIL import Image +from torchvision import models, transforms +from torchvision.models import get_model_builder, list_models + + +ACCEPT = os.getenv("EXPECTTEST_ACCEPT", "0") == "1" +SKIP_BIG_MODEL = os.getenv("SKIP_BIG_MODEL", "1") == "1" + + +def list_model_fns(module): + return [get_model_builder(name) for name in list_models(module)] + + +def _get_image(input_shape, real_image, device, dtype=None): + """This routine loads a real or random image based on the `real_image` argument. 
+ Currently, the real image is utilized for the following list of models: + - `retinanet_resnet50_fpn`, + - `retinanet_resnet50_fpn_v2`, + - `keypointrcnn_resnet50_fpn`, + - `fasterrcnn_resnet50_fpn`, + - `fasterrcnn_resnet50_fpn_v2`, + - `fcos_resnet50_fpn`, + - `maskrcnn_resnet50_fpn`, + - `maskrcnn_resnet50_fpn_v2`, + in `test_classification_model` and `test_detection_model`. + To do so, a keyword argument `real_image` was added to the above-listed models in `_model_params` + """ + if real_image: + # TODO: Maybe unify file discovery logic with test_image.py + GRACE_HOPPER = os.path.join( + os.path.dirname(os.path.abspath(__file__)), "assets", "encode_jpeg", "grace_hopper_517x606.jpg" + ) + + img = Image.open(GRACE_HOPPER) + + original_width, original_height = img.size + + # make the image square + img = img.crop((0, 0, original_width, original_width)) + img = img.resize(input_shape[1:3]) + + convert_tensor = transforms.ToTensor() + image = convert_tensor(img) + assert tuple(image.size()) == input_shape + return image.to(device=device, dtype=dtype) + + # RNG always on CPU, to ensure x in cuda tests is bitwise identical to x in cpu tests + return torch.rand(input_shape).to(device=device, dtype=dtype) + + +@pytest.fixture +def disable_weight_loading(mocker): + """When testing models, the two slowest operations are the downloading of the weights to a file and loading them + into the model. Unless you want to test against specific weights, these steps can be disabled without any + drawbacks. + + Including this fixture into the signature of your test, i.e. `test_foo(disable_weight_loading)`, will recurse + through all models in `torchvision.models` and will patch all occurrences of the function + `load_state_dict_from_url` as well as the method `load_state_dict` on all subclasses of `nn.Module` to be + no-ops. + + .. warning:: + + Loaded models are still executable as normal, but will always have random weights. Make sure to not use this + fixture if you want to compare the model output against reference values. 
+ + """ + starting_point = models + function_name = "load_state_dict_from_url" + method_name = "load_state_dict" + + module_names = {info.name for info in pkgutil.walk_packages(starting_point.__path__, f"{starting_point.__name__}.")} + targets = {f"torchvision._internally_replaced_utils.{function_name}", f"torch.nn.Module.{method_name}"} + for name in module_names: + module = sys.modules.get(name) + if not module: + continue + + if function_name in module.__dict__: + targets.add(f"{module.__name__}.{function_name}") + targets.update( + { + f"{module.__name__}.{obj.__name__}.{method_name}" + for obj in module.__dict__.values() + if isinstance(obj, type) and issubclass(obj, nn.Module) and method_name in obj.__dict__ + } + ) -def set_rng_seed(seed): - torch.manual_seed(seed) - random.seed(seed) - np.random.seed(seed) + for target in targets: + # See https://github.com/pytorch/vision/pull/4867#discussion_r743677802 for details + with contextlib.suppress(AttributeError): + mocker.patch(target) -def get_available_classification_models(): - # TODO add a registration mechanism to torchvision.models - return [k for k, v in models.__dict__.items() if callable(v) and k[0].lower() == k[0] and k[0] != "_"] +def _get_expected_file(name=None): + # Determine expected file based on environment + expected_file_base = get_relative_path(os.path.realpath(__file__), "expect") + # Note: for legacy reasons, the reference file names all had "ModelTest.test_" in their names + # We hardcode it here to avoid having to re-generate the reference files + expected_file = os.path.join(expected_file_base, "ModelTester.test_" + name) + expected_file += "_expect.pkl" -def get_available_segmentation_models(): - # TODO add a registration mechanism to torchvision.models - return [k for k, v in models.segmentation.__dict__.items() if callable(v) and k[0].lower() == k[0] and k[0] != "_"] + if not ACCEPT and not os.path.exists(expected_file): + raise RuntimeError( + f"No expect file exists for {os.path.basename(expected_file)} in {expected_file}; " + "to accept the current output, re-run the failing test after setting the EXPECTTEST_ACCEPT " + "env variable. For example: EXPECTTEST_ACCEPT=1 pytest test/test_models.py -k alexnet" + ) + return expected_file + + +def _assert_expected(output, name, prec=None, atol=None, rtol=None): + """Test that a python value matches the recorded contents of a file + based on a "check" name. The value must be + pickable with `torch.save`. This file + is placed in the 'expect' directory in the same directory + as the test script. You can automatically update the recorded test + output using an EXPECTTEST_ACCEPT=1 env variable. 
+ """ + expected_file = _get_expected_file(name) + + if ACCEPT: + filename = {os.path.basename(expected_file)} + print(f"Accepting updated output for {filename}:\n\n{output}") + torch.save(output, expected_file) + MAX_PICKLE_SIZE = 50 * 1000 # 50 KB + binary_size = os.path.getsize(expected_file) + if binary_size > MAX_PICKLE_SIZE: + raise RuntimeError(f"The output for {filename}, is larger than 50kb - got {binary_size}kb") + else: + expected = torch.load(expected_file, weights_only=True) + rtol = rtol or prec # keeping prec param for legacy reason, but could be removed ideally + atol = atol or prec + torch.testing.assert_close(output, expected, rtol=rtol, atol=atol, check_dtype=False, check_device=False) + + +def _check_jit_scriptable(nn_module, args, unwrapper=None, eager_out=None): + """Check that a nn.Module's results in TorchScript match eager and that it can be exported""" + + def get_export_import_copy(m): + """Save and load a TorchScript model""" + with TemporaryDirectory() as dir: + path = os.path.join(dir, "script.pt") + m.save(path) + imported = torch.jit.load(path) + return imported + + sm = torch.jit.script(nn_module) + sm.eval() + + if eager_out is None: + with torch.no_grad(), freeze_rng_state(): + eager_out = nn_module(*args) + + with torch.no_grad(), freeze_rng_state(): + script_out = sm(*args) + if unwrapper: + script_out = unwrapper(script_out) + + torch.testing.assert_close(eager_out, script_out, atol=1e-4, rtol=1e-4) + + m_import = get_export_import_copy(sm) + with torch.no_grad(), freeze_rng_state(): + imported_script_out = m_import(*args) + if unwrapper: + imported_script_out = unwrapper(imported_script_out) + + torch.testing.assert_close(script_out, imported_script_out, atol=3e-4, rtol=3e-4) + + +def _check_fx_compatible(model, inputs, eager_out=None): + model_fx = torch.fx.symbolic_trace(model) + if eager_out is None: + eager_out = model(inputs) + with torch.no_grad(), freeze_rng_state(): + fx_out = model_fx(inputs) + torch.testing.assert_close(eager_out, fx_out) + + +def _check_input_backprop(model, inputs): + if isinstance(inputs, list): + requires_grad = list() + for inp in inputs: + requires_grad.append(inp.requires_grad) + inp.requires_grad_(True) + else: + requires_grad = inputs.requires_grad + inputs.requires_grad_(True) + + out = model(inputs) + + if isinstance(out, dict): + out["out"].sum().backward() + else: + if isinstance(out[0], dict): + out[0]["scores"].sum().backward() + else: + out[0].sum().backward() -def get_available_detection_models(): - # TODO add a registration mechanism to torchvision.models - return [k for k, v in models.detection.__dict__.items() if callable(v) and k[0].lower() == k[0] and k[0] != "_"] + if isinstance(inputs, list): + for i, inp in enumerate(inputs): + assert inputs[i].grad is not None + inp.requires_grad_(requires_grad[i]) + else: + assert inputs.grad is not None + inputs.requires_grad_(requires_grad) -def get_available_video_models(): - # TODO add a registration mechanism to torchvision.models - return [k for k, v in models.video.__dict__.items() if callable(v) and k[0].lower() == k[0] and k[0] != "_"] +# If 'unwrapper' is provided it will be called with the script model outputs +# before they are compared to the eager model outputs. 
This is useful if the +# model outputs are different between TorchScript / Eager mode +script_model_unwrapper = { + "googlenet": lambda x: x.logits, + "inception_v3": lambda x: x.logits, + "fasterrcnn_resnet50_fpn": lambda x: x[1], + "fasterrcnn_resnet50_fpn_v2": lambda x: x[1], + "fasterrcnn_mobilenet_v3_large_fpn": lambda x: x[1], + "fasterrcnn_mobilenet_v3_large_320_fpn": lambda x: x[1], + "maskrcnn_resnet50_fpn": lambda x: x[1], + "maskrcnn_resnet50_fpn_v2": lambda x: x[1], + "keypointrcnn_resnet50_fpn": lambda x: x[1], + "retinanet_resnet50_fpn": lambda x: x[1], + "retinanet_resnet50_fpn_v2": lambda x: x[1], + "ssd300_vgg16": lambda x: x[1], + "ssdlite320_mobilenet_v3_large": lambda x: x[1], + "fcos_resnet50_fpn": lambda x: x[1], +} -# models that are in torch hub, as well as r3d_18. we tried testing all models -# but the test was too slow. not included are detection models, because -# they are not yet supported in JIT. -script_test_models = [ +# The following models exhibit flaky numerics under autocast in _test_*_model harnesses. +# This may be caused by the harness environment (e.g. num classes, input initialization +# via torch.rand), and does not prove autocast is unsuitable when training with real data +# (autocast has been used successfully with real data for some of these models). +# TODO: investigate why autocast numerics are flaky in the harnesses. +# +# For the following models, _test_*_model harnesses skip numerical checks on outputs when +# trying autocast. However, they still try an autocasted forward pass, so they still ensure +# autocast coverage suffices to prevent dtype errors in each model. +autocast_flaky_numerics = ( + "inception_v3", + "resnet101", + "resnet152", + "wide_resnet101_2", + "deeplabv3_resnet50", "deeplabv3_resnet101", - "mobilenet_v2", - "resnext50_32x4d", + "deeplabv3_mobilenet_v3_large", + "fcn_resnet50", "fcn_resnet101", - "googlenet", - "densenet121", - "resnet18", - "alexnet", - "shufflenet_v2_x1_0", - "squeezenet1_0", - "vgg11", - "inception_v3", - 'r3d_18', + "lraspp_mobilenet_v3_large", + "maskrcnn_resnet50_fpn", + "maskrcnn_resnet50_fpn_v2", + "keypointrcnn_resnet50_fpn", +) + +# The tests for the following quantized models are flaky possibly due to inconsistent +# rounding errors in different platforms. For this reason the input/output consistency +# tests under test_quantized_classification_model will be skipped for the following models. +quantized_flaky_models = ("inception_v3", "resnet50") + +# The tests for the following detection models are flaky. +# We run those tests on float64 to avoid floating point errors. +# FIXME: we shouldn't have to do that :'/ +detection_flaky_models = ("keypointrcnn_resnet50_fpn", "maskrcnn_resnet50_fpn", "maskrcnn_resnet50_fpn_v2") + + +# The following contains configuration parameters for all models which are used by +# the _test_*_model methods. 
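+# A sketch of an entry's shape (the model name here is hypothetical, for illustration only; +# the keys are the ones consumed by the tests below): +# _model_params["example_model"] = {"num_classes": 50, "input_shape": (1, 3, 224, 224), "real_image": True}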
+_model_params = { + "inception_v3": {"input_shape": (1, 3, 299, 299), "init_weights": True}, + "retinanet_resnet50_fpn": { + "num_classes": 20, + "score_thresh": 0.01, + "min_size": 224, + "max_size": 224, + "input_shape": (3, 224, 224), + "real_image": True, + }, + "retinanet_resnet50_fpn_v2": { + "num_classes": 20, + "score_thresh": 0.01, + "min_size": 224, + "max_size": 224, + "input_shape": (3, 224, 224), + "real_image": True, + }, + "keypointrcnn_resnet50_fpn": { + "num_classes": 2, + "min_size": 224, + "max_size": 224, + "box_score_thresh": 0.17, + "input_shape": (3, 224, 224), + "real_image": True, + }, + "fasterrcnn_resnet50_fpn": { + "num_classes": 20, + "min_size": 224, + "max_size": 224, + "input_shape": (3, 224, 224), + "real_image": True, + }, + "fasterrcnn_resnet50_fpn_v2": { + "num_classes": 20, + "min_size": 224, + "max_size": 224, + "input_shape": (3, 224, 224), + "real_image": True, + }, + "fcos_resnet50_fpn": { + "num_classes": 2, + "score_thresh": 0.05, + "min_size": 224, + "max_size": 224, + "input_shape": (3, 224, 224), + "real_image": True, + }, + "maskrcnn_resnet50_fpn": { + "num_classes": 10, + "min_size": 224, + "max_size": 224, + "input_shape": (3, 224, 224), + "real_image": True, + }, + "maskrcnn_resnet50_fpn_v2": { + "num_classes": 10, + "min_size": 224, + "max_size": 224, + "input_shape": (3, 224, 224), + "real_image": True, + }, + "fasterrcnn_mobilenet_v3_large_fpn": { + "box_score_thresh": 0.02076, + }, + "fasterrcnn_mobilenet_v3_large_320_fpn": { + "box_score_thresh": 0.02076, + "rpn_pre_nms_top_n_test": 1000, + "rpn_post_nms_top_n_test": 1000, + }, + "vit_h_14": { + "image_size": 56, + "input_shape": (1, 3, 56, 56), + }, + "mvit_v1_b": { + "input_shape": (1, 3, 16, 224, 224), + }, + "mvit_v2_s": { + "input_shape": (1, 3, 16, 224, 224), + }, + "s3d": { + "input_shape": (1, 3, 16, 224, 224), + }, + "googlenet": {"init_weights": True}, +} +# speeding up slow models: +slow_models = [ + "convnext_base", + "convnext_large", + "resnext101_32x8d", + "resnext101_64x4d", + "wide_resnet101_2", + "efficientnet_b6", + "efficientnet_b7", + "efficientnet_v2_m", + "efficientnet_v2_l", + "regnet_y_16gf", + "regnet_y_32gf", + "regnet_y_128gf", + "regnet_x_16gf", + "regnet_x_32gf", + "swin_t", + "swin_s", + "swin_b", + "swin_v2_t", + "swin_v2_s", + "swin_v2_b", ] +for m in slow_models: + _model_params[m] = {"input_shape": (1, 3, 64, 64)} -class ModelTester(TestCase): - def check_script(self, model, name): - if name not in script_test_models: - return - scriptable = True - msg = "" - try: - torch.jit.script(model) - except Exception as e: - tb = traceback.format_exc() - scriptable = False - msg = str(e) + str(tb) - self.assertTrue(scriptable, msg) - - def _test_classification_model(self, name, input_shape): - # passing num_class equal to a number other than 1000 helps in making the test - # more enforcing in nature - set_rng_seed(0) - model = models.__dict__[name](num_classes=50) - self.check_script(model, name) - model.eval() - x = torch.rand(input_shape) - out = model(x) - self.assertExpected(out, rtol=1e-2, atol=0.) - self.assertEqual(out.shape[-1], 50) - - def _test_segmentation_model(self, name): - # passing num_class equal to a number other than 1000 helps in making the test - # more enforcing in nature - model = models.segmentation.__dict__[name](num_classes=50, pretrained_backbone=False) - self.check_script(model, name) - model.eval() - input_shape = (1, 3, 300, 300) - x = torch.rand(input_shape) +# skip big models to reduce memory usage on CI test. 
We can exclude combinations of (platform-system, device). +skipped_big_models = { + "vit_h_14": {("Windows", "cpu"), ("Windows", "cuda")}, + "regnet_y_128gf": {("Windows", "cpu"), ("Windows", "cuda")}, + "mvit_v1_b": {("Windows", "cuda"), ("Linux", "cuda")}, + "mvit_v2_s": {("Windows", "cuda"), ("Linux", "cuda")}, +} + + +def is_skippable(model_name, device): + if model_name not in skipped_big_models: + return False + + platform_system = platform.system() + device_name = str(device).split(":")[0] + + return (platform_system, device_name) in skipped_big_models[model_name] + + +# The following contains configuration and expected values to be used in tests that are model-specific +_model_tests_values = { + "retinanet_resnet50_fpn": { + "max_trainable": 5, + "n_trn_params_per_layer": [36, 46, 65, 78, 88, 89], + }, + "retinanet_resnet50_fpn_v2": { + "max_trainable": 5, + "n_trn_params_per_layer": [44, 74, 131, 170, 200, 203], + }, + "keypointrcnn_resnet50_fpn": { + "max_trainable": 5, + "n_trn_params_per_layer": [48, 58, 77, 90, 100, 101], + }, + "fasterrcnn_resnet50_fpn": { + "max_trainable": 5, + "n_trn_params_per_layer": [30, 40, 59, 72, 82, 83], + }, + "fasterrcnn_resnet50_fpn_v2": { + "max_trainable": 5, + "n_trn_params_per_layer": [50, 80, 137, 176, 206, 209], + }, + "maskrcnn_resnet50_fpn": { + "max_trainable": 5, + "n_trn_params_per_layer": [42, 52, 71, 84, 94, 95], + }, + "maskrcnn_resnet50_fpn_v2": { + "max_trainable": 5, + "n_trn_params_per_layer": [66, 96, 153, 192, 222, 225], + }, + "fasterrcnn_mobilenet_v3_large_fpn": { + "max_trainable": 6, + "n_trn_params_per_layer": [22, 23, 44, 70, 91, 97, 100], + }, + "fasterrcnn_mobilenet_v3_large_320_fpn": { + "max_trainable": 6, + "n_trn_params_per_layer": [22, 23, 44, 70, 91, 97, 100], + }, + "ssd300_vgg16": { + "max_trainable": 5, + "n_trn_params_per_layer": [45, 51, 57, 63, 67, 71], + }, + "ssdlite320_mobilenet_v3_large": { + "max_trainable": 6, + "n_trn_params_per_layer": [96, 99, 138, 200, 239, 257, 266], + }, + "fcos_resnet50_fpn": { + "max_trainable": 5, + "n_trn_params_per_layer": [54, 64, 83, 96, 106, 107], + }, +} + + +def _make_sliced_model(model, stop_layer): + layers = OrderedDict() + for name, layer in model.named_children(): + layers[name] = layer + if name == stop_layer: + break + new_model = torch.nn.Sequential(layers) + return new_model + + +@pytest.mark.parametrize("model_fn", [models.densenet121, models.densenet169, models.densenet201, models.densenet161]) +def test_memory_efficient_densenet(model_fn): + input_shape = (1, 3, 300, 300) + x = torch.rand(input_shape) + + model1 = model_fn(num_classes=50, memory_efficient=True) + params = model1.state_dict() + num_params = sum(x.numel() for x in model1.parameters()) + model1.eval() + out1 = model1(x) + out1.sum().backward() + num_grad = sum(x.grad.numel() for x in model1.parameters() if x.grad is not None) + + model2 = model_fn(num_classes=50, memory_efficient=False) + model2.load_state_dict(params) + model2.eval() + out2 = model2(x) + + assert num_params == num_grad + torch.testing.assert_close(out1, out2, rtol=0.0, atol=1e-5) + + _check_input_backprop(model1, x) + _check_input_backprop(model2, x) + + +@pytest.mark.parametrize("dilate_layer_2", (True, False)) +@pytest.mark.parametrize("dilate_layer_3", (True, False)) +@pytest.mark.parametrize("dilate_layer_4", (True, False)) +def test_resnet_dilation(dilate_layer_2, dilate_layer_3, dilate_layer_4): + # TODO improve tests to also check that each layer has the right dimensionality + model = 
models.resnet50(replace_stride_with_dilation=(dilate_layer_2, dilate_layer_3, dilate_layer_4)) + model = _make_sliced_model(model, stop_layer="layer4") + model.eval() + x = torch.rand(1, 3, 224, 224) + out = model(x) + f = 2 ** sum((dilate_layer_2, dilate_layer_3, dilate_layer_4)) + assert out.shape == (1, 2048, 7 * f, 7 * f) + + +def test_mobilenet_v2_residual_setting(): + model = models.mobilenet_v2(inverted_residual_setting=[[1, 16, 1, 1], [6, 24, 2, 2]]) + model.eval() + x = torch.rand(1, 3, 224, 224) + out = model(x) + assert out.shape[-1] == 1000 + + +@pytest.mark.parametrize("model_fn", [models.mobilenet_v2, models.mobilenet_v3_large, models.mobilenet_v3_small]) +def test_mobilenet_norm_layer(model_fn): + model = model_fn() + assert any(isinstance(x, nn.BatchNorm2d) for x in model.modules()) + + def get_gn(num_channels): + return nn.GroupNorm(1, num_channels) + + model = model_fn(norm_layer=get_gn) + assert not (any(isinstance(x, nn.BatchNorm2d) for x in model.modules())) + assert any(isinstance(x, nn.GroupNorm) for x in model.modules()) + + +def test_inception_v3_eval(): + kwargs = {} + kwargs["transform_input"] = True + kwargs["aux_logits"] = True + kwargs["init_weights"] = False + name = "inception_v3" + model = models.Inception3(**kwargs) + model.aux_logits = False + model.AuxLogits = None + model = model.eval() + x = torch.rand(1, 3, 299, 299) + _check_jit_scriptable(model, (x,), unwrapper=script_model_unwrapper.get(name, None)) + _check_input_backprop(model, x) + + +def test_fasterrcnn_double(): + model = models.detection.fasterrcnn_resnet50_fpn(num_classes=50, weights=None, weights_backbone=None) + model.double() + model.eval() + input_shape = (3, 300, 300) + x = torch.rand(input_shape, dtype=torch.float64) + model_input = [x] + out = model(model_input) + assert model_input[0] is x + assert len(out) == 1 + assert "boxes" in out[0] + assert "scores" in out[0] + assert "labels" in out[0] + _check_input_backprop(model, model_input) + + +def test_googlenet_eval(): + kwargs = {} + kwargs["transform_input"] = True + kwargs["aux_logits"] = True + kwargs["init_weights"] = False + name = "googlenet" + model = models.GoogLeNet(**kwargs) + model.aux_logits = False + model.aux1 = None + model.aux2 = None + model = model.eval() + x = torch.rand(1, 3, 224, 224) + _check_jit_scriptable(model, (x,), unwrapper=script_model_unwrapper.get(name, None)) + _check_input_backprop(model, x) + + +@needs_cuda +def test_fasterrcnn_switch_devices(): + def checkOut(out): + assert len(out) == 1 + assert "boxes" in out[0] + assert "scores" in out[0] + assert "labels" in out[0] + + model = models.detection.fasterrcnn_resnet50_fpn(num_classes=50, weights=None, weights_backbone=None) + model.cuda() + model.eval() + input_shape = (3, 300, 300) + x = torch.rand(input_shape, device="cuda") + model_input = [x] + out = model(model_input) + assert model_input[0] is x + + checkOut(out) + + with torch.cuda.amp.autocast(): + out = model(model_input) + + checkOut(out) + + _check_input_backprop(model, model_input) + + # now switch to cpu and make sure it works + model.cpu() + x = x.cpu() + out_cpu = model([x]) + + checkOut(out_cpu) + + _check_input_backprop(model, [x]) + + +def test_generalizedrcnn_transform_repr(): + + min_size, max_size = 224, 299 + image_mean = [0.485, 0.456, 0.406] + image_std = [0.229, 0.224, 0.225] + + t = models.detection.transform.GeneralizedRCNNTransform( + min_size=min_size, max_size=max_size, image_mean=image_mean, image_std=image_std + ) + + # Check integrity of object __repr__ attribute + 
expected_string = "GeneralizedRCNNTransform(" + _indent = "\n " + expected_string += f"{_indent}Normalize(mean={image_mean}, std={image_std})" + expected_string += f"{_indent}Resize(min_size=({min_size},), max_size={max_size}, " + expected_string += "mode='bilinear')\n)" + assert t.__repr__() == expected_string + + +test_vit_conv_stem_configs = [ + models.vision_transformer.ConvStemConfig(kernel_size=3, stride=2, out_channels=64), + models.vision_transformer.ConvStemConfig(kernel_size=3, stride=2, out_channels=128), + models.vision_transformer.ConvStemConfig(kernel_size=3, stride=1, out_channels=128), + models.vision_transformer.ConvStemConfig(kernel_size=3, stride=2, out_channels=256), + models.vision_transformer.ConvStemConfig(kernel_size=3, stride=1, out_channels=256), + models.vision_transformer.ConvStemConfig(kernel_size=3, stride=2, out_channels=512), +] + + +def vitc_b_16(**kwargs: Any): + return models.VisionTransformer( + image_size=224, + patch_size=16, + num_layers=12, + num_heads=12, + hidden_dim=768, + mlp_dim=3072, + conv_stem_configs=test_vit_conv_stem_configs, + **kwargs, + ) + + +@pytest.mark.parametrize("model_fn", [vitc_b_16]) +@pytest.mark.parametrize("dev", cpu_and_cuda()) +def test_vitc_models(model_fn, dev): + test_classification_model(model_fn, dev) + + +@torch.backends.cudnn.flags(allow_tf32=False) # see: https://github.com/pytorch/vision/issues/7618 +@pytest.mark.parametrize("model_fn", list_model_fns(models)) +@pytest.mark.parametrize("dev", cpu_and_cuda()) +def test_classification_model(model_fn, dev): + set_rng_seed(0) + defaults = { + "num_classes": 50, + "input_shape": (1, 3, 224, 224), + } + model_name = model_fn.__name__ + if SKIP_BIG_MODEL and is_skippable(model_name, dev): + pytest.skip("Skipped to reduce memory usage. Set env var SKIP_BIG_MODEL=0 to enable test for this model") + kwargs = {**defaults, **_model_params.get(model_name, {})} + num_classes = kwargs.get("num_classes") + input_shape = kwargs.pop("input_shape") + real_image = kwargs.pop("real_image", False) + + model = model_fn(**kwargs) + model.eval().to(device=dev) + x = _get_image(input_shape=input_shape, real_image=real_image, device=dev) + out = model(x) + # FIXME: this if/else is nasty and only here to please our CI prior to the + # release. We rethink these tests altogether. + if model_name == "resnet101": + prec = 0.2 + else: + # FIXME: this is probably still way too high. + prec = 0.1 + _assert_expected(out.cpu(), model_name, prec=prec) + assert out.shape[-1] == num_classes + _check_jit_scriptable(model, (x,), unwrapper=script_model_unwrapper.get(model_name, None), eager_out=out) + _check_fx_compatible(model, x, eager_out=out) + + if dev == "cuda": + with torch.cuda.amp.autocast(): + out = model(x) + # See autocast_flaky_numerics comment at top of file. 
+ if model_name not in autocast_flaky_numerics: + _assert_expected(out.cpu(), model_name, prec=0.1) + assert out.shape[-1] == 50 + + _check_input_backprop(model, x) + + +@pytest.mark.parametrize("model_fn", list_model_fns(models.segmentation)) +@pytest.mark.parametrize("dev", cpu_and_cuda()) +def test_segmentation_model(model_fn, dev): + set_rng_seed(0) + defaults = { + "num_classes": 10, + "weights_backbone": None, + "input_shape": (1, 3, 32, 32), + } + model_name = model_fn.__name__ + kwargs = {**defaults, **_model_params.get(model_name, {})} + input_shape = kwargs.pop("input_shape") + + model = model_fn(**kwargs) + model.eval().to(device=dev) + # RNG always on CPU, to ensure x in cuda tests is bitwise identical to x in cpu tests + x = torch.rand(input_shape).to(device=dev) + with torch.no_grad(), freeze_rng_state(): out = model(x) - self.assertEqual(tuple(out["out"].shape), (1, 50, 300, 300)) - - def _test_detection_model(self, name): - set_rng_seed(0) - model = models.detection.__dict__[name](num_classes=50, pretrained_backbone=False) - self.check_script(model, name) - model.eval() - input_shape = (3, 300, 300) - x = torch.rand(input_shape) - model_input = [x] + + def check_out(out): + prec = 0.01 + try: + # We first try to assert the entire output if possible. This is not + # only the best way to assert results but also handles the cases + # where we need to create a new expected result. + _assert_expected(out.cpu(), model_name, prec=prec) + except AssertionError: + # Unfortunately some segmentation models are flaky with autocast + # so instead of validating the probability scores, check that the class + # predictions match. + expected_file = _get_expected_file(model_name) + expected = torch.load(expected_file, weights_only=True) + torch.testing.assert_close( + out.argmax(dim=1), expected.argmax(dim=1), rtol=prec, atol=prec, check_device=False + ) + return False # Partial validation performed + + return True # Full validation performed + + full_validation = check_out(out["out"]) + + _check_jit_scriptable(model, (x,), unwrapper=script_model_unwrapper.get(model_name, None), eager_out=out) + _check_fx_compatible(model, x, eager_out=out) + + if dev == "cuda": + with torch.cuda.amp.autocast(), torch.no_grad(), freeze_rng_state(): + out = model(x) + # See autocast_flaky_numerics comment at top of file. + if model_name not in autocast_flaky_numerics: + full_validation &= check_out(out["out"]) + + if not full_validation: + msg = ( + f"The output of {test_segmentation_model.__name__} could only be partially validated. " + "This is likely due to unit-test flakiness, but you may " + "want to do additional manual checks if you made " + "significant changes to the codebase." 
+ ) + warnings.warn(msg, RuntimeWarning) + pytest.skip(msg) + + _check_input_backprop(model, x) + + +@pytest.mark.parametrize("model_fn", list_model_fns(models.detection)) +@pytest.mark.parametrize("dev", cpu_and_cuda()) +def test_detection_model(model_fn, dev): + set_rng_seed(0) + defaults = { + "num_classes": 50, + "weights_backbone": None, + "input_shape": (3, 300, 300), + } + model_name = model_fn.__name__ + if model_name in detection_flaky_models: + dtype = torch.float64 + else: + dtype = torch.get_default_dtype() + kwargs = {**defaults, **_model_params.get(model_name, {})} + input_shape = kwargs.pop("input_shape") + real_image = kwargs.pop("real_image", False) + + model = model_fn(**kwargs) + model.eval().to(device=dev, dtype=dtype) + x = _get_image(input_shape=input_shape, real_image=real_image, device=dev, dtype=dtype) + model_input = [x] + with torch.no_grad(), freeze_rng_state(): out = model(model_input) - self.assertIs(model_input[0], x) - self.assertEqual(len(out), 1) + assert model_input[0] is x + + def check_out(out): + assert len(out) == 1 + + def compact(tensor): + tensor = tensor.cpu() + size = tensor.size() + elements_per_sample = functools.reduce(operator.mul, size[1:], 1) + if elements_per_sample > 30: + return compute_mean_std(tensor) + else: + return subsample_tensor(tensor) def subsample_tensor(tensor): - num_elems = tensor.numel() + num_elems = tensor.size(0) num_samples = 20 if num_elems <= num_samples: return tensor - flat_tensor = tensor.flatten() ith_index = num_elems // num_samples - return flat_tensor[ith_index - 1::ith_index] + return tensor[ith_index - 1 :: ith_index] def compute_mean_std(tensor): # can't compute mean of integral tensor @@ -121,128 +834,218 @@ def compute_mean_std(tensor): std = torch.std(tensor) return {"mean": mean, "std": std} - # maskrcnn_resnet_50_fpn numerically unstable across platforms, so for now - # compare results with mean and std - if name == "maskrcnn_resnet50_fpn": - test_value = map_nested_tensor_object(out, tensor_map_fn=compute_mean_std) - # mean values are small, use large rtol - self.assertExpected(test_value, rtol=.01, atol=.01) - else: - self.assertExpected(map_nested_tensor_object(out, tensor_map_fn=subsample_tensor)) - - self.assertTrue("boxes" in out[0]) - self.assertTrue("scores" in out[0]) - self.assertTrue("labels" in out[0]) - - def _test_video_model(self, name): - # the default input shape is - # bs * num_channels * clip_len * h *w - input_shape = (1, 3, 4, 112, 112) - # test both basicblock and Bottleneck - model = models.video.__dict__[name](num_classes=50) - self.check_script(model, name) - x = torch.rand(input_shape) - out = model(x) - self.assertEqual(out.shape[-1], 50) - - def _make_sliced_model(self, model, stop_layer): - layers = OrderedDict() - for name, layer in model.named_children(): - layers[name] = layer - if name == stop_layer: - break - new_model = torch.nn.Sequential(layers) - return new_model - - def test_memory_efficient_densenet(self): - input_shape = (1, 3, 300, 300) - x = torch.rand(input_shape) - - for name in ['densenet121', 'densenet169', 'densenet201', 'densenet161']: - model1 = models.__dict__[name](num_classes=50, memory_efficient=True) - params = model1.state_dict() - model1.eval() - out1 = model1(x) - out1.sum().backward() - - model2 = models.__dict__[name](num_classes=50, memory_efficient=False) - model2.load_state_dict(params) - model2.eval() - out2 = model2(x) - - max_diff = (out1 - out2).abs().max() - - self.assertTrue(max_diff < 1e-5) - - def test_resnet_dilation(self): - # 
TODO improve tests to also check that each layer has the right dimensionality - for i in product([False, True], [False, True], [False, True]): - model = models.__dict__["resnet50"](replace_stride_with_dilation=i) - model = self._make_sliced_model(model, stop_layer="layer4") - model.eval() - x = torch.rand(1, 3, 224, 224) + output = map_nested_tensor_object(out, tensor_map_fn=compact) + prec = 0.01 + try: + # We first try to assert the entire output if possible. This is not + # only the best way to assert results but also handles the cases + # where we need to create a new expected result. + _assert_expected(output, model_name, prec=prec) + except AssertionError: + # Unfortunately detection models are flaky due to the unstable sort + # in NMS. If matching across all outputs fails, use the same approach + # as in NMSTester.test_nms_cuda to see if this is caused by duplicate + # scores. + expected_file = _get_expected_file(model_name) + expected = torch.load(expected_file, weights_only=True) + torch.testing.assert_close( + output[0]["scores"], expected[0]["scores"], rtol=prec, atol=prec, check_device=False, check_dtype=False + ) + + # Note: Fmassa proposed turning off NMS by adapting the threshold + # and then using the Hungarian algorithm as in DETR to find the + # best match between output and expected boxes and eliminate some + # of the flakiness. Worth exploring. + return False # Partial validation performed + + return True # Full validation performed + + full_validation = check_out(out) + _check_jit_scriptable(model, ([x],), unwrapper=script_model_unwrapper.get(model_name, None), eager_out=out) + + if dev == "cuda": + with torch.cuda.amp.autocast(), torch.no_grad(), freeze_rng_state(): + out = model(model_input) + # See autocast_flaky_numerics comment at top of file. + if model_name not in autocast_flaky_numerics: + full_validation &= check_out(out) + + if not full_validation: + msg = ( + f"The output of {test_detection_model.__name__} could only be partially validated. " + "This is likely due to unit-test flakiness, but you may " + "want to do additional manual checks if you made " + "significant changes to the codebase." 
+        )
+        warnings.warn(msg, RuntimeWarning)
+        pytest.skip(msg)
+
+    _check_input_backprop(model, x)
+
+
+@pytest.mark.parametrize("model_fn", list_model_fns(models.detection))
+def test_detection_model_validation(model_fn):
+    set_rng_seed(0)
+    model = model_fn(num_classes=50, weights=None, weights_backbone=None)
+    input_shape = (3, 300, 300)
+    x = [torch.rand(input_shape)]
+
+    # validate that targets are present in training
+    with pytest.raises(AssertionError):
+        model(x)
+
+    # validate type
+    targets = [{"boxes": 0.0}]
+    with pytest.raises(AssertionError):
+        model(x, targets=targets)
+
+    # validate boxes shape
+    for boxes in (torch.rand((4,)), torch.rand((1, 5))):
+        targets = [{"boxes": boxes}]
+        with pytest.raises(AssertionError):
+            model(x, targets=targets)
+
+    # validate that no degenerate boxes are present
+    boxes = torch.tensor([[1, 3, 1, 4], [2, 4, 3, 4]])
+    targets = [{"boxes": boxes}]
+    with pytest.raises(AssertionError):
+        model(x, targets=targets)
+
+
+@pytest.mark.parametrize("model_fn", list_model_fns(models.video))
+@pytest.mark.parametrize("dev", cpu_and_cuda())
+def test_video_model(model_fn, dev):
+    set_rng_seed(0)
+    # the default input shape is
+    # bs * num_channels * clip_len * h * w
+    defaults = {
+        "input_shape": (1, 3, 4, 112, 112),
+        "num_classes": 50,
+    }
+    model_name = model_fn.__name__
+    if SKIP_BIG_MODEL and is_skippable(model_name, dev):
+        pytest.skip("Skipped to reduce memory usage. Set env var SKIP_BIG_MODEL=0 to enable test for this model")
+    kwargs = {**defaults, **_model_params.get(model_name, {})}
+    num_classes = kwargs.get("num_classes")
+    input_shape = kwargs.pop("input_shape")
+    # test both BasicBlock and Bottleneck
+    model = model_fn(**kwargs)
+    model.eval().to(device=dev)
+    # RNG always on CPU, to ensure x in cuda tests is bitwise identical to x in cpu tests
+    x = torch.rand(input_shape).to(device=dev)
+    out = model(x)
+    _assert_expected(out.cpu(), model_name, prec=0.1)
+    assert out.shape[-1] == num_classes
+    _check_jit_scriptable(model, (x,), unwrapper=script_model_unwrapper.get(model_name, None), eager_out=out)
+    _check_fx_compatible(model, x, eager_out=out)
+
+    if dev == "cuda":
+        with torch.cuda.amp.autocast():
             out = model(x)
-            f = 2 ** sum(i)
-            self.assertEqual(out.shape, (1, 2048, 7 * f, 7 * f))
+            # See autocast_flaky_numerics comment at top of file.
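+            # As in the classification test, flaky models skip only the expected-value comparison.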
+ if model_name not in autocast_flaky_numerics: + _assert_expected(out.cpu(), model_name, prec=0.1) + assert out.shape[-1] == num_classes - def test_mobilenetv2_residual_setting(self): - model = models.__dict__["mobilenet_v2"](inverted_residual_setting=[[1, 16, 1, 1], [6, 24, 2, 2]]) - model.eval() - x = torch.rand(1, 3, 224, 224) - out = model(x) - self.assertEqual(out.shape[-1], 1000) - - def test_fasterrcnn_double(self): - model = models.detection.fasterrcnn_resnet50_fpn(num_classes=50, pretrained_backbone=False) - model.double() - model.eval() - input_shape = (3, 300, 300) - x = torch.rand(input_shape, dtype=torch.float64) - model_input = [x] - out = model(model_input) - self.assertIs(model_input[0], x) - self.assertEqual(len(out), 1) - self.assertTrue("boxes" in out[0]) - self.assertTrue("scores" in out[0]) - self.assertTrue("labels" in out[0]) + _check_input_backprop(model, x) + + +@pytest.mark.skipif( + not ( + "fbgemm" in torch.backends.quantized.supported_engines + and "qnnpack" in torch.backends.quantized.supported_engines + ), + reason="This Pytorch Build has not been built with fbgemm and qnnpack", +) +@pytest.mark.parametrize("model_fn", list_model_fns(models.quantization)) +def test_quantized_classification_model(model_fn): + set_rng_seed(0) + defaults = { + "num_classes": 5, + "input_shape": (1, 3, 224, 224), + "quantize": True, + } + model_name = model_fn.__name__ + kwargs = {**defaults, **_model_params.get(model_name, {})} + input_shape = kwargs.pop("input_shape") + + # First check if quantize=True provides models that can run with input data + model = model_fn(**kwargs) + model.eval() + x = torch.rand(input_shape) + out = model(x) + if model_name not in quantized_flaky_models: + _assert_expected(out.cpu(), model_name + "_quantized", prec=2e-2) + assert out.shape[-1] == 5 + _check_jit_scriptable(model, (x,), unwrapper=script_model_unwrapper.get(model_name, None), eager_out=out) + _check_fx_compatible(model, x, eager_out=out) + else: + try: + torch.jit.script(model) + except Exception as e: + raise AssertionError("model cannot be scripted.") from e + + kwargs["quantize"] = False + for eval_mode in [True, False]: + model = model_fn(**kwargs) + if eval_mode: + model.eval() + model.qconfig = torch.ao.quantization.default_qconfig + else: + model.train() + model.qconfig = torch.ao.quantization.default_qat_qconfig + + model.fuse_model(is_qat=not eval_mode) + if eval_mode: + torch.ao.quantization.prepare(model, inplace=True) + else: + torch.ao.quantization.prepare_qat(model, inplace=True) + model.eval() -for model_name in get_available_classification_models(): - # for-loop bodies don't define scopes, so we have to save the variables - # we want to close over in some way - def do_test(self, model_name=model_name): - input_shape = (1, 3, 224, 224) - if model_name in ['inception_v3']: - input_shape = (1, 3, 299, 299) - self._test_classification_model(model_name, input_shape) + torch.ao.quantization.convert(model, inplace=True) - setattr(ModelTester, "test_" + model_name, do_test) +@pytest.mark.parametrize("model_fn", list_model_fns(models.detection)) +def test_detection_model_trainable_backbone_layers(model_fn, disable_weight_loading): + model_name = model_fn.__name__ + max_trainable = _model_tests_values[model_name]["max_trainable"] + n_trainable_params = [] + for trainable_layers in range(0, max_trainable + 1): + model = model_fn(weights=None, weights_backbone="DEFAULT", trainable_backbone_layers=trainable_layers) -for model_name in get_available_segmentation_models(): - # for-loop 
bodies don't define scopes, so we have to save the variables
-    # we want to close over in some way
-    def do_test(self, model_name=model_name):
-        self._test_segmentation_model(model_name)
+        n_trainable_params.append(len([p for p in model.parameters() if p.requires_grad]))
+    assert n_trainable_params == _model_tests_values[model_name]["n_trn_params_per_layer"]
-    setattr(ModelTester, "test_" + model_name, do_test)
+@needs_cuda
+@pytest.mark.parametrize("model_fn", list_model_fns(models.optical_flow))
+@pytest.mark.parametrize("scripted", (False, True))
+def test_raft(model_fn, scripted):
-for model_name in get_available_detection_models():
-    # for-loop bodies don't define scopes, so we have to save the variables
-    # we want to close over in some way
-    def do_test(self, model_name=model_name):
-        self._test_detection_model(model_name)
+    torch.manual_seed(0)
-    setattr(ModelTester, "test_" + model_name, do_test)
+    # We need very small images, otherwise the pickle size would exceed the 50KB limit
+    # As a result we need to override the correlation pyramid to not downsample
+    # too much, otherwise we would get nan values (effective H and W would be
+    # reduced to 1)
+    corr_block = models.optical_flow.raft.CorrBlock(num_levels=2, radius=2)
+    model = model_fn(corr_block=corr_block).eval().to("cuda")
+    if scripted:
+        model = torch.jit.script(model)
-for model_name in get_available_video_models():
+    bs = 1
+    img1 = torch.rand(bs, 3, 80, 72).cuda()
+    img2 = torch.rand(bs, 3, 80, 72).cuda()
-    def do_test(self, model_name=model_name):
-        self._test_video_model(model_name)
+    preds = model(img1, img2)
+    flow_pred = preds[-1]
+    # Tolerance is fairly high, but there are 2 * H * W outputs to check
+    # The .pkl files were generated on the AWS cluster; on the CI the results look slightly different
+    _assert_expected(flow_pred.cpu(), name=model_fn.__name__, atol=1e-2, rtol=1)
-    setattr(ModelTester, "test_" + model_name, do_test)
-if __name__ == '__main__':
-    unittest.main()
+if __name__ == "__main__":
+    pytest.main([__file__])
diff --git a/test/test_models_detection_anchor_utils.py b/test/test_models_detection_anchor_utils.py
new file mode 100644
index 00000000000..645d4624d64
--- /dev/null
+++ b/test/test_models_detection_anchor_utils.py
@@ -0,0 +1,99 @@
+import pytest
+import torch
+from common_utils import assert_equal
+from torchvision.models.detection.anchor_utils import AnchorGenerator, DefaultBoxGenerator
+from torchvision.models.detection.image_list import ImageList
+
+
+class Tester:
+    def test_incorrect_anchors(self):
+        incorrect_sizes = (
+            (2, 4, 8),
+            (32, 8),
+        )
+        incorrect_aspects = (0.5, 1.0)
+        anc = AnchorGenerator(incorrect_sizes, incorrect_aspects)
+        image1 = torch.randn(3, 800, 800)
+        image_list = ImageList(image1, [(800, 800)])
+        feature_maps = [torch.randn(1, 50)]
+        pytest.raises(AssertionError, anc, image_list, feature_maps)
+
+    def _init_test_anchor_generator(self):
+        anchor_sizes = ((10,),)
+        aspect_ratios = ((1,),)
+        anchor_generator = AnchorGenerator(anchor_sizes, aspect_ratios)
+
+        return anchor_generator
+
+    def _init_test_defaultbox_generator(self):
+        aspect_ratios = [[2]]
+        dbox_generator = DefaultBoxGenerator(aspect_ratios)
+
+        return dbox_generator
+
+    def get_features(self, images):
+        s0, s1 = images.shape[-2:]
+        features = [torch.rand(2, 8, s0 // 5, s1 // 5)]
+        return features
+
+    def test_anchor_generator(self):
+        images = torch.randn(2, 3, 15, 15)
+        features = self.get_features(images)
+        image_shapes = [i.shape[-2:] for i in images]
+        images = ImageList(images, image_shapes)
+
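+        # One size-10, aspect-ratio-1 anchor over the 3x3 feature grid of a 15x15
+        # image should yield 3 * 3 * 1 = 9 anchors per image on a stride-5 grid.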
+ model = self._init_test_anchor_generator() + model.eval() + anchors = model(images, features) + + # Estimate the number of target anchors + grid_sizes = [f.shape[-2:] for f in features] + num_anchors_estimated = 0 + for sizes, num_anchors_per_loc in zip(grid_sizes, model.num_anchors_per_location()): + num_anchors_estimated += sizes[0] * sizes[1] * num_anchors_per_loc + + anchors_output = torch.tensor( + [ + [-5.0, -5.0, 5.0, 5.0], + [0.0, -5.0, 10.0, 5.0], + [5.0, -5.0, 15.0, 5.0], + [-5.0, 0.0, 5.0, 10.0], + [0.0, 0.0, 10.0, 10.0], + [5.0, 0.0, 15.0, 10.0], + [-5.0, 5.0, 5.0, 15.0], + [0.0, 5.0, 10.0, 15.0], + [5.0, 5.0, 15.0, 15.0], + ] + ) + + assert num_anchors_estimated == 9 + assert len(anchors) == 2 + assert tuple(anchors[0].shape) == (9, 4) + assert tuple(anchors[1].shape) == (9, 4) + assert_equal(anchors[0], anchors_output) + assert_equal(anchors[1], anchors_output) + + def test_defaultbox_generator(self): + images = torch.zeros(2, 3, 15, 15) + features = [torch.zeros(2, 8, 1, 1)] + image_shapes = [i.shape[-2:] for i in images] + images = ImageList(images, image_shapes) + + model = self._init_test_defaultbox_generator() + model.eval() + dboxes = model(images, features) + + dboxes_output = torch.tensor( + [ + [6.3750, 6.3750, 8.6250, 8.6250], + [4.7443, 4.7443, 10.2557, 10.2557], + [5.9090, 6.7045, 9.0910, 8.2955], + [6.7045, 5.9090, 8.2955, 9.0910], + ] + ) + + assert len(dboxes) == 2 + assert tuple(dboxes[0].shape) == (4, 4) + assert tuple(dboxes[1].shape) == (4, 4) + torch.testing.assert_close(dboxes[0], dboxes_output, rtol=1e-5, atol=1e-8) + torch.testing.assert_close(dboxes[1], dboxes_output, rtol=1e-5, atol=1e-8) diff --git a/test/test_models_detection_negative_samples.py b/test/test_models_detection_negative_samples.py new file mode 100644 index 00000000000..c91cfdf20a7 --- /dev/null +++ b/test/test_models_detection_negative_samples.py @@ -0,0 +1,167 @@ +import pytest +import torch +import torchvision.models +from common_utils import assert_equal +from torchvision.models.detection.faster_rcnn import FastRCNNPredictor, TwoMLPHead +from torchvision.models.detection.roi_heads import RoIHeads +from torchvision.models.detection.rpn import AnchorGenerator, RegionProposalNetwork, RPNHead +from torchvision.ops import MultiScaleRoIAlign + + +class TestModelsDetectionNegativeSamples: + def _make_empty_sample(self, add_masks=False, add_keypoints=False): + images = [torch.rand((3, 100, 100), dtype=torch.float32)] + boxes = torch.zeros((0, 4), dtype=torch.float32) + negative_target = { + "boxes": boxes, + "labels": torch.zeros(0, dtype=torch.int64), + "image_id": 4, + "area": (boxes[:, 3] - boxes[:, 1]) * (boxes[:, 2] - boxes[:, 0]), + "iscrowd": torch.zeros((0,), dtype=torch.int64), + } + + if add_masks: + negative_target["masks"] = torch.zeros(0, 100, 100, dtype=torch.uint8) + + if add_keypoints: + negative_target["keypoints"] = torch.zeros(17, 0, 3, dtype=torch.float32) + + targets = [negative_target] + return images, targets + + def test_targets_to_anchors(self): + _, targets = self._make_empty_sample() + anchors = [torch.randint(-50, 50, (3, 4), dtype=torch.float32)] + + anchor_sizes = ((32,), (64,), (128,), (256,), (512,)) + aspect_ratios = ((0.5, 1.0, 2.0),) * len(anchor_sizes) + rpn_anchor_generator = AnchorGenerator(anchor_sizes, aspect_ratios) + rpn_head = RPNHead(4, rpn_anchor_generator.num_anchors_per_location()[0]) + + head = RegionProposalNetwork(rpn_anchor_generator, rpn_head, 0.5, 0.3, 256, 0.5, 2000, 2000, 0.7, 0.05) + + labels, matched_gt_boxes = 
head.assign_targets_to_anchors(anchors, targets) + + assert labels[0].sum() == 0 + assert labels[0].shape == torch.Size([anchors[0].shape[0]]) + assert labels[0].dtype == torch.float32 + + assert matched_gt_boxes[0].sum() == 0 + assert matched_gt_boxes[0].shape == anchors[0].shape + assert matched_gt_boxes[0].dtype == torch.float32 + + def test_assign_targets_to_proposals(self): + + proposals = [torch.randint(-50, 50, (20, 4), dtype=torch.float32)] + gt_boxes = [torch.zeros((0, 4), dtype=torch.float32)] + gt_labels = [torch.tensor([[0]], dtype=torch.int64)] + + box_roi_pool = MultiScaleRoIAlign(featmap_names=["0", "1", "2", "3"], output_size=7, sampling_ratio=2) + + resolution = box_roi_pool.output_size[0] + representation_size = 1024 + box_head = TwoMLPHead(4 * resolution**2, representation_size) + + representation_size = 1024 + box_predictor = FastRCNNPredictor(representation_size, 2) + + roi_heads = RoIHeads( + # Box + box_roi_pool, + box_head, + box_predictor, + 0.5, + 0.5, + 512, + 0.25, + None, + 0.05, + 0.5, + 100, + ) + + matched_idxs, labels = roi_heads.assign_targets_to_proposals(proposals, gt_boxes, gt_labels) + + assert matched_idxs[0].sum() == 0 + assert matched_idxs[0].shape == torch.Size([proposals[0].shape[0]]) + assert matched_idxs[0].dtype == torch.int64 + + assert labels[0].sum() == 0 + assert labels[0].shape == torch.Size([proposals[0].shape[0]]) + assert labels[0].dtype == torch.int64 + + @pytest.mark.parametrize( + "name", + [ + "fasterrcnn_resnet50_fpn", + "fasterrcnn_mobilenet_v3_large_fpn", + "fasterrcnn_mobilenet_v3_large_320_fpn", + ], + ) + def test_forward_negative_sample_frcnn(self, name): + model = torchvision.models.get_model( + name, weights=None, weights_backbone=None, num_classes=2, min_size=100, max_size=100 + ) + + images, targets = self._make_empty_sample() + loss_dict = model(images, targets) + + assert_equal(loss_dict["loss_box_reg"], torch.tensor(0.0)) + assert_equal(loss_dict["loss_rpn_box_reg"], torch.tensor(0.0)) + + def test_forward_negative_sample_mrcnn(self): + model = torchvision.models.detection.maskrcnn_resnet50_fpn( + weights=None, weights_backbone=None, num_classes=2, min_size=100, max_size=100 + ) + + images, targets = self._make_empty_sample(add_masks=True) + loss_dict = model(images, targets) + + assert_equal(loss_dict["loss_box_reg"], torch.tensor(0.0)) + assert_equal(loss_dict["loss_rpn_box_reg"], torch.tensor(0.0)) + assert_equal(loss_dict["loss_mask"], torch.tensor(0.0)) + + def test_forward_negative_sample_krcnn(self): + model = torchvision.models.detection.keypointrcnn_resnet50_fpn( + weights=None, weights_backbone=None, num_classes=2, min_size=100, max_size=100 + ) + + images, targets = self._make_empty_sample(add_keypoints=True) + loss_dict = model(images, targets) + + assert_equal(loss_dict["loss_box_reg"], torch.tensor(0.0)) + assert_equal(loss_dict["loss_rpn_box_reg"], torch.tensor(0.0)) + assert_equal(loss_dict["loss_keypoint"], torch.tensor(0.0)) + + def test_forward_negative_sample_retinanet(self): + model = torchvision.models.detection.retinanet_resnet50_fpn( + weights=None, weights_backbone=None, num_classes=2, min_size=100, max_size=100 + ) + + images, targets = self._make_empty_sample() + loss_dict = model(images, targets) + + assert_equal(loss_dict["bbox_regression"], torch.tensor(0.0)) + + def test_forward_negative_sample_fcos(self): + model = torchvision.models.detection.fcos_resnet50_fpn( + weights=None, weights_backbone=None, num_classes=2, min_size=100, max_size=100 + ) + + images, targets = 
self._make_empty_sample()
+        loss_dict = model(images, targets)
+
+        assert_equal(loss_dict["bbox_regression"], torch.tensor(0.0))
+        assert_equal(loss_dict["bbox_ctrness"], torch.tensor(0.0))
+
+    def test_forward_negative_sample_ssd(self):
+        model = torchvision.models.detection.ssd300_vgg16(weights=None, weights_backbone=None, num_classes=2)
+
+        images, targets = self._make_empty_sample()
+        loss_dict = model(images, targets)
+
+        assert_equal(loss_dict["bbox_regression"], torch.tensor(0.0))
+
+
+if __name__ == "__main__":
+    pytest.main([__file__])
diff --git a/test/test_models_detection_utils.py b/test/test_models_detection_utils.py
new file mode 100644
index 00000000000..69703ab5817
--- /dev/null
+++ b/test/test_models_detection_utils.py
@@ -0,0 +1,85 @@
+import copy
+
+import pytest
+import torch
+from common_utils import assert_equal
+from torchvision.models.detection import _utils, backbone_utils
+from torchvision.models.detection.transform import GeneralizedRCNNTransform
+
+
+class TestModelsDetectionUtils:
+    def test_balanced_positive_negative_sampler(self):
+        sampler = _utils.BalancedPositiveNegativeSampler(4, 0.25)
+        # keep all 6 negatives first, then add 3 positives; the last two are ignored
+        matched_idxs = [torch.tensor([0, 0, 0, 0, 0, 0, 1, 1, 1, -1, -1])]
+        pos, neg = sampler(matched_idxs)
+        # we know the number of elements that should be sampled for the positive (1)
+        # and the negative (3), and their location. Let's make sure that they are
+        # there
+        assert pos[0].sum() == 1
+        assert pos[0][6:9].sum() == 1
+        assert neg[0].sum() == 3
+        assert neg[0][0:6].sum() == 3
+
+    def test_box_linear_coder(self):
+        box_coder = _utils.BoxLinearCoder(normalize_by_size=True)
+        # Generate a random 10x4 boxes tensor, with coordinates < 50.
+        boxes = torch.rand(10, 4) * 50
+        boxes.clamp_(min=1.0)  # tiny boxes cause numerical instability in box regression
+        boxes[:, 2:] += boxes[:, :2]
+
+        proposals = torch.tensor([0, 0, 101, 101] * 10).reshape(10, 4).float()
+
+        rel_codes = box_coder.encode(boxes, proposals)
+        pred_boxes = box_coder.decode(rel_codes, boxes)
+        assert torch.allclose(proposals, pred_boxes)
+
+    @pytest.mark.parametrize("train_layers, exp_froz_params", [(0, 53), (1, 43), (2, 24), (3, 11), (4, 1), (5, 0)])
+    def test_resnet_fpn_backbone_frozen_layers(self, train_layers, exp_froz_params):
+        # we know how many initial layers and parameters of the network should
+        # be frozen for each trainable_backbone_layers parameter value
+        # i.e. all 53 params are frozen if trainable_backbone_layers=0
+        # and the first 24 params are frozen if trainable_backbone_layers=2
+        model = backbone_utils.resnet_fpn_backbone("resnet50", weights=None, trainable_layers=train_layers)
+        # boolean list that is true if the param at that index is frozen
+        is_frozen = [not parameter.requires_grad for _, parameter in model.named_parameters()]
+        # check that expected initial number of layers are frozen
+        assert all(is_frozen[:exp_froz_params])
+
+    def test_validate_resnet_inputs_detection(self):
+        # default number of backbone layers to train
+        ret = backbone_utils._validate_trainable_layers(
+            is_trained=True, trainable_backbone_layers=None, max_value=5, default_value=3
+        )
+        assert ret == 3
+        # can't go beyond 5
+        with pytest.raises(ValueError, match=r"Trainable backbone layers should be in the range"):
+            ret = backbone_utils._validate_trainable_layers(
+                is_trained=True, trainable_backbone_layers=6, max_value=5, default_value=3
+            )
+        # if not trained, should use all trainable layers and warn
+        with pytest.warns(UserWarning):
+            ret = backbone_utils._validate_trainable_layers(
+                is_trained=False, trainable_backbone_layers=0, max_value=5, default_value=3
+            )
+        assert ret == 5
+
+    def test_transform_copy_targets(self):
+        transform = GeneralizedRCNNTransform(300, 500, torch.zeros(3), torch.ones(3))
+        image = [torch.rand(3, 200, 300), torch.rand(3, 200, 200)]
+        targets = [{"boxes": torch.rand(3, 4)}, {"boxes": torch.rand(2, 4)}]
+        targets_copy = copy.deepcopy(targets)
+        out = transform(image, targets)  # noqa: F841
+        assert_equal(targets[0]["boxes"], targets_copy[0]["boxes"])
+        assert_equal(targets[1]["boxes"], targets_copy[1]["boxes"])
+
+    def test_not_float_normalize(self):
+        transform = GeneralizedRCNNTransform(300, 500, torch.zeros(3), torch.ones(3))
+        image = [torch.randint(0, 255, (3, 200, 300), dtype=torch.uint8)]
+        targets = [{"boxes": torch.rand(3, 4)}]
+        with pytest.raises(TypeError):
+            out = transform(image, targets)  # noqa: F841
+
+
+if __name__ == "__main__":
+    pytest.main([__file__])
diff --git a/test/test_onnx.py b/test/test_onnx.py
index 090f16cc550..0350c817ff8 100644
--- a/test/test_onnx.py
+++ b/test/test_onnx.py
@@ -1,53 +1,71 @@
 import io
+from collections import OrderedDict
+from typing import List, Optional, Tuple
+
+import pytest
 import torch
-from torchvision import ops
-from torchvision import models
+from common_utils import assert_equal, set_rng_seed
+from torchvision import models, ops
+from torchvision.models.detection.faster_rcnn import FastRCNNPredictor, TwoMLPHead
 from torchvision.models.detection.image_list import ImageList
-from torchvision.models.detection.transform import GeneralizedRCNNTransform
-from torchvision.models.detection.rpn import AnchorGenerator, RPNHead, RegionProposalNetwork
-from torchvision.models.detection.backbone_utils import resnet_fpn_backbone
 from torchvision.models.detection.roi_heads import RoIHeads
-from torchvision.models.detection.faster_rcnn import FastRCNNPredictor, TwoMLPHead
-from torchvision.models.detection.mask_rcnn import MaskRCNNHeads, MaskRCNNPredictor
-
-from collections import OrderedDict
-
-# onnxruntime requires python 3.5 or above
-try:
-    import onnxruntime
-except ImportError:
-    onnxruntime = None
+from torchvision.models.detection.rpn import AnchorGenerator, RegionProposalNetwork, RPNHead
+from torchvision.models.detection.transform import GeneralizedRCNNTransform
+from torchvision.ops import _register_onnx_ops
-import unittest
-from torchvision.ops._register_onnx_ops import 
_onnx_opset_version +# In environments without onnxruntime we prefer to +# invoke all tests in the repo and have this one skipped rather than fail. +onnxruntime = pytest.importorskip("onnxruntime") -@unittest.skipIf(onnxruntime is None, 'ONNX Runtime unavailable') -class ONNXExporterTester(unittest.TestCase): +class TestONNXExporter: @classmethod - def setUpClass(cls): + def setup_class(cls): torch.manual_seed(123) - def run_model(self, model, inputs_list, tolerate_small_mismatch=False): + def run_model( + self, + model, + inputs_list, + do_constant_folding=True, + dynamic_axes=None, + output_names=None, + input_names=None, + opset_version: Optional[int] = None, + ): + if opset_version is None: + opset_version = _register_onnx_ops.BASE_ONNX_OPSET_VERSION + model.eval() onnx_io = io.BytesIO() + if isinstance(inputs_list[0][-1], dict): + torch_onnx_input = inputs_list[0] + ({},) + else: + torch_onnx_input = inputs_list[0] # export to onnx with the first input - torch.onnx.export(model, inputs_list[0], onnx_io, - do_constant_folding=True, opset_version=_onnx_opset_version) - + torch.onnx.export( + model, + torch_onnx_input, + onnx_io, + do_constant_folding=do_constant_folding, + opset_version=opset_version, + dynamic_axes=dynamic_axes, + input_names=input_names, + output_names=output_names, + verbose=True, + ) # validate the exported model with onnx runtime for test_inputs in inputs_list: with torch.no_grad(): - if isinstance(test_inputs, torch.Tensor) or \ - isinstance(test_inputs, list): + if isinstance(test_inputs, torch.Tensor) or isinstance(test_inputs, list): test_inputs = (test_inputs,) test_ouputs = model(*test_inputs) if isinstance(test_ouputs, torch.Tensor): test_ouputs = (test_ouputs,) - self.ort_validate(onnx_io, test_inputs, test_ouputs, tolerate_small_mismatch) + self.ort_validate(onnx_io, test_inputs, test_ouputs) - def ort_validate(self, onnx_io, inputs, outputs, tolerate_small_mismatch=False): + def ort_validate(self, onnx_io, inputs, outputs): inputs, _ = torch.jit._flatten(inputs) outputs, _ = torch.jit._flatten(outputs) @@ -61,23 +79,19 @@ def to_numpy(tensor): inputs = list(map(to_numpy, inputs)) outputs = list(map(to_numpy, outputs)) - ort_session = onnxruntime.InferenceSession(onnx_io.getvalue()) + ort_session = onnxruntime.InferenceSession(onnx_io.getvalue(), providers=onnxruntime.get_available_providers()) # compute onnxruntime output prediction - ort_inputs = dict((ort_session.get_inputs()[i].name, inpt) for i, inpt in enumerate(inputs)) + ort_inputs = {ort_session.get_inputs()[i].name: inpt for i, inpt in enumerate(inputs)} ort_outs = ort_session.run(None, ort_inputs) + for i in range(0, len(outputs)): - try: - torch.testing.assert_allclose(outputs[i], ort_outs[i], rtol=1e-03, atol=1e-05) - except AssertionError as error: - if tolerate_small_mismatch: - self.assertIn("(0.00%)", str(error), str(error)) - else: - raise + torch.testing.assert_close(outputs[i], ort_outs[i], rtol=1e-03, atol=1e-05) def test_nms(self): - boxes = torch.rand(5, 4) - boxes[:, 2:] += torch.rand(5, 2) - scores = torch.randn(5) + num_boxes = 100 + boxes = torch.rand(num_boxes, 4) + boxes[:, 2:] += boxes[:, :2] + scores = torch.randn(num_boxes) class Module(torch.nn.Module): def forward(self, boxes, scores): @@ -85,12 +99,79 @@ def forward(self, boxes, scores): self.run_model(Module(), [(boxes, scores)]) + def test_batched_nms(self): + num_boxes = 100 + boxes = torch.rand(num_boxes, 4) + boxes[:, 2:] += boxes[:, :2] + scores = torch.randn(num_boxes) + idxs = torch.randint(0, 5, 
size=(num_boxes,)) + + class Module(torch.nn.Module): + def forward(self, boxes, scores, idxs): + return ops.batched_nms(boxes, scores, idxs, 0.5) + + self.run_model(Module(), [(boxes, scores, idxs)]) + + def test_clip_boxes_to_image(self): + boxes = torch.randn(5, 4) * 500 + boxes[:, 2:] += boxes[:, :2] + size = torch.randn(200, 300) + + size_2 = torch.randn(300, 400) + + class Module(torch.nn.Module): + def forward(self, boxes, size): + return ops.boxes.clip_boxes_to_image(boxes, size.shape) + + self.run_model( + Module(), [(boxes, size), (boxes, size_2)], input_names=["boxes", "size"], dynamic_axes={"size": [0, 1]} + ) + def test_roi_align(self): x = torch.rand(1, 1, 10, 10, dtype=torch.float32) single_roi = torch.tensor([[0, 0, 0, 4, 4]], dtype=torch.float32) model = ops.RoIAlign((5, 5), 1, 2) self.run_model(model, [(x, single_roi)]) + x = torch.rand(1, 1, 10, 10, dtype=torch.float32) + single_roi = torch.tensor([[0, 0, 0, 4, 4]], dtype=torch.float32) + model = ops.RoIAlign((5, 5), 1, -1) + self.run_model(model, [(x, single_roi)]) + + def test_roi_align_aligned(self): + supported_onnx_version = _register_onnx_ops._ONNX_OPSET_VERSION_16 + x = torch.rand(1, 1, 10, 10, dtype=torch.float32) + single_roi = torch.tensor([[0, 1.5, 1.5, 3, 3]], dtype=torch.float32) + model = ops.RoIAlign((5, 5), 1, 2, aligned=True) + self.run_model(model, [(x, single_roi)], opset_version=supported_onnx_version) + + x = torch.rand(1, 1, 10, 10, dtype=torch.float32) + single_roi = torch.tensor([[0, 0.2, 0.3, 4.5, 3.5]], dtype=torch.float32) + model = ops.RoIAlign((5, 5), 0.5, 3, aligned=True) + self.run_model(model, [(x, single_roi)], opset_version=supported_onnx_version) + + x = torch.rand(1, 1, 10, 10, dtype=torch.float32) + single_roi = torch.tensor([[0, 0.2, 0.3, 4.5, 3.5]], dtype=torch.float32) + model = ops.RoIAlign((5, 5), 1.8, 2, aligned=True) + self.run_model(model, [(x, single_roi)], opset_version=supported_onnx_version) + + x = torch.rand(1, 1, 10, 10, dtype=torch.float32) + single_roi = torch.tensor([[0, 0.2, 0.3, 4.5, 3.5]], dtype=torch.float32) + model = ops.RoIAlign((2, 2), 2.5, 0, aligned=True) + self.run_model(model, [(x, single_roi)], opset_version=supported_onnx_version) + + x = torch.rand(1, 1, 10, 10, dtype=torch.float32) + single_roi = torch.tensor([[0, 0.2, 0.3, 4.5, 3.5]], dtype=torch.float32) + model = ops.RoIAlign((2, 2), 2.5, -1, aligned=True) + self.run_model(model, [(x, single_roi)], opset_version=supported_onnx_version) + + def test_roi_align_malformed_boxes(self): + supported_onnx_version = _register_onnx_ops._ONNX_OPSET_VERSION_16 + x = torch.randn(1, 1, 10, 10, dtype=torch.float32) + single_roi = torch.tensor([[0, 2, 0.3, 1.5, 1.5]], dtype=torch.float32) + model = ops.RoIAlign((5, 5), 1, 1, aligned=True) + self.run_model(model, [(x, single_roi)], opset_version=supported_onnx_version) + def test_roi_pool(self): x = torch.rand(1, 1, 10, 10, dtype=torch.float32) rois = torch.tensor([[0, 0, 0, 4, 4]], dtype=torch.float32) @@ -99,19 +180,33 @@ def test_roi_pool(self): model = ops.RoIPool((pool_h, pool_w), 2) self.run_model(model, [(x, rois)]) - def test_transform_images(self): + def test_resize_images(self): + class TransformModule(torch.nn.Module): + def __init__(self_module): + super().__init__() + self_module.transform = self._init_test_generalized_rcnn_transform() + + def forward(self_module, images): + return self_module.transform.resize(images, None)[0] + + input = torch.rand(3, 10, 20) + input_test = torch.rand(3, 100, 150) + self.run_model( + TransformModule(), [(input,), 
(input_test,)], input_names=["input1"], dynamic_axes={"input1": [0, 1, 2]} + ) + def test_transform_images(self): class TransformModule(torch.nn.Module): def __init__(self_module): - super(TransformModule, self_module).__init__() + super().__init__() self_module.transform = self._init_test_generalized_rcnn_transform() def forward(self_module, images): return self_module.transform(images)[0].tensors - input = [torch.rand(3, 100, 200), torch.rand(3, 200, 200)] - input_test = [torch.rand(3, 100, 200), torch.rand(3, 200, 200)] - self.run_model(TransformModule(), [input, input_test]) + input = torch.rand(3, 100, 200), torch.rand(3, 200, 200) + input_test = torch.rand(3, 100, 200), torch.rand(3, 200, 200) + self.run_model(TransformModule(), [(input,), (input_test,)]) def _init_test_generalized_rcnn_transform(self): min_size = 100 @@ -134,12 +229,20 @@ def _init_test_rpn(self): rpn_pre_nms_top_n = dict(training=2000, testing=1000) rpn_post_nms_top_n = dict(training=2000, testing=1000) rpn_nms_thresh = 0.7 + rpn_score_thresh = 0.0 rpn = RegionProposalNetwork( - rpn_anchor_generator, rpn_head, - rpn_fg_iou_thresh, rpn_bg_iou_thresh, - rpn_batch_size_per_image, rpn_positive_fraction, - rpn_pre_nms_top_n, rpn_post_nms_top_n, rpn_nms_thresh) + rpn_anchor_generator, + rpn_head, + rpn_fg_iou_thresh, + rpn_bg_iou_thresh, + rpn_batch_size_per_image, + rpn_positive_fraction, + rpn_pre_nms_top_n, + rpn_post_nms_top_n, + rpn_nms_thresh, + score_thresh=rpn_score_thresh, + ) return rpn def _init_test_roi_heads_faster_rcnn(self): @@ -155,145 +258,194 @@ def _init_test_roi_heads_faster_rcnn(self): box_nms_thresh = 0.5 box_detections_per_img = 100 - box_roi_pool = ops.MultiScaleRoIAlign( - featmap_names=['0', '1', '2', '3'], - output_size=7, - sampling_ratio=2) + box_roi_pool = ops.MultiScaleRoIAlign(featmap_names=["0", "1", "2", "3"], output_size=7, sampling_ratio=2) resolution = box_roi_pool.output_size[0] representation_size = 1024 - box_head = TwoMLPHead( - out_channels * resolution ** 2, - representation_size) + box_head = TwoMLPHead(out_channels * resolution**2, representation_size) representation_size = 1024 - box_predictor = FastRCNNPredictor( - representation_size, - num_classes) + box_predictor = FastRCNNPredictor(representation_size, num_classes) roi_heads = RoIHeads( - box_roi_pool, box_head, box_predictor, - box_fg_iou_thresh, box_bg_iou_thresh, - box_batch_size_per_image, box_positive_fraction, + box_roi_pool, + box_head, + box_predictor, + box_fg_iou_thresh, + box_bg_iou_thresh, + box_batch_size_per_image, + box_positive_fraction, bbox_reg_weights, - box_score_thresh, box_nms_thresh, box_detections_per_img) + box_score_thresh, + box_nms_thresh, + box_detections_per_img, + ) return roi_heads def get_features(self, images): s0, s1 = images.shape[-2:] features = [ - ('0', torch.rand(2, 256, s0 // 4, s1 // 4)), - ('1', torch.rand(2, 256, s0 // 8, s1 // 8)), - ('2', torch.rand(2, 256, s0 // 16, s1 // 16)), - ('3', torch.rand(2, 256, s0 // 32, s1 // 32)), - ('4', torch.rand(2, 256, s0 // 64, s1 // 64)), + ("0", torch.rand(2, 256, s0 // 4, s1 // 4)), + ("1", torch.rand(2, 256, s0 // 8, s1 // 8)), + ("2", torch.rand(2, 256, s0 // 16, s1 // 16)), + ("3", torch.rand(2, 256, s0 // 32, s1 // 32)), + ("4", torch.rand(2, 256, s0 // 64, s1 // 64)), ] features = OrderedDict(features) return features def test_rpn(self): + set_rng_seed(0) + class RPNModule(torch.nn.Module): - def __init__(self_module, images): - super(RPNModule, self_module).__init__() + def __init__(self_module): + super().__init__() 
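+                # NB: the ImageList is now built inside forward, so the exported graph accepts raw image tensors.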
self_module.rpn = self._init_test_rpn() - self_module.images = ImageList(images, [i.shape[-2:] for i in images]) - def forward(self_module, features): - return self_module.rpn(self_module.images, features) + def forward(self_module, images, features): + images = ImageList(images, [i.shape[-2:] for i in images]) + return self_module.rpn(images, features) - images = torch.rand(2, 3, 600, 600) + images = torch.rand(2, 3, 150, 150) features = self.get_features(images) - test_features = self.get_features(images) + images2 = torch.rand(2, 3, 80, 80) + test_features = self.get_features(images2) - model = RPNModule(images) + model = RPNModule() model.eval() - model(features) - self.run_model(model, [(features,), (test_features,)], tolerate_small_mismatch=True) + model(images, features) + + self.run_model( + model, + [(images, features), (images2, test_features)], + input_names=["input1", "input2", "input3", "input4", "input5", "input6"], + dynamic_axes={ + "input1": [0, 1, 2, 3], + "input2": [0, 1, 2, 3], + "input3": [0, 1, 2, 3], + "input4": [0, 1, 2, 3], + "input5": [0, 1, 2, 3], + "input6": [0, 1, 2, 3], + }, + ) def test_multi_scale_roi_align(self): - class TransformModule(torch.nn.Module): def __init__(self): - super(TransformModule, self).__init__() - self.model = ops.MultiScaleRoIAlign(['feat1', 'feat2'], 3, 2) + super().__init__() + self.model = ops.MultiScaleRoIAlign(["feat1", "feat2"], 3, 2) self.image_sizes = [(512, 512)] def forward(self, input, boxes): return self.model(input, boxes, self.image_sizes) i = OrderedDict() - i['feat1'] = torch.rand(1, 5, 64, 64) - i['feat2'] = torch.rand(1, 5, 16, 16) + i["feat1"] = torch.rand(1, 5, 64, 64) + i["feat2"] = torch.rand(1, 5, 16, 16) boxes = torch.rand(6, 4) * 256 boxes[:, 2:] += boxes[:, :2] i1 = OrderedDict() - i1['feat1'] = torch.rand(1, 5, 64, 64) - i1['feat2'] = torch.rand(1, 5, 16, 16) + i1["feat1"] = torch.rand(1, 5, 64, 64) + i1["feat2"] = torch.rand(1, 5, 16, 16) boxes1 = torch.rand(6, 4) * 256 boxes1[:, 2:] += boxes1[:, :2] - self.run_model(TransformModule(), [(i, [boxes],), (i1, [boxes1],)]) + self.run_model( + TransformModule(), + [ + ( + i, + [boxes], + ), + ( + i1, + [boxes1], + ), + ], + ) def test_roi_heads(self): class RoiHeadsModule(torch.nn.Module): - def __init__(self_module, images): - super(RoiHeadsModule, self_module).__init__() + def __init__(self_module): + super().__init__() self_module.transform = self._init_test_generalized_rcnn_transform() self_module.rpn = self._init_test_rpn() self_module.roi_heads = self._init_test_roi_heads_faster_rcnn() - self_module.original_image_sizes = [img.shape[-2:] for img in images] - self_module.images = ImageList(images, [i.shape[-2:] for i in images]) - - def forward(self_module, features): - proposals, _ = self_module.rpn(self_module.images, features) - detections, _ = self_module.roi_heads(features, proposals, self_module.images.image_sizes) - detections = self_module.transform.postprocess(detections, - self_module.images.image_sizes, - self_module.original_image_sizes) + + def forward(self_module, images, features): + original_image_sizes = [img.shape[-2:] for img in images] + images = ImageList(images, [i.shape[-2:] for i in images]) + proposals, _ = self_module.rpn(images, features) + detections, _ = self_module.roi_heads(features, proposals, images.image_sizes) + detections = self_module.transform.postprocess(detections, images.image_sizes, original_image_sizes) return detections - images = torch.rand(2, 3, 600, 600) + images = torch.rand(2, 3, 100, 100) features = 
self.get_features(images)
-        test_features = self.get_features(images)
+        images2 = torch.rand(2, 3, 150, 150)
+        test_features = self.get_features(images2)

-        model = RoiHeadsModule(images)
+        model = RoiHeadsModule()
         model.eval()
-        model(features)
+        model(images, features)
+
+        self.run_model(
+            model,
+            [(images, features), (images2, test_features)],
+            input_names=["input1", "input2", "input3", "input4", "input5", "input6"],
+            dynamic_axes={
+                "input1": [0, 1, 2, 3],
+                "input2": [0, 1, 2, 3],
+                "input3": [0, 1, 2, 3],
+                "input4": [0, 1, 2, 3],
+                "input5": [0, 1, 2, 3],
+                "input6": [0, 1, 2, 3],
+            },
+        )
+
+    def get_image(self, rel_path: str, size: Tuple[int, int]) -> torch.Tensor:
+        import os
-    def get_image_from_url(self, url):
-        import requests
-        import numpy
         from PIL import Image
-        from io import BytesIO
-        from torchvision import transforms
+        from torchvision.transforms import functional as F
-        data = requests.get(url)
-        image = Image.open(BytesIO(data.content)).convert("RGB")
-        image = image.resize((300, 200), Image.BILINEAR)
+        data_dir = os.path.join(os.path.dirname(__file__), "assets")
+        path = os.path.join(data_dir, *rel_path.split("/"))
+        image = Image.open(path).convert("RGB").resize(size, Image.BILINEAR)
-        to_tensor = transforms.ToTensor()
-        return to_tensor(image)
+        return F.convert_image_dtype(F.pil_to_tensor(image))
-    def get_test_images(self):
-        image_url = "http://farm3.staticflickr.com/2469/3915380994_2e611b1779_z.jpg"
-        image = self.get_image_from_url(url=image_url)
-        image_url2 = "https://pytorch.org/tutorials/_static/img/tv_tutorial/tv_image05.png"
-        image2 = self.get_image_from_url(url=image_url2)
-        images = [image]
-        test_images = [image2]
-        return images, test_images
+    def get_test_images(self) -> Tuple[List[torch.Tensor], List[torch.Tensor]]:
+        return (
+            [self.get_image("encode_jpeg/grace_hopper_517x606.jpg", (100, 320))],
+            [self.get_image("fakedata/logos/rgb_pytorch.png", (250, 380))],
+        )

     def test_faster_rcnn(self):
         images, test_images = self.get_test_images()
-
-        model = models.detection.faster_rcnn.fasterrcnn_resnet50_fpn(pretrained=True,
-                                                                     min_size=200,
-                                                                     max_size=300)
+        dummy_image = [torch.ones(3, 100, 100) * 0.3]
+        model = models.detection.faster_rcnn.fasterrcnn_resnet50_fpn(
+            weights=models.detection.faster_rcnn.FasterRCNN_ResNet50_FPN_Weights.DEFAULT, min_size=200, max_size=300
+        )
         model.eval()
         model(images)
-        self.run_model(model, [(images,), (test_images,)])
+        # Test exported model on images of different size, or dummy input
+        self.run_model(
+            model,
+            [(images,), (test_images,), (dummy_image,)],
+            input_names=["images_tensors"],
+            output_names=["outputs"],
+            dynamic_axes={"images_tensors": [0, 1, 2], "outputs": [0, 1, 2]},
+        )
+        # Test exported model for an image with no detections on other images
+        self.run_model(
+            model,
+            [(dummy_image,), (images,)],
+            input_names=["images_tensors"],
+            output_names=["outputs"],
+            dynamic_axes={"images_tensors": [0, 1, 2], "outputs": [0, 1, 2]},
+        )

     # Verify that paste_masks_in_image behaves the same in tracing.
    # This test also compares both paste_masks_in_image and _onnx_paste_masks_in_image
@@ -305,11 +457,11 @@ def test_paste_mask_in_image(self):
         boxes *= 50
         o_im_s = (100, 100)
         from torchvision.models.detection.roi_heads import paste_masks_in_image
+
         out = paste_masks_in_image(masks, boxes, o_im_s)
-        jit_trace = torch.jit.trace(paste_masks_in_image,
-                                    (masks, boxes,
-                                     [torch.tensor(o_im_s[0]),
-                                      torch.tensor(o_im_s[1])]))
+        jit_trace = torch.jit.trace(
+            paste_masks_in_image, (masks, boxes, [torch.tensor(o_im_s[0]), torch.tensor(o_im_s[1])])
+        )
         out_trace = jit_trace(masks, boxes, [torch.tensor(o_im_s[0]), torch.tensor(o_im_s[1])])

         assert torch.all(out.eq(out_trace))

@@ -320,20 +472,111 @@ def test_paste_mask_in_image(self):
         masks2 = torch.rand(10, 1, 26, 26)
         boxes2 = torch.rand(10, 4)
         boxes2 *= 100
         o_im_s2 = (200, 200)
         from torchvision.models.detection.roi_heads import paste_masks_in_image
+
         out2 = paste_masks_in_image(masks2, boxes2, o_im_s2)
         out_trace2 = jit_trace(masks2, boxes2, [torch.tensor(o_im_s2[0]), torch.tensor(o_im_s2[1])])

         assert torch.all(out2.eq(out_trace2))

-    @unittest.skip("Disable test until Resize opset 11 is implemented in ONNX Runtime")
     def test_mask_rcnn(self):
         images, test_images = self.get_test_images()
-
-        model = models.detection.mask_rcnn.maskrcnn_resnet50_fpn(pretrained=True)
+        dummy_image = [torch.ones(3, 100, 100) * 0.3]
+        model = models.detection.mask_rcnn.maskrcnn_resnet50_fpn(
+            weights=models.detection.mask_rcnn.MaskRCNN_ResNet50_FPN_Weights.DEFAULT, min_size=200, max_size=300
+        )
         model.eval()
         model(images)
-        self.run_model(model, [(images,), (test_images,)])
-
-
-if __name__ == '__main__':
-    unittest.main()
+        # Test exported model on images of different size, or dummy input
+        self.run_model(
+            model,
+            [(images,), (test_images,), (dummy_image,)],
+            input_names=["images_tensors"],
+            output_names=["boxes", "labels", "scores", "masks"],
+            dynamic_axes={
+                "images_tensors": [0, 1, 2],
+                "boxes": [0, 1],
+                "labels": [0],
+                "scores": [0],
+                "masks": [0, 1, 2],
+            },
+        )
+        # Test exported model for an image with no detections on other images
+        self.run_model(
+            model,
+            [(dummy_image,), (images,)],
+            input_names=["images_tensors"],
+            output_names=["boxes", "labels", "scores", "masks"],
+            dynamic_axes={
+                "images_tensors": [0, 1, 2],
+                "boxes": [0, 1],
+                "labels": [0],
+                "scores": [0],
+                "masks": [0, 1, 2],
+            },
+        )
+
+    # Verify that heatmaps_to_keypoints behaves the same in tracing.
+    # This test also compares both heatmaps_to_keypoints and _onnx_heatmaps_to_keypoints
+    # (since jit_trace will call _heatmaps_to_keypoints).
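+    # Two input shapes are pushed through the same trace to exercise it on sizes unseen at trace time.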
+ def test_heatmaps_to_keypoints(self): + maps = torch.rand(10, 1, 26, 26) + rois = torch.rand(10, 4) + from torchvision.models.detection.roi_heads import heatmaps_to_keypoints + + out = heatmaps_to_keypoints(maps, rois) + jit_trace = torch.jit.trace(heatmaps_to_keypoints, (maps, rois)) + out_trace = jit_trace(maps, rois) + + assert_equal(out[0], out_trace[0]) + assert_equal(out[1], out_trace[1]) + + maps2 = torch.rand(20, 2, 21, 21) + rois2 = torch.rand(20, 4) + from torchvision.models.detection.roi_heads import heatmaps_to_keypoints + + out2 = heatmaps_to_keypoints(maps2, rois2) + out_trace2 = jit_trace(maps2, rois2) + + assert_equal(out2[0], out_trace2[0]) + assert_equal(out2[1], out_trace2[1]) + + def test_keypoint_rcnn(self): + images, test_images = self.get_test_images() + dummy_images = [torch.ones(3, 100, 100) * 0.3] + model = models.detection.keypoint_rcnn.keypointrcnn_resnet50_fpn( + weights=models.detection.keypoint_rcnn.KeypointRCNN_ResNet50_FPN_Weights.DEFAULT, min_size=200, max_size=300 + ) + model.eval() + model(images) + self.run_model( + model, + [(images,), (test_images,), (dummy_images,)], + input_names=["images_tensors"], + output_names=["outputs1", "outputs2", "outputs3", "outputs4"], + dynamic_axes={"images_tensors": [0, 1, 2]}, + ) + + self.run_model( + model, + [(dummy_images,), (test_images,)], + input_names=["images_tensors"], + output_names=["outputs1", "outputs2", "outputs3", "outputs4"], + dynamic_axes={"images_tensors": [0, 1, 2]}, + ) + + def test_shufflenet_v2_dynamic_axes(self): + model = models.shufflenet_v2_x0_5(weights=models.ShuffleNet_V2_X0_5_Weights.DEFAULT) + dummy_input = torch.randn(1, 3, 224, 224, requires_grad=True) + test_inputs = torch.cat([dummy_input, dummy_input, dummy_input], 0) + + self.run_model( + model, + [(dummy_input,), (test_inputs,)], + input_names=["input_images"], + output_names=["output"], + dynamic_axes={"input_images": {0: "batch_size"}, "output": {0: "batch_size"}}, + ) + + +if __name__ == "__main__": + pytest.main([__file__]) diff --git a/test/test_ops.py b/test/test_ops.py index c4cc3fe0bd6..1ba7a2c9efa 100644 --- a/test/test_ops.py +++ b/test/test_ops.py @@ -1,109 +1,293 @@ -from __future__ import division +import math +import os +from abc import ABC, abstractmethod +from functools import lru_cache +from itertools import product +from typing import Callable, List, Tuple + import numpy as np +import pytest import torch +import torch.fx +import torch.nn.functional as F +import torch.testing._internal.optests as optests +from common_utils import assert_equal, cpu_and_cuda, cpu_and_cuda_and_mps, needs_cuda, needs_mps +from PIL import Image +from torch import nn, Tensor +from torch._dynamo.utils import is_compile_supported from torch.autograd import gradcheck +from torch.nn.modules.utils import _pair +from torchvision import models, ops +from torchvision.models.feature_extraction import get_graph_node_names -from torchvision import ops -from itertools import product -import unittest +OPTESTS = [ + "test_schema", + "test_autograd_registration", + "test_faketensor", + "test_aot_dispatch_dynamic", +] + + +# Context manager for setting deterministic flag and automatically +# resetting it to its original value +class DeterministicGuard: + def __init__(self, deterministic, *, warn_only=False): + self.deterministic = deterministic + self.warn_only = warn_only + + def __enter__(self): + self.deterministic_restore = torch.are_deterministic_algorithms_enabled() + self.warn_only_restore = 
torch.is_deterministic_algorithms_warn_only_enabled() + torch.use_deterministic_algorithms(self.deterministic, warn_only=self.warn_only) + + def __exit__(self, exception_type, exception_value, traceback): + torch.use_deterministic_algorithms(self.deterministic_restore, warn_only=self.warn_only_restore) + + +class RoIOpTesterModuleWrapper(nn.Module): + def __init__(self, obj): + super().__init__() + self.layer = obj + self.n_inputs = 2 + + def forward(self, a, b): + self.layer(a, b) + + +class MultiScaleRoIAlignModuleWrapper(nn.Module): + def __init__(self, obj): + super().__init__() + self.layer = obj + self.n_inputs = 3 + + def forward(self, a, b, c): + self.layer(a, b, c) + + +class DeformConvModuleWrapper(nn.Module): + def __init__(self, obj): + super().__init__() + self.layer = obj + self.n_inputs = 3 + def forward(self, a, b, c): + self.layer(a, b, c) -class RoIOpTester(object): - @classmethod - def setUpClass(cls): - cls.dtype = torch.float64 - def test_forward_cpu_contiguous(self): - self._test_forward(device=torch.device('cpu'), contiguous=True) +class StochasticDepthWrapper(nn.Module): + def __init__(self, obj): + super().__init__() + self.layer = obj + self.n_inputs = 1 - def test_forward_cpu_non_contiguous(self): - self._test_forward(device=torch.device('cpu'), contiguous=False) + def forward(self, a): + self.layer(a) - def test_backward_cpu_contiguous(self): - self._test_backward(device=torch.device('cpu'), contiguous=True) - def test_backward_cpu_non_contiguous(self): - self._test_backward(device=torch.device('cpu'), contiguous=False) +class DropBlockWrapper(nn.Module): + def __init__(self, obj): + super().__init__() + self.layer = obj + self.n_inputs = 1 - @unittest.skipIf(not torch.cuda.is_available(), "CUDA unavailable") - def test_forward_cuda_contiguous(self): - self._test_forward(device=torch.device('cuda'), contiguous=True) + def forward(self, a): + self.layer(a) - @unittest.skipIf(not torch.cuda.is_available(), "CUDA unavailable") - def test_forward_cuda_non_contiguous(self): - self._test_forward(device=torch.device('cuda'), contiguous=False) - @unittest.skipIf(not torch.cuda.is_available(), "CUDA unavailable") - def test_backward_cuda_contiguous(self): - self._test_backward(device=torch.device('cuda'), contiguous=True) +class PoolWrapper(nn.Module): + def __init__(self, pool: nn.Module): + super().__init__() + self.pool = pool - @unittest.skipIf(not torch.cuda.is_available(), "CUDA unavailable") - def test_backward_cuda_non_contiguous(self): - self._test_backward(device=torch.device('cuda'), contiguous=False) + def forward(self, imgs: Tensor, boxes: List[Tensor]) -> Tensor: + return self.pool(imgs, boxes) + + +class RoIOpTester(ABC): + dtype = torch.float64 + mps_dtype = torch.float32 + mps_backward_atol = 2e-2 + + @pytest.mark.parametrize("device", cpu_and_cuda_and_mps()) + @pytest.mark.parametrize("contiguous", (True, False)) + @pytest.mark.parametrize( + "x_dtype", + ( + torch.float16, + torch.float32, + torch.float64, + ), + ids=str, + ) + def test_forward(self, device, contiguous, x_dtype, rois_dtype=None, deterministic=False, **kwargs): + if device == "mps" and x_dtype is torch.float64: + pytest.skip("MPS does not support float64") + + rois_dtype = x_dtype if rois_dtype is None else rois_dtype + + tol = 1e-5 + if x_dtype is torch.half: + if device == "mps": + tol = 5e-3 + else: + tol = 4e-3 + elif x_dtype == torch.bfloat16: + tol = 5e-3 - def _test_forward(self, device, contiguous): pool_size = 5 - # n_channels % (pool_size ** 2) == 0 required for PS opeartions. 
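# [Editor's note — illustrative sketch, not part of the patch] The channel
# constraint in the comment above exists because position-sensitive (PS) RoI
# ops carve the input channels into one group per output bin, giving
# C_out = C_in / (pool_h * pool_w). Minimal demonstration:
import torch
from torchvision import ops

pool_size = 5
x = torch.rand(1, 2 * pool_size**2, 10, 10)  # 50 input channels
rois = torch.tensor([[0.0, 0.0, 0.0, 9.0, 9.0]])  # (batch_idx, x1, y1, x2, y2)
y = ops.ps_roi_pool(x, rois, output_size=(pool_size, pool_size), spatial_scale=1.0)
assert y.shape == (1, 2, pool_size, pool_size)  # 50 / (5 * 5) = 2 output channels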
- n_channels = 2 * (pool_size ** 2) - x = torch.rand(2, n_channels, 10, 10, dtype=self.dtype, device=device) + # n_channels % (pool_size ** 2) == 0 required for PS operations. + n_channels = 2 * (pool_size**2) + x = torch.rand(2, n_channels, 10, 10, dtype=x_dtype, device=device) if not contiguous: x = x.permute(0, 1, 3, 2) - rois = torch.tensor([[0, 0, 0, 9, 9], # format is (xyxy) - [0, 0, 5, 4, 9], - [0, 5, 5, 9, 9], - [1, 0, 0, 9, 9]], - dtype=self.dtype, device=device) + rois = torch.tensor( + [[0, 0, 0, 9, 9], [0, 0, 5, 4, 9], [0, 5, 5, 9, 9], [1, 0, 0, 9, 9]], # format is (xyxy) + dtype=rois_dtype, + device=device, + ) pool_h, pool_w = pool_size, pool_size - y = self.fn(x, rois, pool_h, pool_w, spatial_scale=1, sampling_ratio=-1) - gt_y = self.expected_fn(x, rois, pool_h, pool_w, spatial_scale=1, - sampling_ratio=-1, device=device, dtype=self.dtype) - - self.assertTrue(torch.allclose(gt_y, y)) - - def _test_backward(self, device, contiguous): + with DeterministicGuard(deterministic): + y = self.fn(x, rois, pool_h, pool_w, spatial_scale=1, sampling_ratio=-1, **kwargs) + # the following should be true whether we're running an autocast test or not. + assert y.dtype == x.dtype + gt_y = self.expected_fn( + x, rois, pool_h, pool_w, spatial_scale=1, sampling_ratio=-1, device=device, dtype=x_dtype, **kwargs + ) + + torch.testing.assert_close(gt_y.to(y), y, rtol=tol, atol=tol) + + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_is_leaf_node(self, device): + op_obj = self.make_obj(wrap=True).to(device=device) + graph_node_names = get_graph_node_names(op_obj) + + assert len(graph_node_names) == 2 + assert len(graph_node_names[0]) == len(graph_node_names[1]) + assert len(graph_node_names[0]) == 1 + op_obj.n_inputs + + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_torch_fx_trace(self, device, x_dtype=torch.float, rois_dtype=torch.float): + op_obj = self.make_obj().to(device=device) + graph_module = torch.fx.symbolic_trace(op_obj) + pool_size = 5 + n_channels = 2 * (pool_size**2) + x = torch.rand(2, n_channels, 5, 5, dtype=x_dtype, device=device) + rois = torch.tensor( + [[0, 0, 0, 9, 9], [0, 0, 5, 4, 9], [0, 5, 5, 9, 9], [1, 0, 0, 9, 9]], # format is (xyxy) + dtype=rois_dtype, + device=device, + ) + output_gt = op_obj(x, rois) + assert output_gt.dtype == x.dtype + output_fx = graph_module(x, rois) + assert output_fx.dtype == x.dtype + tol = 1e-5 + torch.testing.assert_close(output_gt, output_fx, rtol=tol, atol=tol) + + @pytest.mark.parametrize("seed", range(10)) + @pytest.mark.parametrize("device", cpu_and_cuda_and_mps()) + @pytest.mark.parametrize("contiguous", (True, False)) + def test_backward(self, seed, device, contiguous, deterministic=False): + atol = self.mps_backward_atol if device == "mps" else 1e-05 + dtype = self.mps_dtype if device == "mps" else self.dtype + + torch.random.manual_seed(seed) pool_size = 2 - x = torch.rand(1, 2 * (pool_size ** 2), 5, 5, dtype=self.dtype, device=device, requires_grad=True) + x = torch.rand(1, 2 * (pool_size**2), 5, 5, dtype=dtype, device=device, requires_grad=True) if not contiguous: x = x.permute(0, 1, 3, 2) - rois = torch.tensor([[0, 0, 0, 4, 4], # format is (xyxy) - [0, 0, 2, 3, 4], - [0, 2, 2, 4, 4]], - dtype=self.dtype, device=device) + rois = torch.tensor( + [[0, 0, 0, 4, 4], [0, 0, 2, 3, 4], [0, 2, 2, 4, 4]], dtype=dtype, device=device # format is (xyxy) + ) def func(z): return self.fn(z, rois, pool_size, pool_size, spatial_scale=1, sampling_ratio=1) script_func = self.get_script_fn(rois, pool_size) - 
self.assertTrue(gradcheck(func, (x,))) - self.assertTrue(gradcheck(script_func, (x,))) - return + with DeterministicGuard(deterministic): + gradcheck(func, (x,), atol=atol) + + gradcheck(script_func, (x,), atol=atol) + + @needs_mps + def test_mps_error_inputs(self): + pool_size = 2 + x = torch.rand(1, 2 * (pool_size**2), 5, 5, dtype=torch.float16, device="mps", requires_grad=True) + rois = torch.tensor( + [[0, 0, 0, 4, 4], [0, 0, 2, 3, 4], [0, 2, 2, 4, 4]], dtype=torch.float16, device="mps" # format is (xyxy) + ) + + def func(z): + return self.fn(z, rois, pool_size, pool_size, spatial_scale=1, sampling_ratio=1) + with pytest.raises( + RuntimeError, match="MPS does not support (?:ps_)?roi_(?:align|pool)? backward with float16 inputs." + ): + gradcheck(func, (x,)) + + @needs_cuda + @pytest.mark.parametrize("x_dtype", (torch.float, torch.half)) + @pytest.mark.parametrize("rois_dtype", (torch.float, torch.half)) + def test_autocast(self, x_dtype, rois_dtype): + with torch.cuda.amp.autocast(): + self.test_forward(torch.device("cuda"), contiguous=False, x_dtype=x_dtype, rois_dtype=rois_dtype) + + def _helper_boxes_shape(self, func): + # test boxes as Tensor[N, 5] + with pytest.raises(AssertionError): + a = torch.linspace(1, 8 * 8, 8 * 8).reshape(1, 1, 8, 8) + boxes = torch.tensor([[0, 0, 3, 3]], dtype=a.dtype) + func(a, boxes, output_size=(2, 2)) + + # test boxes as List[Tensor[N, 4]] + with pytest.raises(AssertionError): + a = torch.linspace(1, 8 * 8, 8 * 8).reshape(1, 1, 8, 8) + boxes = torch.tensor([[0, 0, 3]], dtype=a.dtype) + ops.roi_pool(a, [boxes], output_size=(2, 2)) + + def _helper_jit_boxes_list(self, model): + x = torch.rand(2, 1, 10, 10) + roi = torch.tensor([[0, 0, 0, 9, 9], [0, 0, 5, 4, 9], [0, 5, 5, 9, 9], [1, 0, 0, 9, 9]], dtype=torch.float).t() + rois = [roi, roi] + scriped = torch.jit.script(model) + y = scriped(x, rois) + assert y.shape == (10, 1, 3, 3) + + @abstractmethod def fn(*args, **kwargs): pass + @abstractmethod + def make_obj(*args, **kwargs): + pass + + @abstractmethod def get_script_fn(*args, **kwargs): pass + @abstractmethod def expected_fn(*args, **kwargs): pass -class RoIPoolTester(RoIOpTester, unittest.TestCase): +class TestRoiPool(RoIOpTester): def fn(self, x, rois, pool_h, pool_w, spatial_scale=1, sampling_ratio=-1, **kwargs): return ops.RoIPool((pool_h, pool_w), spatial_scale)(x, rois) + def make_obj(self, pool_h=5, pool_w=5, spatial_scale=1, wrap=False): + obj = ops.RoIPool((pool_h, pool_w), spatial_scale) + return RoIOpTesterModuleWrapper(obj) if wrap else obj + def get_script_fn(self, rois, pool_size): - @torch.jit.script - def script_fn(input, rois, pool_size): - # type: (torch.Tensor, torch.Tensor, int) -> torch.Tensor - return ops.roi_pool(input, rois, pool_size, 1.0)[0] - return lambda x: script_fn(x, rois, pool_size) + scriped = torch.jit.script(ops.roi_pool) + return lambda x: scriped(x, rois, pool_size) - def expected_fn(self, x, rois, pool_h, pool_w, spatial_scale=1, sampling_ratio=-1, - device=None, dtype=torch.float64): + def expected_fn( + self, x, rois, pool_h, pool_w, spatial_scale=1, sampling_ratio=-1, device=None, dtype=torch.float64 + ): if device is None: device = torch.device("cpu") @@ -116,7 +300,7 @@ def get_slice(k, block): for roi_idx, roi in enumerate(rois): batch_idx = int(roi[0]) j_begin, i_begin, j_end, i_end = (int(round(x.item() * spatial_scale)) for x in roi[1:]) - roi_x = x[batch_idx, :, i_begin:i_end + 1, j_begin:j_end + 1] + roi_x = x[batch_idx, :, i_begin : i_end + 1, j_begin : j_end + 1] roi_h, roi_w = roi_x.shape[-2:] 
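# [Editor's note] From here the reference implementation tiles each RoI into a
# pool_h x pool_w grid: bin (i, j) spans rows [i * bin_h, (i + 1) * bin_h) and
# columns [j * bin_w, (j + 1) * bin_w) of the cropped roi_x, and the expected
# output is the max over each bin. get_slice (defined just above in the file,
# outside this hunk) is assumed to floor/ceil these fractional bin edges.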
bin_h = roi_h / pool_h @@ -129,24 +313,35 @@ def get_slice(k, block): y[roi_idx, :, i, j] = bin_x.reshape(n_channels, -1).max(dim=1)[0] return y + def test_boxes_shape(self): + self._helper_boxes_shape(ops.roi_pool) + + def test_jit_boxes_list(self): + model = PoolWrapper(ops.RoIPool(output_size=[3, 3], spatial_scale=1.0)) + self._helper_jit_boxes_list(model) + + +class TestPSRoIPool(RoIOpTester): + mps_backward_atol = 5e-2 -class PSRoIPoolTester(RoIOpTester, unittest.TestCase): def fn(self, x, rois, pool_h, pool_w, spatial_scale=1, sampling_ratio=-1, **kwargs): return ops.PSRoIPool((pool_h, pool_w), 1)(x, rois) + def make_obj(self, pool_h=5, pool_w=5, spatial_scale=1, wrap=False): + obj = ops.PSRoIPool((pool_h, pool_w), spatial_scale) + return RoIOpTesterModuleWrapper(obj) if wrap else obj + def get_script_fn(self, rois, pool_size): - @torch.jit.script - def script_fn(input, rois, pool_size): - # type: (torch.Tensor, torch.Tensor, int) -> torch.Tensor - return ops.ps_roi_pool(input, rois, pool_size, 1.0)[0] - return lambda x: script_fn(x, rois, pool_size) + scriped = torch.jit.script(ops.ps_roi_pool) + return lambda x: scriped(x, rois, pool_size) - def expected_fn(self, x, rois, pool_h, pool_w, spatial_scale=1, sampling_ratio=-1, - device=None, dtype=torch.float64): + def expected_fn( + self, x, rois, pool_h, pool_w, spatial_scale=1, sampling_ratio=-1, device=None, dtype=torch.float64 + ): if device is None: device = torch.device("cpu") n_input_channels = x.size(1) - self.assertEqual(n_input_channels % (pool_h * pool_w), 0, "input channels must be divisible by ph * pw") + assert n_input_channels % (pool_h * pool_w) == 0, "input channels must be divisible by ph * pw" n_output_channels = int(n_input_channels / (pool_h * pool_w)) y = torch.zeros(rois.size(0), n_output_channels, pool_h, pool_w, dtype=dtype, device=device) @@ -156,7 +351,7 @@ def get_slice(k, block): for roi_idx, roi in enumerate(rois): batch_idx = int(roi[0]) j_begin, i_begin, j_end, i_end = (int(round(x.item() * spatial_scale)) for x in roi[1:]) - roi_x = x[batch_idx, :, i_begin:i_end + 1, j_begin:j_end + 1] + roi_x = x[batch_idx, :, i_begin : i_end + 1, j_begin : j_end + 1] roi_height = max(i_end - i_begin, 1) roi_width = max(j_end - j_begin, 1) @@ -173,55 +368,82 @@ def get_slice(k, block): y[roi_idx, c_out, i, j] = t / area return y + def test_boxes_shape(self): + self._helper_boxes_shape(ops.ps_roi_pool) -def bilinear_interpolate(data, height, width, y, x): - if y < -1.0 or y > height or x < -1.0 or x > width: - return 0. 
- y = min(max(0, y), height - 1) - x = min(max(0, x), width - 1) +def bilinear_interpolate(data, y, x, snap_border=False): + height, width = data.shape - y_low = int(y) - y_high = min(y_low + 1, height - 1) + if snap_border: + if -1 < y <= 0: + y = 0 + elif height - 1 <= y < height: + y = height - 1 - x_low = int(x) - x_high = min(x_low + 1, width - 1) + if -1 < x <= 0: + x = 0 + elif width - 1 <= x < width: + x = width - 1 - wy_h = y - y_low - wy_l = 1 - wy_h + y_low = int(math.floor(y)) + x_low = int(math.floor(x)) + y_high = y_low + 1 + x_high = x_low + 1 + wy_h = y - y_low wx_h = x - x_low + wy_l = 1 - wy_h wx_l = 1 - wx_h val = 0 - for wx, x in zip((wx_l, wx_h), (x_low, x_high)): - for wy, y in zip((wy_l, wy_h), (y_low, y_high)): - val += wx * wy * data[y * width + x] + for wx, xp in zip((wx_l, wx_h), (x_low, x_high)): + for wy, yp in zip((wy_l, wy_h), (y_low, y_high)): + if 0 <= yp < height and 0 <= xp < width: + val += wx * wy * data[yp, xp] return val -class RoIAlignTester(RoIOpTester, unittest.TestCase): - def fn(self, x, rois, pool_h, pool_w, spatial_scale=1, sampling_ratio=-1, **kwargs): - return ops.RoIAlign((pool_h, pool_w), spatial_scale=spatial_scale, - sampling_ratio=sampling_ratio)(x, rois) +class TestRoIAlign(RoIOpTester): + mps_backward_atol = 6e-2 - def get_script_fn(self, rois, pool_size): - @torch.jit.script - def script_fn(input, rois, pool_size): - # type: (torch.Tensor, torch.Tensor, int) -> torch.Tensor - return ops.roi_align(input, rois, pool_size, 1.0)[0] - return lambda x: script_fn(x, rois, pool_size) + def fn(self, x, rois, pool_h, pool_w, spatial_scale=1, sampling_ratio=-1, aligned=False, **kwargs): + return ops.RoIAlign( + (pool_h, pool_w), spatial_scale=spatial_scale, sampling_ratio=sampling_ratio, aligned=aligned + )(x, rois) + + def make_obj(self, pool_h=5, pool_w=5, spatial_scale=1, sampling_ratio=-1, aligned=False, wrap=False): + obj = ops.RoIAlign( + (pool_h, pool_w), spatial_scale=spatial_scale, sampling_ratio=sampling_ratio, aligned=aligned + ) + return RoIOpTesterModuleWrapper(obj) if wrap else obj - def expected_fn(self, in_data, rois, pool_h, pool_w, spatial_scale=1, sampling_ratio=-1, - device=None, dtype=torch.float64): + def get_script_fn(self, rois, pool_size): + scriped = torch.jit.script(ops.roi_align) + return lambda x: scriped(x, rois, pool_size) + + def expected_fn( + self, + in_data, + rois, + pool_h, + pool_w, + spatial_scale=1, + sampling_ratio=-1, + aligned=False, + device=None, + dtype=torch.float64, + ): if device is None: device = torch.device("cpu") n_channels = in_data.size(1) out_data = torch.zeros(rois.size(0), n_channels, pool_h, pool_w, dtype=dtype, device=device) + offset = 0.5 if aligned else 0.0 + for r, roi in enumerate(rois): batch_idx = int(roi[0]) - j_begin, i_begin, j_end, i_end = (x.item() * spatial_scale for x in roi[1:]) + j_begin, i_begin, j_end, i_end = (x.item() * spatial_scale - offset for x in roi[1:]) roi_h = i_end - i_begin roi_w = j_end - j_begin @@ -236,42 +458,185 @@ def expected_fn(self, in_data, rois, pool_h, pool_w, spatial_scale=1, sampling_r grid_w = sampling_ratio if sampling_ratio > 0 else int(np.ceil(bin_w)) for channel in range(0, n_channels): - val = 0 for iy in range(0, grid_h): y = start_h + (iy + 0.5) * bin_h / grid_h for ix in range(0, grid_w): x = start_w + (ix + 0.5) * bin_w / grid_w - val += bilinear_interpolate( - in_data[batch_idx, channel, :, :].flatten(), - in_data.size(-2), - in_data.size(-1), - y, x - ) + val += bilinear_interpolate(in_data[batch_idx, channel, :, :], y, x, 
snap_border=True) val /= grid_h * grid_w out_data[r, channel, i, j] = val return out_data + def test_boxes_shape(self): + self._helper_boxes_shape(ops.roi_align) + + @pytest.mark.parametrize("aligned", (True, False)) + @pytest.mark.parametrize("device", cpu_and_cuda_and_mps()) + @pytest.mark.parametrize("x_dtype", (torch.float16, torch.float32, torch.float64)) # , ids=str) + @pytest.mark.parametrize("contiguous", (True, False)) + @pytest.mark.parametrize("deterministic", (True, False)) + @pytest.mark.opcheck_only_one() + def test_forward(self, device, contiguous, deterministic, aligned, x_dtype, rois_dtype=None): + if deterministic and device == "cpu": + pytest.skip("cpu is always deterministic, don't retest") + super().test_forward( + device=device, + contiguous=contiguous, + deterministic=deterministic, + x_dtype=x_dtype, + rois_dtype=rois_dtype, + aligned=aligned, + ) + + @needs_cuda + @pytest.mark.parametrize("aligned", (True, False)) + @pytest.mark.parametrize("deterministic", (True, False)) + @pytest.mark.parametrize("x_dtype", (torch.float, torch.half)) + @pytest.mark.parametrize("rois_dtype", (torch.float, torch.half)) + @pytest.mark.opcheck_only_one() + def test_autocast(self, aligned, deterministic, x_dtype, rois_dtype): + with torch.cuda.amp.autocast(): + self.test_forward( + torch.device("cuda"), + contiguous=False, + deterministic=deterministic, + aligned=aligned, + x_dtype=x_dtype, + rois_dtype=rois_dtype, + ) + + @pytest.mark.skip(reason="1/5000 flaky failure") + @pytest.mark.parametrize("aligned", (True, False)) + @pytest.mark.parametrize("deterministic", (True, False)) + @pytest.mark.parametrize("x_dtype", (torch.float, torch.bfloat16)) + @pytest.mark.parametrize("rois_dtype", (torch.float, torch.bfloat16)) + def test_autocast_cpu(self, aligned, deterministic, x_dtype, rois_dtype): + with torch.cpu.amp.autocast(): + self.test_forward( + torch.device("cpu"), + contiguous=False, + deterministic=deterministic, + aligned=aligned, + x_dtype=x_dtype, + rois_dtype=rois_dtype, + ) + + @pytest.mark.parametrize("seed", range(10)) + @pytest.mark.parametrize("device", cpu_and_cuda_and_mps()) + @pytest.mark.parametrize("contiguous", (True, False)) + @pytest.mark.parametrize("deterministic", (True, False)) + @pytest.mark.opcheck_only_one() + def test_backward(self, seed, device, contiguous, deterministic): + if deterministic and device == "cpu": + pytest.skip("cpu is always deterministic, don't retest") + if deterministic and device == "mps": + pytest.skip("no deterministic implementation for mps") + if deterministic and not is_compile_supported(device): + pytest.skip("deterministic implementation only if torch.compile supported") + super().test_backward(seed, device, contiguous, deterministic) + + def _make_rois(self, img_size, num_imgs, dtype, num_rois=1000): + rois = torch.randint(0, img_size // 2, size=(num_rois, 5)).to(dtype) + rois[:, 0] = torch.randint(0, num_imgs, size=(num_rois,)) # set batch index + rois[:, 3:] += rois[:, 1:3] # make sure boxes aren't degenerate + return rois + + @pytest.mark.parametrize("aligned", (True, False)) + @pytest.mark.parametrize("scale, zero_point", ((1, 0), (2, 10), (0.1, 50))) + @pytest.mark.parametrize("qdtype", (torch.qint8, torch.quint8, torch.qint32)) + @pytest.mark.opcheck_only_one() + def test_qroialign(self, aligned, scale, zero_point, qdtype): + """Make sure quantized version of RoIAlign is close to float version""" + pool_size = 5 + img_size = 10 + n_channels = 2 + num_imgs = 1 + dtype = torch.float + + x = torch.randint(50, 100, 
size=(num_imgs, n_channels, img_size, img_size)).to(dtype) + qx = torch.quantize_per_tensor(x, scale=scale, zero_point=zero_point, dtype=qdtype) + + rois = self._make_rois(img_size, num_imgs, dtype) + qrois = torch.quantize_per_tensor(rois, scale=scale, zero_point=zero_point, dtype=qdtype) + + x, rois = qx.dequantize(), qrois.dequantize() # we want to pass the same inputs + + y = ops.roi_align( + x, + rois, + output_size=pool_size, + spatial_scale=1, + sampling_ratio=-1, + aligned=aligned, + ) + qy = ops.roi_align( + qx, + qrois, + output_size=pool_size, + spatial_scale=1, + sampling_ratio=-1, + aligned=aligned, + ) + + # The output qy is itself a quantized tensor and there might have been a loss of info when it was + # quantized. For a fair comparison we need to quantize y as well + quantized_float_y = torch.quantize_per_tensor(y, scale=scale, zero_point=zero_point, dtype=qdtype) + + try: + # Ideally, we would assert this, which passes with (scale, zero) == (1, 0) + assert (qy == quantized_float_y).all() + except AssertionError: + # But because the computations aren't exactly the same between the 2 RoIAlign procedures, some + # rounding error may lead to a difference of 2 in the output. + # For example with (scale, zero) = (2, 10), 45.00000... will be quantized to 44 + # but 45.00000001 will be rounded to 46. We make sure below that: + # - such discrepancies between qy and quantized_float_y are very rare (less than 5%) + # - any difference between qy and quantized_float_y is == scale + diff_idx = torch.where(qy != quantized_float_y) + num_diff = diff_idx[0].numel() + assert num_diff / qy.numel() < 0.05 + + abs_diff = torch.abs(qy[diff_idx].dequantize() - quantized_float_y[diff_idx].dequantize()) + t_scale = torch.full_like(abs_diff, fill_value=scale) + torch.testing.assert_close(abs_diff, t_scale, rtol=1e-5, atol=1e-5) + + def test_qroi_align_multiple_images(self): + dtype = torch.float + x = torch.randint(50, 100, size=(2, 3, 10, 10)).to(dtype) + qx = torch.quantize_per_tensor(x, scale=1, zero_point=0, dtype=torch.qint8) + rois = self._make_rois(img_size=10, num_imgs=2, dtype=dtype, num_rois=10) + qrois = torch.quantize_per_tensor(rois, scale=1, zero_point=0, dtype=torch.qint8) + with pytest.raises(RuntimeError, match="Only one image per batch is allowed"): + ops.roi_align(qx, qrois, output_size=5) + + def test_jit_boxes_list(self): + model = PoolWrapper(ops.RoIAlign(output_size=[3, 3], spatial_scale=1.0, sampling_ratio=-1)) + self._helper_jit_boxes_list(model) + + +class TestPSRoIAlign(RoIOpTester): + mps_backward_atol = 5e-2 -class PSRoIAlignTester(RoIOpTester, unittest.TestCase): def fn(self, x, rois, pool_h, pool_w, spatial_scale=1, sampling_ratio=-1, **kwargs): - return ops.PSRoIAlign((pool_h, pool_w), spatial_scale=spatial_scale, - sampling_ratio=sampling_ratio)(x, rois) + return ops.PSRoIAlign((pool_h, pool_w), spatial_scale=spatial_scale, sampling_ratio=sampling_ratio)(x, rois) + + def make_obj(self, pool_h=5, pool_w=5, spatial_scale=1, sampling_ratio=-1, wrap=False): + obj = ops.PSRoIAlign((pool_h, pool_w), spatial_scale=spatial_scale, sampling_ratio=sampling_ratio) + return RoIOpTesterModuleWrapper(obj) if wrap else obj def get_script_fn(self, rois, pool_size): - @torch.jit.script - def script_fn(input, rois, pool_size): - # type: (torch.Tensor, torch.Tensor, int) -> torch.Tensor - return ops.ps_roi_align(input, rois, pool_size, 1.0)[0] - return lambda x: script_fn(x, rois, pool_size) + scriped = torch.jit.script(ops.ps_roi_align) + return lambda x: scriped(x, rois, pool_size) - 
def expected_fn(self, in_data, rois, pool_h, pool_w, device, spatial_scale=1, - sampling_ratio=-1, dtype=torch.float64): + def expected_fn( + self, in_data, rois, pool_h, pool_w, device, spatial_scale=1, sampling_ratio=-1, dtype=torch.float64 + ): if device is None: device = torch.device("cpu") n_input_channels = in_data.size(1) - self.assertEqual(n_input_channels % (pool_h * pool_w), 0, "input channels must be divisible by ph * pw") + assert n_input_channels % (pool_h * pool_w) == 0, "input channels must be divisible by ph * pw" n_output_channels = int(n_input_channels / (pool_h * pool_w)) out_data = torch.zeros(rois.size(0), n_output_channels, pool_h, pool_w, dtype=dtype, device=device) @@ -298,24 +663,91 @@ def expected_fn(self, in_data, rois, pool_h, pool_w, device, spatial_scale=1, y = start_h + (iy + 0.5) * bin_h / grid_h for ix in range(0, grid_w): x = start_w + (ix + 0.5) * bin_w / grid_w - val += bilinear_interpolate( - in_data[batch_idx, c_in, :, :].flatten(), - in_data.size(-2), - in_data.size(-1), - y, x - ) + val += bilinear_interpolate(in_data[batch_idx, c_in, :, :], y, x, snap_border=True) val /= grid_h * grid_w out_data[r, c_out, i, j] = val return out_data - -class NMSTester(unittest.TestCase): - def reference_nms(self, boxes, scores, iou_threshold): + def test_boxes_shape(self): + self._helper_boxes_shape(ops.ps_roi_align) + + +@pytest.mark.parametrize( + "op", + ( + torch.ops.torchvision.roi_pool, + torch.ops.torchvision.ps_roi_pool, + torch.ops.torchvision.roi_align, + torch.ops.torchvision.ps_roi_align, + ), +) +@pytest.mark.parametrize("dtype", (torch.float16, torch.float32, torch.float64)) +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize("requires_grad", (True, False)) +def test_roi_opcheck(op, dtype, device, requires_grad): + # This manually calls opcheck() on the roi ops. We do that instead of + # relying on opcheck.generate_opcheck_tests() as e.g. done for nms, because + # pytest and generate_opcheck_tests() don't interact very well when it comes + # to skipping tests - and these ops need to skip the MPS tests since MPS we + # don't support dynamic shapes yet for MPS. 
+ rois = torch.tensor( + [[0, 0, 0, 9, 9], [0, 0, 5, 4, 9], [0, 5, 5, 9, 9], [1, 0, 0, 9, 9]], + dtype=dtype, + device=device, + requires_grad=requires_grad, + ) + pool_size = 5 + num_channels = 2 * (pool_size**2) + x = torch.rand(2, num_channels, 10, 10, dtype=dtype, device=device) + + kwargs = dict(rois=rois, spatial_scale=1, pooled_height=pool_size, pooled_width=pool_size) + if op in (torch.ops.torchvision.roi_align, torch.ops.torchvision.ps_roi_align): + kwargs["sampling_ratio"] = -1 + if op is torch.ops.torchvision.roi_align: + kwargs["aligned"] = True + + optests.opcheck(op, args=(x,), kwargs=kwargs) + + +class TestMultiScaleRoIAlign: + def make_obj(self, fmap_names=None, output_size=(7, 7), sampling_ratio=2, wrap=False): + if fmap_names is None: + fmap_names = ["0"] + obj = ops.poolers.MultiScaleRoIAlign(fmap_names, output_size, sampling_ratio) + return MultiScaleRoIAlignModuleWrapper(obj) if wrap else obj + + def test_msroialign_repr(self): + fmap_names = ["0"] + output_size = (7, 7) + sampling_ratio = 2 + # Pass mock feature map names + t = self.make_obj(fmap_names, output_size, sampling_ratio, wrap=False) + + # Check integrity of object __repr__ attribute + expected_string = ( + f"MultiScaleRoIAlign(featmap_names={fmap_names}, output_size={output_size}, " + f"sampling_ratio={sampling_ratio})" + ) + assert repr(t) == expected_string + + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_is_leaf_node(self, device): + op_obj = self.make_obj(wrap=True).to(device=device) + graph_node_names = get_graph_node_names(op_obj) + + assert len(graph_node_names) == 2 + assert len(graph_node_names[0]) == len(graph_node_names[1]) + assert len(graph_node_names[0]) == 1 + op_obj.n_inputs + + +class TestNMS: + def _reference_nms(self, boxes, scores, iou_threshold): """ Args: - box_scores (N, 5): boxes in corner-form and probabilities. - iou_threshold: intersection over union threshold. + boxes: boxes in corner-form + scores: probabilities + iou_threshold: intersection over union threshold Returns: picked: a list of indexes of the kept boxes """ @@ -339,33 +771,1207 @@ def _create_tensors_with_iou(self, N, iou_thresh): # let b0 be [x0, y0, x1, y1], and b1 be [x0, y0, x1 + d, y1], # then, in order to satisfy ops.iou(b0, b1) == iou_thresh, # we need to have d = (x1 - x0) * (1 - iou_thresh) / iou_thresh + # Adjust the threshold upward a bit with the intent of creating + # at least one box that exceeds (barely) the threshold and so + # should be suppressed. 
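# [Editor's note] Worked form of the relation quoted above: the two boxes
# share the same height, the intersection width is (x1 - x0) and the union
# width is (x1 - x0 + d), so
#     IoU = (x1 - x0) / (x1 - x0 + d)
# Solving IoU == iou_thresh for d gives
#     d = (x1 - x0) * (1 - iou_thresh) / iou_thresh,
# which is exactly the offset applied to boxes[-1, 2] below.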
boxes = torch.rand(N, 4) * 100 boxes[:, 2:] += boxes[:, :2] boxes[-1, :] = boxes[0, :] x0, y0, x1, y1 = boxes[-1].tolist() + iou_thresh += 1e-5 boxes[-1, 2] += (x1 - x0) * (1 - iou_thresh) / iou_thresh scores = torch.rand(N) return boxes, scores - def test_nms(self): - err_msg = 'NMS incompatible between CPU and reference implementation for IoU={}' - for iou in [0.2, 0.5, 0.8]: - boxes, scores = self._create_tensors_with_iou(1000, iou) - keep_ref = self.reference_nms(boxes, scores, iou) - keep = ops.nms(boxes, scores, iou) - self.assertTrue(torch.allclose(keep, keep_ref), err_msg.format(iou)) + @pytest.mark.parametrize("iou", (0.2, 0.5, 0.8)) + @pytest.mark.parametrize("seed", range(10)) + @pytest.mark.opcheck_only_one() + def test_nms_ref(self, iou, seed): + torch.random.manual_seed(seed) + err_msg = "NMS incompatible between CPU and reference implementation for IoU={}" + boxes, scores = self._create_tensors_with_iou(1000, iou) + keep_ref = self._reference_nms(boxes, scores, iou) + keep = ops.nms(boxes, scores, iou) + torch.testing.assert_close(keep, keep_ref, msg=err_msg.format(iou)) + + def test_nms_input_errors(self): + with pytest.raises(RuntimeError): + ops.nms(torch.rand(4), torch.rand(3), 0.5) + with pytest.raises(RuntimeError): + ops.nms(torch.rand(3, 5), torch.rand(3), 0.5) + with pytest.raises(RuntimeError): + ops.nms(torch.rand(3, 4), torch.rand(3, 2), 0.5) + with pytest.raises(RuntimeError): + ops.nms(torch.rand(3, 4), torch.rand(4), 0.5) + + @pytest.mark.parametrize("iou", (0.2, 0.5, 0.8)) + @pytest.mark.parametrize("scale, zero_point", ((1, 0), (2, 50), (3, 10))) + @pytest.mark.opcheck_only_one() + def test_qnms(self, iou, scale, zero_point): + # Note: we compare qnms vs nms instead of qnms vs reference implementation. + # This is because with the int conversion, the trick used in _create_tensors_with_iou + # doesn't really work (in fact, nms vs reference implem will also fail with ints) + err_msg = "NMS and QNMS give different results for IoU={}" + boxes, scores = self._create_tensors_with_iou(1000, iou) + scores *= 100 # otherwise most scores would be 0 or 1 after int conversion + + qboxes = torch.quantize_per_tensor(boxes, scale=scale, zero_point=zero_point, dtype=torch.quint8) + qscores = torch.quantize_per_tensor(scores, scale=scale, zero_point=zero_point, dtype=torch.quint8) + + boxes = qboxes.dequantize() + scores = qscores.dequantize() + + keep = ops.nms(boxes, scores, iou) + qkeep = ops.nms(qboxes, qscores, iou) + + torch.testing.assert_close(qkeep, keep, msg=err_msg.format(iou)) + + @pytest.mark.parametrize( + "device", + ( + pytest.param("cuda", marks=pytest.mark.needs_cuda), + pytest.param("mps", marks=pytest.mark.needs_mps), + ), + ) + @pytest.mark.parametrize("iou", (0.2, 0.5, 0.8)) + @pytest.mark.opcheck_only_one() + def test_nms_gpu(self, iou, device, dtype=torch.float64): + dtype = torch.float32 if device == "mps" else dtype + tol = 1e-3 if dtype is torch.half else 1e-5 + err_msg = "NMS incompatible between CPU and CUDA for IoU={}" + + boxes, scores = self._create_tensors_with_iou(1000, iou) + r_cpu = ops.nms(boxes, scores, iou) + r_gpu = ops.nms(boxes.to(device), scores.to(device), iou) + + is_eq = torch.allclose(r_cpu, r_gpu.cpu()) + if not is_eq: + # if the indices are not the same, ensure that it's because the scores + # are duplicate + is_eq = torch.allclose(scores[r_cpu], scores[r_gpu.cpu()], rtol=tol, atol=tol) + assert is_eq, err_msg.format(iou) + + @needs_cuda + @pytest.mark.parametrize("iou", (0.2, 0.5, 0.8)) + @pytest.mark.parametrize("dtype", 
(torch.float, torch.half)) + @pytest.mark.opcheck_only_one() + def test_autocast(self, iou, dtype): + with torch.cuda.amp.autocast(): + self.test_nms_gpu(iou=iou, dtype=dtype, device="cuda") + + @pytest.mark.parametrize("iou", (0.2, 0.5, 0.8)) + @pytest.mark.parametrize("dtype", (torch.float, torch.bfloat16)) + def test_autocast_cpu(self, iou, dtype): + boxes, scores = self._create_tensors_with_iou(1000, iou) + with torch.cpu.amp.autocast(): + keep_ref_float = ops.nms(boxes.to(dtype).float(), scores.to(dtype).float(), iou) + keep_dtype = ops.nms(boxes.to(dtype), scores.to(dtype), iou) + torch.testing.assert_close(keep_ref_float, keep_dtype) + + @pytest.mark.parametrize( + "device", + ( + pytest.param("cuda", marks=pytest.mark.needs_cuda), + pytest.param("mps", marks=pytest.mark.needs_mps), + ), + ) + @pytest.mark.opcheck_only_one() + def test_nms_float16(self, device): + boxes = torch.tensor( + [ + [285.3538, 185.5758, 1193.5110, 851.4551], + [285.1472, 188.7374, 1192.4984, 851.0669], + [279.2440, 197.9812, 1189.4746, 849.2019], + ] + ).to(device) + scores = torch.tensor([0.6370, 0.7569, 0.3966]).to(device) + + iou_thres = 0.2 + keep32 = ops.nms(boxes, scores, iou_thres) + keep16 = ops.nms(boxes.to(torch.float16), scores.to(torch.float16), iou_thres) + assert_equal(keep32, keep16) + + @pytest.mark.parametrize("seed", range(10)) + @pytest.mark.opcheck_only_one() + def test_batched_nms_implementations(self, seed): + """Make sure that both implementations of batched_nms yield identical results""" + torch.random.manual_seed(seed) + + num_boxes = 1000 + iou_threshold = 0.9 + + boxes = torch.cat((torch.rand(num_boxes, 2), torch.rand(num_boxes, 2) + 10), dim=1) + assert max(boxes[:, 0]) < min(boxes[:, 2]) # x1 < x2 + assert max(boxes[:, 1]) < min(boxes[:, 3]) # y1 < y2 + + scores = torch.rand(num_boxes) + idxs = torch.randint(0, 4, size=(num_boxes,)) + keep_vanilla = ops.boxes._batched_nms_vanilla(boxes, scores, idxs, iou_threshold) + keep_trick = ops.boxes._batched_nms_coordinate_trick(boxes, scores, idxs, iou_threshold) + + torch.testing.assert_close( + keep_vanilla, keep_trick, msg="The vanilla and the trick implementation yield different nms outputs." 
+ ) + + # Also make sure an empty tensor is returned if boxes is empty + empty = torch.empty((0,), dtype=torch.int64) + torch.testing.assert_close(empty, ops.batched_nms(empty, None, None, None)) + + +optests.generate_opcheck_tests( + testcase=TestNMS, + namespaces=["torchvision"], + failures_dict_path=os.path.join(os.path.dirname(__file__), "optests_failures_dict.json"), + additional_decorators=[], + test_utils=OPTESTS, +) + + +class TestDeformConv: + dtype = torch.float64 + + def expected_fn(self, x, weight, offset, mask, bias, stride=1, padding=0, dilation=1): + stride_h, stride_w = _pair(stride) + pad_h, pad_w = _pair(padding) + dil_h, dil_w = _pair(dilation) + weight_h, weight_w = weight.shape[-2:] + + n_batches, n_in_channels, in_h, in_w = x.shape + n_out_channels = weight.shape[0] + + out_h = (in_h + 2 * pad_h - (dil_h * (weight_h - 1) + 1)) // stride_h + 1 + out_w = (in_w + 2 * pad_w - (dil_w * (weight_w - 1) + 1)) // stride_w + 1 + + n_offset_grps = offset.shape[1] // (2 * weight_h * weight_w) + in_c_per_offset_grp = n_in_channels // n_offset_grps + + n_weight_grps = n_in_channels // weight.shape[1] + in_c_per_weight_grp = weight.shape[1] + out_c_per_weight_grp = n_out_channels // n_weight_grps + + out = torch.zeros(n_batches, n_out_channels, out_h, out_w, device=x.device, dtype=x.dtype) + for b in range(n_batches): + for c_out in range(n_out_channels): + for i in range(out_h): + for j in range(out_w): + for di in range(weight_h): + for dj in range(weight_w): + for c in range(in_c_per_weight_grp): + weight_grp = c_out // out_c_per_weight_grp + c_in = weight_grp * in_c_per_weight_grp + c + + offset_grp = c_in // in_c_per_offset_grp + mask_idx = offset_grp * (weight_h * weight_w) + di * weight_w + dj + offset_idx = 2 * mask_idx + + pi = stride_h * i - pad_h + dil_h * di + offset[b, offset_idx, i, j] + pj = stride_w * j - pad_w + dil_w * dj + offset[b, offset_idx + 1, i, j] + + mask_value = 1.0 + if mask is not None: + mask_value = mask[b, mask_idx, i, j] + + out[b, c_out, i, j] += ( + mask_value + * weight[c_out, c, di, dj] + * bilinear_interpolate(x[b, c_in, :, :], pi, pj) + ) + out += bias.view(1, n_out_channels, 1, 1) + return out + + @lru_cache(maxsize=None) + def get_fn_args(self, device, contiguous, batch_sz, dtype): + n_in_channels = 6 + n_out_channels = 2 + n_weight_grps = 2 + n_offset_grps = 3 + + stride = (2, 1) + pad = (1, 0) + dilation = (2, 1) + + stride_h, stride_w = stride + pad_h, pad_w = pad + dil_h, dil_w = dilation + weight_h, weight_w = (3, 2) + in_h, in_w = (5, 4) + + out_h = (in_h + 2 * pad_h - (dil_h * (weight_h - 1) + 1)) // stride_h + 1 + out_w = (in_w + 2 * pad_w - (dil_w * (weight_w - 1) + 1)) // stride_w + 1 + + x = torch.rand(batch_sz, n_in_channels, in_h, in_w, device=device, dtype=dtype, requires_grad=True) + + offset = torch.randn( + batch_sz, + n_offset_grps * 2 * weight_h * weight_w, + out_h, + out_w, + device=device, + dtype=dtype, + requires_grad=True, + ) + + mask = torch.randn( + batch_sz, n_offset_grps * weight_h * weight_w, out_h, out_w, device=device, dtype=dtype, requires_grad=True + ) + + weight = torch.randn( + n_out_channels, + n_in_channels // n_weight_grps, + weight_h, + weight_w, + device=device, + dtype=dtype, + requires_grad=True, + ) + + bias = torch.randn(n_out_channels, device=device, dtype=dtype, requires_grad=True) - @unittest.skipIf(not torch.cuda.is_available(), "CUDA unavailable") - def test_nms_cuda(self): - err_msg = 'NMS incompatible between CPU and CUDA for IoU={}' - - for iou in [0.2, 0.5, 0.8]: - boxes, scores = 
self._create_tensors_with_iou(1000, iou) - r_cpu = ops.nms(boxes, scores, iou) - r_cuda = ops.nms(boxes.cuda(), scores.cuda(), iou) + if not contiguous: + x = x.permute(0, 1, 3, 2).contiguous().permute(0, 1, 3, 2) + offset = offset.permute(1, 3, 0, 2).contiguous().permute(2, 0, 3, 1) + mask = mask.permute(1, 3, 0, 2).contiguous().permute(2, 0, 3, 1) + weight = weight.permute(3, 2, 0, 1).contiguous().permute(2, 3, 1, 0) + + return x, weight, offset, mask, bias, stride, pad, dilation + + def make_obj(self, in_channels=6, out_channels=2, kernel_size=(3, 2), groups=2, wrap=False): + obj = ops.DeformConv2d( + in_channels, out_channels, kernel_size, stride=(2, 1), padding=(1, 0), dilation=(2, 1), groups=groups + ) + return DeformConvModuleWrapper(obj) if wrap else obj + + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_is_leaf_node(self, device): + op_obj = self.make_obj(wrap=True).to(device=device) + graph_node_names = get_graph_node_names(op_obj) + + assert len(graph_node_names) == 2 + assert len(graph_node_names[0]) == len(graph_node_names[1]) + assert len(graph_node_names[0]) == 1 + op_obj.n_inputs + + @pytest.mark.parametrize("device", cpu_and_cuda()) + @pytest.mark.parametrize("contiguous", (True, False)) + @pytest.mark.parametrize("batch_sz", (0, 33)) + @pytest.mark.opcheck_only_one() + def test_forward(self, device, contiguous, batch_sz, dtype=None): + dtype = dtype or self.dtype + x, _, offset, mask, _, stride, padding, dilation = self.get_fn_args(device, contiguous, batch_sz, dtype) + in_channels = 6 + out_channels = 2 + kernel_size = (3, 2) + groups = 2 + tol = 2e-3 if dtype is torch.half else 1e-5 + + layer = self.make_obj(in_channels, out_channels, kernel_size, groups, wrap=False).to( + device=x.device, dtype=dtype + ) + res = layer(x, offset, mask) + + weight = layer.weight.data + bias = layer.bias.data + expected = self.expected_fn(x, weight, offset, mask, bias, stride=stride, padding=padding, dilation=dilation) + + torch.testing.assert_close( + res.to(expected), expected, rtol=tol, atol=tol, msg=f"\nres:\n{res}\nexpected:\n{expected}" + ) + + # no modulation test + res = layer(x, offset) + expected = self.expected_fn(x, weight, offset, None, bias, stride=stride, padding=padding, dilation=dilation) + + torch.testing.assert_close( + res.to(expected), expected, rtol=tol, atol=tol, msg=f"\nres:\n{res}\nexpected:\n{expected}" + ) + + def test_wrong_sizes(self): + in_channels = 6 + out_channels = 2 + kernel_size = (3, 2) + groups = 2 + x, _, offset, mask, _, stride, padding, dilation = self.get_fn_args( + "cpu", contiguous=True, batch_sz=10, dtype=self.dtype + ) + layer = ops.DeformConv2d( + in_channels, out_channels, kernel_size, stride=stride, padding=padding, dilation=dilation, groups=groups + ) + with pytest.raises(RuntimeError, match="the shape of the offset"): + wrong_offset = torch.rand_like(offset[:, :2]) + layer(x, wrong_offset) + + with pytest.raises(RuntimeError, match=r"mask.shape\[1\] is not valid"): + wrong_mask = torch.rand_like(mask[:, :2]) + layer(x, offset, wrong_mask) + + @pytest.mark.parametrize("device", cpu_and_cuda()) + @pytest.mark.parametrize("contiguous", (True, False)) + @pytest.mark.parametrize("batch_sz", (0, 33)) + @pytest.mark.opcheck_only_one() + def test_backward(self, device, contiguous, batch_sz): + x, weight, offset, mask, bias, stride, padding, dilation = self.get_fn_args( + device, contiguous, batch_sz, self.dtype + ) + + def func(x_, offset_, mask_, weight_, bias_): + return ops.deform_conv2d( + x_, offset_, weight_, bias_, 
stride=stride, padding=padding, dilation=dilation, mask=mask_ + ) + + gradcheck(func, (x, offset, mask, weight, bias), nondet_tol=1e-5, fast_mode=True) + + def func_no_mask(x_, offset_, weight_, bias_): + return ops.deform_conv2d( + x_, offset_, weight_, bias_, stride=stride, padding=padding, dilation=dilation, mask=None + ) + + gradcheck(func_no_mask, (x, offset, weight, bias), nondet_tol=1e-5, fast_mode=True) - self.assertTrue(torch.allclose(r_cpu, r_cuda.cpu()), err_msg.format(iou)) + @torch.jit.script + def script_func(x_, offset_, mask_, weight_, bias_, stride_, pad_, dilation_): + # type:(Tensor, Tensor, Tensor, Tensor, Tensor, Tuple[int, int], Tuple[int, int], Tuple[int, int])->Tensor + return ops.deform_conv2d( + x_, offset_, weight_, bias_, stride=stride_, padding=pad_, dilation=dilation_, mask=mask_ + ) + + gradcheck( + lambda z, off, msk, wei, bi: script_func(z, off, msk, wei, bi, stride, padding, dilation), + (x, offset, mask, weight, bias), + nondet_tol=1e-5, + fast_mode=True, + ) + @torch.jit.script + def script_func_no_mask(x_, offset_, weight_, bias_, stride_, pad_, dilation_): + # type:(Tensor, Tensor, Tensor, Tensor, Tuple[int, int], Tuple[int, int], Tuple[int, int])->Tensor + return ops.deform_conv2d( + x_, offset_, weight_, bias_, stride=stride_, padding=pad_, dilation=dilation_, mask=None + ) + + gradcheck( + lambda z, off, wei, bi: script_func_no_mask(z, off, wei, bi, stride, padding, dilation), + (x, offset, weight, bias), + nondet_tol=1e-5, + fast_mode=True, + ) + + @needs_cuda + @pytest.mark.parametrize("contiguous", (True, False)) + @pytest.mark.opcheck_only_one() + def test_compare_cpu_cuda_grads(self, contiguous): + # Test from https://github.com/pytorch/vision/issues/2598 + # Run on CUDA only + + # compare grads computed on CUDA with grads computed on CPU + true_cpu_grads = None + + init_weight = torch.randn(9, 9, 3, 3, requires_grad=True) + img = torch.randn(8, 9, 1000, 110) + offset = torch.rand(8, 2 * 3 * 3, 1000, 110) + mask = torch.rand(8, 3 * 3, 1000, 110) -if __name__ == '__main__': - unittest.main() + if not contiguous: + img = img.permute(0, 1, 3, 2).contiguous().permute(0, 1, 3, 2) + offset = offset.permute(1, 3, 0, 2).contiguous().permute(2, 0, 3, 1) + mask = mask.permute(1, 3, 0, 2).contiguous().permute(2, 0, 3, 1) + weight = init_weight.permute(3, 2, 0, 1).contiguous().permute(2, 3, 1, 0) + else: + weight = init_weight + + for d in ["cpu", "cuda"]: + out = ops.deform_conv2d(img.to(d), offset.to(d), weight.to(d), padding=1, mask=mask.to(d)) + out.mean().backward() + if true_cpu_grads is None: + true_cpu_grads = init_weight.grad + assert true_cpu_grads is not None + else: + assert init_weight.grad is not None + res_grads = init_weight.grad.to("cpu") + torch.testing.assert_close(true_cpu_grads, res_grads) + + @needs_cuda + @pytest.mark.parametrize("batch_sz", (0, 33)) + @pytest.mark.parametrize("dtype", (torch.float, torch.half)) + @pytest.mark.opcheck_only_one() + def test_autocast(self, batch_sz, dtype): + with torch.cuda.amp.autocast(): + self.test_forward(torch.device("cuda"), contiguous=False, batch_sz=batch_sz, dtype=dtype) + + def test_forward_scriptability(self): + # Non-regression test for https://github.com/pytorch/vision/issues/4078 + torch.jit.script(ops.DeformConv2d(in_channels=8, out_channels=8, kernel_size=3)) + + +optests.generate_opcheck_tests( + testcase=TestDeformConv, + namespaces=["torchvision"], + failures_dict_path=os.path.join(os.path.dirname(__file__), "optests_failures_dict.json"), + additional_decorators=[], + 
test_utils=OPTESTS, +) + + +class TestFrozenBNT: + def test_frozenbatchnorm2d_repr(self): + num_features = 32 + eps = 1e-5 + t = ops.misc.FrozenBatchNorm2d(num_features, eps=eps) + + # Check integrity of object __repr__ attribute + expected_string = f"FrozenBatchNorm2d({num_features}, eps={eps})" + assert repr(t) == expected_string + + @pytest.mark.parametrize("seed", range(10)) + def test_frozenbatchnorm2d_eps(self, seed): + torch.random.manual_seed(seed) + sample_size = (4, 32, 28, 28) + x = torch.rand(sample_size) + state_dict = dict( + weight=torch.rand(sample_size[1]), + bias=torch.rand(sample_size[1]), + running_mean=torch.rand(sample_size[1]), + running_var=torch.rand(sample_size[1]), + num_batches_tracked=torch.tensor(100), + ) + + # Check that default eps is equal to the one of BN + fbn = ops.misc.FrozenBatchNorm2d(sample_size[1]) + fbn.load_state_dict(state_dict, strict=False) + bn = torch.nn.BatchNorm2d(sample_size[1]).eval() + bn.load_state_dict(state_dict) + # Difference is expected to fall in an acceptable range + torch.testing.assert_close(fbn(x), bn(x), rtol=1e-5, atol=1e-6) + + # Check computation for eps > 0 + fbn = ops.misc.FrozenBatchNorm2d(sample_size[1], eps=1e-5) + fbn.load_state_dict(state_dict, strict=False) + bn = torch.nn.BatchNorm2d(sample_size[1], eps=1e-5).eval() + bn.load_state_dict(state_dict) + torch.testing.assert_close(fbn(x), bn(x), rtol=1e-5, atol=1e-6) + + +class TestBoxConversionToRoi: + def _get_box_sequences(): + # Define here the argument type of `boxes` supported by region pooling operations + box_tensor = torch.tensor([[0, 0, 0, 100, 100], [1, 0, 0, 100, 100]], dtype=torch.float) + box_list = [ + torch.tensor([[0, 0, 100, 100]], dtype=torch.float), + torch.tensor([[0, 0, 100, 100]], dtype=torch.float), + ] + box_tuple = tuple(box_list) + return box_tensor, box_list, box_tuple + + @pytest.mark.parametrize("box_sequence", _get_box_sequences()) + def test_check_roi_boxes_shape(self, box_sequence): + # Ensure common sequences of tensors are supported + ops._utils.check_roi_boxes_shape(box_sequence) + + @pytest.mark.parametrize("box_sequence", _get_box_sequences()) + def test_convert_boxes_to_roi_format(self, box_sequence): + # Ensure common sequences of tensors yield the same result + ref_tensor = None + if ref_tensor is None: + ref_tensor = box_sequence + else: + assert_equal(ref_tensor, ops._utils.convert_boxes_to_roi_format(box_sequence)) + + +class TestBoxConvert: + def test_bbox_same(self): + box_tensor = torch.tensor( + [[0, 0, 100, 100], [0, 0, 0, 0], [10, 15, 30, 35], [23, 35, 93, 95]], dtype=torch.float + ) + + exp_xyxy = torch.tensor([[0, 0, 100, 100], [0, 0, 0, 0], [10, 15, 30, 35], [23, 35, 93, 95]], dtype=torch.float) + + assert exp_xyxy.size() == torch.Size([4, 4]) + assert_equal(ops.box_convert(box_tensor, in_fmt="xyxy", out_fmt="xyxy"), exp_xyxy) + assert_equal(ops.box_convert(box_tensor, in_fmt="xywh", out_fmt="xywh"), exp_xyxy) + assert_equal(ops.box_convert(box_tensor, in_fmt="cxcywh", out_fmt="cxcywh"), exp_xyxy) + + def test_bbox_xyxy_xywh(self): + # Simple test convert boxes to xywh and back. Make sure they are same. + # box_tensor is in x1 y1 x2 y2 format. 
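# [Editor's note] Conversion under test: xywh keeps the top-left corner and
# stores width/height, i.e. (x1, y1, x2, y2) -> (x1, y1, x2 - x1, y2 - y1),
# so [10, 15, 30, 35] becomes [10, 15, 20, 20]; converting back must restore
# the original tensor exactly.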
+ box_tensor = torch.tensor( + [[0, 0, 100, 100], [0, 0, 0, 0], [10, 15, 30, 35], [23, 35, 93, 95]], dtype=torch.float + ) + exp_xywh = torch.tensor([[0, 0, 100, 100], [0, 0, 0, 0], [10, 15, 20, 20], [23, 35, 70, 60]], dtype=torch.float) + + assert exp_xywh.size() == torch.Size([4, 4]) + box_xywh = ops.box_convert(box_tensor, in_fmt="xyxy", out_fmt="xywh") + assert_equal(box_xywh, exp_xywh) + + # Reverse conversion + box_xyxy = ops.box_convert(box_xywh, in_fmt="xywh", out_fmt="xyxy") + assert_equal(box_xyxy, box_tensor) + + def test_bbox_xyxy_cxcywh(self): + # Simple test convert boxes to cxcywh and back. Make sure they are same. + # box_tensor is in x1 y1 x2 y2 format. + box_tensor = torch.tensor( + [[0, 0, 100, 100], [0, 0, 0, 0], [10, 15, 30, 35], [23, 35, 93, 95]], dtype=torch.float + ) + exp_cxcywh = torch.tensor( + [[50, 50, 100, 100], [0, 0, 0, 0], [20, 25, 20, 20], [58, 65, 70, 60]], dtype=torch.float + ) + + assert exp_cxcywh.size() == torch.Size([4, 4]) + box_cxcywh = ops.box_convert(box_tensor, in_fmt="xyxy", out_fmt="cxcywh") + assert_equal(box_cxcywh, exp_cxcywh) + + # Reverse conversion + box_xyxy = ops.box_convert(box_cxcywh, in_fmt="cxcywh", out_fmt="xyxy") + assert_equal(box_xyxy, box_tensor) + + def test_bbox_xywh_cxcywh(self): + box_tensor = torch.tensor( + [[0, 0, 100, 100], [0, 0, 0, 0], [10, 15, 20, 20], [23, 35, 70, 60]], dtype=torch.float + ) + + exp_cxcywh = torch.tensor( + [[50, 50, 100, 100], [0, 0, 0, 0], [20, 25, 20, 20], [58, 65, 70, 60]], dtype=torch.float + ) + + assert exp_cxcywh.size() == torch.Size([4, 4]) + box_cxcywh = ops.box_convert(box_tensor, in_fmt="xywh", out_fmt="cxcywh") + assert_equal(box_cxcywh, exp_cxcywh) + + # Reverse conversion + box_xywh = ops.box_convert(box_cxcywh, in_fmt="cxcywh", out_fmt="xywh") + assert_equal(box_xywh, box_tensor) + + @pytest.mark.parametrize("inv_infmt", ["xwyh", "cxwyh"]) + @pytest.mark.parametrize("inv_outfmt", ["xwcx", "xhwcy"]) + def test_bbox_invalid(self, inv_infmt, inv_outfmt): + box_tensor = torch.tensor( + [[0, 0, 100, 100], [0, 0, 0, 0], [10, 15, 20, 20], [23, 35, 70, 60]], dtype=torch.float + ) + + with pytest.raises(ValueError): + ops.box_convert(box_tensor, inv_infmt, inv_outfmt) + + def test_bbox_convert_jit(self): + box_tensor = torch.tensor( + [[0, 0, 100, 100], [0, 0, 0, 0], [10, 15, 30, 35], [23, 35, 93, 95]], dtype=torch.float + ) + + scripted_fn = torch.jit.script(ops.box_convert) + + box_xywh = ops.box_convert(box_tensor, in_fmt="xyxy", out_fmt="xywh") + scripted_xywh = scripted_fn(box_tensor, "xyxy", "xywh") + torch.testing.assert_close(scripted_xywh, box_xywh) + + box_cxcywh = ops.box_convert(box_tensor, in_fmt="xyxy", out_fmt="cxcywh") + scripted_cxcywh = scripted_fn(box_tensor, "xyxy", "cxcywh") + torch.testing.assert_close(scripted_cxcywh, box_cxcywh) + + +class TestBoxArea: + def area_check(self, box, expected, atol=1e-4): + out = ops.box_area(box) + torch.testing.assert_close(out, expected, rtol=0.0, check_dtype=False, atol=atol) + + @pytest.mark.parametrize("dtype", [torch.int8, torch.int16, torch.int32, torch.int64]) + def test_int_boxes(self, dtype): + box_tensor = torch.tensor([[0, 0, 100, 100], [0, 0, 0, 0]], dtype=dtype) + expected = torch.tensor([10000, 0], dtype=torch.int32) + self.area_check(box_tensor, expected) + + @pytest.mark.parametrize("dtype", [torch.float32, torch.float64]) + def test_float_boxes(self, dtype): + box_tensor = torch.tensor(FLOAT_BOXES, dtype=dtype) + expected = torch.tensor([604723.0806, 600965.4666, 592761.0085], dtype=dtype) + self.area_check(box_tensor, 
expected) + + def test_float16_box(self): + box_tensor = torch.tensor( + [[2.825, 1.8625, 3.90, 4.85], [2.825, 4.875, 19.20, 5.10], [2.925, 1.80, 8.90, 4.90]], dtype=torch.float16 + ) + + expected = torch.tensor([3.2170, 3.7108, 18.5071], dtype=torch.float16) + self.area_check(box_tensor, expected, atol=0.01) + + def test_box_area_jit(self): + box_tensor = torch.tensor([[0, 0, 100, 100], [0, 0, 0, 0]], dtype=torch.float) + expected = ops.box_area(box_tensor) + scripted_fn = torch.jit.script(ops.box_area) + scripted_area = scripted_fn(box_tensor) + torch.testing.assert_close(scripted_area, expected) + + +INT_BOXES = [[0, 0, 100, 100], [0, 0, 50, 50], [200, 200, 300, 300], [0, 0, 25, 25]] +INT_BOXES2 = [[0, 0, 100, 100], [0, 0, 50, 50], [200, 200, 300, 300]] +FLOAT_BOXES = [ + [285.3538, 185.5758, 1193.5110, 851.4551], + [285.1472, 188.7374, 1192.4984, 851.0669], + [279.2440, 197.9812, 1189.4746, 849.2019], +] + + +def gen_box(size, dtype=torch.float): + xy1 = torch.rand((size, 2), dtype=dtype) + xy2 = xy1 + torch.rand((size, 2), dtype=dtype) + return torch.cat([xy1, xy2], axis=-1) + + +class TestIouBase: + @staticmethod + def _run_test(target_fn: Callable, actual_box1, actual_box2, dtypes, atol, expected): + for dtype in dtypes: + actual_box1 = torch.tensor(actual_box1, dtype=dtype) + actual_box2 = torch.tensor(actual_box2, dtype=dtype) + expected_box = torch.tensor(expected) + out = target_fn(actual_box1, actual_box2) + torch.testing.assert_close(out, expected_box, rtol=0.0, check_dtype=False, atol=atol) + + @staticmethod + def _run_jit_test(target_fn: Callable, actual_box: List): + box_tensor = torch.tensor(actual_box, dtype=torch.float) + expected = target_fn(box_tensor, box_tensor) + scripted_fn = torch.jit.script(target_fn) + scripted_out = scripted_fn(box_tensor, box_tensor) + torch.testing.assert_close(scripted_out, expected) + + @staticmethod + def _cartesian_product(boxes1, boxes2, target_fn: Callable): + N = boxes1.size(0) + M = boxes2.size(0) + result = torch.zeros((N, M)) + for i in range(N): + for j in range(M): + result[i, j] = target_fn(boxes1[i].unsqueeze(0), boxes2[j].unsqueeze(0)) + return result + + @staticmethod + def _run_cartesian_test(target_fn: Callable): + boxes1 = gen_box(5) + boxes2 = gen_box(7) + a = TestIouBase._cartesian_product(boxes1, boxes2, target_fn) + b = target_fn(boxes1, boxes2) + torch.testing.assert_close(a, b) + + +class TestBoxIou(TestIouBase): + int_expected = [[1.0, 0.25, 0.0], [0.25, 1.0, 0.0], [0.0, 0.0, 1.0], [0.0625, 0.25, 0.0]] + float_expected = [[1.0, 0.9933, 0.9673], [0.9933, 1.0, 0.9737], [0.9673, 0.9737, 1.0]] + + @pytest.mark.parametrize( + "actual_box1, actual_box2, dtypes, atol, expected", + [ + pytest.param(INT_BOXES, INT_BOXES2, [torch.int16, torch.int32, torch.int64], 1e-4, int_expected), + pytest.param(FLOAT_BOXES, FLOAT_BOXES, [torch.float16], 0.002, float_expected), + pytest.param(FLOAT_BOXES, FLOAT_BOXES, [torch.float32, torch.float64], 1e-3, float_expected), + ], + ) + def test_iou(self, actual_box1, actual_box2, dtypes, atol, expected): + self._run_test(ops.box_iou, actual_box1, actual_box2, dtypes, atol, expected) + + def test_iou_jit(self): + self._run_jit_test(ops.box_iou, INT_BOXES) + + def test_iou_cartesian(self): + self._run_cartesian_test(ops.box_iou) + + +class TestGeneralizedBoxIou(TestIouBase): + int_expected = [[1.0, 0.25, -0.7778], [0.25, 1.0, -0.8611], [-0.7778, -0.8611, 1.0], [0.0625, 0.25, -0.8819]] + float_expected = [[1.0, 0.9933, 0.9673], [0.9933, 1.0, 0.9737], [0.9673, 0.9737, 1.0]] + + 
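# [Editor's note] Unlike plain IoU, GIoU = IoU - area(C \ (A ∪ B)) / area(C),
# where C is the smallest box enclosing both inputs, so disjoint boxes go
# negative (the range is [-1, 1]). Checking int_expected[0][2] above by hand:
# A = [0, 0, 100, 100] and B = [200, 200, 300, 300] have IoU 0, union area
# 2 * 100^2 = 20000 and enclosing-box area 300^2 = 90000, giving
# GIoU = 0 - (90000 - 20000) / 90000 ≈ -0.7778.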
@pytest.mark.parametrize( + "actual_box1, actual_box2, dtypes, atol, expected", + [ + pytest.param(INT_BOXES, INT_BOXES2, [torch.int16, torch.int32, torch.int64], 1e-4, int_expected), + pytest.param(FLOAT_BOXES, FLOAT_BOXES, [torch.float16], 0.002, float_expected), + pytest.param(FLOAT_BOXES, FLOAT_BOXES, [torch.float32, torch.float64], 1e-3, float_expected), + ], + ) + def test_iou(self, actual_box1, actual_box2, dtypes, atol, expected): + self._run_test(ops.generalized_box_iou, actual_box1, actual_box2, dtypes, atol, expected) + + def test_iou_jit(self): + self._run_jit_test(ops.generalized_box_iou, INT_BOXES) + + def test_iou_cartesian(self): + self._run_cartesian_test(ops.generalized_box_iou) + + +class TestDistanceBoxIoU(TestIouBase): + int_expected = [ + [1.0000, 0.1875, -0.4444], + [0.1875, 1.0000, -0.5625], + [-0.4444, -0.5625, 1.0000], + [-0.0781, 0.1875, -0.6267], + ] + float_expected = [[1.0, 0.9933, 0.9673], [0.9933, 1.0, 0.9737], [0.9673, 0.9737, 1.0]] + + @pytest.mark.parametrize( + "actual_box1, actual_box2, dtypes, atol, expected", + [ + pytest.param(INT_BOXES, INT_BOXES2, [torch.int16, torch.int32, torch.int64], 1e-4, int_expected), + pytest.param(FLOAT_BOXES, FLOAT_BOXES, [torch.float16], 0.002, float_expected), + pytest.param(FLOAT_BOXES, FLOAT_BOXES, [torch.float32, torch.float64], 1e-3, float_expected), + ], + ) + def test_iou(self, actual_box1, actual_box2, dtypes, atol, expected): + self._run_test(ops.distance_box_iou, actual_box1, actual_box2, dtypes, atol, expected) + + def test_iou_jit(self): + self._run_jit_test(ops.distance_box_iou, INT_BOXES) + + def test_iou_cartesian(self): + self._run_cartesian_test(ops.distance_box_iou) + + +class TestCompleteBoxIou(TestIouBase): + int_expected = [ + [1.0000, 0.1875, -0.4444], + [0.1875, 1.0000, -0.5625], + [-0.4444, -0.5625, 1.0000], + [-0.0781, 0.1875, -0.6267], + ] + float_expected = [[1.0, 0.9933, 0.9673], [0.9933, 1.0, 0.9737], [0.9673, 0.9737, 1.0]] + + @pytest.mark.parametrize( + "actual_box1, actual_box2, dtypes, atol, expected", + [ + pytest.param(INT_BOXES, INT_BOXES2, [torch.int16, torch.int32, torch.int64], 1e-4, int_expected), + pytest.param(FLOAT_BOXES, FLOAT_BOXES, [torch.float16], 0.002, float_expected), + pytest.param(FLOAT_BOXES, FLOAT_BOXES, [torch.float32, torch.float64], 1e-3, float_expected), + ], + ) + def test_iou(self, actual_box1, actual_box2, dtypes, atol, expected): + self._run_test(ops.complete_box_iou, actual_box1, actual_box2, dtypes, atol, expected) + + def test_iou_jit(self): + self._run_jit_test(ops.complete_box_iou, INT_BOXES) + + def test_iou_cartesian(self): + self._run_cartesian_test(ops.complete_box_iou) + + +def get_boxes(dtype, device): + box1 = torch.tensor([-1, -1, 1, 1], dtype=dtype, device=device) + box2 = torch.tensor([0, 0, 1, 1], dtype=dtype, device=device) + box3 = torch.tensor([0, 1, 1, 2], dtype=dtype, device=device) + box4 = torch.tensor([1, 1, 2, 2], dtype=dtype, device=device) + + box1s = torch.stack([box2, box2], dim=0) + box2s = torch.stack([box3, box4], dim=0) + + return box1, box2, box3, box4, box1s, box2s + + +def assert_iou_loss(iou_fn, box1, box2, expected_loss, device, reduction="none"): + computed_loss = iou_fn(box1, box2, reduction=reduction) + expected_loss = torch.tensor(expected_loss, device=device) + torch.testing.assert_close(computed_loss, expected_loss) + + +def assert_empty_loss(iou_fn, dtype, device): + box1 = torch.randn([0, 4], dtype=dtype, device=device).requires_grad_() + box2 = torch.randn([0, 4], dtype=dtype, device=device).requires_grad_() + 
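+    # Even with zero boxes, the "mean"-reduced loss should participate in autograd:
+    # backward() is expected to populate empty (shape [0, 4]) gradients rather than
+    # leave .grad as None, which is what the asserts below check.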
+    loss = iou_fn(box1, box2, reduction="mean")
+    loss.backward()
+    torch.testing.assert_close(loss, torch.tensor(0.0, device=device))
+    assert box1.grad is not None, "box1.grad should not be None after backward is called"
+    assert box2.grad is not None, "box2.grad should not be None after backward is called"
+    loss = iou_fn(box1, box2, reduction="none")
+    assert loss.numel() == 0, f"{str(iou_fn)} for two empty boxes should be empty"
+
+
+class TestGeneralizedBoxIouLoss:
+    # We refer to the original test: https://github.com/facebookresearch/fvcore/blob/main/tests/test_giou_loss.py
+    @pytest.mark.parametrize("device", cpu_and_cuda())
+    @pytest.mark.parametrize("dtype", [torch.float32, torch.half])
+    def test_giou_loss(self, dtype, device):
+        box1, box2, box3, box4, box1s, box2s = get_boxes(dtype, device)
+
+        # Identical boxes should have a loss of 0
+        assert_iou_loss(ops.generalized_box_iou_loss, box1, box1, 0.0, device=device)
+
+        # A quarter-size box inside the other box gives an IoU of 0.25
+        assert_iou_loss(ops.generalized_box_iou_loss, box1, box2, 0.75, device=device)
+
+        # Two side-by-side boxes, area=union
+        # IoU=0 and GIoU=0 (loss 1.0)
+        assert_iou_loss(ops.generalized_box_iou_loss, box2, box3, 1.0, device=device)
+
+        # Two diagonally adjacent boxes, area=2*union
+        # IoU=0 and GIoU=-0.5 (loss 1.5)
+        assert_iou_loss(ops.generalized_box_iou_loss, box2, box4, 1.5, device=device)
+
+        # Test batched loss and reductions
+        assert_iou_loss(ops.generalized_box_iou_loss, box1s, box2s, 2.5, device=device, reduction="sum")
+        assert_iou_loss(ops.generalized_box_iou_loss, box1s, box2s, 1.25, device=device, reduction="mean")
+
+        # Test reduction value
+        # A reduction value other than "none", "mean", or "sum" should raise a ValueError
+        with pytest.raises(ValueError, match="Invalid"):
+            ops.generalized_box_iou_loss(box1s, box2s, reduction="xyz")
+
+    @pytest.mark.parametrize("device", cpu_and_cuda())
+    @pytest.mark.parametrize("dtype", [torch.float32, torch.half])
+    def test_empty_inputs(self, dtype, device):
+        assert_empty_loss(ops.generalized_box_iou_loss, dtype, device)
+
+
+class TestCompleteBoxIouLoss:
+    @pytest.mark.parametrize("dtype", [torch.float32, torch.half])
+    @pytest.mark.parametrize("device", cpu_and_cuda())
+    def test_ciou_loss(self, dtype, device):
+        box1, box2, box3, box4, box1s, box2s = get_boxes(dtype, device)
+
+        assert_iou_loss(ops.complete_box_iou_loss, box1, box1, 0.0, device=device)
+        assert_iou_loss(ops.complete_box_iou_loss, box1, box2, 0.8125, device=device)
+        assert_iou_loss(ops.complete_box_iou_loss, box1, box3, 1.1923, device=device)
+        assert_iou_loss(ops.complete_box_iou_loss, box1, box4, 1.2500, device=device)
+        assert_iou_loss(ops.complete_box_iou_loss, box1s, box2s, 1.2250, device=device, reduction="mean")
+        assert_iou_loss(ops.complete_box_iou_loss, box1s, box2s, 2.4500, device=device, reduction="sum")
+
+        with pytest.raises(ValueError, match="Invalid"):
+            ops.complete_box_iou_loss(box1s, box2s, reduction="xyz")
+
+    @pytest.mark.parametrize("device", cpu_and_cuda())
+    @pytest.mark.parametrize("dtype", [torch.float32, torch.half])
+    def test_empty_inputs(self, dtype, device):
+        assert_empty_loss(ops.complete_box_iou_loss, dtype, device)
+
+
+class TestDistanceBoxIouLoss:
+    @pytest.mark.parametrize("device", cpu_and_cuda())
+    @pytest.mark.parametrize("dtype", [torch.float32, torch.half])
+    def test_distance_iou_loss(self, dtype, device):
+        box1, box2, box3, box4, box1s, box2s = get_boxes(dtype, device)
+
+        assert_iou_loss(ops.distance_box_iou_loss, box1, box1, 0.0, device=device)
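+        # Where 0.8125 below comes from (a by-hand check of the DIoU definition):
+        # for box1 = [-1, -1, 1, 1] and box2 = [0, 0, 1, 1], IoU = 1/4, the squared
+        # center distance is 0.5, and the squared diagonal of the enclosing box
+        # [-1, -1, 1, 1] is 8, so DIoU = 0.25 - 0.5 / 8 = 0.1875 and the loss is
+        # 1 - 0.1875 = 0.8125.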
+        assert_iou_loss(ops.distance_box_iou_loss, box1, box2, 0.8125, device=device)
+        assert_iou_loss(ops.distance_box_iou_loss, box1, box3, 1.1923, device=device)
+        assert_iou_loss(ops.distance_box_iou_loss, box1, box4, 1.2500, device=device)
+        assert_iou_loss(ops.distance_box_iou_loss, box1s, box2s, 1.2250, device=device, reduction="mean")
+        assert_iou_loss(ops.distance_box_iou_loss, box1s, box2s, 2.4500, device=device, reduction="sum")
+
+        with pytest.raises(ValueError, match="Invalid"):
+            ops.distance_box_iou_loss(box1s, box2s, reduction="xyz")
+
+    @pytest.mark.parametrize("device", cpu_and_cuda())
+    @pytest.mark.parametrize("dtype", [torch.float32, torch.half])
+    def test_empty_distance_iou_inputs(self, dtype, device):
+        assert_empty_loss(ops.distance_box_iou_loss, dtype, device)
+
+
+class TestFocalLoss:
+    def _generate_diverse_input_target_pair(self, shape=(5, 2), **kwargs):
+        def logit(p):
+            return torch.log(p / (1 - p))
+
+        def generate_tensor_with_range_type(shape, range_type, **kwargs):
+            if range_type != "random_binary":
+                low, high = {
+                    "small": (0.0, 0.2),
+                    "big": (0.8, 1.0),
+                    "zeros": (0.0, 0.0),
+                    "ones": (1.0, 1.0),
+                    "random": (0.0, 1.0),
+                }[range_type]
+                return torch.testing.make_tensor(shape, low=low, high=high, **kwargs)
+            else:
+                return torch.randint(0, 2, shape, **kwargs)
+
+        # This function returns inputs and targets with shape (shape[0] * 9, shape[1])
+        inputs = []
+        targets = []
+        for input_range_type, target_range_type in [
+            ("small", "zeros"),
+            ("small", "ones"),
+            ("small", "random_binary"),
+            ("big", "zeros"),
+            ("big", "ones"),
+            ("big", "random_binary"),
+            ("random", "zeros"),
+            ("random", "ones"),
+            ("random", "random_binary"),
+        ]:
+            inputs.append(logit(generate_tensor_with_range_type(shape, input_range_type, **kwargs)))
+            targets.append(generate_tensor_with_range_type(shape, target_range_type, **kwargs))
+
+        return torch.cat(inputs), torch.cat(targets)
+
+    @pytest.mark.parametrize("alpha", [-1.0, 0.0, 0.58, 1.0])
+    @pytest.mark.parametrize("gamma", [0, 2])
+    @pytest.mark.parametrize("device", cpu_and_cuda())
+    @pytest.mark.parametrize("dtype", [torch.float32, torch.half])
+    @pytest.mark.parametrize("seed", [0, 1])
+    def test_correct_ratio(self, alpha, gamma, device, dtype, seed):
+        if device == "cpu" and dtype is torch.half:
+            pytest.skip("Currently torch.half is not fully supported on cpu")
+        # For testing the ratio against a manual calculation, we require the reduction to be "none"
+        reduction = "none"
+        torch.random.manual_seed(seed)
+        inputs, targets = self._generate_diverse_input_target_pair(dtype=dtype, device=device)
+        focal_loss = ops.sigmoid_focal_loss(inputs, targets, gamma=gamma, alpha=alpha, reduction=reduction)
+        ce_loss = F.binary_cross_entropy_with_logits(inputs, targets, reduction=reduction)
+
+        assert torch.all(
+            focal_loss <= ce_loss
+        ), "focal loss must be less than or equal to the cross entropy loss for the same input"
+
+        loss_ratio = (focal_loss / ce_loss).squeeze()
+        prob = torch.sigmoid(inputs)
+        p_t = prob * targets + (1 - prob) * (1 - targets)
+        correct_ratio = (1.0 - p_t) ** gamma
+        if alpha >= 0:
+            alpha_t = alpha * targets + (1 - alpha) * (1 - targets)
+            correct_ratio = correct_ratio * alpha_t
+
+        tol = 1e-3 if dtype is torch.half else 1e-5
+        torch.testing.assert_close(correct_ratio, loss_ratio, atol=tol, rtol=tol)
+
+    @pytest.mark.parametrize("reduction", ["mean", "sum"])
+    @pytest.mark.parametrize("device", cpu_and_cuda())
+    @pytest.mark.parametrize("dtype", [torch.float32, torch.half])
+    @pytest.mark.parametrize("seed", [2, 3])
+    def test_equal_ce_loss(self, reduction, device, dtype, seed):
+        if device == "cpu" and dtype is torch.half:
+            pytest.skip("Currently torch.half is not fully supported on cpu")
+        # The focal loss should equal the cross entropy loss if alpha=-1 and gamma=0
+        alpha = -1
+        gamma = 0
+        torch.random.manual_seed(seed)
+        inputs, targets = self._generate_diverse_input_target_pair(dtype=dtype, device=device)
+        inputs_fl = inputs.clone().requires_grad_()
+        targets_fl = targets.clone()
+        inputs_ce = inputs.clone().requires_grad_()
+        targets_ce = targets.clone()
+        focal_loss = ops.sigmoid_focal_loss(inputs_fl, targets_fl, gamma=gamma, alpha=alpha, reduction=reduction)
+        ce_loss = F.binary_cross_entropy_with_logits(inputs_ce, targets_ce, reduction=reduction)
+
+        torch.testing.assert_close(focal_loss, ce_loss)
+
+        focal_loss.backward()
+        ce_loss.backward()
+        torch.testing.assert_close(inputs_fl.grad, inputs_ce.grad)
+
+    @pytest.mark.parametrize("alpha", [-1.0, 0.0, 0.58, 1.0])
+    @pytest.mark.parametrize("gamma", [0, 2])
+    @pytest.mark.parametrize("reduction", ["none", "mean", "sum"])
+    @pytest.mark.parametrize("device", cpu_and_cuda())
+    @pytest.mark.parametrize("dtype", [torch.float32, torch.half])
+    @pytest.mark.parametrize("seed", [4, 5])
+    def test_jit(self, alpha, gamma, reduction, device, dtype, seed):
+        if device == "cpu" and dtype is torch.half:
+            pytest.skip("Currently torch.half is not fully supported on cpu")
+        script_fn = torch.jit.script(ops.sigmoid_focal_loss)
+        torch.random.manual_seed(seed)
+        inputs, targets = self._generate_diverse_input_target_pair(dtype=dtype, device=device)
+        focal_loss = ops.sigmoid_focal_loss(inputs, targets, gamma=gamma, alpha=alpha, reduction=reduction)
+        scripted_focal_loss = script_fn(inputs, targets, gamma=gamma, alpha=alpha, reduction=reduction)
+
+        tol = 1e-3 if dtype is torch.half else 1e-5
+        torch.testing.assert_close(focal_loss, scripted_focal_loss, rtol=tol, atol=tol)
+
+    # An unknown reduction mode should raise a ValueError
+    @pytest.mark.parametrize("device", cpu_and_cuda())
+    @pytest.mark.parametrize("dtype", [torch.float32, torch.half])
+    def test_reduction_mode(self, device, dtype, reduction="xyz"):
+        if device == "cpu" and dtype is torch.half:
+            pytest.skip("Currently torch.half is not fully supported on cpu")
+        torch.random.manual_seed(0)
+        inputs, targets = self._generate_diverse_input_target_pair(device=device, dtype=dtype)
+        with pytest.raises(ValueError, match="Invalid"):
+            ops.sigmoid_focal_loss(inputs, targets, 0.25, 2, reduction)
+
+
+class TestMasksToBoxes:
+    def test_masks_box(self):
+        def masks_box_check(masks, expected, atol=1e-4):
+            out = ops.masks_to_boxes(masks)
+            assert out.dtype == torch.float
+            torch.testing.assert_close(out, expected, rtol=0.0, check_dtype=True, atol=atol)
+
+        # Check for int type boxes.
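+        # Conceptually, ops.masks_to_boxes reduces each mask to the extremes of its
+        # nonzero pixels; a rough single-mask sketch (not the library implementation):
+        #     ys, xs = torch.where(mask != 0)
+        #     box = torch.tensor([xs.min(), ys.min(), xs.max(), ys.max()])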
+ def _get_image(): + assets_directory = os.path.join(os.path.dirname(os.path.abspath(__file__)), "assets") + mask_path = os.path.join(assets_directory, "masks.tiff") + image = Image.open(mask_path) + return image + + def _create_masks(image, masks): + for index in range(image.n_frames): + image.seek(index) + frame = np.array(image) + masks[index] = torch.tensor(frame) + + return masks + + expected = torch.tensor( + [ + [127, 2, 165, 40], + [2, 50, 44, 92], + [56, 63, 98, 100], + [139, 68, 175, 104], + [160, 112, 198, 145], + [49, 138, 99, 182], + [108, 148, 152, 213], + ], + dtype=torch.float, + ) + + image = _get_image() + for dtype in [torch.float16, torch.float32, torch.float64]: + masks = torch.zeros((image.n_frames, image.height, image.width), dtype=dtype) + masks = _create_masks(image, masks) + masks_box_check(masks, expected) + + +class TestStochasticDepth: + @pytest.mark.parametrize("seed", range(10)) + @pytest.mark.parametrize("p", [0.2, 0.5, 0.8]) + @pytest.mark.parametrize("mode", ["batch", "row"]) + def test_stochastic_depth_random(self, seed, mode, p): + torch.manual_seed(seed) + stats = pytest.importorskip("scipy.stats") + batch_size = 5 + x = torch.ones(size=(batch_size, 3, 4, 4)) + layer = ops.StochasticDepth(p=p, mode=mode) + layer.__repr__() + + trials = 250 + num_samples = 0 + counts = 0 + for _ in range(trials): + out = layer(x) + non_zero_count = out.sum(dim=(1, 2, 3)).nonzero().size(0) + if mode == "batch": + if non_zero_count == 0: + counts += 1 + num_samples += 1 + elif mode == "row": + counts += batch_size - non_zero_count + num_samples += batch_size + + p_value = stats.binomtest(counts, num_samples, p=p).pvalue + assert p_value > 0.01 + + @pytest.mark.parametrize("seed", range(10)) + @pytest.mark.parametrize("p", (0, 1)) + @pytest.mark.parametrize("mode", ["batch", "row"]) + def test_stochastic_depth(self, seed, mode, p): + torch.manual_seed(seed) + batch_size = 5 + x = torch.ones(size=(batch_size, 3, 4, 4)) + layer = ops.StochasticDepth(p=p, mode=mode) + + out = layer(x) + if p == 0: + assert out.equal(x) + elif p == 1: + assert out.equal(torch.zeros_like(x)) + + def make_obj(self, p, mode, wrap=False): + obj = ops.StochasticDepth(p, mode) + return StochasticDepthWrapper(obj) if wrap else obj + + @pytest.mark.parametrize("p", (0, 1)) + @pytest.mark.parametrize("mode", ["batch", "row"]) + def test_is_leaf_node(self, p, mode): + op_obj = self.make_obj(p, mode, wrap=True) + graph_node_names = get_graph_node_names(op_obj) + + assert len(graph_node_names) == 2 + assert len(graph_node_names[0]) == len(graph_node_names[1]) + assert len(graph_node_names[0]) == 1 + op_obj.n_inputs + + +class TestUtils: + @pytest.mark.parametrize("norm_layer", [None, nn.BatchNorm2d, nn.LayerNorm]) + def test_split_normalization_params(self, norm_layer): + model = models.mobilenet_v3_large(norm_layer=norm_layer) + params = ops._utils.split_normalization_params(model, None if norm_layer is None else [norm_layer]) + + assert len(params[0]) == 92 + assert len(params[1]) == 82 + + +class TestDropBlock: + @pytest.mark.parametrize("seed", range(10)) + @pytest.mark.parametrize("dim", [2, 3]) + @pytest.mark.parametrize("p", [0, 0.5]) + @pytest.mark.parametrize("block_size", [5, 11]) + @pytest.mark.parametrize("inplace", [True, False]) + def test_drop_block(self, seed, dim, p, block_size, inplace): + torch.manual_seed(seed) + batch_size = 5 + channels = 3 + height = 11 + width = height + depth = height + if dim == 2: + x = torch.ones(size=(batch_size, channels, height, width)) + layer = 
ops.DropBlock2d(p=p, block_size=block_size, inplace=inplace) + feature_size = height * width + elif dim == 3: + x = torch.ones(size=(batch_size, channels, depth, height, width)) + layer = ops.DropBlock3d(p=p, block_size=block_size, inplace=inplace) + feature_size = depth * height * width + layer.__repr__() + + out = layer(x) + if p == 0: + assert out.equal(x) + if block_size == height: + for b, c in product(range(batch_size), range(channels)): + assert out[b, c].count_nonzero() in (0, feature_size) + + @pytest.mark.parametrize("seed", range(10)) + @pytest.mark.parametrize("dim", [2, 3]) + @pytest.mark.parametrize("p", [0.1, 0.2]) + @pytest.mark.parametrize("block_size", [3]) + @pytest.mark.parametrize("inplace", [False]) + def test_drop_block_random(self, seed, dim, p, block_size, inplace): + torch.manual_seed(seed) + batch_size = 5 + channels = 3 + height = 11 + width = height + depth = height + if dim == 2: + x = torch.ones(size=(batch_size, channels, height, width)) + layer = ops.DropBlock2d(p=p, block_size=block_size, inplace=inplace) + elif dim == 3: + x = torch.ones(size=(batch_size, channels, depth, height, width)) + layer = ops.DropBlock3d(p=p, block_size=block_size, inplace=inplace) + + trials = 250 + num_samples = 0 + counts = 0 + cell_numel = torch.tensor(x.shape).prod() + for _ in range(trials): + with torch.no_grad(): + out = layer(x) + non_zero_count = out.nonzero().size(0) + counts += cell_numel - non_zero_count + num_samples += cell_numel + + assert abs(p - counts / num_samples) / p < 0.15 + + def make_obj(self, dim, p, block_size, inplace, wrap=False): + if dim == 2: + obj = ops.DropBlock2d(p, block_size, inplace) + elif dim == 3: + obj = ops.DropBlock3d(p, block_size, inplace) + return DropBlockWrapper(obj) if wrap else obj + + @pytest.mark.parametrize("dim", (2, 3)) + @pytest.mark.parametrize("p", [0, 1]) + @pytest.mark.parametrize("block_size", [5, 7]) + @pytest.mark.parametrize("inplace", [True, False]) + def test_is_leaf_node(self, dim, p, block_size, inplace): + op_obj = self.make_obj(dim, p, block_size, inplace, wrap=True) + graph_node_names = get_graph_node_names(op_obj) + + assert len(graph_node_names) == 2 + assert len(graph_node_names[0]) == len(graph_node_names[1]) + assert len(graph_node_names[0]) == 1 + op_obj.n_inputs + + +if __name__ == "__main__": + pytest.main([__file__]) diff --git a/test/test_prototype_datasets_builtin.py b/test/test_prototype_datasets_builtin.py new file mode 100644 index 00000000000..5f8fc90debf --- /dev/null +++ b/test/test_prototype_datasets_builtin.py @@ -0,0 +1,282 @@ +import io +import pickle +from collections import deque +from pathlib import Path + +import pytest +import torch +import torchvision.transforms.v2 as transforms + +from builtin_dataset_mocks import DATASET_MOCKS, parametrize_dataset_mocks +from torch.testing._comparison import not_close_error_metas, ObjectPair, TensorLikePair + +# TODO: replace with torchdata.dataloader2.DataLoader2 as soon as it is stable-ish +from torch.utils.data import DataLoader + +# TODO: replace with torchdata equivalent as soon as it is available +from torch.utils.data.graph_settings import get_all_graph_pipes + +from torchdata.dataloader2.graph.utils import traverse_dps +from torchdata.datapipes.iter import ShardingFilter, Shuffler +from torchdata.datapipes.utils import StreamWrapper +from torchvision import tv_tensors +from torchvision._utils import sequence_to_str +from torchvision.prototype import datasets +from torchvision.prototype.datasets.utils import EncodedImage +from 
torchvision.prototype.datasets.utils._internal import INFINITE_BUFFER_SIZE
+from torchvision.prototype.tv_tensors import Label
+from torchvision.transforms.v2._utils import is_pure_tensor
+
+
+def assert_samples_equal(*args, msg=None, **kwargs):
+    error_metas = not_close_error_metas(
+        *args, pair_types=(TensorLikePair, ObjectPair), rtol=0, atol=0, equal_nan=True, **kwargs
+    )
+    if error_metas:
+        raise error_metas[0].to_error(msg)
+
+
+def extract_datapipes(dp):
+    return get_all_graph_pipes(traverse_dps(dp))
+
+
+def consume(iterator):
+    # Copied from the official itertools recipes: https://docs.python.org/3/library/itertools.html#itertools-recipes
+    deque(iterator, maxlen=0)
+
+
+def next_consume(iterator):
+    item = next(iterator)
+    consume(iterator)
+    return item
+
+
+@pytest.fixture(autouse=True)
+def test_home(mocker, tmp_path):
+    mocker.patch("torchvision.prototype.datasets._api.home", return_value=str(tmp_path))
+    mocker.patch("torchvision.prototype.datasets.home", return_value=str(tmp_path))
+    yield tmp_path
+
+
+def test_coverage():
+    untested_datasets = set(datasets.list_datasets()) - DATASET_MOCKS.keys()
+    if untested_datasets:
+        raise AssertionError(
+            f"The dataset(s) {sequence_to_str(sorted(untested_datasets), separate_last='and ')} "
+            f"are exposed through `torchvision.prototype.datasets.load()`, but are not tested. "
+            f"Please add mock data to `test/builtin_dataset_mocks.py`."
+        )
+
+
+@pytest.mark.filterwarnings("error")
+class TestCommon:
+    @pytest.mark.parametrize("name", datasets.list_datasets())
+    def test_info(self, name):
+        try:
+            info = datasets.info(name)
+        except ValueError:
+            raise AssertionError("No info available.") from None
+
+        if not (isinstance(info, dict) and all(isinstance(key, str) for key in info.keys())):
+            raise AssertionError("Info should be a dictionary with string keys.")
+
+    @parametrize_dataset_mocks(DATASET_MOCKS)
+    def test_smoke(self, dataset_mock, config):
+        dataset, _ = dataset_mock.load(config)
+
+        if not isinstance(dataset, datasets.utils.Dataset):
+            raise AssertionError(f"Loading the dataset should return a Dataset, but got {type(dataset)} instead.")
+
+    @parametrize_dataset_mocks(DATASET_MOCKS)
+    def test_sample(self, dataset_mock, config):
+        dataset, _ = dataset_mock.load(config)
+
+        try:
+            sample = next_consume(iter(dataset))
+        except StopIteration:
+            raise AssertionError("Unable to draw any sample.") from None
+        except Exception as error:
+            raise AssertionError("Drawing a sample raised the error above.") from error
+
+        if not isinstance(sample, dict):
+            raise AssertionError(f"Samples should be dictionaries, but got {type(sample)} instead.")
+
+        if not sample:
+            raise AssertionError("Sample dictionary is empty.")
+
+    @parametrize_dataset_mocks(DATASET_MOCKS)
+    def test_num_samples(self, dataset_mock, config):
+        dataset, mock_info = dataset_mock.load(config)
+
+        assert len(list(dataset)) == mock_info["num_samples"]
+
+    @pytest.fixture
+    def log_session_streams(self):
+        debug_unclosed_streams = StreamWrapper.debug_unclosed_streams
+        try:
+            StreamWrapper.debug_unclosed_streams = True
+            yield
+        finally:
+            StreamWrapper.debug_unclosed_streams = debug_unclosed_streams
+
+    @parametrize_dataset_mocks(DATASET_MOCKS)
+    def test_stream_closing(self, log_session_streams, dataset_mock, config):
+        def make_msg_and_close(head):
+            unclosed_streams = []
+            for stream in list(StreamWrapper.session_streams.keys()):
+                unclosed_streams.append(repr(stream.file_obj))
+                stream.close()
+            unclosed_streams = "\n".join(unclosed_streams)
f"{head}\n\n{unclosed_streams}" + + if StreamWrapper.session_streams: + raise pytest.UsageError(make_msg_and_close("A previous test did not close the following streams:")) + + dataset, _ = dataset_mock.load(config) + + consume(iter(dataset)) + + if StreamWrapper.session_streams: + raise AssertionError(make_msg_and_close("The following streams were not closed after a full iteration:")) + + @parametrize_dataset_mocks(DATASET_MOCKS) + def test_no_unaccompanied_pure_tensors(self, dataset_mock, config): + dataset, _ = dataset_mock.load(config) + sample = next_consume(iter(dataset)) + + pure_tensors = {key for key, value in sample.items() if is_pure_tensor(value)} + + if pure_tensors and not any( + isinstance(item, (tv_tensors.Image, tv_tensors.Video, EncodedImage)) for item in sample.values() + ): + raise AssertionError( + f"The values of key(s) " + f"{sequence_to_str(sorted(pure_tensors), separate_last='and ')} contained pure tensors, " + f"but didn't find any (encoded) image or video." + ) + + @parametrize_dataset_mocks(DATASET_MOCKS) + def test_transformable(self, dataset_mock, config): + dataset, _ = dataset_mock.load(config) + + dataset = dataset.map(transforms.Identity()) + + consume(iter(dataset)) + + @parametrize_dataset_mocks(DATASET_MOCKS) + def test_traversable(self, dataset_mock, config): + dataset, _ = dataset_mock.load(config) + + traverse_dps(dataset) + + @parametrize_dataset_mocks(DATASET_MOCKS) + def test_serializable(self, dataset_mock, config): + dataset, _ = dataset_mock.load(config) + + pickle.dumps(dataset) + + # This has to be a proper function, since lambda's or local functions + # cannot be pickled, but this is a requirement for the DataLoader with + # multiprocessing, i.e. num_workers > 0 + def _collate_fn(self, batch): + return batch + + @pytest.mark.parametrize("num_workers", [0, 1]) + @parametrize_dataset_mocks(DATASET_MOCKS) + def test_data_loader(self, dataset_mock, config, num_workers): + dataset, _ = dataset_mock.load(config) + + dl = DataLoader( + dataset, + batch_size=2, + num_workers=num_workers, + collate_fn=self._collate_fn, + ) + + consume(dl) + + # TODO: we need to enforce not only that both a Shuffler and a ShardingFilter are part of the datapipe, but also + # that the Shuffler comes before the ShardingFilter. Early commits in https://github.com/pytorch/vision/pull/5680 + # contain a custom test for that, but we opted to wait for a potential solution / test from torchdata for now. 
+ @parametrize_dataset_mocks(DATASET_MOCKS) + @pytest.mark.parametrize("annotation_dp_type", (Shuffler, ShardingFilter)) + def test_has_annotations(self, dataset_mock, config, annotation_dp_type): + dataset, _ = dataset_mock.load(config) + + if not any(isinstance(dp, annotation_dp_type) for dp in extract_datapipes(dataset)): + raise AssertionError(f"The dataset doesn't contain a {annotation_dp_type.__name__}() datapipe.") + + @parametrize_dataset_mocks(DATASET_MOCKS) + def test_save_load(self, dataset_mock, config): + dataset, _ = dataset_mock.load(config) + + sample = next_consume(iter(dataset)) + + with io.BytesIO() as buffer: + torch.save(sample, buffer) + buffer.seek(0) + assert_samples_equal(torch.load(buffer, weights_only=True), sample) + + @parametrize_dataset_mocks(DATASET_MOCKS) + def test_infinite_buffer_size(self, dataset_mock, config): + dataset, _ = dataset_mock.load(config) + + for dp in extract_datapipes(dataset): + if hasattr(dp, "buffer_size"): + # TODO: replace this with the proper sentinel as soon as https://github.com/pytorch/data/issues/335 is + # resolved + assert dp.buffer_size == INFINITE_BUFFER_SIZE + + @parametrize_dataset_mocks(DATASET_MOCKS) + def test_has_length(self, dataset_mock, config): + dataset, _ = dataset_mock.load(config) + + assert len(dataset) > 0 + + +@parametrize_dataset_mocks(DATASET_MOCKS["qmnist"]) +class TestQMNIST: + def test_extra_label(self, dataset_mock, config): + dataset, _ = dataset_mock.load(config) + + sample = next_consume(iter(dataset)) + for key, type in ( + ("nist_hsf_series", int), + ("nist_writer_id", int), + ("digit_index", int), + ("nist_label", int), + ("global_digit_index", int), + ("duplicate", bool), + ("unused", bool), + ): + assert key in sample and isinstance(sample[key], type) + + +@parametrize_dataset_mocks(DATASET_MOCKS["gtsrb"]) +class TestGTSRB: + def test_label_matches_path(self, dataset_mock, config): + # We read the labels from the csv files instead. But for the trainset, the labels are also part of the path. 
+ # This test makes sure that they're both the same + if config["split"] != "train": + return + + dataset, _ = dataset_mock.load(config) + + for sample in dataset: + label_from_path = int(Path(sample["path"]).parent.name) + assert sample["label"] == label_from_path + + +@parametrize_dataset_mocks(DATASET_MOCKS["usps"]) +class TestUSPS: + def test_sample_content(self, dataset_mock, config): + dataset, _ = dataset_mock.load(config) + + for sample in dataset: + assert "image" in sample + assert "label" in sample + + assert isinstance(sample["image"], tv_tensors.Image) + assert isinstance(sample["label"], Label) + + assert sample["image"].shape == (1, 16, 16) diff --git a/test/test_prototype_datasets_utils.py b/test/test_prototype_datasets_utils.py new file mode 100644 index 00000000000..2098ac736ac --- /dev/null +++ b/test/test_prototype_datasets_utils.py @@ -0,0 +1,302 @@ +import gzip +import pathlib +import sys + +import numpy as np +import pytest +import torch +from datasets_utils import make_fake_flo_file, make_tar +from torchdata.datapipes.iter import FileOpener, TarArchiveLoader +from torchvision.datasets._optical_flow import _read_flo as read_flo_ref +from torchvision.datasets.utils import _decompress +from torchvision.prototype.datasets.utils import Dataset, GDriveResource, HttpResource, OnlineResource +from torchvision.prototype.datasets.utils._internal import fromfile, read_flo + + +@pytest.mark.filterwarnings("error:The given NumPy array is not writeable:UserWarning") +@pytest.mark.parametrize( + ("np_dtype", "torch_dtype", "byte_order"), + [ + (">f4", torch.float32, "big"), + ("i8", torch.int64, "big"), + ("|u1", torch.uint8, sys.byteorder), + ], +) +@pytest.mark.parametrize("count", (-1, 2)) +@pytest.mark.parametrize("mode", ("rb", "r+b")) +def test_fromfile(tmpdir, np_dtype, torch_dtype, byte_order, count, mode): + path = tmpdir / "data.bin" + rng = np.random.RandomState(0) + rng.randn(5 if count == -1 else count + 1).astype(np_dtype).tofile(path) + + for count_ in (-1, count // 2): + expected = torch.from_numpy(np.fromfile(path, dtype=np_dtype, count=count_).astype(np_dtype[1:])) + + with open(path, mode) as file: + actual = fromfile(file, dtype=torch_dtype, byte_order=byte_order, count=count_) + + torch.testing.assert_close(actual, expected) + + +def test_read_flo(tmpdir): + path = tmpdir / "test.flo" + make_fake_flo_file(3, 4, path) + + with open(path, "rb") as file: + actual = read_flo(file) + + expected = torch.from_numpy(read_flo_ref(path).astype("f4", copy=False)) + + torch.testing.assert_close(actual, expected) + + +class TestOnlineResource: + class DummyResource(OnlineResource): + def __init__(self, download_fn=None, **kwargs): + super().__init__(**kwargs) + self._download_fn = download_fn + + def _download(self, root): + if self._download_fn is None: + raise pytest.UsageError( + "`_download()` was called, but `DummyResource(...)` was constructed without `download_fn`." 
+ ) + + return self._download_fn(self, root) + + def _make_file(self, root, *, content, name="file.txt"): + file = root / name + with open(file, "w") as fh: + fh.write(content) + + return file + + def _make_folder(self, root, *, name="folder"): + folder = root / name + subfolder = folder / "subfolder" + subfolder.mkdir(parents=True) + + files = {} + for idx, root in enumerate([folder, folder, subfolder]): + content = f"sentinel{idx}" + file = self._make_file(root, name=f"file{idx}.txt", content=content) + files[str(file)] = content + + return folder, files + + def _make_tar(self, root, *, name="archive.tar", remove=True): + folder, files = self._make_folder(root, name=name.split(".")[0]) + archive = make_tar(root, name, folder, remove=remove) + files = {str(archive / pathlib.Path(file).relative_to(root)): content for file, content in files.items()} + return archive, files + + def test_load_file(self, tmp_path): + content = "sentinel" + file = self._make_file(tmp_path, content=content) + + resource = self.DummyResource(file_name=file.name) + + dp = resource.load(tmp_path) + assert isinstance(dp, FileOpener) + + data = list(dp) + assert len(data) == 1 + + path, buffer = data[0] + assert path == str(file) + assert buffer.read().decode() == content + + def test_load_folder(self, tmp_path): + folder, files = self._make_folder(tmp_path) + + resource = self.DummyResource(file_name=folder.name) + + dp = resource.load(tmp_path) + assert isinstance(dp, FileOpener) + assert {path: buffer.read().decode() for path, buffer in dp} == files + + def test_load_archive(self, tmp_path): + archive, files = self._make_tar(tmp_path) + + resource = self.DummyResource(file_name=archive.name) + + dp = resource.load(tmp_path) + assert isinstance(dp, TarArchiveLoader) + assert {path: buffer.read().decode() for path, buffer in dp} == files + + def test_priority_decompressed_gt_raw(self, tmp_path): + # We don't need to actually compress here. 
Adding the suffix is sufficient + self._make_file(tmp_path, content="raw_sentinel", name="file.txt.gz") + file = self._make_file(tmp_path, content="decompressed_sentinel", name="file.txt") + + resource = self.DummyResource(file_name=file.name) + + dp = resource.load(tmp_path) + path, buffer = next(iter(dp)) + + assert path == str(file) + assert buffer.read().decode() == "decompressed_sentinel" + + def test_priority_extracted_gt_decompressed(self, tmp_path): + archive, _ = self._make_tar(tmp_path, remove=False) + + resource = self.DummyResource(file_name=archive.name) + + dp = resource.load(tmp_path) + # If the archive had been selected, this would be a `TarArchiveReader` + assert isinstance(dp, FileOpener) + + def test_download(self, tmp_path): + download_fn_was_called = False + + def download_fn(resource, root): + nonlocal download_fn_was_called + download_fn_was_called = True + + return self._make_file(root, content="_", name=resource.file_name) + + resource = self.DummyResource( + file_name="file.txt", + download_fn=download_fn, + ) + + resource.load(tmp_path) + + assert download_fn_was_called, "`download_fn()` was never called" + + # This tests the `"decompress"` literal as well as a custom callable + @pytest.mark.parametrize( + "preprocess", + [ + "decompress", + lambda path: _decompress(str(path), remove_finished=True), + ], + ) + def test_preprocess_decompress(self, tmp_path, preprocess): + file_name = "file.txt.gz" + content = "sentinel" + + def download_fn(resource, root): + file = root / resource.file_name + with gzip.open(file, "wb") as fh: + fh.write(content.encode()) + return file + + resource = self.DummyResource(file_name=file_name, preprocess=preprocess, download_fn=download_fn) + + dp = resource.load(tmp_path) + data = list(dp) + assert len(data) == 1 + + path, buffer = data[0] + assert path == str(tmp_path / file_name).replace(".gz", "") + assert buffer.read().decode() == content + + def test_preprocess_extract(self, tmp_path): + files = None + + def download_fn(resource, root): + nonlocal files + archive, files = self._make_tar(root, name=resource.file_name) + return archive + + resource = self.DummyResource(file_name="folder.tar", preprocess="extract", download_fn=download_fn) + + dp = resource.load(tmp_path) + assert files is not None, "`download_fn()` was never called" + assert isinstance(dp, FileOpener) + + actual = {path: buffer.read().decode() for path, buffer in dp} + expected = { + path.replace(resource.file_name, resource.file_name.split(".")[0]): content + for path, content in files.items() + } + assert actual == expected + + def test_preprocess_only_after_download(self, tmp_path): + file = self._make_file(tmp_path, content="_") + + def preprocess(path): + raise AssertionError("`preprocess` was called although the file was already present.") + + resource = self.DummyResource( + file_name=file.name, + preprocess=preprocess, + ) + + resource.load(tmp_path) + + +class TestHttpResource: + def test_resolve_to_http(self, mocker): + file_name = "data.tar" + original_url = f"http://downloads.pytorch.org/{file_name}" + + redirected_url = original_url.replace("http", "https") + + sha256_sentinel = "sha256_sentinel" + + def preprocess_sentinel(path): + return path + + original_resource = HttpResource( + original_url, + sha256=sha256_sentinel, + preprocess=preprocess_sentinel, + ) + + mocker.patch("torchvision.prototype.datasets.utils._resource._get_redirect_url", return_value=redirected_url) + redirected_resource = original_resource.resolve() + + assert 
isinstance(redirected_resource, HttpResource) + assert redirected_resource.url == redirected_url + assert redirected_resource.file_name == file_name + assert redirected_resource.sha256 == sha256_sentinel + assert redirected_resource._preprocess is preprocess_sentinel + + def test_resolve_to_gdrive(self, mocker): + file_name = "data.tar" + original_url = f"http://downloads.pytorch.org/{file_name}" + + id_sentinel = "id-sentinel" + redirected_url = f"https://drive.google.com/file/d/{id_sentinel}/view" + + sha256_sentinel = "sha256_sentinel" + + def preprocess_sentinel(path): + return path + + original_resource = HttpResource( + original_url, + sha256=sha256_sentinel, + preprocess=preprocess_sentinel, + ) + + mocker.patch("torchvision.prototype.datasets.utils._resource._get_redirect_url", return_value=redirected_url) + redirected_resource = original_resource.resolve() + + assert isinstance(redirected_resource, GDriveResource) + assert redirected_resource.id == id_sentinel + assert redirected_resource.file_name == file_name + assert redirected_resource.sha256 == sha256_sentinel + assert redirected_resource._preprocess is preprocess_sentinel + + +def test_missing_dependency_error(): + class DummyDataset(Dataset): + def __init__(self): + super().__init__(root="root", dependencies=("fake_dependency",)) + + def _resources(self): + pass + + def _datapipe(self, resource_dps): + pass + + def __len__(self): + pass + + with pytest.raises(ModuleNotFoundError, match="depends on the third-party package 'fake_dependency'"): + DummyDataset() diff --git a/test/test_prototype_models.py b/test/test_prototype_models.py new file mode 100644 index 00000000000..d32df68f1f4 --- /dev/null +++ b/test/test_prototype_models.py @@ -0,0 +1,84 @@ +import pytest +import test_models as TM +import torch +from common_utils import cpu_and_cuda, set_rng_seed +from torchvision.prototype import models + + +@pytest.mark.parametrize("model_fn", (models.depth.stereo.raft_stereo_base,)) +@pytest.mark.parametrize("model_mode", ("standard", "scripted")) +@pytest.mark.parametrize("dev", cpu_and_cuda()) +def test_raft_stereo(model_fn, model_mode, dev): + # A simple test to make sure the model can do forward pass and jit scriptable + set_rng_seed(0) + + # Use corr_pyramid and corr_block with smaller num_levels and radius to prevent nan output + # get the idea from test_models.test_raft + corr_pyramid = models.depth.stereo.raft_stereo.CorrPyramid1d(num_levels=2) + corr_block = models.depth.stereo.raft_stereo.CorrBlock1d(num_levels=2, radius=2) + model = model_fn(corr_pyramid=corr_pyramid, corr_block=corr_block).eval().to(dev) + + if model_mode == "scripted": + model = torch.jit.script(model) + + img1 = torch.rand(1, 3, 64, 64).to(dev) + img2 = torch.rand(1, 3, 64, 64).to(dev) + num_iters = 3 + + preds = model(img1, img2, num_iters=num_iters) + depth_pred = preds[-1] + + assert len(preds) == num_iters, "Number of predictions should be the same as model.num_iters" + + assert depth_pred.shape == torch.Size( + [1, 1, 64, 64] + ), f"The output shape of depth_pred should be [1, 1, 64, 64] but instead it is {preds[0].shape}" + + # Test against expected file output + TM._assert_expected(depth_pred, name=model_fn.__name__, atol=1e-2, rtol=1e-2) + + +@pytest.mark.parametrize("model_fn", (models.depth.stereo.crestereo_base,)) +@pytest.mark.parametrize("model_mode", ("standard", "scripted")) +@pytest.mark.parametrize("dev", cpu_and_cuda()) +def test_crestereo(model_fn, model_mode, dev): + set_rng_seed(0) + + model = model_fn().eval().to(dev) + + if 
model_mode == "scripted": + model = torch.jit.script(model) + + img1 = torch.rand(1, 3, 64, 64).to(dev) + img2 = torch.rand(1, 3, 64, 64).to(dev) + iterations = 3 + + preds = model(img1, img2, flow_init=None, num_iters=iterations) + disparity_pred = preds[-1] + + # all the pyramid levels except the highest res make only half the number of iterations + expected_iterations = (iterations // 2) * (len(model.resolutions) - 1) + expected_iterations += iterations + assert ( + len(preds) == expected_iterations + ), "Number of predictions should be the number of iterations multiplied by the number of pyramid levels" + + assert disparity_pred.shape == torch.Size( + [1, 2, 64, 64] + ), f"Predicted disparity should have the same spatial shape as the input. Inputs shape {img1.shape[2:]}, Prediction shape {disparity_pred.shape[2:]}" + + assert all( + d.shape == torch.Size([1, 2, 64, 64]) for d in preds + ), "All predicted disparities are expected to have the same shape" + + # test a backward pass with a dummy loss as well + preds = torch.stack(preds, dim=0) + targets = torch.ones_like(preds, requires_grad=False) + loss = torch.nn.functional.mse_loss(preds, targets) + + try: + loss.backward() + except Exception as e: + assert False, f"Backward pass failed with an unexpected exception: {e.__class__.__name__} {e}" + + TM._assert_expected(disparity_pred, name=model_fn.__name__, atol=1e-2, rtol=1e-2) diff --git a/test/test_prototype_transforms.py b/test/test_prototype_transforms.py new file mode 100644 index 00000000000..85ef98cf7b8 --- /dev/null +++ b/test/test_prototype_transforms.py @@ -0,0 +1,429 @@ +import collections.abc +import re + +import PIL.Image +import pytest +import torch + +from common_utils import assert_equal, make_bounding_boxes, make_detection_masks, make_image, make_video + +from torchvision.prototype import transforms, tv_tensors +from torchvision.transforms.v2._utils import check_type, is_pure_tensor +from torchvision.transforms.v2.functional import clamp_bounding_boxes, InterpolationMode, pil_to_tensor, to_pil_image + +from torchvision.tv_tensors import BoundingBoxes, BoundingBoxFormat, Image, Mask, Video + + +def _parse_categories(categories): + if categories is None: + num_categories = int(torch.randint(1, 11, ())) + elif isinstance(categories, int): + num_categories = categories + categories = [f"category{idx}" for idx in range(num_categories)] + elif isinstance(categories, collections.abc.Sequence) and all(isinstance(category, str) for category in categories): + categories = list(categories) + num_categories = len(categories) + else: + raise pytest.UsageError( + f"`categories` can either be `None` (default), an integer, or a sequence of strings, " + f"but got '{categories}' instead." + ) + return categories, num_categories + + +def make_label(*, extra_dims=(), categories=10, dtype=torch.int64, device="cpu"): + categories, num_categories = _parse_categories(categories) + # The idiom `make_tensor(..., dtype=torch.int64).to(dtype)` is intentional to only get integer values, + # regardless of the requested dtype, e.g. 
0 or 0.0 rather than 0 or 0.123 + data = torch.testing.make_tensor(extra_dims, low=0, high=num_categories, dtype=torch.int64, device=device).to(dtype) + return tv_tensors.Label(data, categories=categories) + + +class TestSimpleCopyPaste: + def create_fake_image(self, mocker, image_type): + if image_type == PIL.Image.Image: + return PIL.Image.new("RGB", (32, 32), 123) + return mocker.MagicMock(spec=image_type) + + def test__extract_image_targets_assertion(self, mocker): + transform = transforms.SimpleCopyPaste() + + flat_sample = [ + # images, batch size = 2 + self.create_fake_image(mocker, Image), + # labels, bboxes, masks + mocker.MagicMock(spec=tv_tensors.Label), + mocker.MagicMock(spec=BoundingBoxes), + mocker.MagicMock(spec=Mask), + # labels, bboxes, masks + mocker.MagicMock(spec=BoundingBoxes), + mocker.MagicMock(spec=Mask), + ] + + with pytest.raises(TypeError, match="requires input sample to contain equal sized list of Images"): + transform._extract_image_targets(flat_sample) + + @pytest.mark.parametrize("image_type", [Image, PIL.Image.Image, torch.Tensor]) + @pytest.mark.parametrize("label_type", [tv_tensors.Label, tv_tensors.OneHotLabel]) + def test__extract_image_targets(self, image_type, label_type, mocker): + transform = transforms.SimpleCopyPaste() + + flat_sample = [ + # images, batch size = 2 + self.create_fake_image(mocker, image_type), + self.create_fake_image(mocker, image_type), + # labels, bboxes, masks + mocker.MagicMock(spec=label_type), + mocker.MagicMock(spec=BoundingBoxes), + mocker.MagicMock(spec=Mask), + # labels, bboxes, masks + mocker.MagicMock(spec=label_type), + mocker.MagicMock(spec=BoundingBoxes), + mocker.MagicMock(spec=Mask), + ] + + images, targets = transform._extract_image_targets(flat_sample) + + assert len(images) == len(targets) == 2 + if image_type == PIL.Image.Image: + torch.testing.assert_close(images[0], pil_to_tensor(flat_sample[0])) + torch.testing.assert_close(images[1], pil_to_tensor(flat_sample[1])) + else: + assert images[0] == flat_sample[0] + assert images[1] == flat_sample[1] + + for target in targets: + for key, type_ in [ + ("boxes", BoundingBoxes), + ("masks", Mask), + ("labels", label_type), + ]: + assert key in target + assert isinstance(target[key], type_) + assert target[key] in flat_sample + + @pytest.mark.parametrize("label_type", [tv_tensors.Label, tv_tensors.OneHotLabel]) + def test__copy_paste(self, label_type): + image = 2 * torch.ones(3, 32, 32) + masks = torch.zeros(2, 32, 32) + masks[0, 3:9, 2:8] = 1 + masks[1, 20:30, 20:30] = 1 + labels = torch.tensor([1, 2]) + blending = True + resize_interpolation = InterpolationMode.BILINEAR + antialias = None + if label_type == tv_tensors.OneHotLabel: + labels = torch.nn.functional.one_hot(labels, num_classes=5) + target = { + "boxes": BoundingBoxes( + torch.tensor([[2.0, 3.0, 8.0, 9.0], [20.0, 20.0, 30.0, 30.0]]), format="XYXY", canvas_size=(32, 32) + ), + "masks": Mask(masks), + "labels": label_type(labels), + } + + paste_image = 10 * torch.ones(3, 32, 32) + paste_masks = torch.zeros(2, 32, 32) + paste_masks[0, 13:19, 12:18] = 1 + paste_masks[1, 15:19, 1:8] = 1 + paste_labels = torch.tensor([3, 4]) + if label_type == tv_tensors.OneHotLabel: + paste_labels = torch.nn.functional.one_hot(paste_labels, num_classes=5) + paste_target = { + "boxes": BoundingBoxes( + torch.tensor([[12.0, 13.0, 19.0, 18.0], [1.0, 15.0, 8.0, 19.0]]), format="XYXY", canvas_size=(32, 32) + ), + "masks": Mask(paste_masks), + "labels": label_type(paste_labels), + } + + transform = transforms.SimpleCopyPaste() + 
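+        # `random_selection` picks which paste instances are transferred; with both
+        # indices selected, the output should contain the two original plus the two
+        # pasted instances, which is what the (4, 4) box and (4, 32, 32) mask shape
+        # checks below verify.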
random_selection = torch.tensor([0, 1]) + output_image, output_target = transform._copy_paste( + image, target, paste_image, paste_target, random_selection, blending, resize_interpolation, antialias + ) + + assert output_image.unique().tolist() == [2, 10] + assert output_target["boxes"].shape == (4, 4) + torch.testing.assert_close(output_target["boxes"][:2, :], target["boxes"]) + torch.testing.assert_close(output_target["boxes"][2:, :], paste_target["boxes"]) + + expected_labels = torch.tensor([1, 2, 3, 4]) + if label_type == tv_tensors.OneHotLabel: + expected_labels = torch.nn.functional.one_hot(expected_labels, num_classes=5) + torch.testing.assert_close(output_target["labels"], label_type(expected_labels)) + + assert output_target["masks"].shape == (4, 32, 32) + torch.testing.assert_close(output_target["masks"][:2, :], target["masks"]) + torch.testing.assert_close(output_target["masks"][2:, :], paste_target["masks"]) + + +class TestFixedSizeCrop: + def test_make_params(self, mocker): + crop_size = (7, 7) + batch_shape = (10,) + canvas_size = (11, 5) + + transform = transforms.FixedSizeCrop(size=crop_size) + + flat_inputs = [ + make_image(size=canvas_size, color_space="RGB"), + make_bounding_boxes(format=BoundingBoxFormat.XYXY, canvas_size=canvas_size, num_boxes=batch_shape[0]), + ] + params = transform.make_params(flat_inputs) + + assert params["needs_crop"] + assert params["height"] <= crop_size[0] + assert params["width"] <= crop_size[1] + + assert ( + isinstance(params["is_valid"], torch.Tensor) + and params["is_valid"].dtype is torch.bool + and params["is_valid"].shape == batch_shape + ) + + assert params["needs_pad"] + assert any(pad > 0 for pad in params["padding"]) + + def test__transform_culling(self, mocker): + batch_size = 10 + canvas_size = (10, 10) + + is_valid = torch.randint(0, 2, (batch_size,), dtype=torch.bool) + mocker.patch( + "torchvision.prototype.transforms._geometry.FixedSizeCrop.make_params", + return_value=dict( + needs_crop=True, + top=0, + left=0, + height=canvas_size[0], + width=canvas_size[1], + is_valid=is_valid, + needs_pad=False, + ), + ) + + bounding_boxes = make_bounding_boxes( + format=BoundingBoxFormat.XYXY, canvas_size=canvas_size, num_boxes=batch_size + ) + masks = make_detection_masks(size=canvas_size, num_masks=batch_size) + labels = make_label(extra_dims=(batch_size,)) + + transform = transforms.FixedSizeCrop((-1, -1)) + mocker.patch("torchvision.prototype.transforms._geometry.has_any", return_value=True) + + output = transform( + dict( + bounding_boxes=bounding_boxes, + masks=masks, + labels=labels, + ) + ) + + assert_equal(output["bounding_boxes"], bounding_boxes[is_valid]) + assert_equal(output["masks"], masks[is_valid]) + assert_equal(output["labels"], labels[is_valid]) + + def test__transform_bounding_boxes_clamping(self, mocker): + batch_size = 3 + canvas_size = (10, 10) + + mocker.patch( + "torchvision.prototype.transforms._geometry.FixedSizeCrop.make_params", + return_value=dict( + needs_crop=True, + top=0, + left=0, + height=canvas_size[0], + width=canvas_size[1], + is_valid=torch.full((batch_size,), fill_value=True), + needs_pad=False, + ), + ) + + bounding_boxes = make_bounding_boxes( + format=BoundingBoxFormat.XYXY, canvas_size=canvas_size, num_boxes=batch_size + ) + mock = mocker.patch( + "torchvision.prototype.transforms._geometry.F.clamp_bounding_boxes", wraps=clamp_bounding_boxes + ) + + transform = transforms.FixedSizeCrop((-1, -1)) + mocker.patch("torchvision.prototype.transforms._geometry.has_any", return_value=True) + + 
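+        # `wraps=clamp_bounding_boxes` keeps the real clamping behavior while letting
+        # the mock count invocations, so the single assertion below is enough to show
+        # that the crop routed the boxes through F.clamp_bounding_boxes exactly once.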
transform(bounding_boxes) + + mock.assert_called_once() + + +class TestLabelToOneHot: + def test__transform(self): + categories = ["apple", "pear", "pineapple"] + labels = tv_tensors.Label(torch.tensor([0, 1, 2, 1]), categories=categories) + transform = transforms.LabelToOneHot() + ohe_labels = transform(labels) + assert isinstance(ohe_labels, tv_tensors.OneHotLabel) + assert ohe_labels.shape == (4, 3) + assert ohe_labels.categories == labels.categories == categories + + +class TestPermuteDimensions: + @pytest.mark.parametrize( + ("dims", "inverse_dims"), + [ + ( + {Image: (2, 1, 0), Video: None}, + {Image: (2, 1, 0), Video: None}, + ), + ( + {Image: (2, 1, 0), Video: (1, 2, 3, 0)}, + {Image: (2, 1, 0), Video: (3, 0, 1, 2)}, + ), + ], + ) + def test_call(self, dims, inverse_dims): + sample = dict( + image=make_image(), + bounding_boxes=make_bounding_boxes(format=BoundingBoxFormat.XYXY), + video=make_video(), + str="str", + int=0, + ) + + transform = transforms.PermuteDimensions(dims) + transformed_sample = transform(sample) + + for key, value in sample.items(): + value_type = type(value) + transformed_value = transformed_sample[key] + + if check_type(value, (Image, is_pure_tensor, Video)): + if transform.dims.get(value_type) is not None: + assert transformed_value.permute(inverse_dims[value_type]).equal(value) + assert type(transformed_value) == torch.Tensor + else: + assert transformed_value is value + + @pytest.mark.filterwarnings("error") + def test_plain_tensor_call(self): + tensor = torch.empty((2, 3, 4)) + transform = transforms.PermuteDimensions(dims=(1, 2, 0)) + + assert transform(tensor).shape == (3, 4, 2) + + @pytest.mark.parametrize("other_type", [Image, Video]) + def test_plain_tensor_warning(self, other_type): + with pytest.warns(UserWarning, match=re.escape("`torch.Tensor` will *not* be transformed")): + transforms.PermuteDimensions(dims={torch.Tensor: (0, 1), other_type: (1, 0)}) + + +class TestTransposeDimensions: + @pytest.mark.parametrize( + "dims", + [ + (-1, -2), + {Image: (1, 2), Video: None}, + ], + ) + def test_call(self, dims): + sample = dict( + image=make_image(), + bounding_boxes=make_bounding_boxes(format=BoundingBoxFormat.XYXY), + video=make_video(), + str="str", + int=0, + ) + + transform = transforms.TransposeDimensions(dims) + transformed_sample = transform(sample) + + for key, value in sample.items(): + value_type = type(value) + transformed_value = transformed_sample[key] + + transposed_dims = transform.dims.get(value_type) + if check_type(value, (Image, is_pure_tensor, Video)): + if transposed_dims is not None: + assert transformed_value.transpose(*transposed_dims).equal(value) + assert type(transformed_value) == torch.Tensor + else: + assert transformed_value is value + + @pytest.mark.filterwarnings("error") + def test_plain_tensor_call(self): + tensor = torch.empty((2, 3, 4)) + transform = transforms.TransposeDimensions(dims=(0, 2)) + + assert transform(tensor).shape == (4, 3, 2) + + @pytest.mark.parametrize("other_type", [Image, Video]) + def test_plain_tensor_warning(self, other_type): + with pytest.warns(UserWarning, match=re.escape("`torch.Tensor` will *not* be transformed")): + transforms.TransposeDimensions(dims={torch.Tensor: (0, 1), other_type: (1, 0)}) + + +import importlib.machinery +import importlib.util +from pathlib import Path + + +def import_transforms_from_references(reference): + HERE = Path(__file__).parent + PROJECT_ROOT = HERE.parent + + loader = importlib.machinery.SourceFileLoader( + "transforms", str(PROJECT_ROOT / "references" / 
reference / "transforms.py") + ) + spec = importlib.util.spec_from_loader("transforms", loader) + module = importlib.util.module_from_spec(spec) + loader.exec_module(module) + return module + + +det_transforms = import_transforms_from_references("detection") + + +def test_fixed_sized_crop_against_detection_reference(): + def make_tv_tensors(): + size = (600, 800) + num_objects = 22 + + pil_image = to_pil_image(make_image(size=size, color_space="RGB")) + target = { + "boxes": make_bounding_boxes(canvas_size=size, format="XYXY", num_boxes=num_objects, dtype=torch.float), + "labels": make_label(extra_dims=(num_objects,), categories=80), + "masks": make_detection_masks(size=size, num_masks=num_objects, dtype=torch.long), + } + + yield (pil_image, target) + + tensor_image = torch.Tensor(make_image(size=size, color_space="RGB")) + target = { + "boxes": make_bounding_boxes(canvas_size=size, format="XYXY", num_boxes=num_objects, dtype=torch.float), + "labels": make_label(extra_dims=(num_objects,), categories=80), + "masks": make_detection_masks(size=size, num_masks=num_objects, dtype=torch.long), + } + + yield (tensor_image, target) + + tv_tensor_image = make_image(size=size, color_space="RGB") + target = { + "boxes": make_bounding_boxes(canvas_size=size, format="XYXY", num_boxes=num_objects, dtype=torch.float), + "labels": make_label(extra_dims=(num_objects,), categories=80), + "masks": make_detection_masks(size=size, num_masks=num_objects, dtype=torch.long), + } + + yield (tv_tensor_image, target) + + t = transforms.FixedSizeCrop((1024, 1024), fill=0) + t_ref = det_transforms.FixedSizeCrop((1024, 1024), fill=0) + + for dp in make_tv_tensors(): + # We should use prototype transform first as reference transform performs inplace target update + torch.manual_seed(12) + output = t(dp) + + torch.manual_seed(12) + expected_output = t_ref(*dp) + + assert_equal(expected_output, output) diff --git a/test/test_quantized_models.py b/test/test_quantized_models.py deleted file mode 100644 index f20cc369276..00000000000 --- a/test/test_quantized_models.py +++ /dev/null @@ -1,90 +0,0 @@ -import torchvision -from common_utils import TestCase, map_nested_tensor_object -from collections import OrderedDict -from itertools import product -import torch -import numpy as np -from torchvision import models -import unittest -import traceback -import random - - -def set_rng_seed(seed): - torch.manual_seed(seed) - random.seed(seed) - np.random.seed(seed) - - -def get_available_quantizable_models(): - # TODO add a registration mechanism to torchvision.models - return [k for k, v in models.quantization.__dict__.items() if callable(v) and k[0].lower() == k[0] and k[0] != "_"] - - -# list of models that are not scriptable -scriptable_quantizable_models_blacklist = [] - - -@unittest.skipUnless('fbgemm' in torch.backends.quantized.supported_engines and - 'qnnpack' in torch.backends.quantized.supported_engines, - "This Pytorch Build has not been built with fbgemm and qnnpack") -class ModelTester(TestCase): - def check_quantized_model(self, model, input_shape): - x = torch.rand(input_shape) - model(x) - return - - def check_script(self, model, name): - if name in scriptable_quantizable_models_blacklist: - return - scriptable = True - msg = "" - try: - torch.jit.script(model) - except Exception as e: - tb = traceback.format_exc() - scriptable = False - msg = str(e) + str(tb) - self.assertTrue(scriptable, msg) - - def _test_classification_model(self, name, input_shape): - # First check if quantize=True provides models that can run 
with input data - - model = torchvision.models.quantization.__dict__[name](pretrained=False, quantize=True) - self.check_quantized_model(model, input_shape) - - for eval_mode in [True, False]: - model = torchvision.models.quantization.__dict__[name](pretrained=False, quantize=False) - if eval_mode: - model.eval() - model.qconfig = torch.quantization.default_qconfig - else: - model.train() - model.qconfig = torch.quantization.default_qat_qconfig - - model.fuse_model() - if eval_mode: - torch.quantization.prepare(model, inplace=True) - else: - torch.quantization.prepare_qat(model, inplace=True) - model.eval() - - torch.quantization.convert(model, inplace=True) - - self.check_script(model, name) - - -for model_name in get_available_quantizable_models(): - # for-loop bodies don't define scopes, so we have to save the variables - # we want to close over in some way - def do_test(self, model_name=model_name): - input_shape = (1, 3, 224, 224) - if model_name in ['inception_v3']: - input_shape = (1, 3, 299, 299) - self._test_classification_model(model_name, input_shape) - - setattr(ModelTester, "test_" + model_name, do_test) - - -if __name__ == '__main__': - unittest.main() diff --git a/test/test_transforms.py b/test/test_transforms.py index 1bbe1165f93..325ffa40b6c 100644 --- a/test/test_transforms.py +++ b/test/test_transforms.py @@ -1,16 +1,21 @@ -from __future__ import division +import math import os +import random +import re +import sys +from functools import partial + +import numpy as np +import pytest import torch import torchvision.transforms as transforms +import torchvision.transforms._functional_tensor as F_t import torchvision.transforms.functional as F -from torch._utils_internal import get_file_path_2 -import unittest -import math -import random -import numpy as np from PIL import Image +from torch._utils_internal import get_file_path_2 # @manual=fbcode//caffe2:utils_internal + try: - import accimage + import accimage # @manual=fbcode//pytorch/accimage:accimage except ImportError: accimage = None @@ -19,272 +24,481 @@ except ImportError: stats = None +from common_utils import assert_equal, cycle_over, float_dtypes, int_dtypes + + GRACE_HOPPER = get_file_path_2( - os.path.dirname(os.path.abspath(__file__)), 'assets', 'grace_hopper_517x606.jpg') + os.path.dirname(os.path.abspath(__file__)), "assets", "encode_jpeg", "grace_hopper_517x606.jpg" +) -class Tester(unittest.TestCase): +def _get_grayscale_test_image(img, fill=None): + img = img.convert("L") + fill = (fill[0],) if isinstance(fill, tuple) else fill + return img, fill - def test_crop(self): - height = random.randint(10, 32) * 2 - width = random.randint(10, 32) * 2 - oheight = random.randint(5, (height - 2) / 2) * 2 - owidth = random.randint(5, (width - 2) / 2) * 2 - img = torch.ones(3, height, width) - oh1 = (height - oheight) // 2 - ow1 = (width - owidth) // 2 - imgnarrow = img[:, oh1:oh1 + oheight, ow1:ow1 + owidth] - imgnarrow.fill_(0) - result = transforms.Compose([ - transforms.ToPILImage(), - transforms.CenterCrop((oheight, owidth)), - transforms.ToTensor(), - ])(img) - self.assertEqual(result.sum(), 0, - "height: {} width: {} oheight: {} owdith: {}".format(height, width, oheight, owidth)) - oheight += 1 - owidth += 1 - result = transforms.Compose([ - transforms.ToPILImage(), - transforms.CenterCrop((oheight, owidth)), - transforms.ToTensor(), - ])(img) - sum1 = result.sum() - self.assertGreater(sum1, 1, - "height: {} width: {} oheight: {} owdith: {}".format(height, width, oheight, owidth)) - oheight += 1 - owidth += 1 - 
result = transforms.Compose([ - transforms.ToPILImage(), - transforms.CenterCrop((oheight, owidth)), - transforms.ToTensor(), - ])(img) - sum2 = result.sum() - self.assertGreater(sum2, 0, - "height: {} width: {} oheight: {} owdith: {}".format(height, width, oheight, owidth)) - self.assertGreater(sum2, sum1, - "height: {} width: {} oheight: {} owdith: {}".format(height, width, oheight, owidth)) - - def test_five_crop(self): - to_pil_image = transforms.ToPILImage() - h = random.randint(5, 25) - w = random.randint(5, 25) - for single_dim in [True, False]: - crop_h = random.randint(1, h) - crop_w = random.randint(1, w) - if single_dim: - crop_h = min(crop_h, crop_w) - crop_w = crop_h - transform = transforms.FiveCrop(crop_h) - else: - transform = transforms.FiveCrop((crop_h, crop_w)) - - img = torch.FloatTensor(3, h, w).uniform_() - results = transform(to_pil_image(img)) - - self.assertEqual(len(results), 5) - for crop in results: - self.assertEqual(crop.size, (crop_w, crop_h)) - - to_pil_image = transforms.ToPILImage() - tl = to_pil_image(img[:, 0:crop_h, 0:crop_w]) - tr = to_pil_image(img[:, 0:crop_h, w - crop_w:]) - bl = to_pil_image(img[:, h - crop_h:, 0:crop_w]) - br = to_pil_image(img[:, h - crop_h:, w - crop_w:]) - center = transforms.CenterCrop((crop_h, crop_w))(to_pil_image(img)) - expected_output = (tl, tr, bl, br, center) - self.assertEqual(results, expected_output) - - def test_ten_crop(self): - to_pil_image = transforms.ToPILImage() - h = random.randint(5, 25) - w = random.randint(5, 25) - for should_vflip in [True, False]: - for single_dim in [True, False]: - crop_h = random.randint(1, h) - crop_w = random.randint(1, w) - if single_dim: - crop_h = min(crop_h, crop_w) - crop_w = crop_h - transform = transforms.TenCrop(crop_h, - vertical_flip=should_vflip) - five_crop = transforms.FiveCrop(crop_h) - else: - transform = transforms.TenCrop((crop_h, crop_w), - vertical_flip=should_vflip) - five_crop = transforms.FiveCrop((crop_h, crop_w)) - - img = to_pil_image(torch.FloatTensor(3, h, w).uniform_()) - results = transform(img) - expected_output = five_crop(img) - - # Checking if FiveCrop and TenCrop can be printed as string - transform.__repr__() - five_crop.__repr__() - - if should_vflip: - vflipped_img = img.transpose(Image.FLIP_TOP_BOTTOM) - expected_output += five_crop(vflipped_img) - else: - hflipped_img = img.transpose(Image.FLIP_LEFT_RIGHT) - expected_output += five_crop(hflipped_img) - - self.assertEqual(len(results), 10) - self.assertEqual(results, expected_output) - - def test_randomresized_params(self): - height = random.randint(24, 32) * 2 - width = random.randint(24, 32) * 2 - img = torch.ones(3, height, width) - to_pil_image = transforms.ToPILImage() - img = to_pil_image(img) - size = 100 - epsilon = 0.05 - min_scale = 0.25 - for _ in range(10): - scale_min = max(round(random.random(), 2), min_scale) - scale_range = (scale_min, scale_min + round(random.random(), 2)) - aspect_min = max(round(random.random(), 2), epsilon) - aspect_ratio_range = (aspect_min, aspect_min + round(random.random(), 2)) - randresizecrop = transforms.RandomResizedCrop(size, scale_range, aspect_ratio_range) - i, j, h, w = randresizecrop.get_params(img, scale_range, aspect_ratio_range) - aspect_ratio_obtained = w / h - self.assertTrue((min(aspect_ratio_range) - epsilon <= aspect_ratio_obtained and - aspect_ratio_obtained <= max(aspect_ratio_range) + epsilon) or - aspect_ratio_obtained == 1.0) - self.assertIsInstance(i, int) - self.assertIsInstance(j, int) - self.assertIsInstance(h, int) - 
self.assertIsInstance(w, int) - - def test_randomperspective(self): - for _ in range(10): - height = random.randint(24, 32) * 2 - width = random.randint(24, 32) * 2 - img = torch.ones(3, height, width) - to_pil_image = transforms.ToPILImage() - img = to_pil_image(img) - perp = transforms.RandomPerspective() - startpoints, endpoints = perp.get_params(width, height, 0.5) - tr_img = F.perspective(img, startpoints, endpoints) - tr_img2 = F.to_tensor(F.perspective(tr_img, endpoints, startpoints)) - tr_img = F.to_tensor(tr_img) - self.assertEqual(img.size[0], width) - self.assertEqual(img.size[1], height) - self.assertGreater(torch.nn.functional.mse_loss(tr_img, F.to_tensor(img)) + 0.3, - torch.nn.functional.mse_loss(tr_img2, F.to_tensor(img))) - - def test_resize(self): - height = random.randint(24, 32) * 2 - width = random.randint(24, 32) * 2 - osize = random.randint(5, 12) * 2 +class TestConvertImageDtype: + @pytest.mark.parametrize("input_dtype, output_dtype", cycle_over(float_dtypes())) + def test_float_to_float(self, input_dtype, output_dtype): + input_image = torch.tensor((0.0, 1.0), dtype=input_dtype) + transform = transforms.ConvertImageDtype(output_dtype) + transform_script = torch.jit.script(F.convert_image_dtype) - img = torch.ones(3, height, width) - result = transforms.Compose([ - transforms.ToPILImage(), - transforms.Resize(osize), - transforms.ToTensor(), - ])(img) - self.assertIn(osize, result.size()) - if height < width: - self.assertLessEqual(result.size(1), result.size(2)) - elif width < height: - self.assertGreaterEqual(result.size(1), result.size(2)) - - result = transforms.Compose([ - transforms.ToPILImage(), - transforms.Resize([osize, osize]), - transforms.ToTensor(), - ])(img) - self.assertIn(osize, result.size()) - self.assertEqual(result.size(1), osize) - self.assertEqual(result.size(2), osize) - - oheight = random.randint(5, 12) * 2 - owidth = random.randint(5, 12) * 2 - result = transforms.Compose([ - transforms.ToPILImage(), - transforms.Resize((oheight, owidth)), - transforms.ToTensor(), - ])(img) - self.assertEqual(result.size(1), oheight) - self.assertEqual(result.size(2), owidth) + output_image = transform(input_image) + output_image_script = transform_script(input_image, output_dtype) - result = transforms.Compose([ - transforms.ToPILImage(), - transforms.Resize([oheight, owidth]), - transforms.ToTensor(), - ])(img) - self.assertEqual(result.size(1), oheight) - self.assertEqual(result.size(2), owidth) + torch.testing.assert_close(output_image_script, output_image, rtol=0.0, atol=1e-6) - def test_random_crop(self): - height = random.randint(10, 32) * 2 - width = random.randint(10, 32) * 2 - oheight = random.randint(5, (height - 2) / 2) * 2 - owidth = random.randint(5, (width - 2) / 2) * 2 - img = torch.ones(3, height, width) - result = transforms.Compose([ - transforms.ToPILImage(), - transforms.RandomCrop((oheight, owidth)), - transforms.ToTensor(), - ])(img) - self.assertEqual(result.size(1), oheight) - self.assertEqual(result.size(2), owidth) + actual_min, actual_max = output_image.tolist() + desired_min, desired_max = 0.0, 1.0 - padding = random.randint(1, 20) - result = transforms.Compose([ - transforms.ToPILImage(), - transforms.RandomCrop((oheight, owidth), padding=padding), - transforms.ToTensor(), - ])(img) - self.assertEqual(result.size(1), oheight) - self.assertEqual(result.size(2), owidth) + assert abs(actual_min - desired_min) < 1e-7 + assert abs(actual_max - desired_max) < 1e-7 + + @pytest.mark.parametrize("input_dtype", float_dtypes()) + 
@pytest.mark.parametrize("output_dtype", int_dtypes()) + def test_float_to_int(self, input_dtype, output_dtype): + input_image = torch.tensor((0.0, 1.0), dtype=input_dtype) + transform = transforms.ConvertImageDtype(output_dtype) + transform_script = torch.jit.script(F.convert_image_dtype) + + if (input_dtype == torch.float32 and output_dtype in (torch.int32, torch.int64)) or ( + input_dtype == torch.float64 and output_dtype == torch.int64 + ): + with pytest.raises(RuntimeError): + transform(input_image) + else: + output_image = transform(input_image) + output_image_script = transform_script(input_image, output_dtype) + + torch.testing.assert_close(output_image_script, output_image, rtol=0.0, atol=1e-6) + + actual_min, actual_max = output_image.tolist() + desired_min, desired_max = 0, torch.iinfo(output_dtype).max + + assert actual_min == desired_min + assert actual_max == desired_max + + @pytest.mark.parametrize("input_dtype", int_dtypes()) + @pytest.mark.parametrize("output_dtype", float_dtypes()) + def test_int_to_float(self, input_dtype, output_dtype): + input_image = torch.tensor((0, torch.iinfo(input_dtype).max), dtype=input_dtype) + transform = transforms.ConvertImageDtype(output_dtype) + transform_script = torch.jit.script(F.convert_image_dtype) + + output_image = transform(input_image) + output_image_script = transform_script(input_image, output_dtype) + + torch.testing.assert_close(output_image_script, output_image, rtol=0.0, atol=1e-6) + + actual_min, actual_max = output_image.tolist() + desired_min, desired_max = 0.0, 1.0 + + assert abs(actual_min - desired_min) < 1e-7 + assert actual_min >= desired_min + assert abs(actual_max - desired_max) < 1e-7 + assert actual_max <= desired_max + + @pytest.mark.parametrize("input_dtype, output_dtype", cycle_over(int_dtypes())) + def test_dtype_int_to_int(self, input_dtype, output_dtype): + input_max = torch.iinfo(input_dtype).max + input_image = torch.tensor((0, input_max), dtype=input_dtype) + output_max = torch.iinfo(output_dtype).max + + transform = transforms.ConvertImageDtype(output_dtype) + transform_script = torch.jit.script(F.convert_image_dtype) + + output_image = transform(input_image) + output_image_script = transform_script(input_image, output_dtype) + + torch.testing.assert_close( + output_image_script, + output_image, + rtol=0.0, + atol=1e-6, + msg=f"{output_image_script} vs {output_image}", + ) + + actual_min, actual_max = output_image.tolist() + desired_min, desired_max = 0, output_max + + # see https://github.com/pytorch/vision/pull/2078#issuecomment-641036236 for details + if input_max >= output_max: + error_term = 0 + else: + error_term = 1 - (torch.iinfo(output_dtype).max + 1) // (torch.iinfo(input_dtype).max + 1) + + assert actual_min == desired_min + assert actual_max == (desired_max + error_term) + + @pytest.mark.parametrize("input_dtype, output_dtype", cycle_over(int_dtypes())) + def test_int_to_int_consistency(self, input_dtype, output_dtype): + input_max = torch.iinfo(input_dtype).max + input_image = torch.tensor((0, input_max), dtype=input_dtype) + + output_max = torch.iinfo(output_dtype).max + if output_max <= input_max: + return + + transform = transforms.ConvertImageDtype(output_dtype) + inverse_transfrom = transforms.ConvertImageDtype(input_dtype) + output_image = inverse_transfrom(transform(input_image)) + + actual_min, actual_max = output_image.tolist() + desired_min, desired_max = 0, input_max + + assert actual_min == desired_min + assert actual_max == desired_max - result = transforms.Compose([ - 
transforms.ToPILImage(), - transforms.RandomCrop((height, width)), - transforms.ToTensor() - ])(img) - self.assertEqual(result.size(1), height) - self.assertEqual(result.size(2), width) - self.assertTrue(np.allclose(img.numpy(), result.numpy())) - - result = transforms.Compose([ - transforms.ToPILImage(), - transforms.RandomCrop((height + 1, width + 1), pad_if_needed=True), - transforms.ToTensor(), - ])(img) - self.assertEqual(result.size(1), height + 1) - self.assertEqual(result.size(2), width + 1) - def test_pad(self): +@pytest.mark.skipif(accimage is None, reason="accimage not available") +class TestAccImage: + def test_accimage_to_tensor(self): + trans = transforms.PILToTensor() + + expected_output = trans(Image.open(GRACE_HOPPER).convert("RGB")) + output = trans(accimage.Image(GRACE_HOPPER)) + + torch.testing.assert_close(output, expected_output) + + def test_accimage_pil_to_tensor(self): + trans = transforms.PILToTensor() + + expected_output = trans(Image.open(GRACE_HOPPER).convert("RGB")) + output = trans(accimage.Image(GRACE_HOPPER)) + + assert expected_output.size() == output.size() + torch.testing.assert_close(output, expected_output) + + def test_accimage_resize(self): + trans = transforms.Compose( + [ + transforms.Resize(256, interpolation=Image.LINEAR), + transforms.PILToTensor(), + transforms.ConvertImageDtype(dtype=torch.float), + ] + ) + + # Checking if Compose, Resize and ToTensor can be printed as string + trans.__repr__() + + expected_output = trans(Image.open(GRACE_HOPPER).convert("RGB")) + output = trans(accimage.Image(GRACE_HOPPER)) + + assert expected_output.size() == output.size() + assert np.abs((expected_output - output).mean()) < 1e-3 + assert (expected_output - output).var() < 1e-5 + # note the high absolute tolerance + torch.testing.assert_close(output.numpy(), expected_output.numpy(), rtol=1e-5, atol=5e-2) + + def test_accimage_crop(self): + trans = transforms.Compose( + [transforms.CenterCrop(256), transforms.PILToTensor(), transforms.ConvertImageDtype(dtype=torch.float)] + ) + + # Checking if Compose, CenterCrop and ToTensor can be printed as string + trans.__repr__() + + expected_output = trans(Image.open(GRACE_HOPPER).convert("RGB")) + output = trans(accimage.Image(GRACE_HOPPER)) + + assert expected_output.size() == output.size() + torch.testing.assert_close(output, expected_output) + + +class TestToTensor: + @pytest.mark.parametrize("channels", [1, 3, 4]) + def test_to_tensor(self, channels): + height, width = 4, 4 + trans = transforms.ToTensor() + np_rng = np.random.RandomState(0) + + input_data = torch.ByteTensor(channels, height, width).random_(0, 255).float().div_(255) + img = transforms.ToPILImage()(input_data) + output = trans(img) + torch.testing.assert_close(output, input_data) + + ndarray = np_rng.randint(low=0, high=255, size=(height, width, channels)).astype(np.uint8) + output = trans(ndarray) + expected_output = ndarray.transpose((2, 0, 1)) / 255.0 + torch.testing.assert_close(output.numpy(), expected_output, check_dtype=False) + + ndarray = np_rng.rand(height, width, channels).astype(np.float32) + output = trans(ndarray) + expected_output = ndarray.transpose((2, 0, 1)) + torch.testing.assert_close(output.numpy(), expected_output, check_dtype=False) + + # separate test for mode '1' PIL images + input_data = torch.ByteTensor(1, height, width).bernoulli_() + img = transforms.ToPILImage()(input_data.mul(255)).convert("1") + output = trans(img) + torch.testing.assert_close(input_data, output, check_dtype=False) + + def test_to_tensor_errors(self): 
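+        # Note: ToTensor only accepts PIL Images and 2D/3D ndarrays; the checks
+        # below pin down its failure modes: a plain Python list raises
+        # TypeError, while 1D or 4D arrays raise ValueError.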
+ height, width = 4, 4 + trans = transforms.ToTensor() + np_rng = np.random.RandomState(0) + + with pytest.raises(TypeError): + trans(np_rng.rand(1, height, width).tolist()) + + with pytest.raises(ValueError): + trans(np_rng.rand(height)) + + with pytest.raises(ValueError): + trans(np_rng.rand(1, 1, height, width)) + + @pytest.mark.parametrize("dtype", [torch.float16, torch.float, torch.double]) + def test_to_tensor_with_other_default_dtypes(self, dtype): + np_rng = np.random.RandomState(0) + current_def_dtype = torch.get_default_dtype() + + t = transforms.ToTensor() + np_arr = np_rng.randint(0, 255, (32, 32, 3), dtype=np.uint8) + img = Image.fromarray(np_arr) + + torch.set_default_dtype(dtype) + res = t(img) + assert res.dtype == dtype, f"{res.dtype} vs {dtype}" + + torch.set_default_dtype(current_def_dtype) + + @pytest.mark.parametrize("channels", [1, 3, 4]) + def test_pil_to_tensor(self, channels): + height, width = 4, 4 + trans = transforms.PILToTensor() + np_rng = np.random.RandomState(0) + + input_data = torch.ByteTensor(channels, height, width).random_(0, 255) + img = transforms.ToPILImage()(input_data) + output = trans(img) + torch.testing.assert_close(input_data, output) + + input_data = np_rng.randint(low=0, high=255, size=(height, width, channels)).astype(np.uint8) + img = transforms.ToPILImage()(input_data) + output = trans(img) + expected_output = input_data.transpose((2, 0, 1)) + torch.testing.assert_close(output.numpy(), expected_output) + + input_data = torch.as_tensor(np_rng.rand(channels, height, width).astype(np.float32)) + img = transforms.ToPILImage()(input_data) # CHW -> HWC and (* 255).byte() + output = trans(img) # HWC -> CHW + expected_output = (input_data * 255).byte() + torch.testing.assert_close(output, expected_output) + + # separate test for mode '1' PIL images + input_data = torch.ByteTensor(1, height, width).bernoulli_() + img = transforms.ToPILImage()(input_data.mul(255)).convert("1") + output = trans(img).view(torch.uint8).bool().to(torch.uint8) + torch.testing.assert_close(input_data, output) + + def test_pil_to_tensor_errors(self): + height, width = 4, 4 + trans = transforms.PILToTensor() + np_rng = np.random.RandomState(0) + + with pytest.raises(TypeError): + trans(np_rng.rand(1, height, width).tolist()) + + with pytest.raises(TypeError): + trans(np_rng.rand(1, height, width)) + + +def test_randomresized_params(): + height = random.randint(24, 32) * 2 + width = random.randint(24, 32) * 2 + img = torch.ones(3, height, width) + to_pil_image = transforms.ToPILImage() + img = to_pil_image(img) + size = 100 + epsilon = 0.05 + min_scale = 0.25 + for _ in range(10): + scale_min = max(round(random.random(), 2), min_scale) + scale_range = (scale_min, scale_min + round(random.random(), 2)) + aspect_min = max(round(random.random(), 2), epsilon) + aspect_ratio_range = (aspect_min, aspect_min + round(random.random(), 2)) + randresizecrop = transforms.RandomResizedCrop(size, scale_range, aspect_ratio_range, antialias=True) + i, j, h, w = randresizecrop.get_params(img, scale_range, aspect_ratio_range) + aspect_ratio_obtained = w / h + assert ( + min(aspect_ratio_range) - epsilon <= aspect_ratio_obtained + and aspect_ratio_obtained <= max(aspect_ratio_range) + epsilon + ) or aspect_ratio_obtained == 1.0 + assert isinstance(i, int) + assert isinstance(j, int) + assert isinstance(h, int) + assert isinstance(w, int) + + +@pytest.mark.parametrize( + "height, width", + [ + # height, width + # square image + (28, 28), + (27, 27), + # rectangular image: h < w + (28, 34), + 
(29, 35), + # rectangular image: h > w + (34, 28), + (35, 29), + ], +) +@pytest.mark.parametrize( + "osize", + [ + # single integer + 22, + 27, + 28, + 36, + # single integer in tuple/list + [ + 22, + ], + (27,), + ], +) +@pytest.mark.parametrize("max_size", (None, 37, 1000)) +def test_resize(height, width, osize, max_size): + img = Image.new("RGB", size=(width, height), color=127) + + t = transforms.Resize(osize, max_size=max_size, antialias=True) + result = t(img) + + msg = f"{height}, {width} - {osize} - {max_size}" + osize = osize[0] if isinstance(osize, (list, tuple)) else osize + # If size is an int, smaller edge of the image will be matched to this number. + # i.e, if height > width, then image will be rescaled to (size * height / width, size). + if height < width: + exp_w, exp_h = (int(osize * width / height), osize) # (w, h) + if max_size is not None and max_size < exp_w: + exp_w, exp_h = max_size, int(max_size * exp_h / exp_w) + assert result.size == (exp_w, exp_h), msg + elif width < height: + exp_w, exp_h = (osize, int(osize * height / width)) # (w, h) + if max_size is not None and max_size < exp_h: + exp_w, exp_h = int(max_size * exp_w / exp_h), max_size + assert result.size == (exp_w, exp_h), msg + else: + exp_w, exp_h = (osize, osize) # (w, h) + if max_size is not None and max_size < osize: + exp_w, exp_h = max_size, max_size + assert result.size == (exp_w, exp_h), msg + + +@pytest.mark.parametrize( + "height, width", + [ + # height, width + # square image + (28, 28), + (27, 27), + # rectangular image: h < w + (28, 34), + (29, 35), + # rectangular image: h > w + (34, 28), + (35, 29), + ], +) +@pytest.mark.parametrize( + "osize", + [ + # two integers sequence output + [22, 22], + [22, 28], + [22, 36], + [27, 22], + [36, 22], + [28, 28], + [28, 37], + [37, 27], + [37, 37], + ], +) +def test_resize_sequence_output(height, width, osize): + img = Image.new("RGB", size=(width, height), color=127) + oheight, owidth = osize + + t = transforms.Resize(osize, antialias=True) + result = t(img) + + assert (owidth, oheight) == result.size + + +def test_resize_antialias_error(): + osize = [37, 37] + img = Image.new("RGB", size=(35, 29), color=127) + + with pytest.warns(UserWarning, match=r"Anti-alias option is always applied for PIL Image input"): + t = transforms.Resize(osize, antialias=False) + t(img) + + +@pytest.mark.parametrize("height, width", ((32, 64), (64, 32))) +def test_resize_size_equals_small_edge_size(height, width): + # Non-regression test for https://github.com/pytorch/vision/issues/5405 + # max_size used to be ignored if size == small_edge_size + max_size = 40 + img = Image.new("RGB", size=(width, height), color=127) + + small_edge = min(height, width) + t = transforms.Resize(small_edge, max_size=max_size, antialias=True) + result = t(img) + assert max(result.size) == max_size + + +def test_resize_equal_input_output_sizes(): + # Regression test for https://github.com/pytorch/vision/issues/7518 + height, width = 28, 27 + img = Image.new("RGB", size=(width, height)) + + t = transforms.Resize((height, width), antialias=True) + result = t(img) + assert result is img + + +class TestPad: + @pytest.mark.parametrize("fill", [85, 85.0]) + def test_pad(self, fill): height = random.randint(10, 32) * 2 width = random.randint(10, 32) * 2 - img = torch.ones(3, height, width) + img = torch.ones(3, height, width, dtype=torch.uint8) padding = random.randint(1, 20) - result = transforms.Compose([ - transforms.ToPILImage(), - transforms.Pad(padding), - transforms.ToTensor(), - ])(img) - 
self.assertEqual(result.size(1), height + 2 * padding) - self.assertEqual(result.size(2), width + 2 * padding) + result = transforms.Compose( + [ + transforms.ToPILImage(), + transforms.Pad(padding, fill=fill), + transforms.PILToTensor(), + ] + )(img) + assert result.size(1) == height + 2 * padding + assert result.size(2) == width + 2 * padding + # check that all elements in the padded region correspond + # to the pad value + h_padded = result[:, :padding, :] + w_padded = result[:, :, :padding] + torch.testing.assert_close(h_padded, torch.full_like(h_padded, fill_value=fill), rtol=0.0, atol=0.0) + torch.testing.assert_close(w_padded, torch.full_like(w_padded, fill_value=fill), rtol=0.0, atol=0.0) + pytest.raises(ValueError, transforms.Pad(padding, fill=(1, 2)), transforms.ToPILImage()(img)) def test_pad_with_tuple_of_pad_values(self): height = random.randint(10, 32) * 2 width = random.randint(10, 32) * 2 img = transforms.ToPILImage()(torch.ones(3, height, width)) - padding = tuple([random.randint(1, 20) for _ in range(2)]) + padding = tuple(random.randint(1, 20) for _ in range(2)) output = transforms.Pad(padding)(img) - self.assertEqual(output.size, (width + padding[0] * 2, height + padding[1] * 2)) + assert output.size == (width + padding[0] * 2, height + padding[1] * 2) - padding = tuple([random.randint(1, 20) for _ in range(4)]) + padding = [random.randint(1, 20) for _ in range(4)] output = transforms.Pad(padding)(img) - self.assertEqual(output.size[0], width + padding[0] + padding[2]) - self.assertEqual(output.size[1], height + padding[1] + padding[3]) + assert output.size[0] == width + padding[0] + padding[2] + assert output.size[1] == height + padding[1] + padding[3] # Checking if Padding can be printed as string transforms.Pad(padding).__repr__() @@ -297,1124 +511,1735 @@ def test_pad_with_non_constant_padding_modes(self): img = F.pad(img, 1, (200, 200, 200)) # pad 3 to all sidess - edge_padded_img = F.pad(img, 3, padding_mode='edge') + edge_padded_img = F.pad(img, 3, padding_mode="edge") # First 6 elements of leftmost edge in the middle of the image, values are in order: # edge_pad, edge_pad, edge_pad, constant_pad, constant value added to leftmost edge, 0 edge_middle_slice = np.asarray(edge_padded_img).transpose(2, 0, 1)[0][17][:6] - self.assertTrue(np.all(edge_middle_slice == np.asarray([200, 200, 200, 200, 1, 0]))) - self.assertEqual(transforms.ToTensor()(edge_padded_img).size(), (3, 35, 35)) + assert_equal(edge_middle_slice, np.asarray([200, 200, 200, 200, 1, 0], dtype=np.uint8)) + assert transforms.PILToTensor()(edge_padded_img).size() == (3, 35, 35) # Pad 3 to left/right, 2 to top/bottom - reflect_padded_img = F.pad(img, (3, 2), padding_mode='reflect') + reflect_padded_img = F.pad(img, (3, 2), padding_mode="reflect") # First 6 elements of leftmost edge in the middle of the image, values are in order: # reflect_pad, reflect_pad, reflect_pad, constant_pad, constant value added to leftmost edge, 0 reflect_middle_slice = np.asarray(reflect_padded_img).transpose(2, 0, 1)[0][17][:6] - self.assertTrue(np.all(reflect_middle_slice == np.asarray([0, 0, 1, 200, 1, 0]))) - self.assertEqual(transforms.ToTensor()(reflect_padded_img).size(), (3, 33, 35)) + assert_equal(reflect_middle_slice, np.asarray([0, 0, 1, 200, 1, 0], dtype=np.uint8)) + assert transforms.PILToTensor()(reflect_padded_img).size() == (3, 33, 35) # Pad 3 to left, 2 to top, 2 to right, 1 to bottom - symmetric_padded_img = F.pad(img, (3, 2, 2, 1), padding_mode='symmetric') + symmetric_padded_img = F.pad(img, (3, 2, 2, 1), 
padding_mode="symmetric") # First 6 elements of leftmost edge in the middle of the image, values are in order: # sym_pad, sym_pad, sym_pad, constant_pad, constant value added to leftmost edge, 0 symmetric_middle_slice = np.asarray(symmetric_padded_img).transpose(2, 0, 1)[0][17][:6] - self.assertTrue(np.all(symmetric_middle_slice == np.asarray([0, 1, 200, 200, 1, 0]))) - self.assertEqual(transforms.ToTensor()(symmetric_padded_img).size(), (3, 32, 34)) + assert_equal(symmetric_middle_slice, np.asarray([0, 1, 200, 200, 1, 0], dtype=np.uint8)) + assert transforms.PILToTensor()(symmetric_padded_img).size() == (3, 32, 34) + + # Check negative padding explicitly for symmetric case, since it is not + # implemented for tensor case to compare to + # Crop 1 to left, pad 2 to top, pad 3 to right, crop 3 to bottom + symmetric_padded_img_neg = F.pad(img, (-1, 2, 3, -3), padding_mode="symmetric") + symmetric_neg_middle_left = np.asarray(symmetric_padded_img_neg).transpose(2, 0, 1)[0][17][:3] + symmetric_neg_middle_right = np.asarray(symmetric_padded_img_neg).transpose(2, 0, 1)[0][17][-4:] + assert_equal(symmetric_neg_middle_left, np.asarray([1, 0, 0], dtype=np.uint8)) + assert_equal(symmetric_neg_middle_right, np.asarray([200, 200, 0, 0], dtype=np.uint8)) + assert transforms.PILToTensor()(symmetric_padded_img_neg).size() == (3, 28, 31) def test_pad_raises_with_invalid_pad_sequence_len(self): - with self.assertRaises(ValueError): + with pytest.raises(ValueError): transforms.Pad(()) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): transforms.Pad((1, 2, 3)) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): transforms.Pad((1, 2, 3, 4, 5)) - def test_lambda(self): - trans = transforms.Lambda(lambda x: x.add(10)) - x = torch.randn(10) - y = trans(x) - self.assertTrue(y.equal(torch.add(x, 10))) - - trans = transforms.Lambda(lambda x: x.add_(10)) - x = torch.randn(10) - y = trans(x) - self.assertTrue(y.equal(x)) - - # Checking if Lambda can be printed as string - trans.__repr__() - - @unittest.skipIf(stats is None, 'scipy.stats not available') - def test_random_apply(self): - random_state = random.getstate() - random.seed(42) - random_apply_transform = transforms.RandomApply( - [ - transforms.RandomRotation((-45, 45)), - transforms.RandomHorizontalFlip(), - transforms.RandomVerticalFlip(), - ], p=0.75 - ) - img = transforms.ToPILImage()(torch.rand(3, 10, 10)) - num_samples = 250 - num_applies = 0 - for _ in range(num_samples): - out = random_apply_transform(img) - if out != img: - num_applies += 1 - - p_value = stats.binom_test(num_applies, num_samples, p=0.75) - random.setstate(random_state) - self.assertGreater(p_value, 0.0001) - - # Checking if RandomApply can be printed as string - random_apply_transform.__repr__() - - @unittest.skipIf(stats is None, 'scipy.stats not available') - def test_random_choice(self): - random_state = random.getstate() - random.seed(42) - random_choice_transform = transforms.RandomChoice( - [ - transforms.Resize(15), - transforms.Resize(20), - transforms.CenterCrop(10) - ] - ) - img = transforms.ToPILImage()(torch.rand(3, 25, 25)) - num_samples = 250 - num_resize_15 = 0 - num_resize_20 = 0 - num_crop_10 = 0 - for _ in range(num_samples): - out = random_choice_transform(img) - if out.size == (15, 15): - num_resize_15 += 1 - elif out.size == (20, 20): - num_resize_20 += 1 - elif out.size == (10, 10): - num_crop_10 += 1 - - p_value = stats.binom_test(num_resize_15, num_samples, p=0.33333) - self.assertGreater(p_value, 0.0001) - p_value = 
stats.binom_test(num_resize_20, num_samples, p=0.33333) - self.assertGreater(p_value, 0.0001) - p_value = stats.binom_test(num_crop_10, num_samples, p=0.33333) - self.assertGreater(p_value, 0.0001) - - random.setstate(random_state) - # Checking if RandomChoice can be printed as string - random_choice_transform.__repr__() - - @unittest.skipIf(stats is None, 'scipy.stats not available') - def test_random_order(self): - random_state = random.getstate() - random.seed(42) - random_order_transform = transforms.RandomOrder( - [ - transforms.Resize(20), - transforms.CenterCrop(10) - ] - ) - img = transforms.ToPILImage()(torch.rand(3, 25, 25)) - num_samples = 250 - num_normal_order = 0 - resize_crop_out = transforms.CenterCrop(10)(transforms.Resize(20)(img)) - for _ in range(num_samples): - out = random_order_transform(img) - if out == resize_crop_out: - num_normal_order += 1 - - p_value = stats.binom_test(num_normal_order, num_samples, p=0.5) - random.setstate(random_state) - self.assertGreater(p_value, 0.0001) - - # Checking if RandomOrder can be printed as string - random_order_transform.__repr__() - - def test_to_tensor(self): - test_channels = [1, 3, 4] - height, width = 4, 4 - trans = transforms.ToTensor() - - with self.assertRaises(TypeError): - trans(np.random.rand(1, height, width).tolist()) - - with self.assertRaises(ValueError): - trans(np.random.rand(height)) - trans(np.random.rand(1, 1, height, width)) - - for channels in test_channels: - input_data = torch.ByteTensor(channels, height, width).random_(0, 255).float().div_(255) - img = transforms.ToPILImage()(input_data) - output = trans(img) - self.assertTrue(np.allclose(input_data.numpy(), output.numpy())) - - ndarray = np.random.randint(low=0, high=255, size=(height, width, channels)).astype(np.uint8) - output = trans(ndarray) - expected_output = ndarray.transpose((2, 0, 1)) / 255.0 - self.assertTrue(np.allclose(output.numpy(), expected_output)) - - ndarray = np.random.rand(height, width, channels).astype(np.float32) - output = trans(ndarray) - expected_output = ndarray.transpose((2, 0, 1)) - self.assertTrue(np.allclose(output.numpy(), expected_output)) - - # separate test for mode '1' PIL images - input_data = torch.ByteTensor(1, height, width).bernoulli_() - img = transforms.ToPILImage()(input_data.mul(255)).convert('1') - output = trans(img) - self.assertTrue(np.allclose(input_data.numpy(), output.numpy())) - - @unittest.skipIf(accimage is None, 'accimage not available') - def test_accimage_to_tensor(self): - trans = transforms.ToTensor() - - expected_output = trans(Image.open(GRACE_HOPPER).convert('RGB')) - output = trans(accimage.Image(GRACE_HOPPER)) - - self.assertEqual(expected_output.size(), output.size()) - self.assertTrue(np.allclose(output.numpy(), expected_output.numpy())) - - @unittest.skipIf(accimage is None, 'accimage not available') - def test_accimage_resize(self): - trans = transforms.Compose([ - transforms.Resize(256, interpolation=Image.LINEAR), - transforms.ToTensor(), - ]) + def test_pad_with_mode_F_images(self): + pad = 2 + transform = transforms.Pad(pad) + + img = Image.new("F", (10, 10)) + padded_img = transform(img) + assert_equal(padded_img.size, [edge_size + 2 * pad for edge_size in img.size]) + + +@pytest.mark.parametrize( + "fn, trans, kwargs", + [ + (F.invert, transforms.RandomInvert, {}), + (F.posterize, transforms.RandomPosterize, {"bits": 4}), + (F.solarize, transforms.RandomSolarize, {"threshold": 192}), + (F.adjust_sharpness, transforms.RandomAdjustSharpness, {"sharpness_factor": 2.0}), + 
(F.autocontrast, transforms.RandomAutocontrast, {}), + (F.equalize, transforms.RandomEqualize, {}), + (F.vflip, transforms.RandomVerticalFlip, {}), + (F.hflip, transforms.RandomHorizontalFlip, {}), + (partial(F.to_grayscale, num_output_channels=3), transforms.RandomGrayscale, {}), + ], +) +@pytest.mark.parametrize("seed", range(10)) +@pytest.mark.parametrize("p", (0, 1)) +def test_randomness(fn, trans, kwargs, seed, p): + torch.manual_seed(seed) + img = transforms.ToPILImage()(torch.rand(3, 16, 18)) + + expected_transformed_img = fn(img, **kwargs) + randomly_transformed_img = trans(p=p, **kwargs)(img) + + if p == 0: + assert randomly_transformed_img == img + elif p == 1: + assert randomly_transformed_img == expected_transformed_img + + trans(**kwargs).__repr__() + + +def test_autocontrast_equal_minmax(): + img_tensor = torch.tensor([[[10]], [[128]], [[245]]], dtype=torch.uint8).expand(3, 32, 32) + img_pil = F.to_pil_image(img_tensor) + + img_tensor = F.autocontrast(img_tensor) + img_pil = F.autocontrast(img_pil) + torch.testing.assert_close(img_tensor, F.pil_to_tensor(img_pil)) + + +class TestToPil: + def _get_1_channel_tensor_various_types(): + img_data_float = torch.Tensor(1, 4, 4).uniform_() + expected_output = img_data_float.mul(255).int().float().div(255).numpy() + yield img_data_float, expected_output, "L" - # Checking if Compose, Resize and ToTensor can be printed as string - trans.__repr__() + img_data_byte = torch.ByteTensor(1, 4, 4).random_(0, 255) + expected_output = img_data_byte.float().div(255.0).numpy() + yield img_data_byte, expected_output, "L" - expected_output = trans(Image.open(GRACE_HOPPER).convert('RGB')) - output = trans(accimage.Image(GRACE_HOPPER)) + img_data_short = torch.ShortTensor(1, 4, 4).random_() + expected_output = img_data_short.numpy() + yield img_data_short, expected_output, "I;16" if sys.byteorder == "little" else "I;16B" - self.assertEqual(expected_output.size(), output.size()) - self.assertLess(np.abs((expected_output - output).mean()), 1e-3) - self.assertLess((expected_output - output).var(), 1e-5) - # note the high absolute tolerance - self.assertTrue(np.allclose(output.numpy(), expected_output.numpy(), atol=5e-2)) + img_data_int = torch.IntTensor(1, 4, 4).random_() + expected_output = img_data_int.numpy() + yield img_data_int, expected_output, "I" - @unittest.skipIf(accimage is None, 'accimage not available') - def test_accimage_crop(self): - trans = transforms.Compose([ - transforms.CenterCrop(256), - transforms.ToTensor(), - ]) + def _get_2d_tensor_various_types(): + img_data_float = torch.Tensor(4, 4).uniform_() + expected_output = img_data_float.mul(255).int().float().div(255).numpy() + yield img_data_float, expected_output, "L" - # Checking if Compose, CenterCrop and ToTensor can be printed as string - trans.__repr__() + img_data_byte = torch.ByteTensor(4, 4).random_(0, 255) + expected_output = img_data_byte.float().div(255.0).numpy() + yield img_data_byte, expected_output, "L" - expected_output = trans(Image.open(GRACE_HOPPER).convert('RGB')) - output = trans(accimage.Image(GRACE_HOPPER)) + img_data_short = torch.ShortTensor(4, 4).random_() + expected_output = img_data_short.numpy() + yield img_data_short, expected_output, "I;16" if sys.byteorder == "little" else "I;16B" - self.assertEqual(expected_output.size(), output.size()) - self.assertTrue(np.allclose(output.numpy(), expected_output.numpy())) + img_data_int = torch.IntTensor(4, 4).random_() + expected_output = img_data_int.numpy() + yield img_data_int, expected_output, "I" - def 
test_1_channel_tensor_to_pil_image(self): + @pytest.mark.parametrize("with_mode", [False, True]) + @pytest.mark.parametrize("img_data, expected_output, expected_mode", _get_1_channel_tensor_various_types()) + def test_1_channel_tensor_to_pil_image(self, with_mode, img_data, expected_output, expected_mode): + transform = transforms.ToPILImage(mode=expected_mode) if with_mode else transforms.ToPILImage() to_tensor = transforms.ToTensor() - img_data_float = torch.Tensor(1, 4, 4).uniform_() - img_data_byte = torch.ByteTensor(1, 4, 4).random_(0, 255) - img_data_short = torch.ShortTensor(1, 4, 4).random_() - img_data_int = torch.IntTensor(1, 4, 4).random_() + img = transform(img_data) + assert img.mode == expected_mode + torch.testing.assert_close(expected_output, to_tensor(img).numpy()) - inputs = [img_data_float, img_data_byte, img_data_short, img_data_int] - expected_outputs = [img_data_float.mul(255).int().float().div(255).numpy(), - img_data_byte.float().div(255.0).numpy(), - img_data_short.numpy(), - img_data_int.numpy()] - expected_modes = ['L', 'L', 'I;16', 'I'] - - for img_data, expected_output, mode in zip(inputs, expected_outputs, expected_modes): - for transform in [transforms.ToPILImage(), transforms.ToPILImage(mode=mode)]: - img = transform(img_data) - self.assertEqual(img.mode, mode) - self.assertTrue(np.allclose(expected_output, to_tensor(img).numpy())) + def test_1_channel_float_tensor_to_pil_image(self): + img_data = torch.Tensor(1, 4, 4).uniform_() # 'F' mode for torch.FloatTensor - img_F_mode = transforms.ToPILImage(mode='F')(img_data_float) - self.assertEqual(img_F_mode.mode, 'F') - self.assertTrue(np.allclose(np.array(Image.fromarray(img_data_float.squeeze(0).numpy(), mode='F')), - np.array(img_F_mode))) - - def test_1_channel_ndarray_to_pil_image(self): - img_data_float = torch.Tensor(4, 4, 1).uniform_().numpy() - img_data_byte = torch.ByteTensor(4, 4, 1).random_(0, 255).numpy() - img_data_short = torch.ShortTensor(4, 4, 1).random_().numpy() - img_data_int = torch.IntTensor(4, 4, 1).random_().numpy() - - inputs = [img_data_float, img_data_byte, img_data_short, img_data_int] - expected_modes = ['F', 'L', 'I;16', 'I'] - for img_data, mode in zip(inputs, expected_modes): - for transform in [transforms.ToPILImage(), transforms.ToPILImage(mode=mode)]: - img = transform(img_data) - self.assertEqual(img.mode, mode) - self.assertTrue(np.allclose(img_data[:, :, 0], img)) - - def test_2_channel_ndarray_to_pil_image(self): - def verify_img_data(img_data, mode): - if mode is None: - img = transforms.ToPILImage()(img_data) - self.assertEqual(img.mode, 'LA') # default should assume LA - else: - img = transforms.ToPILImage(mode=mode)(img_data) - self.assertEqual(img.mode, mode) - split = img.split() - for i in range(2): - self.assertTrue(np.allclose(img_data[:, :, i], split[i])) + img_F_mode = transforms.ToPILImage(mode="F")(img_data) + assert img_F_mode.mode == "F" + torch.testing.assert_close( + np.array(Image.fromarray(img_data.squeeze(0).numpy(), mode="F")), np.array(img_F_mode) + ) + @pytest.mark.parametrize("with_mode", [False, True]) + @pytest.mark.parametrize( + "img_data, expected_mode", + [ + (torch.Tensor(4, 4, 1).uniform_().numpy(), "L"), + (torch.ByteTensor(4, 4, 1).random_(0, 255).numpy(), "L"), + (torch.ShortTensor(4, 4, 1).random_().numpy(), "I;16" if sys.byteorder == "little" else "I;16B"), + (torch.IntTensor(4, 4, 1).random_().numpy(), "I"), + ], + ) + def test_1_channel_ndarray_to_pil_image(self, with_mode, img_data, expected_mode): + transform = 
transforms.ToPILImage(mode=expected_mode) if with_mode else transforms.ToPILImage() + img = transform(img_data) + assert img.mode == expected_mode + if np.issubdtype(img_data.dtype, np.floating): + img_data = (img_data * 255).astype(np.uint8) + # note: we explicitly convert img's dtype because pytorch doesn't support uint16 + # and otherwise assert_close wouldn't be able to construct a tensor from the uint16 array + torch.testing.assert_close(img_data[:, :, 0], np.asarray(img).astype(img_data.dtype)) + + @pytest.mark.parametrize("expected_mode", [None, "LA"]) + def test_2_channel_ndarray_to_pil_image(self, expected_mode): img_data = torch.ByteTensor(4, 4, 2).random_(0, 255).numpy() - for mode in [None, 'LA']: - verify_img_data(img_data, mode) + if expected_mode is None: + img = transforms.ToPILImage()(img_data) + assert img.mode == "LA" # default should assume LA + else: + img = transforms.ToPILImage(mode=expected_mode)(img_data) + assert img.mode == expected_mode + split = img.split() + for i in range(2): + torch.testing.assert_close(img_data[:, :, i], np.asarray(split[i])) + + def test_2_channel_ndarray_to_pil_image_error(self): + img_data = torch.ByteTensor(4, 4, 2).random_(0, 255).numpy() transforms.ToPILImage().__repr__() - with self.assertRaises(ValueError): - # should raise if we try a mode for 4 or 1 or 3 channel images - transforms.ToPILImage(mode='RGBA')(img_data) - transforms.ToPILImage(mode='P')(img_data) - transforms.ToPILImage(mode='RGB')(img_data) - - def test_2_channel_tensor_to_pil_image(self): - def verify_img_data(img_data, expected_output, mode): - if mode is None: - img = transforms.ToPILImage()(img_data) - self.assertEqual(img.mode, 'LA') # default should assume LA - else: - img = transforms.ToPILImage(mode=mode)(img_data) - self.assertEqual(img.mode, mode) - split = img.split() - for i in range(2): - self.assertTrue(np.allclose(expected_output[i].numpy(), F.to_tensor(split[i]).numpy())) + # should raise if we try a mode for 4 or 1 or 3 channel images + with pytest.raises(ValueError, match=r"Only modes \['LA'\] are supported for 2D inputs"): + transforms.ToPILImage(mode="RGBA")(img_data) + with pytest.raises(ValueError, match=r"Only modes \['LA'\] are supported for 2D inputs"): + transforms.ToPILImage(mode="P")(img_data) + with pytest.raises(ValueError, match=r"Only modes \['LA'\] are supported for 2D inputs"): + transforms.ToPILImage(mode="RGB")(img_data) + @pytest.mark.parametrize("expected_mode", [None, "LA"]) + def test_2_channel_tensor_to_pil_image(self, expected_mode): img_data = torch.Tensor(2, 4, 4).uniform_() expected_output = img_data.mul(255).int().float().div(255) - for mode in [None, 'LA']: - verify_img_data(img_data, expected_output, mode=mode) - - with self.assertRaises(ValueError): - # should raise if we try a mode for 4 or 1 or 3 channel images - transforms.ToPILImage(mode='RGBA')(img_data) - transforms.ToPILImage(mode='P')(img_data) - transforms.ToPILImage(mode='RGB')(img_data) - - def test_3_channel_tensor_to_pil_image(self): - def verify_img_data(img_data, expected_output, mode): - if mode is None: - img = transforms.ToPILImage()(img_data) - self.assertEqual(img.mode, 'RGB') # default should assume RGB - else: - img = transforms.ToPILImage(mode=mode)(img_data) - self.assertEqual(img.mode, mode) - split = img.split() - for i in range(3): - self.assertTrue(np.allclose(expected_output[i].numpy(), F.to_tensor(split[i]).numpy())) + if expected_mode is None: + img = transforms.ToPILImage()(img_data) + assert img.mode == "LA" # default should assume LA + 
else: + img = transforms.ToPILImage(mode=expected_mode)(img_data) + assert img.mode == expected_mode + + split = img.split() + for i in range(2): + torch.testing.assert_close(expected_output[i].numpy(), F.to_tensor(split[i]).squeeze(0).numpy()) + + def test_2_channel_tensor_to_pil_image_error(self): + img_data = torch.Tensor(2, 4, 4).uniform_() + + # should raise if we try a mode for 4 or 1 or 3 channel images + with pytest.raises(ValueError, match=r"Only modes \['LA'\] are supported for 2D inputs"): + transforms.ToPILImage(mode="RGBA")(img_data) + with pytest.raises(ValueError, match=r"Only modes \['LA'\] are supported for 2D inputs"): + transforms.ToPILImage(mode="P")(img_data) + with pytest.raises(ValueError, match=r"Only modes \['LA'\] are supported for 2D inputs"): + transforms.ToPILImage(mode="RGB")(img_data) + + @pytest.mark.parametrize("with_mode", [False, True]) + @pytest.mark.parametrize("img_data, expected_output, expected_mode", _get_2d_tensor_various_types()) + def test_2d_tensor_to_pil_image(self, with_mode, img_data, expected_output, expected_mode): + transform = transforms.ToPILImage(mode=expected_mode) if with_mode else transforms.ToPILImage() + to_tensor = transforms.ToTensor() + img = transform(img_data) + assert img.mode == expected_mode + torch.testing.assert_close(expected_output, to_tensor(img).numpy()[0]) + + @pytest.mark.parametrize("with_mode", [False, True]) + @pytest.mark.parametrize( + "img_data, expected_mode", + [ + (torch.Tensor(4, 4).uniform_().numpy(), "L"), + (torch.ByteTensor(4, 4).random_(0, 255).numpy(), "L"), + (torch.ShortTensor(4, 4).random_().numpy(), "I;16" if sys.byteorder == "little" else "I;16B"), + (torch.IntTensor(4, 4).random_().numpy(), "I"), + ], + ) + def test_2d_ndarray_to_pil_image(self, with_mode, img_data, expected_mode): + transform = transforms.ToPILImage(mode=expected_mode) if with_mode else transforms.ToPILImage() + img = transform(img_data) + assert img.mode == expected_mode + if np.issubdtype(img_data.dtype, np.floating): + img_data = (img_data * 255).astype(np.uint8) + np.testing.assert_allclose(img_data, img) + + @pytest.mark.parametrize("expected_mode", [None, "RGB", "HSV", "YCbCr"]) + def test_3_channel_tensor_to_pil_image(self, expected_mode): img_data = torch.Tensor(3, 4, 4).uniform_() expected_output = img_data.mul(255).int().float().div(255) - for mode in [None, 'RGB', 'HSV', 'YCbCr']: - verify_img_data(img_data, expected_output, mode=mode) - - with self.assertRaises(ValueError): - # should raise if we try a mode for 4 or 1 or 2 channel images - transforms.ToPILImage(mode='RGBA')(img_data) - transforms.ToPILImage(mode='P')(img_data) - transforms.ToPILImage(mode='LA')(img_data) - with self.assertRaises(ValueError): + if expected_mode is None: + img = transforms.ToPILImage()(img_data) + assert img.mode == "RGB" # default should assume RGB + else: + img = transforms.ToPILImage(mode=expected_mode)(img_data) + assert img.mode == expected_mode + split = img.split() + for i in range(3): + torch.testing.assert_close(expected_output[i].numpy(), F.to_tensor(split[i]).squeeze(0).numpy()) + + def test_3_channel_tensor_to_pil_image_error(self): + img_data = torch.Tensor(3, 4, 4).uniform_() + error_message_3d = r"Only modes \['RGB', 'YCbCr', 'HSV'\] are supported for 3D inputs" + # should raise if we try a mode for 4 or 1 or 2 channel images + with pytest.raises(ValueError, match=error_message_3d): + transforms.ToPILImage(mode="RGBA")(img_data) + with pytest.raises(ValueError, match=error_message_3d): + 
transforms.ToPILImage(mode="P")(img_data) + with pytest.raises(ValueError, match=error_message_3d): + transforms.ToPILImage(mode="LA")(img_data) + + with pytest.raises(ValueError, match=r"pic should be 2/3 dimensional. Got \d+ dimensions."): transforms.ToPILImage()(torch.Tensor(1, 3, 4, 4).uniform_()) - def test_3_channel_ndarray_to_pil_image(self): - def verify_img_data(img_data, mode): - if mode is None: - img = transforms.ToPILImage()(img_data) - self.assertEqual(img.mode, 'RGB') # default should assume RGB - else: - img = transforms.ToPILImage(mode=mode)(img_data) - self.assertEqual(img.mode, mode) - split = img.split() - for i in range(3): - self.assertTrue(np.allclose(img_data[:, :, i], split[i])) + @pytest.mark.parametrize("expected_mode", [None, "RGB", "HSV", "YCbCr"]) + def test_3_channel_ndarray_to_pil_image(self, expected_mode): + img_data = torch.ByteTensor(4, 4, 3).random_(0, 255).numpy() + if expected_mode is None: + img = transforms.ToPILImage()(img_data) + assert img.mode == "RGB" # default should assume RGB + else: + img = transforms.ToPILImage(mode=expected_mode)(img_data) + assert img.mode == expected_mode + split = img.split() + for i in range(3): + torch.testing.assert_close(img_data[:, :, i], np.asarray(split[i])) + + def test_3_channel_ndarray_to_pil_image_error(self): img_data = torch.ByteTensor(4, 4, 3).random_(0, 255).numpy() - for mode in [None, 'RGB', 'HSV', 'YCbCr']: - verify_img_data(img_data, mode) # Checking if ToPILImage can be printed as string transforms.ToPILImage().__repr__() - with self.assertRaises(ValueError): - # should raise if we try a mode for 4 or 1 or 2 channel images - transforms.ToPILImage(mode='RGBA')(img_data) - transforms.ToPILImage(mode='P')(img_data) - transforms.ToPILImage(mode='LA')(img_data) - - def test_4_channel_tensor_to_pil_image(self): - def verify_img_data(img_data, expected_output, mode): - if mode is None: - img = transforms.ToPILImage()(img_data) - self.assertEqual(img.mode, 'RGBA') # default should assume RGBA - else: - img = transforms.ToPILImage(mode=mode)(img_data) - self.assertEqual(img.mode, mode) - - split = img.split() - for i in range(4): - self.assertTrue(np.allclose(expected_output[i].numpy(), F.to_tensor(split[i]).numpy())) - + error_message_3d = r"Only modes \['RGB', 'YCbCr', 'HSV'\] are supported for 3D inputs" + # should raise if we try a mode for 4 or 1 or 2 channel images + with pytest.raises(ValueError, match=error_message_3d): + transforms.ToPILImage(mode="RGBA")(img_data) + with pytest.raises(ValueError, match=error_message_3d): + transforms.ToPILImage(mode="P")(img_data) + with pytest.raises(ValueError, match=error_message_3d): + transforms.ToPILImage(mode="LA")(img_data) + + @pytest.mark.parametrize("expected_mode", [None, "RGBA", "CMYK", "RGBX"]) + def test_4_channel_tensor_to_pil_image(self, expected_mode): img_data = torch.Tensor(4, 4, 4).uniform_() expected_output = img_data.mul(255).int().float().div(255) - for mode in [None, 'RGBA', 'CMYK', 'RGBX']: - verify_img_data(img_data, expected_output, mode) - - with self.assertRaises(ValueError): - # should raise if we try a mode for 3 or 1 or 2 channel images - transforms.ToPILImage(mode='RGB')(img_data) - transforms.ToPILImage(mode='P')(img_data) - transforms.ToPILImage(mode='LA')(img_data) - - def test_4_channel_ndarray_to_pil_image(self): - def verify_img_data(img_data, mode): - if mode is None: - img = transforms.ToPILImage()(img_data) - self.assertEqual(img.mode, 'RGBA') # default should assume RGBA - else: - img = 
transforms.ToPILImage(mode=mode)(img_data) - self.assertEqual(img.mode, mode) - split = img.split() - for i in range(4): - self.assertTrue(np.allclose(img_data[:, :, i], split[i])) - img_data = torch.ByteTensor(4, 4, 4).random_(0, 255).numpy() - for mode in [None, 'RGBA', 'CMYK', 'RGBX']: - verify_img_data(img_data, mode) + if expected_mode is None: + img = transforms.ToPILImage()(img_data) + assert img.mode == "RGBA" # default should assume RGBA + else: + img = transforms.ToPILImage(mode=expected_mode)(img_data) + assert img.mode == expected_mode - with self.assertRaises(ValueError): - # should raise if we try a mode for 3 or 1 or 2 channel images - transforms.ToPILImage(mode='RGB')(img_data) - transforms.ToPILImage(mode='P')(img_data) - transforms.ToPILImage(mode='LA')(img_data) + split = img.split() + for i in range(4): + torch.testing.assert_close(expected_output[i].numpy(), F.to_tensor(split[i]).squeeze(0).numpy()) - def test_2d_tensor_to_pil_image(self): - to_tensor = transforms.ToTensor() + def test_4_channel_tensor_to_pil_image_error(self): + img_data = torch.Tensor(4, 4, 4).uniform_() - img_data_float = torch.Tensor(4, 4).uniform_() - img_data_byte = torch.ByteTensor(4, 4).random_(0, 255) - img_data_short = torch.ShortTensor(4, 4).random_() - img_data_int = torch.IntTensor(4, 4).random_() + error_message_4d = r"Only modes \['RGBA', 'CMYK', 'RGBX'\] are supported for 4D inputs" + # should raise if we try a mode for 3 or 1 or 2 channel images + with pytest.raises(ValueError, match=error_message_4d): + transforms.ToPILImage(mode="RGB")(img_data) + with pytest.raises(ValueError, match=error_message_4d): + transforms.ToPILImage(mode="P")(img_data) + with pytest.raises(ValueError, match=error_message_4d): + transforms.ToPILImage(mode="LA")(img_data) + + @pytest.mark.parametrize("expected_mode", [None, "RGBA", "CMYK", "RGBX"]) + def test_4_channel_ndarray_to_pil_image(self, expected_mode): + img_data = torch.ByteTensor(4, 4, 4).random_(0, 255).numpy() - inputs = [img_data_float, img_data_byte, img_data_short, img_data_int] - expected_outputs = [img_data_float.mul(255).int().float().div(255).numpy(), - img_data_byte.float().div(255.0).numpy(), - img_data_short.numpy(), - img_data_int.numpy()] - expected_modes = ['L', 'L', 'I;16', 'I'] - - for img_data, expected_output, mode in zip(inputs, expected_outputs, expected_modes): - for transform in [transforms.ToPILImage(), transforms.ToPILImage(mode=mode)]: - img = transform(img_data) - self.assertEqual(img.mode, mode) - self.assertTrue(np.allclose(expected_output, to_tensor(img).numpy())) - - def test_2d_ndarray_to_pil_image(self): - img_data_float = torch.Tensor(4, 4).uniform_().numpy() - img_data_byte = torch.ByteTensor(4, 4).random_(0, 255).numpy() - img_data_short = torch.ShortTensor(4, 4).random_().numpy() - img_data_int = torch.IntTensor(4, 4).random_().numpy() - - inputs = [img_data_float, img_data_byte, img_data_short, img_data_int] - expected_modes = ['F', 'L', 'I;16', 'I'] - for img_data, mode in zip(inputs, expected_modes): - for transform in [transforms.ToPILImage(), transforms.ToPILImage(mode=mode)]: - img = transform(img_data) - self.assertEqual(img.mode, mode) - self.assertTrue(np.allclose(img_data, img)) + if expected_mode is None: + img = transforms.ToPILImage()(img_data) + assert img.mode == "RGBA" # default should assume RGBA + else: + img = transforms.ToPILImage(mode=expected_mode)(img_data) + assert img.mode == expected_mode + split = img.split() + for i in range(4): + torch.testing.assert_close(img_data[:, :, i], 
np.asarray(split[i])) + + def test_4_channel_ndarray_to_pil_image_error(self): + img_data = torch.ByteTensor(4, 4, 4).random_(0, 255).numpy() - def test_tensor_bad_types_to_pil_image(self): - with self.assertRaises(ValueError): - transforms.ToPILImage()(torch.ones(1, 3, 4, 4)) + error_message_4d = r"Only modes \['RGBA', 'CMYK', 'RGBX'\] are supported for 4D inputs" + # should raise if we try a mode for 3 or 1 or 2 channel images + with pytest.raises(ValueError, match=error_message_4d): + transforms.ToPILImage(mode="RGB")(img_data) + with pytest.raises(ValueError, match=error_message_4d): + transforms.ToPILImage(mode="P")(img_data) + with pytest.raises(ValueError, match=error_message_4d): + transforms.ToPILImage(mode="LA")(img_data) def test_ndarray_bad_types_to_pil_image(self): trans = transforms.ToPILImage() - with self.assertRaises(TypeError): + reg_msg = r"Input type \w+ is not supported" + with pytest.raises(TypeError, match=reg_msg): trans(np.ones([4, 4, 1], np.int64)) + with pytest.raises(TypeError, match=reg_msg): trans(np.ones([4, 4, 1], np.uint16)) + with pytest.raises(TypeError, match=reg_msg): trans(np.ones([4, 4, 1], np.uint32)) - trans(np.ones([4, 4, 1], np.float64)) - with self.assertRaises(ValueError): + with pytest.raises(ValueError, match=r"pic should be 2/3 dimensional. Got \d+ dimensions."): transforms.ToPILImage()(np.ones([1, 4, 4, 3])) + with pytest.raises(ValueError, match=r"pic should not have > 4 channels. Got \d+ channels."): + transforms.ToPILImage()(np.ones([4, 4, 6])) + + def test_tensor_bad_types_to_pil_image(self): + with pytest.raises(ValueError, match=r"pic should be 2/3 dimensional. Got \d+ dimensions."): + transforms.ToPILImage()(torch.ones(1, 3, 4, 4)) + with pytest.raises(ValueError, match=r"pic should not have > 4 channels. 
Got \d+ channels."):
+ transforms.ToPILImage()(torch.ones(6, 4, 4))
+
+
+def test_adjust_brightness():
+ x_shape = [2, 2, 3]
+ x_data = [0, 5, 13, 54, 135, 226, 37, 8, 234, 90, 255, 1]
+ x_np = np.array(x_data, dtype=np.uint8).reshape(x_shape)
+ x_pil = Image.fromarray(x_np, mode="RGB")
+
+ # test 0
+ y_pil = F.adjust_brightness(x_pil, 1)
+ y_np = np.array(y_pil)
+ torch.testing.assert_close(y_np, x_np)
+
+ # test 1
+ y_pil = F.adjust_brightness(x_pil, 0.5)
+ y_np = np.array(y_pil)
+ y_ans = [0, 2, 6, 27, 67, 113, 18, 4, 117, 45, 127, 0]
+ y_ans = np.array(y_ans, dtype=np.uint8).reshape(x_shape)
+ torch.testing.assert_close(y_np, y_ans)
+
+ # test 2
+ y_pil = F.adjust_brightness(x_pil, 2)
+ y_np = np.array(y_pil)
+ y_ans = [0, 10, 26, 108, 255, 255, 74, 16, 255, 180, 255, 2]
+ y_ans = np.array(y_ans, dtype=np.uint8).reshape(x_shape)
+ torch.testing.assert_close(y_np, y_ans)
+
+
+def test_adjust_contrast():
+ x_shape = [2, 2, 3]
+ x_data = [0, 5, 13, 54, 135, 226, 37, 8, 234, 90, 255, 1]
+ x_np = np.array(x_data, dtype=np.uint8).reshape(x_shape)
+ x_pil = Image.fromarray(x_np, mode="RGB")
+
+ # test 0
+ y_pil = F.adjust_contrast(x_pil, 1)
+ y_np = np.array(y_pil)
+ torch.testing.assert_close(y_np, x_np)
+
+ # test 1
+ y_pil = F.adjust_contrast(x_pil, 0.5)
+ y_np = np.array(y_pil)
+ y_ans = [43, 45, 49, 70, 110, 156, 61, 47, 160, 88, 170, 43]
+ y_ans = np.array(y_ans, dtype=np.uint8).reshape(x_shape)
+ torch.testing.assert_close(y_np, y_ans)
+
+ # test 2
+ y_pil = F.adjust_contrast(x_pil, 2)
+ y_np = np.array(y_pil)
+ y_ans = [0, 0, 0, 22, 184, 255, 0, 0, 255, 94, 255, 0]
+ y_ans = np.array(y_ans, dtype=np.uint8).reshape(x_shape)
+ torch.testing.assert_close(y_np, y_ans)
+
+
+def test_adjust_hue():
+ x_shape = [2, 2, 3]
+ x_data = [0, 5, 13, 54, 135, 226, 37, 8, 234, 90, 255, 1]
+ x_np = np.array(x_data, dtype=np.uint8).reshape(x_shape)
+ x_pil = Image.fromarray(x_np, mode="RGB")
+
+ # each out-of-range value gets its own raises block; a second call inside
+ # one block would never execute once the first call raises
+ with pytest.raises(ValueError):
+ F.adjust_hue(x_pil, -0.7)
+ with pytest.raises(ValueError):
+ F.adjust_hue(x_pil, 1)
+
+ # test 0: almost same as x_data but not exact. 
+ # probably because hsv <-> rgb floating point ops + y_pil = F.adjust_hue(x_pil, 0) + y_np = np.array(y_pil) + y_ans = [0, 5, 13, 54, 139, 226, 35, 8, 234, 91, 255, 1] + y_ans = np.array(y_ans, dtype=np.uint8).reshape(x_shape) + torch.testing.assert_close(y_np, y_ans) + + # test 1 + y_pil = F.adjust_hue(x_pil, 0.25) + y_np = np.array(y_pil) + y_ans = [13, 0, 12, 224, 54, 226, 234, 8, 99, 1, 222, 255] + y_ans = np.array(y_ans, dtype=np.uint8).reshape(x_shape) + torch.testing.assert_close(y_np, y_ans) + + # test 2 + y_pil = F.adjust_hue(x_pil, -0.25) + y_np = np.array(y_pil) + y_ans = [0, 13, 2, 54, 226, 58, 8, 234, 152, 255, 43, 1] + y_ans = np.array(y_ans, dtype=np.uint8).reshape(x_shape) + torch.testing.assert_close(y_np, y_ans) + + +def test_adjust_sharpness(): + x_shape = [4, 4, 3] + x_data = [ + 75, + 121, + 114, + 105, + 97, + 107, + 105, + 32, + 66, + 111, + 117, + 114, + 99, + 104, + 97, + 0, + 0, + 65, + 108, + 101, + 120, + 97, + 110, + 100, + 101, + 114, + 32, + 86, + 114, + 121, + 110, + 105, + 111, + 116, + 105, + 115, + 0, + 0, + 73, + 32, + 108, + 111, + 118, + 101, + 32, + 121, + 111, + 117, + ] + x_np = np.array(x_data, dtype=np.uint8).reshape(x_shape) + x_pil = Image.fromarray(x_np, mode="RGB") + + # test 0 + y_pil = F.adjust_sharpness(x_pil, 1) + y_np = np.array(y_pil) + torch.testing.assert_close(y_np, x_np) + + # test 1 + y_pil = F.adjust_sharpness(x_pil, 0.5) + y_np = np.array(y_pil) + y_ans = [ + 75, + 121, + 114, + 105, + 97, + 107, + 105, + 32, + 66, + 111, + 117, + 114, + 99, + 104, + 97, + 30, + 30, + 74, + 103, + 96, + 114, + 97, + 110, + 100, + 101, + 114, + 32, + 81, + 103, + 108, + 102, + 101, + 107, + 116, + 105, + 115, + 0, + 0, + 73, + 32, + 108, + 111, + 118, + 101, + 32, + 121, + 111, + 117, + ] + y_ans = np.array(y_ans, dtype=np.uint8).reshape(x_shape) + torch.testing.assert_close(y_np, y_ans) + + # test 2 + y_pil = F.adjust_sharpness(x_pil, 2) + y_np = np.array(y_pil) + y_ans = [ + 75, + 121, + 114, + 105, + 97, + 107, + 105, + 32, + 66, + 111, + 117, + 114, + 99, + 104, + 97, + 0, + 0, + 46, + 118, + 111, + 132, + 97, + 110, + 100, + 101, + 114, + 32, + 95, + 135, + 146, + 126, + 112, + 119, + 116, + 105, + 115, + 0, + 0, + 73, + 32, + 108, + 111, + 118, + 101, + 32, + 121, + 111, + 117, + ] + y_ans = np.array(y_ans, dtype=np.uint8).reshape(x_shape) + torch.testing.assert_close(y_np, y_ans) + + # test 3 + x_shape = [2, 2, 3] + x_data = [0, 5, 13, 54, 135, 226, 37, 8, 234, 90, 255, 1] + x_np = np.array(x_data, dtype=np.uint8).reshape(x_shape) + x_pil = Image.fromarray(x_np, mode="RGB") + x_th = torch.tensor(x_np.transpose(2, 0, 1)) + y_pil = F.adjust_sharpness(x_pil, 2) + y_np = np.array(y_pil).transpose(2, 0, 1) + y_th = F.adjust_sharpness(x_th, 2) + torch.testing.assert_close(y_np, y_th.numpy()) + + +def test_adjust_gamma(): + x_shape = [2, 2, 3] + x_data = [0, 5, 13, 54, 135, 226, 37, 8, 234, 90, 255, 1] + x_np = np.array(x_data, dtype=np.uint8).reshape(x_shape) + x_pil = Image.fromarray(x_np, mode="RGB") + + # test 0 + y_pil = F.adjust_gamma(x_pil, 1) + y_np = np.array(y_pil) + torch.testing.assert_close(y_np, x_np) + + # test 1 + y_pil = F.adjust_gamma(x_pil, 0.5) + y_np = np.array(y_pil) + y_ans = [0, 35, 57, 117, 186, 241, 97, 45, 245, 152, 255, 16] + y_ans = np.array(y_ans, dtype=np.uint8).reshape(x_shape) + torch.testing.assert_close(y_np, y_ans) + + # test 2 + y_pil = F.adjust_gamma(x_pil, 2) + y_np = np.array(y_pil) + y_ans = [0, 0, 0, 11, 71, 201, 5, 0, 215, 31, 255, 0] + y_ans = np.array(y_ans, dtype=np.uint8).reshape(x_shape) + 
torch.testing.assert_close(y_np, y_ans) + + +def test_adjusts_L_mode(): + x_shape = [2, 2, 3] + x_data = [0, 5, 13, 54, 135, 226, 37, 8, 234, 90, 255, 1] + x_np = np.array(x_data, dtype=np.uint8).reshape(x_shape) + x_rgb = Image.fromarray(x_np, mode="RGB") + + x_l = x_rgb.convert("L") + assert F.adjust_brightness(x_l, 2).mode == "L" + assert F.adjust_saturation(x_l, 2).mode == "L" + assert F.adjust_contrast(x_l, 2).mode == "L" + assert F.adjust_hue(x_l, 0.4).mode == "L" + assert F.adjust_sharpness(x_l, 2).mode == "L" + assert F.adjust_gamma(x_l, 0.5).mode == "L" + + +def test_rotate(): + x = np.zeros((100, 100, 3), dtype=np.uint8) + x[40, 40] = [255, 255, 255] + + with pytest.raises(TypeError, match=r"img should be PIL Image"): + F.rotate(x, 10) + + img = F.to_pil_image(x) + + result = F.rotate(img, 45) + assert result.size == (100, 100) + r, c, ch = np.where(result) + assert all(x in r for x in [49, 50]) + assert all(x in c for x in [36]) + assert all(x in ch for x in [0, 1, 2]) + + result = F.rotate(img, 45, expand=True) + assert result.size == (142, 142) + r, c, ch = np.where(result) + assert all(x in r for x in [70, 71]) + assert all(x in c for x in [57]) + assert all(x in ch for x in [0, 1, 2]) + + result = F.rotate(img, 45, center=(40, 40)) + assert result.size == (100, 100) + r, c, ch = np.where(result) + assert all(x in r for x in [40]) + assert all(x in c for x in [40]) + assert all(x in ch for x in [0, 1, 2]) + + result_a = F.rotate(img, 90) + result_b = F.rotate(img, -270) + + assert_equal(np.array(result_a), np.array(result_b)) + + +@pytest.mark.parametrize("mode", ["L", "RGB", "F"]) +def test_rotate_fill(mode): + img = F.to_pil_image(np.ones((100, 100, 3), dtype=np.uint8) * 255, "RGB") + + num_bands = len(mode) + wrong_num_bands = num_bands + 1 + fill = 127 + + img_conv = img.convert(mode) + img_rot = F.rotate(img_conv, 45.0, fill=fill) + pixel = img_rot.getpixel((0, 0)) + + if not isinstance(pixel, tuple): + pixel = (pixel,) + assert pixel == tuple([fill] * num_bands) + + with pytest.raises(ValueError): + F.rotate(img_conv, 45.0, fill=tuple([fill] * wrong_num_bands)) + + +def test_gaussian_blur_asserts(): + np_img = np.ones((100, 100, 3), dtype=np.uint8) * 255 + img = F.to_pil_image(np_img, "RGB") + + with pytest.raises(ValueError, match=r"If kernel_size is a sequence its length should be 2"): + F.gaussian_blur(img, [3]) + with pytest.raises(ValueError, match=r"If kernel_size is a sequence its length should be 2"): + F.gaussian_blur(img, [3, 3, 3]) + with pytest.raises(ValueError, match=r"Kernel size should be a tuple/list of two integers"): + transforms.GaussianBlur([3, 3, 3]) + + with pytest.raises(ValueError, match=r"kernel_size should have odd and positive integers"): + F.gaussian_blur(img, [4, 4]) + with pytest.raises(ValueError, match=r"Kernel size value should be an odd and positive number"): + transforms.GaussianBlur([4, 4]) + + with pytest.raises(ValueError, match=r"kernel_size should have odd and positive integers"): + F.gaussian_blur(img, [-3, -3]) + with pytest.raises(ValueError, match=r"Kernel size value should be an odd and positive number"): + transforms.GaussianBlur([-3, -3]) + + with pytest.raises(ValueError, match=r"If sigma is a sequence, its length should be 2"): + F.gaussian_blur(img, 3, [1, 1, 1]) + with pytest.raises(ValueError, match=r"sigma should be a single number or a list/tuple with length 2"): + transforms.GaussianBlur(3, [1, 1, 1]) + + with pytest.raises(ValueError, match=r"sigma should have positive values"): + F.gaussian_blur(img, 3, -1.0) + 
with pytest.raises(ValueError, match=r"If sigma is a single number, it must be positive"): + transforms.GaussianBlur(3, -1.0) + + with pytest.raises(TypeError, match=r"kernel_size should be int or a sequence of integers"): + F.gaussian_blur(img, "kernel_size_string") + with pytest.raises(ValueError, match=r"Kernel size should be a tuple/list of two integers"): + transforms.GaussianBlur("kernel_size_string") + + with pytest.raises(TypeError, match=r"sigma should be either float or sequence of floats"): + F.gaussian_blur(img, 3, "sigma_string") + with pytest.raises(ValueError, match=r"sigma should be a single number or a list/tuple with length 2"): + transforms.GaussianBlur(3, "sigma_string") + + +def test_lambda(): + trans = transforms.Lambda(lambda x: x.add(10)) + x = torch.randn(10) + y = trans(x) + assert_equal(y, torch.add(x, 10)) + + trans = transforms.Lambda(lambda x: x.add_(10)) + x = torch.randn(10) + y = trans(x) + assert_equal(y, x) + + # Checking if Lambda can be printed as string + trans.__repr__() + + +def test_to_grayscale(): + """Unit tests for grayscale transform""" + + x_shape = [2, 2, 3] + x_data = [0, 5, 13, 54, 135, 226, 37, 8, 234, 90, 255, 1] + x_np = np.array(x_data, dtype=np.uint8).reshape(x_shape) + x_pil = Image.fromarray(x_np, mode="RGB") + x_pil_2 = x_pil.convert("L") + gray_np = np.array(x_pil_2) + + # Test Set: Grayscale an image with desired number of output channels + # Case 1: RGB -> 1 channel grayscale + trans1 = transforms.Grayscale(num_output_channels=1) + gray_pil_1 = trans1(x_pil) + gray_np_1 = np.array(gray_pil_1) + assert gray_pil_1.mode == "L", "mode should be L" + assert gray_np_1.shape == tuple(x_shape[0:2]), "should be 1 channel" + assert_equal(gray_np, gray_np_1) + + # Case 2: RGB -> 3 channel grayscale + trans2 = transforms.Grayscale(num_output_channels=3) + gray_pil_2 = trans2(x_pil) + gray_np_2 = np.array(gray_pil_2) + assert gray_pil_2.mode == "RGB", "mode should be RGB" + assert gray_np_2.shape == tuple(x_shape), "should be 3 channel" + assert_equal(gray_np_2[:, :, 0], gray_np_2[:, :, 1]) + assert_equal(gray_np_2[:, :, 1], gray_np_2[:, :, 2]) + assert_equal(gray_np, gray_np_2[:, :, 0]) + + # Case 3: 1 channel grayscale -> 1 channel grayscale + trans3 = transforms.Grayscale(num_output_channels=1) + gray_pil_3 = trans3(x_pil_2) + gray_np_3 = np.array(gray_pil_3) + assert gray_pil_3.mode == "L", "mode should be L" + assert gray_np_3.shape == tuple(x_shape[0:2]), "should be 1 channel" + assert_equal(gray_np, gray_np_3) + + # Case 4: 1 channel grayscale -> 3 channel grayscale + trans4 = transforms.Grayscale(num_output_channels=3) + gray_pil_4 = trans4(x_pil_2) + gray_np_4 = np.array(gray_pil_4) + assert gray_pil_4.mode == "RGB", "mode should be RGB" + assert gray_np_4.shape == tuple(x_shape), "should be 3 channel" + assert_equal(gray_np_4[:, :, 0], gray_np_4[:, :, 1]) + assert_equal(gray_np_4[:, :, 1], gray_np_4[:, :, 2]) + assert_equal(gray_np, gray_np_4[:, :, 0]) + + # Checking if Grayscale can be printed as string + trans4.__repr__() + + +@pytest.mark.parametrize("seed", range(10)) +@pytest.mark.parametrize("p", (0, 1)) +def test_random_apply(p, seed): + torch.manual_seed(seed) + random_apply_transform = transforms.RandomApply([transforms.RandomRotation((45, 50))], p=p) + img = transforms.ToPILImage()(torch.rand(3, 30, 40)) + out = random_apply_transform(img) + if p == 0: + assert out == img + elif p == 1: + assert out != img + + # Checking if RandomApply can be printed as string + random_apply_transform.__repr__() + + 
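The test above shows, in miniature, the conversion pattern this migration applies throughout: `@pytest.mark.parametrize` replaces the hand-rolled loops inside `unittest.TestCase` methods, bare `assert` replaces `self.assertEqual`/`self.assertTrue`, and `pytest.raises(..., match=...)` replaces `self.assertRaises` while additionally checking the error message. Below is a minimal self-contained sketch of that pattern; `CoinFlip` and its error message are illustrative stand-ins, not torchvision API.

import pytest
import torch


class CoinFlip:
    """Hypothetical transform standing in for a probabilistic torchvision transform."""

    def __init__(self, p):
        if not (0.0 <= p <= 1.0):
            raise ValueError("p should be between 0 and 1")
        self.p = p

    def __call__(self, x):
        # negate the input with probability p
        return -x if torch.rand(()) < self.p else x


# parametrize replaces the explicit for-loops of the unittest version,
# so every (p, seed) pair reports as its own test case
@pytest.mark.parametrize("seed", range(3))
@pytest.mark.parametrize("p", (0, 1))
def test_coin_flip(p, seed):
    torch.manual_seed(seed)
    x = torch.ones(4)
    out = CoinFlip(p)(x)
    # bare asserts replace self.assertEqual / self.assertTrue
    assert torch.equal(out, -x if p == 1 else x)


def test_coin_flip_error():
    # pytest.raises(..., match=...) replaces self.assertRaises and, unlike it,
    # also verifies the message; one raising call per context manager, since
    # any statement after the first raise would never execute
    with pytest.raises(ValueError, match=r"p should be between 0 and 1"):
        CoinFlip(p=2)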
+@pytest.mark.parametrize("seed", range(10)) +@pytest.mark.parametrize("proba_passthrough", (0, 1)) +def test_random_choice(proba_passthrough, seed): + random.seed(seed) # RandomChoice relies on python builtin random.choice, not pytorch + + random_choice_transform = transforms.RandomChoice( + [ + lambda x: x, # passthrough + transforms.RandomRotation((45, 50)), + ], + p=[proba_passthrough, 1 - proba_passthrough], + ) + + img = transforms.ToPILImage()(torch.rand(3, 30, 40)) + out = random_choice_transform(img) + if proba_passthrough == 1: + assert out == img + elif proba_passthrough == 0: + assert out != img + + # Checking if RandomChoice can be printed as string + random_choice_transform.__repr__() + + +@pytest.mark.skipif(stats is None, reason="scipy.stats not available") +def test_random_order(): + random_state = random.getstate() + random.seed(42) + random_order_transform = transforms.RandomOrder([transforms.Resize(20, antialias=True), transforms.CenterCrop(10)]) + img = transforms.ToPILImage()(torch.rand(3, 25, 25)) + num_samples = 250 + num_normal_order = 0 + resize_crop_out = transforms.CenterCrop(10)(transforms.Resize(20, antialias=True)(img)) + for _ in range(num_samples): + out = random_order_transform(img) + if out == resize_crop_out: + num_normal_order += 1 + + p_value = stats.binomtest(num_normal_order, num_samples, p=0.5).pvalue + random.setstate(random_state) + assert p_value > 0.0001 + + # Checking if RandomOrder can be printed as string + random_order_transform.__repr__() + + +def test_linear_transformation(): + num_samples = 1000 + x = torch.randn(num_samples, 3, 10, 10) + flat_x = x.view(x.size(0), x.size(1) * x.size(2) * x.size(3)) + # compute principal components + sigma = torch.mm(flat_x.t(), flat_x) / flat_x.size(0) + u, s, _ = np.linalg.svd(sigma.numpy()) + zca_epsilon = 1e-10 # avoid division by 0 + d = torch.Tensor(np.diag(1.0 / np.sqrt(s + zca_epsilon))) + u = torch.Tensor(u) + principal_components = torch.mm(torch.mm(u, d), u.t()) + mean_vector = torch.sum(flat_x, dim=0) / flat_x.size(0) + # initialize whitening matrix + whitening = transforms.LinearTransformation(principal_components, mean_vector) + # estimate covariance and mean using weak law of large number + num_features = flat_x.size(1) + cov = 0.0 + mean = 0.0 + for i in x: + xwhite = whitening(i) + xwhite = xwhite.view(1, -1).numpy() + cov += np.dot(xwhite, xwhite.T) / num_features + mean += np.sum(xwhite) / num_features + # if rtol for std = 1e-3 then rtol for cov = 2e-3 as std**2 = cov + torch.testing.assert_close( + cov / num_samples, np.identity(1), rtol=2e-3, atol=1e-8, check_dtype=False, msg="cov not close to 1" + ) + torch.testing.assert_close( + mean / num_samples, 0, rtol=1e-3, atol=1e-8, check_dtype=False, msg="mean not close to 0" + ) + + # Checking if LinearTransformation can be printed as string + whitening.__repr__() + + +@pytest.mark.parametrize("dtype", int_dtypes()) +def test_max_value(dtype): + + assert F_t._max_value(dtype) == torch.iinfo(dtype).max + # remove float testing as it can lead to errors such as + # runtime error: 5.7896e+76 is outside the range of representable values of type 'float' + # for dtype in float_dtypes(): + # self.assertGreater(F_t._max_value(dtype), torch.finfo(dtype).max) + + +@pytest.mark.xfail( + reason="torch.iinfo() is not supported by torchscript. See https://github.com/pytorch/pytorch/issues/41492." 
+) +def test_max_value_iinfo(): + @torch.jit.script + def max_value(image: torch.Tensor) -> int: + return 1 if image.is_floating_point() else torch.iinfo(image.dtype).max + + +@pytest.mark.parametrize("should_vflip", [True, False]) +@pytest.mark.parametrize("single_dim", [True, False]) +def test_ten_crop(should_vflip, single_dim): + to_pil_image = transforms.ToPILImage() + h = random.randint(5, 25) + w = random.randint(5, 25) + crop_h = random.randint(1, h) + crop_w = random.randint(1, w) + if single_dim: + crop_h = min(crop_h, crop_w) + crop_w = crop_h + transform = transforms.TenCrop(crop_h, vertical_flip=should_vflip) + five_crop = transforms.FiveCrop(crop_h) + else: + transform = transforms.TenCrop((crop_h, crop_w), vertical_flip=should_vflip) + five_crop = transforms.FiveCrop((crop_h, crop_w)) + + img = to_pil_image(torch.FloatTensor(3, h, w).uniform_()) + results = transform(img) + expected_output = five_crop(img) + + # Checking if FiveCrop and TenCrop can be printed as string + transform.__repr__() + five_crop.__repr__() + + if should_vflip: + vflipped_img = img.transpose(Image.FLIP_TOP_BOTTOM) + expected_output += five_crop(vflipped_img) + else: + hflipped_img = img.transpose(Image.FLIP_LEFT_RIGHT) + expected_output += five_crop(hflipped_img) + + assert len(results) == 10 + assert results == expected_output + + +@pytest.mark.parametrize("single_dim", [True, False]) +def test_five_crop(single_dim): + to_pil_image = transforms.ToPILImage() + h = random.randint(5, 25) + w = random.randint(5, 25) + crop_h = random.randint(1, h) + crop_w = random.randint(1, w) + if single_dim: + crop_h = min(crop_h, crop_w) + crop_w = crop_h + transform = transforms.FiveCrop(crop_h) + else: + transform = transforms.FiveCrop((crop_h, crop_w)) + + img = torch.FloatTensor(3, h, w).uniform_() + + results = transform(to_pil_image(img)) + + assert len(results) == 5 + for crop in results: + assert crop.size == (crop_w, crop_h) + + to_pil_image = transforms.ToPILImage() + tl = to_pil_image(img[:, 0:crop_h, 0:crop_w]) + tr = to_pil_image(img[:, 0:crop_h, w - crop_w :]) + bl = to_pil_image(img[:, h - crop_h :, 0:crop_w]) + br = to_pil_image(img[:, h - crop_h :, w - crop_w :]) + center = transforms.CenterCrop((crop_h, crop_w))(to_pil_image(img)) + expected_output = (tl, tr, bl, br, center) + assert results == expected_output + + +@pytest.mark.parametrize("policy", transforms.AutoAugmentPolicy) +@pytest.mark.parametrize("fill", [None, 85, (128, 128, 128)]) +@pytest.mark.parametrize("grayscale", [True, False]) +def test_autoaugment(policy, fill, grayscale): + random.seed(42) + img = Image.open(GRACE_HOPPER) + if grayscale: + img, fill = _get_grayscale_test_image(img, fill) + transform = transforms.AutoAugment(policy=policy, fill=fill) + for _ in range(100): + img = transform(img) + transform.__repr__() + + +@pytest.mark.parametrize("num_ops", [1, 2, 3]) +@pytest.mark.parametrize("magnitude", [7, 9, 11]) +@pytest.mark.parametrize("fill", [None, 85, (128, 128, 128)]) +@pytest.mark.parametrize("grayscale", [True, False]) +def test_randaugment(num_ops, magnitude, fill, grayscale): + random.seed(42) + img = Image.open(GRACE_HOPPER) + if grayscale: + img, fill = _get_grayscale_test_image(img, fill) + transform = transforms.RandAugment(num_ops=num_ops, magnitude=magnitude, fill=fill) + for _ in range(100): + img = transform(img) + transform.__repr__() + + +@pytest.mark.parametrize("fill", [None, 85, (128, 128, 128)]) +@pytest.mark.parametrize("num_magnitude_bins", [10, 13, 30]) +@pytest.mark.parametrize("grayscale", [True, 
False]) +def test_trivialaugmentwide(fill, num_magnitude_bins, grayscale): + random.seed(42) + img = Image.open(GRACE_HOPPER) + if grayscale: + img, fill = _get_grayscale_test_image(img, fill) + transform = transforms.TrivialAugmentWide(fill=fill, num_magnitude_bins=num_magnitude_bins) + for _ in range(100): + img = transform(img) + transform.__repr__() + + +@pytest.mark.parametrize("fill", [None, 85, (128, 128, 128)]) +@pytest.mark.parametrize("severity", [1, 10]) +@pytest.mark.parametrize("mixture_width", [1, 2]) +@pytest.mark.parametrize("chain_depth", [-1, 2]) +@pytest.mark.parametrize("all_ops", [True, False]) +@pytest.mark.parametrize("grayscale", [True, False]) +def test_augmix(fill, severity, mixture_width, chain_depth, all_ops, grayscale): + random.seed(42) + img = Image.open(GRACE_HOPPER) + if grayscale: + img, fill = _get_grayscale_test_image(img, fill) + transform = transforms.AugMix( + fill=fill, severity=severity, mixture_width=mixture_width, chain_depth=chain_depth, all_ops=all_ops + ) + for _ in range(100): + img = transform(img) + transform.__repr__() + + +def test_random_crop(): + height = random.randint(10, 32) * 2 + width = random.randint(10, 32) * 2 + oheight = random.randint(5, (height - 2) // 2) * 2 + owidth = random.randint(5, (width - 2) // 2) * 2 + img = torch.ones(3, height, width, dtype=torch.uint8) + result = transforms.Compose( + [ + transforms.ToPILImage(), + transforms.RandomCrop((oheight, owidth)), + transforms.PILToTensor(), + ] + )(img) + assert result.size(1) == oheight + assert result.size(2) == owidth + + padding = random.randint(1, 20) + result = transforms.Compose( + [ + transforms.ToPILImage(), + transforms.RandomCrop((oheight, owidth), padding=padding), + transforms.PILToTensor(), + ] + )(img) + assert result.size(1) == oheight + assert result.size(2) == owidth + + result = transforms.Compose( + [transforms.ToPILImage(), transforms.RandomCrop((height, width)), transforms.PILToTensor()] + )(img) + assert result.size(1) == height + assert result.size(2) == width + torch.testing.assert_close(result, img) + + result = transforms.Compose( + [ + transforms.ToPILImage(), + transforms.RandomCrop((height + 1, width + 1), pad_if_needed=True), + transforms.PILToTensor(), + ] + )(img) + assert result.size(1) == height + 1 + assert result.size(2) == width + 1 + + t = transforms.RandomCrop(33) + img = torch.ones(3, 32, 32) + with pytest.raises(ValueError, match=r"Required crop size .+ is larger than input image size .+"): + t(img) + + +def test_center_crop(): + height = random.randint(10, 32) * 2 + width = random.randint(10, 32) * 2 + oheight = random.randint(5, (height - 2) // 2) * 2 + owidth = random.randint(5, (width - 2) // 2) * 2 + + img = torch.ones(3, height, width, dtype=torch.uint8) + oh1 = (height - oheight) // 2 + ow1 = (width - owidth) // 2 + imgnarrow = img[:, oh1 : oh1 + oheight, ow1 : ow1 + owidth] + imgnarrow.fill_(0) + result = transforms.Compose( + [ + transforms.ToPILImage(), + transforms.CenterCrop((oheight, owidth)), + transforms.PILToTensor(), + ] + )(img) + assert result.sum() == 0 + oheight += 1 + owidth += 1 + result = transforms.Compose( + [ + transforms.ToPILImage(), + transforms.CenterCrop((oheight, owidth)), + transforms.PILToTensor(), + ] + )(img) + sum1 = result.sum() + assert sum1 > 1 + oheight += 1 + owidth += 1 + result = transforms.Compose( + [ + transforms.ToPILImage(), + transforms.CenterCrop((oheight, owidth)), + transforms.PILToTensor(), + ] + )(img) + sum2 = result.sum() + assert sum2 > 0 + assert sum2 > sum1 + + 
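The sums checked above follow from the offset arithmetic of a center crop: the window starts at `(ih - ch) // 2` per dimension, so each time the requested size grows past the zeroed center region the crop takes in more of the surrounding ones and the sum increases. Once the requested crop exceeds the image itself, the offset goes negative and the output is zero-padded, which is the case the next test exercises. A small sketch of that arithmetic, assuming it mirrors what `CenterCrop` computes internally (`center_crop_offsets` is an illustrative helper, not torchvision API):

def center_crop_offsets(img_hw, crop_hw):
    """Top-left corner of a center crop; negative offsets mean padding."""
    ih, iw = img_hw
    ch, cw = crop_hw
    return (ih - ch) // 2, (iw - cw) // 2


# cropping 10x10 out of 16x20 starts at row (16 - 10) // 2 = 3, col (20 - 10) // 2 = 5
assert center_crop_offsets((16, 20), (10, 10)) == (3, 5)

# requesting 18x18 from 16x20 gives a row offset of (16 - 18) // 2 = -1,
# i.e. one zero-padded row above (and one below) the original image
assert center_crop_offsets((16, 20), (18, 18)) == (-1, 1)

The negative-offset case corresponds (up to rounding of odd differences) to the padding that the next test tracks via `crop_center_tl`, while the clamped positive case corresponds to `input_center_tl`.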
+@pytest.mark.parametrize("odd_image_size", (True, False)) +@pytest.mark.parametrize("delta", (1, 3, 5)) +@pytest.mark.parametrize("delta_width", (-2, -1, 0, 1, 2)) +@pytest.mark.parametrize("delta_height", (-2, -1, 0, 1, 2)) +def test_center_crop_2(odd_image_size, delta, delta_width, delta_height): + """Tests when center crop size is larger than image size, along any dimension""" + + # Since height is independent of width, we can ignore images with odd height and even width and vice-versa. + input_image_size = (random.randint(10, 32) * 2, random.randint(10, 32) * 2) + if odd_image_size: + input_image_size = (input_image_size[0] + 1, input_image_size[1] + 1) + + delta_height *= delta + delta_width *= delta + + img = torch.ones(3, *input_image_size, dtype=torch.uint8) + crop_size = (input_image_size[0] + delta_height, input_image_size[1] + delta_width) + + # Test both transforms, one with PIL input and one with tensor + output_pil = transforms.Compose( + [transforms.ToPILImage(), transforms.CenterCrop(crop_size), transforms.PILToTensor()], + )(img) + assert output_pil.size()[1:3] == crop_size + + output_tensor = transforms.CenterCrop(crop_size)(img) + assert output_tensor.size()[1:3] == crop_size + + # Ensure output for PIL and Tensor are equal + assert_equal( + output_tensor, + output_pil, + msg=f"image_size: {input_image_size} crop_size: {crop_size}", + ) + + # Check if content in center of both image and cropped output is same. + center_size = (min(crop_size[0], input_image_size[0]), min(crop_size[1], input_image_size[1])) + crop_center_tl, input_center_tl = [0, 0], [0, 0] + for index in range(2): + if crop_size[index] > input_image_size[index]: + crop_center_tl[index] = (crop_size[index] - input_image_size[index]) // 2 + else: + input_center_tl[index] = (input_image_size[index] - crop_size[index]) // 2 + + output_center = output_pil[ + :, + crop_center_tl[0] : crop_center_tl[0] + center_size[0], + crop_center_tl[1] : crop_center_tl[1] + center_size[1], + ] + + img_center = img[ + :, + input_center_tl[0] : input_center_tl[0] + center_size[0], + input_center_tl[1] : input_center_tl[1] + center_size[1], + ] + + assert_equal(output_center, img_center) + + +def test_color_jitter(): + color_jitter = transforms.ColorJitter(2, 2, 2, 0.1) + + x_shape = [2, 2, 3] + x_data = [0, 5, 13, 54, 135, 226, 37, 8, 234, 90, 255, 1] + x_np = np.array(x_data, dtype=np.uint8).reshape(x_shape) + x_pil = Image.fromarray(x_np, mode="RGB") + x_pil_2 = x_pil.convert("L") + + for _ in range(10): + y_pil = color_jitter(x_pil) + assert y_pil.mode == x_pil.mode + + y_pil_2 = color_jitter(x_pil_2) + assert y_pil_2.mode == x_pil_2.mode + + # Checking if ColorJitter can be printed as string + color_jitter.__repr__() + + +@pytest.mark.parametrize("hue", [1, (-1, 1)]) +def test_color_jitter_hue_out_of_bounds(hue): + with pytest.raises(ValueError, match=re.escape("hue values should be between (-0.5, 0.5)")): + transforms.ColorJitter(hue=hue) + + +@pytest.mark.parametrize("seed", range(10)) +@pytest.mark.skipif(stats is None, reason="scipy.stats not available") +def test_random_erasing(seed): + torch.random.manual_seed(seed) + img = torch.ones(3, 128, 128) + + t = transforms.RandomErasing(scale=(0.1, 0.1), ratio=(1 / 3, 3.0)) + y, x, h, w, v = t.get_params( + img, + t.scale, + t.ratio, + [ + t.value, + ], + ) + aspect_ratio = h / w + # Add some tolerance due to the rounding and int conversion used in the transform + tol = 0.05 + assert 1 / 3 - tol <= aspect_ratio <= 3 + tol + + # Make sure that h > w and h < w are equally 
likely (log-scale sampling)
+ aspect_ratios = []
+ random.seed(42)
+ trial = 1000
+ for _ in range(trial):
+ y, x, h, w, v = t.get_params(
+ img,
+ t.scale,
+ t.ratio,
+ [
+ t.value,
+ ],
+ )
+ aspect_ratios.append(h / w)
+
+ count_bigger_than_ones = len([1 for aspect_ratio in aspect_ratios if aspect_ratio > 1])
+ p_value = stats.binomtest(count_bigger_than_ones, trial, p=0.5).pvalue
+ assert p_value > 0.0001
+
+ # Checking if RandomErasing can be printed as string
+ t.__repr__()
+
+
+def test_random_rotation():
+
+ with pytest.raises(ValueError):
+ transforms.RandomRotation(-0.7)
+
+ with pytest.raises(ValueError):
+ transforms.RandomRotation([-0.7])
+
+ with pytest.raises(ValueError):
+ transforms.RandomRotation([-0.7, 0, 0.7])
+
+ t = transforms.RandomRotation(0, fill=None)
+ assert t.fill == 0
+
+ t = transforms.RandomRotation(10)
+ angle = t.get_params(t.degrees)
+ assert -10 < angle < 10
+
+ t = transforms.RandomRotation((-10, 10))
+ angle = t.get_params(t.degrees)
+ assert -10 < angle < 10
- @unittest.skipIf(stats is None, 'scipy.stats not available')
- def test_random_vertical_flip(self):
- random_state = random.getstate()
- random.seed(42)
- img = transforms.ToPILImage()(torch.rand(3, 10, 10))
- vimg = img.transpose(Image.FLIP_TOP_BOTTOM)
-
- num_samples = 250
- num_vertical = 0
- for _ in range(num_samples):
- out = transforms.RandomVerticalFlip()(img)
- if out == vimg:
- num_vertical += 1
-
- p_value = stats.binom_test(num_vertical, num_samples, p=0.5)
- random.setstate(random_state)
- self.assertGreater(p_value, 0.0001)
-
- num_samples = 250
- num_vertical = 0
- for _ in range(num_samples):
- out = transforms.RandomVerticalFlip(p=0.7)(img)
- if out == vimg:
- num_vertical += 1
-
- p_value = stats.binom_test(num_vertical, num_samples, p=0.7)
- random.setstate(random_state)
- self.assertGreater(p_value, 0.0001)
-
- # Checking if RandomVerticalFlip can be printed as string
- transforms.RandomVerticalFlip().__repr__()
-
- @unittest.skipIf(stats is None, 'scipy.stats not available')
- def test_random_horizontal_flip(self):
- random_state = random.getstate()
- random.seed(42)
- img = transforms.ToPILImage()(torch.rand(3, 10, 10))
- himg = img.transpose(Image.FLIP_LEFT_RIGHT)
-
- num_samples = 250
- num_horizontal = 0
- for _ in range(num_samples):
- out = transforms.RandomHorizontalFlip()(img)
- if out == himg:
- num_horizontal += 1
-
- p_value = stats.binom_test(num_horizontal, num_samples, p=0.5)
- random.setstate(random_state)
- self.assertGreater(p_value, 0.0001)
-
- num_samples = 250
- num_horizontal = 0
- for _ in range(num_samples):
- out = transforms.RandomHorizontalFlip(p=0.7)(img)
- if out == himg:
- num_horizontal += 1
-
- p_value = stats.binom_test(num_horizontal, num_samples, p=0.7)
- random.setstate(random_state)
- self.assertGreater(p_value, 0.0001)
-
- # Checking if RandomHorizontalFlip can be printed as string
- transforms.RandomHorizontalFlip().__repr__()
-
- @unittest.skipIf(stats is None, 'scipy.stats is not available')
- def test_normalize(self):
- def samples_from_standard_normal(tensor):
- p_value = stats.kstest(list(tensor.view(-1)), 'norm', args=(0, 1)).pvalue
- return p_value > 0.0001
-
- random_state = random.getstate()
- random.seed(42)
- for channels in [1, 3]:
- img = torch.rand(channels, 10, 10)
- mean = [img[c].mean() for c in range(channels)]
- std = [img[c].std() for c in range(channels)]
- normalized = transforms.Normalize(mean, std)(img)
- self.assertTrue(samples_from_standard_normal(normalized))
- random.setstate(random_state)
-
- # 
Checking if Normalize can be printed as string - transforms.Normalize(mean, std).__repr__() - - # Checking the optional in-place behaviour - tensor = torch.rand((1, 16, 16)) - tensor_inplace = transforms.Normalize((0.5,), (0.5,), inplace=True)(tensor) - self.assertTrue(torch.equal(tensor, tensor_inplace)) - - def test_normalize_different_dtype(self): - for dtype1 in [torch.float32, torch.float64]: - img = torch.rand(3, 10, 10, dtype=dtype1) - for dtype2 in [torch.int64, torch.float32, torch.float64]: - mean = torch.tensor([1, 2, 3], dtype=dtype2) - std = torch.tensor([1, 2, 1], dtype=dtype2) - # checks that it doesn't crash - transforms.functional.normalize(img, mean, std) - - def test_adjust_brightness(self): - x_shape = [2, 2, 3] - x_data = [0, 5, 13, 54, 135, 226, 37, 8, 234, 90, 255, 1] - x_np = np.array(x_data, dtype=np.uint8).reshape(x_shape) - x_pil = Image.fromarray(x_np, mode='RGB') - - # test 0 - y_pil = F.adjust_brightness(x_pil, 1) - y_np = np.array(y_pil) - self.assertTrue(np.allclose(y_np, x_np)) - - # test 1 - y_pil = F.adjust_brightness(x_pil, 0.5) - y_np = np.array(y_pil) - y_ans = [0, 2, 6, 27, 67, 113, 18, 4, 117, 45, 127, 0] - y_ans = np.array(y_ans, dtype=np.uint8).reshape(x_shape) - self.assertTrue(np.allclose(y_np, y_ans)) - - # test 2 - y_pil = F.adjust_brightness(x_pil, 2) - y_np = np.array(y_pil) - y_ans = [0, 10, 26, 108, 255, 255, 74, 16, 255, 180, 255, 2] - y_ans = np.array(y_ans, dtype=np.uint8).reshape(x_shape) - self.assertTrue(np.allclose(y_np, y_ans)) - - def test_adjust_contrast(self): - x_shape = [2, 2, 3] - x_data = [0, 5, 13, 54, 135, 226, 37, 8, 234, 90, 255, 1] - x_np = np.array(x_data, dtype=np.uint8).reshape(x_shape) - x_pil = Image.fromarray(x_np, mode='RGB') - - # test 0 - y_pil = F.adjust_contrast(x_pil, 1) - y_np = np.array(y_pil) - self.assertTrue(np.allclose(y_np, x_np)) - - # test 1 - y_pil = F.adjust_contrast(x_pil, 0.5) - y_np = np.array(y_pil) - y_ans = [43, 45, 49, 70, 110, 156, 61, 47, 160, 88, 170, 43] - y_ans = np.array(y_ans, dtype=np.uint8).reshape(x_shape) - self.assertTrue(np.allclose(y_np, y_ans)) - - # test 2 - y_pil = F.adjust_contrast(x_pil, 2) - y_np = np.array(y_pil) - y_ans = [0, 0, 0, 22, 184, 255, 0, 0, 255, 94, 255, 0] - y_ans = np.array(y_ans, dtype=np.uint8).reshape(x_shape) - self.assertTrue(np.allclose(y_np, y_ans)) - - def test_adjust_saturation(self): - x_shape = [2, 2, 3] - x_data = [0, 5, 13, 54, 135, 226, 37, 8, 234, 90, 255, 1] - x_np = np.array(x_data, dtype=np.uint8).reshape(x_shape) - x_pil = Image.fromarray(x_np, mode='RGB') - - # test 0 - y_pil = F.adjust_saturation(x_pil, 1) - y_np = np.array(y_pil) - self.assertTrue(np.allclose(y_np, x_np)) - - # test 1 - y_pil = F.adjust_saturation(x_pil, 0.5) - y_np = np.array(y_pil) - y_ans = [2, 4, 8, 87, 128, 173, 39, 25, 138, 133, 215, 88] - y_ans = np.array(y_ans, dtype=np.uint8).reshape(x_shape) - self.assertTrue(np.allclose(y_np, y_ans)) - - # test 2 - y_pil = F.adjust_saturation(x_pil, 2) - y_np = np.array(y_pil) - y_ans = [0, 6, 22, 0, 149, 255, 32, 0, 255, 4, 255, 0] - y_ans = np.array(y_ans, dtype=np.uint8).reshape(x_shape) - self.assertTrue(np.allclose(y_np, y_ans)) - - def test_adjust_hue(self): - x_shape = [2, 2, 3] - x_data = [0, 5, 13, 54, 135, 226, 37, 8, 234, 90, 255, 1] - x_np = np.array(x_data, dtype=np.uint8).reshape(x_shape) - x_pil = Image.fromarray(x_np, mode='RGB') - - with self.assertRaises(ValueError): - F.adjust_hue(x_pil, -0.7) - F.adjust_hue(x_pil, 1) - - # test 0: almost same as x_data but not exact. 
- # probably because hsv <-> rgb floating point ops - y_pil = F.adjust_hue(x_pil, 0) - y_np = np.array(y_pil) - y_ans = [0, 5, 13, 54, 139, 226, 35, 8, 234, 91, 255, 1] - y_ans = np.array(y_ans, dtype=np.uint8).reshape(x_shape) - self.assertTrue(np.allclose(y_np, y_ans)) - - # test 1 - y_pil = F.adjust_hue(x_pil, 0.25) - y_np = np.array(y_pil) - y_ans = [13, 0, 12, 224, 54, 226, 234, 8, 99, 1, 222, 255] - y_ans = np.array(y_ans, dtype=np.uint8).reshape(x_shape) - self.assertTrue(np.allclose(y_np, y_ans)) - - # test 2 - y_pil = F.adjust_hue(x_pil, -0.25) - y_np = np.array(y_pil) - y_ans = [0, 13, 2, 54, 226, 58, 8, 234, 152, 255, 43, 1] - y_ans = np.array(y_ans, dtype=np.uint8).reshape(x_shape) - self.assertTrue(np.allclose(y_np, y_ans)) - - def test_adjust_gamma(self): - x_shape = [2, 2, 3] - x_data = [0, 5, 13, 54, 135, 226, 37, 8, 234, 90, 255, 1] - x_np = np.array(x_data, dtype=np.uint8).reshape(x_shape) - x_pil = Image.fromarray(x_np, mode='RGB') - - # test 0 - y_pil = F.adjust_gamma(x_pil, 1) - y_np = np.array(y_pil) - self.assertTrue(np.allclose(y_np, x_np)) - - # test 1 - y_pil = F.adjust_gamma(x_pil, 0.5) - y_np = np.array(y_pil) - y_ans = [0, 35, 57, 117, 185, 240, 97, 45, 244, 151, 255, 15] - y_ans = np.array(y_ans, dtype=np.uint8).reshape(x_shape) - self.assertTrue(np.allclose(y_np, y_ans)) - - # test 2 - y_pil = F.adjust_gamma(x_pil, 2) - y_np = np.array(y_pil) - y_ans = [0, 0, 0, 11, 71, 200, 5, 0, 214, 31, 255, 0] - y_ans = np.array(y_ans, dtype=np.uint8).reshape(x_shape) - self.assertTrue(np.allclose(y_np, y_ans)) - - def test_adjusts_L_mode(self): - x_shape = [2, 2, 3] - x_data = [0, 5, 13, 54, 135, 226, 37, 8, 234, 90, 255, 1] - x_np = np.array(x_data, dtype=np.uint8).reshape(x_shape) - x_rgb = Image.fromarray(x_np, mode='RGB') - - x_l = x_rgb.convert('L') - self.assertEqual(F.adjust_brightness(x_l, 2).mode, 'L') - self.assertEqual(F.adjust_saturation(x_l, 2).mode, 'L') - self.assertEqual(F.adjust_contrast(x_l, 2).mode, 'L') - self.assertEqual(F.adjust_hue(x_l, 0.4).mode, 'L') - self.assertEqual(F.adjust_gamma(x_l, 0.5).mode, 'L') - - def test_color_jitter(self): - color_jitter = transforms.ColorJitter(2, 2, 2, 0.1) - - x_shape = [2, 2, 3] - x_data = [0, 5, 13, 54, 135, 226, 37, 8, 234, 90, 255, 1] - x_np = np.array(x_data, dtype=np.uint8).reshape(x_shape) - x_pil = Image.fromarray(x_np, mode='RGB') - x_pil_2 = x_pil.convert('L') - - for i in range(10): - y_pil = color_jitter(x_pil) - self.assertEqual(y_pil.mode, x_pil.mode) - - y_pil_2 = color_jitter(x_pil_2) - self.assertEqual(y_pil_2.mode, x_pil_2.mode) - - # Checking if ColorJitter can be printed as string - color_jitter.__repr__() - - def test_linear_transformation(self): - num_samples = 1000 - x = torch.randn(num_samples, 3, 10, 10) - flat_x = x.view(x.size(0), x.size(1) * x.size(2) * x.size(3)) - # compute principal components - sigma = torch.mm(flat_x.t(), flat_x) / flat_x.size(0) - u, s, _ = np.linalg.svd(sigma.numpy()) - zca_epsilon = 1e-10 # avoid division by 0 - d = torch.Tensor(np.diag(1. 
/ np.sqrt(s + zca_epsilon))) - u = torch.Tensor(u) - principal_components = torch.mm(torch.mm(u, d), u.t()) - mean_vector = (torch.sum(flat_x, dim=0) / flat_x.size(0)) - # initialize whitening matrix - whitening = transforms.LinearTransformation(principal_components, mean_vector) - # estimate covariance and mean using weak law of large number - num_features = flat_x.size(1) - cov = 0.0 - mean = 0.0 - for i in x: - xwhite = whitening(i) - xwhite = xwhite.view(1, -1).numpy() - cov += np.dot(xwhite, xwhite.T) / num_features - mean += np.sum(xwhite) / num_features - # if rtol for std = 1e-3 then rtol for cov = 2e-3 as std**2 = cov - self.assertTrue(np.allclose(cov / num_samples, np.identity(1), rtol=2e-3), - "cov not close to 1") - self.assertTrue(np.allclose(mean / num_samples, 0, rtol=1e-3), - "mean not close to 0") - - # Checking if LinearTransformation can be printed as string - whitening.__repr__() - - def test_rotate(self): - x = np.zeros((100, 100, 3), dtype=np.uint8) - x[40, 40] = [255, 255, 255] - - with self.assertRaises(TypeError): - F.rotate(x, 10) - - img = F.to_pil_image(x) - - result = F.rotate(img, 45) - self.assertEqual(result.size, (100, 100)) - r, c, ch = np.where(result) - self.assertTrue(all(x in r for x in [49, 50])) - self.assertTrue(all(x in c for x in [36])) - self.assertTrue(all(x in ch for x in [0, 1, 2])) - - result = F.rotate(img, 45, expand=True) - self.assertEqual(result.size, (142, 142)) - r, c, ch = np.where(result) - self.assertTrue(all(x in r for x in [70, 71])) - self.assertTrue(all(x in c for x in [57])) - self.assertTrue(all(x in ch for x in [0, 1, 2])) - - result = F.rotate(img, 45, center=(40, 40)) - self.assertEqual(result.size, (100, 100)) - r, c, ch = np.where(result) - self.assertTrue(all(x in r for x in [40])) - self.assertTrue(all(x in c for x in [40])) - self.assertTrue(all(x in ch for x in [0, 1, 2])) - - result_a = F.rotate(img, 90) - result_b = F.rotate(img, -270) - - self.assertTrue(np.all(np.array(result_a) == np.array(result_b))) - - def test_affine(self): + # Checking if RandomRotation can be printed as string + t.__repr__() + + t = transforms.RandomRotation((-10, 10), interpolation=Image.BILINEAR) + assert t.interpolation == transforms.InterpolationMode.BILINEAR + + +def test_random_rotation_error(): + # assert fill being either a Sequence or a Number + with pytest.raises(TypeError): + transforms.RandomRotation(0, fill={}) + + +def test_randomperspective(): + for _ in range(10): + height = random.randint(24, 32) * 2 + width = random.randint(24, 32) * 2 + img = torch.ones(3, height, width) + to_pil_image = transforms.ToPILImage() + img = to_pil_image(img) + perp = transforms.RandomPerspective() + startpoints, endpoints = perp.get_params(width, height, 0.5) + tr_img = F.perspective(img, startpoints, endpoints) + tr_img2 = F.convert_image_dtype(F.pil_to_tensor(F.perspective(tr_img, endpoints, startpoints))) + tr_img = F.convert_image_dtype(F.pil_to_tensor(tr_img)) + assert img.size[0] == width + assert img.size[1] == height + assert torch.nn.functional.mse_loss( + tr_img, F.convert_image_dtype(F.pil_to_tensor(img)) + ) + 0.3 > torch.nn.functional.mse_loss(tr_img2, F.convert_image_dtype(F.pil_to_tensor(img))) + + +@pytest.mark.parametrize("seed", range(10)) +@pytest.mark.parametrize("mode", ["L", "RGB", "F"]) +def test_randomperspective_fill(mode, seed): + torch.random.manual_seed(seed) + + # assert fill being either a Sequence or a Number + with pytest.raises(TypeError): + transforms.RandomPerspective(fill={}) + + t = 
transforms.RandomPerspective(fill=None) + assert t.fill == 0 + + height = 100 + width = 100 + img = torch.ones(3, height, width) + to_pil_image = transforms.ToPILImage() + img = to_pil_image(img) + fill = 127 + num_bands = len(mode) + + img_conv = img.convert(mode) + perspective = transforms.RandomPerspective(p=1, fill=fill) + tr_img = perspective(img_conv) + pixel = tr_img.getpixel((0, 0)) + + if not isinstance(pixel, tuple): + pixel = (pixel,) + assert pixel == tuple([fill] * num_bands) + + startpoints, endpoints = transforms.RandomPerspective.get_params(width, height, 0.5) + tr_img = F.perspective(img_conv, startpoints, endpoints, fill=fill) + pixel = tr_img.getpixel((0, 0)) + + if not isinstance(pixel, tuple): + pixel = (pixel,) + assert pixel == tuple([fill] * num_bands) + + wrong_num_bands = num_bands + 1 + with pytest.raises(ValueError): + F.perspective(img_conv, startpoints, endpoints, fill=tuple([fill] * wrong_num_bands)) + + +@pytest.mark.skipif(stats is None, reason="scipy.stats not available") +def test_normalize(): + def samples_from_standard_normal(tensor): + p_value = stats.kstest(list(tensor.view(-1)), "norm", args=(0, 1)).pvalue + return p_value > 0.0001 + + random_state = random.getstate() + random.seed(42) + for channels in [1, 3]: + img = torch.rand(channels, 10, 10) + mean = [img[c].mean() for c in range(channels)] + std = [img[c].std() for c in range(channels)] + normalized = transforms.Normalize(mean, std)(img) + assert samples_from_standard_normal(normalized) + random.setstate(random_state) + + # Checking if Normalize can be printed as string + transforms.Normalize(mean, std).__repr__() + + # Checking the optional in-place behaviour + tensor = torch.rand((1, 16, 16)) + tensor_inplace = transforms.Normalize((0.5,), (0.5,), inplace=True)(tensor) + assert_equal(tensor, tensor_inplace) + + +@pytest.mark.parametrize("dtype1", [torch.float32, torch.float64]) +@pytest.mark.parametrize("dtype2", [torch.int64, torch.float32, torch.float64]) +def test_normalize_different_dtype(dtype1, dtype2): + img = torch.rand(3, 10, 10, dtype=dtype1) + mean = torch.tensor([1, 2, 3], dtype=dtype2) + std = torch.tensor([1, 2, 1], dtype=dtype2) + # checks that it doesn't crash + transforms.functional.normalize(img, mean, std) + + +def test_normalize_3d_tensor(): + torch.manual_seed(28) + n_channels = 3 + img_size = 10 + mean = torch.rand(n_channels) + std = torch.rand(n_channels) + img = torch.rand(n_channels, img_size, img_size) + target = F.normalize(img, mean, std) + + mean_unsqueezed = mean.view(-1, 1, 1) + std_unsqueezed = std.view(-1, 1, 1) + result1 = F.normalize(img, mean_unsqueezed, std_unsqueezed) + result2 = F.normalize( + img, mean_unsqueezed.repeat(1, img_size, img_size), std_unsqueezed.repeat(1, img_size, img_size) + ) + torch.testing.assert_close(target, result1) + torch.testing.assert_close(target, result2) + + +class TestAffine: + @pytest.fixture(scope="class") + def input_img(self): input_img = np.zeros((40, 40, 3), dtype=np.uint8) - pts = [] - cnt = [20, 20] for pt in [(16, 16), (20, 16), (20, 20)]: for i in range(-5, 5): for j in range(-5, 5): input_img[pt[0] + i, pt[1] + j, :] = [255, 155, 55] - pts.append((pt[0] + i, pt[1] + j)) - pts = list(set(pts)) - - with self.assertRaises(TypeError): - F.affine(input_img, 10) - - pil_img = F.to_pil_image(input_img) - - def _to_3x3_inv(inv_result_matrix): - result_matrix = np.zeros((3, 3)) - result_matrix[:2, :] = np.array(inv_result_matrix).reshape((2, 3)) - result_matrix[2, 2] = 1 - return np.linalg.inv(result_matrix) - - def 
_test_transformation(a, t, s, sh): - a_rad = math.radians(a) - s_rad = [math.radians(sh_) for sh_ in sh] - cx, cy = cnt - tx, ty = t - sx, sy = s_rad - rot = a_rad - - # 1) Check transformation matrix: - C = np.array([[1, 0, cx], - [0, 1, cy], - [0, 0, 1]]) - T = np.array([[1, 0, tx], - [0, 1, ty], - [0, 0, 1]]) - Cinv = np.linalg.inv(C) - - RS = np.array( - [[s * math.cos(rot), -s * math.sin(rot), 0], - [s * math.sin(rot), s * math.cos(rot), 0], - [0, 0, 1]]) - - SHx = np.array([[1, -math.tan(sx), 0], - [0, 1, 0], - [0, 0, 1]]) - - SHy = np.array([[1, 0, 0], - [-math.tan(sy), 1, 0], - [0, 0, 1]]) - - RSS = np.matmul(RS, np.matmul(SHy, SHx)) - - true_matrix = np.matmul(T, np.matmul(C, np.matmul(RSS, Cinv))) - - result_matrix = _to_3x3_inv(F._get_inverse_affine_matrix(center=cnt, angle=a, - translate=t, scale=s, shear=sh)) - self.assertLess(np.sum(np.abs(true_matrix - result_matrix)), 1e-10) - # 2) Perform inverse mapping: - true_result = np.zeros((40, 40, 3), dtype=np.uint8) - inv_true_matrix = np.linalg.inv(true_matrix) - for y in range(true_result.shape[0]): - for x in range(true_result.shape[1]): - res = np.dot(inv_true_matrix, [x, y, 1]) - _x = int(res[0] + 0.5) - _y = int(res[1] + 0.5) - if 0 <= _x < input_img.shape[1] and 0 <= _y < input_img.shape[0]: - true_result[y, x, :] = input_img[_y, _x, :] - - result = F.affine(pil_img, angle=a, translate=t, scale=s, shear=sh) - self.assertEqual(result.size, pil_img.size) - # Compute number of different pixels: - np_result = np.array(result) - n_diff_pixels = np.sum(np_result != true_result) / 3 - # Accept 3 wrong pixels - self.assertLess(n_diff_pixels, 3, - "a={}, t={}, s={}, sh={}\n".format(a, t, s, sh) + - "n diff pixels={}\n".format(np.sum(np.array(result)[:, :, 0] != true_result[:, :, 0]))) + return input_img + + def test_affine_translate_seq(self, input_img): + with pytest.raises(TypeError, match=r"Argument translate should be a sequence"): + F.affine(input_img, 10, translate=0, scale=1, shear=1) + + @pytest.fixture(scope="class") + def pil_image(self, input_img): + return F.to_pil_image(input_img) + + def _to_3x3_inv(self, inv_result_matrix): + result_matrix = np.zeros((3, 3)) + result_matrix[:2, :] = np.array(inv_result_matrix).reshape((2, 3)) + result_matrix[2, 2] = 1 + return np.linalg.inv(result_matrix) + + def _test_transformation(self, angle, translate, scale, shear, pil_image, input_img, center=None): + + a_rad = math.radians(angle) + s_rad = [math.radians(sh_) for sh_ in shear] + cnt = [20, 20] if center is None else center + cx, cy = cnt + tx, ty = translate + sx, sy = s_rad + rot = a_rad + + # 1) Check transformation matrix: + C = np.array([[1, 0, cx], [0, 1, cy], [0, 0, 1]]) + T = np.array([[1, 0, tx], [0, 1, ty], [0, 0, 1]]) + Cinv = np.linalg.inv(C) + + RS = np.array( + [ + [scale * math.cos(rot), -scale * math.sin(rot), 0], + [scale * math.sin(rot), scale * math.cos(rot), 0], + [0, 0, 1], + ] + ) + + SHx = np.array([[1, -math.tan(sx), 0], [0, 1, 0], [0, 0, 1]]) + + SHy = np.array([[1, 0, 0], [-math.tan(sy), 1, 0], [0, 0, 1]]) + + RSS = np.matmul(RS, np.matmul(SHy, SHx)) + + true_matrix = np.matmul(T, np.matmul(C, np.matmul(RSS, Cinv))) + + result_matrix = self._to_3x3_inv( + F._get_inverse_affine_matrix(center=cnt, angle=angle, translate=translate, scale=scale, shear=shear) + ) + assert np.sum(np.abs(true_matrix - result_matrix)) < 1e-10 + # 2) Perform inverse mapping: + true_result = np.zeros((40, 40, 3), dtype=np.uint8) + inv_true_matrix = np.linalg.inv(true_matrix) + for y in range(true_result.shape[0]): + for x in 
range(true_result.shape[1]): + # Same as for PIL: + # https://github.com/python-pillow/Pillow/blob/71f8ec6a0cfc1008076a023c0756542539d057ab/ + # src/libImaging/Geometry.c#L1060 + input_pt = np.array([x + 0.5, y + 0.5, 1.0]) + res = np.floor(np.dot(inv_true_matrix, input_pt)).astype(int) + _x, _y = res[:2] + if 0 <= _x < input_img.shape[1] and 0 <= _y < input_img.shape[0]: + true_result[y, x, :] = input_img[_y, _x, :] + + result = F.affine(pil_image, angle=angle, translate=translate, scale=scale, shear=shear, center=center) + assert result.size == pil_image.size + # Compute number of different pixels: + np_result = np.array(result) + n_diff_pixels = np.sum(np_result != true_result) / 3 + # Accept 3 wrong pixels + error_msg = ( + f"angle={angle}, translate={translate}, scale={scale}, shear={shear}\nn diff pixels={n_diff_pixels}\n" + ) + assert n_diff_pixels < 3, error_msg + + def test_transformation_discrete(self, pil_image, input_img): + # Test rotation + angle = 45 + self._test_transformation( + angle=angle, translate=(0, 0), scale=1.0, shear=(0.0, 0.0), pil_image=pil_image, input_img=input_img + ) # Test rotation - a = 45 - _test_transformation(a=a, t=(0, 0), s=1.0, sh=(0.0, 0.0)) + angle = 45 + self._test_transformation( + angle=angle, + translate=(0, 0), + scale=1.0, + shear=(0.0, 0.0), + pil_image=pil_image, + input_img=input_img, + center=[0, 0], + ) # Test translation - t = [10, 15] - _test_transformation(a=0.0, t=t, s=1.0, sh=(0.0, 0.0)) + translate = [10, 15] + self._test_transformation( + angle=0.0, translate=translate, scale=1.0, shear=(0.0, 0.0), pil_image=pil_image, input_img=input_img + ) # Test scale - s = 1.2 - _test_transformation(a=0.0, t=(0.0, 0.0), s=s, sh=(0.0, 0.0)) + scale = 1.2 + self._test_transformation( + angle=0.0, translate=(0.0, 0.0), scale=scale, shear=(0.0, 0.0), pil_image=pil_image, input_img=input_img + ) # Test shear - sh = [45.0, 25.0] - _test_transformation(a=0.0, t=(0.0, 0.0), s=1.0, sh=sh) - - # Test rotation, scale, translation, shear - for a in range(-90, 90, 25): - for t1 in range(-10, 10, 5): - for s in [0.75, 0.98, 1.0, 1.1, 1.2]: - for sh in range(-15, 15, 5): - _test_transformation(a=a, t=(t1, t1), s=s, sh=(sh, sh)) - - def test_random_rotation(self): - - with self.assertRaises(ValueError): - transforms.RandomRotation(-0.7) - transforms.RandomRotation([-0.7]) - transforms.RandomRotation([-0.7, 0, 0.7]) - - t = transforms.RandomRotation(10) - angle = t.get_params(t.degrees) - self.assertTrue(angle > -10 and angle < 10) - - t = transforms.RandomRotation((-10, 10)) - angle = t.get_params(t.degrees) - self.assertTrue(angle > -10 and angle < 10) - - # Checking if RandomRotation can be printed as string - t.__repr__() - - def test_random_affine(self): - - with self.assertRaises(ValueError): - transforms.RandomAffine(-0.7) - transforms.RandomAffine([-0.7]) - transforms.RandomAffine([-0.7, 0, 0.7]) - - transforms.RandomAffine([-90, 90], translate=2.0) - transforms.RandomAffine([-90, 90], translate=[-1.0, 1.0]) - transforms.RandomAffine([-90, 90], translate=[-1.0, 0.0, 1.0]) - - transforms.RandomAffine([-90, 90], translate=[0.2, 0.2], scale=[0.0]) - transforms.RandomAffine([-90, 90], translate=[0.2, 0.2], scale=[-1.0, 1.0]) - transforms.RandomAffine([-90, 90], translate=[0.2, 0.2], scale=[0.5, -0.5]) - transforms.RandomAffine([-90, 90], translate=[0.2, 0.2], scale=[0.5, 3.0, -0.5]) - - transforms.RandomAffine([-90, 90], translate=[0.2, 0.2], scale=[0.5, 0.5], shear=-7) - transforms.RandomAffine([-90, 90], translate=[0.2, 0.2], scale=[0.5, 0.5], 
shear=[-10]) - transforms.RandomAffine([-90, 90], translate=[0.2, 0.2], scale=[0.5, 0.5], shear=[-10, 0, 10]) - transforms.RandomAffine([-90, 90], translate=[0.2, 0.2], scale=[0.5, 0.5], shear=[-10, 0, 10, 0, 10]) - - x = np.zeros((100, 100, 3), dtype=np.uint8) - img = F.to_pil_image(x) - - t = transforms.RandomAffine(10, translate=[0.5, 0.3], scale=[0.7, 1.3], shear=[-10, 10, 20, 40]) - for _ in range(100): - angle, translations, scale, shear = t.get_params(t.degrees, t.translate, t.scale, t.shear, - img_size=img.size) - self.assertTrue(-10 < angle < 10) - self.assertTrue(-img.size[0] * 0.5 <= translations[0] <= img.size[0] * 0.5, - "{} vs {}".format(translations[0], img.size[0] * 0.5)) - self.assertTrue(-img.size[1] * 0.5 <= translations[1] <= img.size[1] * 0.5, - "{} vs {}".format(translations[1], img.size[1] * 0.5)) - self.assertTrue(0.7 < scale < 1.3) - self.assertTrue(-10 < shear[0] < 10) - self.assertTrue(-20 < shear[1] < 40) - - # Checking if RandomAffine can be printed as string - t.__repr__() - - t = transforms.RandomAffine(10, resample=Image.BILINEAR) - self.assertIn("Image.BILINEAR", t.__repr__()) - - def test_to_grayscale(self): - """Unit tests for grayscale transform""" - - x_shape = [2, 2, 3] - x_data = [0, 5, 13, 54, 135, 226, 37, 8, 234, 90, 255, 1] - x_np = np.array(x_data, dtype=np.uint8).reshape(x_shape) - x_pil = Image.fromarray(x_np, mode='RGB') - x_pil_2 = x_pil.convert('L') - gray_np = np.array(x_pil_2) - - # Test Set: Grayscale an image with desired number of output channels - # Case 1: RGB -> 1 channel grayscale - trans1 = transforms.Grayscale(num_output_channels=1) - gray_pil_1 = trans1(x_pil) - gray_np_1 = np.array(gray_pil_1) - self.assertEqual(gray_pil_1.mode, 'L', 'mode should be L') - self.assertEqual(gray_np_1.shape, tuple(x_shape[0:2]), 'should be 1 channel') - np.testing.assert_equal(gray_np, gray_np_1) - - # Case 2: RGB -> 3 channel grayscale - trans2 = transforms.Grayscale(num_output_channels=3) - gray_pil_2 = trans2(x_pil) - gray_np_2 = np.array(gray_pil_2) - self.assertEqual(gray_pil_2.mode, 'RGB', 'mode should be RGB') - self.assertEqual(gray_np_2.shape, tuple(x_shape), 'should be 3 channel') - np.testing.assert_equal(gray_np_2[:, :, 0], gray_np_2[:, :, 1]) - np.testing.assert_equal(gray_np_2[:, :, 1], gray_np_2[:, :, 2]) - np.testing.assert_equal(gray_np, gray_np_2[:, :, 0]) - - # Case 3: 1 channel grayscale -> 1 channel grayscale - trans3 = transforms.Grayscale(num_output_channels=1) - gray_pil_3 = trans3(x_pil_2) - gray_np_3 = np.array(gray_pil_3) - self.assertEqual(gray_pil_3.mode, 'L', 'mode should be L') - self.assertEqual(gray_np_3.shape, tuple(x_shape[0:2]), 'should be 1 channel') - np.testing.assert_equal(gray_np, gray_np_3) - - # Case 4: 1 channel grayscale -> 3 channel grayscale - trans4 = transforms.Grayscale(num_output_channels=3) - gray_pil_4 = trans4(x_pil_2) - gray_np_4 = np.array(gray_pil_4) - self.assertEqual(gray_pil_4.mode, 'RGB', 'mode should be RGB') - self.assertEqual(gray_np_4.shape, tuple(x_shape), 'should be 3 channel') - np.testing.assert_equal(gray_np_4[:, :, 0], gray_np_4[:, :, 1]) - np.testing.assert_equal(gray_np_4[:, :, 1], gray_np_4[:, :, 2]) - np.testing.assert_equal(gray_np, gray_np_4[:, :, 0]) - - # Checking if Grayscale can be printed as string - trans4.__repr__() - - @unittest.skipIf(stats is None, 'scipy.stats not available') - def test_random_grayscale(self): - """Unit tests for random grayscale transform""" - - # Test Set 1: RGB -> 3 channel grayscale - random_state = random.getstate() - random.seed(42) - 
x_shape = [2, 2, 3] - x_np = np.random.randint(0, 256, x_shape, np.uint8) - x_pil = Image.fromarray(x_np, mode='RGB') - x_pil_2 = x_pil.convert('L') - gray_np = np.array(x_pil_2) - - num_samples = 250 - num_gray = 0 - for _ in range(num_samples): - gray_pil_2 = transforms.RandomGrayscale(p=0.5)(x_pil) - gray_np_2 = np.array(gray_pil_2) - if np.array_equal(gray_np_2[:, :, 0], gray_np_2[:, :, 1]) and \ - np.array_equal(gray_np_2[:, :, 1], gray_np_2[:, :, 2]) and \ - np.array_equal(gray_np, gray_np_2[:, :, 0]): - num_gray = num_gray + 1 - - p_value = stats.binom_test(num_gray, num_samples, p=0.5) - random.setstate(random_state) - self.assertGreater(p_value, 0.0001) - - # Test Set 2: grayscale -> 1 channel grayscale - random_state = random.getstate() - random.seed(42) - x_shape = [2, 2, 3] - x_np = np.random.randint(0, 256, x_shape, np.uint8) - x_pil = Image.fromarray(x_np, mode='RGB') - x_pil_2 = x_pil.convert('L') - gray_np = np.array(x_pil_2) - - num_samples = 250 - num_gray = 0 - for _ in range(num_samples): - gray_pil_3 = transforms.RandomGrayscale(p=0.5)(x_pil_2) - gray_np_3 = np.array(gray_pil_3) - if np.array_equal(gray_np, gray_np_3): - num_gray = num_gray + 1 - - p_value = stats.binom_test(num_gray, num_samples, p=1.0) # Note: grayscale is always unchanged - random.setstate(random_state) - self.assertGreater(p_value, 0.0001) - - # Test set 3: Explicit tests - x_shape = [2, 2, 3] - x_data = [0, 5, 13, 54, 135, 226, 37, 8, 234, 90, 255, 1] - x_np = np.array(x_data, dtype=np.uint8).reshape(x_shape) - x_pil = Image.fromarray(x_np, mode='RGB') - x_pil_2 = x_pil.convert('L') - gray_np = np.array(x_pil_2) - - # Case 3a: RGB -> 3 channel grayscale (grayscaled) - trans2 = transforms.RandomGrayscale(p=1.0) - gray_pil_2 = trans2(x_pil) - gray_np_2 = np.array(gray_pil_2) - self.assertEqual(gray_pil_2.mode, 'RGB', 'mode should be RGB') - self.assertEqual(gray_np_2.shape, tuple(x_shape), 'should be 3 channel') - np.testing.assert_equal(gray_np_2[:, :, 0], gray_np_2[:, :, 1]) - np.testing.assert_equal(gray_np_2[:, :, 1], gray_np_2[:, :, 2]) - np.testing.assert_equal(gray_np, gray_np_2[:, :, 0]) - - # Case 3b: RGB -> 3 channel grayscale (unchanged) - trans2 = transforms.RandomGrayscale(p=0.0) - gray_pil_2 = trans2(x_pil) - gray_np_2 = np.array(gray_pil_2) - self.assertEqual(gray_pil_2.mode, 'RGB', 'mode should be RGB') - self.assertEqual(gray_np_2.shape, tuple(x_shape), 'should be 3 channel') - np.testing.assert_equal(x_np, gray_np_2) - - # Case 3c: 1 channel grayscale -> 1 channel grayscale (grayscaled) - trans3 = transforms.RandomGrayscale(p=1.0) - gray_pil_3 = trans3(x_pil_2) - gray_np_3 = np.array(gray_pil_3) - self.assertEqual(gray_pil_3.mode, 'L', 'mode should be L') - self.assertEqual(gray_np_3.shape, tuple(x_shape[0:2]), 'should be 1 channel') - np.testing.assert_equal(gray_np, gray_np_3) - - # Case 3d: 1 channel grayscale -> 1 channel grayscale (unchanged) - trans3 = transforms.RandomGrayscale(p=0.0) - gray_pil_3 = trans3(x_pil_2) - gray_np_3 = np.array(gray_pil_3) - self.assertEqual(gray_pil_3.mode, 'L', 'mode should be L') - self.assertEqual(gray_np_3.shape, tuple(x_shape[0:2]), 'should be 1 channel') - np.testing.assert_equal(gray_np, gray_np_3) - - # Checking if RandomGrayscale can be printed as string - trans3.__repr__() - - def test_random_erasing(self): - """Unit tests for random erasing transform""" - - img = torch.rand([3, 60, 60]) - - # Test Set 1: Erasing with int value - img_re = transforms.RandomErasing(value=0.2) - i, j, h, w, v = img_re.get_params(img, scale=img_re.scale, 
ratio=img_re.ratio, value=img_re.value) - img_output = F.erase(img, i, j, h, w, v) - self.assertEqual(img_output.size(0), 3) - - # Test Set 2: Check if the unerased region is preserved - orig_unerased = img.clone() - orig_unerased[:, i:i + h, j:j + w] = 0 - output_unerased = img_output.clone() - output_unerased[:, i:i + h, j:j + w] = 0 - self.assertTrue(torch.equal(orig_unerased, output_unerased)) - - # Test Set 3: Erasing with random value - img_re = transforms.RandomErasing(value='random')(img) - self.assertEqual(img_re.size(0), 3) - - # Test Set 4: Erasing with tuple value - img_re = transforms.RandomErasing(value=(0.2, 0.2, 0.2))(img) - self.assertEqual(img_re.size(0), 3) - - # Test Set 5: Testing the inplace behaviour - img_re = transforms.RandomErasing(value=(0.2), inplace=True)(img) - self.assertTrue(torch.equal(img_re, img)) - - # Test Set 6: Checking when no erased region is selected - img = torch.rand([3, 300, 1]) - img_re = transforms.RandomErasing(ratio=(0.1, 0.2), value='random')(img) - self.assertTrue(torch.equal(img_re, img)) - - -if __name__ == '__main__': - unittest.main() + shear = [45.0, 25.0] + self._test_transformation( + angle=0.0, translate=(0.0, 0.0), scale=1.0, shear=shear, pil_image=pil_image, input_img=input_img + ) + + # Test shear with top-left as center + shear = [45.0, 25.0] + self._test_transformation( + angle=0.0, + translate=(0.0, 0.0), + scale=1.0, + shear=shear, + pil_image=pil_image, + input_img=input_img, + center=[0, 0], + ) + + @pytest.mark.parametrize("angle", range(-90, 90, 36)) + @pytest.mark.parametrize("translate", range(-10, 10, 5)) + @pytest.mark.parametrize("scale", [0.77, 1.0, 1.27]) + @pytest.mark.parametrize("shear", range(-15, 15, 5)) + def test_transformation_range(self, angle, translate, scale, shear, pil_image, input_img): + self._test_transformation( + angle=angle, + translate=(translate, translate), + scale=scale, + shear=(shear, shear), + pil_image=pil_image, + input_img=input_img, + ) + + +def test_random_affine(): + + with pytest.raises(ValueError): + transforms.RandomAffine(-0.7) + with pytest.raises(ValueError): + transforms.RandomAffine([-0.7]) + with pytest.raises(ValueError): + transforms.RandomAffine([-0.7, 0, 0.7]) + with pytest.raises(TypeError): + transforms.RandomAffine([-90, 90], translate=2.0) + with pytest.raises(ValueError): + transforms.RandomAffine([-90, 90], translate=[-1.0, 1.0]) + with pytest.raises(ValueError): + transforms.RandomAffine([-90, 90], translate=[-1.0, 0.0, 1.0]) + + with pytest.raises(ValueError): + transforms.RandomAffine([-90, 90], translate=[0.2, 0.2], scale=[0.0]) + with pytest.raises(ValueError): + transforms.RandomAffine([-90, 90], translate=[0.2, 0.2], scale=[-1.0, 1.0]) + with pytest.raises(ValueError): + transforms.RandomAffine([-90, 90], translate=[0.2, 0.2], scale=[0.5, -0.5]) + with pytest.raises(ValueError): + transforms.RandomAffine([-90, 90], translate=[0.2, 0.2], scale=[0.5, 3.0, -0.5]) + + with pytest.raises(ValueError): + transforms.RandomAffine([-90, 90], translate=[0.2, 0.2], scale=[0.5, 0.5], shear=-7) + with pytest.raises(ValueError): + transforms.RandomAffine([-90, 90], translate=[0.2, 0.2], scale=[0.5, 0.5], shear=[-10]) + with pytest.raises(ValueError): + transforms.RandomAffine([-90, 90], translate=[0.2, 0.2], scale=[0.5, 0.5], shear=[-10, 0, 10]) + with pytest.raises(ValueError): + transforms.RandomAffine([-90, 90], translate=[0.2, 0.2], scale=[0.5, 0.5], shear=[-10, 0, 10, 0, 10]) + + # assert fill being either a Sequence or a Number + with pytest.raises(TypeError): + 
transforms.RandomAffine(0, fill={}) + + t = transforms.RandomAffine(0, fill=None) + assert t.fill == 0 + + x = np.zeros((100, 100, 3), dtype=np.uint8) + img = F.to_pil_image(x) + + t = transforms.RandomAffine(10, translate=[0.5, 0.3], scale=[0.7, 1.3], shear=[-10, 10, 20, 40]) + for _ in range(100): + angle, translations, scale, shear = t.get_params(t.degrees, t.translate, t.scale, t.shear, img_size=img.size) + assert -10 < angle < 10 + assert -img.size[0] * 0.5 <= translations[0] <= img.size[0] * 0.5 + assert -img.size[1] * 0.5 <= translations[1] <= img.size[1] * 0.5 + assert 0.7 < scale < 1.3 + assert -10 < shear[0] < 10 + assert -20 < shear[1] < 40 + + # Checking if RandomAffine can be printed as string + t.__repr__() + + t = transforms.RandomAffine(10, interpolation=transforms.InterpolationMode.BILINEAR) + assert "bilinear" in t.__repr__() + + t = transforms.RandomAffine(10, interpolation=Image.BILINEAR) + assert t.interpolation == transforms.InterpolationMode.BILINEAR + + +def test_elastic_transformation(): + with pytest.raises(TypeError, match=r"alpha should be float or a sequence of floats"): + transforms.ElasticTransform(alpha=True, sigma=2.0) + with pytest.raises(TypeError, match=r"alpha should be a sequence of floats"): + transforms.ElasticTransform(alpha=[1.0, True], sigma=2.0) + with pytest.raises(ValueError, match=r"alpha is a sequence its length should be 2"): + transforms.ElasticTransform(alpha=[1.0, 0.0, 1.0], sigma=2.0) + + with pytest.raises(TypeError, match=r"sigma should be float or a sequence of floats"): + transforms.ElasticTransform(alpha=2.0, sigma=True) + with pytest.raises(TypeError, match=r"sigma should be a sequence of floats"): + transforms.ElasticTransform(alpha=2.0, sigma=[1.0, True]) + with pytest.raises(ValueError, match=r"sigma is a sequence its length should be 2"): + transforms.ElasticTransform(alpha=2.0, sigma=[1.0, 0.0, 1.0]) + + t = transforms.transforms.ElasticTransform(alpha=2.0, sigma=2.0, interpolation=Image.BILINEAR) + assert t.interpolation == transforms.InterpolationMode.BILINEAR + + with pytest.raises(TypeError, match=r"fill should be int or float"): + transforms.ElasticTransform(alpha=1.0, sigma=1.0, fill={}) + + x = torch.randint(0, 256, (3, 32, 32), dtype=torch.uint8) + img = F.to_pil_image(x) + t = transforms.ElasticTransform(alpha=0.0, sigma=0.0) + transformed_img = t(img) + assert transformed_img == img + + # Smoke test on PIL images + t = transforms.ElasticTransform(alpha=0.5, sigma=0.23) + transformed_img = t(img) + assert isinstance(transformed_img, Image.Image) + + # Checking if ElasticTransform can be printed as string + t.__repr__() + + +def test_random_grayscale_with_grayscale_input(): + transform = transforms.RandomGrayscale(p=1.0) + + image_tensor = torch.randint(0, 256, (1, 16, 16), dtype=torch.uint8) + output_tensor = transform(image_tensor) + torch.testing.assert_close(output_tensor, image_tensor) + + image_pil = F.to_pil_image(image_tensor) + output_pil = transform(image_pil) + torch.testing.assert_close(F.pil_to_tensor(output_pil), image_tensor) + + +if __name__ == "__main__": + pytest.main([__file__]) diff --git a/test/test_transforms_tensor.py b/test/test_transforms_tensor.py new file mode 100644 index 00000000000..eac52dafc17 --- /dev/null +++ b/test/test_transforms_tensor.py @@ -0,0 +1,892 @@ +import os +import sys + +import numpy as np +import PIL.Image +import pytest +import torch +from common_utils import ( + _assert_approx_equal_tensor_to_pil, + _assert_equal_tensor_to_pil, + _create_data, + _create_data_batch, + 
assert_equal, + cpu_and_cuda, + float_dtypes, + get_tmp_dir, + int_dtypes, +) +from torchvision import transforms as T +from torchvision.transforms import functional as F, InterpolationMode +from torchvision.transforms.autoaugment import _apply_op + +NEAREST, NEAREST_EXACT, BILINEAR, BICUBIC = ( + InterpolationMode.NEAREST, + InterpolationMode.NEAREST_EXACT, + InterpolationMode.BILINEAR, + InterpolationMode.BICUBIC, +) + + +def _test_transform_vs_scripted(transform, s_transform, tensor, msg=None): + torch.manual_seed(12) + out1 = transform(tensor) + torch.manual_seed(12) + out2 = s_transform(tensor) + assert_equal(out1, out2, msg=msg) + + +def _test_transform_vs_scripted_on_batch(transform, s_transform, batch_tensors, msg=None): + torch.manual_seed(12) + transformed_batch = transform(batch_tensors) + + for i in range(len(batch_tensors)): + img_tensor = batch_tensors[i, ...] + torch.manual_seed(12) + transformed_img = transform(img_tensor) + assert_equal(transformed_img, transformed_batch[i, ...], msg=msg) + + torch.manual_seed(12) + s_transformed_batch = s_transform(batch_tensors) + assert_equal(transformed_batch, s_transformed_batch, msg=msg) + + +def _test_functional_op(f, device, channels=3, fn_kwargs=None, test_exact_match=True, **match_kwargs): + fn_kwargs = fn_kwargs or {} + + tensor, pil_img = _create_data(height=10, width=10, channels=channels, device=device) + transformed_tensor = f(tensor, **fn_kwargs) + transformed_pil_img = f(pil_img, **fn_kwargs) + if test_exact_match: + _assert_equal_tensor_to_pil(transformed_tensor, transformed_pil_img, **match_kwargs) + else: + _assert_approx_equal_tensor_to_pil(transformed_tensor, transformed_pil_img, **match_kwargs) + + +def _test_class_op(transform_cls, device, channels=3, meth_kwargs=None, test_exact_match=True, **match_kwargs): + meth_kwargs = meth_kwargs or {} + + # test for class interface + f = transform_cls(**meth_kwargs) + scripted_fn = torch.jit.script(f) + + tensor, pil_img = _create_data(26, 34, channels, device=device) + # set seed to reproduce the same transformation for tensor and PIL image + torch.manual_seed(12) + transformed_tensor = f(tensor) + torch.manual_seed(12) + transformed_pil_img = f(pil_img) + if test_exact_match: + _assert_equal_tensor_to_pil(transformed_tensor, transformed_pil_img, **match_kwargs) + else: + _assert_approx_equal_tensor_to_pil(transformed_tensor.float(), transformed_pil_img, **match_kwargs) + + torch.manual_seed(12) + transformed_tensor_script = scripted_fn(tensor) + assert_equal(transformed_tensor, transformed_tensor_script) + + batch_tensors = _create_data_batch(height=23, width=34, channels=channels, num_samples=4, device=device) + _test_transform_vs_scripted_on_batch(f, scripted_fn, batch_tensors) + + with get_tmp_dir() as tmp_dir: + scripted_fn.save(os.path.join(tmp_dir, f"t_{transform_cls.__name__}.pt")) + + +def _test_op(func, method, device, channels=3, fn_kwargs=None, meth_kwargs=None, test_exact_match=True, **match_kwargs): + _test_functional_op(func, device, channels, fn_kwargs, test_exact_match=test_exact_match, **match_kwargs) + _test_class_op(method, device, channels, meth_kwargs, test_exact_match=test_exact_match, **match_kwargs) + + +def _test_fn_save_load(fn, tmpdir): + scripted_fn = torch.jit.script(fn) + p = os.path.join(tmpdir, f"t_op_list_{getattr(fn, '__name__', fn.__class__.__name__)}.pt") + scripted_fn.save(p) + _ = torch.jit.load(p) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize( + "func,method,fn_kwargs,match_kwargs", + [ + (F.hflip, 
T.RandomHorizontalFlip, None, {}), + (F.vflip, T.RandomVerticalFlip, None, {}), + (F.invert, T.RandomInvert, None, {}), + (F.posterize, T.RandomPosterize, {"bits": 4}, {}), + (F.solarize, T.RandomSolarize, {"threshold": 192.0}, {}), + (F.adjust_sharpness, T.RandomAdjustSharpness, {"sharpness_factor": 2.0}, {}), + ( + F.autocontrast, + T.RandomAutocontrast, + None, + {"test_exact_match": False, "agg_method": "max", "tol": (1 + 1e-5), "allowed_percentage_diff": 0.05}, + ), + (F.equalize, T.RandomEqualize, None, {}), + ], +) +@pytest.mark.parametrize("channels", [1, 3]) +def test_random(func, method, device, channels, fn_kwargs, match_kwargs): + _test_op(func, method, device, channels, fn_kwargs, fn_kwargs, **match_kwargs) + + +@pytest.mark.parametrize("seed", range(10)) +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize("channels", [1, 3]) +class TestColorJitter: + @pytest.fixture(autouse=True) + def set_random_seed(self, seed): + torch.random.manual_seed(seed) + + @pytest.mark.parametrize("brightness", [0.1, 0.5, 1.0, 1.34, (0.3, 0.7), [0.4, 0.5]]) + def test_color_jitter_brightness(self, brightness, device, channels): + tol = 1.0 + 1e-10 + meth_kwargs = {"brightness": brightness} + _test_class_op( + T.ColorJitter, + meth_kwargs=meth_kwargs, + test_exact_match=False, + device=device, + tol=tol, + agg_method="max", + channels=channels, + ) + + @pytest.mark.parametrize("contrast", [0.2, 0.5, 1.0, 1.5, (0.3, 0.7), [0.4, 0.5]]) + def test_color_jitter_contrast(self, contrast, device, channels): + tol = 1.0 + 1e-10 + meth_kwargs = {"contrast": contrast} + _test_class_op( + T.ColorJitter, + meth_kwargs=meth_kwargs, + test_exact_match=False, + device=device, + tol=tol, + agg_method="max", + channels=channels, + ) + + @pytest.mark.parametrize("saturation", [0.5, 0.75, 1.0, 1.25, (0.3, 0.7), [0.3, 0.4]]) + def test_color_jitter_saturation(self, saturation, device, channels): + tol = 1.0 + 1e-10 + meth_kwargs = {"saturation": saturation} + _test_class_op( + T.ColorJitter, + meth_kwargs=meth_kwargs, + test_exact_match=False, + device=device, + tol=tol, + agg_method="max", + channels=channels, + ) + + @pytest.mark.parametrize("hue", [0.2, 0.5, (-0.2, 0.3), [-0.4, 0.5]]) + def test_color_jitter_hue(self, hue, device, channels): + meth_kwargs = {"hue": hue} + _test_class_op( + T.ColorJitter, + meth_kwargs=meth_kwargs, + test_exact_match=False, + device=device, + tol=16.1, + agg_method="max", + channels=channels, + ) + + def test_color_jitter_all(self, device, channels): + # All 4 parameters together + meth_kwargs = {"brightness": 0.2, "contrast": 0.2, "saturation": 0.2, "hue": 0.2} + _test_class_op( + T.ColorJitter, + meth_kwargs=meth_kwargs, + test_exact_match=False, + device=device, + tol=12.1, + agg_method="max", + channels=channels, + ) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize("m", ["constant", "edge", "reflect", "symmetric"]) +@pytest.mark.parametrize("mul", [1, -1]) +def test_pad(m, mul, device): + fill = 127 if m == "constant" else 0 + + # Test functional.pad (PIL and Tensor) with padding as single int + _test_functional_op(F.pad, fn_kwargs={"padding": mul * 2, "fill": fill, "padding_mode": m}, device=device) + # Test functional.pad and transforms.Pad with padding as [int, ] + fn_kwargs = meth_kwargs = { + "padding": [mul * 2], + "fill": fill, + "padding_mode": m, + } + _test_op(F.pad, T.Pad, device=device, fn_kwargs=fn_kwargs, meth_kwargs=meth_kwargs) + # Test functional.pad and transforms.Pad with padding as list + fn_kwargs = 
meth_kwargs = {"padding": [mul * 4, 4], "fill": fill, "padding_mode": m} + _test_op(F.pad, T.Pad, device=device, fn_kwargs=fn_kwargs, meth_kwargs=meth_kwargs) + # Test functional.pad and transforms.Pad with padding as tuple + fn_kwargs = meth_kwargs = {"padding": (mul * 2, 2, 2, mul * 2), "fill": fill, "padding_mode": m} + _test_op(F.pad, T.Pad, device=device, fn_kwargs=fn_kwargs, meth_kwargs=meth_kwargs) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +def test_crop(device): + fn_kwargs = {"top": 2, "left": 3, "height": 4, "width": 5} + # Test transforms.RandomCrop with size and padding as tuple + meth_kwargs = { + "size": (4, 5), + "padding": (4, 4), + "pad_if_needed": True, + } + _test_op(F.crop, T.RandomCrop, device=device, fn_kwargs=fn_kwargs, meth_kwargs=meth_kwargs) + + # Test transforms.functional.crop including outside the image area + fn_kwargs = {"top": -2, "left": 3, "height": 4, "width": 5} # top + _test_functional_op(F.crop, fn_kwargs=fn_kwargs, device=device) + + fn_kwargs = {"top": 1, "left": -3, "height": 4, "width": 5} # left + _test_functional_op(F.crop, fn_kwargs=fn_kwargs, device=device) + + fn_kwargs = {"top": 7, "left": 3, "height": 4, "width": 5} # bottom + _test_functional_op(F.crop, fn_kwargs=fn_kwargs, device=device) + + fn_kwargs = {"top": 3, "left": 8, "height": 4, "width": 5} # right + _test_functional_op(F.crop, fn_kwargs=fn_kwargs, device=device) + + fn_kwargs = {"top": -3, "left": -3, "height": 15, "width": 15} # all + _test_functional_op(F.crop, fn_kwargs=fn_kwargs, device=device) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize( + "padding_config", + [ + {"padding_mode": "constant", "fill": 0}, + {"padding_mode": "constant", "fill": 10}, + {"padding_mode": "edge"}, + {"padding_mode": "reflect"}, + ], +) +@pytest.mark.parametrize("pad_if_needed", [True, False]) +@pytest.mark.parametrize("padding", [[5], [5, 4], [1, 2, 3, 4]]) +@pytest.mark.parametrize("size", [5, [5], [6, 6]]) +def test_random_crop(size, padding, pad_if_needed, padding_config, device): + config = dict(padding_config) + config["size"] = size + config["padding"] = padding + config["pad_if_needed"] = pad_if_needed + _test_class_op(T.RandomCrop, device, meth_kwargs=config) + + +def test_random_crop_save_load(tmpdir): + fn = T.RandomCrop(32, [4], pad_if_needed=True) + _test_fn_save_load(fn, tmpdir) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +def test_center_crop(device, tmpdir): + fn_kwargs = {"output_size": (4, 5)} + meth_kwargs = {"size": (4, 5)} + _test_op(F.center_crop, T.CenterCrop, device=device, fn_kwargs=fn_kwargs, meth_kwargs=meth_kwargs) + fn_kwargs = {"output_size": (5,)} + meth_kwargs = {"size": (5,)} + _test_op(F.center_crop, T.CenterCrop, device=device, fn_kwargs=fn_kwargs, meth_kwargs=meth_kwargs) + tensor = torch.randint(0, 256, (3, 10, 10), dtype=torch.uint8, device=device) + # Test torchscript of transforms.CenterCrop with size as int + f = T.CenterCrop(size=5) + scripted_fn = torch.jit.script(f) + scripted_fn(tensor) + + # Test torchscript of transforms.CenterCrop with size as [int, ] + f = T.CenterCrop(size=[5]) + scripted_fn = torch.jit.script(f) + scripted_fn(tensor) + + # Test torchscript of transforms.CenterCrop with size as tuple + f = T.CenterCrop(size=(6, 6)) + scripted_fn = torch.jit.script(f) + scripted_fn(tensor) + + +def test_center_crop_save_load(tmpdir): + fn = T.CenterCrop(size=[5]) + _test_fn_save_load(fn, tmpdir) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize( + "fn, 
method, out_length", + [ + # test_five_crop + (F.five_crop, T.FiveCrop, 5), + # test_ten_crop + (F.ten_crop, T.TenCrop, 10), + ], +) +@pytest.mark.parametrize("size", [(5,), [5], (4, 5), [4, 5]]) +def test_x_crop(fn, method, out_length, size, device): + meth_kwargs = fn_kwargs = {"size": size} + scripted_fn = torch.jit.script(fn) + + tensor, pil_img = _create_data(height=20, width=20, device=device) + transformed_t_list = fn(tensor, **fn_kwargs) + transformed_p_list = fn(pil_img, **fn_kwargs) + assert len(transformed_t_list) == len(transformed_p_list) + assert len(transformed_t_list) == out_length + for transformed_tensor, transformed_pil_img in zip(transformed_t_list, transformed_p_list): + _assert_equal_tensor_to_pil(transformed_tensor, transformed_pil_img) + + transformed_t_list_script = scripted_fn(tensor.detach().clone(), **fn_kwargs) + assert len(transformed_t_list) == len(transformed_t_list_script) + assert len(transformed_t_list_script) == out_length + for transformed_tensor, transformed_tensor_script in zip(transformed_t_list, transformed_t_list_script): + assert_equal(transformed_tensor, transformed_tensor_script) + + # test for class interface + fn = method(**meth_kwargs) + scripted_fn = torch.jit.script(fn) + output = scripted_fn(tensor) + assert len(output) == len(transformed_t_list_script) + + # test on batch of tensors + batch_tensors = _create_data_batch(height=23, width=34, channels=3, num_samples=4, device=device) + torch.manual_seed(12) + transformed_batch_list = fn(batch_tensors) + + for i in range(len(batch_tensors)): + img_tensor = batch_tensors[i, ...] + torch.manual_seed(12) + transformed_img_list = fn(img_tensor) + for transformed_img, transformed_batch in zip(transformed_img_list, transformed_batch_list): + assert_equal(transformed_img, transformed_batch[i, ...]) + + +@pytest.mark.parametrize("method", ["FiveCrop", "TenCrop"]) +def test_x_crop_save_load(method, tmpdir): + fn = getattr(T, method)(size=[5]) + _test_fn_save_load(fn, tmpdir) + + +class TestResize: + @pytest.mark.parametrize("size", [32, 34, 35, 36, 38]) + def test_resize_int(self, size): + # TODO: Minimal check for bug-fix, improve this later + x = torch.rand(3, 32, 46) + t = T.Resize(size=size, antialias=True) + y = t(x) + # If size is an int, smaller edge of the image will be matched to this number. + # i.e, if height > width, then image will be rescaled to (size * height / width, size). 
+ assert isinstance(y, torch.Tensor) + assert y.shape[1] == size + assert y.shape[2] == int(size * 46 / 32) + + @pytest.mark.parametrize("device", cpu_and_cuda()) + @pytest.mark.parametrize("dt", [None, torch.float32, torch.float64]) + @pytest.mark.parametrize("size", [[32], [32, 32], (32, 32), [34, 35]]) + @pytest.mark.parametrize("max_size", [None, 35, 1000]) + @pytest.mark.parametrize("interpolation", [BILINEAR, BICUBIC, NEAREST, NEAREST_EXACT]) + def test_resize_scripted(self, dt, size, max_size, interpolation, device): + tensor, _ = _create_data(height=34, width=36, device=device) + batch_tensors = torch.randint(0, 256, size=(4, 3, 44, 56), dtype=torch.uint8, device=device) + + if dt is not None: + # This is a trivial cast to float of uint8 data to test all cases + tensor = tensor.to(dt) + if max_size is not None and len(size) != 1: + pytest.skip("Size should be an int or a sequence of length 1 if max_size is specified") + + transform = T.Resize(size=size, interpolation=interpolation, max_size=max_size, antialias=True) + s_transform = torch.jit.script(transform) + _test_transform_vs_scripted(transform, s_transform, tensor) + _test_transform_vs_scripted_on_batch(transform, s_transform, batch_tensors) + + def test_resize_save_load(self, tmpdir): + fn = T.Resize(size=[32], antialias=True) + _test_fn_save_load(fn, tmpdir) + + @pytest.mark.parametrize("device", cpu_and_cuda()) + @pytest.mark.parametrize("scale", [(0.7, 1.2), [0.7, 1.2]]) + @pytest.mark.parametrize("ratio", [(0.75, 1.333), [0.75, 1.333]]) + @pytest.mark.parametrize("size", [(32,), [44], [32], [32, 32], (32, 32), [44, 55]]) + @pytest.mark.parametrize("interpolation", [NEAREST, BILINEAR, BICUBIC, NEAREST_EXACT]) + @pytest.mark.parametrize("antialias", [None, True, False]) + def test_resized_crop(self, scale, ratio, size, interpolation, antialias, device): + + if antialias and interpolation in {NEAREST, NEAREST_EXACT}: + pytest.skip(f"Can not resize if interpolation mode is {interpolation} and antialias=True") + + tensor = torch.randint(0, 256, size=(3, 44, 56), dtype=torch.uint8, device=device) + batch_tensors = torch.randint(0, 256, size=(4, 3, 44, 56), dtype=torch.uint8, device=device) + transform = T.RandomResizedCrop( + size=size, scale=scale, ratio=ratio, interpolation=interpolation, antialias=antialias + ) + s_transform = torch.jit.script(transform) + _test_transform_vs_scripted(transform, s_transform, tensor) + _test_transform_vs_scripted_on_batch(transform, s_transform, batch_tensors) + + def test_resized_crop_save_load(self, tmpdir): + fn = T.RandomResizedCrop(size=[32], antialias=True) + _test_fn_save_load(fn, tmpdir) + + +def _test_random_affine_helper(device, **kwargs): + tensor = torch.randint(0, 256, size=(3, 44, 56), dtype=torch.uint8, device=device) + batch_tensors = torch.randint(0, 256, size=(4, 3, 44, 56), dtype=torch.uint8, device=device) + transform = T.RandomAffine(**kwargs) + s_transform = torch.jit.script(transform) + + _test_transform_vs_scripted(transform, s_transform, tensor) + _test_transform_vs_scripted_on_batch(transform, s_transform, batch_tensors) + + +def test_random_affine_save_load(tmpdir): + fn = T.RandomAffine(degrees=45.0) + _test_fn_save_load(fn, tmpdir) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize("interpolation", [NEAREST, BILINEAR]) +@pytest.mark.parametrize("shear", [15, 10.0, (5.0, 10.0), [-15, 15], [-10.0, 10.0, -11.0, 11.0]]) +def test_random_affine_shear(device, interpolation, shear): + _test_random_affine_helper(device, degrees=0.0, 
interpolation=interpolation, shear=shear) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize("interpolation", [NEAREST, BILINEAR]) +@pytest.mark.parametrize("scale", [(0.7, 1.2), [0.7, 1.2]]) +def test_random_affine_scale(device, interpolation, scale): + _test_random_affine_helper(device, degrees=0.0, interpolation=interpolation, scale=scale) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize("interpolation", [NEAREST, BILINEAR]) +@pytest.mark.parametrize("translate", [(0.1, 0.2), [0.2, 0.1]]) +def test_random_affine_translate(device, interpolation, translate): + _test_random_affine_helper(device, degrees=0.0, interpolation=interpolation, translate=translate) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize("interpolation", [NEAREST, BILINEAR]) +@pytest.mark.parametrize("degrees", [45, 35.0, (-45, 45), [-90.0, 90.0]]) +def test_random_affine_degrees(device, interpolation, degrees): + _test_random_affine_helper(device, degrees=degrees, interpolation=interpolation) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize("interpolation", [NEAREST, BILINEAR]) +@pytest.mark.parametrize("fill", [85, (10, -10, 10), 0.7, [0.0, 0.0, 0.0], [1], 1]) +def test_random_affine_fill(device, interpolation, fill): + _test_random_affine_helper(device, degrees=0.0, interpolation=interpolation, fill=fill) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize("center", [(0, 0), [10, 10], None, (56, 44)]) +@pytest.mark.parametrize("expand", [True, False]) +@pytest.mark.parametrize("degrees", [45, 35.0, (-45, 45), [-90.0, 90.0]]) +@pytest.mark.parametrize("interpolation", [NEAREST, BILINEAR]) +@pytest.mark.parametrize("fill", [85, (10, -10, 10), 0.7, [0.0, 0.0, 0.0], [1], 1]) +def test_random_rotate(device, center, expand, degrees, interpolation, fill): + tensor = torch.randint(0, 256, size=(3, 44, 56), dtype=torch.uint8, device=device) + batch_tensors = torch.randint(0, 256, size=(4, 3, 44, 56), dtype=torch.uint8, device=device) + + transform = T.RandomRotation(degrees=degrees, interpolation=interpolation, expand=expand, center=center, fill=fill) + s_transform = torch.jit.script(transform) + + _test_transform_vs_scripted(transform, s_transform, tensor) + _test_transform_vs_scripted_on_batch(transform, s_transform, batch_tensors) + + +def test_random_rotate_save_load(tmpdir): + fn = T.RandomRotation(degrees=45.0) + _test_fn_save_load(fn, tmpdir) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize("distortion_scale", np.linspace(0.1, 1.0, num=20)) +@pytest.mark.parametrize("interpolation", [NEAREST, BILINEAR]) +@pytest.mark.parametrize("fill", [85, (10, -10, 10), 0.7, [0.0, 0.0, 0.0], [1], 1]) +def test_random_perspective(device, distortion_scale, interpolation, fill): + tensor = torch.randint(0, 256, size=(3, 44, 56), dtype=torch.uint8, device=device) + batch_tensors = torch.randint(0, 256, size=(4, 3, 44, 56), dtype=torch.uint8, device=device) + + transform = T.RandomPerspective(distortion_scale=distortion_scale, interpolation=interpolation, fill=fill) + s_transform = torch.jit.script(transform) + + _test_transform_vs_scripted(transform, s_transform, tensor) + _test_transform_vs_scripted_on_batch(transform, s_transform, batch_tensors) + + +def test_random_perspective_save_load(tmpdir): + fn = T.RandomPerspective() + _test_fn_save_load(fn, tmpdir) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize( + 
"Klass, meth_kwargs", + [(T.Grayscale, {"num_output_channels": 1}), (T.Grayscale, {"num_output_channels": 3}), (T.RandomGrayscale, {})], +) +def test_to_grayscale(device, Klass, meth_kwargs): + tol = 1.0 + 1e-10 + _test_class_op(Klass, meth_kwargs=meth_kwargs, test_exact_match=False, device=device, tol=tol, agg_method="max") + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize("in_dtype", int_dtypes() + float_dtypes()) +@pytest.mark.parametrize("out_dtype", int_dtypes() + float_dtypes()) +def test_convert_image_dtype(device, in_dtype, out_dtype): + tensor, _ = _create_data(26, 34, device=device) + batch_tensors = torch.rand(4, 3, 44, 56, device=device) + + in_tensor = tensor.to(in_dtype) + in_batch_tensors = batch_tensors.to(in_dtype) + + fn = T.ConvertImageDtype(dtype=out_dtype) + scripted_fn = torch.jit.script(fn) + + if (in_dtype == torch.float32 and out_dtype in (torch.int32, torch.int64)) or ( + in_dtype == torch.float64 and out_dtype == torch.int64 + ): + with pytest.raises(RuntimeError, match=r"cannot be performed safely"): + _test_transform_vs_scripted(fn, scripted_fn, in_tensor) + with pytest.raises(RuntimeError, match=r"cannot be performed safely"): + _test_transform_vs_scripted_on_batch(fn, scripted_fn, in_batch_tensors) + return + + _test_transform_vs_scripted(fn, scripted_fn, in_tensor) + _test_transform_vs_scripted_on_batch(fn, scripted_fn, in_batch_tensors) + + +def test_convert_image_dtype_save_load(tmpdir): + fn = T.ConvertImageDtype(dtype=torch.uint8) + _test_fn_save_load(fn, tmpdir) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize("policy", [policy for policy in T.AutoAugmentPolicy]) +@pytest.mark.parametrize("fill", [None, 85, (10, -10, 10), 0.7, [0.0, 0.0, 0.0], [1], 1]) +def test_autoaugment(device, policy, fill): + tensor = torch.randint(0, 256, size=(3, 44, 56), dtype=torch.uint8, device=device) + batch_tensors = torch.randint(0, 256, size=(4, 3, 44, 56), dtype=torch.uint8, device=device) + + transform = T.AutoAugment(policy=policy, fill=fill) + s_transform = torch.jit.script(transform) + for _ in range(25): + _test_transform_vs_scripted(transform, s_transform, tensor) + _test_transform_vs_scripted_on_batch(transform, s_transform, batch_tensors) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize("num_ops", [1, 2, 3]) +@pytest.mark.parametrize("magnitude", [7, 9, 11]) +@pytest.mark.parametrize("fill", [None, 85, (10, -10, 10), 0.7, [0.0, 0.0, 0.0], [1], 1]) +def test_randaugment(device, num_ops, magnitude, fill): + tensor = torch.randint(0, 256, size=(3, 44, 56), dtype=torch.uint8, device=device) + batch_tensors = torch.randint(0, 256, size=(4, 3, 44, 56), dtype=torch.uint8, device=device) + + transform = T.RandAugment(num_ops=num_ops, magnitude=magnitude, fill=fill) + s_transform = torch.jit.script(transform) + for _ in range(25): + _test_transform_vs_scripted(transform, s_transform, tensor) + _test_transform_vs_scripted_on_batch(transform, s_transform, batch_tensors) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize("fill", [None, 85, (10, -10, 10), 0.7, [0.0, 0.0, 0.0], [1], 1]) +def test_trivialaugmentwide(device, fill): + tensor = torch.randint(0, 256, size=(3, 44, 56), dtype=torch.uint8, device=device) + batch_tensors = torch.randint(0, 256, size=(4, 3, 44, 56), dtype=torch.uint8, device=device) + + transform = T.TrivialAugmentWide(fill=fill) + s_transform = torch.jit.script(transform) + for _ in range(25): + 
_test_transform_vs_scripted(transform, s_transform, tensor) + _test_transform_vs_scripted_on_batch(transform, s_transform, batch_tensors) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize("fill", [None, 85, (10, -10, 10), 0.7, [0.0, 0.0, 0.0], [1], 1]) +def test_augmix(device, fill): + tensor = torch.randint(0, 256, size=(3, 44, 56), dtype=torch.uint8, device=device) + batch_tensors = torch.randint(0, 256, size=(4, 3, 44, 56), dtype=torch.uint8, device=device) + + class DeterministicAugMix(T.AugMix): + def _sample_dirichlet(self, params: torch.Tensor) -> torch.Tensor: + # patch the method to ensure that the order of rand calls doesn't affect the outcome + return params.softmax(dim=-1) + + transform = DeterministicAugMix(fill=fill) + s_transform = torch.jit.script(transform) + for _ in range(25): + _test_transform_vs_scripted(transform, s_transform, tensor) + _test_transform_vs_scripted_on_batch(transform, s_transform, batch_tensors) + + +@pytest.mark.parametrize("augmentation", [T.AutoAugment, T.RandAugment, T.TrivialAugmentWide, T.AugMix]) +def test_autoaugment_save_load(augmentation, tmpdir): + fn = augmentation() + _test_fn_save_load(fn, tmpdir) + + +@pytest.mark.parametrize("interpolation", [F.InterpolationMode.NEAREST, F.InterpolationMode.BILINEAR]) +@pytest.mark.parametrize("mode", ["X", "Y"]) +def test_autoaugment__op_apply_shear(interpolation, mode): + # We check that torchvision's implementation of shear is equivalent + # to official CIFAR10 autoaugment implementation: + # https://github.com/tensorflow/models/blob/885fda091c46c59d6c7bb5c7e760935eacc229da/research/autoaugment/augmentation_transforms.py#L273-L290 + image_size = 32 + + def shear(pil_img, level, mode, resample): + if mode == "X": + matrix = (1, level, 0, 0, 1, 0) + elif mode == "Y": + matrix = (1, 0, 0, level, 1, 0) + return pil_img.transform((image_size, image_size), PIL.Image.AFFINE, matrix, resample=resample) + + t_img, pil_img = _create_data(image_size, image_size) + + resample_pil = { + F.InterpolationMode.NEAREST: PIL.Image.NEAREST, + F.InterpolationMode.BILINEAR: PIL.Image.BILINEAR, + }[interpolation] + + level = 0.3 + expected_out = shear(pil_img, level, mode=mode, resample=resample_pil) + + # Check pil output vs expected pil + out = _apply_op(pil_img, op_name=f"Shear{mode}", magnitude=level, interpolation=interpolation, fill=0) + assert out == expected_out + + if interpolation == F.InterpolationMode.BILINEAR: + # We skip bilinear mode for tensors as + # affine transformation results are not exactly the same + # between tensors and pil images + # MAE as around 1.40 + # Max Abs error can be 163 or 170 + return + + # Check tensor output vs expected pil + out = _apply_op(t_img, op_name=f"Shear{mode}", magnitude=level, interpolation=interpolation, fill=0) + _assert_approx_equal_tensor_to_pil(out, expected_out) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize( + "config", + [ + {}, + {"value": 1}, + {"value": 0.2}, + {"value": "random"}, + {"value": (1, 1, 1)}, + {"value": (0.2, 0.2, 0.2)}, + {"value": [1, 1, 1]}, + {"value": [0.2, 0.2, 0.2]}, + {"value": "random", "ratio": (0.1, 0.2)}, + ], +) +def test_random_erasing(device, config): + tensor, _ = _create_data(24, 32, channels=3, device=device) + batch_tensors = torch.rand(4, 3, 44, 56, device=device) + + fn = T.RandomErasing(**config) + scripted_fn = torch.jit.script(fn) + _test_transform_vs_scripted(fn, scripted_fn, tensor) + _test_transform_vs_scripted_on_batch(fn, scripted_fn, batch_tensors) + + 
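As background for the shear-equivalence test above: PIL's `Image.transform` with an AFFINE matrix `(a, b, c, d, e, f)` maps each *output* pixel `(x, y)` back to the *input* location `(a*x + b*y + c, d*x + e*y + f)`, so the ShearX matrix `(1, level, 0, 0, 1, 0)` samples the input at `(x + level*y, y)`. A tiny self-check of that convention (illustrative names only):

def affine_source(matrix, x, y):
    # Where PIL's Image.transform(..., AFFINE, matrix) reads the input pixel
    # that ends up at output position (x, y).
    a, b, c, d, e, f = matrix
    return a * x + b * y + c, d * x + e * y + f

shear_x = (1, 0.3, 0, 0, 1, 0)  # the level=0.3 case exercised in the test
assert affine_source(shear_x, 10, 20) == (16.0, 20.0)  # x shifted by level * y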
+def test_random_erasing_save_load(tmpdir): + fn = T.RandomErasing(value=0.2) + _test_fn_save_load(fn, tmpdir) + + +def test_random_erasing_with_invalid_data(): + img = torch.rand(3, 60, 60) + # Test Set 0: invalid value + random_erasing = T.RandomErasing(value=(0.1, 0.2, 0.3, 0.4), p=1.0) + with pytest.raises(ValueError, match="If value is a sequence, it should have either a single value or 3"): + random_erasing(img) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +def test_normalize(device, tmpdir): + fn = T.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)) + tensor, _ = _create_data(26, 34, device=device) + + with pytest.raises(TypeError, match="Input tensor should be a float tensor"): + fn(tensor) + + batch_tensors = torch.rand(4, 3, 44, 56, device=device) + tensor = tensor.to(dtype=torch.float32) / 255.0 + # test for class interface + scripted_fn = torch.jit.script(fn) + + _test_transform_vs_scripted(fn, scripted_fn, tensor) + _test_transform_vs_scripted_on_batch(fn, scripted_fn, batch_tensors) + + scripted_fn.save(os.path.join(tmpdir, "t_norm.pt")) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +def test_linear_transformation(device, tmpdir): + c, h, w = 3, 24, 32 + + tensor, _ = _create_data(h, w, channels=c, device=device) + + matrix = torch.rand(c * h * w, c * h * w, device=device) + mean_vector = torch.rand(c * h * w, device=device) + + fn = T.LinearTransformation(matrix, mean_vector) + scripted_fn = torch.jit.script(fn) + + _test_transform_vs_scripted(fn, scripted_fn, tensor) + + batch_tensors = torch.rand(4, c, h, w, device=device) + # We skip some tests from _test_transform_vs_scripted_on_batch as + # results for scripted and non-scripted transformations are not exactly the same + torch.manual_seed(12) + transformed_batch = fn(batch_tensors) + torch.manual_seed(12) + s_transformed_batch = scripted_fn(batch_tensors) + assert_equal(transformed_batch, s_transformed_batch) + + scripted_fn.save(os.path.join(tmpdir, "t_norm.pt")) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +def test_compose(device): + tensor, _ = _create_data(26, 34, device=device) + tensor = tensor.to(dtype=torch.float32) / 255.0 + transforms = T.Compose( + [ + T.CenterCrop(10), + T.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)), + ] + ) + s_transforms = torch.nn.Sequential(*transforms.transforms) + + scripted_fn = torch.jit.script(s_transforms) + torch.manual_seed(12) + transformed_tensor = transforms(tensor) + torch.manual_seed(12) + transformed_tensor_script = scripted_fn(tensor) + assert_equal(transformed_tensor, transformed_tensor_script, msg=f"{transforms}") + + t = T.Compose( + [ + lambda x: x, + ] + ) + with pytest.raises(RuntimeError, match="cannot call a value of type 'Tensor'"): + torch.jit.script(t) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +def test_random_apply(device): + tensor, _ = _create_data(26, 34, device=device) + tensor = tensor.to(dtype=torch.float32) / 255.0 + + transforms = T.RandomApply( + [ + T.RandomHorizontalFlip(), + T.ColorJitter(), + ], + p=0.4, + ) + s_transforms = T.RandomApply( + torch.nn.ModuleList( + [ + T.RandomHorizontalFlip(), + T.ColorJitter(), + ] + ), + p=0.4, + ) + + scripted_fn = torch.jit.script(s_transforms) + torch.manual_seed(12) + transformed_tensor = transforms(tensor) + torch.manual_seed(12) + transformed_tensor_script = scripted_fn(tensor) + assert_equal(transformed_tensor, transformed_tensor_script, msg=f"{transforms}") + + if device == "cpu": + # Can't check this twice, otherwise + # "Can't redefine method: forward 
on class: __torch__.torchvision.transforms.transforms.RandomApply" + transforms = T.RandomApply( + [ + T.ColorJitter(), + ], + p=0.3, + ) + with pytest.raises(RuntimeError, match="Module 'RandomApply' has no attribute 'transforms'"): + torch.jit.script(transforms) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize( + "meth_kwargs", + [ + {"kernel_size": 3, "sigma": 0.75}, + {"kernel_size": 23, "sigma": [0.1, 2.0]}, + {"kernel_size": 23, "sigma": (0.1, 2.0)}, + {"kernel_size": [3, 3], "sigma": (1.0, 1.0)}, + {"kernel_size": (3, 3), "sigma": (0.1, 2.0)}, + {"kernel_size": [23], "sigma": 0.75}, + ], +) +@pytest.mark.parametrize("channels", [1, 3]) +def test_gaussian_blur(device, channels, meth_kwargs): + if all( + [ + device == "cuda", + channels == 1, + meth_kwargs["kernel_size"] in [23, [23]], + torch.version.cuda == "11.3", + sys.platform in ("win32", "cygwin"), + ] + ): + pytest.skip("Fails on Windows, see https://github.com/pytorch/vision/issues/5464") + + tol = 1.0 + 1e-10 + torch.manual_seed(12) + _test_class_op( + T.GaussianBlur, + meth_kwargs=meth_kwargs, + channels=channels, + test_exact_match=False, + device=device, + agg_method="max", + tol=tol, + ) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +@pytest.mark.parametrize( + "fill", + [ + 1, + 1.0, + [1], + [1.0], + (1,), + (1.0,), + [1, 2, 3], + [1.0, 2.0, 3.0], + (1, 2, 3), + (1.0, 2.0, 3.0), + ], +) +@pytest.mark.parametrize("channels", [1, 3]) +def test_elastic_transform(device, channels, fill): + if isinstance(fill, (list, tuple)) and len(fill) > 1 and channels == 1: + # For this the test would correctly fail, since the number of channels in the image does not match `fill`. + # Thus, this is not an issue in the transform, but rather a problem of parametrization that just gives the + # product of `fill` and `channels`. 
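The early return that follows skips parametrizations where a multi-value fill meets a single-channel image; the underlying constraint is simply that a sequence fill needs one entry per channel. A small illustrative predicate (hypothetical name) mirroring that rule:

def fill_matches_channels(fill, channels: int) -> bool:
    # Scalar fills broadcast to any channel count; longer sequences must match exactly.
    if isinstance(fill, (list, tuple)) and len(fill) > 1:
        return len(fill) == channels
    return True

assert fill_matches_channels(1.0, channels=1)
assert fill_matches_channels((1, 2, 3), channels=3)
assert not fill_matches_channels((1, 2, 3), channels=1)  # the skipped combinations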
+ return + + _test_class_op( + T.ElasticTransform, + meth_kwargs=dict(fill=fill), + channels=channels, + device=device, + ) diff --git a/test/test_transforms_v2.py b/test/test_transforms_v2.py new file mode 100644 index 00000000000..fb49525ecfe --- /dev/null +++ b/test/test_transforms_v2.py @@ -0,0 +1,6218 @@ +import contextlib +import decimal +import functools +import inspect +import itertools +import math +import pickle +import random +import re +import sys +from copy import deepcopy +from pathlib import Path +from unittest import mock + +import numpy as np +import PIL.Image +import pytest + +import torch +import torchvision.ops +import torchvision.transforms.v2 as transforms + +from common_utils import ( + assert_equal, + cache, + cpu_and_cuda, + freeze_rng_state, + ignore_jit_no_profile_information_warning, + make_bounding_boxes, + make_detection_masks, + make_image, + make_image_pil, + make_image_tensor, + make_segmentation_mask, + make_video, + make_video_tensor, + needs_cuda, + set_rng_seed, +) + +from torch import nn +from torch.testing import assert_close +from torch.utils._pytree import tree_flatten, tree_map +from torch.utils.data import DataLoader, default_collate +from torchvision import tv_tensors +from torchvision.ops.boxes import box_iou + +from torchvision.transforms._functional_tensor import _max_value as get_max_value +from torchvision.transforms.functional import pil_modes_mapping, to_pil_image +from torchvision.transforms.v2 import functional as F +from torchvision.transforms.v2._utils import check_type, is_pure_tensor +from torchvision.transforms.v2.functional._geometry import _get_perspective_coeffs +from torchvision.transforms.v2.functional._utils import _get_kernel, _register_kernel_internal + + +# turns all warnings into errors for this module +pytestmark = [pytest.mark.filterwarnings("error")] + +if sys.version_info[:2] >= (3, 12): + # torchscript relies on some AST stuff that got deprecated in 3.12, + # so we have to explicitly ignore those otherwise we'd error on warnings due to the pytestmark filter above. 
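For context on this warnings setup: the module-level `pytestmark` above escalates every warning into a test failure, and the `append` on the next line then carves out `DeprecationWarning` on Python 3.12+. The same mechanism in a standalone sketch, not tied to this suite:

import warnings

import pytest

pytestmark = [pytest.mark.filterwarnings("error")]  # escalate all warnings

def test_warning_becomes_error():
    # Under filterwarnings("error"), warnings.warn raises the warning class.
    with pytest.raises(UserWarning):
        warnings.warn("escalated", UserWarning)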
+    pytestmark.append(pytest.mark.filterwarnings("ignore::DeprecationWarning"))
+
+
+@pytest.fixture(autouse=True)
+def fix_rng_seed():
+    set_rng_seed(0)
+    yield
+
+
+def _to_tolerances(maybe_tolerance_dict):
+    if not isinstance(maybe_tolerance_dict, dict):
+        return dict(rtol=None, atol=None)
+
+    tolerances = dict(rtol=0, atol=0)
+    tolerances.update(maybe_tolerance_dict)
+    return tolerances
+
+
+def _check_kernel_cuda_vs_cpu(kernel, input, *args, rtol, atol, **kwargs):
+    """Checks if the kernel produces close results for inputs on GPU and CPU."""
+    if input.device.type != "cuda":
+        return
+
+    input_cuda = input.as_subclass(torch.Tensor)
+    input_cpu = input_cuda.to("cpu")
+
+    with freeze_rng_state():
+        actual = kernel(input_cuda, *args, **kwargs)
+    with freeze_rng_state():
+        expected = kernel(input_cpu, *args, **kwargs)
+
+    assert_close(actual, expected, check_device=False, rtol=rtol, atol=atol)
+
+
+@cache
+def _script(obj):
+    try:
+        return torch.jit.script(obj)
+    except Exception as error:
+        name = getattr(obj, "__name__", obj.__class__.__name__)
+        raise AssertionError(f"Trying to `torch.jit.script` `{name}` raised the error above.") from error
+
+
+def _check_kernel_scripted_vs_eager(kernel, input, *args, rtol, atol, **kwargs):
+    """Checks if the kernel is scriptable and if the scripted output is close to the eager one."""
+    if input.device.type != "cpu":
+        return
+
+    kernel_scripted = _script(kernel)
+
+    input = input.as_subclass(torch.Tensor)
+    with ignore_jit_no_profile_information_warning():
+        with freeze_rng_state():
+            actual = kernel_scripted(input, *args, **kwargs)
+    with freeze_rng_state():
+        expected = kernel(input, *args, **kwargs)
+
+    assert_close(actual, expected, rtol=rtol, atol=atol)
+
+
+def _check_kernel_batched_vs_unbatched(kernel, input, *args, rtol, atol, **kwargs):
+    """Checks if the kernel produces close results for batched and unbatched inputs."""
+    unbatched_input = input.as_subclass(torch.Tensor)
+
+    for batch_dims in [(2,), (2, 1)]:
+        repeats = [*batch_dims, *[1] * input.ndim]
+
+        actual = kernel(unbatched_input.repeat(repeats), *args, **kwargs)
+
+        expected = kernel(unbatched_input, *args, **kwargs)
+        # We can't directly call `.repeat()` on the output, since some kernels also return additional metadata
+        if isinstance(expected, torch.Tensor):
+            expected = expected.repeat(repeats)
+        else:
+            tensor, *metadata = expected
+            expected = (tensor.repeat(repeats), *metadata)
+
+        assert_close(actual, expected, rtol=rtol, atol=atol)
+
+    for degenerate_batch_dims in [(0,), (5, 0), (0, 5)]:
+        degenerate_batched_input = torch.empty(
+            degenerate_batch_dims + input.shape, dtype=input.dtype, device=input.device
+        )
+
+        output = kernel(degenerate_batched_input, *args, **kwargs)
+        # Most kernels just return a tensor, but some also return some additional metadata
+        if not isinstance(output, torch.Tensor):
+            output, *_ = output
+
+        assert output.shape[: -input.ndim] == degenerate_batch_dims
+
+
+def check_kernel(
+    kernel,
+    input,
+    *args,
+    check_cuda_vs_cpu=True,
+    check_scripted_vs_eager=True,
+    check_batched_vs_unbatched=True,
+    **kwargs,
+):
+    initial_input_version = input._version
+
+    output = kernel(input.as_subclass(torch.Tensor), *args, **kwargs)
+    # Most kernels just return a tensor, but some also return some additional metadata
+    if not isinstance(output, torch.Tensor):
+        output, *_ = output
+
+    # check that no inplace operation happened
+    assert input._version == initial_input_version
+
+    if kernel not in {F.to_dtype_image, F.to_dtype_video}:
+        assert output.dtype
== input.dtype + assert output.device == input.device + + if check_cuda_vs_cpu: + _check_kernel_cuda_vs_cpu(kernel, input, *args, **kwargs, **_to_tolerances(check_cuda_vs_cpu)) + + if check_scripted_vs_eager: + _check_kernel_scripted_vs_eager(kernel, input, *args, **kwargs, **_to_tolerances(check_scripted_vs_eager)) + + if check_batched_vs_unbatched: + _check_kernel_batched_vs_unbatched(kernel, input, *args, **kwargs, **_to_tolerances(check_batched_vs_unbatched)) + + +def _check_functional_scripted_smoke(functional, input, *args, **kwargs): + """Checks if the functional can be scripted and the scripted version can be called without error.""" + if not isinstance(input, tv_tensors.Image): + return + + functional_scripted = _script(functional) + with ignore_jit_no_profile_information_warning(): + functional_scripted(input.as_subclass(torch.Tensor), *args, **kwargs) + + +def check_functional(functional, input, *args, check_scripted_smoke=True, **kwargs): + unknown_input = object() + with pytest.raises(TypeError, match=re.escape(str(type(unknown_input)))): + functional(unknown_input, *args, **kwargs) + + with mock.patch("torch._C._log_api_usage_once", wraps=torch._C._log_api_usage_once) as spy: + output = functional(input, *args, **kwargs) + + spy.assert_any_call(f"{functional.__module__}.{functional.__name__}") + + assert isinstance(output, type(input)) + + if isinstance(input, tv_tensors.BoundingBoxes) and functional is not F.convert_bounding_box_format: + assert output.format == input.format + + if check_scripted_smoke: + _check_functional_scripted_smoke(functional, input, *args, **kwargs) + + +def check_functional_kernel_signature_match(functional, *, kernel, input_type): + """Checks if the signature of the functional matches the kernel signature.""" + functional_params = list(inspect.signature(functional).parameters.values())[1:] + kernel_params = list(inspect.signature(kernel).parameters.values())[1:] + + if issubclass(input_type, tv_tensors.TVTensor): + # We filter out metadata that is implicitly passed to the functional through the input tv_tensor, but has to be + # explicitly passed to the kernel. + explicit_metadata = { + tv_tensors.BoundingBoxes: {"format", "canvas_size"}, + } + kernel_params = [param for param in kernel_params if param.name not in explicit_metadata.get(input_type, set())] + + functional_params = iter(functional_params) + for functional_param, kernel_param in zip(functional_params, kernel_params): + try: + # In general, the functional parameters are a superset of the kernel parameters. Thus, we filter out + # functional parameters that have no kernel equivalent while keeping the order intact. + while functional_param.name != kernel_param.name: + functional_param = next(functional_params) + except StopIteration: + raise AssertionError( + f"Parameter `{kernel_param.name}` of kernel `{kernel.__name__}` " + f"has no corresponding parameter on the functional `{functional.__name__}`." + ) from None + + if issubclass(input_type, PIL.Image.Image): + # PIL kernels often have more correct annotations, since they are not limited by JIT. Thus, we don't check + # them in the first place. 
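The signature comparison above works because `inspect.Parameter` equality covers name, kind, default, and annotation; the lines that follow blank out the annotations so that differing type hints alone do not fail the match. A standalone illustration of that equality, using the public `replace` API instead of the private `_annotation` attribute:

import inspect

def functional(x, size: int = 0): ...
def kernel(x, size=0): ...

fp = list(inspect.signature(functional).parameters.values())[1]
kp = list(inspect.signature(kernel).parameters.values())[1]
assert fp != kp  # same name/kind/default, but the annotations differ

fp = fp.replace(annotation=inspect.Parameter.empty)
assert fp == kp  # equal once annotations are neutralized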
+ functional_param._annotation = kernel_param._annotation = inspect.Parameter.empty + + assert functional_param == kernel_param + + +def _check_transform_v1_compatibility(transform, input, *, rtol, atol): + """If the transform defines the ``_v1_transform_cls`` attribute, checks if the transform has a public, static + ``get_params`` method that is the v1 equivalent, the output is close to v1, is scriptable, and the scripted version + can be called without error.""" + if not (type(input) is torch.Tensor or isinstance(input, PIL.Image.Image)): + return + + v1_transform_cls = transform._v1_transform_cls + if v1_transform_cls is None: + return + + if hasattr(v1_transform_cls, "get_params"): + assert type(transform).get_params is v1_transform_cls.get_params + + v1_transform = v1_transform_cls(**transform._extract_params_for_v1_transform()) + + with freeze_rng_state(): + output_v2 = transform(input) + + with freeze_rng_state(): + output_v1 = v1_transform(input) + + assert_close(F.to_image(output_v2), F.to_image(output_v1), rtol=rtol, atol=atol) + + if isinstance(input, PIL.Image.Image): + return + + _script(v1_transform)(input) + + +def _make_transform_sample(transform, *, image_or_video, adapter): + device = image_or_video.device if isinstance(image_or_video, torch.Tensor) else "cpu" + size = F.get_size(image_or_video) + input = dict( + image_or_video=image_or_video, + image_tv_tensor=make_image(size, device=device), + video_tv_tensor=make_video(size, device=device), + image_pil=make_image_pil(size), + bounding_boxes_xyxy=make_bounding_boxes(size, format=tv_tensors.BoundingBoxFormat.XYXY, device=device), + bounding_boxes_xywh=make_bounding_boxes(size, format=tv_tensors.BoundingBoxFormat.XYWH, device=device), + bounding_boxes_cxcywh=make_bounding_boxes(size, format=tv_tensors.BoundingBoxFormat.CXCYWH, device=device), + bounding_boxes_degenerate_xyxy=tv_tensors.BoundingBoxes( + [ + [0, 0, 0, 0], # no height or width + [0, 0, 0, 1], # no height + [0, 0, 1, 0], # no width + [2, 0, 1, 1], # x1 > x2, y1 < y2 + [0, 2, 1, 1], # x1 < x2, y1 > y2 + [2, 2, 1, 1], # x1 > x2, y1 > y2 + ], + format=tv_tensors.BoundingBoxFormat.XYXY, + canvas_size=size, + device=device, + ), + bounding_boxes_degenerate_xywh=tv_tensors.BoundingBoxes( + [ + [0, 0, 0, 0], # no height or width + [0, 0, 0, 1], # no height + [0, 0, 1, 0], # no width + [0, 0, 1, -1], # negative height + [0, 0, -1, 1], # negative width + [0, 0, -1, -1], # negative height and width + ], + format=tv_tensors.BoundingBoxFormat.XYWH, + canvas_size=size, + device=device, + ), + bounding_boxes_degenerate_cxcywh=tv_tensors.BoundingBoxes( + [ + [0, 0, 0, 0], # no height or width + [0, 0, 0, 1], # no height + [0, 0, 1, 0], # no width + [0, 0, 1, -1], # negative height + [0, 0, -1, 1], # negative width + [0, 0, -1, -1], # negative height and width + ], + format=tv_tensors.BoundingBoxFormat.CXCYWH, + canvas_size=size, + device=device, + ), + detection_mask=make_detection_masks(size, device=device), + segmentation_mask=make_segmentation_mask(size, device=device), + int=0, + float=0.0, + bool=True, + none=None, + str="str", + path=Path.cwd(), + object=object(), + tensor=torch.empty(5), + array=np.empty(5), + ) + if adapter is not None: + input = adapter(transform, input, device) + return input + + +def _check_transform_sample_input_smoke(transform, input, *, adapter): + # This is a bunch of input / output convention checks, using a big sample with different parts as input. 
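The convention checks below lean on `torch.utils._pytree`: flattening both input and output and comparing the specs asserts that a transform returns exactly the container structure it was given. A minimal sketch of that idea (note that `_pytree` is a private API and may change):

import torch
from torch.utils._pytree import tree_flatten

sample = {"image": torch.zeros(3, 4, 4), "label": 7}
result = {"image": torch.ones(3, 4, 4), "label": 7}  # same structure, new leaves

in_leaves, in_spec = tree_flatten(sample)
out_leaves, out_spec = tree_flatten(result)
assert out_spec == in_spec  # container structure preserved
assert len(out_leaves) == len(in_leaves)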
+ + if not check_type(input, (is_pure_tensor, PIL.Image.Image, tv_tensors.Image, tv_tensors.Video)): + return + + sample = _make_transform_sample( + # adapter might change transform inplace + transform=transform if adapter is None else deepcopy(transform), + image_or_video=input, + adapter=adapter, + ) + for container_type in [dict, list, tuple]: + if container_type is dict: + input = sample + else: + input = container_type(sample.values()) + + input_flat, input_spec = tree_flatten(input) + + with freeze_rng_state(): + torch.manual_seed(0) + output = transform(input) + output_flat, output_spec = tree_flatten(output) + + assert output_spec == input_spec + + for output_item, input_item, should_be_transformed in zip( + output_flat, input_flat, transforms.Transform()._needs_transform_list(input_flat) + ): + if should_be_transformed: + assert type(output_item) is type(input_item) + else: + assert output_item is input_item + + # Enforce that the transform does not turn a degenerate bounding box, e.g. marked by RandomIoUCrop (or any other + # future transform that does this), back into a valid one. + for degenerate_bounding_boxes in ( + bounding_box + for name, bounding_box in sample.items() + if "degenerate" in name and isinstance(bounding_box, tv_tensors.BoundingBoxes) + ): + sample = dict( + boxes=degenerate_bounding_boxes, + labels=torch.randint(10, (degenerate_bounding_boxes.shape[0],), device=degenerate_bounding_boxes.device), + ) + assert transforms.SanitizeBoundingBoxes()(sample)["boxes"].shape == (0, 4) + + +def check_transform(transform, input, check_v1_compatibility=True, check_sample_input=True): + pickle.loads(pickle.dumps(transform)) + + output = transform(input) + assert isinstance(output, type(input)) + + if isinstance(input, tv_tensors.BoundingBoxes) and not isinstance(transform, transforms.ConvertBoundingBoxFormat): + assert output.format == input.format + + if check_sample_input: + _check_transform_sample_input_smoke( + transform, input, adapter=check_sample_input if callable(check_sample_input) else None + ) + + if check_v1_compatibility: + _check_transform_v1_compatibility(transform, input, **_to_tolerances(check_v1_compatibility)) + + return output + + +def transform_cls_to_functional(transform_cls, **transform_specific_kwargs): + def wrapper(input, *args, **kwargs): + transform = transform_cls(*args, **transform_specific_kwargs, **kwargs) + return transform(input) + + wrapper.__name__ = transform_cls.__name__ + + return wrapper + + +def param_value_parametrization(**kwargs): + """Helper function to turn + + @pytest.mark.parametrize( + ("param", "value"), + ("a", 1), + ("a", 2), + ("a", 3), + ("b", -1.0) + ("b", 1.0) + ) + + into + + @param_value_parametrization(a=[1, 2, 3], b=[-1.0, 1.0]) + """ + return pytest.mark.parametrize( + ("param", "value"), + [(param, value) for param, values in kwargs.items() for value in values], + ) + + +def adapt_fill(value, *, dtype): + """Adapt fill values in the range [0.0, 1.0] to the value range of the dtype""" + if value is None: + return value + + max_value = get_max_value(dtype) + value_type = float if dtype.is_floating_point else int + + if isinstance(value, (int, float)): + return value_type(value * max_value) + elif isinstance(value, (list, tuple)): + return type(value)(value_type(v * max_value) for v in value) + else: + raise ValueError(f"fill should be an int or float, or a list or tuple of the former, but got '{value}'.") + + +EXHAUSTIVE_TYPE_FILLS = [ + None, + 1, + 0.5, + [1], + [0.2], + (0,), + (0.7,), + [1, 0, 1], + [0.1, 0.2, 
0.3], + (0, 1, 0), + (0.9, 0.234, 0.314), +] +CORRECTNESS_FILLS = [ + v for v in EXHAUSTIVE_TYPE_FILLS if v is None or isinstance(v, float) or (isinstance(v, list) and len(v) > 1) +] + + +# We cannot use `list(transforms.InterpolationMode)` here, since it includes some PIL-only ones as well +INTERPOLATION_MODES = [ + transforms.InterpolationMode.NEAREST, + transforms.InterpolationMode.NEAREST_EXACT, + transforms.InterpolationMode.BILINEAR, + transforms.InterpolationMode.BICUBIC, +] + + +def reference_affine_bounding_boxes_helper(bounding_boxes, *, affine_matrix, new_canvas_size=None, clamp=True): + format = bounding_boxes.format + canvas_size = new_canvas_size or bounding_boxes.canvas_size + + def affine_bounding_boxes(bounding_boxes): + dtype = bounding_boxes.dtype + device = bounding_boxes.device + + # Go to float before converting to prevent precision loss in case of CXCYWH -> XYXY and W or H is 1 + input_xyxy = F.convert_bounding_box_format( + bounding_boxes.to(dtype=torch.float64, device="cpu", copy=True), + old_format=format, + new_format=tv_tensors.BoundingBoxFormat.XYXY, + inplace=True, + ) + x1, y1, x2, y2 = input_xyxy.squeeze(0).tolist() + + points = np.array( + [ + [x1, y1, 1.0], + [x2, y1, 1.0], + [x1, y2, 1.0], + [x2, y2, 1.0], + ] + ) + transformed_points = np.matmul(points, affine_matrix.astype(points.dtype).T) + + output_xyxy = torch.Tensor( + [ + float(np.min(transformed_points[:, 0])), + float(np.min(transformed_points[:, 1])), + float(np.max(transformed_points[:, 0])), + float(np.max(transformed_points[:, 1])), + ] + ) + + output = F.convert_bounding_box_format( + output_xyxy, old_format=tv_tensors.BoundingBoxFormat.XYXY, new_format=format + ) + + if clamp: + # It is important to clamp before casting, especially for CXCYWH format, dtype=int64 + output = F.clamp_bounding_boxes( + output, + format=format, + canvas_size=canvas_size, + ) + else: + # We leave the bounding box as float64 so the caller gets the full precision to perform any additional + # operation + dtype = output.dtype + + return output.to(dtype=dtype, device=device) + + return tv_tensors.BoundingBoxes( + torch.cat([affine_bounding_boxes(b) for b in bounding_boxes.reshape(-1, 4).unbind()], dim=0).reshape( + bounding_boxes.shape + ), + format=format, + canvas_size=canvas_size, + ) + + +class TestResize: + INPUT_SIZE = (17, 11) + OUTPUT_SIZES = [17, [17], (17,), None, [12, 13], (12, 13)] + + def _make_max_size_kwarg(self, *, use_max_size, size): + if size is None: + max_size = min(list(self.INPUT_SIZE)) + elif use_max_size: + if not (isinstance(size, int) or len(size) == 1): + # This would result in an `ValueError` + return None + + max_size = (size if isinstance(size, int) else size[0]) + 1 + else: + max_size = None + + return dict(max_size=max_size) + + def _compute_output_size(self, *, input_size, size, max_size): + if size is None: + size = max_size + + elif not (isinstance(size, int) or len(size) == 1): + return tuple(size) + + elif not isinstance(size, int): + size = size[0] + + old_height, old_width = input_size + ratio = old_width / old_height + if ratio > 1: + new_height = size + new_width = int(ratio * new_height) + else: + new_width = size + new_height = int(new_width / ratio) + + if max_size is not None and max(new_height, new_width) > max_size: + # Need to recompute the aspect ratio, since it might have changed due to rounding + ratio = new_width / new_height + if ratio > 1: + new_width = max_size + new_height = int(new_width / ratio) + else: + new_height = max_size + new_width = int(new_height * 
ratio) + + return new_height, new_width + + @pytest.mark.parametrize("size", OUTPUT_SIZES) + @pytest.mark.parametrize("interpolation", INTERPOLATION_MODES) + @pytest.mark.parametrize("use_max_size", [True, False]) + @pytest.mark.parametrize("antialias", [True, False]) + @pytest.mark.parametrize("dtype", [torch.float32, torch.uint8]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_kernel_image(self, size, interpolation, use_max_size, antialias, dtype, device): + if not (max_size_kwarg := self._make_max_size_kwarg(use_max_size=use_max_size, size=size)): + return + + # In contrast to CPU, there is no native `InterpolationMode.BICUBIC` implementation for uint8 images on CUDA. + # Internally, it uses the float path. Thus, we need to test with an enormous tolerance here to account for that. + atol = 30 if (interpolation is transforms.InterpolationMode.BICUBIC and dtype is torch.uint8) else 1 + check_cuda_vs_cpu_tolerances = dict(rtol=0, atol=atol / 255 if dtype.is_floating_point else atol) + + check_kernel( + F.resize_image, + make_image(self.INPUT_SIZE, dtype=dtype, device=device), + size=size, + interpolation=interpolation, + **max_size_kwarg, + antialias=antialias, + check_cuda_vs_cpu=check_cuda_vs_cpu_tolerances, + check_scripted_vs_eager=not isinstance(size, int), + ) + + @pytest.mark.parametrize("format", list(tv_tensors.BoundingBoxFormat)) + @pytest.mark.parametrize("size", OUTPUT_SIZES) + @pytest.mark.parametrize("use_max_size", [True, False]) + @pytest.mark.parametrize("dtype", [torch.float32, torch.int64]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_kernel_bounding_boxes(self, format, size, use_max_size, dtype, device): + if not (max_size_kwarg := self._make_max_size_kwarg(use_max_size=use_max_size, size=size)): + return + + bounding_boxes = make_bounding_boxes( + format=format, + canvas_size=self.INPUT_SIZE, + dtype=dtype, + device=device, + ) + check_kernel( + F.resize_bounding_boxes, + bounding_boxes, + canvas_size=bounding_boxes.canvas_size, + size=size, + **max_size_kwarg, + check_scripted_vs_eager=not isinstance(size, int), + ) + + @pytest.mark.parametrize("make_mask", [make_segmentation_mask, make_detection_masks]) + def test_kernel_mask(self, make_mask): + check_kernel(F.resize_mask, make_mask(self.INPUT_SIZE), size=self.OUTPUT_SIZES[-1]) + + def test_kernel_video(self): + check_kernel(F.resize_video, make_video(self.INPUT_SIZE), size=self.OUTPUT_SIZES[-1], antialias=True) + + @pytest.mark.parametrize("size", OUTPUT_SIZES) + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_bounding_boxes, make_segmentation_mask, make_video], + ) + def test_functional(self, size, make_input): + max_size_kwarg = self._make_max_size_kwarg(use_max_size=size is None, size=size) + + check_functional( + F.resize, + make_input(self.INPUT_SIZE), + size=size, + **max_size_kwarg, + antialias=True, + check_scripted_smoke=not isinstance(size, int), + ) + + @pytest.mark.parametrize( + ("kernel", "input_type"), + [ + (F.resize_image, torch.Tensor), + (F._geometry._resize_image_pil, PIL.Image.Image), + (F.resize_image, tv_tensors.Image), + (F.resize_bounding_boxes, tv_tensors.BoundingBoxes), + (F.resize_mask, tv_tensors.Mask), + (F.resize_video, tv_tensors.Video), + ], + ) + def test_functional_signature(self, kernel, input_type): + check_functional_kernel_signature_match(F.resize, kernel=kernel, input_type=input_type) + + @pytest.mark.parametrize("size", OUTPUT_SIZES) + @pytest.mark.parametrize("device", cpu_and_cuda()) + 
@pytest.mark.parametrize( + "make_input", + [ + make_image_tensor, + make_image_pil, + make_image, + make_bounding_boxes, + make_segmentation_mask, + make_detection_masks, + make_video, + ], + ) + def test_transform(self, size, device, make_input): + max_size_kwarg = self._make_max_size_kwarg(use_max_size=size is None, size=size) + + check_transform( + transforms.Resize(size=size, **max_size_kwarg, antialias=True), + make_input(self.INPUT_SIZE, device=device), + # atol=1 due to Resize v2 is using native uint8 interpolate path for bilinear and nearest modes + check_v1_compatibility=dict(rtol=0, atol=1) if size is not None else False, + ) + + def _check_output_size(self, input, output, *, size, max_size): + assert tuple(F.get_size(output)) == self._compute_output_size( + input_size=F.get_size(input), size=size, max_size=max_size + ) + + @pytest.mark.parametrize("size", OUTPUT_SIZES) + # `InterpolationMode.NEAREST` is modeled after the buggy `INTER_NEAREST` interpolation of CV2. + # The PIL equivalent of `InterpolationMode.NEAREST` is `InterpolationMode.NEAREST_EXACT` + @pytest.mark.parametrize("interpolation", set(INTERPOLATION_MODES) - {transforms.InterpolationMode.NEAREST}) + @pytest.mark.parametrize("use_max_size", [True, False]) + @pytest.mark.parametrize("fn", [F.resize, transform_cls_to_functional(transforms.Resize)]) + def test_image_correctness(self, size, interpolation, use_max_size, fn): + if not (max_size_kwarg := self._make_max_size_kwarg(use_max_size=use_max_size, size=size)): + return + + image = make_image(self.INPUT_SIZE, dtype=torch.uint8) + + actual = fn(image, size=size, interpolation=interpolation, **max_size_kwarg, antialias=True) + expected = F.to_image(F.resize(F.to_pil_image(image), size=size, interpolation=interpolation, **max_size_kwarg)) + + self._check_output_size(image, actual, size=size, **max_size_kwarg) + torch.testing.assert_close(actual, expected, atol=1, rtol=0) + + def _reference_resize_bounding_boxes(self, bounding_boxes, *, size, max_size=None): + old_height, old_width = bounding_boxes.canvas_size + new_height, new_width = self._compute_output_size( + input_size=bounding_boxes.canvas_size, size=size, max_size=max_size + ) + + if (old_height, old_width) == (new_height, new_width): + return bounding_boxes + + affine_matrix = np.array( + [ + [new_width / old_width, 0, 0], + [0, new_height / old_height, 0], + ], + ) + + return reference_affine_bounding_boxes_helper( + bounding_boxes, + affine_matrix=affine_matrix, + new_canvas_size=(new_height, new_width), + ) + + @pytest.mark.parametrize("format", list(tv_tensors.BoundingBoxFormat)) + @pytest.mark.parametrize("size", OUTPUT_SIZES) + @pytest.mark.parametrize("use_max_size", [True, False]) + @pytest.mark.parametrize("fn", [F.resize, transform_cls_to_functional(transforms.Resize)]) + def test_bounding_boxes_correctness(self, format, size, use_max_size, fn): + if not (max_size_kwarg := self._make_max_size_kwarg(use_max_size=use_max_size, size=size)): + return + + bounding_boxes = make_bounding_boxes(format=format, canvas_size=self.INPUT_SIZE) + + actual = fn(bounding_boxes, size=size, **max_size_kwarg) + expected = self._reference_resize_bounding_boxes(bounding_boxes, size=size, **max_size_kwarg) + + self._check_output_size(bounding_boxes, actual, size=size, **max_size_kwarg) + torch.testing.assert_close(actual, expected) + + @pytest.mark.parametrize("interpolation", set(transforms.InterpolationMode) - set(INTERPOLATION_MODES)) + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, 
make_image, make_video], + ) + def test_pil_interpolation_compat_smoke(self, interpolation, make_input): + input = make_input(self.INPUT_SIZE) + + with ( + contextlib.nullcontext() + if isinstance(input, PIL.Image.Image) + # This error is triggered in PyTorch core + else pytest.raises(NotImplementedError, match=f"got {interpolation.value.lower()}") + ): + F.resize( + input, + size=self.OUTPUT_SIZES[0], + interpolation=interpolation, + ) + + def test_functional_pil_antialias_warning(self): + with pytest.warns(UserWarning, match="Anti-alias option is always applied for PIL Image input"): + F.resize(make_image_pil(self.INPUT_SIZE), size=self.OUTPUT_SIZES[0], antialias=False) + + @pytest.mark.parametrize("size", OUTPUT_SIZES) + @pytest.mark.parametrize( + "make_input", + [ + make_image_tensor, + make_image_pil, + make_image, + make_bounding_boxes, + make_segmentation_mask, + make_detection_masks, + make_video, + ], + ) + def test_max_size_error(self, size, make_input): + if size is None: + # value can be anything other than an integer + max_size = None + match = "max_size must be an integer when size is None" + elif isinstance(size, int) or len(size) == 1: + max_size = (size if isinstance(size, int) else size[0]) - 1 + match = "must be strictly greater than the requested size" + else: + # value can be anything other than None + max_size = -1 + match = "size should be an int or a sequence of length 1" + + with pytest.raises(ValueError, match=match): + F.resize(make_input(self.INPUT_SIZE), size=size, max_size=max_size, antialias=True) + + if isinstance(size, list) and len(size) != 1: + with pytest.raises(ValueError, match="max_size should only be passed if size is None or specifies"): + F.resize(make_input(self.INPUT_SIZE), size=size, max_size=500) + + @pytest.mark.parametrize( + "input_size, max_size, expected_size", + [ + ((10, 10), 10, (10, 10)), + ((10, 20), 40, (20, 40)), + ((20, 10), 40, (40, 20)), + ((10, 20), 10, (5, 10)), + ((20, 10), 10, (10, 5)), + ], + ) + @pytest.mark.parametrize( + "make_input", + [ + make_image_tensor, + make_image_pil, + make_image, + make_bounding_boxes, + make_segmentation_mask, + make_detection_masks, + make_video, + ], + ) + def test_resize_size_none(self, input_size, max_size, expected_size, make_input): + img = make_input(input_size) + out = F.resize(img, size=None, max_size=max_size) + assert F.get_size(out)[-2:] == list(expected_size) + + @pytest.mark.parametrize("interpolation", INTERPOLATION_MODES) + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_video], + ) + def test_interpolation_int(self, interpolation, make_input): + input = make_input(self.INPUT_SIZE) + + # `InterpolationMode.NEAREST_EXACT` has no proper corresponding integer equivalent. Internally, we map it to + # `0` to be the same as `InterpolationMode.NEAREST` for PIL. However, for the tensor backend there is a + # difference and thus we don't test it here. 
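As an illustrative aside on the integer codes exercised here: the v2 API accepts PIL's integer resampling constants directly and maps them back onto the enum, so the enum and int spellings must agree bit for bit. A minimal sketch (the value 2 for bilinear follows PIL's resampling constants and is an assumption of this sketch):

import torch
from torchvision.transforms.v2 import InterpolationMode
from torchvision.transforms.v2 import functional as F

img = torch.randint(0, 256, (3, 17, 11), dtype=torch.uint8)
by_enum = F.resize(img, size=[8, 8], interpolation=InterpolationMode.BILINEAR, antialias=True)
by_int = F.resize(img, size=[8, 8], interpolation=2, antialias=True)  # 2 == PIL's BILINEAR code (assumed)
torch.testing.assert_close(by_enum, by_int)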
+        if isinstance(input, torch.Tensor) and interpolation is transforms.InterpolationMode.NEAREST_EXACT:
+            return
+
+        expected = F.resize(input, size=self.OUTPUT_SIZES[0], interpolation=interpolation, antialias=True)
+        actual = F.resize(
+            input, size=self.OUTPUT_SIZES[0], interpolation=pil_modes_mapping[interpolation], antialias=True
+        )
+
+        assert_equal(actual, expected)
+
+    def test_transform_unknown_size_error(self):
+        with pytest.raises(ValueError, match="size can be an integer, a sequence of one or two integers, or None"):
+            transforms.Resize(size=object())
+
+    @pytest.mark.parametrize(
+        "size", [min(INPUT_SIZE), [min(INPUT_SIZE)], (min(INPUT_SIZE),), list(INPUT_SIZE), tuple(INPUT_SIZE)]
+    )
+    @pytest.mark.parametrize(
+        "make_input",
+        [
+            make_image_tensor,
+            make_image_pil,
+            make_image,
+            make_bounding_boxes,
+            make_segmentation_mask,
+            make_detection_masks,
+            make_video,
+        ],
+    )
+    def test_noop(self, size, make_input):
+        input = make_input(self.INPUT_SIZE)
+
+        output = F.resize(input, size=F.get_size(input), antialias=True)
+
+        # This identity check is not a requirement. It is here to avoid breaking the behavior by accident. If there
+        # is a good reason to break this, feel free to downgrade to an equality check.
+        if isinstance(input, tv_tensors.TVTensor):
+            # We can't test identity directly, since that checks for the identity of the Python object. Since all
+            # tv_tensors unwrap before a kernel and wrap again afterwards, the Python object changes. Thus, we check
+            # that the underlying storage is the same
+            assert output.data_ptr() == input.data_ptr()
+        else:
+            assert output is input
+
+    @pytest.mark.parametrize(
+        "make_input",
+        [
+            make_image_tensor,
+            make_image_pil,
+            make_image,
+            make_bounding_boxes,
+            make_segmentation_mask,
+            make_detection_masks,
+            make_video,
+        ],
+    )
+    def test_no_regression_5405(self, make_input):
+        # Checks that `max_size` is not ignored if `size == small_edge_size`
+        # See https://github.com/pytorch/vision/issues/5405
+
+        input = make_input(self.INPUT_SIZE)
+
+        size = min(F.get_size(input))
+        max_size = size + 1
+        output = F.resize(input, size=size, max_size=max_size, antialias=True)
+
+        assert max(F.get_size(output)) == max_size
+
+    def _make_image(self, *args, batch_dims=(), memory_format=torch.contiguous_format, **kwargs):
+        # torch.channels_last memory_format is only available for 4D tensors, i.e. (B, C, H, W). However, images coming
+        # from PIL or our own I/O functions do not have a batch dimension and are thus 3D, i.e. (C, H, W). Still, the
+        # layout of the data in memory is channels last. To emulate this when a 3D input is requested here, we create
+        # the image as 4D and create a view with the right shape afterwards. With this the layout in memory is channels
+        # last although PyTorch doesn't recognize it as such.
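The emulation trick described in the comment above can be reproduced in isolation. A minimal standalone sketch (shapes are arbitrary; the expected strides follow the (1, W * C, C) channels-last pattern that `_check_stride` below verifies):

import torch

# A 4D channels-last tensor: strides are (H * W * C, 1, W * C, C).
batched = torch.rand(1, 3, 8, 8).to(memory_format=torch.channels_last)

# Viewing away the size-1 batch dim keeps the underlying layout: the 3D result
# still stores its data channels-last, although PyTorch reports it as non-contiguous.
image = batched.view(3, 8, 8)
assert image.stride() == (1, 8 * 3, 3)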
+ emulate_channels_last = memory_format is torch.channels_last and len(batch_dims) != 1 + + image = make_image( + *args, + batch_dims=(math.prod(batch_dims),) if emulate_channels_last else batch_dims, + memory_format=memory_format, + **kwargs, + ) + + if emulate_channels_last: + image = tv_tensors.wrap(image.view(*batch_dims, *image.shape[-3:]), like=image) + + return image + + def _check_stride(self, image, *, memory_format): + C, H, W = F.get_dimensions(image) + if memory_format is torch.contiguous_format: + expected_stride = (H * W, W, 1) + elif memory_format is torch.channels_last: + expected_stride = (1, W * C, C) + else: + raise ValueError(f"Unknown memory_format: {memory_format}") + + assert image.stride() == expected_stride + + # TODO: We can remove this test and related torchvision workaround + # once we fixed related pytorch issue: https://github.com/pytorch/pytorch/issues/68430 + @pytest.mark.parametrize("interpolation", INTERPOLATION_MODES) + @pytest.mark.parametrize("antialias", [True, False]) + @pytest.mark.parametrize("memory_format", [torch.contiguous_format, torch.channels_last]) + @pytest.mark.parametrize("dtype", [torch.uint8, torch.float32]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_kernel_image_memory_format_consistency(self, interpolation, antialias, memory_format, dtype, device): + size = self.OUTPUT_SIZES[0] + + input = self._make_image(self.INPUT_SIZE, dtype=dtype, device=device, memory_format=memory_format) + + # Smoke test to make sure we aren't starting with wrong assumptions + self._check_stride(input, memory_format=memory_format) + + output = F.resize_image(input, size=size, interpolation=interpolation, antialias=antialias) + + self._check_stride(output, memory_format=memory_format) + + def test_float16_no_rounding(self): + # Make sure Resize() doesn't round float16 images + # Non-regression test for https://github.com/pytorch/vision/issues/7667 + + input = make_image_tensor(self.INPUT_SIZE, dtype=torch.float16) + output = F.resize_image(input, size=self.OUTPUT_SIZES[0], antialias=True) + + assert output.dtype is torch.float16 + assert (output.round() - output).abs().sum() > 0 + + +class TestHorizontalFlip: + @pytest.mark.parametrize("dtype", [torch.float32, torch.uint8]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_kernel_image(self, dtype, device): + check_kernel(F.horizontal_flip_image, make_image(dtype=dtype, device=device)) + + @pytest.mark.parametrize("format", list(tv_tensors.BoundingBoxFormat)) + @pytest.mark.parametrize("dtype", [torch.float32, torch.int64]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_kernel_bounding_boxes(self, format, dtype, device): + bounding_boxes = make_bounding_boxes(format=format, dtype=dtype, device=device) + check_kernel( + F.horizontal_flip_bounding_boxes, + bounding_boxes, + format=format, + canvas_size=bounding_boxes.canvas_size, + ) + + @pytest.mark.parametrize("make_mask", [make_segmentation_mask, make_detection_masks]) + def test_kernel_mask(self, make_mask): + check_kernel(F.horizontal_flip_mask, make_mask()) + + def test_kernel_video(self): + check_kernel(F.horizontal_flip_video, make_video()) + + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_bounding_boxes, make_segmentation_mask, make_video], + ) + def test_functional(self, make_input): + check_functional(F.horizontal_flip, make_input()) + + @pytest.mark.parametrize( + ("kernel", "input_type"), + [ + (F.horizontal_flip_image, torch.Tensor), + 
(F._geometry._horizontal_flip_image_pil, PIL.Image.Image), + (F.horizontal_flip_image, tv_tensors.Image), + (F.horizontal_flip_bounding_boxes, tv_tensors.BoundingBoxes), + (F.horizontal_flip_mask, tv_tensors.Mask), + (F.horizontal_flip_video, tv_tensors.Video), + ], + ) + def test_functional_signature(self, kernel, input_type): + check_functional_kernel_signature_match(F.horizontal_flip, kernel=kernel, input_type=input_type) + + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_bounding_boxes, make_segmentation_mask, make_video], + ) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_transform(self, make_input, device): + check_transform(transforms.RandomHorizontalFlip(p=1), make_input(device=device)) + + @pytest.mark.parametrize( + "fn", [F.horizontal_flip, transform_cls_to_functional(transforms.RandomHorizontalFlip, p=1)] + ) + def test_image_correctness(self, fn): + image = make_image(dtype=torch.uint8, device="cpu") + + actual = fn(image) + expected = F.to_image(F.horizontal_flip(F.to_pil_image(image))) + + torch.testing.assert_close(actual, expected) + + def _reference_horizontal_flip_bounding_boxes(self, bounding_boxes): + affine_matrix = np.array( + [ + [-1, 0, bounding_boxes.canvas_size[1]], + [0, 1, 0], + ], + ) + + return reference_affine_bounding_boxes_helper(bounding_boxes, affine_matrix=affine_matrix) + + @pytest.mark.parametrize("format", list(tv_tensors.BoundingBoxFormat)) + @pytest.mark.parametrize( + "fn", [F.horizontal_flip, transform_cls_to_functional(transforms.RandomHorizontalFlip, p=1)] + ) + def test_bounding_boxes_correctness(self, format, fn): + bounding_boxes = make_bounding_boxes(format=format) + + actual = fn(bounding_boxes) + expected = self._reference_horizontal_flip_bounding_boxes(bounding_boxes) + + torch.testing.assert_close(actual, expected) + + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_bounding_boxes, make_segmentation_mask, make_video], + ) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_transform_noop(self, make_input, device): + input = make_input(device=device) + + transform = transforms.RandomHorizontalFlip(p=0) + + output = transform(input) + + assert_equal(output, input) + + +class TestAffine: + _EXHAUSTIVE_TYPE_AFFINE_KWARGS = dict( + # float, int + angle=[-10.9, 18], + # two-list of float, two-list of int, two-tuple of float, two-tuple of int + translate=[[6.3, -0.6], [1, -3], (16.6, -6.6), (-2, 4)], + # float + scale=[0.5], + # float, int, + # one-list of float, one-list of int, one-tuple of float, one-tuple of int + # two-list of float, two-list of int, two-tuple of float, two-tuple of int + shear=[35.6, 38, [-37.7], [-23], (5.3,), (-52,), [5.4, 21.8], [-47, 51], (-11.2, 36.7), (8, -53)], + # None + # two-list of float, two-list of int, two-tuple of float, two-tuple of int + center=[None, [1.2, 4.9], [-3, 1], (2.5, -4.7), (3, 2)], + ) + # The special case for shear makes sure we pick a value that is supported while JIT scripting + _MINIMAL_AFFINE_KWARGS = { + k: vs[0] if k != "shear" else next(v for v in vs if isinstance(v, list)) + for k, vs in _EXHAUSTIVE_TYPE_AFFINE_KWARGS.items() + } + _CORRECTNESS_AFFINE_KWARGS = { + k: [v for v in vs if v is None or isinstance(v, float) or (isinstance(v, list) and len(v) > 1)] + for k, vs in _EXHAUSTIVE_TYPE_AFFINE_KWARGS.items() + } + + _EXHAUSTIVE_TYPE_TRANSFORM_AFFINE_RANGES = dict( + degrees=[30, (-15, 20)], + translate=[None, (0.5, 0.5)], + scale=[None, (0.75, 1.25)], + 
shear=[None, (12, 30, -17, 5), 10, (-5, 12)], + ) + _CORRECTNESS_TRANSFORM_AFFINE_RANGES = { + k: next(v for v in vs if v is not None) for k, vs in _EXHAUSTIVE_TYPE_TRANSFORM_AFFINE_RANGES.items() + } + + def _check_kernel(self, kernel, input, *args, **kwargs): + kwargs_ = self._MINIMAL_AFFINE_KWARGS.copy() + kwargs_.update(kwargs) + check_kernel(kernel, input, *args, **kwargs_) + + @param_value_parametrization( + angle=_EXHAUSTIVE_TYPE_AFFINE_KWARGS["angle"], + translate=_EXHAUSTIVE_TYPE_AFFINE_KWARGS["translate"], + shear=_EXHAUSTIVE_TYPE_AFFINE_KWARGS["shear"], + center=_EXHAUSTIVE_TYPE_AFFINE_KWARGS["center"], + interpolation=[transforms.InterpolationMode.NEAREST, transforms.InterpolationMode.BILINEAR], + fill=EXHAUSTIVE_TYPE_FILLS, + ) + @pytest.mark.parametrize("dtype", [torch.float32, torch.uint8]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_kernel_image(self, param, value, dtype, device): + if param == "fill": + value = adapt_fill(value, dtype=dtype) + self._check_kernel( + F.affine_image, + make_image(dtype=dtype, device=device), + **{param: value}, + check_scripted_vs_eager=not (param in {"shear", "fill"} and isinstance(value, (int, float))), + check_cuda_vs_cpu=dict(atol=1, rtol=0) + if dtype is torch.uint8 and param == "interpolation" and value is transforms.InterpolationMode.BILINEAR + else True, + ) + + @param_value_parametrization( + angle=_EXHAUSTIVE_TYPE_AFFINE_KWARGS["angle"], + translate=_EXHAUSTIVE_TYPE_AFFINE_KWARGS["translate"], + shear=_EXHAUSTIVE_TYPE_AFFINE_KWARGS["shear"], + center=_EXHAUSTIVE_TYPE_AFFINE_KWARGS["center"], + ) + @pytest.mark.parametrize("format", list(tv_tensors.BoundingBoxFormat)) + @pytest.mark.parametrize("dtype", [torch.float32, torch.int64]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_kernel_bounding_boxes(self, param, value, format, dtype, device): + bounding_boxes = make_bounding_boxes(format=format, dtype=dtype, device=device) + self._check_kernel( + F.affine_bounding_boxes, + bounding_boxes, + format=format, + canvas_size=bounding_boxes.canvas_size, + **{param: value}, + check_scripted_vs_eager=not (param == "shear" and isinstance(value, (int, float))), + ) + + @pytest.mark.parametrize("make_mask", [make_segmentation_mask, make_detection_masks]) + def test_kernel_mask(self, make_mask): + self._check_kernel(F.affine_mask, make_mask()) + + def test_kernel_video(self): + self._check_kernel(F.affine_video, make_video()) + + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_bounding_boxes, make_segmentation_mask, make_video], + ) + def test_functional(self, make_input): + check_functional(F.affine, make_input(), **self._MINIMAL_AFFINE_KWARGS) + + @pytest.mark.parametrize( + ("kernel", "input_type"), + [ + (F.affine_image, torch.Tensor), + (F._geometry._affine_image_pil, PIL.Image.Image), + (F.affine_image, tv_tensors.Image), + (F.affine_bounding_boxes, tv_tensors.BoundingBoxes), + (F.affine_mask, tv_tensors.Mask), + (F.affine_video, tv_tensors.Video), + ], + ) + def test_functional_signature(self, kernel, input_type): + check_functional_kernel_signature_match(F.affine, kernel=kernel, input_type=input_type) + + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_bounding_boxes, make_segmentation_mask, make_video], + ) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_transform(self, make_input, device): + input = make_input(device=device) + + 
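For readability, note what `_CORRECTNESS_TRANSFORM_AFFINE_RANGES` expands to: it picks the first non-None option for every parameter, so the transform checked below is equivalent to this explicit construction (a sketch derived from the dicts above):

from torchvision.transforms import v2 as transforms

equivalent = transforms.RandomAffine(
    degrees=30,
    translate=(0.5, 0.5),
    scale=(0.75, 1.25),
    shear=(12, 30, -17, 5),
)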
+        check_transform(transforms.RandomAffine(**self._CORRECTNESS_TRANSFORM_AFFINE_RANGES), input)
+
+    @pytest.mark.parametrize("angle", _CORRECTNESS_AFFINE_KWARGS["angle"])
+    @pytest.mark.parametrize("translate", _CORRECTNESS_AFFINE_KWARGS["translate"])
+    @pytest.mark.parametrize("scale", _CORRECTNESS_AFFINE_KWARGS["scale"])
+    @pytest.mark.parametrize("shear", _CORRECTNESS_AFFINE_KWARGS["shear"])
+    @pytest.mark.parametrize("center", _CORRECTNESS_AFFINE_KWARGS["center"])
+    @pytest.mark.parametrize(
+        "interpolation", [transforms.InterpolationMode.NEAREST, transforms.InterpolationMode.BILINEAR]
+    )
+    @pytest.mark.parametrize("fill", CORRECTNESS_FILLS)
+    def test_functional_image_correctness(self, angle, translate, scale, shear, center, interpolation, fill):
+        image = make_image(dtype=torch.uint8, device="cpu")
+
+        fill = adapt_fill(fill, dtype=torch.uint8)
+
+        actual = F.affine(
+            image,
+            angle=angle,
+            translate=translate,
+            scale=scale,
+            shear=shear,
+            center=center,
+            interpolation=interpolation,
+            fill=fill,
+        )
+        expected = F.to_image(
+            F.affine(
+                F.to_pil_image(image),
+                angle=angle,
+                translate=translate,
+                scale=scale,
+                shear=shear,
+                center=center,
+                interpolation=interpolation,
+                fill=fill,
+            )
+        )
+
+        mae = (actual.float() - expected.float()).abs().mean()
+        assert mae < (2 if interpolation is transforms.InterpolationMode.NEAREST else 8)
+
+    @pytest.mark.parametrize("center", _CORRECTNESS_AFFINE_KWARGS["center"])
+    @pytest.mark.parametrize(
+        "interpolation", [transforms.InterpolationMode.NEAREST, transforms.InterpolationMode.BILINEAR]
+    )
+    @pytest.mark.parametrize("fill", CORRECTNESS_FILLS)
+    @pytest.mark.parametrize("seed", list(range(5)))
+    def test_transform_image_correctness(self, center, interpolation, fill, seed):
+        image = make_image(dtype=torch.uint8, device="cpu")
+
+        fill = adapt_fill(fill, dtype=torch.uint8)
+
+        transform = transforms.RandomAffine(
+            **self._CORRECTNESS_TRANSFORM_AFFINE_RANGES, center=center, interpolation=interpolation, fill=fill
+        )
+
+        torch.manual_seed(seed)
+        actual = transform(image)
+
+        torch.manual_seed(seed)
+        expected = F.to_image(transform(F.to_pil_image(image)))
+
+        mae = (actual.float() - expected.float()).abs().mean()
+        assert mae < (2 if interpolation is transforms.InterpolationMode.NEAREST else 8)
+
+    def _compute_affine_matrix(self, *, angle, translate, scale, shear, center):
+        rot = math.radians(angle)
+        cx, cy = center
+        tx, ty = translate
+        sx, sy = [math.radians(s) for s in ([shear, 0.0] if isinstance(shear, (int, float)) else shear)]
+
+        c_matrix = np.array([[1, 0, cx], [0, 1, cy], [0, 0, 1]])
+        t_matrix = np.array([[1, 0, tx], [0, 1, ty], [0, 0, 1]])
+        c_matrix_inv = np.linalg.inv(c_matrix)
+        rs_matrix = np.array(
+            [
+                [scale * math.cos(rot), -scale * math.sin(rot), 0],
+                [scale * math.sin(rot), scale * math.cos(rot), 0],
+                [0, 0, 1],
+            ]
+        )
+        shear_x_matrix = np.array([[1, -math.tan(sx), 0], [0, 1, 0], [0, 0, 1]])
+        shear_y_matrix = np.array([[1, 0, 0], [-math.tan(sy), 1, 0], [0, 0, 1]])
+        rss_matrix = np.matmul(rs_matrix, np.matmul(shear_y_matrix, shear_x_matrix))
+        true_matrix = np.matmul(t_matrix, np.matmul(c_matrix, np.matmul(rss_matrix, c_matrix_inv)))
+        return true_matrix[:2, :]
+
+    def _reference_affine_bounding_boxes(self, bounding_boxes, *, angle, translate, scale, shear, center):
+        if center is None:
+            center = [s * 0.5 for s in bounding_boxes.canvas_size[::-1]]
+
+        return reference_affine_bounding_boxes_helper(
+            bounding_boxes,
+            affine_matrix=self._compute_affine_matrix(
+                angle=angle, translate=translate,
scale=scale, shear=shear, center=center + ), + ) + + @pytest.mark.parametrize("format", list(tv_tensors.BoundingBoxFormat)) + @pytest.mark.parametrize("angle", _CORRECTNESS_AFFINE_KWARGS["angle"]) + @pytest.mark.parametrize("translate", _CORRECTNESS_AFFINE_KWARGS["translate"]) + @pytest.mark.parametrize("scale", _CORRECTNESS_AFFINE_KWARGS["scale"]) + @pytest.mark.parametrize("shear", _CORRECTNESS_AFFINE_KWARGS["shear"]) + @pytest.mark.parametrize("center", _CORRECTNESS_AFFINE_KWARGS["center"]) + def test_functional_bounding_boxes_correctness(self, format, angle, translate, scale, shear, center): + bounding_boxes = make_bounding_boxes(format=format) + + actual = F.affine( + bounding_boxes, + angle=angle, + translate=translate, + scale=scale, + shear=shear, + center=center, + ) + expected = self._reference_affine_bounding_boxes( + bounding_boxes, + angle=angle, + translate=translate, + scale=scale, + shear=shear, + center=center, + ) + + torch.testing.assert_close(actual, expected) + + @pytest.mark.parametrize("format", list(tv_tensors.BoundingBoxFormat)) + @pytest.mark.parametrize("center", _CORRECTNESS_AFFINE_KWARGS["center"]) + @pytest.mark.parametrize("seed", list(range(5))) + def test_transform_bounding_boxes_correctness(self, format, center, seed): + bounding_boxes = make_bounding_boxes(format=format) + + transform = transforms.RandomAffine(**self._CORRECTNESS_TRANSFORM_AFFINE_RANGES, center=center) + + torch.manual_seed(seed) + params = transform.make_params([bounding_boxes]) + + torch.manual_seed(seed) + actual = transform(bounding_boxes) + + expected = self._reference_affine_bounding_boxes(bounding_boxes, **params, center=center) + + torch.testing.assert_close(actual, expected) + + @pytest.mark.parametrize("degrees", _EXHAUSTIVE_TYPE_TRANSFORM_AFFINE_RANGES["degrees"]) + @pytest.mark.parametrize("translate", _EXHAUSTIVE_TYPE_TRANSFORM_AFFINE_RANGES["translate"]) + @pytest.mark.parametrize("scale", _EXHAUSTIVE_TYPE_TRANSFORM_AFFINE_RANGES["scale"]) + @pytest.mark.parametrize("shear", _EXHAUSTIVE_TYPE_TRANSFORM_AFFINE_RANGES["shear"]) + @pytest.mark.parametrize("seed", list(range(10))) + def test_transformmake_params_bounds(self, degrees, translate, scale, shear, seed): + image = make_image() + height, width = F.get_size(image) + + transform = transforms.RandomAffine(degrees=degrees, translate=translate, scale=scale, shear=shear) + + torch.manual_seed(seed) + params = transform.make_params([image]) + + if isinstance(degrees, (int, float)): + assert -degrees <= params["angle"] <= degrees + else: + assert degrees[0] <= params["angle"] <= degrees[1] + + if translate is not None: + width_max = int(round(translate[0] * width)) + height_max = int(round(translate[1] * height)) + assert -width_max <= params["translate"][0] <= width_max + assert -height_max <= params["translate"][1] <= height_max + else: + assert params["translate"] == (0, 0) + + if scale is not None: + assert scale[0] <= params["scale"] <= scale[1] + else: + assert params["scale"] == 1.0 + + if shear is not None: + if isinstance(shear, (int, float)): + assert -shear <= params["shear"][0] <= shear + assert params["shear"][1] == 0.0 + elif len(shear) == 2: + assert shear[0] <= params["shear"][0] <= shear[1] + assert params["shear"][1] == 0.0 + elif len(shear) == 4: + assert shear[0] <= params["shear"][0] <= shear[1] + assert shear[2] <= params["shear"][1] <= shear[3] + else: + assert params["shear"] == (0, 0) + + @pytest.mark.parametrize("param", ["degrees", "translate", "scale", "shear", "center"]) + 
@pytest.mark.parametrize("value", [0, [0], [0, 0, 0]]) + def test_transform_sequence_len_errors(self, param, value): + if param in {"degrees", "shear"} and not isinstance(value, list): + return + + kwargs = {param: value} + if param != "degrees": + kwargs["degrees"] = 0 + + with pytest.raises( + ValueError if isinstance(value, list) else TypeError, match=f"{param} should be a sequence of length 2" + ): + transforms.RandomAffine(**kwargs) + + def test_transform_negative_degrees_error(self): + with pytest.raises(ValueError, match="If degrees is a single number, it must be positive"): + transforms.RandomAffine(degrees=-1) + + @pytest.mark.parametrize("translate", [[-1, 0], [2, 0], [-1, 2]]) + def test_transform_translate_range_error(self, translate): + with pytest.raises(ValueError, match="translation values should be between 0 and 1"): + transforms.RandomAffine(degrees=0, translate=translate) + + @pytest.mark.parametrize("scale", [[-1, 0], [0, -1], [-1, -1]]) + def test_transform_scale_range_error(self, scale): + with pytest.raises(ValueError, match="scale values should be positive"): + transforms.RandomAffine(degrees=0, scale=scale) + + def test_transform_negative_shear_error(self): + with pytest.raises(ValueError, match="If shear is a single number, it must be positive"): + transforms.RandomAffine(degrees=0, shear=-1) + + def test_transform_unknown_fill_error(self): + with pytest.raises(TypeError, match="Got inappropriate fill arg"): + transforms.RandomAffine(degrees=0, fill="fill") + + +class TestVerticalFlip: + @pytest.mark.parametrize("dtype", [torch.float32, torch.uint8]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_kernel_image(self, dtype, device): + check_kernel(F.vertical_flip_image, make_image(dtype=dtype, device=device)) + + @pytest.mark.parametrize("format", list(tv_tensors.BoundingBoxFormat)) + @pytest.mark.parametrize("dtype", [torch.float32, torch.int64]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_kernel_bounding_boxes(self, format, dtype, device): + bounding_boxes = make_bounding_boxes(format=format, dtype=dtype, device=device) + check_kernel( + F.vertical_flip_bounding_boxes, + bounding_boxes, + format=format, + canvas_size=bounding_boxes.canvas_size, + ) + + @pytest.mark.parametrize("make_mask", [make_segmentation_mask, make_detection_masks]) + def test_kernel_mask(self, make_mask): + check_kernel(F.vertical_flip_mask, make_mask()) + + def test_kernel_video(self): + check_kernel(F.vertical_flip_video, make_video()) + + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_bounding_boxes, make_segmentation_mask, make_video], + ) + def test_functional(self, make_input): + check_functional(F.vertical_flip, make_input()) + + @pytest.mark.parametrize( + ("kernel", "input_type"), + [ + (F.vertical_flip_image, torch.Tensor), + (F._geometry._vertical_flip_image_pil, PIL.Image.Image), + (F.vertical_flip_image, tv_tensors.Image), + (F.vertical_flip_bounding_boxes, tv_tensors.BoundingBoxes), + (F.vertical_flip_mask, tv_tensors.Mask), + (F.vertical_flip_video, tv_tensors.Video), + ], + ) + def test_functional_signature(self, kernel, input_type): + check_functional_kernel_signature_match(F.vertical_flip, kernel=kernel, input_type=input_type) + + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_bounding_boxes, make_segmentation_mask, make_video], + ) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_transform(self, make_input, device): + 
check_transform(transforms.RandomVerticalFlip(p=1), make_input(device=device)) + + @pytest.mark.parametrize("fn", [F.vertical_flip, transform_cls_to_functional(transforms.RandomVerticalFlip, p=1)]) + def test_image_correctness(self, fn): + image = make_image(dtype=torch.uint8, device="cpu") + + actual = fn(image) + expected = F.to_image(F.vertical_flip(F.to_pil_image(image))) + + torch.testing.assert_close(actual, expected) + + def _reference_vertical_flip_bounding_boxes(self, bounding_boxes): + affine_matrix = np.array( + [ + [1, 0, 0], + [0, -1, bounding_boxes.canvas_size[0]], + ], + ) + + return reference_affine_bounding_boxes_helper(bounding_boxes, affine_matrix=affine_matrix) + + @pytest.mark.parametrize("format", list(tv_tensors.BoundingBoxFormat)) + @pytest.mark.parametrize("fn", [F.vertical_flip, transform_cls_to_functional(transforms.RandomVerticalFlip, p=1)]) + def test_bounding_boxes_correctness(self, format, fn): + bounding_boxes = make_bounding_boxes(format=format) + + actual = fn(bounding_boxes) + expected = self._reference_vertical_flip_bounding_boxes(bounding_boxes) + + torch.testing.assert_close(actual, expected) + + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_bounding_boxes, make_segmentation_mask, make_video], + ) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_transform_noop(self, make_input, device): + input = make_input(device=device) + + transform = transforms.RandomVerticalFlip(p=0) + + output = transform(input) + + assert_equal(output, input) + + +class TestRotate: + _EXHAUSTIVE_TYPE_AFFINE_KWARGS = dict( + # float, int + angle=[-10.9, 18], + # None + # two-list of float, two-list of int, two-tuple of float, two-tuple of int + center=[None, [1.2, 4.9], [-3, 1], (2.5, -4.7), (3, 2)], + ) + _MINIMAL_AFFINE_KWARGS = {k: vs[0] for k, vs in _EXHAUSTIVE_TYPE_AFFINE_KWARGS.items()} + _CORRECTNESS_AFFINE_KWARGS = { + k: [v for v in vs if v is None or isinstance(v, float) or isinstance(v, list)] + for k, vs in _EXHAUSTIVE_TYPE_AFFINE_KWARGS.items() + } + + _EXHAUSTIVE_TYPE_TRANSFORM_AFFINE_RANGES = dict( + degrees=[30, (-15, 20)], + ) + _CORRECTNESS_TRANSFORM_AFFINE_RANGES = {k: vs[0] for k, vs in _EXHAUSTIVE_TYPE_TRANSFORM_AFFINE_RANGES.items()} + + @param_value_parametrization( + angle=_EXHAUSTIVE_TYPE_AFFINE_KWARGS["angle"], + interpolation=[transforms.InterpolationMode.NEAREST, transforms.InterpolationMode.BILINEAR], + expand=[False, True], + center=_EXHAUSTIVE_TYPE_AFFINE_KWARGS["center"], + fill=EXHAUSTIVE_TYPE_FILLS, + ) + @pytest.mark.parametrize("dtype", [torch.float32, torch.uint8]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_kernel_image(self, param, value, dtype, device): + kwargs = {param: value} + if param != "angle": + kwargs["angle"] = self._MINIMAL_AFFINE_KWARGS["angle"] + check_kernel( + F.rotate_image, + make_image(dtype=dtype, device=device), + **kwargs, + check_scripted_vs_eager=not (param == "fill" and isinstance(value, (int, float))), + ) + + @param_value_parametrization( + angle=_EXHAUSTIVE_TYPE_AFFINE_KWARGS["angle"], + expand=[False, True], + center=_EXHAUSTIVE_TYPE_AFFINE_KWARGS["center"], + ) + @pytest.mark.parametrize("format", list(tv_tensors.BoundingBoxFormat)) + @pytest.mark.parametrize("dtype", [torch.float32, torch.uint8]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_kernel_bounding_boxes(self, param, value, format, dtype, device): + kwargs = {param: value} + if param != "angle": + kwargs["angle"] = 
self._MINIMAL_AFFINE_KWARGS["angle"]
+
+        bounding_boxes = make_bounding_boxes(format=format, dtype=dtype, device=device)
+
+        check_kernel(
+            F.rotate_bounding_boxes,
+            bounding_boxes,
+            format=format,
+            canvas_size=bounding_boxes.canvas_size,
+            **kwargs,
+        )
+
+    @pytest.mark.parametrize("make_mask", [make_segmentation_mask, make_detection_masks])
+    def test_kernel_mask(self, make_mask):
+        check_kernel(F.rotate_mask, make_mask(), **self._MINIMAL_AFFINE_KWARGS)
+
+    def test_kernel_video(self):
+        check_kernel(F.rotate_video, make_video(), **self._MINIMAL_AFFINE_KWARGS)
+
+    @pytest.mark.parametrize(
+        "make_input",
+        [make_image_tensor, make_image_pil, make_image, make_bounding_boxes, make_segmentation_mask, make_video],
+    )
+    def test_functional(self, make_input):
+        check_functional(F.rotate, make_input(), **self._MINIMAL_AFFINE_KWARGS)
+
+    @pytest.mark.parametrize(
+        ("kernel", "input_type"),
+        [
+            (F.rotate_image, torch.Tensor),
+            (F._geometry._rotate_image_pil, PIL.Image.Image),
+            (F.rotate_image, tv_tensors.Image),
+            (F.rotate_bounding_boxes, tv_tensors.BoundingBoxes),
+            (F.rotate_mask, tv_tensors.Mask),
+            (F.rotate_video, tv_tensors.Video),
+        ],
+    )
+    def test_functional_signature(self, kernel, input_type):
+        check_functional_kernel_signature_match(F.rotate, kernel=kernel, input_type=input_type)
+
+    @pytest.mark.parametrize(
+        "make_input",
+        [make_image_tensor, make_image_pil, make_image, make_bounding_boxes, make_segmentation_mask, make_video],
+    )
+    @pytest.mark.parametrize("device", cpu_and_cuda())
+    def test_transform(self, make_input, device):
+        check_transform(
+            transforms.RandomRotation(**self._CORRECTNESS_TRANSFORM_AFFINE_RANGES), make_input(device=device)
+        )
+
+    @pytest.mark.parametrize("angle", _CORRECTNESS_AFFINE_KWARGS["angle"])
+    @pytest.mark.parametrize("center", _CORRECTNESS_AFFINE_KWARGS["center"])
+    @pytest.mark.parametrize(
+        "interpolation", [transforms.InterpolationMode.NEAREST, transforms.InterpolationMode.BILINEAR]
+    )
+    @pytest.mark.parametrize("expand", [False, True])
+    @pytest.mark.parametrize("fill", CORRECTNESS_FILLS)
+    def test_functional_image_correctness(self, angle, center, interpolation, expand, fill):
+        image = make_image(dtype=torch.uint8, device="cpu")
+
+        fill = adapt_fill(fill, dtype=torch.uint8)
+
+        actual = F.rotate(image, angle=angle, center=center, interpolation=interpolation, expand=expand, fill=fill)
+        expected = F.to_image(
+            F.rotate(
+                F.to_pil_image(image), angle=angle, center=center, interpolation=interpolation, expand=expand, fill=fill
+            )
+        )
+
+        mae = (actual.float() - expected.float()).abs().mean()
+        assert mae < (1 if interpolation is transforms.InterpolationMode.NEAREST else 6)
+
+    @pytest.mark.parametrize("center", _CORRECTNESS_AFFINE_KWARGS["center"])
+    @pytest.mark.parametrize(
+        "interpolation", [transforms.InterpolationMode.NEAREST, transforms.InterpolationMode.BILINEAR]
+    )
+    @pytest.mark.parametrize("expand", [False, True])
+    @pytest.mark.parametrize("fill", CORRECTNESS_FILLS)
+    @pytest.mark.parametrize("seed", list(range(5)))
+    def test_transform_image_correctness(self, center, interpolation, expand, fill, seed):
+        image = make_image(dtype=torch.uint8, device="cpu")
+
+        fill = adapt_fill(fill, dtype=torch.uint8)
+
+        transform = transforms.RandomRotation(
+            **self._CORRECTNESS_TRANSFORM_AFFINE_RANGES,
+            center=center,
+            interpolation=interpolation,
+            expand=expand,
+            fill=fill,
+        )
+
+        torch.manual_seed(seed)
+        actual = transform(image)
+
+        torch.manual_seed(seed)
+        expected = F.to_image(transform(F.to_pil_image(image)))
+
+        mae = (actual.float() - expected.float()).abs().mean()
+        assert mae < (1 if interpolation is transforms.InterpolationMode.NEAREST else 6)
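Before the reference helpers below: with expand=True, the new canvas is obtained by mapping the four image corners through the affine matrix and taking their bounding extent, which is what `_compute_output_canvas_size` does. A small numeric sketch of that idea (values chosen only for illustration):

import numpy as np

h, w = 10, 10
theta = np.radians(45)
corners = np.array([[0, 0], [w, 0], [w, h], [0, h]], dtype=np.float64)
center = np.array([w / 2, h / 2])
rot = np.array([[np.cos(theta), -np.sin(theta)], [np.sin(theta), np.cos(theta)]])

out = (corners - center) @ rot.T + center
new_w = int(out[:, 0].max() - out[:, 0].min())
new_h = int(out[:, 1].max() - out[:, 1].min())
assert (new_h, new_w) == (14, 14)  # a 10x10 canvas rotated by 45 degrees needs a ~14x14 canvas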
+    def _compute_output_canvas_size(self, *, expand, canvas_size, affine_matrix):
+        if not expand:
+            return canvas_size, (0.0, 0.0)
+
+        input_height, input_width = canvas_size
+
+        input_image_frame = np.array(
+            [
+                [0.0, 0.0, 1.0],
+                [0.0, input_height, 1.0],
+                [input_width, input_height, 1.0],
+                [input_width, 0.0, 1.0],
+            ],
+            dtype=np.float64,
+        )
+        output_image_frame = np.matmul(input_image_frame, affine_matrix.astype(input_image_frame.dtype).T)
+
+        recenter_x = float(np.min(output_image_frame[:, 0]))
+        recenter_y = float(np.min(output_image_frame[:, 1]))
+
+        output_width = int(np.max(output_image_frame[:, 0]) - recenter_x)
+        output_height = int(np.max(output_image_frame[:, 1]) - recenter_y)
+
+        return (output_height, output_width), (recenter_x, recenter_y)
+
+    def _recenter_bounding_boxes_after_expand(self, bounding_boxes, *, recenter_xy):
+        x, y = recenter_xy
+        if bounding_boxes.format is tv_tensors.BoundingBoxFormat.XYXY:
+            translate = [x, y, x, y]
+        else:
+            translate = [x, y, 0.0, 0.0]
+        return tv_tensors.wrap(
+            (bounding_boxes.to(torch.float64) - torch.tensor(translate)).to(bounding_boxes.dtype), like=bounding_boxes
+        )
+
+    def _reference_rotate_bounding_boxes(self, bounding_boxes, *, angle, expand, center):
+        if center is None:
+            center = [s * 0.5 for s in bounding_boxes.canvas_size[::-1]]
+        cx, cy = center
+
+        a = np.cos(angle * np.pi / 180.0)
+        b = np.sin(angle * np.pi / 180.0)
+        affine_matrix = np.array(
+            [
+                [a, b, cx - cx * a - b * cy],
+                [-b, a, cy + cx * b - a * cy],
+            ],
+        )
+
+        new_canvas_size, recenter_xy = self._compute_output_canvas_size(
+            expand=expand, canvas_size=bounding_boxes.canvas_size, affine_matrix=affine_matrix
+        )
+
+        output = reference_affine_bounding_boxes_helper(
+            bounding_boxes,
+            affine_matrix=affine_matrix,
+            new_canvas_size=new_canvas_size,
+            clamp=False,
+        )
+
+        return F.clamp_bounding_boxes(self._recenter_bounding_boxes_after_expand(output, recenter_xy=recenter_xy)).to(
+            bounding_boxes
+        )
+
+    @pytest.mark.parametrize("format", list(tv_tensors.BoundingBoxFormat))
+    @pytest.mark.parametrize("angle", _CORRECTNESS_AFFINE_KWARGS["angle"])
+    @pytest.mark.parametrize("expand", [False, True])
+    @pytest.mark.parametrize("center", _CORRECTNESS_AFFINE_KWARGS["center"])
+    def test_functional_bounding_boxes_correctness(self, format, angle, expand, center):
+        bounding_boxes = make_bounding_boxes(format=format)
+
+        actual = F.rotate(bounding_boxes, angle=angle, expand=expand, center=center)
+        expected = self._reference_rotate_bounding_boxes(bounding_boxes, angle=angle, expand=expand, center=center)
+
+        torch.testing.assert_close(actual, expected)
+        torch.testing.assert_close(F.get_size(actual), F.get_size(expected), atol=2 if expand else 0, rtol=0)
+
+    @pytest.mark.parametrize("format", list(tv_tensors.BoundingBoxFormat))
+    @pytest.mark.parametrize("expand", [False, True])
+    @pytest.mark.parametrize("center", _CORRECTNESS_AFFINE_KWARGS["center"])
+    @pytest.mark.parametrize("seed", list(range(5)))
+    def test_transform_bounding_boxes_correctness(self, format, expand, center, seed):
+        bounding_boxes = make_bounding_boxes(format=format)
+
+        transform = transforms.RandomRotation(**self._CORRECTNESS_TRANSFORM_AFFINE_RANGES, expand=expand, center=center)
+
+        torch.manual_seed(seed)
+        params = transform.make_params([bounding_boxes])
+
+        torch.manual_seed(seed)
+        actual = transform(bounding_boxes)
+
+        expected =
self._reference_rotate_bounding_boxes(bounding_boxes, **params, expand=expand, center=center) + + torch.testing.assert_close(actual, expected) + torch.testing.assert_close(F.get_size(actual), F.get_size(expected), atol=2 if expand else 0, rtol=0) + + @pytest.mark.parametrize("degrees", _EXHAUSTIVE_TYPE_TRANSFORM_AFFINE_RANGES["degrees"]) + @pytest.mark.parametrize("seed", list(range(10))) + def test_transformmake_params_bounds(self, degrees, seed): + transform = transforms.RandomRotation(degrees=degrees) + + torch.manual_seed(seed) + params = transform.make_params([]) + + if isinstance(degrees, (int, float)): + assert -degrees <= params["angle"] <= degrees + else: + assert degrees[0] <= params["angle"] <= degrees[1] + + @pytest.mark.parametrize("param", ["degrees", "center"]) + @pytest.mark.parametrize("value", [0, [0], [0, 0, 0]]) + def test_transform_sequence_len_errors(self, param, value): + if param == "degrees" and not isinstance(value, list): + return + + kwargs = {param: value} + if param != "degrees": + kwargs["degrees"] = 0 + + with pytest.raises( + ValueError if isinstance(value, list) else TypeError, match=f"{param} should be a sequence of length 2" + ): + transforms.RandomRotation(**kwargs) + + def test_transform_negative_degrees_error(self): + with pytest.raises(ValueError, match="If degrees is a single number, it must be positive"): + transforms.RandomAffine(degrees=-1) + + def test_transform_unknown_fill_error(self): + with pytest.raises(TypeError, match="Got inappropriate fill arg"): + transforms.RandomAffine(degrees=0, fill="fill") + + @pytest.mark.parametrize("size", [(11, 17), (16, 16)]) + @pytest.mark.parametrize("angle", [0, 90, 180, 270]) + @pytest.mark.parametrize("expand", [False, True]) + def test_functional_image_fast_path_correctness(self, size, angle, expand): + image = make_image(size, dtype=torch.uint8, device="cpu") + + actual = F.rotate(image, angle=angle, expand=expand) + expected = F.to_image(F.rotate(F.to_pil_image(image), angle=angle, expand=expand)) + + torch.testing.assert_close(actual, expected) + + +class TestContainerTransforms: + class BuiltinTransform(transforms.Transform): + def transform(self, inpt, params): + return inpt + + class PackedInputTransform(nn.Module): + def forward(self, sample): + assert len(sample) == 2 + return sample + + class UnpackedInputTransform(nn.Module): + def forward(self, image, label): + return image, label + + @pytest.mark.parametrize( + "transform_cls", [transforms.Compose, functools.partial(transforms.RandomApply, p=1), transforms.RandomOrder] + ) + @pytest.mark.parametrize( + "wrapped_transform_clss", + [ + [BuiltinTransform], + [PackedInputTransform], + [UnpackedInputTransform], + [BuiltinTransform, BuiltinTransform], + [PackedInputTransform, PackedInputTransform], + [UnpackedInputTransform, UnpackedInputTransform], + [BuiltinTransform, PackedInputTransform, BuiltinTransform], + [BuiltinTransform, UnpackedInputTransform, BuiltinTransform], + [PackedInputTransform, BuiltinTransform, PackedInputTransform], + [UnpackedInputTransform, BuiltinTransform, UnpackedInputTransform], + ], + ) + @pytest.mark.parametrize("unpack", [True, False]) + def test_packed_unpacked(self, transform_cls, wrapped_transform_clss, unpack): + needs_packed_inputs = any(issubclass(cls, self.PackedInputTransform) for cls in wrapped_transform_clss) + needs_unpacked_inputs = any(issubclass(cls, self.UnpackedInputTransform) for cls in wrapped_transform_clss) + assert not (needs_packed_inputs and needs_unpacked_inputs) + + transform = 
transform_cls([cls() for cls in wrapped_transform_clss])
+
+        image = make_image()
+        label = 3
+        packed_input = (image, label)
+
+        def call_transform():
+            if unpack:
+                return transform(*packed_input)
+            else:
+                return transform(packed_input)
+
+        if needs_unpacked_inputs and not unpack:
+            with pytest.raises(TypeError, match="missing 1 required positional argument"):
+                call_transform()
+        elif needs_packed_inputs and unpack:
+            with pytest.raises(TypeError, match="takes 2 positional arguments but 3 were given"):
+                call_transform()
+        else:
+            output = call_transform()
+
+            assert isinstance(output, tuple) and len(output) == 2
+            assert output[0] is image
+            assert output[1] is label
+
+    def test_compose(self):
+        transform = transforms.Compose(
+            [
+                transforms.RandomHorizontalFlip(p=1),
+                transforms.RandomVerticalFlip(p=1),
+            ]
+        )
+
+        input = make_image()
+
+        actual = check_transform(transform, input)
+        expected = F.vertical_flip(F.horizontal_flip(input))
+
+        assert_equal(actual, expected)
+
+    @pytest.mark.parametrize("p", [0.0, 1.0])
+    @pytest.mark.parametrize("sequence_type", [list, nn.ModuleList])
+    def test_random_apply(self, p, sequence_type):
+        transform = transforms.RandomApply(
+            sequence_type(
+                [
+                    transforms.RandomHorizontalFlip(p=1),
+                    transforms.RandomVerticalFlip(p=1),
+                ]
+            ),
+            p=p,
+        )
+
+        # This needs to be a pure tensor (or a PIL image), because otherwise check_transform skips the v1
+        # compatibility check
+        input = make_image_tensor()
+        output = check_transform(transform, input, check_v1_compatibility=issubclass(sequence_type, nn.ModuleList))
+
+        if p == 1:
+            assert_equal(output, F.vertical_flip(F.horizontal_flip(input)))
+        else:
+            assert output is input
+
+    @pytest.mark.parametrize("p", [(0, 1), (1, 0)])
+    def test_random_choice(self, p):
+        transform = transforms.RandomChoice(
+            [
+                transforms.RandomHorizontalFlip(p=1),
+                transforms.RandomVerticalFlip(p=1),
+            ],
+            p=p,
+        )
+
+        input = make_image()
+        output = check_transform(transform, input)
+
+        p_horz, p_vert = p
+        if p_horz:
+            assert_equal(output, F.horizontal_flip(input))
+        else:
+            assert_equal(output, F.vertical_flip(input))
+
+    def test_random_order(self):
+        transform = transforms.RandomOrder(
+            [
+                transforms.RandomHorizontalFlip(p=1),
+                transforms.RandomVerticalFlip(p=1),
+            ]
+        )
+
+        input = make_image()
+
+        actual = check_transform(transform, input)
+        # We can't really check whether the transforms are actually applied in random order. However, horizontal and
+        # vertical flip are commutative, so even under the assumption that the transform applies them in random
+        # order, we can use a fixed order to compute the expected value.
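That commutativity claim is easy to verify in isolation; a minimal sketch with a plain (C, H, W) tensor:

import torch
from torchvision.transforms.v2 import functional as F

img = torch.arange(24, dtype=torch.uint8).reshape(2, 3, 4)
torch.testing.assert_close(
    F.vertical_flip(F.horizontal_flip(img)),
    F.horizontal_flip(F.vertical_flip(img)),
)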
+ expected = F.vertical_flip(F.horizontal_flip(input)) + + assert_equal(actual, expected) + + def test_errors(self): + for cls in [transforms.Compose, transforms.RandomChoice, transforms.RandomOrder]: + with pytest.raises(TypeError, match="Argument transforms should be a sequence of callables"): + cls(lambda x: x) + + with pytest.raises(ValueError, match="at least one transform"): + transforms.Compose([]) + + for p in [-1, 2]: + with pytest.raises(ValueError, match=re.escape("value in the interval [0.0, 1.0]")): + transforms.RandomApply([lambda x: x], p=p) + + for transforms_, p in [([lambda x: x], []), ([], [1.0])]: + with pytest.raises(ValueError, match="Length of p doesn't match the number of transforms"): + transforms.RandomChoice(transforms_, p=p) + + +class TestToDtype: + @pytest.mark.parametrize( + ("kernel", "make_input"), + [ + (F.to_dtype_image, make_image_tensor), + (F.to_dtype_image, make_image), + (F.to_dtype_video, make_video), + ], + ) + @pytest.mark.parametrize("input_dtype", [torch.float32, torch.float64, torch.uint8]) + @pytest.mark.parametrize("output_dtype", [torch.float32, torch.float64, torch.uint8]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + @pytest.mark.parametrize("scale", (True, False)) + def test_kernel(self, kernel, make_input, input_dtype, output_dtype, device, scale): + check_kernel( + kernel, + make_input(dtype=input_dtype, device=device), + dtype=output_dtype, + scale=scale, + ) + + @pytest.mark.parametrize("make_input", [make_image_tensor, make_image, make_video]) + @pytest.mark.parametrize("input_dtype", [torch.float32, torch.float64, torch.uint8]) + @pytest.mark.parametrize("output_dtype", [torch.float32, torch.float64, torch.uint8]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + @pytest.mark.parametrize("scale", (True, False)) + def test_functional(self, make_input, input_dtype, output_dtype, device, scale): + check_functional( + F.to_dtype, + make_input(dtype=input_dtype, device=device), + dtype=output_dtype, + scale=scale, + ) + + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image, make_bounding_boxes, make_segmentation_mask, make_video], + ) + @pytest.mark.parametrize("input_dtype", [torch.float32, torch.float64, torch.uint8]) + @pytest.mark.parametrize("output_dtype", [torch.float32, torch.float64, torch.uint8]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + @pytest.mark.parametrize("scale", (True, False)) + @pytest.mark.parametrize("as_dict", (True, False)) + def test_transform(self, make_input, input_dtype, output_dtype, device, scale, as_dict): + input = make_input(dtype=input_dtype, device=device) + if as_dict: + output_dtype = {type(input): output_dtype} + check_transform(transforms.ToDtype(dtype=output_dtype, scale=scale), input, check_sample_input=not as_dict) + + def reference_convert_dtype_image_tensor(self, image, dtype=torch.float, scale=False): + input_dtype = image.dtype + output_dtype = dtype + + if not scale: + return image.to(dtype) + + if output_dtype == input_dtype: + return image + + def fn(value): + if input_dtype.is_floating_point: + if output_dtype.is_floating_point: + return value + else: + return round(decimal.Decimal(value) * torch.iinfo(output_dtype).max) + else: + input_max_value = torch.iinfo(input_dtype).max + + if output_dtype.is_floating_point: + return float(decimal.Decimal(value) / input_max_value) + else: + output_max_value = torch.iinfo(output_dtype).max + + if input_max_value > output_max_value: + factor = (input_max_value + 1) // (output_max_value + 1) + return 
value / factor + else: + factor = (output_max_value + 1) // (input_max_value + 1) + return value * factor + + return torch.tensor(tree_map(fn, image.tolist())).to(dtype=output_dtype, device=image.device) + + @pytest.mark.parametrize("input_dtype", [torch.float32, torch.float64, torch.uint8, torch.uint16]) + @pytest.mark.parametrize("output_dtype", [torch.float32, torch.float64, torch.uint8, torch.uint16]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + @pytest.mark.parametrize("scale", (True, False)) + def test_image_correctness(self, input_dtype, output_dtype, device, scale): + if input_dtype.is_floating_point and output_dtype == torch.int64: + pytest.xfail("float to int64 conversion is not supported") + if input_dtype == torch.uint8 and output_dtype == torch.uint16 and device == "cuda": + pytest.xfail("uint8 to uint16 conversion is not supported on cuda") + + input = make_image(dtype=input_dtype, device=device) + + out = F.to_dtype(input, dtype=output_dtype, scale=scale) + expected = self.reference_convert_dtype_image_tensor(input, dtype=output_dtype, scale=scale) + + if input_dtype.is_floating_point and not output_dtype.is_floating_point and scale: + torch.testing.assert_close(out, expected, atol=1, rtol=0) + else: + torch.testing.assert_close(out, expected) + + def was_scaled(self, inpt): + # this assumes the target dtype is float + return inpt.max() <= 1 + + def make_inpt_with_bbox_and_mask(self, make_input): + H, W = 10, 10 + inpt_dtype = torch.uint8 + bbox_dtype = torch.float32 + mask_dtype = torch.bool + sample = { + "inpt": make_input(size=(H, W), dtype=inpt_dtype), + "bbox": make_bounding_boxes(canvas_size=(H, W), dtype=bbox_dtype), + "mask": make_detection_masks(size=(H, W), dtype=mask_dtype), + } + + return sample, inpt_dtype, bbox_dtype, mask_dtype + + @pytest.mark.parametrize("make_input", (make_image_tensor, make_image, make_video)) + @pytest.mark.parametrize("scale", (True, False)) + def test_dtype_not_a_dict(self, make_input, scale): + # assert only inpt gets transformed when dtype isn't a dict + + sample, inpt_dtype, bbox_dtype, mask_dtype = self.make_inpt_with_bbox_and_mask(make_input) + out = transforms.ToDtype(dtype=torch.float32, scale=scale)(sample) + + assert out["inpt"].dtype != inpt_dtype + assert out["inpt"].dtype == torch.float32 + if scale: + assert self.was_scaled(out["inpt"]) + else: + assert not self.was_scaled(out["inpt"]) + assert out["bbox"].dtype == bbox_dtype + assert out["mask"].dtype == mask_dtype + + @pytest.mark.parametrize("make_input", (make_image_tensor, make_image, make_video)) + def test_others_catch_all_and_none(self, make_input): + # make sure "others" works as a catch-all and that None means no conversion + + sample, inpt_dtype, bbox_dtype, mask_dtype = self.make_inpt_with_bbox_and_mask(make_input) + out = transforms.ToDtype(dtype={tv_tensors.Mask: torch.int64, "others": None})(sample) + assert out["inpt"].dtype == inpt_dtype + assert out["bbox"].dtype == bbox_dtype + assert out["mask"].dtype != mask_dtype + assert out["mask"].dtype == torch.int64 + + @pytest.mark.parametrize("make_input", (make_image_tensor, make_image, make_video)) + def test_typical_use_case(self, make_input): + # Typical use-case: want to convert dtype and scale for inpt and just dtype for masks. 
+ # This just makes sure we now have a decent API for this + + sample, inpt_dtype, bbox_dtype, mask_dtype = self.make_inpt_with_bbox_and_mask(make_input) + out = transforms.ToDtype( + dtype={type(sample["inpt"]): torch.float32, tv_tensors.Mask: torch.int64, "others": None}, scale=True + )(sample) + assert out["inpt"].dtype != inpt_dtype + assert out["inpt"].dtype == torch.float32 + assert self.was_scaled(out["inpt"]) + assert out["bbox"].dtype == bbox_dtype + assert out["mask"].dtype != mask_dtype + assert out["mask"].dtype == torch.int64 + + @pytest.mark.parametrize("make_input", (make_image_tensor, make_image, make_video)) + def test_errors_warnings(self, make_input): + sample, inpt_dtype, bbox_dtype, mask_dtype = self.make_inpt_with_bbox_and_mask(make_input) + + with pytest.raises(ValueError, match="No dtype was specified for"): + out = transforms.ToDtype(dtype={tv_tensors.Mask: torch.float32})(sample) + with pytest.warns(UserWarning, match=re.escape("plain `torch.Tensor` will *not* be transformed")): + transforms.ToDtype(dtype={torch.Tensor: torch.float32, tv_tensors.Image: torch.float32}) + with pytest.warns(UserWarning, match="no scaling will be done"): + out = transforms.ToDtype(dtype={"others": None}, scale=True)(sample) + assert out["inpt"].dtype == inpt_dtype + assert out["bbox"].dtype == bbox_dtype + assert out["mask"].dtype == mask_dtype + + def test_uint16(self): + # These checks are probably already covered above but since uint16 is a + # newly supported dtype, we want to be extra careful, hence this + # explicit test + img_uint16 = torch.randint(0, 65535, (256, 512), dtype=torch.uint16) + + img_uint8 = F.to_dtype(img_uint16, torch.uint8, scale=True) + img_float32 = F.to_dtype(img_uint16, torch.float32, scale=True) + img_int32 = F.to_dtype(img_uint16, torch.int32, scale=True) + + assert_equal(img_uint8, (img_uint16 / 256).to(torch.uint8)) + assert_close(img_float32, (img_uint16 / 65535)) + + assert_close(F.to_dtype(img_float32, torch.uint16, scale=True), img_uint16, rtol=0, atol=1) + # Ideally we'd check against (img_uint16 & 0xFF00) but bitwise and isn't supported for it yet + # so we simulate it by scaling down and up again. 
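+ # (with scale=True, to_dtype converts value ranges and not just dtypes: uint16 -> uint8 divides by
+ # 256 and uint8 -> uint16 multiplies by 256, so the round trip zeroes the low byte, which is exactly
+ # what masking with 0xFF00 would do; per the reference above, the int-to-int factor is the ratio of
+ # the (max + 1) values of the two dtypes)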
+ assert_equal(F.to_dtype(img_uint8, torch.uint16, scale=True), ((img_uint16 / 256).to(torch.uint16) * 256))
+ assert_equal(F.to_dtype(img_int32, torch.uint16, scale=True), img_uint16)
+
+ assert_equal(F.to_dtype(img_float32, torch.uint8, scale=True), img_uint8)
+ assert_close(F.to_dtype(img_uint8, torch.float32, scale=True), img_float32, rtol=0, atol=1e-2)
+
+
+class TestAdjustBrightness:
+ _CORRECTNESS_BRIGHTNESS_FACTORS = [0.5, 0.0, 1.0, 5.0]
+ _DEFAULT_BRIGHTNESS_FACTOR = _CORRECTNESS_BRIGHTNESS_FACTORS[0]
+
+ @pytest.mark.parametrize(
+ ("kernel", "make_input"),
+ [
+ (F.adjust_brightness_image, make_image),
+ (F.adjust_brightness_video, make_video),
+ ],
+ )
+ @pytest.mark.parametrize("dtype", [torch.float32, torch.uint8])
+ @pytest.mark.parametrize("device", cpu_and_cuda())
+ def test_kernel(self, kernel, make_input, dtype, device):
+ check_kernel(kernel, make_input(dtype=dtype, device=device), brightness_factor=self._DEFAULT_BRIGHTNESS_FACTOR)
+
+ @pytest.mark.parametrize("make_input", [make_image_tensor, make_image_pil, make_image, make_video])
+ def test_functional(self, make_input):
+ check_functional(F.adjust_brightness, make_input(), brightness_factor=self._DEFAULT_BRIGHTNESS_FACTOR)
+
+ @pytest.mark.parametrize(
+ ("kernel", "input_type"),
+ [
+ (F.adjust_brightness_image, torch.Tensor),
+ (F._color._adjust_brightness_image_pil, PIL.Image.Image),
+ (F.adjust_brightness_image, tv_tensors.Image),
+ (F.adjust_brightness_video, tv_tensors.Video),
+ ],
+ )
+ def test_functional_signature(self, kernel, input_type):
+ check_functional_kernel_signature_match(F.adjust_brightness, kernel=kernel, input_type=input_type)
+
+ @pytest.mark.parametrize("brightness_factor", _CORRECTNESS_BRIGHTNESS_FACTORS)
+ def test_image_correctness(self, brightness_factor):
+ image = make_image(dtype=torch.uint8, device="cpu")
+
+ actual = F.adjust_brightness(image, brightness_factor=brightness_factor)
+ expected = F.to_image(F.adjust_brightness(F.to_pil_image(image), brightness_factor=brightness_factor))
+
+ torch.testing.assert_close(actual, expected)
+
+
+class TestCutMixMixUp:
+ class DummyDataset:
+ def __init__(self, size, num_classes, one_hot_labels):
+ self.size = size
+ self.num_classes = num_classes
+ self.one_hot_labels = one_hot_labels
+ assert size < num_classes
+
+ def __getitem__(self, idx):
+ img = torch.rand(3, 100, 100)
+ label = idx # This ensures all labels in a batch are unique and makes testing easier
+ if self.one_hot_labels:
+ label = torch.nn.functional.one_hot(torch.tensor(label), num_classes=self.num_classes)
+ return img, label
+
+ def __len__(self):
+ return self.size
+
+ @pytest.mark.parametrize("T", [transforms.CutMix, transforms.MixUp])
+ @pytest.mark.parametrize("one_hot_labels", (True, False))
+ def test_supported_input_structure(self, T, one_hot_labels):
+
+ batch_size = 32
+ num_classes = 100
+
+ dataset = self.DummyDataset(size=batch_size, num_classes=num_classes, one_hot_labels=one_hot_labels)
+
+ cutmix_mixup = T(num_classes=num_classes)
+
+ dl = DataLoader(dataset, batch_size=batch_size)
+
+ # Input sanity checks
+ img, target = next(iter(dl))
+ input_img_size = img.shape[-3:]
+ assert isinstance(img, torch.Tensor) and isinstance(target, torch.Tensor)
+ assert target.shape == ((batch_size, num_classes) if one_hot_labels else (batch_size,))
+
+ def check_output(img, target):
+ assert img.shape == (batch_size, *input_img_size)
+ assert target.shape == (batch_size, num_classes)
+ torch.testing.assert_close(target.sum(axis=-1), torch.ones(batch_size))
+ num_non_zero_labels = (target != 0).sum(axis=-1)
+ assert (num_non_zero_labels == 2).all()
+
+ # After DataLoader, as unpacked input
+ img, target = next(iter(dl))
+ assert target.shape == ((batch_size, num_classes) if one_hot_labels else (batch_size,))
+ img, target = cutmix_mixup(img, target)
+ check_output(img, target)
+
+ # After DataLoader, as packed input
+ packed_from_dl = next(iter(dl))
+ assert isinstance(packed_from_dl, list)
+ img, target = cutmix_mixup(packed_from_dl)
+ check_output(img, target)
+
+ # As collation function. We expect default_collate to be used by users.
+ def collate_fn_1(batch):
+ return cutmix_mixup(default_collate(batch))
+
+ def collate_fn_2(batch):
+ return cutmix_mixup(*default_collate(batch))
+
+ for collate_fn in (collate_fn_1, collate_fn_2):
+ dl = DataLoader(dataset, batch_size=batch_size, collate_fn=collate_fn)
+ img, target = next(iter(dl))
+ check_output(img, target)
+
+ @needs_cuda
+ @pytest.mark.parametrize("T", [transforms.CutMix, transforms.MixUp])
+ def test_cpu_vs_gpu(self, T):
+ num_classes = 10
+ batch_size = 3
+ H, W = 12, 12
+
+ imgs = torch.rand(batch_size, 3, H, W)
+ labels = torch.randint(0, num_classes, (batch_size,))
+ cutmix_mixup = T(alpha=0.5, num_classes=num_classes)
+
+ _check_kernel_cuda_vs_cpu(cutmix_mixup, imgs, labels, rtol=None, atol=None)
+
+ @pytest.mark.parametrize("T", [transforms.CutMix, transforms.MixUp])
+ def test_error(self, T):
+
+ num_classes = 10
+ batch_size = 9
+
+ imgs = torch.rand(batch_size, 3, 12, 12)
+ cutmix_mixup = T(alpha=0.5, num_classes=num_classes)
+
+ for input_with_bad_type in (
+ F.to_pil_image(imgs[0]),
+ tv_tensors.Mask(torch.rand(12, 12)),
+ tv_tensors.BoundingBoxes(torch.rand(2, 4), format="XYXY", canvas_size=12),
+ ):
+ with pytest.raises(ValueError, match="does not support PIL images, "):
+ cutmix_mixup(input_with_bad_type)
+
+ with pytest.raises(ValueError, match="Could not infer where the labels are"):
+ cutmix_mixup({"img": imgs, "Nothing_else": 3})
+
+ with pytest.raises(ValueError, match="labels should be index based"):
+ # Note: the error message isn't ideal, but that's because the label heuristic found the img as the label
+ # It's OK, it's an edge-case.
The important thing is that this fails loudly instead of passing silently + cutmix_mixup(imgs) + + with pytest.raises(ValueError, match="When using the default labels_getter"): + cutmix_mixup(imgs, "not_a_tensor") + + with pytest.raises(ValueError, match="Expected a batched input with 4 dims"): + cutmix_mixup(imgs[None, None], torch.randint(0, num_classes, size=(batch_size,))) + + with pytest.raises(ValueError, match="does not match the batch size of the labels"): + cutmix_mixup(imgs, torch.randint(0, num_classes, size=(batch_size + 1,))) + + with pytest.raises(ValueError, match="When passing 2D labels"): + wrong_num_classes = num_classes + 1 + T(alpha=0.5, num_classes=num_classes)(imgs, torch.randint(0, 2, size=(batch_size, wrong_num_classes))) + + with pytest.raises(ValueError, match="but got a tensor of shape"): + cutmix_mixup(imgs, torch.randint(0, 2, size=(2, 3, 4))) + + with pytest.raises(ValueError, match="num_classes must be passed"): + T(alpha=0.5)(imgs, torch.randint(0, num_classes, size=(batch_size,))) + + +@pytest.mark.parametrize("key", ("labels", "LABELS", "LaBeL", "SOME_WEIRD_KEY_THAT_HAS_LABeL_IN_IT")) +@pytest.mark.parametrize("sample_type", (tuple, list, dict)) +def test_labels_getter_default_heuristic(key, sample_type): + labels = torch.arange(10) + sample = {key: labels, "another_key": "whatever"} + if sample_type is not dict: + sample = sample_type((None, sample, "whatever_again")) + assert transforms._utils._find_labels_default_heuristic(sample) is labels + + if key.lower() != "labels": + # If "labels" is in the dict (case-insensitive), + # it takes precedence over other keys which would otherwise be a match + d = {key: "something_else", "labels": labels} + assert transforms._utils._find_labels_default_heuristic(d) is labels + + +class TestShapeGetters: + @pytest.mark.parametrize( + ("kernel", "make_input"), + [ + (F.get_dimensions_image, make_image_tensor), + (F._meta._get_dimensions_image_pil, make_image_pil), + (F.get_dimensions_image, make_image), + (F.get_dimensions_video, make_video), + ], + ) + def test_get_dimensions(self, kernel, make_input): + size = (10, 10) + color_space, num_channels = "RGB", 3 + + input = make_input(size, color_space=color_space) + + assert kernel(input) == F.get_dimensions(input) == [num_channels, *size] + + @pytest.mark.parametrize( + ("kernel", "make_input"), + [ + (F.get_num_channels_image, make_image_tensor), + (F._meta._get_num_channels_image_pil, make_image_pil), + (F.get_num_channels_image, make_image), + (F.get_num_channels_video, make_video), + ], + ) + def test_get_num_channels(self, kernel, make_input): + color_space, num_channels = "RGB", 3 + + input = make_input(color_space=color_space) + + assert kernel(input) == F.get_num_channels(input) == num_channels + + @pytest.mark.parametrize( + ("kernel", "make_input"), + [ + (F.get_size_image, make_image_tensor), + (F._meta._get_size_image_pil, make_image_pil), + (F.get_size_image, make_image), + (F.get_size_bounding_boxes, make_bounding_boxes), + (F.get_size_mask, make_detection_masks), + (F.get_size_mask, make_segmentation_mask), + (F.get_size_video, make_video), + ], + ) + def test_get_size(self, kernel, make_input): + size = (10, 10) + + input = make_input(size) + + assert kernel(input) == F.get_size(input) == list(size) + + @pytest.mark.parametrize( + ("kernel", "make_input"), + [ + (F.get_num_frames_video, make_video_tensor), + (F.get_num_frames_video, make_video), + ], + ) + def test_get_num_frames(self, kernel, make_input): + num_frames = 4 + + input = 
make_input(num_frames=num_frames)
+
+ assert kernel(input) == F.get_num_frames(input) == num_frames
+
+ @pytest.mark.parametrize(
+ ("functional", "make_input"),
+ [
+ (F.get_dimensions, make_bounding_boxes),
+ (F.get_dimensions, make_detection_masks),
+ (F.get_dimensions, make_segmentation_mask),
+ (F.get_num_channels, make_bounding_boxes),
+ (F.get_num_channels, make_detection_masks),
+ (F.get_num_channels, make_segmentation_mask),
+ (F.get_num_frames, make_image_pil),
+ (F.get_num_frames, make_image),
+ (F.get_num_frames, make_bounding_boxes),
+ (F.get_num_frames, make_detection_masks),
+ (F.get_num_frames, make_segmentation_mask),
+ ],
+ )
+ def test_unsupported_types(self, functional, make_input):
+ input = make_input()
+
+ with pytest.raises(TypeError, match=re.escape(str(type(input)))):
+ functional(input)
+
+
+class TestRegisterKernel:
+ @pytest.mark.parametrize("functional", (F.resize, "resize"))
+ def test_register_kernel(self, functional):
+ class CustomTVTensor(tv_tensors.TVTensor):
+ pass
+
+ kernel_was_called = False
+
+ @F.register_kernel(functional, CustomTVTensor)
+ def new_resize(dp, *args, **kwargs):
+ nonlocal kernel_was_called
+ kernel_was_called = True
+ return dp
+
+ t = transforms.Resize(size=(224, 224), antialias=True)
+
+ my_dp = CustomTVTensor(torch.rand(3, 10, 10))
+ out = t(my_dp)
+ assert out is my_dp
+ assert kernel_was_called
+
+ # Sanity check to make sure we didn't override the kernel of other types
+ assert t(torch.rand(3, 10, 10)).shape == (3, 224, 224)
+ assert t(tv_tensors.Image(torch.rand(3, 10, 10))).shape == (3, 224, 224)
+
+ def test_errors(self):
+ with pytest.raises(ValueError, match="Could not find functional with name"):
+ F.register_kernel("bad_name", tv_tensors.Image)
+
+ with pytest.raises(ValueError, match="Kernels can only be registered on functionals"):
+ F.register_kernel(tv_tensors.Image, F.resize)
+
+ with pytest.raises(ValueError, match="Kernels can only be registered for subclasses"):
+ F.register_kernel(F.resize, object)
+
+ with pytest.raises(ValueError, match="cannot be registered for the builtin tv_tensor classes"):
+ F.register_kernel(F.resize, tv_tensors.Image)(F.resize_image)
+
+ class CustomTVTensor(tv_tensors.TVTensor):
+ pass
+
+ def resize_custom_tv_tensor():
+ pass
+
+ F.register_kernel(F.resize, CustomTVTensor)(resize_custom_tv_tensor)
+
+ with pytest.raises(ValueError, match="already has a kernel registered for type"):
+ F.register_kernel(F.resize, CustomTVTensor)(resize_custom_tv_tensor)
+
+
+class TestGetKernel:
+ # We are using F.resize as the functional and the kernels below as proxies. Any other functional / kernel
+ # combination would also be fine
+ KERNELS = {
+ torch.Tensor: F.resize_image,
+ PIL.Image.Image: F._geometry._resize_image_pil,
+ tv_tensors.Image: F.resize_image,
+ tv_tensors.BoundingBoxes: F.resize_bounding_boxes,
+ tv_tensors.Mask: F.resize_mask,
+ tv_tensors.Video: F.resize_video,
+ }
+
+ @pytest.mark.parametrize("input_type", [str, int, object])
+ def test_unsupported_types(self, input_type):
+ with pytest.raises(TypeError, match="supports inputs of type"):
+ _get_kernel(F.resize, input_type)
+
+ def test_exact_match(self):
+ # We cannot use F.resize together with self.KERNELS mapping directly here, since this is only the
+ # ideal wrapping. Practically, we have an intermediate wrapper layer. Thus, we create a new resize functional
+ # here, register the kernels without the wrapper, and check the exact matching afterwards.
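+ # For comparison, the public registration path exercised in TestRegisterKernel above is a plain
+ # decorator. A minimal sketch with a hypothetical custom class would be:
+ #
+ # @F.register_kernel(F.resize, MyTVTensor)
+ # def resize_my_tv_tensor(inpt, size, **kwargs): ...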
+ def resize_with_pure_kernels():
+ pass
+
+ for input_type, kernel in self.KERNELS.items():
+ _register_kernel_internal(resize_with_pure_kernels, input_type, tv_tensor_wrapper=False)(kernel)
+
+ assert _get_kernel(resize_with_pure_kernels, input_type) is kernel
+
+ def test_builtin_tv_tensor_subclass(self):
+ # We cannot use F.resize together with self.KERNELS mapping directly here, since this is only the
+ # ideal wrapping. Practically, we have an intermediate wrapper layer. Thus, we create a new resize functional
+ # here, register the kernels without the wrapper, and check if subclasses of our builtin tv_tensors get dispatched
+ # to the kernel of the corresponding superclass
+ def resize_with_pure_kernels():
+ pass
+
+ class MyImage(tv_tensors.Image):
+ pass
+
+ class MyBoundingBoxes(tv_tensors.BoundingBoxes):
+ pass
+
+ class MyMask(tv_tensors.Mask):
+ pass
+
+ class MyVideo(tv_tensors.Video):
+ pass
+
+ for custom_tv_tensor_subclass in [
+ MyImage,
+ MyBoundingBoxes,
+ MyMask,
+ MyVideo,
+ ]:
+ builtin_tv_tensor_class = custom_tv_tensor_subclass.__mro__[1]
+ builtin_tv_tensor_kernel = self.KERNELS[builtin_tv_tensor_class]
+ _register_kernel_internal(resize_with_pure_kernels, builtin_tv_tensor_class, tv_tensor_wrapper=False)(
+ builtin_tv_tensor_kernel
+ )
+
+ assert _get_kernel(resize_with_pure_kernels, custom_tv_tensor_subclass) is builtin_tv_tensor_kernel
+
+ def test_tv_tensor_subclass(self):
+ class MyTVTensor(tv_tensors.TVTensor):
+ pass
+
+ with pytest.raises(TypeError, match="supports inputs of type"):
+ _get_kernel(F.resize, MyTVTensor)
+
+ def resize_my_tv_tensor():
+ pass
+
+ _register_kernel_internal(F.resize, MyTVTensor, tv_tensor_wrapper=False)(resize_my_tv_tensor)
+
+ assert _get_kernel(F.resize, MyTVTensor) is resize_my_tv_tensor
+
+ def test_pil_image_subclass(self):
+ opened_image = PIL.Image.open(Path(__file__).parent / "assets" / "encode_jpeg" / "grace_hopper_517x606.jpg")
+ loaded_image = opened_image.convert("RGB")
+
+ # check the assumptions
+ assert isinstance(opened_image, PIL.Image.Image)
+ assert type(opened_image) is not PIL.Image.Image
+
+ assert type(loaded_image) is PIL.Image.Image
+
+ size = [17, 11]
+ for image in [opened_image, loaded_image]:
+ kernel = _get_kernel(F.resize, type(image))
+
+ output = kernel(image, size=size)
+
+ assert F.get_size(output) == size
+
+
+class TestPermuteChannels:
+ _DEFAULT_PERMUTATION = [2, 0, 1]
+
+ @pytest.mark.parametrize(
+ ("kernel", "make_input"),
+ [
+ (F.permute_channels_image, make_image_tensor),
+ # FIXME
+ # check_kernel does not support PIL kernel, but it should
+ (F.permute_channels_image, make_image),
+ (F.permute_channels_video, make_video),
+ ],
+ )
+ @pytest.mark.parametrize("dtype", [torch.float32, torch.uint8])
+ @pytest.mark.parametrize("device", cpu_and_cuda())
+ def test_kernel(self, kernel, make_input, dtype, device):
+ check_kernel(kernel, make_input(dtype=dtype, device=device), permutation=self._DEFAULT_PERMUTATION)
+
+ @pytest.mark.parametrize("make_input", [make_image_tensor, make_image_pil, make_image, make_video])
+ def test_functional(self, make_input):
+ check_functional(F.permute_channels, make_input(), permutation=self._DEFAULT_PERMUTATION)
+
+ @pytest.mark.parametrize(
+ ("kernel", "input_type"),
+ [
+ (F.permute_channels_image, torch.Tensor),
+ (F._color._permute_channels_image_pil, PIL.Image.Image),
+ (F.permute_channels_image, tv_tensors.Image),
+ (F.permute_channels_video, tv_tensors.Video),
+ ],
+ )
+ def test_functional_signature(self, kernel, input_type):
+
check_functional_kernel_signature_match(F.permute_channels, kernel=kernel, input_type=input_type) + + def reference_image_correctness(self, image, permutation): + channel_images = image.split(1, dim=-3) + permuted_channel_images = [channel_images[channel_idx] for channel_idx in permutation] + return tv_tensors.Image(torch.concat(permuted_channel_images, dim=-3)) + + @pytest.mark.parametrize("permutation", [[2, 0, 1], [1, 2, 0], [2, 0, 1], [0, 1, 2]]) + @pytest.mark.parametrize("batch_dims", [(), (2,), (2, 1)]) + def test_image_correctness(self, permutation, batch_dims): + image = make_image(batch_dims=batch_dims) + + actual = F.permute_channels(image, permutation=permutation) + expected = self.reference_image_correctness(image, permutation=permutation) + + torch.testing.assert_close(actual, expected) + + +class TestElastic: + def _make_displacement(self, inpt): + return torch.rand( + 1, + *F.get_size(inpt), + 2, + dtype=torch.float32, + device=inpt.device if isinstance(inpt, torch.Tensor) else "cpu", + ) + + @param_value_parametrization( + interpolation=[transforms.InterpolationMode.NEAREST, transforms.InterpolationMode.BILINEAR], + fill=EXHAUSTIVE_TYPE_FILLS, + ) + @pytest.mark.parametrize("dtype", [torch.float32, torch.uint8, torch.float16]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_kernel_image(self, param, value, dtype, device): + image = make_image_tensor(dtype=dtype, device=device) + + check_kernel( + F.elastic_image, + image, + displacement=self._make_displacement(image), + **{param: value}, + check_scripted_vs_eager=not (param == "fill" and isinstance(value, (int, float))), + check_cuda_vs_cpu=dtype is not torch.float16, + ) + + @pytest.mark.parametrize("format", list(tv_tensors.BoundingBoxFormat)) + @pytest.mark.parametrize("dtype", [torch.float32, torch.int64]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_kernel_bounding_boxes(self, format, dtype, device): + bounding_boxes = make_bounding_boxes(format=format, dtype=dtype, device=device) + + check_kernel( + F.elastic_bounding_boxes, + bounding_boxes, + format=bounding_boxes.format, + canvas_size=bounding_boxes.canvas_size, + displacement=self._make_displacement(bounding_boxes), + ) + + @pytest.mark.parametrize("make_mask", [make_segmentation_mask, make_detection_masks]) + def test_kernel_mask(self, make_mask): + mask = make_mask() + check_kernel(F.elastic_mask, mask, displacement=self._make_displacement(mask)) + + def test_kernel_video(self): + video = make_video() + check_kernel(F.elastic_video, video, displacement=self._make_displacement(video)) + + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_bounding_boxes, make_segmentation_mask, make_video], + ) + def test_functional(self, make_input): + input = make_input() + check_functional(F.elastic, input, displacement=self._make_displacement(input)) + + @pytest.mark.parametrize( + ("kernel", "input_type"), + [ + (F.elastic_image, torch.Tensor), + (F._geometry._elastic_image_pil, PIL.Image.Image), + (F.elastic_image, tv_tensors.Image), + (F.elastic_bounding_boxes, tv_tensors.BoundingBoxes), + (F.elastic_mask, tv_tensors.Mask), + (F.elastic_video, tv_tensors.Video), + ], + ) + def test_functional_signature(self, kernel, input_type): + check_functional_kernel_signature_match(F.elastic, kernel=kernel, input_type=input_type) + + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_bounding_boxes, make_segmentation_mask, make_video], + ) + def 
test_displacement_error(self, make_input): + input = make_input() + + with pytest.raises(TypeError, match="displacement should be a Tensor"): + F.elastic(input, displacement=None) + + with pytest.raises(ValueError, match="displacement shape should be"): + F.elastic(input, displacement=torch.rand(F.get_size(input))) + + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_bounding_boxes, make_segmentation_mask, make_video], + ) + # ElasticTransform needs larger images to avoid the needed internal padding being larger than the actual image + @pytest.mark.parametrize("size", [(163, 163), (72, 333), (313, 95)]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_transform(self, make_input, size, device): + # We have to skip that test on M1 because it's flaky: Mismatched elements: 35 / 89205 (0.0%) + # See https://github.com/pytorch/vision/issues/8154 + # All other platforms are fine, so the differences do not come from something we own in torchvision + check_v1_compatibility = False if sys.platform == "darwin" else dict(rtol=0, atol=1) + + check_transform( + transforms.ElasticTransform(), + make_input(size, device=device), + check_v1_compatibility=check_v1_compatibility, + ) + + +class TestToPureTensor: + def test_correctness(self): + input = { + "img": make_image(), + "img_tensor": make_image_tensor(), + "img_pil": make_image_pil(), + "mask": make_detection_masks(), + "video": make_video(), + "bbox": make_bounding_boxes(), + "str": "str", + } + + out = transforms.ToPureTensor()(input) + + for input_value, out_value in zip(input.values(), out.values()): + if isinstance(input_value, tv_tensors.TVTensor): + assert isinstance(out_value, torch.Tensor) and not isinstance(out_value, tv_tensors.TVTensor) + else: + assert isinstance(out_value, type(input_value)) + + +class TestCrop: + INPUT_SIZE = (21, 11) + + CORRECTNESS_CROP_KWARGS = [ + # center + dict(top=5, left=5, height=10, width=5), + # larger than input, i.e. 
pad + dict(top=-5, left=-5, height=30, width=20), + # sides: left, right, top, bottom + dict(top=-5, left=-5, height=30, width=10), + dict(top=-5, left=5, height=30, width=10), + dict(top=-5, left=-5, height=20, width=20), + dict(top=5, left=-5, height=20, width=20), + # corners: top-left, top-right, bottom-left, bottom-right + dict(top=-5, left=-5, height=20, width=10), + dict(top=-5, left=5, height=20, width=10), + dict(top=5, left=-5, height=20, width=10), + dict(top=5, left=5, height=20, width=10), + ] + MINIMAL_CROP_KWARGS = CORRECTNESS_CROP_KWARGS[0] + + @pytest.mark.parametrize("kwargs", CORRECTNESS_CROP_KWARGS) + @pytest.mark.parametrize("dtype", [torch.uint8, torch.float32]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_kernel_image(self, kwargs, dtype, device): + check_kernel(F.crop_image, make_image(self.INPUT_SIZE, dtype=dtype, device=device), **kwargs) + + @pytest.mark.parametrize("kwargs", CORRECTNESS_CROP_KWARGS) + @pytest.mark.parametrize("format", list(tv_tensors.BoundingBoxFormat)) + @pytest.mark.parametrize("dtype", [torch.float32, torch.int64]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_kernel_bounding_box(self, kwargs, format, dtype, device): + bounding_boxes = make_bounding_boxes(self.INPUT_SIZE, format=format, dtype=dtype, device=device) + check_kernel(F.crop_bounding_boxes, bounding_boxes, format=format, **kwargs) + + @pytest.mark.parametrize("make_mask", [make_segmentation_mask, make_detection_masks]) + def test_kernel_mask(self, make_mask): + check_kernel(F.crop_mask, make_mask(self.INPUT_SIZE), **self.MINIMAL_CROP_KWARGS) + + def test_kernel_video(self): + check_kernel(F.crop_video, make_video(self.INPUT_SIZE), **self.MINIMAL_CROP_KWARGS) + + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_bounding_boxes, make_segmentation_mask, make_video], + ) + def test_functional(self, make_input): + check_functional(F.crop, make_input(self.INPUT_SIZE), **self.MINIMAL_CROP_KWARGS) + + @pytest.mark.parametrize( + ("kernel", "input_type"), + [ + (F.crop_image, torch.Tensor), + (F._geometry._crop_image_pil, PIL.Image.Image), + (F.crop_image, tv_tensors.Image), + (F.crop_bounding_boxes, tv_tensors.BoundingBoxes), + (F.crop_mask, tv_tensors.Mask), + (F.crop_video, tv_tensors.Video), + ], + ) + def test_functional_signature(self, kernel, input_type): + check_functional_kernel_signature_match(F.crop, kernel=kernel, input_type=input_type) + + @pytest.mark.parametrize("kwargs", CORRECTNESS_CROP_KWARGS) + def test_functional_image_correctness(self, kwargs): + image = make_image(self.INPUT_SIZE, dtype=torch.uint8, device="cpu") + + actual = F.crop(image, **kwargs) + expected = F.to_image(F.crop(F.to_pil_image(image), **kwargs)) + + assert_equal(actual, expected) + + @param_value_parametrization( + size=[(10, 5), (25, 15), (25, 5), (10, 15)], + fill=EXHAUSTIVE_TYPE_FILLS, + ) + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_bounding_boxes, make_segmentation_mask, make_video], + ) + def test_transform(self, param, value, make_input): + input = make_input(self.INPUT_SIZE) + + check_sample_input = True + if param == "fill": + if isinstance(value, (tuple, list)): + if isinstance(input, tv_tensors.Mask): + pytest.skip("F.pad_mask doesn't support non-scalar fill.") + else: + check_sample_input = False + + kwargs = dict( + # 1. size is required + # 2. 
the fill parameter only has an effect if we need padding
+ size=[s + 4 for s in self.INPUT_SIZE],
+ fill=adapt_fill(value, dtype=input.dtype if isinstance(input, torch.Tensor) else torch.uint8),
+ )
+ else:
+ kwargs = {param: value}
+
+ check_transform(
+ transforms.RandomCrop(**kwargs, pad_if_needed=True),
+ input,
+ check_v1_compatibility=param != "fill" or isinstance(value, (int, float)),
+ check_sample_input=check_sample_input,
+ )
+
+ @pytest.mark.parametrize("padding", [1, (1, 1), (1, 1, 1, 1)])
+ def test_transform_padding(self, padding):
+ inpt = make_image(self.INPUT_SIZE)
+
+ output_size = [s + 2 for s in F.get_size(inpt)]
+ transform = transforms.RandomCrop(output_size, padding=padding)
+
+ output = transform(inpt)
+
+ assert F.get_size(output) == output_size
+
+ @pytest.mark.parametrize("padding", [None, 1, (1, 1), (1, 1, 1, 1)])
+ def test_transform_insufficient_padding(self, padding):
+ inpt = make_image(self.INPUT_SIZE)
+
+ output_size = [s + 3 for s in F.get_size(inpt)]
+ transform = transforms.RandomCrop(output_size, padding=padding)
+
+ with pytest.raises(ValueError, match="larger than (padded )?input image size"):
+ transform(inpt)
+
+ def test_transform_pad_if_needed(self):
+ inpt = make_image(self.INPUT_SIZE)
+
+ output_size = [s * 2 for s in F.get_size(inpt)]
+ transform = transforms.RandomCrop(output_size, pad_if_needed=True)
+
+ output = transform(inpt)
+
+ assert F.get_size(output) == output_size
+
+ @param_value_parametrization(
+ size=[(10, 5), (25, 15), (25, 5), (10, 15)],
+ fill=CORRECTNESS_FILLS,
+ padding_mode=["constant", "edge", "reflect", "symmetric"],
+ )
+ @pytest.mark.parametrize("seed", list(range(5)))
+ def test_transform_image_correctness(self, param, value, seed):
+ kwargs = {param: value}
+ if param != "size":
+ # 1. size is required
+ # 2. the fill / padding_mode parameters only have an effect if we need padding
+ kwargs["size"] = [s + 4 for s in self.INPUT_SIZE]
+ if param == "fill":
+ kwargs["fill"] = adapt_fill(kwargs["fill"], dtype=torch.uint8)
+
+ transform = transforms.RandomCrop(pad_if_needed=True, **kwargs)
+
+ image = make_image(self.INPUT_SIZE)
+
+ with freeze_rng_state():
+ torch.manual_seed(seed)
+ actual = transform(image)
+
+ torch.manual_seed(seed)
+ expected = F.to_image(transform(F.to_pil_image(image)))
+
+ assert_equal(actual, expected)
+
+ def _reference_crop_bounding_boxes(self, bounding_boxes, *, top, left, height, width):
+ affine_matrix = np.array(
+ [
+ [1, 0, -left],
+ [0, 1, -top],
+ ],
+ )
+ return reference_affine_bounding_boxes_helper(
+ bounding_boxes, affine_matrix=affine_matrix, new_canvas_size=(height, width)
+ )
+
+ @pytest.mark.parametrize("kwargs", CORRECTNESS_CROP_KWARGS)
+ @pytest.mark.parametrize("format", list(tv_tensors.BoundingBoxFormat))
+ @pytest.mark.parametrize("dtype", [torch.float32, torch.int64])
+ @pytest.mark.parametrize("device", cpu_and_cuda())
+ def test_functional_bounding_box_correctness(self, kwargs, format, dtype, device):
+ bounding_boxes = make_bounding_boxes(self.INPUT_SIZE, format=format, dtype=dtype, device=device)
+
+ actual = F.crop(bounding_boxes, **kwargs)
+ expected = self._reference_crop_bounding_boxes(bounding_boxes, **kwargs)
+
+ assert_equal(actual, expected, atol=1, rtol=0)
+ assert_equal(F.get_size(actual), F.get_size(expected))
+
+ @pytest.mark.parametrize("output_size", [(17, 11), (11, 17), (11, 11)])
+ @pytest.mark.parametrize("format", list(tv_tensors.BoundingBoxFormat))
+ @pytest.mark.parametrize("dtype", [torch.float32, torch.int64])
+ @pytest.mark.parametrize("device", cpu_and_cuda())
+ @pytest.mark.parametrize("seed", list(range(5)))
+ def test_transform_bounding_boxes_correctness(self, output_size, format, dtype, device, seed):
+ input_size = [s * 2 for s in output_size]
+ bounding_boxes = make_bounding_boxes(input_size, format=format, dtype=dtype, device=device)
+
+ transform = transforms.RandomCrop(output_size)
+
+ with freeze_rng_state():
+ torch.manual_seed(seed)
+ params = transform.make_params([bounding_boxes])
+ assert not params.pop("needs_pad")
+ del params["padding"]
+ assert params.pop("needs_crop")
+
+ torch.manual_seed(seed)
+ actual = transform(bounding_boxes)
+
+ expected = self._reference_crop_bounding_boxes(bounding_boxes, **params)
+
+ assert_equal(actual, expected)
+ assert_equal(F.get_size(actual), F.get_size(expected))
+
+ def test_errors(self):
+ with pytest.raises(ValueError, match="Please provide only two dimensions"):
+ transforms.RandomCrop([10, 12, 14])
+
+ with pytest.raises(TypeError, match="Got inappropriate padding arg"):
+ transforms.RandomCrop([10, 12], padding="abc")
+
+ with pytest.raises(ValueError, match="Padding must be an int or a 1, 2, or 4"):
+ transforms.RandomCrop([10, 12], padding=[-0.7, 0, 0.7])
+
+ with pytest.raises(TypeError, match="Got inappropriate fill arg"):
+ transforms.RandomCrop([10, 12], padding=1, fill="abc")
+
+ with pytest.raises(ValueError, match="Padding mode should be either"):
+ transforms.RandomCrop([10, 12], padding=1, padding_mode="abc")
+
+
+class TestErase:
+ INPUT_SIZE = (17, 11)
+ FUNCTIONAL_KWARGS = dict(
+ zip("ijhwv", [2, 2, 10, 8, torch.tensor(0.0, dtype=torch.float32, device="cpu").reshape(-1, 1, 1)])
+ )
+
+ @pytest.mark.parametrize("dtype", [torch.float32, torch.uint8])
+ @pytest.mark.parametrize("device", cpu_and_cuda())
+ def test_kernel_image(self, dtype, device):
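+ # (FUNCTIONAL_KWARGS above spells out the erase signature: i/j are the top-left corner of the
+ # erased region, h/w its height and width, and v the value tensor broadcast over that region)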
+ check_kernel(F.erase_image, make_image(self.INPUT_SIZE, dtype=dtype, device=device), **self.FUNCTIONAL_KWARGS) + + @pytest.mark.parametrize("dtype", [torch.float32, torch.uint8]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_kernel_image_inplace(self, dtype, device): + input = make_image(self.INPUT_SIZE, dtype=dtype, device=device) + input_version = input._version + + output_out_of_place = F.erase_image(input, **self.FUNCTIONAL_KWARGS) + assert output_out_of_place.data_ptr() != input.data_ptr() + assert output_out_of_place is not input + + output_inplace = F.erase_image(input, **self.FUNCTIONAL_KWARGS, inplace=True) + assert output_inplace.data_ptr() == input.data_ptr() + assert output_inplace._version > input_version + assert output_inplace is input + + assert_equal(output_inplace, output_out_of_place) + + def test_kernel_video(self): + check_kernel(F.erase_video, make_video(self.INPUT_SIZE), **self.FUNCTIONAL_KWARGS) + + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_video], + ) + def test_functional(self, make_input): + check_functional(F.erase, make_input(), **self.FUNCTIONAL_KWARGS) + + @pytest.mark.parametrize( + ("kernel", "input_type"), + [ + (F.erase_image, torch.Tensor), + (F._augment._erase_image_pil, PIL.Image.Image), + (F.erase_image, tv_tensors.Image), + (F.erase_video, tv_tensors.Video), + ], + ) + def test_functional_signature(self, kernel, input_type): + check_functional_kernel_signature_match(F.erase, kernel=kernel, input_type=input_type) + + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_video], + ) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_transform(self, make_input, device): + input = make_input(device=device) + + with pytest.warns(UserWarning, match="currently passing through inputs of type"): + check_transform( + transforms.RandomErasing(p=1), + input, + check_v1_compatibility=not isinstance(input, PIL.Image.Image), + ) + + def _reference_erase_image(self, image, *, i, j, h, w, v): + mask = torch.zeros_like(image, dtype=torch.bool) + mask[..., i : i + h, j : j + w] = True + + # The broadcasting and type casting logic is handled automagically in the kernel through indexing + value = torch.broadcast_to(v, (*image.shape[:-2], h, w)).to(image) + + erased_image = torch.empty_like(image) + erased_image[mask] = value.flatten() + erased_image[~mask] = image[~mask] + + return erased_image + + @pytest.mark.parametrize("dtype", [torch.float32, torch.uint8]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_functional_image_correctness(self, dtype, device): + image = make_image(dtype=dtype, device=device) + + actual = F.erase(image, **self.FUNCTIONAL_KWARGS) + expected = self._reference_erase_image(image, **self.FUNCTIONAL_KWARGS) + + assert_equal(actual, expected) + + @param_value_parametrization( + scale=[(0.1, 0.2), [0.0, 1.0]], + ratio=[(0.3, 0.7), [0.1, 5.0]], + value=[0, 0.5, (0, 1, 0), [-0.2, 0.0, 1.3], "random"], + ) + @pytest.mark.parametrize("dtype", [torch.float32, torch.uint8]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + @pytest.mark.parametrize("seed", list(range(5))) + def test_transform_image_correctness(self, param, value, dtype, device, seed): + transform = transforms.RandomErasing(**{param: value}, p=1) + + image = make_image(dtype=dtype, device=device) + + with freeze_rng_state(): + torch.manual_seed(seed) + # This emulates the random apply check that happens before make_params is called + 
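# (drawing one throwaway sample here keeps this manual RNG stream aligned with the one that the
+ # actual transform(image) call further down consumes for its random-apply check)
+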
torch.rand(1) + params = transform.make_params([image]) + + torch.manual_seed(seed) + actual = transform(image) + + expected = self._reference_erase_image(image, **params) + + assert_equal(actual, expected) + + def test_transform_errors(self): + with pytest.raises(TypeError, match="Argument value should be either a number or str or a sequence"): + transforms.RandomErasing(value={}) + + with pytest.raises(ValueError, match="If value is str, it should be 'random'"): + transforms.RandomErasing(value="abc") + + with pytest.raises(TypeError, match="Scale should be a sequence"): + transforms.RandomErasing(scale=123) + + with pytest.raises(TypeError, match="Ratio should be a sequence"): + transforms.RandomErasing(ratio=123) + + with pytest.raises(ValueError, match="Scale should be between 0 and 1"): + transforms.RandomErasing(scale=[-1, 2]) + + transform = transforms.RandomErasing(value=[1, 2, 3, 4]) + + with pytest.raises(ValueError, match="If value is a sequence, it should have either a single value"): + transform.make_params([make_image()]) + + +class TestGaussianBlur: + @pytest.mark.parametrize("kernel_size", [1, 3, (3, 1), [3, 5]]) + @pytest.mark.parametrize("sigma", [None, 1.0, 1, (0.5,), [0.3], (0.3, 0.7), [0.9, 0.2]]) + def test_kernel_image(self, kernel_size, sigma): + check_kernel( + F.gaussian_blur_image, + make_image(), + kernel_size=kernel_size, + sigma=sigma, + check_scripted_vs_eager=not (isinstance(kernel_size, int) or isinstance(sigma, (float, int))), + ) + + def test_kernel_image_errors(self): + image = make_image_tensor() + + with pytest.raises(ValueError, match="kernel_size is a sequence its length should be 2"): + F.gaussian_blur_image(image, kernel_size=[1, 2, 3]) + + for kernel_size in [2, -1]: + with pytest.raises(ValueError, match="kernel_size should have odd and positive integers"): + F.gaussian_blur_image(image, kernel_size=kernel_size) + + with pytest.raises(ValueError, match="sigma is a sequence, its length should be 2"): + F.gaussian_blur_image(image, kernel_size=1, sigma=[1, 2, 3]) + + with pytest.raises(TypeError, match="sigma should be either float or sequence of floats"): + F.gaussian_blur_image(image, kernel_size=1, sigma=object()) + + with pytest.raises(ValueError, match="sigma should have positive values"): + F.gaussian_blur_image(image, kernel_size=1, sigma=-1) + + def test_kernel_video(self): + check_kernel(F.gaussian_blur_video, make_video(), kernel_size=(3, 3)) + + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_video], + ) + def test_functional(self, make_input): + check_functional(F.gaussian_blur, make_input(), kernel_size=(3, 3)) + + @pytest.mark.parametrize( + ("kernel", "input_type"), + [ + (F.gaussian_blur_image, torch.Tensor), + (F._misc._gaussian_blur_image_pil, PIL.Image.Image), + (F.gaussian_blur_image, tv_tensors.Image), + (F.gaussian_blur_video, tv_tensors.Video), + ], + ) + def test_functional_signature(self, kernel, input_type): + check_functional_kernel_signature_match(F.gaussian_blur, kernel=kernel, input_type=input_type) + + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_bounding_boxes, make_segmentation_mask, make_video], + ) + @pytest.mark.parametrize("device", cpu_and_cuda()) + @pytest.mark.parametrize("sigma", [5, 2.0, (0.5, 2), [1.3, 2.7]]) + def test_transform(self, make_input, device, sigma): + check_transform(transforms.GaussianBlur(kernel_size=3, sigma=sigma), make_input(device=device)) + + def test_assertions(self): + with 
pytest.raises(ValueError, match="Kernel size should be a tuple/list of two integers"): + transforms.GaussianBlur([10, 12, 14]) + + with pytest.raises(ValueError, match="Kernel size value should be an odd and positive number"): + transforms.GaussianBlur(4) + + with pytest.raises(ValueError, match="If sigma is a sequence its length should be 1 or 2. Got 3"): + transforms.GaussianBlur(3, sigma=[1, 2, 3]) + + with pytest.raises(ValueError, match="sigma values should be positive and of the form"): + transforms.GaussianBlur(3, sigma=-1.0) + + with pytest.raises(ValueError, match="sigma values should be positive and of the form"): + transforms.GaussianBlur(3, sigma=[2.0, 1.0]) + + with pytest.raises(TypeError, match="sigma should be a number or a sequence of numbers"): + transforms.GaussianBlur(3, sigma={}) + + @pytest.mark.parametrize("sigma", [10.0, [10.0, 12.0], (10, 12.0), [10]]) + def test_make_params(self, sigma): + transform = transforms.GaussianBlur(3, sigma=sigma) + params = transform.make_params([]) + + if isinstance(sigma, float): + assert params["sigma"][0] == params["sigma"][1] == sigma + elif isinstance(sigma, list) and len(sigma) == 1: + assert params["sigma"][0] == params["sigma"][1] == sigma[0] + else: + assert sigma[0] <= params["sigma"][0] <= sigma[1] + assert sigma[0] <= params["sigma"][1] <= sigma[1] + + # np_img = np.arange(3 * 10 * 12, dtype="uint8").reshape((10, 12, 3)) + # np_img2 = np.arange(26 * 28, dtype="uint8").reshape((26, 28)) + # { + # "10_12_3__3_3_0.8": cv2.GaussianBlur(np_img, ksize=(3, 3), sigmaX=0.8), + # "10_12_3__3_3_0.5": cv2.GaussianBlur(np_img, ksize=(3, 3), sigmaX=0.5), + # "10_12_3__3_5_0.8": cv2.GaussianBlur(np_img, ksize=(3, 5), sigmaX=0.8), + # "10_12_3__3_5_0.5": cv2.GaussianBlur(np_img, ksize=(3, 5), sigmaX=0.5), + # "26_28_1__23_23_1.7": cv2.GaussianBlur(np_img2, ksize=(23, 23), sigmaX=1.7), + # } + REFERENCE_GAUSSIAN_BLUR_IMAGE_RESULTS = torch.load( + Path(__file__).parent / "assets" / "gaussian_blur_opencv_results.pt", + weights_only=False, + ) + + @pytest.mark.parametrize( + ("dimensions", "kernel_size", "sigma"), + [ + ((3, 10, 12), (3, 3), 0.8), + ((3, 10, 12), (3, 3), 0.5), + ((3, 10, 12), (3, 5), 0.8), + ((3, 10, 12), (3, 5), 0.5), + ((1, 26, 28), (23, 23), 1.7), + ], + ) + @pytest.mark.parametrize("dtype", [torch.float32, torch.float64, torch.float16]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_functional_image_correctness(self, dimensions, kernel_size, sigma, dtype, device): + if dtype is torch.float16 and device == "cpu": + pytest.skip("The CPU implementation of float16 on CPU differs from opencv") + + num_channels, height, width = dimensions + + reference_results_key = f"{height}_{width}_{num_channels}__{kernel_size[0]}_{kernel_size[1]}_{sigma}" + expected = ( + torch.tensor(self.REFERENCE_GAUSSIAN_BLUR_IMAGE_RESULTS[reference_results_key]) + .reshape(height, width, num_channels) + .permute(2, 0, 1) + .to(dtype=dtype, device=device) + ) + + image = tv_tensors.Image( + torch.arange(num_channels * height * width, dtype=torch.uint8) + .reshape(height, width, num_channels) + .permute(2, 0, 1), + dtype=dtype, + device=device, + ) + + actual = F.gaussian_blur_image(image, kernel_size=kernel_size, sigma=sigma) + + torch.testing.assert_close(actual, expected, rtol=0, atol=1) + + +class TestGaussianNoise: + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image, make_video], + ) + def test_kernel(self, make_input): + check_kernel( + F.gaussian_noise, + make_input(dtype=torch.float32), + # This cannot 
pass because the noise on a batch is not per-image
+ check_batched_vs_unbatched=False,
+ )
+
+ @pytest.mark.parametrize(
+ "make_input",
+ [make_image_tensor, make_image, make_video],
+ )
+ def test_functional(self, make_input):
+ check_functional(F.gaussian_noise, make_input(dtype=torch.float32))
+
+ @pytest.mark.parametrize(
+ ("kernel", "input_type"),
+ [
+ (F.gaussian_noise, torch.Tensor),
+ (F.gaussian_noise_image, tv_tensors.Image),
+ (F.gaussian_noise_video, tv_tensors.Video),
+ ],
+ )
+ def test_functional_signature(self, kernel, input_type):
+ check_functional_kernel_signature_match(F.gaussian_noise, kernel=kernel, input_type=input_type)
+
+ @pytest.mark.parametrize(
+ "make_input",
+ [make_image_tensor, make_image, make_video],
+ )
+ def test_transform(self, make_input):
+ def adapter(_, input, __):
+ # This transform doesn't support uint8 so we have to convert the auto-generated uint8 tensors to float32
+ # Same for PIL images
+ for key, value in input.items():
+ if isinstance(value, torch.Tensor) and not value.is_floating_point():
+ input[key] = value.to(torch.float32)
+ if isinstance(value, PIL.Image.Image):
+ input[key] = F.pil_to_tensor(value).to(torch.float32)
+ return input
+
+ check_transform(transforms.GaussianNoise(), make_input(dtype=torch.float32), check_sample_input=adapter)
+
+ def test_bad_input(self):
+ with pytest.raises(ValueError, match="Gaussian Noise is not implemented for PIL images."):
+ F.gaussian_noise(make_image_pil())
+ with pytest.raises(ValueError, match="Input tensor is expected to be in float dtype"):
+ F.gaussian_noise(make_image(dtype=torch.uint8))
+ with pytest.raises(ValueError, match="sigma shouldn't be negative"):
+ F.gaussian_noise(make_image(dtype=torch.float32), sigma=-1)
+
+ def test_clip(self):
+ img = make_image(dtype=torch.float32)
+
+ out = F.gaussian_noise(img, mean=100, clip=False)
+ assert out.min() > 50
+
+ out = F.gaussian_noise(img, mean=100, clip=True)
+ assert (out == 1).all()
+
+ out = F.gaussian_noise(img, mean=-100, clip=False)
+ assert out.min() < -50
+
+ out = F.gaussian_noise(img, mean=-100, clip=True)
+ assert (out == 0).all()
+
+
+class TestAutoAugmentTransforms:
+ # These transforms have a lot of branches in their `forward()` passes which are conditioned on random sampling.
+ # It's typically very hard to test the effect on some parameters without heavy mocking logic.
+ # This class adds correctness tests for the kernels that are specific to those transforms. The rest of the kernels,
+ # e.g. rotate, are tested in their respective classes. The rest of the tests here are mostly smoke tests.
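+ # A note on the PIL reference used in _reference_shear_translate below: Image.transform with
+ # PIL.Image.AFFINE takes the inverse mapping (a, b, c, d, e, f), i.e. each output pixel (x, y) is
+ # sampled from the input at (a*x + b*y + c, d*x + e*y + f). That is why the TranslateX/Y matrices
+ # use -magnitude: sampling the input at x - magnitude shifts the content by +magnitude in the output.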
+ + def _reference_shear_translate(self, image, *, transform_id, magnitude, interpolation, fill): + if isinstance(image, PIL.Image.Image): + input = image + else: + input = F.to_pil_image(image) + + matrix = { + "ShearX": (1, magnitude, 0, 0, 1, 0), + "ShearY": (1, 0, 0, magnitude, 1, 0), + "TranslateX": (1, 0, -int(magnitude), 0, 1, 0), + "TranslateY": (1, 0, 0, 0, 1, -int(magnitude)), + }[transform_id] + + output = input.transform( + input.size, PIL.Image.AFFINE, matrix, resample=pil_modes_mapping[interpolation], fill=fill + ) + + if isinstance(image, PIL.Image.Image): + return output + else: + return F.to_image(output) + + @pytest.mark.parametrize("transform_id", ["ShearX", "ShearY", "TranslateX", "TranslateY"]) + @pytest.mark.parametrize("magnitude", [0.3, -0.2, 0.0]) + @pytest.mark.parametrize( + "interpolation", [transforms.InterpolationMode.NEAREST, transforms.InterpolationMode.BILINEAR] + ) + @pytest.mark.parametrize("fill", CORRECTNESS_FILLS) + @pytest.mark.parametrize("input_type", ["Tensor", "PIL"]) + def test_correctness_shear_translate(self, transform_id, magnitude, interpolation, fill, input_type): + # ShearX/Y and TranslateX/Y are the only ops that are native to the AA transforms. They are modeled after the + # reference implementation: + # https://github.com/tensorflow/models/blob/885fda091c46c59d6c7bb5c7e760935eacc229da/research/autoaugment/augmentation_transforms.py#L273-L362 + # All other ops are checked in their respective dedicated tests. + + image = make_image(dtype=torch.uint8, device="cpu") + if input_type == "PIL": + image = F.to_pil_image(image) + + if "Translate" in transform_id: + # For TranslateX/Y magnitude is a value in pixels + magnitude *= min(F.get_size(image)) + + actual = transforms.AutoAugment()._apply_image_or_video_transform( + image, + transform_id=transform_id, + magnitude=magnitude, + interpolation=interpolation, + fill={type(image): fill}, + ) + expected = self._reference_shear_translate( + image, transform_id=transform_id, magnitude=magnitude, interpolation=interpolation, fill=fill + ) + + if input_type == "PIL": + actual, expected = F.to_image(actual), F.to_image(expected) + + if "Shear" in transform_id and input_type == "Tensor": + mae = (actual.float() - expected.float()).abs().mean() + assert mae < (12 if interpolation is transforms.InterpolationMode.NEAREST else 5) + else: + assert_close(actual, expected, rtol=0, atol=1) + + def _sample_input_adapter(self, transform, input, device): + adapted_input = {} + image_or_video_found = False + for key, value in input.items(): + if isinstance(value, (tv_tensors.BoundingBoxes, tv_tensors.Mask)): + # AA transforms don't support bounding boxes or masks + continue + elif check_type(value, (tv_tensors.Image, tv_tensors.Video, is_pure_tensor, PIL.Image.Image)): + if image_or_video_found: + # AA transforms only support a single image or video + continue + image_or_video_found = True + adapted_input[key] = value + return adapted_input + + @pytest.mark.parametrize( + "transform", + [transforms.AutoAugment(), transforms.RandAugment(), transforms.TrivialAugmentWide(), transforms.AugMix()], + ) + @pytest.mark.parametrize("make_input", [make_image_tensor, make_image_pil, make_image, make_video]) + @pytest.mark.parametrize("dtype", [torch.uint8, torch.float32]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_transform_smoke(self, transform, make_input, dtype, device): + if make_input is make_image_pil and not (dtype is torch.uint8 and device == "cpu"): + pytest.skip( + "PIL image tests with 
parametrization other than dtype=torch.uint8 and device='cpu' " + "will degenerate to that anyway." + ) + input = make_input(dtype=dtype, device=device) + + with freeze_rng_state(): + # By default every test starts from the same random seed. This leads to minimal coverage of the sampling + # that happens inside forward(). To avoid calling the transform multiple times to achieve higher coverage, + # we build a reproducible random seed from the input type, dtype, and device. + torch.manual_seed(hash((make_input, dtype, device))) + + # For v2, we changed the random sampling of the AA transforms. This makes it impossible to compare the v1 + # and v2 outputs without complicated mocking and monkeypatching. Thus, we skip the v1 compatibility checks + # here and only check if we can script the v2 transform and subsequently call the result. + check_transform( + transform, input, check_v1_compatibility=False, check_sample_input=self._sample_input_adapter + ) + + if type(input) is torch.Tensor and dtype is torch.uint8: + _script(transform)(input) + + def test_auto_augment_policy_error(self): + with pytest.raises(ValueError, match="provided policy"): + transforms.AutoAugment(policy=None) + + @pytest.mark.parametrize("severity", [0, 11]) + def test_aug_mix_severity_error(self, severity): + with pytest.raises(ValueError, match="severity must be between"): + transforms.AugMix(severity=severity) + + +class TestConvertBoundingBoxFormat: + old_new_formats = list(itertools.permutations(iter(tv_tensors.BoundingBoxFormat), 2)) + + @pytest.mark.parametrize(("old_format", "new_format"), old_new_formats) + def test_kernel(self, old_format, new_format): + check_kernel( + F.convert_bounding_box_format, + make_bounding_boxes(format=old_format), + new_format=new_format, + old_format=old_format, + ) + + @pytest.mark.parametrize("format", list(tv_tensors.BoundingBoxFormat)) + @pytest.mark.parametrize("inplace", [False, True]) + def test_kernel_noop(self, format, inplace): + input = make_bounding_boxes(format=format).as_subclass(torch.Tensor) + input_version = input._version + + output = F.convert_bounding_box_format(input, old_format=format, new_format=format, inplace=inplace) + + assert output is input + assert output.data_ptr() == input.data_ptr() + assert output._version == input_version + + @pytest.mark.parametrize(("old_format", "new_format"), old_new_formats) + def test_kernel_inplace(self, old_format, new_format): + input = make_bounding_boxes(format=old_format).as_subclass(torch.Tensor) + input_version = input._version + + output_out_of_place = F.convert_bounding_box_format(input, old_format=old_format, new_format=new_format) + assert output_out_of_place.data_ptr() != input.data_ptr() + assert output_out_of_place is not input + + output_inplace = F.convert_bounding_box_format( + input, old_format=old_format, new_format=new_format, inplace=True + ) + assert output_inplace.data_ptr() == input.data_ptr() + assert output_inplace._version > input_version + assert output_inplace is input + + assert_equal(output_inplace, output_out_of_place) + + @pytest.mark.parametrize(("old_format", "new_format"), old_new_formats) + def test_functional(self, old_format, new_format): + check_functional(F.convert_bounding_box_format, make_bounding_boxes(format=old_format), new_format=new_format) + + @pytest.mark.parametrize(("old_format", "new_format"), old_new_formats) + @pytest.mark.parametrize("format_type", ["enum", "str"]) + def test_transform(self, old_format, new_format, format_type): + check_transform( + 
transforms.ConvertBoundingBoxFormat(new_format.name if format_type == "str" else new_format), + make_bounding_boxes(format=old_format), + ) + + @pytest.mark.parametrize(("old_format", "new_format"), old_new_formats) + def test_strings(self, old_format, new_format): + # Non-regression test for https://github.com/pytorch/vision/issues/8258 + input = tv_tensors.BoundingBoxes(torch.tensor([[10, 10, 20, 20]]), format=old_format, canvas_size=(50, 50)) + expected = self._reference_convert_bounding_box_format(input, new_format) + + old_format = old_format.name + new_format = new_format.name + + out_functional = F.convert_bounding_box_format(input, new_format=new_format) + out_functional_tensor = F.convert_bounding_box_format( + input.as_subclass(torch.Tensor), old_format=old_format, new_format=new_format + ) + out_transform = transforms.ConvertBoundingBoxFormat(new_format)(input) + for out in (out_functional, out_functional_tensor, out_transform): + assert_equal(out, expected) + + def _reference_convert_bounding_box_format(self, bounding_boxes, new_format): + return tv_tensors.wrap( + torchvision.ops.box_convert( + bounding_boxes.as_subclass(torch.Tensor), + in_fmt=bounding_boxes.format.name.lower(), + out_fmt=new_format.name.lower(), + ).to(bounding_boxes.dtype), + like=bounding_boxes, + format=new_format, + ) + + @pytest.mark.parametrize(("old_format", "new_format"), old_new_formats) + @pytest.mark.parametrize("dtype", [torch.int64, torch.float32]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + @pytest.mark.parametrize("fn_type", ["functional", "transform"]) + def test_correctness(self, old_format, new_format, dtype, device, fn_type): + bounding_boxes = make_bounding_boxes(format=old_format, dtype=dtype, device=device) + + if fn_type == "functional": + fn = functools.partial(F.convert_bounding_box_format, new_format=new_format) + else: + fn = transforms.ConvertBoundingBoxFormat(format=new_format) + + actual = fn(bounding_boxes) + expected = self._reference_convert_bounding_box_format(bounding_boxes, new_format) + + assert_equal(actual, expected) + + def test_errors(self): + input_tv_tensor = make_bounding_boxes() + input_pure_tensor = input_tv_tensor.as_subclass(torch.Tensor) + + for input in [input_tv_tensor, input_pure_tensor]: + with pytest.raises(TypeError, match="missing 1 required argument: 'new_format'"): + F.convert_bounding_box_format(input) + + with pytest.raises(ValueError, match="`old_format` has to be passed"): + F.convert_bounding_box_format(input_pure_tensor, new_format=input_tv_tensor.format) + + with pytest.raises(ValueError, match="`old_format` must not be passed"): + F.convert_bounding_box_format( + input_tv_tensor, old_format=input_tv_tensor.format, new_format=input_tv_tensor.format + ) + + +class TestResizedCrop: + INPUT_SIZE = (17, 11) + CROP_KWARGS = dict(top=2, left=2, height=5, width=7) + OUTPUT_SIZE = (19, 32) + + @pytest.mark.parametrize( + ("kernel", "make_input"), + [ + (F.resized_crop_image, make_image), + (F.resized_crop_bounding_boxes, make_bounding_boxes), + (F.resized_crop_mask, make_segmentation_mask), + (F.resized_crop_mask, make_detection_masks), + (F.resized_crop_video, make_video), + ], + ) + def test_kernel(self, kernel, make_input): + input = make_input(self.INPUT_SIZE) + if isinstance(input, tv_tensors.BoundingBoxes): + extra_kwargs = dict(format=input.format) + elif isinstance(input, tv_tensors.Mask): + extra_kwargs = dict() + else: + extra_kwargs = dict(antialias=True) + + check_kernel(kernel, input, **self.CROP_KWARGS, size=self.OUTPUT_SIZE, 
**extra_kwargs) + + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_bounding_boxes, make_segmentation_mask, make_video], + ) + def test_functional(self, make_input): + check_functional( + F.resized_crop, make_input(self.INPUT_SIZE), **self.CROP_KWARGS, size=self.OUTPUT_SIZE, antialias=True + ) + + @pytest.mark.parametrize( + ("kernel", "input_type"), + [ + (F.resized_crop_image, torch.Tensor), + (F._geometry._resized_crop_image_pil, PIL.Image.Image), + (F.resized_crop_image, tv_tensors.Image), + (F.resized_crop_bounding_boxes, tv_tensors.BoundingBoxes), + (F.resized_crop_mask, tv_tensors.Mask), + (F.resized_crop_video, tv_tensors.Video), + ], + ) + def test_functional_signature(self, kernel, input_type): + check_functional_kernel_signature_match(F.resized_crop, kernel=kernel, input_type=input_type) + + @param_value_parametrization( + scale=[(0.1, 0.2), [0.0, 1.0]], + ratio=[(0.3, 0.7), [0.1, 5.0]], + ) + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_bounding_boxes, make_segmentation_mask, make_video], + ) + def test_transform(self, param, value, make_input): + check_transform( + transforms.RandomResizedCrop(size=self.OUTPUT_SIZE, **{param: value}, antialias=True), + make_input(self.INPUT_SIZE), + check_v1_compatibility=dict(rtol=0, atol=1), + ) + + # `InterpolationMode.NEAREST` is modeled after the buggy `INTER_NEAREST` interpolation of CV2. + # The PIL equivalent of `InterpolationMode.NEAREST` is `InterpolationMode.NEAREST_EXACT` + @pytest.mark.parametrize("interpolation", set(INTERPOLATION_MODES) - {transforms.InterpolationMode.NEAREST}) + def test_functional_image_correctness(self, interpolation): + image = make_image(self.INPUT_SIZE, dtype=torch.uint8) + + actual = F.resized_crop( + image, **self.CROP_KWARGS, size=self.OUTPUT_SIZE, interpolation=interpolation, antialias=True + ) + expected = F.to_image( + F.resized_crop( + F.to_pil_image(image), **self.CROP_KWARGS, size=self.OUTPUT_SIZE, interpolation=interpolation + ) + ) + + torch.testing.assert_close(actual, expected, atol=1, rtol=0) + + def _reference_resized_crop_bounding_boxes(self, bounding_boxes, *, top, left, height, width, size): + new_height, new_width = size + + crop_affine_matrix = np.array( + [ + [1, 0, -left], + [0, 1, -top], + [0, 0, 1], + ], + ) + resize_affine_matrix = np.array( + [ + [new_width / width, 0, 0], + [0, new_height / height, 0], + [0, 0, 1], + ], + ) + affine_matrix = (resize_affine_matrix @ crop_affine_matrix)[:2, :] + + return reference_affine_bounding_boxes_helper( + bounding_boxes, + affine_matrix=affine_matrix, + new_canvas_size=size, + ) + + @pytest.mark.parametrize("format", list(tv_tensors.BoundingBoxFormat)) + def test_functional_bounding_boxes_correctness(self, format): + bounding_boxes = make_bounding_boxes(self.INPUT_SIZE, format=format) + + actual = F.resized_crop(bounding_boxes, **self.CROP_KWARGS, size=self.OUTPUT_SIZE) + expected = self._reference_resized_crop_bounding_boxes( + bounding_boxes, **self.CROP_KWARGS, size=self.OUTPUT_SIZE + ) + + assert_equal(actual, expected) + assert_equal(F.get_size(actual), F.get_size(expected)) + + def test_transform_errors_warnings(self): + with pytest.raises(ValueError, match="provide only two dimensions"): + transforms.RandomResizedCrop(size=(1, 2, 3)) + + with pytest.raises(TypeError, match="Scale should be a sequence"): + transforms.RandomResizedCrop(size=self.INPUT_SIZE, scale=123) + + with pytest.raises(TypeError, match="Ratio should be a sequence"): 
+ transforms.RandomResizedCrop(size=self.INPUT_SIZE, ratio=123) + + for param in ["scale", "ratio"]: + with pytest.warns(match="Scale and ratio should be of kind"): + transforms.RandomResizedCrop(size=self.INPUT_SIZE, **{param: [1, 0]}) + + +class TestPad: + EXHAUSTIVE_TYPE_PADDINGS = [1, (1,), (1, 2), (1, 2, 3, 4), [1], [1, 2], [1, 2, 3, 4]] + CORRECTNESS_PADDINGS = [ + padding + for padding in EXHAUSTIVE_TYPE_PADDINGS + if isinstance(padding, int) or isinstance(padding, list) and len(padding) > 1 + ] + PADDING_MODES = ["constant", "symmetric", "edge", "reflect"] + + @param_value_parametrization( + padding=EXHAUSTIVE_TYPE_PADDINGS, + fill=EXHAUSTIVE_TYPE_FILLS, + padding_mode=PADDING_MODES, + ) + @pytest.mark.parametrize("dtype", [torch.uint8, torch.float32]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_kernel_image(self, param, value, dtype, device): + if param == "fill": + value = adapt_fill(value, dtype=dtype) + kwargs = {param: value} + if param != "padding": + kwargs["padding"] = [1] + + image = make_image(dtype=dtype, device=device) + + check_kernel( + F.pad_image, + image, + **kwargs, + check_scripted_vs_eager=not ( + (param == "padding" and isinstance(value, int)) + # See https://github.com/pytorch/vision/pull/7252#issue-1585585521 for details + or ( + param == "fill" + and ( + isinstance(value, tuple) or (isinstance(value, list) and any(isinstance(v, int) for v in value)) + ) + ) + ), + ) + + @pytest.mark.parametrize("format", list(tv_tensors.BoundingBoxFormat)) + def test_kernel_bounding_boxes(self, format): + bounding_boxes = make_bounding_boxes(format=format) + check_kernel( + F.pad_bounding_boxes, + bounding_boxes, + format=bounding_boxes.format, + canvas_size=bounding_boxes.canvas_size, + padding=[1], + ) + + @pytest.mark.parametrize("padding_mode", ["symmetric", "edge", "reflect"]) + def test_kernel_bounding_boxes_errors(self, padding_mode): + bounding_boxes = make_bounding_boxes() + with pytest.raises(ValueError, match=f"'{padding_mode}' is not supported"): + F.pad_bounding_boxes( + bounding_boxes, + format=bounding_boxes.format, + canvas_size=bounding_boxes.canvas_size, + padding=[1], + padding_mode=padding_mode, + ) + + @pytest.mark.parametrize("make_mask", [make_segmentation_mask, make_detection_masks]) + def test_kernel_mask(self, make_mask): + check_kernel(F.pad_mask, make_mask(), padding=[1]) + + @pytest.mark.parametrize("fill", [[1], (0,), [1, 0, 1], (0, 1, 0)]) + def test_kernel_mask_errors(self, fill): + with pytest.raises(ValueError, match="Non-scalar fill value is not supported"): + F.pad_mask(make_segmentation_mask(), padding=[1], fill=fill) + + def test_kernel_video(self): + check_kernel(F.pad_video, make_video(), padding=[1]) + + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_bounding_boxes, make_segmentation_mask, make_video], + ) + def test_functional(self, make_input): + check_functional(F.pad, make_input(), padding=[1]) + + @pytest.mark.parametrize( + ("kernel", "input_type"), + [ + (F.pad_image, torch.Tensor), + # The PIL kernel uses fill=0 as default rather than fill=None as all others. + # Since the whole fill story is already really inconsistent, we won't introduce yet another case to allow + # for this test to pass. + # See https://github.com/pytorch/vision/issues/6623 for a discussion. 
+ # (F._geometry._pad_image_pil, PIL.Image.Image), + (F.pad_image, tv_tensors.Image), + (F.pad_bounding_boxes, tv_tensors.BoundingBoxes), + (F.pad_mask, tv_tensors.Mask), + (F.pad_video, tv_tensors.Video), + ], + ) + def test_functional_signature(self, kernel, input_type): + check_functional_kernel_signature_match(F.pad, kernel=kernel, input_type=input_type) + + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_bounding_boxes, make_segmentation_mask, make_video], + ) + def test_transform(self, make_input): + check_transform(transforms.Pad(padding=[1]), make_input()) + + def test_transform_errors(self): + with pytest.raises(TypeError, match="Got inappropriate padding arg"): + transforms.Pad("abc") + + with pytest.raises(ValueError, match="Padding must be an int or a 1, 2, or 4"): + transforms.Pad([-0.7, 0, 0.7]) + + with pytest.raises(TypeError, match="Got inappropriate fill arg"): + transforms.Pad(12, fill="abc") + + with pytest.raises(ValueError, match="Padding mode should be either"): + transforms.Pad(12, padding_mode="abc") + + @pytest.mark.parametrize("padding", CORRECTNESS_PADDINGS) + @pytest.mark.parametrize( + ("padding_mode", "fill"), + [ + *[("constant", fill) for fill in CORRECTNESS_FILLS], + *[(padding_mode, None) for padding_mode in ["symmetric", "edge", "reflect"]], + ], + ) + @pytest.mark.parametrize("fn", [F.pad, transform_cls_to_functional(transforms.Pad)]) + def test_image_correctness(self, padding, padding_mode, fill, fn): + image = make_image(dtype=torch.uint8, device="cpu") + + fill = adapt_fill(fill, dtype=torch.uint8) + + actual = fn(image, padding=padding, padding_mode=padding_mode, fill=fill) + expected = F.to_image(F.pad(F.to_pil_image(image), padding=padding, padding_mode=padding_mode, fill=fill)) + + assert_equal(actual, expected) + + def _reference_pad_bounding_boxes(self, bounding_boxes, *, padding): + if isinstance(padding, int): + padding = [padding] + left, top, right, bottom = padding * (4 // len(padding)) + + affine_matrix = np.array( + [ + [1, 0, left], + [0, 1, top], + ], + ) + + height = bounding_boxes.canvas_size[0] + top + bottom + width = bounding_boxes.canvas_size[1] + left + right + + return reference_affine_bounding_boxes_helper( + bounding_boxes, affine_matrix=affine_matrix, new_canvas_size=(height, width) + ) + + @pytest.mark.parametrize("padding", CORRECTNESS_PADDINGS) + @pytest.mark.parametrize("format", list(tv_tensors.BoundingBoxFormat)) + @pytest.mark.parametrize("dtype", [torch.int64, torch.float32]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + @pytest.mark.parametrize("fn", [F.pad, transform_cls_to_functional(transforms.Pad)]) + def test_bounding_boxes_correctness(self, padding, format, dtype, device, fn): + bounding_boxes = make_bounding_boxes(format=format, dtype=dtype, device=device) + + actual = fn(bounding_boxes, padding=padding) + expected = self._reference_pad_bounding_boxes(bounding_boxes, padding=padding) + + assert_equal(actual, expected) + + +class TestCenterCrop: + INPUT_SIZE = (17, 11) + OUTPUT_SIZES = [(3, 5), (5, 3), (4, 4), (21, 9), (13, 15), (19, 14), 3, (4,), [5], INPUT_SIZE] + + @pytest.mark.parametrize("output_size", OUTPUT_SIZES) + @pytest.mark.parametrize("dtype", [torch.int64, torch.float32]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_kernel_image(self, output_size, dtype, device): + check_kernel( + F.center_crop_image, + make_image(self.INPUT_SIZE, dtype=dtype, device=device), + output_size=output_size, + check_scripted_vs_eager=not 
isinstance(output_size, int), + ) + + @pytest.mark.parametrize("output_size", OUTPUT_SIZES) + @pytest.mark.parametrize("format", list(tv_tensors.BoundingBoxFormat)) + def test_kernel_bounding_boxes(self, output_size, format): + bounding_boxes = make_bounding_boxes(self.INPUT_SIZE, format=format) + check_kernel( + F.center_crop_bounding_boxes, + bounding_boxes, + format=bounding_boxes.format, + canvas_size=bounding_boxes.canvas_size, + output_size=output_size, + check_scripted_vs_eager=not isinstance(output_size, int), + ) + + @pytest.mark.parametrize("make_mask", [make_segmentation_mask, make_detection_masks]) + def test_kernel_mask(self, make_mask): + check_kernel(F.center_crop_mask, make_mask(), output_size=self.OUTPUT_SIZES[0]) + + def test_kernel_video(self): + check_kernel(F.center_crop_video, make_video(self.INPUT_SIZE), output_size=self.OUTPUT_SIZES[0]) + + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_bounding_boxes, make_segmentation_mask, make_video], + ) + def test_functional(self, make_input): + check_functional(F.center_crop, make_input(self.INPUT_SIZE), output_size=self.OUTPUT_SIZES[0]) + + @pytest.mark.parametrize( + ("kernel", "input_type"), + [ + (F.center_crop_image, torch.Tensor), + (F._geometry._center_crop_image_pil, PIL.Image.Image), + (F.center_crop_image, tv_tensors.Image), + (F.center_crop_bounding_boxes, tv_tensors.BoundingBoxes), + (F.center_crop_mask, tv_tensors.Mask), + (F.center_crop_video, tv_tensors.Video), + ], + ) + def test_functional_signature(self, kernel, input_type): + check_functional_kernel_signature_match(F.center_crop, kernel=kernel, input_type=input_type) + + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_bounding_boxes, make_segmentation_mask, make_video], + ) + def test_transform(self, make_input): + check_transform(transforms.CenterCrop(self.OUTPUT_SIZES[0]), make_input(self.INPUT_SIZE)) + + @pytest.mark.parametrize("output_size", OUTPUT_SIZES) + @pytest.mark.parametrize("fn", [F.center_crop, transform_cls_to_functional(transforms.CenterCrop)]) + def test_image_correctness(self, output_size, fn): + image = make_image(self.INPUT_SIZE, dtype=torch.uint8, device="cpu") + + actual = fn(image, output_size) + expected = F.to_image(F.center_crop(F.to_pil_image(image), output_size=output_size)) + + assert_equal(actual, expected) + + def _reference_center_crop_bounding_boxes(self, bounding_boxes, output_size): + image_height, image_width = bounding_boxes.canvas_size + if isinstance(output_size, int): + output_size = (output_size, output_size) + elif len(output_size) == 1: + output_size *= 2 + crop_height, crop_width = output_size + + top = int(round((image_height - crop_height) / 2)) + left = int(round((image_width - crop_width) / 2)) + + affine_matrix = np.array( + [ + [1, 0, -left], + [0, 1, -top], + ], + ) + return reference_affine_bounding_boxes_helper( + bounding_boxes, affine_matrix=affine_matrix, new_canvas_size=output_size + ) + + @pytest.mark.parametrize("output_size", OUTPUT_SIZES) + @pytest.mark.parametrize("format", list(tv_tensors.BoundingBoxFormat)) + @pytest.mark.parametrize("dtype", [torch.int64, torch.float32]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + @pytest.mark.parametrize("fn", [F.center_crop, transform_cls_to_functional(transforms.CenterCrop)]) + def test_bounding_boxes_correctness(self, output_size, format, dtype, device, fn): + bounding_boxes = make_bounding_boxes(self.INPUT_SIZE, format=format, dtype=dtype, 
device=device) + + actual = fn(bounding_boxes, output_size) + expected = self._reference_center_crop_bounding_boxes(bounding_boxes, output_size) + + assert_equal(actual, expected) + + +class TestPerspective: + COEFFICIENTS = [ + [1.2405, 0.1772, -6.9113, 0.0463, 1.251, -5.235, 0.00013, 0.0018], + [0.7366, -0.11724, 1.45775, -0.15012, 0.73406, 2.6019, -0.0072, -0.0063], + ] + START_END_POINTS = [ + ([[0, 0], [33, 0], [33, 25], [0, 25]], [[3, 2], [32, 3], [30, 24], [2, 25]]), + ([[3, 2], [32, 3], [30, 24], [2, 25]], [[0, 0], [33, 0], [33, 25], [0, 25]]), + ([[3, 2], [32, 3], [30, 24], [2, 25]], [[5, 5], [30, 3], [33, 19], [4, 25]]), + ] + MINIMAL_KWARGS = dict(startpoints=None, endpoints=None, coefficients=COEFFICIENTS[0]) + + @param_value_parametrization( + coefficients=COEFFICIENTS, + start_end_points=START_END_POINTS, + fill=EXHAUSTIVE_TYPE_FILLS, + ) + @pytest.mark.parametrize("dtype", [torch.uint8, torch.float32]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_kernel_image(self, param, value, dtype, device): + if param == "start_end_points": + kwargs = dict(zip(["startpoints", "endpoints"], value)) + else: + kwargs = {"startpoints": None, "endpoints": None, param: value} + if param == "fill": + kwargs["coefficients"] = self.COEFFICIENTS[0] + + check_kernel( + F.perspective_image, + make_image(dtype=dtype, device=device), + **kwargs, + check_scripted_vs_eager=not (param == "fill" and isinstance(value, (int, float))), + ) + + def test_kernel_image_error(self): + image = make_image_tensor() + + with pytest.raises(ValueError, match="startpoints/endpoints or the coefficients must have non `None` values"): + F.perspective_image(image, startpoints=None, endpoints=None) + + with pytest.raises( + ValueError, match="startpoints/endpoints and the coefficients shouldn't be defined concurrently" + ): + startpoints, endpoints = self.START_END_POINTS[0] + coefficients = self.COEFFICIENTS[0] + F.perspective_image(image, startpoints=startpoints, endpoints=endpoints, coefficients=coefficients) + + with pytest.raises(ValueError, match="coefficients should have 8 float values"): + F.perspective_image(image, startpoints=None, endpoints=None, coefficients=list(range(7))) + + @param_value_parametrization( + coefficients=COEFFICIENTS, + start_end_points=START_END_POINTS, + ) + @pytest.mark.parametrize("format", list(tv_tensors.BoundingBoxFormat)) + def test_kernel_bounding_boxes(self, param, value, format): + if param == "start_end_points": + kwargs = dict(zip(["startpoints", "endpoints"], value)) + else: + kwargs = {"startpoints": None, "endpoints": None, param: value} + + bounding_boxes = make_bounding_boxes(format=format) + + check_kernel( + F.perspective_bounding_boxes, + bounding_boxes, + format=bounding_boxes.format, + canvas_size=bounding_boxes.canvas_size, + **kwargs, + ) + + def test_kernel_bounding_boxes_error(self): + bounding_boxes = make_bounding_boxes() + format, canvas_size = bounding_boxes.format, bounding_boxes.canvas_size + bounding_boxes = bounding_boxes.as_subclass(torch.Tensor) + + with pytest.raises(RuntimeError, match="Denominator is zero"): + F.perspective_bounding_boxes( + bounding_boxes, + format=format, + canvas_size=canvas_size, + startpoints=None, + endpoints=None, + coefficients=[0.0] * 8, + ) + + @pytest.mark.parametrize("make_mask", [make_segmentation_mask, make_detection_masks]) + def test_kernel_mask(self, make_mask): + check_kernel(F.perspective_mask, make_mask(), **self.MINIMAL_KWARGS) + + def test_kernel_video(self): + check_kernel(F.perspective_video, 
make_video(), **self.MINIMAL_KWARGS) + + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_bounding_boxes, make_segmentation_mask, make_video], + ) + def test_functional(self, make_input): + check_functional(F.perspective, make_input(), **self.MINIMAL_KWARGS) + + @pytest.mark.parametrize( + ("kernel", "input_type"), + [ + (F.perspective_image, torch.Tensor), + (F._geometry._perspective_image_pil, PIL.Image.Image), + (F.perspective_image, tv_tensors.Image), + (F.perspective_bounding_boxes, tv_tensors.BoundingBoxes), + (F.perspective_mask, tv_tensors.Mask), + (F.perspective_video, tv_tensors.Video), + ], + ) + def test_functional_signature(self, kernel, input_type): + check_functional_kernel_signature_match(F.perspective, kernel=kernel, input_type=input_type) + + @pytest.mark.parametrize("distortion_scale", [0.5, 0.0, 1.0]) + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_bounding_boxes, make_segmentation_mask, make_video], + ) + def test_transform(self, distortion_scale, make_input): + check_transform(transforms.RandomPerspective(distortion_scale=distortion_scale, p=1), make_input()) + + @pytest.mark.parametrize("distortion_scale", [-1, 2]) + def test_transform_error(self, distortion_scale): + with pytest.raises(ValueError, match="distortion_scale value should be between 0 and 1"): + transforms.RandomPerspective(distortion_scale=distortion_scale) + + @pytest.mark.parametrize("coefficients", COEFFICIENTS) + @pytest.mark.parametrize( + "interpolation", [transforms.InterpolationMode.NEAREST, transforms.InterpolationMode.BILINEAR] + ) + @pytest.mark.parametrize("fill", CORRECTNESS_FILLS) + def test_image_functional_correctness(self, coefficients, interpolation, fill): + image = make_image(dtype=torch.uint8, device="cpu") + + actual = F.perspective( + image, startpoints=None, endpoints=None, coefficients=coefficients, interpolation=interpolation, fill=fill + ) + expected = F.to_image( + F.perspective( + F.to_pil_image(image), + startpoints=None, + endpoints=None, + coefficients=coefficients, + interpolation=interpolation, + fill=fill, + ) + ) + + if interpolation is transforms.InterpolationMode.BILINEAR: + abs_diff = (actual.float() - expected.float()).abs() + assert (abs_diff > 1).float().mean() < 7e-2 + mae = abs_diff.mean() + assert mae < 3 + else: + assert_equal(actual, expected) + + def _reference_perspective_bounding_boxes(self, bounding_boxes, *, startpoints, endpoints): + format = bounding_boxes.format + canvas_size = bounding_boxes.canvas_size + dtype = bounding_boxes.dtype + device = bounding_boxes.device + + coefficients = _get_perspective_coeffs(endpoints, startpoints) + + def perspective_bounding_boxes(bounding_boxes): + m1 = np.array( + [ + [coefficients[0], coefficients[1], coefficients[2]], + [coefficients[3], coefficients[4], coefficients[5]], + ] + ) + m2 = np.array( + [ + [coefficients[6], coefficients[7], 1.0], + [coefficients[6], coefficients[7], 1.0], + ] + ) + + # Go to float before converting to prevent precision loss in case of CXCYWH -> XYXY and W or H is 1 + input_xyxy = F.convert_bounding_box_format( + bounding_boxes.to(dtype=torch.float64, device="cpu", copy=True), + old_format=format, + new_format=tv_tensors.BoundingBoxFormat.XYXY, + inplace=True, + ) + x1, y1, x2, y2 = input_xyxy.squeeze(0).tolist() + + points = np.array( + [ + [x1, y1, 1.0], + [x2, y1, 1.0], + [x1, y2, 1.0], + [x2, y2, 1.0], + ] + ) + + numerator = points @ m1.T + denominator = points @ m2.T + 
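+            # Descriptive note (added): this applies the homography to each corner point. `m1` holds the
+            # affine numerator (coefficients 0-5) and `m2` the projective denominator (coefficients 6-7),
+            # so the elementwise division below computes x' = (ax + by + c) / (gx + hy + 1) per corner.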
transformed_points = numerator / denominator
+
+            output_xyxy = torch.Tensor(
+                [
+                    float(np.min(transformed_points[:, 0])),
+                    float(np.min(transformed_points[:, 1])),
+                    float(np.max(transformed_points[:, 0])),
+                    float(np.max(transformed_points[:, 1])),
+                ]
+            )
+
+            output = F.convert_bounding_box_format(
+                output_xyxy, old_format=tv_tensors.BoundingBoxFormat.XYXY, new_format=format
+            )
+
+            # It is important to clamp before casting, especially for CXCYWH format, dtype=int64
+            return F.clamp_bounding_boxes(
+                output,
+                format=format,
+                canvas_size=canvas_size,
+            ).to(dtype=dtype, device=device)
+
+        return tv_tensors.BoundingBoxes(
+            torch.cat([perspective_bounding_boxes(b) for b in bounding_boxes.reshape(-1, 4).unbind()], dim=0).reshape(
+                bounding_boxes.shape
+            ),
+            format=format,
+            canvas_size=canvas_size,
+        )
+
+    @pytest.mark.parametrize(("startpoints", "endpoints"), START_END_POINTS)
+    @pytest.mark.parametrize("format", list(tv_tensors.BoundingBoxFormat))
+    @pytest.mark.parametrize("dtype", [torch.int64, torch.float32])
+    @pytest.mark.parametrize("device", cpu_and_cuda())
+    def test_correctness_perspective_bounding_boxes(self, startpoints, endpoints, format, dtype, device):
+        bounding_boxes = make_bounding_boxes(format=format, dtype=dtype, device=device)
+
+        actual = F.perspective(bounding_boxes, startpoints=startpoints, endpoints=endpoints)
+        expected = self._reference_perspective_bounding_boxes(
+            bounding_boxes, startpoints=startpoints, endpoints=endpoints
+        )
+
+        assert_close(actual, expected, rtol=0, atol=1)
+
+
+class TestEqualize:
+    @pytest.mark.parametrize("dtype", [torch.uint8, torch.float32])
+    @pytest.mark.parametrize("device", cpu_and_cuda())
+    def test_kernel_image(self, dtype, device):
+        check_kernel(F.equalize_image, make_image(dtype=dtype, device=device))
+
+    def test_kernel_video(self):
+        check_kernel(F.equalize_video, make_video())
+
+    @pytest.mark.parametrize("make_input", [make_image_tensor, make_image_pil, make_image, make_video])
+    def test_functional(self, make_input):
+        check_functional(F.equalize, make_input())
+
+    @pytest.mark.parametrize(
+        ("kernel", "input_type"),
+        [
+            (F.equalize_image, torch.Tensor),
+            (F._color._equalize_image_pil, PIL.Image.Image),
+            (F.equalize_image, tv_tensors.Image),
+            (F.equalize_video, tv_tensors.Video),
+        ],
+    )
+    def test_functional_signature(self, kernel, input_type):
+        check_functional_kernel_signature_match(F.equalize, kernel=kernel, input_type=input_type)
+
+    @pytest.mark.parametrize(
+        "make_input",
+        [make_image_tensor, make_image_pil, make_image, make_video],
+    )
+    def test_transform(self, make_input):
+        check_transform(transforms.RandomEqualize(p=1), make_input())
+
+    @pytest.mark.parametrize(("low", "high"), [(0, 64), (64, 192), (192, 256), (0, 1), (127, 128), (255, 256)])
+    @pytest.mark.parametrize("fn", [F.equalize, transform_cls_to_functional(transforms.RandomEqualize, p=1)])
+    def test_image_correctness(self, low, high, fn):
+        # We are not using the default `make_image` here since that uniformly samples the values over the whole value
+        # range. Since the whole point of F.equalize is to transform an arbitrary distribution of values into a uniform
+        # one over the full range, the information gain is low if we already provide something really close to the
+        # expected value.
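+        # Descriptive note (added): instead, uint8 values are drawn from the narrow band [low, high), so the
+        # histogram is concentrated and equalization has a clearly visible effect to verify against PIL.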
+        image = tv_tensors.Image(
+            torch.testing.make_tensor((3, 117, 253), dtype=torch.uint8, device="cpu", low=low, high=high)
+        )
+
+        actual = fn(image)
+        expected = F.to_image(F.equalize(F.to_pil_image(image)))
+
+        assert_equal(actual, expected)
+
+
+class TestUniformTemporalSubsample:
+    def test_kernel_video(self):
+        check_kernel(F.uniform_temporal_subsample_video, make_video(), num_samples=2)
+
+    @pytest.mark.parametrize("make_input", [make_video_tensor, make_video])
+    def test_functional(self, make_input):
+        check_functional(F.uniform_temporal_subsample, make_input(), num_samples=2)
+
+    @pytest.mark.parametrize(
+        ("kernel", "input_type"),
+        [
+            (F.uniform_temporal_subsample_video, torch.Tensor),
+            (F.uniform_temporal_subsample_video, tv_tensors.Video),
+        ],
+    )
+    def test_functional_signature(self, kernel, input_type):
+        check_functional_kernel_signature_match(F.uniform_temporal_subsample, kernel=kernel, input_type=input_type)
+
+    @pytest.mark.parametrize("make_input", [make_video_tensor, make_video])
+    def test_transform(self, make_input):
+        check_transform(transforms.UniformTemporalSubsample(num_samples=2), make_input())
+
+    def _reference_uniform_temporal_subsample_video(self, video, *, num_samples):
+        # Adapted from
+        # https://github.com/facebookresearch/pytorchvideo/blob/c8d23d8b7e597586a9e2d18f6ed31ad8aa379a7a/pytorchvideo/transforms/functional.py#L19
+        t = video.shape[-4]
+        assert num_samples > 0 and t > 0
+        # Sample by nearest neighbor interpolation if num_samples > t.
+        indices = torch.linspace(0, t - 1, num_samples, device=video.device)
+        indices = torch.clamp(indices, 0, t - 1).long()
+        return tv_tensors.Video(torch.index_select(video, -4, indices))
+
+    CORRECTNESS_NUM_FRAMES = 5
+
+    @pytest.mark.parametrize("num_samples", list(range(1, CORRECTNESS_NUM_FRAMES + 1)))
+    @pytest.mark.parametrize("dtype", [torch.uint8, torch.float32])
+    @pytest.mark.parametrize("device", cpu_and_cuda())
+    @pytest.mark.parametrize(
+        "fn", [F.uniform_temporal_subsample, transform_cls_to_functional(transforms.UniformTemporalSubsample)]
+    )
+    def test_video_correctness(self, num_samples, dtype, device, fn):
+        video = make_video(num_frames=self.CORRECTNESS_NUM_FRAMES, dtype=dtype, device=device)
+
+        actual = fn(video, num_samples=num_samples)
+        expected = self._reference_uniform_temporal_subsample_video(video, num_samples=num_samples)
+
+        assert_equal(actual, expected)
+
+
+class TestNormalize:
+    MEANS_STDS = [
+        ((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)),
+        ([0.0, 0.0, 0.0], [1.0, 1.0, 1.0]),
+    ]
+    MEAN, STD = MEANS_STDS[0]
+
+    @pytest.mark.parametrize(("mean", "std"), [*MEANS_STDS, (0.5, 2.0)])
+    @pytest.mark.parametrize("device", cpu_and_cuda())
+    def test_kernel_image(self, mean, std, device):
+        # Use the parametrized statistics rather than the class defaults, so the scalar case (0.5, 2.0) is
+        # actually exercised. Scalars are not scriptable against the List[float] annotation, so the
+        # scripted-vs-eager check is skipped for them, mirroring the fill handling elsewhere in this file.
+        check_kernel(
+            F.normalize_image,
+            make_image(dtype=torch.float32, device=device),
+            mean=mean,
+            std=std,
+            check_scripted_vs_eager=not isinstance(mean, float),
+        )
+
+    @pytest.mark.parametrize("device", cpu_and_cuda())
+    def test_kernel_image_inplace(self, device):
+        input = make_image_tensor(dtype=torch.float32, device=device)
+        input_version = input._version
+
+        output_out_of_place = F.normalize_image(input, mean=self.MEAN, std=self.STD)
+        assert output_out_of_place.data_ptr() != input.data_ptr()
+        assert output_out_of_place is not input
+
+        output_inplace = F.normalize_image(input, mean=self.MEAN, std=self.STD, inplace=True)
+        assert output_inplace.data_ptr() == input.data_ptr()
+        assert output_inplace._version > input_version
+        assert output_inplace is input
+
+        assert_equal(output_inplace, output_out_of_place)
+
+    def test_kernel_video(self):
check_kernel(F.normalize_video, make_video(dtype=torch.float32), mean=self.MEAN, std=self.STD) + + @pytest.mark.parametrize("make_input", [make_image_tensor, make_image, make_video]) + def test_functional(self, make_input): + check_functional(F.normalize, make_input(dtype=torch.float32), mean=self.MEAN, std=self.STD) + + @pytest.mark.parametrize( + ("kernel", "input_type"), + [ + (F.normalize_image, torch.Tensor), + (F.normalize_image, tv_tensors.Image), + (F.normalize_video, tv_tensors.Video), + ], + ) + def test_functional_signature(self, kernel, input_type): + check_functional_kernel_signature_match(F.normalize, kernel=kernel, input_type=input_type) + + def test_functional_error(self): + with pytest.raises(TypeError, match="should be a float tensor"): + F.normalize_image(make_image(dtype=torch.uint8), mean=self.MEAN, std=self.STD) + + with pytest.raises(ValueError, match="tensor image of size"): + F.normalize_image(torch.rand(16, 16, dtype=torch.float32), mean=self.MEAN, std=self.STD) + + for std in [0, [0, 0, 0], [0, 1, 1]]: + with pytest.raises(ValueError, match="std evaluated to zero, leading to division by zero"): + F.normalize_image(make_image(dtype=torch.float32), mean=self.MEAN, std=std) + + def _sample_input_adapter(self, transform, input, device): + adapted_input = {} + for key, value in input.items(): + if isinstance(value, PIL.Image.Image): + # normalize doesn't support PIL images + continue + elif check_type(value, (is_pure_tensor, tv_tensors.Image, tv_tensors.Video)): + # normalize doesn't support integer images + value = F.to_dtype(value, torch.float32, scale=True) + adapted_input[key] = value + return adapted_input + + @pytest.mark.parametrize("make_input", [make_image_tensor, make_image, make_video]) + def test_transform(self, make_input): + check_transform( + transforms.Normalize(mean=self.MEAN, std=self.STD), + make_input(dtype=torch.float32), + check_sample_input=self._sample_input_adapter, + ) + + def _reference_normalize_image(self, image, *, mean, std): + image = image.numpy() + mean, std = [np.array(stat, dtype=image.dtype).reshape((-1, 1, 1)) for stat in [mean, std]] + return tv_tensors.Image((image - mean) / std) + + @pytest.mark.parametrize(("mean", "std"), MEANS_STDS) + @pytest.mark.parametrize("dtype", [torch.float16, torch.float32, torch.float64]) + @pytest.mark.parametrize("fn", [F.normalize, transform_cls_to_functional(transforms.Normalize)]) + def test_correctness_image(self, mean, std, dtype, fn): + image = make_image(dtype=dtype) + + actual = fn(image, mean=mean, std=std) + expected = self._reference_normalize_image(image, mean=mean, std=std) + + assert_equal(actual, expected) + + +class TestClampBoundingBoxes: + @pytest.mark.parametrize("format", list(tv_tensors.BoundingBoxFormat)) + @pytest.mark.parametrize("dtype", [torch.int64, torch.float32]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_kernel(self, format, dtype, device): + bounding_boxes = make_bounding_boxes(format=format, dtype=dtype, device=device) + check_kernel( + F.clamp_bounding_boxes, + bounding_boxes, + format=bounding_boxes.format, + canvas_size=bounding_boxes.canvas_size, + ) + + @pytest.mark.parametrize("format", list(tv_tensors.BoundingBoxFormat)) + def test_functional(self, format): + check_functional(F.clamp_bounding_boxes, make_bounding_boxes(format=format)) + + def test_errors(self): + input_tv_tensor = make_bounding_boxes() + input_pure_tensor = input_tv_tensor.as_subclass(torch.Tensor) + format, canvas_size = input_tv_tensor.format, 
input_tv_tensor.canvas_size + + for format_, canvas_size_ in [(None, None), (format, None), (None, canvas_size)]: + with pytest.raises( + ValueError, match="For pure tensor inputs, `format` and `canvas_size` have to be passed." + ): + F.clamp_bounding_boxes(input_pure_tensor, format=format_, canvas_size=canvas_size_) + + for format_, canvas_size_ in [(format, canvas_size), (format, None), (None, canvas_size)]: + with pytest.raises( + ValueError, match="For bounding box tv_tensor inputs, `format` and `canvas_size` must not be passed." + ): + F.clamp_bounding_boxes(input_tv_tensor, format=format_, canvas_size=canvas_size_) + + def test_transform(self): + check_transform(transforms.ClampBoundingBoxes(), make_bounding_boxes()) + + +class TestInvert: + @pytest.mark.parametrize("dtype", [torch.uint8, torch.int16, torch.float32]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_kernel_image(self, dtype, device): + check_kernel(F.invert_image, make_image(dtype=dtype, device=device)) + + def test_kernel_video(self): + check_kernel(F.invert_video, make_video()) + + @pytest.mark.parametrize("make_input", [make_image_tensor, make_image, make_image_pil, make_video]) + def test_functional(self, make_input): + check_functional(F.invert, make_input()) + + @pytest.mark.parametrize( + ("kernel", "input_type"), + [ + (F.invert_image, torch.Tensor), + (F._color._invert_image_pil, PIL.Image.Image), + (F.invert_image, tv_tensors.Image), + (F.invert_video, tv_tensors.Video), + ], + ) + def test_functional_signature(self, kernel, input_type): + check_functional_kernel_signature_match(F.invert, kernel=kernel, input_type=input_type) + + @pytest.mark.parametrize("make_input", [make_image_tensor, make_image_pil, make_image, make_video]) + def test_transform(self, make_input): + check_transform(transforms.RandomInvert(p=1), make_input()) + + @pytest.mark.parametrize("fn", [F.invert, transform_cls_to_functional(transforms.RandomInvert, p=1)]) + def test_correctness_image(self, fn): + image = make_image(dtype=torch.uint8, device="cpu") + + actual = fn(image) + expected = F.to_image(F.invert(F.to_pil_image(image))) + + assert_equal(actual, expected) + + +class TestPosterize: + @pytest.mark.parametrize("dtype", [torch.uint8, torch.float32]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_kernel_image(self, dtype, device): + check_kernel(F.posterize_image, make_image(dtype=dtype, device=device), bits=1) + + def test_kernel_video(self): + check_kernel(F.posterize_video, make_video(), bits=1) + + @pytest.mark.parametrize("make_input", [make_image_tensor, make_image, make_image_pil, make_video]) + def test_functional(self, make_input): + check_functional(F.posterize, make_input(), bits=1) + + @pytest.mark.parametrize( + ("kernel", "input_type"), + [ + (F.posterize_image, torch.Tensor), + (F._color._posterize_image_pil, PIL.Image.Image), + (F.posterize_image, tv_tensors.Image), + (F.posterize_video, tv_tensors.Video), + ], + ) + def test_functional_signature(self, kernel, input_type): + check_functional_kernel_signature_match(F.posterize, kernel=kernel, input_type=input_type) + + @pytest.mark.parametrize("make_input", [make_image_tensor, make_image_pil, make_image, make_video]) + def test_transform(self, make_input): + check_transform(transforms.RandomPosterize(bits=1, p=1), make_input()) + + @pytest.mark.parametrize("bits", [1, 4, 8]) + @pytest.mark.parametrize("fn", [F.posterize, transform_cls_to_functional(transforms.RandomPosterize, p=1)]) + def test_correctness_image(self, bits, fn): + image 
= make_image(dtype=torch.uint8, device="cpu") + + actual = fn(image, bits=bits) + expected = F.to_image(F.posterize(F.to_pil_image(image), bits=bits)) + + assert_equal(actual, expected) + + +class TestSolarize: + def _make_threshold(self, input, *, factor=0.5): + dtype = input.dtype if isinstance(input, torch.Tensor) else torch.uint8 + return (float if dtype.is_floating_point else int)(get_max_value(dtype) * factor) + + @pytest.mark.parametrize("dtype", [torch.uint8, torch.float32]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_kernel_image(self, dtype, device): + image = make_image(dtype=dtype, device=device) + check_kernel(F.solarize_image, image, threshold=self._make_threshold(image)) + + def test_kernel_video(self): + video = make_video() + check_kernel(F.solarize_video, video, threshold=self._make_threshold(video)) + + @pytest.mark.parametrize("make_input", [make_image_tensor, make_image, make_image_pil, make_video]) + def test_functional(self, make_input): + input = make_input() + check_functional(F.solarize, input, threshold=self._make_threshold(input)) + + @pytest.mark.parametrize( + ("kernel", "input_type"), + [ + (F.solarize_image, torch.Tensor), + (F._color._solarize_image_pil, PIL.Image.Image), + (F.solarize_image, tv_tensors.Image), + (F.solarize_video, tv_tensors.Video), + ], + ) + def test_functional_signature(self, kernel, input_type): + check_functional_kernel_signature_match(F.solarize, kernel=kernel, input_type=input_type) + + @pytest.mark.parametrize(("dtype", "threshold"), [(torch.uint8, 256), (torch.float, 1.5)]) + def test_functional_error(self, dtype, threshold): + with pytest.raises(TypeError, match="Threshold should be less or equal the maximum value of the dtype"): + F.solarize(make_image(dtype=dtype), threshold=threshold) + + @pytest.mark.parametrize("make_input", [make_image_tensor, make_image_pil, make_image, make_video]) + def test_transform(self, make_input): + input = make_input() + check_transform(transforms.RandomSolarize(threshold=self._make_threshold(input), p=1), input) + + @pytest.mark.parametrize("threshold_factor", [0.0, 0.1, 0.5, 0.9, 1.0]) + @pytest.mark.parametrize("fn", [F.solarize, transform_cls_to_functional(transforms.RandomSolarize, p=1)]) + def test_correctness_image(self, threshold_factor, fn): + image = make_image(dtype=torch.uint8, device="cpu") + threshold = self._make_threshold(image, factor=threshold_factor) + + actual = fn(image, threshold=threshold) + expected = F.to_image(F.solarize(F.to_pil_image(image), threshold=threshold)) + + assert_equal(actual, expected) + + +class TestAutocontrast: + @pytest.mark.parametrize("dtype", [torch.uint8, torch.int16, torch.float32]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_kernel_image(self, dtype, device): + check_kernel(F.autocontrast_image, make_image(dtype=dtype, device=device)) + + def test_kernel_video(self): + check_kernel(F.autocontrast_video, make_video()) + + @pytest.mark.parametrize("make_input", [make_image_tensor, make_image, make_image_pil, make_video]) + def test_functional(self, make_input): + check_functional(F.autocontrast, make_input()) + + @pytest.mark.parametrize( + ("kernel", "input_type"), + [ + (F.autocontrast_image, torch.Tensor), + (F._color._autocontrast_image_pil, PIL.Image.Image), + (F.autocontrast_image, tv_tensors.Image), + (F.autocontrast_video, tv_tensors.Video), + ], + ) + def test_functional_signature(self, kernel, input_type): + check_functional_kernel_signature_match(F.autocontrast, kernel=kernel, input_type=input_type) 
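+    # Note (added): the transform test below allows off-by-one values against v1 (rtol=0, atol=1),
+    # presumably to absorb rounding differences between the v1 and v2 autocontrast implementations.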
+ + @pytest.mark.parametrize("make_input", [make_image_tensor, make_image_pil, make_image, make_video]) + def test_transform(self, make_input): + check_transform(transforms.RandomAutocontrast(p=1), make_input(), check_v1_compatibility=dict(rtol=0, atol=1)) + + @pytest.mark.parametrize("fn", [F.autocontrast, transform_cls_to_functional(transforms.RandomAutocontrast, p=1)]) + def test_correctness_image(self, fn): + image = make_image(dtype=torch.uint8, device="cpu") + + actual = fn(image) + expected = F.to_image(F.autocontrast(F.to_pil_image(image))) + + assert_close(actual, expected, rtol=0, atol=1) + + +class TestAdjustSharpness: + @pytest.mark.parametrize("dtype", [torch.uint8, torch.float32]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_kernel_image(self, dtype, device): + check_kernel(F.adjust_sharpness_image, make_image(dtype=dtype, device=device), sharpness_factor=0.5) + + def test_kernel_video(self): + check_kernel(F.adjust_sharpness_video, make_video(), sharpness_factor=0.5) + + @pytest.mark.parametrize("make_input", [make_image_tensor, make_image, make_image_pil, make_video]) + def test_functional(self, make_input): + check_functional(F.adjust_sharpness, make_input(), sharpness_factor=0.5) + + @pytest.mark.parametrize( + ("kernel", "input_type"), + [ + (F.adjust_sharpness_image, torch.Tensor), + (F._color._adjust_sharpness_image_pil, PIL.Image.Image), + (F.adjust_sharpness_image, tv_tensors.Image), + (F.adjust_sharpness_video, tv_tensors.Video), + ], + ) + def test_functional_signature(self, kernel, input_type): + check_functional_kernel_signature_match(F.adjust_sharpness, kernel=kernel, input_type=input_type) + + @pytest.mark.parametrize("make_input", [make_image_tensor, make_image_pil, make_image, make_video]) + def test_transform(self, make_input): + check_transform(transforms.RandomAdjustSharpness(sharpness_factor=0.5, p=1), make_input()) + + def test_functional_error(self): + with pytest.raises(TypeError, match="can have 1 or 3 channels"): + F.adjust_sharpness(make_image(color_space="RGBA"), sharpness_factor=0.5) + + with pytest.raises(ValueError, match="is not non-negative"): + F.adjust_sharpness(make_image(), sharpness_factor=-1) + + @pytest.mark.parametrize("sharpness_factor", [0.1, 0.5, 1.0]) + @pytest.mark.parametrize( + "fn", [F.adjust_sharpness, transform_cls_to_functional(transforms.RandomAdjustSharpness, p=1)] + ) + def test_correctness_image(self, sharpness_factor, fn): + image = make_image(dtype=torch.uint8, device="cpu") + + actual = fn(image, sharpness_factor=sharpness_factor) + expected = F.to_image(F.adjust_sharpness(F.to_pil_image(image), sharpness_factor=sharpness_factor)) + + assert_equal(actual, expected) + + +class TestAdjustContrast: + @pytest.mark.parametrize("dtype", [torch.uint8, torch.float32]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_kernel_image(self, dtype, device): + check_kernel(F.adjust_contrast_image, make_image(dtype=dtype, device=device), contrast_factor=0.5) + + def test_kernel_video(self): + check_kernel(F.adjust_contrast_video, make_video(), contrast_factor=0.5) + + @pytest.mark.parametrize("make_input", [make_image_tensor, make_image, make_image_pil, make_video]) + def test_functional(self, make_input): + check_functional(F.adjust_contrast, make_input(), contrast_factor=0.5) + + @pytest.mark.parametrize( + ("kernel", "input_type"), + [ + (F.adjust_contrast_image, torch.Tensor), + (F._color._adjust_contrast_image_pil, PIL.Image.Image), + (F.adjust_contrast_image, tv_tensors.Image), + 
(F.adjust_contrast_video, tv_tensors.Video), + ], + ) + def test_functional_signature(self, kernel, input_type): + check_functional_kernel_signature_match(F.adjust_contrast, kernel=kernel, input_type=input_type) + + def test_functional_error(self): + with pytest.raises(TypeError, match="permitted channel values are 1 or 3"): + F.adjust_contrast(make_image(color_space="RGBA"), contrast_factor=0.5) + + with pytest.raises(ValueError, match="is not non-negative"): + F.adjust_contrast(make_image(), contrast_factor=-1) + + @pytest.mark.parametrize("contrast_factor", [0.1, 0.5, 1.0]) + def test_correctness_image(self, contrast_factor): + image = make_image(dtype=torch.uint8, device="cpu") + + actual = F.adjust_contrast(image, contrast_factor=contrast_factor) + expected = F.to_image(F.adjust_contrast(F.to_pil_image(image), contrast_factor=contrast_factor)) + + assert_close(actual, expected, rtol=0, atol=1) + + +class TestAdjustGamma: + @pytest.mark.parametrize("dtype", [torch.uint8, torch.float32]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_kernel_image(self, dtype, device): + check_kernel(F.adjust_gamma_image, make_image(dtype=dtype, device=device), gamma=0.5) + + def test_kernel_video(self): + check_kernel(F.adjust_gamma_video, make_video(), gamma=0.5) + + @pytest.mark.parametrize("make_input", [make_image_tensor, make_image, make_image_pil, make_video]) + def test_functional(self, make_input): + check_functional(F.adjust_gamma, make_input(), gamma=0.5) + + @pytest.mark.parametrize( + ("kernel", "input_type"), + [ + (F.adjust_gamma_image, torch.Tensor), + (F._color._adjust_gamma_image_pil, PIL.Image.Image), + (F.adjust_gamma_image, tv_tensors.Image), + (F.adjust_gamma_video, tv_tensors.Video), + ], + ) + def test_functional_signature(self, kernel, input_type): + check_functional_kernel_signature_match(F.adjust_gamma, kernel=kernel, input_type=input_type) + + def test_functional_error(self): + with pytest.raises(ValueError, match="Gamma should be a non-negative real number"): + F.adjust_gamma(make_image(), gamma=-1) + + @pytest.mark.parametrize("gamma", [0.1, 0.5, 1.0]) + @pytest.mark.parametrize("gain", [0.1, 1.0, 2.0]) + def test_correctness_image(self, gamma, gain): + image = make_image(dtype=torch.uint8, device="cpu") + + actual = F.adjust_gamma(image, gamma=gamma, gain=gain) + expected = F.to_image(F.adjust_gamma(F.to_pil_image(image), gamma=gamma, gain=gain)) + + assert_equal(actual, expected) + + +class TestAdjustHue: + @pytest.mark.parametrize("dtype", [torch.uint8, torch.float32]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_kernel_image(self, dtype, device): + check_kernel(F.adjust_hue_image, make_image(dtype=dtype, device=device), hue_factor=0.25) + + def test_kernel_video(self): + check_kernel(F.adjust_hue_video, make_video(), hue_factor=0.25) + + @pytest.mark.parametrize("make_input", [make_image_tensor, make_image, make_image_pil, make_video]) + def test_functional(self, make_input): + check_functional(F.adjust_hue, make_input(), hue_factor=0.25) + + @pytest.mark.parametrize( + ("kernel", "input_type"), + [ + (F.adjust_hue_image, torch.Tensor), + (F._color._adjust_hue_image_pil, PIL.Image.Image), + (F.adjust_hue_image, tv_tensors.Image), + (F.adjust_hue_video, tv_tensors.Video), + ], + ) + def test_functional_signature(self, kernel, input_type): + check_functional_kernel_signature_match(F.adjust_hue, kernel=kernel, input_type=input_type) + + def test_functional_error(self): + with pytest.raises(TypeError, match="permitted channel values are 1 
or 3"): + F.adjust_hue(make_image(color_space="RGBA"), hue_factor=0.25) + + for hue_factor in [-1, 1]: + with pytest.raises(ValueError, match=re.escape("is not in [-0.5, 0.5]")): + F.adjust_hue(make_image(), hue_factor=hue_factor) + + @pytest.mark.parametrize("hue_factor", [-0.5, -0.3, 0.0, 0.2, 0.5]) + def test_correctness_image(self, hue_factor): + image = make_image(dtype=torch.uint8, device="cpu") + + actual = F.adjust_hue(image, hue_factor=hue_factor) + expected = F.to_image(F.adjust_hue(F.to_pil_image(image), hue_factor=hue_factor)) + + mae = (actual.float() - expected.float()).abs().mean() + assert mae < 2 + + +class TestAdjustSaturation: + @pytest.mark.parametrize("dtype", [torch.uint8, torch.float32]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_kernel_image(self, dtype, device): + check_kernel(F.adjust_saturation_image, make_image(dtype=dtype, device=device), saturation_factor=0.5) + + def test_kernel_video(self): + check_kernel(F.adjust_saturation_video, make_video(), saturation_factor=0.5) + + @pytest.mark.parametrize("make_input", [make_image_tensor, make_image, make_image_pil, make_video]) + def test_functional(self, make_input): + check_functional(F.adjust_saturation, make_input(), saturation_factor=0.5) + + @pytest.mark.parametrize( + ("kernel", "input_type"), + [ + (F.adjust_saturation_image, torch.Tensor), + (F._color._adjust_saturation_image_pil, PIL.Image.Image), + (F.adjust_saturation_image, tv_tensors.Image), + (F.adjust_saturation_video, tv_tensors.Video), + ], + ) + def test_functional_signature(self, kernel, input_type): + check_functional_kernel_signature_match(F.adjust_saturation, kernel=kernel, input_type=input_type) + + def test_functional_error(self): + with pytest.raises(TypeError, match="permitted channel values are 1 or 3"): + F.adjust_saturation(make_image(color_space="RGBA"), saturation_factor=0.5) + + with pytest.raises(ValueError, match="is not non-negative"): + F.adjust_saturation(make_image(), saturation_factor=-1) + + @pytest.mark.parametrize("saturation_factor", [0.1, 0.5, 1.0]) + def test_correctness_image(self, saturation_factor): + image = make_image(dtype=torch.uint8, device="cpu") + + actual = F.adjust_saturation(image, saturation_factor=saturation_factor) + expected = F.to_image(F.adjust_saturation(F.to_pil_image(image), saturation_factor=saturation_factor)) + + assert_close(actual, expected, rtol=0, atol=1) + + +class TestFiveTenCrop: + INPUT_SIZE = (17, 11) + OUTPUT_SIZE = (3, 5) + + @pytest.mark.parametrize("dtype", [torch.uint8, torch.float32]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + @pytest.mark.parametrize("kernel", [F.five_crop_image, F.ten_crop_image]) + def test_kernel_image(self, dtype, device, kernel): + check_kernel( + kernel, + make_image(self.INPUT_SIZE, dtype=dtype, device=device), + size=self.OUTPUT_SIZE, + check_batched_vs_unbatched=False, + ) + + @pytest.mark.parametrize("kernel", [F.five_crop_video, F.ten_crop_video]) + def test_kernel_video(self, kernel): + check_kernel(kernel, make_video(self.INPUT_SIZE), size=self.OUTPUT_SIZE, check_batched_vs_unbatched=False) + + def _functional_wrapper(self, fn): + # This wrapper is needed to make five_crop / ten_crop compatible with check_functional, since that requires a + # single output rather than a sequence. 
+ @functools.wraps(fn) + def wrapper(*args, **kwargs): + outputs = fn(*args, **kwargs) + return outputs[0] + + return wrapper + + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_video], + ) + @pytest.mark.parametrize("functional", [F.five_crop, F.ten_crop]) + def test_functional(self, make_input, functional): + check_functional( + self._functional_wrapper(functional), + make_input(self.INPUT_SIZE), + size=self.OUTPUT_SIZE, + check_scripted_smoke=False, + ) + + @pytest.mark.parametrize( + ("functional", "kernel", "input_type"), + [ + (F.five_crop, F.five_crop_image, torch.Tensor), + (F.five_crop, F._geometry._five_crop_image_pil, PIL.Image.Image), + (F.five_crop, F.five_crop_image, tv_tensors.Image), + (F.five_crop, F.five_crop_video, tv_tensors.Video), + (F.ten_crop, F.ten_crop_image, torch.Tensor), + (F.ten_crop, F._geometry._ten_crop_image_pil, PIL.Image.Image), + (F.ten_crop, F.ten_crop_image, tv_tensors.Image), + (F.ten_crop, F.ten_crop_video, tv_tensors.Video), + ], + ) + def test_functional_signature(self, functional, kernel, input_type): + check_functional_kernel_signature_match(functional, kernel=kernel, input_type=input_type) + + class _TransformWrapper(nn.Module): + # This wrapper is needed to make FiveCrop / TenCrop compatible with check_transform, since that requires a + # single output rather than a sequence. + _v1_transform_cls = None + + def _extract_params_for_v1_transform(self): + return dict(five_ten_crop_transform=self.five_ten_crop_transform) + + def __init__(self, five_ten_crop_transform): + super().__init__() + type(self)._v1_transform_cls = type(self) + self.five_ten_crop_transform = five_ten_crop_transform + + def forward(self, input: torch.Tensor) -> torch.Tensor: + outputs = self.five_ten_crop_transform(input) + return outputs[0] + + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_video], + ) + @pytest.mark.parametrize("transform_cls", [transforms.FiveCrop, transforms.TenCrop]) + def test_transform(self, make_input, transform_cls): + check_transform( + self._TransformWrapper(transform_cls(size=self.OUTPUT_SIZE)), + make_input(self.INPUT_SIZE), + check_sample_input=False, + ) + + @pytest.mark.parametrize("make_input", [make_bounding_boxes, make_detection_masks]) + @pytest.mark.parametrize("transform_cls", [transforms.FiveCrop, transforms.TenCrop]) + def test_transform_error(self, make_input, transform_cls): + transform = transform_cls(size=self.OUTPUT_SIZE) + + with pytest.raises(TypeError, match="not supported"): + transform(make_input(self.INPUT_SIZE)) + + @pytest.mark.parametrize("fn", [F.five_crop, transform_cls_to_functional(transforms.FiveCrop)]) + def test_correctness_image_five_crop(self, fn): + image = make_image(self.INPUT_SIZE, dtype=torch.uint8, device="cpu") + + actual = fn(image, size=self.OUTPUT_SIZE) + expected = F.five_crop(F.to_pil_image(image), size=self.OUTPUT_SIZE) + + assert isinstance(actual, tuple) + assert_equal(actual, [F.to_image(e) for e in expected]) + + @pytest.mark.parametrize("fn_or_class", [F.ten_crop, transforms.TenCrop]) + @pytest.mark.parametrize("vertical_flip", [False, True]) + def test_correctness_image_ten_crop(self, fn_or_class, vertical_flip): + if fn_or_class is transforms.TenCrop: + fn = transform_cls_to_functional(fn_or_class, size=self.OUTPUT_SIZE, vertical_flip=vertical_flip) + kwargs = dict() + else: + fn = fn_or_class + kwargs = dict(size=self.OUTPUT_SIZE, vertical_flip=vertical_flip) + + image = 
make_image(self.INPUT_SIZE, dtype=torch.uint8, device="cpu") + + actual = fn(image, **kwargs) + expected = F.ten_crop(F.to_pil_image(image), size=self.OUTPUT_SIZE, vertical_flip=vertical_flip) + + assert isinstance(actual, tuple) + assert_equal(actual, [F.to_image(e) for e in expected]) + + +class TestColorJitter: + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_video], + ) + @pytest.mark.parametrize("dtype", [torch.uint8, torch.float32]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_transform(self, make_input, dtype, device): + if make_input is make_image_pil and not (dtype is torch.uint8 and device == "cpu"): + pytest.skip( + "PIL image tests with parametrization other than dtype=torch.uint8 and device='cpu' " + "will degenerate to that anyway." + ) + + check_transform( + transforms.ColorJitter(brightness=0.5, contrast=0.5, saturation=0.5, hue=0.25), + make_input(dtype=dtype, device=device), + ) + + def test_transform_noop(self): + input = make_image() + input_version = input._version + + transform = transforms.ColorJitter() + output = transform(input) + + assert output is input + assert output.data_ptr() == input.data_ptr() + assert output._version == input_version + + def test_transform_error(self): + with pytest.raises(ValueError, match="must be non negative"): + transforms.ColorJitter(brightness=-1) + + for brightness in [object(), [1, 2, 3]]: + with pytest.raises(TypeError, match="single number or a sequence with length 2"): + transforms.ColorJitter(brightness=brightness) + + with pytest.raises(ValueError, match="values should be between"): + transforms.ColorJitter(brightness=(-1, 0.5)) + + with pytest.raises(ValueError, match="values should be between"): + transforms.ColorJitter(hue=1) + + @pytest.mark.parametrize("brightness", [None, 0.1, (0.2, 0.3)]) + @pytest.mark.parametrize("contrast", [None, 0.4, (0.5, 0.6)]) + @pytest.mark.parametrize("saturation", [None, 0.7, (0.8, 0.9)]) + @pytest.mark.parametrize("hue", [None, 0.3, (-0.1, 0.2)]) + def test_transform_correctness(self, brightness, contrast, saturation, hue): + image = make_image(dtype=torch.uint8, device="cpu") + + transform = transforms.ColorJitter(brightness=brightness, contrast=contrast, saturation=saturation, hue=hue) + + with freeze_rng_state(): + torch.manual_seed(0) + actual = transform(image) + + torch.manual_seed(0) + expected = F.to_image(transform(F.to_pil_image(image))) + + mae = (actual.float() - expected.float()).abs().mean() + assert mae < 2 + + +class TestRgbToGrayscale: + @pytest.mark.parametrize("dtype", [torch.uint8, torch.float32]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_kernel_image(self, dtype, device): + check_kernel(F.rgb_to_grayscale_image, make_image(dtype=dtype, device=device)) + + @pytest.mark.parametrize("make_input", [make_image_tensor, make_image_pil, make_image]) + def test_functional(self, make_input): + check_functional(F.rgb_to_grayscale, make_input()) + + @pytest.mark.parametrize( + ("kernel", "input_type"), + [ + (F.rgb_to_grayscale_image, torch.Tensor), + (F._color._rgb_to_grayscale_image_pil, PIL.Image.Image), + (F.rgb_to_grayscale_image, tv_tensors.Image), + ], + ) + def test_functional_signature(self, kernel, input_type): + check_functional_kernel_signature_match(F.rgb_to_grayscale, kernel=kernel, input_type=input_type) + + @pytest.mark.parametrize("transform", [transforms.Grayscale(), transforms.RandomGrayscale(p=1)]) + @pytest.mark.parametrize("make_input", [make_image_tensor, 
make_image_pil, make_image])
+    def test_transform(self, transform, make_input):
+        check_transform(transform, make_input())
+
+    @pytest.mark.parametrize("num_output_channels", [1, 3])
+    @pytest.mark.parametrize("color_space", ["RGB", "GRAY"])
+    @pytest.mark.parametrize("fn", [F.rgb_to_grayscale, transform_cls_to_functional(transforms.Grayscale)])
+    def test_image_correctness(self, num_output_channels, color_space, fn):
+        image = make_image(dtype=torch.uint8, device="cpu", color_space=color_space)
+
+        actual = fn(image, num_output_channels=num_output_channels)
+        expected = F.to_image(F.rgb_to_grayscale(F.to_pil_image(image), num_output_channels=num_output_channels))
+
+        assert_equal(actual, expected, rtol=0, atol=1)
+
+    def test_expanded_channels_are_not_views_into_the_same_underlying_tensor(self):
+        image = make_image(dtype=torch.uint8, device="cpu", color_space="GRAY")
+
+        output_image = F.rgb_to_grayscale(image, num_output_channels=3)
+        assert_equal(output_image[0][0][0], output_image[1][0][0])
+        output_image[0][0][0] = output_image[0][0][0] + 1
+        assert output_image[0][0][0] != output_image[1][0][0]
+
+    @pytest.mark.parametrize("num_input_channels", [1, 3])
+    def test_random_transform_correctness(self, num_input_channels):
+        image = make_image(
+            color_space={
+                1: "GRAY",
+                3: "RGB",
+            }[num_input_channels],
+            dtype=torch.uint8,
+            device="cpu",
+        )
+
+        transform = transforms.RandomGrayscale(p=1)
+
+        actual = transform(image)
+        expected = F.to_image(F.rgb_to_grayscale(F.to_pil_image(image), num_output_channels=num_input_channels))
+
+        assert_equal(actual, expected, rtol=0, atol=1)
+
+
+class TestGrayscaleToRgb:
+    @pytest.mark.parametrize("dtype", [torch.uint8, torch.float32])
+    @pytest.mark.parametrize("device", cpu_and_cuda())
+    def test_kernel_image(self, dtype, device):
+        check_kernel(F.grayscale_to_rgb_image, make_image(dtype=dtype, device=device))
+
+    @pytest.mark.parametrize("make_input", [make_image_tensor, make_image_pil, make_image])
+    def test_functional(self, make_input):
+        check_functional(F.grayscale_to_rgb, make_input())
+
+    @pytest.mark.parametrize(
+        ("kernel", "input_type"),
+        [
+            (F.grayscale_to_rgb_image, torch.Tensor),
+            (F._color._grayscale_to_rgb_image_pil, PIL.Image.Image),
+            (F.grayscale_to_rgb_image, tv_tensors.Image),
+        ],
+    )
+    def test_functional_signature(self, kernel, input_type):
+        check_functional_kernel_signature_match(F.grayscale_to_rgb, kernel=kernel, input_type=input_type)
+
+    @pytest.mark.parametrize("make_input", [make_image_tensor, make_image_pil, make_image])
+    def test_transform(self, make_input):
+        check_transform(transforms.RGB(), make_input(color_space="GRAY"))
+
+    @pytest.mark.parametrize("fn", [F.grayscale_to_rgb, transform_cls_to_functional(transforms.RGB)])
+    def test_image_correctness(self, fn):
+        image = make_image(dtype=torch.uint8, device="cpu", color_space="GRAY")
+
+        actual = fn(image)
+        expected = F.to_image(F.grayscale_to_rgb(F.to_pil_image(image)))
+
+        assert_equal(actual, expected, rtol=0, atol=1)
+
+    def test_expanded_channels_are_not_views_into_the_same_underlying_tensor(self):
+        image = make_image(dtype=torch.uint8, device="cpu", color_space="GRAY")
+
+        output_image = F.grayscale_to_rgb(image)
+        assert_equal(output_image[0][0][0], output_image[1][0][0])
+        output_image[0][0][0] = output_image[0][0][0] + 1
+        assert output_image[0][0][0] != output_image[1][0][0]
+
+    def test_rgb_image_is_unchanged(self):
+        image = make_image(dtype=torch.uint8, device="cpu", color_space="RGB")
+        assert_equal(image.shape[-3], 3)
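+        # With three channels already present, the conversion should be a no-op.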
assert_equal(F.grayscale_to_rgb(image), image) + + +class TestRandomZoomOut: + # Tests are light because this largely relies on the already tested `pad` kernels. + + @pytest.mark.parametrize( + "make_input", + [ + make_image_tensor, + make_image_pil, + make_image, + make_bounding_boxes, + make_segmentation_mask, + make_detection_masks, + make_video, + ], + ) + def test_transform(self, make_input): + check_transform(transforms.RandomZoomOut(p=1), make_input()) + + def test_transform_error(self): + for side_range in [None, 1, [1, 2, 3]]: + with pytest.raises( + ValueError if isinstance(side_range, list) else TypeError, match="should be a sequence of length 2" + ): + transforms.RandomZoomOut(side_range=side_range) + + for side_range in [[0.5, 1.5], [2.0, 1.0]]: + with pytest.raises(ValueError, match="Invalid side range"): + transforms.RandomZoomOut(side_range=side_range) + + @pytest.mark.parametrize("side_range", [(1.0, 4.0), [2.0, 5.0]]) + @pytest.mark.parametrize( + "make_input", + [ + make_image_tensor, + make_image_pil, + make_image, + make_bounding_boxes, + make_segmentation_mask, + make_detection_masks, + make_video, + ], + ) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_transform_params_correctness(self, side_range, make_input, device): + if make_input is make_image_pil and device != "cpu": + pytest.skip("PIL image tests with parametrization device!='cpu' will degenerate to that anyway.") + + transform = transforms.RandomZoomOut(side_range=side_range) + + input = make_input() + height, width = F.get_size(input) + + params = transform.make_params([input]) + assert "padding" in params + + padding = params["padding"] + assert len(padding) == 4 + + assert 0 <= padding[0] <= (side_range[1] - 1) * width + assert 0 <= padding[1] <= (side_range[1] - 1) * height + assert 0 <= padding[2] <= (side_range[1] - 1) * width + assert 0 <= padding[3] <= (side_range[1] - 1) * height + + +class TestRandomPhotometricDistort: + # Tests are light because this largely relies on the already tested + # `adjust_{brightness,contrast,saturation,hue}` and `permute_channels` kernels. + + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_video], + ) + @pytest.mark.parametrize("dtype", [torch.uint8, torch.float32]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_transform(self, make_input, dtype, device): + if make_input is make_image_pil and not (dtype is torch.uint8 and device == "cpu"): + pytest.skip( + "PIL image tests with parametrization other than dtype=torch.uint8 and device='cpu' " + "will degenerate to that anyway." + ) + + check_transform( + transforms.RandomPhotometricDistort( + brightness=(0.3, 0.4), contrast=(0.5, 0.6), saturation=(0.7, 0.8), hue=(-0.1, 0.2), p=1 + ), + make_input(dtype=dtype, device=device), + ) + + +class TestScaleJitter: + # Tests are light because this largely relies on the already tested `resize` kernels. 
+ + INPUT_SIZE = (17, 11) + TARGET_SIZE = (12, 13) + + @pytest.mark.parametrize( + "make_input", + [make_image_tensor, make_image_pil, make_image, make_bounding_boxes, make_segmentation_mask, make_video], + ) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_transform(self, make_input, device): + if make_input is make_image_pil and device != "cpu": + pytest.skip("PIL image tests with parametrization device!='cpu' will degenerate to that anyway.") + + check_transform(transforms.ScaleJitter(self.TARGET_SIZE), make_input(self.INPUT_SIZE, device=device)) + + def test_make_params(self): + input_size = self.INPUT_SIZE + target_size = self.TARGET_SIZE + scale_range = (0.5, 1.5) + + transform = transforms.ScaleJitter(target_size=target_size, scale_range=scale_range) + params = transform.make_params([make_image(input_size)]) + + assert "size" in params + size = params["size"] + + assert isinstance(size, tuple) and len(size) == 2 + height, width = size + + r_min = min(target_size[1] / input_size[0], target_size[0] / input_size[1]) * scale_range[0] + r_max = min(target_size[1] / input_size[0], target_size[0] / input_size[1]) * scale_range[1] + + assert int(input_size[0] * r_min) <= height <= int(input_size[0] * r_max) + assert int(input_size[1] * r_min) <= width <= int(input_size[1] * r_max) + + +class TestLinearTransform: + def _make_matrix_and_vector(self, input, *, device=None): + device = device or input.device + numel = math.prod(F.get_dimensions(input)) + transformation_matrix = torch.randn((numel, numel), device=device) + mean_vector = torch.randn((numel,), device=device) + return transformation_matrix, mean_vector + + def _sample_input_adapter(self, transform, input, device): + return {key: value for key, value in input.items() if not isinstance(value, PIL.Image.Image)} + + @pytest.mark.parametrize("make_input", [make_image_tensor, make_image, make_video]) + @pytest.mark.parametrize("dtype", [torch.uint8, torch.float32]) + @pytest.mark.parametrize("device", cpu_and_cuda()) + def test_transform(self, make_input, dtype, device): + input = make_input(dtype=dtype, device=device) + check_transform( + transforms.LinearTransformation(*self._make_matrix_and_vector(input)), + input, + check_sample_input=self._sample_input_adapter, + # Compat check is failing on M1 with: + # AssertionError: Tensor-likes are not close! 
+ # Mismatched elements: 1 / 561 (0.2%) + # See https://github.com/pytorch/vision/issues/8453 + check_v1_compatibility=(sys.platform != "darwin"), + ) + + def test_transform_error(self): + with pytest.raises(ValueError, match="transformation_matrix should be square"): + transforms.LinearTransformation(transformation_matrix=torch.rand(2, 3), mean_vector=torch.rand(2)) + + with pytest.raises(ValueError, match="mean_vector should have the same length"): + transforms.LinearTransformation(transformation_matrix=torch.rand(2, 2), mean_vector=torch.rand(1)) + + for matrix_dtype, vector_dtype in [(torch.float32, torch.float64), (torch.float64, torch.float32)]: + with pytest.raises(ValueError, match="Input tensors should have the same dtype"): + transforms.LinearTransformation( + transformation_matrix=torch.rand(2, 2, dtype=matrix_dtype), + mean_vector=torch.rand(2, dtype=vector_dtype), + ) + + image = make_image() + transform = transforms.LinearTransformation(transformation_matrix=torch.rand(2, 2), mean_vector=torch.rand(2)) + with pytest.raises(ValueError, match="Input tensor and transformation matrix have incompatible shape"): + transform(image) + + transform = transforms.LinearTransformation(*self._make_matrix_and_vector(image)) + with pytest.raises(TypeError, match="does not support PIL images"): + transform(F.to_pil_image(image)) + + @needs_cuda + def test_transform_error_cuda(self): + for matrix_device, vector_device in [("cuda", "cpu"), ("cpu", "cuda")]: + with pytest.raises(ValueError, match="Input tensors should be on the same device"): + transforms.LinearTransformation( + transformation_matrix=torch.rand(2, 2, device=matrix_device), + mean_vector=torch.rand(2, device=vector_device), + ) + + for input_device, param_device in [("cuda", "cpu"), ("cpu", "cuda")]: + input = make_image(device=input_device) + transform = transforms.LinearTransformation(*self._make_matrix_and_vector(input, device=param_device)) + with pytest.raises( + ValueError, match="Input tensor should be on the same device as transformation matrix and mean vector" + ): + transform(input) + + +def make_image_numpy(*args, **kwargs): + image = make_image_tensor(*args, **kwargs) + return image.permute((1, 2, 0)).numpy() + + +class TestToImage: + @pytest.mark.parametrize("make_input", [make_image_tensor, make_image_pil, make_image, make_image_numpy]) + @pytest.mark.parametrize("fn", [F.to_image, transform_cls_to_functional(transforms.ToImage)]) + def test_functional_and_transform(self, make_input, fn): + input = make_input() + output = fn(input) + + assert isinstance(output, tv_tensors.Image) + + input_size = list(input.shape[:2]) if isinstance(input, np.ndarray) else F.get_size(input) + assert F.get_size(output) == input_size + + if isinstance(input, torch.Tensor): + assert output.data_ptr() == input.data_ptr() + + def test_2d_np_array(self): + # Non-regression test for https://github.com/pytorch/vision/issues/8255 + input = np.random.rand(10, 10) + assert F.to_image(input).shape == (1, 10, 10) + + def test_functional_error(self): + with pytest.raises(TypeError, match="Input can either be a pure Tensor, a numpy array, or a PIL image"): + F.to_image(object()) + + +class TestToPILImage: + @pytest.mark.parametrize("make_input", [make_image_tensor, make_image, make_image_numpy]) + @pytest.mark.parametrize("color_space", ["RGB", "GRAY"]) + @pytest.mark.parametrize("fn", [F.to_pil_image, transform_cls_to_functional(transforms.ToPILImage)]) + def test_functional_and_transform(self, make_input, color_space, fn): + input = 
make_input(color_space=color_space) + output = fn(input) + + assert isinstance(output, PIL.Image.Image) + + input_size = list(input.shape[:2]) if isinstance(input, np.ndarray) else F.get_size(input) + assert F.get_size(output) == input_size + + def test_functional_error(self): + with pytest.raises(TypeError, match="pic should be Tensor or ndarray"): + F.to_pil_image(object()) + + for ndim in [1, 4]: + with pytest.raises(ValueError, match="pic should be 2/3 dimensional"): + F.to_pil_image(torch.empty(*[1] * ndim)) + + with pytest.raises(ValueError, match="pic should not have > 4 channels"): + num_channels = 5 + F.to_pil_image(torch.empty(num_channels, 1, 1)) + + +class TestToTensor: + @pytest.mark.parametrize("make_input", [make_image_tensor, make_image_pil, make_image, make_image_numpy]) + def test_smoke(self, make_input): + with pytest.warns(UserWarning, match="deprecated and will be removed"): + transform = transforms.ToTensor() + + input = make_input() + output = transform(input) + + input_size = list(input.shape[:2]) if isinstance(input, np.ndarray) else F.get_size(input) + assert F.get_size(output) == input_size + + +class TestPILToTensor: + @pytest.mark.parametrize("color_space", ["RGB", "GRAY"]) + @pytest.mark.parametrize("fn", [F.pil_to_tensor, transform_cls_to_functional(transforms.PILToTensor)]) + def test_functional_and_transform(self, color_space, fn): + input = make_image_pil(color_space=color_space) + output = fn(input) + + assert isinstance(output, torch.Tensor) and not isinstance(output, tv_tensors.TVTensor) + assert F.get_size(output) == F.get_size(input) + + def test_functional_error(self): + with pytest.raises(TypeError, match="pic should be PIL Image"): + F.pil_to_tensor(object()) + + +class TestLambda: + @pytest.mark.parametrize("input", [object(), torch.empty(()), np.empty(()), "string", 1, 0.0]) + @pytest.mark.parametrize("types", [(), (torch.Tensor, np.ndarray)]) + def test_transform(self, input, types): + was_applied = False + + def was_applied_fn(input): + nonlocal was_applied + was_applied = True + return input + + transform = transforms.Lambda(was_applied_fn, *types) + output = transform(input) + + assert output is input + assert was_applied is (not types or isinstance(input, types)) + + +@pytest.mark.parametrize( + ("alias", "target"), + [ + pytest.param(alias, target, id=alias.__name__) + for alias, target in [ + (F.hflip, F.horizontal_flip), + (F.vflip, F.vertical_flip), + (F.get_image_num_channels, F.get_num_channels), + (F.to_pil_image, F.to_pil_image), + (F.elastic_transform, F.elastic), + (F.to_grayscale, F.rgb_to_grayscale), + ] + ], +) +def test_alias(alias, target): + assert alias is target + + +@pytest.mark.parametrize( + "make_inputs", + itertools.permutations( + [ + make_image_tensor, + make_image_tensor, + make_image_pil, + make_image, + make_video, + ], + 3, + ), +) +def test_pure_tensor_heuristic(make_inputs): + flat_inputs = [make_input() for make_input in make_inputs] + + def split_on_pure_tensor(to_split): + # This takes a sequence that is structurally aligned with `flat_inputs` and splits its items into three parts: + # 1. The first pure tensor. If none is present, this will be `None` + # 2. A list of the remaining pure tensors + # 3. A list of all other items + pure_tensors = [] + others = [] + # Splitting always happens on the original `flat_inputs` to avoid any erroneous type changes by the transform to + # affect the splitting. 
+ # affect the splitting. + for item, inpt in zip(to_split, flat_inputs): + (pure_tensors if is_pure_tensor(inpt) else others).append(item) + return pure_tensors[0] if pure_tensors else None, pure_tensors[1:], others + + class CopyCloneTransform(transforms.Transform): + def transform(self, inpt, params): + return inpt.clone() if isinstance(inpt, torch.Tensor) else inpt.copy() + + @staticmethod + def was_applied(output, inpt): + identity = output is inpt + if identity: + return False + + # Make sure nothing fishy is going on + assert_equal(output, inpt) + return True + + first_pure_tensor_input, other_pure_tensor_inputs, other_inputs = split_on_pure_tensor(flat_inputs) + + transform = CopyCloneTransform() + transformed_sample = transform(flat_inputs) + + first_pure_tensor_output, other_pure_tensor_outputs, other_outputs = split_on_pure_tensor(transformed_sample) + + if first_pure_tensor_input is not None: + if other_inputs: + assert not transform.was_applied(first_pure_tensor_output, first_pure_tensor_input) + else: + assert transform.was_applied(first_pure_tensor_output, first_pure_tensor_input) + + for output, inpt in zip(other_pure_tensor_outputs, other_pure_tensor_inputs): + assert not transform.was_applied(output, inpt) + + for input, output in zip(other_inputs, other_outputs): + assert transform.was_applied(output, input) + + +class TestRandomIoUCrop: + @pytest.mark.parametrize("device", cpu_and_cuda()) + @pytest.mark.parametrize("options", [[0.5, 0.9], [2.0]]) + def test_make_params(self, device, options): + orig_h, orig_w = size = (24, 32) + image = make_image(size) + bboxes = tv_tensors.BoundingBoxes( + torch.tensor([[1, 1, 10, 10], [20, 20, 23, 23], [1, 20, 10, 23], [20, 1, 23, 10]]), + format="XYXY", + canvas_size=size, + device=device, + ) + sample = [image, bboxes] + + transform = transforms.RandomIoUCrop(sampler_options=options) + + n_samples = 5 + for _ in range(n_samples): + + params = transform.make_params(sample) + + if options == [2.0]: + assert len(params) == 0 + return + + assert len(params["is_within_crop_area"]) > 0 + assert params["is_within_crop_area"].dtype == torch.bool + + assert int(transform.min_scale * orig_h) <= params["height"] <= int(transform.max_scale * orig_h) + assert int(transform.min_scale * orig_w) <= params["width"] <= int(transform.max_scale * orig_w) + + left, top = params["left"], params["top"] + new_h, new_w = params["height"], params["width"] + ious = box_iou( + bboxes, + torch.tensor([[left, top, left + new_w, top + new_h]], dtype=bboxes.dtype, device=bboxes.device), + ) + assert ious.max() >= options[0] or ious.max() >= options[1], f"{ious} vs {options}" + + def test__transform_empty_params(self, mocker): + transform = transforms.RandomIoUCrop(sampler_options=[2.0]) + image = tv_tensors.Image(torch.rand(1, 3, 4, 4)) + bboxes = tv_tensors.BoundingBoxes(torch.tensor([[1, 1, 2, 2]]), format="XYXY", canvas_size=(4, 4)) + label = torch.tensor([1]) + sample = [image, bboxes, label] + # Let's mock transform.make_params to control the output: + transform.make_params = mocker.MagicMock(return_value={}) + output = transform(sample) + torch.testing.assert_close(output, sample) + + def test_forward_assertion(self): + transform = transforms.RandomIoUCrop() + with pytest.raises( + TypeError, + match="requires input sample to contain tensor or PIL images and bounding boxes", + ): + transform(torch.tensor(0)) + + def test__transform(self, mocker): + transform = transforms.RandomIoUCrop() + + size = (32, 24) + image = make_image(size) + bboxes = make_bounding_boxes(format="XYXY", 
canvas_size=size, num_boxes=6) + masks = make_detection_masks(size, num_masks=6) + + sample = [image, bboxes, masks] + + is_within_crop_area = torch.tensor([0, 1, 0, 1, 0, 1], dtype=torch.bool) + + params = dict(top=1, left=2, height=12, width=12, is_within_crop_area=is_within_crop_area) + transform.make_params = mocker.MagicMock(return_value=params) + output = transform(sample) + + # check number of bboxes vs number of labels: + output_bboxes = output[1] + assert isinstance(output_bboxes, tv_tensors.BoundingBoxes) + assert (output_bboxes[~is_within_crop_area] == 0).all() + + output_masks = output[2] + assert isinstance(output_masks, tv_tensors.Mask) + + +class TestRandomShortestSize: + @pytest.mark.parametrize("min_size,max_size", [([5, 9], 20), ([5, 9], None)]) + def test_make_params(self, min_size, max_size): + canvas_size = (3, 10) + + transform = transforms.RandomShortestSize(min_size=min_size, max_size=max_size, antialias=True) + + sample = make_image(canvas_size) + params = transform.make_params([sample]) + + assert "size" in params + size = params["size"] + + assert isinstance(size, tuple) and len(size) == 2 + + longer = max(size) + shorter = min(size) + if max_size is not None: + assert longer <= max_size + assert shorter <= max_size + else: + assert shorter in min_size + + +class TestRandomResize: + def test_make_params(self): + min_size = 3 + max_size = 6 + + transform = transforms.RandomResize(min_size=min_size, max_size=max_size, antialias=True) + + for _ in range(10): + params = transform.make_params([]) + + assert isinstance(params["size"], list) and len(params["size"]) == 1 + size = params["size"][0] + + assert min_size <= size < max_size + + +@pytest.mark.parametrize("image_type", (PIL.Image, torch.Tensor, tv_tensors.Image)) +@pytest.mark.parametrize("label_type", (torch.Tensor, int)) +@pytest.mark.parametrize("dataset_return_type", (dict, tuple)) +@pytest.mark.parametrize("to_tensor", (transforms.ToTensor, transforms.ToImage)) +def test_classification_preset(image_type, label_type, dataset_return_type, to_tensor): + + image = tv_tensors.Image(torch.randint(0, 256, size=(1, 3, 250, 250), dtype=torch.uint8)) + if image_type is PIL.Image: + image = to_pil_image(image[0]) + elif image_type is torch.Tensor: + image = image.as_subclass(torch.Tensor) + assert is_pure_tensor(image) + + label = 1 if label_type is int else torch.tensor([1]) + + if dataset_return_type is dict: + sample = { + "image": image, + "label": label, + } + else: + sample = image, label + + if to_tensor is transforms.ToTensor: + with pytest.warns(UserWarning, match="deprecated and will be removed"): + to_tensor = to_tensor() + else: + to_tensor = to_tensor() + + t = transforms.Compose( + [ + transforms.RandomResizedCrop((224, 224), antialias=True), + transforms.RandomHorizontalFlip(p=1), + transforms.RandAugment(), + transforms.TrivialAugmentWide(), + transforms.AugMix(), + transforms.AutoAugment(), + to_tensor, + # TODO: ConvertImageDtype is a pass-through on PIL images, is that + # intended? This results in a failure if we convert to tensor after + # it, because the image would still be uint8 which makes Normalize + # fail. 
+ transforms.ConvertImageDtype(torch.float), + transforms.Normalize(mean=[0, 0, 0], std=[1, 1, 1]), + transforms.RandomErasing(p=1), + ] + ) + + out = t(sample) + + assert type(out) == type(sample) + + if dataset_return_type is tuple: + out_image, out_label = out + else: + assert out.keys() == sample.keys() + out_image, out_label = out.values() + + assert out_image.shape[-2:] == (224, 224) + assert out_label == label + + +@pytest.mark.parametrize("image_type", (PIL.Image, torch.Tensor, tv_tensors.Image)) +@pytest.mark.parametrize("data_augmentation", ("hflip", "lsj", "multiscale", "ssd", "ssdlite")) +@pytest.mark.parametrize("to_tensor", (transforms.ToTensor, transforms.ToImage)) +@pytest.mark.parametrize("sanitize", (True, False)) +def test_detection_preset(image_type, data_augmentation, to_tensor, sanitize): + torch.manual_seed(0) + + if to_tensor is transforms.ToTensor: + with pytest.warns(UserWarning, match="deprecated and will be removed"): + to_tensor = to_tensor() + else: + to_tensor = to_tensor() + + if data_augmentation == "hflip": + t = [ + transforms.RandomHorizontalFlip(p=1), + to_tensor, + transforms.ConvertImageDtype(torch.float), + ] + elif data_augmentation == "lsj": + t = [ + transforms.ScaleJitter(target_size=(1024, 1024), antialias=True), + # Note: replaced FixedSizeCrop with RandomCrop, because we're + # leaving FixedSizeCrop in prototype for now, and it expects Label + # classes which we won't release yet. + # transforms.FixedSizeCrop( + # size=(1024, 1024), fill=defaultdict(lambda: (123.0, 117.0, 104.0), {tv_tensors.Mask: 0}) + # ), + transforms.RandomCrop((1024, 1024), pad_if_needed=True), + transforms.RandomHorizontalFlip(p=1), + to_tensor, + transforms.ConvertImageDtype(torch.float), + ] + elif data_augmentation == "multiscale": + t = [ + transforms.RandomShortestSize( + min_size=(480, 512, 544, 576, 608, 640, 672, 704, 736, 768, 800), max_size=1333, antialias=True + ), + transforms.RandomHorizontalFlip(p=1), + to_tensor, + transforms.ConvertImageDtype(torch.float), + ] + elif data_augmentation == "ssd": + t = [ + transforms.RandomPhotometricDistort(p=1), + transforms.RandomZoomOut(fill={"others": (123.0, 117.0, 104.0), tv_tensors.Mask: 0}, p=1), + transforms.RandomIoUCrop(), + transforms.RandomHorizontalFlip(p=1), + to_tensor, + transforms.ConvertImageDtype(torch.float), + ] + elif data_augmentation == "ssdlite": + t = [ + transforms.RandomIoUCrop(), + transforms.RandomHorizontalFlip(p=1), + to_tensor, + transforms.ConvertImageDtype(torch.float), + ] + if sanitize: + t += [transforms.SanitizeBoundingBoxes()] + t = transforms.Compose(t) + + num_boxes = 5 + H = W = 250 + + image = tv_tensors.Image(torch.randint(0, 256, size=(1, 3, H, W), dtype=torch.uint8)) + if image_type is PIL.Image: + image = to_pil_image(image[0]) + elif image_type is torch.Tensor: + image = image.as_subclass(torch.Tensor) + assert is_pure_tensor(image) + + label = torch.randint(0, 10, size=(num_boxes,)) + + boxes = torch.randint(0, min(H, W) // 2, size=(num_boxes, 4)) + boxes[:, 2:] += boxes[:, :2] + boxes = boxes.clamp(min=0, max=min(H, W)) + boxes = tv_tensors.BoundingBoxes(boxes, format="XYXY", canvas_size=(H, W)) + + masks = tv_tensors.Mask(torch.randint(0, 2, size=(num_boxes, H, W), dtype=torch.uint8)) + + sample = { + "image": image, + "label": label, + "boxes": boxes, + "masks": masks, + } + + out = t(sample) + + if isinstance(to_tensor, transforms.ToTensor) and image_type is not tv_tensors.Image: + assert is_pure_tensor(out["image"]) + else: + assert isinstance(out["image"], 
tv_tensors.Image) + assert isinstance(out["label"], type(sample["label"])) + + num_boxes_expected = { + # ssd and ssdlite contain RandomIoUCrop which may "remove" some bbox. It + # doesn't remove them strictly speaking; it just marks some boxes as + # degenerate and those boxes will later be removed by + # SanitizeBoundingBoxes(), which we add to the pipelines if the sanitize + # param is True. + # Note that the values below are probably specific to the random seed + # set above (which is fine). + (True, "ssd"): 5, + (True, "ssdlite"): 4, + }.get((sanitize, data_augmentation), num_boxes) + + assert out["boxes"].shape[0] == out["masks"].shape[0] == out["label"].shape[0] == num_boxes_expected + + +class TestSanitizeBoundingBoxes: + def _get_boxes_and_valid_mask(self, H=256, W=128, min_size=10, min_area=10): + boxes_and_validity = [ + ([0, 1, 10, 1], False), # Y1 == Y2 + ([0, 1, 0, 20], False), # X1 == X2 + ([0, 0, min_size - 1, 10], False), # W < min_size + ([0, 0, 10, min_size - 1], False), # H < min_size + ([0, 0, 10, H + 1], False), # Y2 > H + ([0, 0, W + 1, 10], False), # X2 > W + ([-1, 1, 10, 20], False), # any < 0 + ([0, 0, -1, 20], False), # any < 0 + ([0, 0, -10, -1], False), # any < 0 + ([0, 0, min_size, 10], min_size * 10 >= min_area), # W == min_size, valid iff area >= min_area + ([0, 0, 10, min_size], min_size * 10 >= min_area), # H == min_size, valid iff area >= min_area + ([0, 0, W, H], W * H >= min_area), + ([1, 1, 30, 20], 29 * 19 >= min_area), + ([0, 0, 10, 10], 10 * 10 >= min_area), + ([1, 1, 30, 20], 29 * 19 >= min_area), + ] + + random.shuffle(boxes_and_validity) # For test robustness: mix order of wrong and correct cases + boxes, expected_valid_mask = zip(*boxes_and_validity) + boxes = tv_tensors.BoundingBoxes( + boxes, + format=tv_tensors.BoundingBoxFormat.XYXY, + canvas_size=(H, W), + ) + + return boxes, expected_valid_mask + + @pytest.mark.parametrize("min_size, min_area", ((1, 1), (10, 1), (10, 101))) + @pytest.mark.parametrize( + "labels_getter", + ( + "default", + lambda inputs: inputs["labels"], + lambda inputs: (inputs["labels"], inputs["other_labels"]), + lambda inputs: [inputs["labels"], inputs["other_labels"]], + None, + lambda inputs: None, + ), + ) + @pytest.mark.parametrize("sample_type", (tuple, dict)) + def test_transform(self, min_size, min_area, labels_getter, sample_type): + + if sample_type is tuple and not isinstance(labels_getter, str): + # The "lambda inputs: inputs["labels"]" labels_getter used in this test + # doesn't work if the input is a tuple. + return + + H, W = 256, 128 + boxes, expected_valid_mask = self._get_boxes_and_valid_mask(H=H, W=W, min_size=min_size, min_area=min_area) + valid_indices = [i for (i, is_valid) in enumerate(expected_valid_mask) if is_valid] + + labels = torch.arange(boxes.shape[0]) + masks = tv_tensors.Mask(torch.randint(0, 2, size=(boxes.shape[0], H, W))) + # other_labels corresponds to properties from COCO like iscrowd, area... 
+ # We only sanitize it when labels_getter returns a tuple + other_labels = torch.arange(boxes.shape[0]) + whatever = torch.rand(10) + input_img = torch.randint(0, 256, size=(1, 3, H, W), dtype=torch.uint8) + sample = { + "image": input_img, + "labels": labels, + "boxes": boxes, + "other_labels": other_labels, + "whatever": whatever, + "None": None, + "masks": masks, + } + + if sample_type is tuple: + img = sample.pop("image") + sample = (img, sample) + + out = transforms.SanitizeBoundingBoxes(min_size=min_size, min_area=min_area, labels_getter=labels_getter)( + sample + ) + + if sample_type is tuple: + out_image = out[0] + out_labels = out[1]["labels"] + out_other_labels = out[1]["other_labels"] + out_boxes = out[1]["boxes"] + out_masks = out[1]["masks"] + out_whatever = out[1]["whatever"] + else: + out_image = out["image"] + out_labels = out["labels"] + out_other_labels = out["other_labels"] + out_boxes = out["boxes"] + out_masks = out["masks"] + out_whatever = out["whatever"] + + assert out_image is input_img + assert out_whatever is whatever + + assert isinstance(out_boxes, tv_tensors.BoundingBoxes) + assert isinstance(out_masks, tv_tensors.Mask) + + if labels_getter is None or (callable(labels_getter) and labels_getter(sample) is None): + assert out_labels is labels + assert out_other_labels is other_labels + else: + assert isinstance(out_labels, torch.Tensor) + assert out_boxes.shape[0] == out_labels.shape[0] == out_masks.shape[0] + # This works because we conveniently set labels to arange(num_boxes) + assert out_labels.tolist() == valid_indices + + if callable(labels_getter) and isinstance(labels_getter(sample), (tuple, list)): + assert_equal(out_other_labels, out_labels) + else: + assert_equal(out_other_labels, other_labels) + + @pytest.mark.parametrize("input_type", (torch.Tensor, tv_tensors.BoundingBoxes)) + def test_functional(self, input_type): + # Note: the "functional" F.sanitize_bounding_boxes was added after the class, so there is some + # redundancy with test_transform() in terms of correctness checks. But that's OK. 
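+ # The main value here is checking the (boxes, valid_mask) return contract for both pure tensor and tv_tensor inputs.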
+ + H, W, min_size = 256, 128, 10 + + boxes, expected_valid_mask = self._get_boxes_and_valid_mask(H=H, W=W, min_size=min_size) + + if input_type is tv_tensors.BoundingBoxes: + format = canvas_size = None + else: + # just passing "XYXY" explicitly to make sure we support strings + format, canvas_size = "XYXY", boxes.canvas_size + boxes = boxes.as_subclass(torch.Tensor) + + boxes, valid = F.sanitize_bounding_boxes(boxes, format=format, canvas_size=canvas_size, min_size=min_size) + + assert_equal(valid, torch.tensor(expected_valid_mask)) + assert type(valid) == torch.Tensor + assert boxes.shape[0] == sum(valid) + assert isinstance(boxes, input_type) + + def test_kernel(self): + H, W, min_size = 256, 128, 10 + boxes, _ = self._get_boxes_and_valid_mask(H=H, W=W, min_size=min_size) + + format, canvas_size = boxes.format, boxes.canvas_size + boxes = boxes.as_subclass(torch.Tensor) + + check_kernel( + F.sanitize_bounding_boxes, + input=boxes, + format=format, + canvas_size=canvas_size, + check_batched_vs_unbatched=False, + ) + + def test_no_label(self): + # Non-regression test for https://github.com/pytorch/vision/issues/7878 + + img = make_image() + boxes = make_bounding_boxes() + + with pytest.raises(ValueError, match="or a two-tuple whose second item is a dict"): + transforms.SanitizeBoundingBoxes()(img, boxes) + + out_img, out_boxes = transforms.SanitizeBoundingBoxes(labels_getter=None)(img, boxes) + assert isinstance(out_img, tv_tensors.Image) + assert isinstance(out_boxes, tv_tensors.BoundingBoxes) + + def test_errors_transform(self): + good_bbox = tv_tensors.BoundingBoxes( + [[0, 0, 10, 10]], + format=tv_tensors.BoundingBoxFormat.XYXY, + canvas_size=(20, 20), + ) + + with pytest.raises(ValueError, match="min_size must be >= 1"): + transforms.SanitizeBoundingBoxes(min_size=0) + with pytest.raises(ValueError, match="min_area must be >= 1"): + transforms.SanitizeBoundingBoxes(min_area=0) + with pytest.raises(ValueError, match="labels_getter should either be 'default'"): + transforms.SanitizeBoundingBoxes(labels_getter=12) + + with pytest.raises(ValueError, match="Could not infer where the labels are"): + bad_labels_key = {"bbox": good_bbox, "BAD_KEY": torch.arange(good_bbox.shape[0])} + transforms.SanitizeBoundingBoxes()(bad_labels_key) + + with pytest.raises(ValueError, match="must be a tensor"): + not_a_tensor = {"bbox": good_bbox, "labels": torch.arange(good_bbox.shape[0]).tolist()} + transforms.SanitizeBoundingBoxes()(not_a_tensor) + + with pytest.raises(ValueError, match="Number of boxes"): + different_sizes = {"bbox": good_bbox, "labels": torch.arange(good_bbox.shape[0] + 3)} + transforms.SanitizeBoundingBoxes()(different_sizes) + + def test_errors_functional(self): + + good_bbox = tv_tensors.BoundingBoxes( + [[0, 0, 10, 10]], + format=tv_tensors.BoundingBoxFormat.XYXY, + canvas_size=(20, 20), + ) + + with pytest.raises(ValueError, match="canvas_size cannot be None if bounding_boxes is a pure tensor"): + F.sanitize_bounding_boxes(good_bbox.as_subclass(torch.Tensor), format="XYXY", canvas_size=None) + + with pytest.raises(ValueError, match="canvas_size cannot be None if bounding_boxes is a pure tensor"): + F.sanitize_bounding_boxes(good_bbox.as_subclass(torch.Tensor), format=None, canvas_size=(10, 10)) + + with pytest.raises(ValueError, match="canvas_size must be None when bounding_boxes is a tv_tensors"): + F.sanitize_bounding_boxes(good_bbox, format="XYXY", canvas_size=None) + + with pytest.raises(ValueError, match="canvas_size must be None when bounding_boxes is a tv_tensors"): + 
F.sanitize_bounding_boxes(good_bbox, format=None, canvas_size=(10, 10)) + + with pytest.raises(ValueError, match="bounding_boxes must be a tv_tensors.BoundingBoxes instance or a"): + F.sanitize_bounding_boxes(good_bbox.tolist()) + + +class TestJPEG: + @pytest.mark.parametrize("quality", [5, 75]) + @pytest.mark.parametrize("color_space", ["RGB", "GRAY"]) + def test_kernel_image(self, quality, color_space): + check_kernel(F.jpeg_image, make_image(color_space=color_space), quality=quality) + + def test_kernel_video(self): + check_kernel(F.jpeg_video, make_video(), quality=5) + + @pytest.mark.parametrize("make_input", [make_image_tensor, make_image_pil, make_image, make_video]) + def test_functional(self, make_input): + check_functional(F.jpeg, make_input(), quality=5) + + @pytest.mark.parametrize( + ("kernel", "input_type"), + [ + (F.jpeg_image, torch.Tensor), + (F._augment._jpeg_image_pil, PIL.Image.Image), + (F.jpeg_image, tv_tensors.Image), + (F.jpeg_video, tv_tensors.Video), + ], + ) + def test_functional_signature(self, kernel, input_type): + check_functional_kernel_signature_match(F.jpeg, kernel=kernel, input_type=input_type) + + @pytest.mark.parametrize("make_input", [make_image_tensor, make_image_pil, make_image, make_video]) + @pytest.mark.parametrize("quality", [5, (10, 20)]) + @pytest.mark.parametrize("color_space", ["RGB", "GRAY"]) + def test_transform(self, make_input, quality, color_space): + check_transform(transforms.JPEG(quality=quality), make_input(color_space=color_space)) + + @pytest.mark.parametrize("quality", [5]) + def test_functional_image_correctness(self, quality): + image = make_image() + + actual = F.jpeg(image, quality=quality) + expected = F.to_image(F.jpeg(F.to_pil_image(image), quality=quality)) + + # NOTE: this will fail if torchvision and Pillow use different JPEG encoder/decoder + torch.testing.assert_close(actual, expected, rtol=0, atol=1) + + @pytest.mark.parametrize("quality", [5, (10, 20)]) + @pytest.mark.parametrize("color_space", ["RGB", "GRAY"]) + @pytest.mark.parametrize("seed", list(range(5))) + def test_transform_image_correctness(self, quality, color_space, seed): + image = make_image(color_space=color_space) + + transform = transforms.JPEG(quality=quality) + + with freeze_rng_state(): + torch.manual_seed(seed) + actual = transform(image) + + torch.manual_seed(seed) + expected = F.to_image(transform(F.to_pil_image(image))) + + torch.testing.assert_close(actual, expected, rtol=0, atol=1) + + @pytest.mark.parametrize("quality", [5, (10, 20)]) + @pytest.mark.parametrize("seed", list(range(10))) + def test_transform_make_params_bounds(self, quality, seed): + transform = transforms.JPEG(quality=quality) + + with freeze_rng_state(): + torch.manual_seed(seed) + params = transform.make_params([]) + + if isinstance(quality, int): + assert params["quality"] == quality + else: + assert quality[0] <= params["quality"] <= quality[1] + + @pytest.mark.parametrize("quality", [[0], [0, 0, 0]]) + def test_transform_sequence_len_error(self, quality): + with pytest.raises(ValueError, match="quality should be a sequence of length 2"): + transforms.JPEG(quality=quality) + + @pytest.mark.parametrize("quality", [-1, 0, 150]) + def test_transform_invalid_quality_error(self, quality): + with pytest.raises(ValueError, match="quality must be an integer from 1 to 100"): + transforms.JPEG(quality=quality) + + +class TestUtils: + # TODO: Still need to test has_all, has_any, check_type and get_bounding_boxes + @pytest.mark.parametrize( + "make_input1", [make_image_tensor, 
make_image_pil, make_image, make_bounding_boxes, make_segmentation_mask] + ) + @pytest.mark.parametrize( + "make_input2", [make_image_tensor, make_image_pil, make_image, make_bounding_boxes, make_segmentation_mask] + ) + @pytest.mark.parametrize("query", [transforms.query_size, transforms.query_chw]) + def test_query_size_and_query_chw(self, make_input1, make_input2, query): + size = (32, 64) + input1 = make_input1(size) + input2 = make_input2(size) + + if query is transforms.query_chw and not any( + transforms.check_type(inpt, (is_pure_tensor, tv_tensors.Image, PIL.Image.Image, tv_tensors.Video)) + for inpt in (input1, input2) + ): + return + + expected = size if query is transforms.query_size else ((3,) + size) + assert query([input1, input2]) == expected + + @pytest.mark.parametrize( + "make_input1", [make_image_tensor, make_image_pil, make_image, make_bounding_boxes, make_segmentation_mask] + ) + @pytest.mark.parametrize( + "make_input2", [make_image_tensor, make_image_pil, make_image, make_bounding_boxes, make_segmentation_mask] + ) + @pytest.mark.parametrize("query", [transforms.query_size, transforms.query_chw]) + def test_different_sizes(self, make_input1, make_input2, query): + input1 = make_input1((10, 10)) + input2 = make_input2((20, 20)) + if query is transforms.query_chw and not all( + transforms.check_type(inpt, (is_pure_tensor, tv_tensors.Image, PIL.Image.Image, tv_tensors.Video)) + for inpt in (input1, input2) + ): + return + with pytest.raises(ValueError, match="Found multiple"): + query([input1, input2]) + + @pytest.mark.parametrize("query", [transforms.query_size, transforms.query_chw]) + def test_no_valid_input(self, query): + with pytest.raises(TypeError, match="No image"): + query(["blah"]) diff --git a/test/test_transforms_v2_utils.py b/test/test_transforms_v2_utils.py new file mode 100644 index 00000000000..53222c6a2c8 --- /dev/null +++ b/test/test_transforms_v2_utils.py @@ -0,0 +1,92 @@ +import PIL.Image +import pytest + +import torch + +import torchvision.transforms.v2._utils +from common_utils import DEFAULT_SIZE, make_bounding_boxes, make_detection_masks, make_image + +from torchvision import tv_tensors +from torchvision.transforms.v2._utils import has_all, has_any +from torchvision.transforms.v2.functional import to_pil_image + + +IMAGE = make_image(DEFAULT_SIZE, color_space="RGB") +BOUNDING_BOX = make_bounding_boxes(DEFAULT_SIZE, format=tv_tensors.BoundingBoxFormat.XYXY) +MASK = make_detection_masks(DEFAULT_SIZE) + + +@pytest.mark.parametrize( + ("sample", "types", "expected"), + [ + ((IMAGE, BOUNDING_BOX, MASK), (tv_tensors.Image,), True), + ((IMAGE, BOUNDING_BOX, MASK), (tv_tensors.BoundingBoxes,), True), + ((IMAGE, BOUNDING_BOX, MASK), (tv_tensors.Mask,), True), + ((IMAGE, BOUNDING_BOX, MASK), (tv_tensors.Image, tv_tensors.BoundingBoxes), True), + ((IMAGE, BOUNDING_BOX, MASK), (tv_tensors.Image, tv_tensors.Mask), True), + ((IMAGE, BOUNDING_BOX, MASK), (tv_tensors.BoundingBoxes, tv_tensors.Mask), True), + ((MASK,), (tv_tensors.Image, tv_tensors.BoundingBoxes), False), + ((BOUNDING_BOX,), (tv_tensors.Image, tv_tensors.Mask), False), + ((IMAGE,), (tv_tensors.BoundingBoxes, tv_tensors.Mask), False), + ( + (IMAGE, BOUNDING_BOX, MASK), + (tv_tensors.Image, tv_tensors.BoundingBoxes, tv_tensors.Mask), + True, + ), + ((), (tv_tensors.Image, tv_tensors.BoundingBoxes, tv_tensors.Mask), False), + ((IMAGE, BOUNDING_BOX, MASK), (lambda obj: isinstance(obj, tv_tensors.Image),), True), + ((IMAGE, BOUNDING_BOX, MASK), (lambda _: False,), False), + ((IMAGE, BOUNDING_BOX, 
MASK), (lambda _: True,), True), + ((IMAGE,), (tv_tensors.Image, PIL.Image.Image, torchvision.transforms.v2._utils.is_pure_tensor), True), + ( + (torch.Tensor(IMAGE),), + (tv_tensors.Image, PIL.Image.Image, torchvision.transforms.v2._utils.is_pure_tensor), + True, + ), + ( + (to_pil_image(IMAGE),), + (tv_tensors.Image, PIL.Image.Image, torchvision.transforms.v2._utils.is_pure_tensor), + True, + ), + ], +) +def test_has_any(sample, types, expected): + assert has_any(sample, *types) is expected + + +@pytest.mark.parametrize( + ("sample", "types", "expected"), + [ + ((IMAGE, BOUNDING_BOX, MASK), (tv_tensors.Image,), True), + ((IMAGE, BOUNDING_BOX, MASK), (tv_tensors.BoundingBoxes,), True), + ((IMAGE, BOUNDING_BOX, MASK), (tv_tensors.Mask,), True), + ((IMAGE, BOUNDING_BOX, MASK), (tv_tensors.Image, tv_tensors.BoundingBoxes), True), + ((IMAGE, BOUNDING_BOX, MASK), (tv_tensors.Image, tv_tensors.Mask), True), + ((IMAGE, BOUNDING_BOX, MASK), (tv_tensors.BoundingBoxes, tv_tensors.Mask), True), + ( + (IMAGE, BOUNDING_BOX, MASK), + (tv_tensors.Image, tv_tensors.BoundingBoxes, tv_tensors.Mask), + True, + ), + ((BOUNDING_BOX, MASK), (tv_tensors.Image, tv_tensors.BoundingBoxes), False), + ((BOUNDING_BOX, MASK), (tv_tensors.Image, tv_tensors.Mask), False), + ((IMAGE, MASK), (tv_tensors.BoundingBoxes, tv_tensors.Mask), False), + ( + (IMAGE, BOUNDING_BOX, MASK), + (tv_tensors.Image, tv_tensors.BoundingBoxes, tv_tensors.Mask), + True, + ), + ((BOUNDING_BOX, MASK), (tv_tensors.Image, tv_tensors.BoundingBoxes, tv_tensors.Mask), False), + ((IMAGE, MASK), (tv_tensors.Image, tv_tensors.BoundingBoxes, tv_tensors.Mask), False), + ((IMAGE, BOUNDING_BOX), (tv_tensors.Image, tv_tensors.BoundingBoxes, tv_tensors.Mask), False), + ( + (IMAGE, BOUNDING_BOX, MASK), + (lambda obj: isinstance(obj, (tv_tensors.Image, tv_tensors.BoundingBoxes, tv_tensors.Mask)),), + True, + ), + ((IMAGE, BOUNDING_BOX, MASK), (lambda _: False,), False), + ((IMAGE, BOUNDING_BOX, MASK), (lambda _: True,), True), + ], +) +def test_has_all(sample, types, expected): + assert has_all(sample, *types) is expected diff --git a/test/test_transforms_video.py b/test/test_transforms_video.py index 296d519f5c4..4ad57e6a98e 100644 --- a/test/test_transforms_video.py +++ b/test/test_transforms_video.py @@ -1,10 +1,11 @@ -from __future__ import division -import torch -import torchvision.transforms._transforms_video as transforms -from torchvision.transforms import Compose -import unittest import random +import warnings + import numpy as np +import pytest +import torch +from common_utils import assert_equal +from torchvision.transforms import Compose try: from scipy import stats @@ -12,21 +13,27 @@ stats = None -class TestVideoTransforms(unittest.TestCase): +with warnings.catch_warnings(record=True): + warnings.simplefilter("always") + import torchvision.transforms._transforms_video as transforms + +class TestVideoTransforms: def test_random_crop_video(self): numFrames = random.randint(4, 128) height = random.randint(10, 32) * 2 width = random.randint(10, 32) * 2 - oheight = random.randint(5, (height - 2) / 2) * 2 - owidth = random.randint(5, (width - 2) / 2) * 2 + oheight = random.randint(5, (height - 2) // 2) * 2 + owidth = random.randint(5, (width - 2) // 2) * 2 clip = torch.randint(0, 256, (numFrames, height, width, 3), dtype=torch.uint8) - result = Compose([ - transforms.ToTensorVideo(), - transforms.RandomCropVideo((oheight, owidth)), - ])(clip) - self.assertEqual(result.size(2), oheight) - self.assertEqual(result.size(3), owidth) + result = Compose( + [ 
+ transforms.ToTensorVideo(), + transforms.RandomCropVideo((oheight, owidth)), + ] + )(clip) + assert result.size(2) == oheight + assert result.size(3) == owidth transforms.RandomCropVideo((oheight, owidth)).__repr__() @@ -34,15 +41,17 @@ def test_random_resized_crop_video(self): numFrames = random.randint(4, 128) height = random.randint(10, 32) * 2 width = random.randint(10, 32) * 2 - oheight = random.randint(5, (height - 2) / 2) * 2 - owidth = random.randint(5, (width - 2) / 2) * 2 + oheight = random.randint(5, (height - 2) // 2) * 2 + owidth = random.randint(5, (width - 2) // 2) * 2 clip = torch.randint(0, 256, (numFrames, height, width, 3), dtype=torch.uint8) - result = Compose([ - transforms.ToTensorVideo(), - transforms.RandomResizedCropVideo((oheight, owidth)), - ])(clip) - self.assertEqual(result.size(2), oheight) - self.assertEqual(result.size(3), owidth) + result = Compose( + [ + transforms.ToTensorVideo(), + transforms.RandomResizedCropVideo((oheight, owidth)), + ] + )(clip) + assert result.size(2) == oheight + assert result.size(3) == owidth transforms.RandomResizedCropVideo((oheight, owidth)).__repr__() @@ -50,73 +59,83 @@ def test_center_crop_video(self): numFrames = random.randint(4, 128) height = random.randint(10, 32) * 2 width = random.randint(10, 32) * 2 - oheight = random.randint(5, (height - 2) / 2) * 2 - owidth = random.randint(5, (width - 2) / 2) * 2 + oheight = random.randint(5, (height - 2) // 2) * 2 + owidth = random.randint(5, (width - 2) // 2) * 2 clip = torch.ones((numFrames, height, width, 3), dtype=torch.uint8) * 255 oh1 = (height - oheight) // 2 ow1 = (width - owidth) // 2 - clipNarrow = clip[:, oh1:oh1 + oheight, ow1:ow1 + owidth, :] + clipNarrow = clip[:, oh1 : oh1 + oheight, ow1 : ow1 + owidth, :] clipNarrow.fill_(0) - result = Compose([ - transforms.ToTensorVideo(), - transforms.CenterCropVideo((oheight, owidth)), - ])(clip) - - msg = "height: " + str(height) + " width: " \ - + str(width) + " oheight: " + str(oheight) + " owidth: " + str(owidth) - self.assertEqual(result.sum().item(), 0, msg) + result = Compose( + [ + transforms.ToTensorVideo(), + transforms.CenterCropVideo((oheight, owidth)), + ] + )(clip) + + msg = ( + "height: " + str(height) + " width: " + str(width) + " oheight: " + str(oheight) + " owidth: " + str(owidth) + ) + assert result.sum().item() == 0, msg oheight += 1 owidth += 1 - result = Compose([ - transforms.ToTensorVideo(), - transforms.CenterCropVideo((oheight, owidth)), - ])(clip) + result = Compose( + [ + transforms.ToTensorVideo(), + transforms.CenterCropVideo((oheight, owidth)), + ] + )(clip) sum1 = result.sum() - msg = "height: " + str(height) + " width: " \ - + str(width) + " oheight: " + str(oheight) + " owidth: " + str(owidth) - self.assertEqual(sum1.item() > 1, True, msg) + msg = ( + "height: " + str(height) + " width: " + str(width) + " oheight: " + str(oheight) + " owidth: " + str(owidth) + ) + assert sum1.item() > 1, msg oheight += 1 owidth += 1 - result = Compose([ - transforms.ToTensorVideo(), - transforms.CenterCropVideo((oheight, owidth)), - ])(clip) + result = Compose( + [ + transforms.ToTensorVideo(), + transforms.CenterCropVideo((oheight, owidth)), + ] + )(clip) sum2 = result.sum() - msg = "height: " + str(height) + " width: " \ - + str(width) + " oheight: " + str(oheight) + " owidth: " + str(owidth) - self.assertTrue(sum2.item() > 1, msg) - self.assertTrue(sum2.item() > sum1.item(), msg) + msg = ( + "height: " + str(height) + " width: " + str(width) + " oheight: " + str(oheight) + " owidth: " + str(owidth) + ) + 
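# The enlarged crop now covers more of the non-zero border, so its sum must exceed the previous one. + 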
assert sum2.item() > 1, msg + assert sum2.item() > sum1.item(), msg - @unittest.skipIf(stats is None, 'scipy.stats is not available') - def test_normalize_video(self): + @pytest.mark.skipif(stats is None, reason="scipy.stats is not available") + @pytest.mark.parametrize("channels", [1, 3]) + def test_normalize_video(self, channels): def samples_from_standard_normal(tensor): - p_value = stats.kstest(list(tensor.view(-1)), 'norm', args=(0, 1)).pvalue + p_value = stats.kstest(list(tensor.view(-1)), "norm", args=(0, 1)).pvalue return p_value > 0.0001 random_state = random.getstate() random.seed(42) - for channels in [1, 3]: - numFrames = random.randint(4, 128) - height = random.randint(32, 256) - width = random.randint(32, 256) - mean = random.random() - std = random.random() - clip = torch.normal(mean, std, size=(channels, numFrames, height, width)) - mean = [clip[c].mean().item() for c in range(channels)] - std = [clip[c].std().item() for c in range(channels)] - normalized = transforms.NormalizeVideo(mean, std)(clip) - self.assertTrue(samples_from_standard_normal(normalized)) + + numFrames = random.randint(4, 128) + height = random.randint(32, 256) + width = random.randint(32, 256) + mean = random.random() + std = random.random() + clip = torch.normal(mean, std, size=(channels, numFrames, height, width)) + mean = [clip[c].mean().item() for c in range(channels)] + std = [clip[c].std().item() for c in range(channels)] + normalized = transforms.NormalizeVideo(mean, std)(clip) + assert samples_from_standard_normal(normalized) random.setstate(random_state) # Checking the optional in-place behaviour tensor = torch.rand((3, 128, 16, 16)) tensor_inplace = transforms.NormalizeVideo((0.5, 0.5, 0.5), (0.5, 0.5, 0.5), inplace=True)(tensor) - self.assertTrue(torch.equal(tensor, tensor_inplace)) + assert_equal(tensor, tensor_inplace) transforms.NormalizeVideo((0.5, 0.5, 0.5), (0.5, 0.5, 0.5), inplace=True).__repr__() @@ -124,49 +143,36 @@ def test_to_tensor_video(self): numFrames, height, width = 64, 4, 4 trans = transforms.ToTensorVideo() - with self.assertRaises(TypeError): - trans(np.random.rand(numFrames, height, width, 1).tolist()) + with pytest.raises(TypeError): + np_rng = np.random.RandomState(0) + trans(np_rng.rand(numFrames, height, width, 1).tolist()) + with pytest.raises(TypeError): trans(torch.rand((numFrames, height, width, 1), dtype=torch.float)) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): trans(torch.ones((3, numFrames, height, width, 3), dtype=torch.uint8)) + with pytest.raises(ValueError): trans(torch.ones((height, width, 3), dtype=torch.uint8)) + with pytest.raises(ValueError): trans(torch.ones((width, 3), dtype=torch.uint8)) + with pytest.raises(ValueError): trans(torch.ones((3), dtype=torch.uint8)) trans.__repr__() - @unittest.skipIf(stats is None, 'scipy.stats not available') - def test_random_horizontal_flip_video(self): - random_state = random.getstate() - random.seed(42) + @pytest.mark.parametrize("p", (0, 1)) + def test_random_horizontal_flip_video(self, p): clip = torch.rand((3, 4, 112, 112), dtype=torch.float) - hclip = clip.flip((-1)) - - num_samples = 250 - num_horizontal = 0 - for _ in range(num_samples): - out = transforms.RandomHorizontalFlipVideo()(clip) - if torch.all(torch.eq(out, hclip)): - num_horizontal += 1 + hclip = clip.flip(-1) - p_value = stats.binom_test(num_horizontal, num_samples, p=0.5) - random.setstate(random_state) - self.assertGreater(p_value, 0.0001) - - num_samples = 250 - num_horizontal = 0 - for _ in range(num_samples): - 
out = transforms.RandomHorizontalFlipVideo(p=0.7)(clip) - if torch.all(torch.eq(out, hclip)): - num_horizontal += 1 - - p_value = stats.binom_test(num_horizontal, num_samples, p=0.7) - random.setstate(random_state) - self.assertGreater(p_value, 0.0001) + out = transforms.RandomHorizontalFlipVideo(p=p)(clip) + if p == 0: + torch.testing.assert_close(out, clip) + elif p == 1: + torch.testing.assert_close(out, hclip) transforms.RandomHorizontalFlipVideo().__repr__() -if __name__ == '__main__': - unittest.main() +if __name__ == "__main__": + pytest.main([__file__]) diff --git a/test/test_tv_tensors.py b/test/test_tv_tensors.py new file mode 100644 index 00000000000..ed75ae35ecd --- /dev/null +++ b/test/test_tv_tensors.py @@ -0,0 +1,320 @@ +from copy import deepcopy + +import pytest +import torch +from common_utils import assert_equal, make_bounding_boxes, make_image, make_segmentation_mask, make_video +from PIL import Image + +from torchvision import tv_tensors + + +@pytest.fixture(autouse=True) +def restore_tensor_return_type(): + # This is for security, as we should already be restoring the default manually in each test anyway + # (at least at the time of writing...) + yield + tv_tensors.set_return_type("Tensor") + + +@pytest.mark.parametrize("data", [torch.rand(3, 32, 32), Image.new("RGB", (32, 32), color=123)]) +def test_image_instance(data): + image = tv_tensors.Image(data) + assert isinstance(image, torch.Tensor) + assert image.ndim == 3 and image.shape[0] == 3 + + +@pytest.mark.parametrize("data", [torch.randint(0, 10, size=(1, 32, 32)), Image.new("L", (32, 32), color=2)]) +def test_mask_instance(data): + mask = tv_tensors.Mask(data) + assert isinstance(mask, torch.Tensor) + assert mask.ndim == 3 and mask.shape[0] == 1 + + +@pytest.mark.parametrize("data", [torch.randint(0, 32, size=(5, 4)), [[0, 0, 5, 5], [2, 2, 7, 7]], [1, 2, 3, 4]]) +@pytest.mark.parametrize( + "format", ["XYXY", "CXCYWH", tv_tensors.BoundingBoxFormat.XYXY, tv_tensors.BoundingBoxFormat.XYWH] +) +def test_bbox_instance(data, format): + bboxes = tv_tensors.BoundingBoxes(data, format=format, canvas_size=(32, 32)) + assert isinstance(bboxes, torch.Tensor) + assert bboxes.ndim == 2 and bboxes.shape[1] == 4 + if isinstance(format, str): + format = tv_tensors.BoundingBoxFormat[(format.upper())] + assert bboxes.format == format + + +def test_bbox_dim_error(): + data_3d = [[[1, 2, 3, 4]]] + with pytest.raises(ValueError, match="Expected a 1D or 2D tensor, got 3D"): + tv_tensors.BoundingBoxes(data_3d, format="XYXY", canvas_size=(32, 32)) + + +@pytest.mark.parametrize( + ("data", "input_requires_grad", "expected_requires_grad"), + [ + ([[[0.0, 1.0], [0.0, 1.0]]], None, False), + ([[[0.0, 1.0], [0.0, 1.0]]], False, False), + ([[[0.0, 1.0], [0.0, 1.0]]], True, True), + (torch.rand(3, 16, 16, requires_grad=False), None, False), + (torch.rand(3, 16, 16, requires_grad=False), False, False), + (torch.rand(3, 16, 16, requires_grad=False), True, True), + (torch.rand(3, 16, 16, requires_grad=True), None, True), + (torch.rand(3, 16, 16, requires_grad=True), False, False), + (torch.rand(3, 16, 16, requires_grad=True), True, True), + ], +) +def test_new_requires_grad(data, input_requires_grad, expected_requires_grad): + tv_tensor = tv_tensors.Image(data, requires_grad=input_requires_grad) + assert tv_tensor.requires_grad is expected_requires_grad + + +@pytest.mark.parametrize("make_input", [make_image, make_bounding_boxes, make_segmentation_mask, make_video]) +def test_isinstance(make_input): + assert isinstance(make_input(), torch.Tensor) 
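+ + +# Wrapping a tensor into a TVTensor must reuse the underlying storage rather than copy it.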
+def test_wrapping_no_copy(): + tensor = torch.rand(3, 16, 16) + image = tv_tensors.Image(tensor) + + assert image.data_ptr() == tensor.data_ptr() + + +@pytest.mark.parametrize("make_input", [make_image, make_bounding_boxes, make_segmentation_mask, make_video]) +def test_to_wrapping(make_input): + dp = make_input() + + dp_to = dp.to(torch.float64) + + assert type(dp_to) is type(dp) + assert dp_to.dtype is torch.float64 + + +@pytest.mark.parametrize("make_input", [make_image, make_bounding_boxes, make_segmentation_mask, make_video]) +@pytest.mark.parametrize("return_type", ["Tensor", "TVTensor"]) +def test_to_tv_tensor_reference(make_input, return_type): + tensor = torch.rand((3, 16, 16), dtype=torch.float64) + dp = make_input() + + with tv_tensors.set_return_type(return_type): + tensor_to = tensor.to(dp) + + assert type(tensor_to) is (type(dp) if return_type == "TVTensor" else torch.Tensor) + assert tensor_to.dtype is dp.dtype + assert type(tensor) is torch.Tensor + + +@pytest.mark.parametrize("make_input", [make_image, make_bounding_boxes, make_segmentation_mask, make_video]) +@pytest.mark.parametrize("return_type", ["Tensor", "TVTensor"]) +def test_clone_wrapping(make_input, return_type): + dp = make_input() + + with tv_tensors.set_return_type(return_type): + dp_clone = dp.clone() + + assert type(dp_clone) is type(dp) + assert dp_clone.data_ptr() != dp.data_ptr() + + +@pytest.mark.parametrize("make_input", [make_image, make_bounding_boxes, make_segmentation_mask, make_video]) +@pytest.mark.parametrize("return_type", ["Tensor", "TVTensor"]) +def test_requires_grad__wrapping(make_input, return_type): + dp = make_input(dtype=torch.float) + + assert not dp.requires_grad + + with tv_tensors.set_return_type(return_type): + dp_requires_grad = dp.requires_grad_(True) + + assert type(dp_requires_grad) is type(dp) + assert dp.requires_grad + assert dp_requires_grad.requires_grad + + +@pytest.mark.parametrize("make_input", [make_image, make_bounding_boxes, make_segmentation_mask, make_video]) +@pytest.mark.parametrize("return_type", ["Tensor", "TVTensor"]) +def test_detach_wrapping(make_input, return_type): + dp = make_input(dtype=torch.float).requires_grad_(True) + + with tv_tensors.set_return_type(return_type): + dp_detached = dp.detach() + + assert type(dp_detached) is type(dp) + + +@pytest.mark.parametrize("return_type", ["Tensor", "TVTensor"]) +def test_force_subclass_with_metadata(return_type): + # Sanity checks for the ops in _FORCE_TORCHFUNCTION_SUBCLASS and tv_tensors with metadata + # Largely the same as above; we additionally check that the metadata is preserved + format, canvas_size = tv_tensors.BoundingBoxFormat.XYXY, (32, 32) + bbox = tv_tensors.BoundingBoxes([[0, 0, 5, 5], [2, 2, 7, 7]], format=format, canvas_size=canvas_size) + + tv_tensors.set_return_type(return_type) + bbox = bbox.clone() + if return_type == "TVTensor": + assert (bbox.format, bbox.canvas_size) == (format, canvas_size) + + bbox = bbox.to(torch.float64) + if return_type == "TVTensor": + assert (bbox.format, bbox.canvas_size) == (format, canvas_size) + + bbox = bbox.detach() + if return_type == "TVTensor": + assert (bbox.format, bbox.canvas_size) == (format, canvas_size) + + assert not bbox.requires_grad + bbox.requires_grad_(True) + if return_type == "TVTensor": + assert (bbox.format, bbox.canvas_size) == (format, canvas_size) + assert bbox.requires_grad + tv_tensors.set_return_type("tensor") + + +@pytest.mark.parametrize("make_input", [make_image, make_bounding_boxes, make_segmentation_mask, make_video]) +@pytest.mark.parametrize("return_type", 
["Tensor", "TVTensor"]) +def test_other_op_no_wrapping(make_input, return_type): + dp = make_input() + + with tv_tensors.set_return_type(return_type): + # any operation besides the ones listed in _FORCE_TORCHFUNCTION_SUBCLASS will do here + output = dp * 2 + + assert type(output) is (type(dp) if return_type == "TVTensor" else torch.Tensor) + + +@pytest.mark.parametrize("make_input", [make_image, make_bounding_boxes, make_segmentation_mask, make_video]) +@pytest.mark.parametrize( + "op", + [ + lambda t: t.numpy(), + lambda t: t.tolist(), + lambda t: t.max(dim=-1), + ], +) +def test_no_tensor_output_op_no_wrapping(make_input, op): + dp = make_input() + + output = op(dp) + + assert type(output) is not type(dp) + + +@pytest.mark.parametrize("make_input", [make_image, make_bounding_boxes, make_segmentation_mask, make_video]) +@pytest.mark.parametrize("return_type", ["Tensor", "TVTensor"]) +def test_inplace_op_no_wrapping(make_input, return_type): + dp = make_input() + original_type = type(dp) + + with tv_tensors.set_return_type(return_type): + output = dp.add_(0) + + assert type(output) is (type(dp) if return_type == "TVTensor" else torch.Tensor) + assert type(dp) is original_type + + +@pytest.mark.parametrize("make_input", [make_image, make_bounding_boxes, make_segmentation_mask, make_video]) +def test_wrap(make_input): + dp = make_input() + + # any operation besides the ones listed in _FORCE_TORCHFUNCTION_SUBCLASS will do here + output = dp * 2 + + dp_new = tv_tensors.wrap(output, like=dp) + + assert type(dp_new) is type(dp) + assert dp_new.data_ptr() == output.data_ptr() + + +@pytest.mark.parametrize("make_input", [make_image, make_bounding_boxes, make_segmentation_mask, make_video]) +@pytest.mark.parametrize("requires_grad", [False, True]) +def test_deepcopy(make_input, requires_grad): + dp = make_input(dtype=torch.float) + + dp.requires_grad_(requires_grad) + + dp_deepcopied = deepcopy(dp) + + assert dp_deepcopied is not dp + assert dp_deepcopied.data_ptr() != dp.data_ptr() + assert_equal(dp_deepcopied, dp) + + assert type(dp_deepcopied) is type(dp) + assert dp_deepcopied.requires_grad is requires_grad + + +@pytest.mark.parametrize("make_input", [make_image, make_bounding_boxes, make_segmentation_mask, make_video]) +@pytest.mark.parametrize("return_type", ["Tensor", "TVTensor"]) +@pytest.mark.parametrize( + "op", + ( + lambda dp: dp + torch.rand(*dp.shape), + lambda dp: torch.rand(*dp.shape) + dp, + lambda dp: dp * torch.rand(*dp.shape), + lambda dp: torch.rand(*dp.shape) * dp, + lambda dp: dp + 3, + lambda dp: 3 + dp, + lambda dp: dp + dp, + lambda dp: dp.sum(), + lambda dp: dp.reshape(-1), + lambda dp: dp.int(), + lambda dp: torch.stack([dp, dp]), + lambda dp: torch.chunk(dp, 2)[0], + lambda dp: torch.unbind(dp)[0], + ), +) +def test_usual_operations(make_input, return_type, op): + + dp = make_input() + with tv_tensors.set_return_type(return_type): + out = op(dp) + assert type(out) is (type(dp) if return_type == "TVTensor" else torch.Tensor) + if isinstance(dp, tv_tensors.BoundingBoxes) and return_type == "TVTensor": + assert hasattr(out, "format") + assert hasattr(out, "canvas_size") + + +def test_subclasses(): + img = make_image() + masks = make_segmentation_mask() + + with pytest.raises(TypeError, match="unsupported operand"): + img + masks + + +def test_set_return_type(): + img = make_image() + + assert type(img + 3) is torch.Tensor + + with tv_tensors.set_return_type("TVTensor"): + assert type(img + 3) is tv_tensors.Image + assert type(img + 3) is torch.Tensor + + 
tv_tensors.set_return_type("TVTensor") + assert type(img + 3) is tv_tensors.Image + + with tv_tensors.set_return_type("tensor"): + assert type(img + 3) is torch.Tensor + with tv_tensors.set_return_type("TVTensor"): + assert type(img + 3) is tv_tensors.Image + tv_tensors.set_return_type("tensor") + assert type(img + 3) is torch.Tensor + assert type(img + 3) is torch.Tensor + # Exiting a context manager will restore the return type as it was prior to entering it, + # regardless of whether the "global" tv_tensors.set_return_type() was called within the context manager. + assert type(img + 3) is tv_tensors.Image + + tv_tensors.set_return_type("tensor") + + +def test_return_type_input(): + img = make_image() + + # Case-insensitive + with tv_tensors.set_return_type("tvtensor"): + assert type(img + 3) is tv_tensors.Image + + with pytest.raises(ValueError, match="return_type must be"): + tv_tensors.set_return_type("typo") + + tv_tensors.set_return_type("tensor") diff --git a/test/test_utils.py b/test/test_utils.py index f1982130f75..8dfe3a1080f 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -1,84 +1,558 @@ import os +import re import sys import tempfile -import torch -import torchvision.utils as utils -import unittest from io import BytesIO + +import numpy as np +import pytest +import torch import torchvision.transforms.functional as F -from PIL import Image - - -class Tester(unittest.TestCase): - - def test_make_grid_not_inplace(self): - t = torch.rand(5, 3, 10, 10) - t_clone = t.clone() - - utils.make_grid(t, normalize=False) - self.assertTrue(torch.equal(t, t_clone), 'make_grid modified tensor in-place') - - utils.make_grid(t, normalize=True, scale_each=False) - self.assertTrue(torch.equal(t, t_clone), 'make_grid modified tensor in-place') - - utils.make_grid(t, normalize=True, scale_each=True) - self.assertTrue(torch.equal(t, t_clone), 'make_grid modified tensor in-place') - - def test_normalize_in_make_grid(self): - t = torch.rand(5, 3, 10, 10) * 255 - norm_max = torch.tensor(1.0) - norm_min = torch.tensor(0.0) - - grid = utils.make_grid(t, normalize=True) - grid_max = torch.max(grid) - grid_min = torch.min(grid) - - # Rounding the result to one decimal for comparison - n_digits = 1 - rounded_grid_max = torch.round(grid_max * 10 ** n_digits) / (10 ** n_digits) - rounded_grid_min = torch.round(grid_min * 10 ** n_digits) / (10 ** n_digits) - - self.assertTrue(torch.equal(norm_max, rounded_grid_max), 'Normalized max is not equal to 1') - self.assertTrue(torch.equal(norm_min, rounded_grid_min), 'Normalized min is not equal to 0') - - @unittest.skipIf('win' in sys.platform, 'temporarily disabled on Windows') - def test_save_image(self): - with tempfile.NamedTemporaryFile(suffix='.png') as f: - t = torch.rand(2, 3, 64, 64) - utils.save_image(t, f.name) - self.assertTrue(os.path.exists(f.name), 'The image is not present after save') - - @unittest.skipIf('win' in sys.platform, 'temporarily disabled on Windows') - def test_save_image_single_pixel(self): - with tempfile.NamedTemporaryFile(suffix='.png') as f: - t = torch.rand(1, 3, 1, 1) - utils.save_image(t, f.name) - self.assertTrue(os.path.exists(f.name), 'The pixel image is not present after save') - - @unittest.skipIf('win' in sys.platform, 'temporarily disabled on Windows') - def test_save_image_file_object(self): - with tempfile.NamedTemporaryFile(suffix='.png') as f: - t = torch.rand(2, 3, 64, 64) - utils.save_image(t, f.name) - img_orig = Image.open(f.name) - fp = BytesIO() - utils.save_image(t, fp, format='png') - img_bytes = 
Image.open(fp) - self.assertTrue(torch.equal(F.to_tensor(img_orig), F.to_tensor(img_bytes)), - 'Image not stored in file object') - - @unittest.skipIf('win' in sys.platform, 'temporarily disabled on Windows') - def test_save_image_single_pixel_file_object(self): - with tempfile.NamedTemporaryFile(suffix='.png') as f: - t = torch.rand(1, 3, 1, 1) - utils.save_image(t, f.name) - img_orig = Image.open(f.name) - fp = BytesIO() - utils.save_image(t, fp, format='png') - img_bytes = Image.open(fp) - self.assertTrue(torch.equal(F.to_tensor(img_orig), F.to_tensor(img_bytes)), - 'Pixel Image not stored in file object') - - -if __name__ == '__main__': - unittest.main() +import torchvision.utils as utils +from common_utils import assert_equal, cpu_and_cuda +from PIL import __version__ as PILLOW_VERSION, Image, ImageColor +from torchvision.transforms.v2.functional import to_dtype + + +PILLOW_VERSION = tuple(int(x) for x in PILLOW_VERSION.split(".")) + +boxes = torch.tensor([[0, 0, 20, 20], [0, 0, 0, 0], [10, 15, 30, 35], [23, 35, 93, 95]], dtype=torch.float) + +keypoints = torch.tensor([[[10, 10], [5, 5], [2, 2]], [[20, 20], [30, 30], [3, 3]]], dtype=torch.float) + + +def test_make_grid_not_inplace(): + t = torch.rand(5, 3, 10, 10) + t_clone = t.clone() + + utils.make_grid(t, normalize=False) + assert_equal(t, t_clone, msg="make_grid modified tensor in-place") + + utils.make_grid(t, normalize=True, scale_each=False) + assert_equal(t, t_clone, msg="make_grid modified tensor in-place") + + utils.make_grid(t, normalize=True, scale_each=True) + assert_equal(t, t_clone, msg="make_grid modified tensor in-place") + + +def test_normalize_in_make_grid(): + t = torch.rand(5, 3, 10, 10) * 255 + norm_max = torch.tensor(1.0) + norm_min = torch.tensor(0.0) + + grid = utils.make_grid(t, normalize=True) + grid_max = torch.max(grid) + grid_min = torch.min(grid) + + # Rounding the result to one decimal for comparison + n_digits = 1 + rounded_grid_max = torch.round(grid_max * 10**n_digits) / (10**n_digits) + rounded_grid_min = torch.round(grid_min * 10**n_digits) / (10**n_digits) + + assert_equal(norm_max, rounded_grid_max, msg="Normalized max is not equal to 1") + assert_equal(norm_min, rounded_grid_min, msg="Normalized min is not equal to 0") + + +@pytest.mark.skipif(sys.platform in ("win32", "cygwin"), reason="temporarily disabled on Windows") +def test_save_image(): + with tempfile.NamedTemporaryFile(suffix=".png") as f: + t = torch.rand(2, 3, 64, 64) + utils.save_image(t, f.name) + assert os.path.exists(f.name), "The image is not present after save" + + +@pytest.mark.skipif(sys.platform in ("win32", "cygwin"), reason="temporarily disabled on Windows") +def test_save_image_single_pixel(): + with tempfile.NamedTemporaryFile(suffix=".png") as f: + t = torch.rand(1, 3, 1, 1) + utils.save_image(t, f.name) + assert os.path.exists(f.name), "The pixel image is not present after save" + + +@pytest.mark.skipif(sys.platform in ("win32", "cygwin"), reason="temporarily disabled on Windows") +def test_save_image_file_object(): + with tempfile.NamedTemporaryFile(suffix=".png") as f: + t = torch.rand(2, 3, 64, 64) + utils.save_image(t, f.name) + img_orig = Image.open(f.name) + fp = BytesIO() + utils.save_image(t, fp, format="png") + img_bytes = Image.open(fp) + assert_equal(F.pil_to_tensor(img_orig), F.pil_to_tensor(img_bytes), msg="Image not stored in file object") + + +@pytest.mark.skipif(sys.platform in ("win32", "cygwin"), reason="temporarily disabled on Windows") +def test_save_image_single_pixel_file_object(): + with 
tempfile.NamedTemporaryFile(suffix=".png") as f: + t = torch.rand(1, 3, 1, 1) + utils.save_image(t, f.name) + img_orig = Image.open(f.name) + fp = BytesIO() + utils.save_image(t, fp, format="png") + img_bytes = Image.open(fp) + assert_equal(F.pil_to_tensor(img_orig), F.pil_to_tensor(img_bytes), msg="Image not stored in file object") + + +def test_draw_boxes(): + img = torch.full((3, 100, 100), 255, dtype=torch.uint8) + img_cp = img.clone() + boxes_cp = boxes.clone() + labels = ["a", "b", "c", "d"] + colors = ["green", "#FF00FF", (0, 255, 0), "red"] + result = utils.draw_bounding_boxes(img, boxes, labels=labels, colors=colors, fill=True) + + path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "assets", "fakedata", "draw_boxes_util.png") + if not os.path.exists(path): + res = Image.fromarray(result.permute(1, 2, 0).contiguous().numpy()) + res.save(path) + + if PILLOW_VERSION >= (10, 1): + # The reference image is only valid for new PIL versions + expected = torch.as_tensor(np.array(Image.open(path))).permute(2, 0, 1) + assert_equal(result, expected) + + # Check if modification is not in place + assert_equal(boxes, boxes_cp) + assert_equal(img, img_cp) + + +@pytest.mark.skipif(PILLOW_VERSION < (10, 1), reason="The reference image is only valid for PIL >= 10.1") +def test_draw_boxes_with_coloured_labels(): + img = torch.full((3, 100, 100), 255, dtype=torch.uint8) + labels = ["a", "b", "c", "d"] + colors = ["green", "#FF00FF", (0, 255, 0), "red"] + label_colors = ["green", "red", (0, 255, 0), "#FF00FF"] + result = utils.draw_bounding_boxes(img, boxes, labels=labels, colors=colors, fill=True, label_colors=label_colors) + + path = os.path.join( + os.path.dirname(os.path.abspath(__file__)), "assets", "fakedata", "draw_boxes_different_label_colors.png" + ) + expected = torch.as_tensor(np.array(Image.open(path))).permute(2, 0, 1) + assert_equal(result, expected) + + +@pytest.mark.parametrize("fill", [True, False]) +def test_draw_boxes_dtypes(fill): + img_uint8 = torch.full((3, 100, 100), 255, dtype=torch.uint8) + out_uint8 = utils.draw_bounding_boxes(img_uint8, boxes, fill=fill) + + assert img_uint8 is not out_uint8 + assert out_uint8.dtype == torch.uint8 + + img_float = to_dtype(img_uint8, torch.float, scale=True) + out_float = utils.draw_bounding_boxes(img_float, boxes, fill=fill) + + assert img_float is not out_float + assert out_float.is_floating_point() + + torch.testing.assert_close(out_uint8, to_dtype(out_float, torch.uint8, scale=True), rtol=0, atol=1) + + +@pytest.mark.parametrize("colors", [None, ["red", "blue", "#FF00FF", (1, 34, 122)], "red", "#FF00FF", (1, 34, 122)]) +def test_draw_boxes_colors(colors): + img = torch.full((3, 100, 100), 0, dtype=torch.uint8) + utils.draw_bounding_boxes(img, boxes, fill=False, width=7, colors=colors) + + with pytest.raises(ValueError, match="Number of colors must be equal or larger than the number of objects"): + utils.draw_bounding_boxes(image=img, boxes=boxes, colors=[]) + + +def test_draw_boxes_vanilla(): + img = torch.full((3, 100, 100), 0, dtype=torch.uint8) + img_cp = img.clone() + boxes_cp = boxes.clone() + result = utils.draw_bounding_boxes(img, boxes, fill=False, width=7, colors="white") + + path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "assets", "fakedata", "draw_boxes_vanilla.png") + if not os.path.exists(path): + res = Image.fromarray(result.permute(1, 2, 0).contiguous().numpy()) + res.save(path) + + expected = torch.as_tensor(np.array(Image.open(path))).permute(2, 0, 1) + assert_equal(result, expected) + # Check 
if modification is not in place + assert_equal(boxes, boxes_cp) + assert_equal(img, img_cp) + + +def test_draw_boxes_grayscale(): + img = torch.full((1, 4, 4), fill_value=255, dtype=torch.uint8) + boxes = torch.tensor([[0, 0, 3, 3]], dtype=torch.int64) + bboxed_img = utils.draw_bounding_boxes(image=img, boxes=boxes, colors=["#1BBC9B"]) + assert bboxed_img.size(0) == 3 + + +def test_draw_invalid_boxes(): + img_tp = ((1, 1, 1), (1, 2, 3)) + img_wrong2 = torch.full((1, 3, 5, 5), 255, dtype=torch.uint8) + img_correct = torch.zeros((3, 10, 10), dtype=torch.uint8) + boxes = torch.tensor([[0, 0, 20, 20], [0, 0, 0, 0], [10, 15, 30, 35], [23, 35, 93, 95]], dtype=torch.float) + boxes_wrong = torch.tensor([[10, 10, 4, 5], [30, 20, 10, 5]], dtype=torch.float) + labels_wrong = ["one", "two"] + colors_wrong = ["pink", "blue"] + + with pytest.raises(TypeError, match="Tensor expected"): + utils.draw_bounding_boxes(img_tp, boxes) + with pytest.raises(ValueError, match="Pass individual images, not batches"): + utils.draw_bounding_boxes(img_wrong2, boxes) + with pytest.raises(ValueError, match="Only grayscale and RGB images are supported"): + utils.draw_bounding_boxes(img_wrong2[0][:2], boxes) + with pytest.raises(ValueError, match="Number of boxes"): + utils.draw_bounding_boxes(img_correct, boxes, labels_wrong) + with pytest.raises(ValueError, match="Number of colors"): + utils.draw_bounding_boxes(img_correct, boxes, colors=colors_wrong) + with pytest.raises(ValueError, match="Boxes need to be in"): + utils.draw_bounding_boxes(img_correct, boxes_wrong) + + +def test_draw_boxes_warning(): + img = torch.full((3, 100, 100), 255, dtype=torch.uint8) + + with pytest.warns(UserWarning, match=re.escape("Argument 'font_size' will be ignored since 'font' is not set.")): + utils.draw_bounding_boxes(img, boxes, font_size=11) + + +def test_draw_no_boxes(): + img = torch.full((3, 100, 100), 0, dtype=torch.uint8) + boxes = torch.full((0, 4), 0, dtype=torch.float) + with pytest.warns(UserWarning, match=re.escape("boxes doesn't contain any box. 
No box was drawn")): + res = utils.draw_bounding_boxes(img, boxes) + # Check that the function didn't change the image + assert res.eq(img).all() + + +@pytest.mark.parametrize( + "colors", + [ + None, + "blue", + "#FF00FF", + (1, 34, 122), + ["red", "blue"], + ["#FF00FF", (1, 34, 122)], + ], +) +@pytest.mark.parametrize("alpha", (0, 0.5, 0.7, 1)) +@pytest.mark.parametrize("device", cpu_and_cuda()) +def test_draw_segmentation_masks(colors, alpha, device): + """This test makes sure that masks draw their corresponding color where they should""" + num_masks, h, w = 2, 100, 100 + dtype = torch.uint8 + img = torch.randint(0, 256, size=(3, h, w), dtype=dtype, device=device) + masks = torch.zeros((num_masks, h, w), dtype=torch.bool, device=device) + masks[0, 10:20, 10:20] = True + masks[1, 15:25, 15:25] = True + + overlap = masks[0] & masks[1] + + out = utils.draw_segmentation_masks(img, masks, colors=colors, alpha=alpha) + assert out.dtype == dtype + assert out is not img + + # Make sure the image didn't change where there's no mask + masked_pixels = masks[0] | masks[1] + assert_equal(img[:, ~masked_pixels], out[:, ~masked_pixels]) + + if colors is None: + colors = utils._generate_color_palette(num_masks) + elif isinstance(colors, str) or isinstance(colors, tuple): + colors = [colors] + + # Make sure each mask draws with its own color + for mask, color in zip(masks, colors): + if isinstance(color, str): + color = ImageColor.getrgb(color) + color = torch.tensor(color, dtype=dtype, device=device) + + if alpha == 1: + assert (out[:, mask & ~overlap] == color[:, None]).all() + elif alpha == 0: + assert (out[:, mask & ~overlap] == img[:, mask & ~overlap]).all() + + interpolated_color = (img[:, mask & ~overlap] * (1 - alpha) + color[:, None] * alpha).to(dtype) + torch.testing.assert_close(out[:, mask & ~overlap], interpolated_color, rtol=0.0, atol=1.0) + + interpolated_overlap = (img[:, overlap] * (1 - alpha)).to(dtype) + torch.testing.assert_close(out[:, overlap], interpolated_overlap, rtol=0.0, atol=1.0) + + +def test_draw_segmentation_masks_dtypes(): + num_masks, h, w = 2, 100, 100 + + masks = torch.randint(0, 2, (num_masks, h, w), dtype=torch.bool) + + img_uint8 = torch.randint(0, 256, size=(3, h, w), dtype=torch.uint8) + out_uint8 = utils.draw_segmentation_masks(img_uint8, masks) + + assert img_uint8 is not out_uint8 + assert out_uint8.dtype == torch.uint8 + + img_float = to_dtype(img_uint8, torch.float, scale=True) + out_float = utils.draw_segmentation_masks(img_float, masks) + + assert img_float is not out_float + assert out_float.is_floating_point() + + torch.testing.assert_close(out_uint8, to_dtype(out_float, torch.uint8, scale=True), rtol=0, atol=1) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +def test_draw_segmentation_masks_errors(device): + h, w = 10, 10 + + masks = torch.randint(0, 2, size=(h, w), dtype=torch.bool, device=device) + img = torch.randint(0, 256, size=(3, h, w), dtype=torch.uint8, device=device) + + with pytest.raises(TypeError, match="The image must be a tensor"): + utils.draw_segmentation_masks(image="Not A Tensor Image", masks=masks) + with pytest.raises(ValueError, match="The image dtype must be"): + img_bad_dtype = torch.randint(0, 256, size=(3, h, w), dtype=torch.int64) + utils.draw_segmentation_masks(image=img_bad_dtype, masks=masks) + with pytest.raises(ValueError, match="Pass individual images, not batches"): + batch = torch.randint(0, 256, size=(10, 3, h, w), dtype=torch.uint8) + utils.draw_segmentation_masks(image=batch, masks=masks) + with 
pytest.raises(ValueError, match="Pass an RGB image"): + one_channel = torch.randint(0, 256, size=(1, h, w), dtype=torch.uint8) + utils.draw_segmentation_masks(image=one_channel, masks=masks) + with pytest.raises(ValueError, match="The masks must be of dtype bool"): + masks_bad_dtype = torch.randint(0, 2, size=(h, w), dtype=torch.float) + utils.draw_segmentation_masks(image=img, masks=masks_bad_dtype) + with pytest.raises(ValueError, match="masks must be of shape"): + masks_bad_shape = torch.randint(0, 2, size=(3, 2, h, w), dtype=torch.bool) + utils.draw_segmentation_masks(image=img, masks=masks_bad_shape) + with pytest.raises(ValueError, match="must have the same height and width"): + masks_bad_shape = torch.randint(0, 2, size=(h + 4, w), dtype=torch.bool) + utils.draw_segmentation_masks(image=img, masks=masks_bad_shape) + with pytest.raises(ValueError, match="Number of colors must be equal or larger than the number of objects"): + utils.draw_segmentation_masks(image=img, masks=masks, colors=[]) + with pytest.raises(ValueError, match="`colors` must be a tuple or a string, or a list thereof"): + bad_colors = np.array(["red", "blue"]) # should be a list + utils.draw_segmentation_masks(image=img, masks=masks, colors=bad_colors) + with pytest.raises(ValueError, match="If passed as tuple, colors should be an RGB triplet"): + bad_colors = ("red", "blue") # should be a list + utils.draw_segmentation_masks(image=img, masks=masks, colors=bad_colors) + + +@pytest.mark.parametrize("device", cpu_and_cuda()) +def test_draw_no_segmention_mask(device): + img = torch.full((3, 100, 100), 0, dtype=torch.uint8, device=device) + masks = torch.full((0, 100, 100), 0, dtype=torch.bool, device=device) + with pytest.warns(UserWarning, match=re.escape("masks doesn't contain any mask. 
No mask was drawn")): + res = utils.draw_segmentation_masks(img, masks) + # Check that the function didn't change the image + assert res.eq(img).all() + + +def test_draw_keypoints_vanilla(): + # Keypoints is declared on top as global variable + keypoints_cp = keypoints.clone() + + img = torch.full((3, 100, 100), 0, dtype=torch.uint8) + img_cp = img.clone() + result = utils.draw_keypoints( + img, + keypoints, + colors="red", + connectivity=[ + (0, 1), + ], + ) + path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "assets", "fakedata", "draw_keypoint_vanilla.png") + if not os.path.exists(path): + res = Image.fromarray(result.permute(1, 2, 0).contiguous().numpy()) + res.save(path) + + expected = torch.as_tensor(np.array(Image.open(path))).permute(2, 0, 1) + assert_equal(result, expected) + # Check that keypoints are not modified inplace + assert_equal(keypoints, keypoints_cp) + # Check that image is not modified in place + assert_equal(img, img_cp) + + +def test_draw_keypoins_K_equals_one(): + # Non-regression test for https://github.com/pytorch/vision/pull/8439 + img = torch.full((3, 100, 100), 0, dtype=torch.uint8) + keypoints = torch.tensor([[[10, 10]]], dtype=torch.float) + utils.draw_keypoints(img, keypoints) + + +@pytest.mark.parametrize("colors", ["red", "#FF00FF", (1, 34, 122)]) +def test_draw_keypoints_colored(colors): + # Keypoints is declared on top as global variable + keypoints_cp = keypoints.clone() + + img = torch.full((3, 100, 100), 0, dtype=torch.uint8) + img_cp = img.clone() + result = utils.draw_keypoints( + img, + keypoints, + colors=colors, + connectivity=[ + (0, 1), + ], + ) + assert result.size(0) == 3 + assert_equal(keypoints, keypoints_cp) + assert_equal(img, img_cp) + + +@pytest.mark.parametrize("connectivity", [[(0, 1)], [(0, 1), (1, 2)]]) +@pytest.mark.parametrize( + "vis", + [ + torch.tensor([[1, 1, 0], [1, 1, 0]], dtype=torch.bool), + torch.tensor([[1, 1, 0], [1, 1, 0]], dtype=torch.float).unsqueeze_(-1), + ], +) +def test_draw_keypoints_visibility(connectivity, vis): + # Keypoints is declared on top as global variable + keypoints_cp = keypoints.clone() + + img = torch.full((3, 100, 100), 0, dtype=torch.uint8) + img_cp = img.clone() + + vis_cp = vis if vis is None else vis.clone() + + result = utils.draw_keypoints( + image=img, + keypoints=keypoints, + connectivity=connectivity, + colors="red", + visibility=vis, + ) + assert result.size(0) == 3 + assert_equal(keypoints, keypoints_cp) + assert_equal(img, img_cp) + + # compare with a fakedata image + # connect the key points 0 to 1 for both skeletons and do not show the other key points + path = os.path.join( + os.path.dirname(os.path.abspath(__file__)), "assets", "fakedata", "draw_keypoints_visibility.png" + ) + if not os.path.exists(path): + res = Image.fromarray(result.permute(1, 2, 0).contiguous().numpy()) + res.save(path) + + expected = torch.as_tensor(np.array(Image.open(path))).permute(2, 0, 1) + assert_equal(result, expected) + + if vis_cp is None: + assert vis is None + else: + assert_equal(vis, vis_cp) + assert vis.dtype == vis_cp.dtype + + +def test_draw_keypoints_visibility_default(): + # Keypoints is declared on top as global variable + keypoints_cp = keypoints.clone() + + img = torch.full((3, 100, 100), 0, dtype=torch.uint8) + img_cp = img.clone() + + result = utils.draw_keypoints( + image=img, + keypoints=keypoints, + connectivity=[(0, 1)], + colors="red", + visibility=None, + ) + assert result.size(0) == 3 + assert_equal(keypoints, keypoints_cp) + assert_equal(img, img_cp) + + # compare 
+@pytest.mark.parametrize("connectivity", [[(0, 1)], [(0, 1), (1, 2)]])
+@pytest.mark.parametrize(
+    "vis",
+    [
+        torch.tensor([[1, 1, 0], [1, 1, 0]], dtype=torch.bool),
+        torch.tensor([[1, 1, 0], [1, 1, 0]], dtype=torch.float).unsqueeze_(-1),
+    ],
+)
+def test_draw_keypoints_visibility(connectivity, vis):
+    # Keypoints is declared on top as global variable
+    keypoints_cp = keypoints.clone()
+
+    img = torch.full((3, 100, 100), 0, dtype=torch.uint8)
+    img_cp = img.clone()
+
+    vis_cp = vis if vis is None else vis.clone()
+
+    result = utils.draw_keypoints(
+        image=img,
+        keypoints=keypoints,
+        connectivity=connectivity,
+        colors="red",
+        visibility=vis,
+    )
+    assert result.size(0) == 3
+    assert_equal(keypoints, keypoints_cp)
+    assert_equal(img, img_cp)
+
+    # compare with a fakedata image
+    # connect the key points 0 to 1 for both skeletons and do not show the other key points
+    path = os.path.join(
+        os.path.dirname(os.path.abspath(__file__)), "assets", "fakedata", "draw_keypoints_visibility.png"
+    )
+    if not os.path.exists(path):
+        res = Image.fromarray(result.permute(1, 2, 0).contiguous().numpy())
+        res.save(path)
+
+    expected = torch.as_tensor(np.array(Image.open(path))).permute(2, 0, 1)
+    assert_equal(result, expected)
+
+    if vis_cp is None:
+        assert vis is None
+    else:
+        assert_equal(vis, vis_cp)
+        assert vis.dtype == vis_cp.dtype
+
+
+def test_draw_keypoints_visibility_default():
+    # Keypoints is declared on top as global variable
+    keypoints_cp = keypoints.clone()
+
+    img = torch.full((3, 100, 100), 0, dtype=torch.uint8)
+    img_cp = img.clone()
+
+    result = utils.draw_keypoints(
+        image=img,
+        keypoints=keypoints,
+        connectivity=[(0, 1)],
+        colors="red",
+        visibility=None,
+    )
+    assert result.size(0) == 3
+    assert_equal(keypoints, keypoints_cp)
+    assert_equal(img, img_cp)
+
+    # compare against fakedata image, which connects 0->1 for both key-point skeletons
+    path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "assets", "fakedata", "draw_keypoint_vanilla.png")
+    expected = torch.as_tensor(np.array(Image.open(path))).permute(2, 0, 1)
+    assert_equal(result, expected)
+
+
+def test_draw_keypoints_dtypes():
+    image_uint8 = torch.randint(0, 256, size=(3, 100, 100), dtype=torch.uint8)
+    image_float = to_dtype(image_uint8, torch.float, scale=True)
+
+    out_uint8 = utils.draw_keypoints(image_uint8, keypoints)
+    out_float = utils.draw_keypoints(image_float, keypoints)
+
+    assert out_uint8.dtype == torch.uint8
+    assert out_uint8 is not image_uint8
+
+    assert out_float.is_floating_point()
+    assert out_float is not image_float
+
+    torch.testing.assert_close(out_uint8, to_dtype(out_float, torch.uint8, scale=True), rtol=0, atol=1)
+
+
+def test_draw_keypoints_errors():
+    h, w = 10, 10
+    img = torch.full((3, 100, 100), 0, dtype=torch.uint8)
+
+    with pytest.raises(TypeError, match="The image must be a tensor"):
+        utils.draw_keypoints(image="Not A Tensor Image", keypoints=keypoints)
+    with pytest.raises(ValueError, match="The image dtype must be"):
+        img_bad_dtype = torch.full((3, h, w), 0, dtype=torch.int64)
+        utils.draw_keypoints(image=img_bad_dtype, keypoints=keypoints)
+    with pytest.raises(ValueError, match="Pass individual images, not batches"):
+        batch = torch.randint(0, 256, size=(10, 3, h, w), dtype=torch.uint8)
+        utils.draw_keypoints(image=batch, keypoints=keypoints)
+    with pytest.raises(ValueError, match="Pass an RGB image"):
+        one_channel = torch.randint(0, 256, size=(1, h, w), dtype=torch.uint8)
+        utils.draw_keypoints(image=one_channel, keypoints=keypoints)
+    with pytest.raises(ValueError, match="keypoints must be of shape"):
+        invalid_keypoints = torch.tensor([[10, 10, 10, 10], [5, 6, 7, 8]], dtype=torch.float)
+        utils.draw_keypoints(image=img, keypoints=invalid_keypoints)
+    with pytest.raises(ValueError, match=re.escape("visibility must be of shape (num_instances, K)")):
+        one_dim_visibility = torch.tensor([True, True, True], dtype=torch.bool)
+        utils.draw_keypoints(image=img, keypoints=keypoints, visibility=one_dim_visibility)
+    with pytest.raises(ValueError, match=re.escape("visibility must be of shape (num_instances, K)")):
+        three_dim_visibility = torch.ones((2, 3, 4), dtype=torch.bool)
+        utils.draw_keypoints(image=img, keypoints=keypoints, visibility=three_dim_visibility)
+    with pytest.raises(ValueError, match="keypoints and visibility must have the same dimensionality"):
+        vis_wrong_n = torch.ones((3, 3), dtype=torch.bool)
+        utils.draw_keypoints(image=img, keypoints=keypoints, visibility=vis_wrong_n)
+    with pytest.raises(ValueError, match="keypoints and visibility must have the same dimensionality"):
+        vis_wrong_k = torch.ones((2, 4), dtype=torch.bool)
+        utils.draw_keypoints(image=img, keypoints=keypoints, visibility=vis_wrong_k)
+
+
+@pytest.mark.parametrize("batch", (True, False))
+def test_flow_to_image(batch):
+    h, w = 100, 100
+    flow = torch.meshgrid(torch.arange(h), torch.arange(w), indexing="ij")
+    flow = torch.stack(flow[::-1], dim=0).float()
+    flow[0] -= h / 2
+    flow[1] -= w / 2
+
+    if batch:
+        flow = torch.stack([flow, flow])
+
+    img = utils.flow_to_image(flow)
+    # the expected shape depends on whether a batch dimension was added; the
+    # conditional must be parenthesized so both branches compare the shape
+    assert img.shape == ((2, 3, h, w) if batch else (3, h, w))
+
+    path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "assets", "expected_flow.pt")
+    expected_img = torch.load(path, map_location="cpu", weights_only=True)
+
+    if batch:
+        expected_img = torch.stack([expected_img, 
expected_img]) + + assert_equal(expected_img, img) + + +@pytest.mark.parametrize( + "input_flow, match", + ( + (torch.full((3, 10, 10), 0, dtype=torch.float), "Input flow should have shape"), + (torch.full((5, 3, 10, 10), 0, dtype=torch.float), "Input flow should have shape"), + (torch.full((2, 10), 0, dtype=torch.float), "Input flow should have shape"), + (torch.full((5, 2, 10), 0, dtype=torch.float), "Input flow should have shape"), + (torch.full((2, 10, 30), 0, dtype=torch.int), "Flow should be of dtype torch.float"), + ), +) +def test_flow_to_image_errors(input_flow, match): + with pytest.raises(ValueError, match=match): + utils.flow_to_image(flow=input_flow) + + +if __name__ == "__main__": + pytest.main([__file__]) diff --git a/test/test_video_gpu_decoder.py b/test/test_video_gpu_decoder.py new file mode 100644 index 00000000000..aa6d0aee9e0 --- /dev/null +++ b/test/test_video_gpu_decoder.py @@ -0,0 +1,97 @@ +import math +import os + +import pytest +import torch +import torchvision +from torchvision.io import _HAS_GPU_VIDEO_DECODER, VideoReader + +try: + import av +except ImportError: + av = None + +VIDEO_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "assets", "videos") + + +@pytest.mark.skipif(_HAS_GPU_VIDEO_DECODER is False, reason="Didn't compile with support for gpu decoder") +class TestVideoGPUDecoder: + @pytest.mark.skipif(av is None, reason="PyAV unavailable") + @pytest.mark.parametrize( + "video_file", + [ + "RATRACE_wave_f_nm_np1_fr_goo_37.avi", + "TrumanShow_wave_f_nm_np1_fr_med_26.avi", + "v_SoccerJuggling_g23_c01.avi", + "v_SoccerJuggling_g24_c01.avi", + "R6llTwEh07w.mp4", + "SOX5yA1l24A.mp4", + "WUzgd7C1pWA.mp4", + ], + ) + def test_frame_reading(self, video_file): + torchvision.set_video_backend("cuda") + full_path = os.path.join(VIDEO_DIR, video_file) + decoder = VideoReader(full_path) + with av.open(full_path) as container: + for av_frame in container.decode(container.streams.video[0]): + av_frames = torch.tensor(av_frame.to_rgb(src_colorspace="ITU709").to_ndarray()) + vision_frames = next(decoder)["data"] + mean_delta = torch.mean(torch.abs(av_frames.float() - vision_frames.cpu().float())) + assert mean_delta < 0.75 + + @pytest.mark.skipif(av is None, reason="PyAV unavailable") + @pytest.mark.parametrize("keyframes", [True, False]) + @pytest.mark.parametrize( + "full_path, duration", + [ + (os.path.join(VIDEO_DIR, x), y) + for x, y in [ + ("v_SoccerJuggling_g23_c01.avi", 8.0), + ("v_SoccerJuggling_g24_c01.avi", 8.0), + ("R6llTwEh07w.mp4", 10.0), + ("SOX5yA1l24A.mp4", 11.0), + ("WUzgd7C1pWA.mp4", 11.0), + ] + ], + ) + def test_seek_reading(self, keyframes, full_path, duration): + torchvision.set_video_backend("cuda") + decoder = VideoReader(full_path) + time = duration / 2 + decoder.seek(time, keyframes_only=keyframes) + with av.open(full_path) as container: + container.seek(int(time * 1000000), any_frame=not keyframes, backward=False) + for av_frame in container.decode(container.streams.video[0]): + av_frames = torch.tensor(av_frame.to_rgb(src_colorspace="ITU709").to_ndarray()) + vision_frames = next(decoder)["data"] + mean_delta = torch.mean(torch.abs(av_frames.float() - vision_frames.cpu().float())) + assert mean_delta < 0.75 + + @pytest.mark.skipif(av is None, reason="PyAV unavailable") + @pytest.mark.parametrize( + "video_file", + [ + "RATRACE_wave_f_nm_np1_fr_goo_37.avi", + "TrumanShow_wave_f_nm_np1_fr_med_26.avi", + "v_SoccerJuggling_g23_c01.avi", + "v_SoccerJuggling_g24_c01.avi", + "R6llTwEh07w.mp4", + "SOX5yA1l24A.mp4", + "WUzgd7C1pWA.mp4", + 
], + ) + def test_metadata(self, video_file): + torchvision.set_video_backend("cuda") + full_path = os.path.join(VIDEO_DIR, video_file) + decoder = VideoReader(full_path) + video_metadata = decoder.get_metadata()["video"] + with av.open(full_path) as container: + video = container.streams.video[0] + av_duration = float(video.duration * video.time_base) + assert math.isclose(video_metadata["duration"], av_duration, rel_tol=1e-2) + assert math.isclose(video_metadata["fps"], video.base_rate, rel_tol=1e-2) + + +if __name__ == "__main__": + pytest.main([__file__]) diff --git a/test/test_video_reader.py b/test/test_video_reader.py index bf59eb7dc4d..10995424982 100644 --- a/test/test_video_reader.py +++ b/test/test_video_reader.py @@ -1,33 +1,28 @@ import collections -from common_utils import get_tmp_dir -from fractions import Fraction import math -import numpy as np import os -import sys -import time +from fractions import Fraction + +import numpy as np +import pytest import torch import torchvision.io as io -import unittest +from common_utils import assert_equal from numpy.random import randint +from pytest import approx +from torchvision import set_video_backend +from torchvision.io import _HAS_CPU_VIDEO_DECODER + try: import av + # Do a version test too io.video._check_av_available() except ImportError: av = None -if sys.version_info < (3,): - from urllib2 import URLError -else: - from urllib.error import URLError - - -from torchvision.io import _HAS_VIDEO_OPT - - VIDEO_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "assets", "videos") CheckerConfig = [ @@ -39,10 +34,7 @@ "check_aframes", "check_aframe_pts", ] -GroundTruth = collections.namedtuple( - "GroundTruth", - " ".join(CheckerConfig) -) +GroundTruth = collections.namedtuple("GroundTruth", " ".join(CheckerConfig)) all_check_config = GroundTruth( duration=0, @@ -115,18 +107,14 @@ } -DecoderResult = collections.namedtuple( - "DecoderResult", "vframes vframe_pts vtimebase aframes aframe_pts atimebase" -) +DecoderResult = collections.namedtuple("DecoderResult", "vframes vframe_pts vtimebase aframes aframe_pts atimebase") -"""av_seek_frame is imprecise so seek to a timestamp earlier by a margin -The unit of margin is second""" -seek_frame_margin = 0.25 +# av_seek_frame is imprecise so seek to a timestamp earlier by a margin +# The unit of margin is second +SEEK_FRAME_MARGIN = 0.25 -def _read_from_stream( - container, start_pts, end_pts, stream, stream_name, buffer_size=4 -): +def _read_from_stream(container, start_pts, end_pts, stream, stream_name, buffer_size=4): """ Args: container: pyav container @@ -139,7 +127,7 @@ def _read_from_stream( ascending order. We need to decode more frames even when we meet end pts """ - # seeking in the stream is imprecise. Thus, seek to an ealier PTS by a margin + # seeking in the stream is imprecise. 
Thus, seek to an earlier PTS by a margin margin = 1 seek_offset = max(start_pts - margin, 0) @@ -193,9 +181,9 @@ def _decode_frames_by_av_module( frames are read """ if video_end_pts is None: - video_end_pts = float('inf') + video_end_pts = float("inf") if audio_end_pts is None: - audio_end_pts = float('inf') + audio_end_pts = float("inf") container = av.open(full_path) video_frames = [] @@ -238,9 +226,7 @@ def _decode_frames_by_av_module( else: aframes = torch.empty((1, 0), dtype=torch.float32) - aframe_pts = torch.tensor( - [audio_frame.pts for audio_frame in audio_frames], dtype=torch.int64 - ) + aframe_pts = torch.tensor([audio_frame.pts for audio_frame in audio_frames], dtype=torch.int64) return DecoderResult( vframes=vframes, @@ -271,55 +257,64 @@ def _get_video_tensor(video_dir, video_file): assert os.path.exists(full_path), "File not found: %s" % full_path with open(full_path, "rb") as fp: - video_tensor = torch.from_numpy(np.frombuffer(fp.read(), dtype=np.uint8)) + video_tensor = torch.frombuffer(fp.read(), dtype=torch.uint8) return full_path, video_tensor -@unittest.skipIf(av is None, "PyAV unavailable") -@unittest.skipIf(_HAS_VIDEO_OPT is False, "Didn't compile with ffmpeg") -class TestVideoReader(unittest.TestCase): +@pytest.mark.skipif(av is None, reason="PyAV unavailable") +@pytest.mark.skipif(_HAS_CPU_VIDEO_DECODER is False, reason="Didn't compile with ffmpeg") +class TestVideoReader: def check_separate_decoding_result(self, tv_result, config): - """check the decoding results from TorchVision decoder - """ - vframes, vframe_pts, vtimebase, vfps, vduration, aframes, aframe_pts, \ - atimebase, asample_rate, aduration = tv_result - - video_duration = vduration.item() * Fraction( - vtimebase[0].item(), vtimebase[1].item() - ) - self.assertAlmostEqual(video_duration, config.duration, delta=0.5) + """check the decoding results from TorchVision decoder""" + ( + vframes, + vframe_pts, + vtimebase, + vfps, + vduration, + aframes, + aframe_pts, + atimebase, + asample_rate, + aduration, + ) = tv_result + + video_duration = vduration.item() * Fraction(vtimebase[0].item(), vtimebase[1].item()) + assert video_duration == approx(config.duration, abs=0.5) + + assert vfps.item() == approx(config.video_fps, abs=0.5) - self.assertAlmostEqual(vfps.item(), config.video_fps, delta=0.5) if asample_rate.numel() > 0: - self.assertEqual(asample_rate.item(), config.audio_sample_rate) - audio_duration = aduration.item() * Fraction( - atimebase[0].item(), atimebase[1].item() - ) - self.assertAlmostEqual(audio_duration, config.duration, delta=0.5) + assert asample_rate.item() == config.audio_sample_rate + audio_duration = aduration.item() * Fraction(atimebase[0].item(), atimebase[1].item()) + assert audio_duration == approx(config.duration, abs=0.5) # check if pts of video frames are sorted in ascending order for i in range(len(vframe_pts) - 1): - self.assertEqual(vframe_pts[i] < vframe_pts[i + 1], True) + assert vframe_pts[i] < vframe_pts[i + 1] if len(aframe_pts) > 1: # check if pts of audio frames are sorted in ascending order for i in range(len(aframe_pts) - 1): - self.assertEqual(aframe_pts[i] < aframe_pts[i + 1], True) + assert aframe_pts[i] < aframe_pts[i + 1] def check_probe_result(self, result, config): vtimebase, vfps, vduration, atimebase, asample_rate, aduration = result - video_duration = vduration.item() * Fraction( - vtimebase[0].item(), vtimebase[1].item() - ) - self.assertAlmostEqual(video_duration, config.duration, delta=0.5) - self.assertAlmostEqual(vfps.item(), config.video_fps, 
delta=0.5) + video_duration = vduration.item() * Fraction(vtimebase[0].item(), vtimebase[1].item()) + assert video_duration == approx(config.duration, abs=0.5) + assert vfps.item() == approx(config.video_fps, abs=0.5) if asample_rate.numel() > 0: - self.assertEqual(asample_rate.item(), config.audio_sample_rate) - audio_duration = aduration.item() * Fraction( - atimebase[0].item(), atimebase[1].item() - ) - self.assertAlmostEqual(audio_duration, config.duration, delta=0.5) + assert asample_rate.item() == config.audio_sample_rate + audio_duration = aduration.item() * Fraction(atimebase[0].item(), atimebase[1].item()) + assert audio_duration == approx(config.duration, abs=0.5) + + def check_meta_result(self, result, config): + assert result.video_duration == approx(config.duration, abs=0.5) + assert result.video_fps == approx(config.video_fps, abs=0.5) + if result.has_audio > 0: + assert result.audio_sample_rate == config.audio_sample_rate + assert result.audio_duration == approx(config.duration, abs=0.5) def compare_decoding_result(self, tv_result, ref_result, config=all_check_config): """ @@ -330,8 +325,18 @@ def compare_decoding_result(self, tv_result, ref_result, config=all_check_config decoder or TorchVision decoder with getPtsOnly = 1 config: config of decoding results checker """ - vframes, vframe_pts, vtimebase, _vfps, _vduration, aframes, aframe_pts, \ - atimebase, _asample_rate, _aduration = tv_result + ( + vframes, + vframe_pts, + vtimebase, + _vfps, + _vduration, + aframes, + aframe_pts, + atimebase, + _asample_rate, + _aduration, + ) = tv_result if isinstance(ref_result, list): # the ref_result is from new video_reader decoder ref_result = DecoderResult( @@ -345,37 +350,34 @@ def compare_decoding_result(self, tv_result, ref_result, config=all_check_config if vframes.numel() > 0 and ref_result.vframes.numel() > 0: mean_delta = torch.mean(torch.abs(vframes.float() - ref_result.vframes.float())) - self.assertAlmostEqual(mean_delta, 0, delta=8.0) + assert mean_delta == approx(0.0, abs=8.0) mean_delta = torch.mean(torch.abs(vframe_pts.float() - ref_result.vframe_pts.float())) - self.assertAlmostEqual(mean_delta, 0, delta=1.0) + assert mean_delta == approx(0.0, abs=1.0) - is_same = torch.all(torch.eq(vtimebase, ref_result.vtimebase)).item() - self.assertEqual(is_same, True) + assert_equal(vtimebase, ref_result.vtimebase) if config.check_aframes and aframes.numel() > 0 and ref_result.aframes.numel() > 0: """Audio stream is available and audio frame is required to return from decoder""" - is_same = torch.all(torch.eq(aframes, ref_result.aframes)).item() - self.assertEqual(is_same, True) + assert_equal(aframes, ref_result.aframes) if config.check_aframe_pts and aframe_pts.numel() > 0 and ref_result.aframe_pts.numel() > 0: """Audio stream is available""" - is_same = torch.all(torch.eq(aframe_pts, ref_result.aframe_pts)).item() - self.assertEqual(is_same, True) + assert_equal(aframe_pts, ref_result.aframe_pts) - is_same = torch.all(torch.eq(atimebase, ref_result.atimebase)).item() - self.assertEqual(is_same, True) + assert_equal(atimebase, ref_result.atimebase) - @unittest.skip( - "This stress test will iteratively decode the same set of videos." - "It helps to detect memory leak but it takes lots of time to run." 
- "By default, it is disabled" - ) - def test_stress_test_read_video_from_file(self): + @pytest.mark.parametrize("test_video", test_videos.keys()) + def test_stress_test_read_video_from_file(self, test_video): + pytest.skip( + "This stress test will iteratively decode the same set of videos." + "It helps to detect memory leak but it takes lots of time to run." + "By default, it is disabled" + ) num_iter = 10000 # video related - width, height, min_dimension = 0, 0, 0 + width, height, min_dimension, max_dimension = 0, 0, 0, 0 video_start_pts, video_end_pts = 0, -1 video_timebase_num, video_timebase_den = 0, 1 # audio related @@ -384,56 +386,18 @@ def test_stress_test_read_video_from_file(self): audio_timebase_num, audio_timebase_den = 0, 1 for _i in range(num_iter): - for test_video, _config in test_videos.items(): - full_path = os.path.join(VIDEO_DIR, test_video) - - # pass 1: decode all frames using new decoder - torch.ops.video_reader.read_video_from_file( - full_path, - seek_frame_margin, - 0, # getPtsOnly - 1, # readVideoStream - width, - height, - min_dimension, - video_start_pts, - video_end_pts, - video_timebase_num, - video_timebase_den, - 1, # readAudioStream - samples, - channels, - audio_start_pts, - audio_end_pts, - audio_timebase_num, - audio_timebase_den, - ) - - def test_read_video_from_file(self): - """ - Test the case when decoder starts with a video file to decode frames. - """ - # video related - width, height, min_dimension = 0, 0, 0 - video_start_pts, video_end_pts = 0, -1 - video_timebase_num, video_timebase_den = 0, 1 - # audio related - samples, channels = 0, 0 - audio_start_pts, audio_end_pts = 0, -1 - audio_timebase_num, audio_timebase_den = 0, 1 - - for test_video, config in test_videos.items(): full_path = os.path.join(VIDEO_DIR, test_video) # pass 1: decode all frames using new decoder - tv_result = torch.ops.video_reader.read_video_from_file( + torch.ops.video_reader.read_video_from_file( full_path, - seek_frame_margin, + SEEK_FRAME_MARGIN, 0, # getPtsOnly 1, # readVideoStream width, height, min_dimension, + max_dimension, video_start_pts, video_end_pts, video_timebase_num, @@ -446,20 +410,63 @@ def test_read_video_from_file(self): audio_timebase_num, audio_timebase_den, ) - # pass 2: decode all frames using av - pyav_result = _decode_frames_by_av_module(full_path) - # check results from TorchVision decoder - self.check_separate_decoding_result(tv_result, config) - # compare decoding results - self.compare_decoding_result(tv_result, pyav_result, config) - def test_read_video_from_file_read_single_stream_only(self): + @pytest.mark.parametrize("test_video,config", test_videos.items()) + def test_read_video_from_file(self, test_video, config): + """ + Test the case when decoder starts with a video file to decode frames. 
+ """ + # video related + width, height, min_dimension, max_dimension = 0, 0, 0, 0 + video_start_pts, video_end_pts = 0, -1 + video_timebase_num, video_timebase_den = 0, 1 + # audio related + samples, channels = 0, 0 + audio_start_pts, audio_end_pts = 0, -1 + audio_timebase_num, audio_timebase_den = 0, 1 + + full_path = os.path.join(VIDEO_DIR, test_video) + + # pass 1: decode all frames using new decoder + tv_result = torch.ops.video_reader.read_video_from_file( + full_path, + SEEK_FRAME_MARGIN, + 0, # getPtsOnly + 1, # readVideoStream + width, + height, + min_dimension, + max_dimension, + video_start_pts, + video_end_pts, + video_timebase_num, + video_timebase_den, + 1, # readAudioStream + samples, + channels, + audio_start_pts, + audio_end_pts, + audio_timebase_num, + audio_timebase_den, + ) + # pass 2: decode all frames using av + pyav_result = _decode_frames_by_av_module(full_path) + # check results from TorchVision decoder + self.check_separate_decoding_result(tv_result, config) + # compare decoding results + self.compare_decoding_result(tv_result, pyav_result, config) + + @pytest.mark.parametrize("test_video,config", test_videos.items()) + @pytest.mark.parametrize("read_video_stream,read_audio_stream", [(1, 0), (0, 1)]) + def test_read_video_from_file_read_single_stream_only( + self, test_video, config, read_video_stream, read_audio_stream + ): """ Test the case when decoder starts with a video file to decode frames, and only reads video stream and ignores audio stream """ # video related - width, height, min_dimension = 0, 0, 0 + width, height, min_dimension, max_dimension = 0, 0, 0, 0 video_start_pts, video_end_pts = 0, -1 video_timebase_num, video_timebase_den = 0, 1 # audio related @@ -467,52 +474,62 @@ def test_read_video_from_file_read_single_stream_only(self): audio_start_pts, audio_end_pts = 0, -1 audio_timebase_num, audio_timebase_den = 0, 1 - for test_video, config in test_videos.items(): - full_path = os.path.join(VIDEO_DIR, test_video) - for readVideoStream, readAudioStream in [(1, 0), (0, 1)]: - # decode all frames using new decoder - tv_result = torch.ops.video_reader.read_video_from_file( - full_path, - seek_frame_margin, - 0, # getPtsOnly - readVideoStream, - width, - height, - min_dimension, - video_start_pts, - video_end_pts, - video_timebase_num, - video_timebase_den, - readAudioStream, - samples, - channels, - audio_start_pts, - audio_end_pts, - audio_timebase_num, - audio_timebase_den, - ) - - vframes, vframe_pts, vtimebase, vfps, vduration, aframes, aframe_pts, \ - atimebase, asample_rate, aduration = tv_result - - self.assertEqual(vframes.numel() > 0, readVideoStream) - self.assertEqual(vframe_pts.numel() > 0, readVideoStream) - self.assertEqual(vtimebase.numel() > 0, readVideoStream) - self.assertEqual(vfps.numel() > 0, readVideoStream) - - expect_audio_data = readAudioStream == 1 and config.audio_sample_rate is not None - self.assertEqual(aframes.numel() > 0, expect_audio_data) - self.assertEqual(aframe_pts.numel() > 0, expect_audio_data) - self.assertEqual(atimebase.numel() > 0, expect_audio_data) - self.assertEqual(asample_rate.numel() > 0, expect_audio_data) - - def test_read_video_from_file_rescale_min_dimension(self): + full_path = os.path.join(VIDEO_DIR, test_video) + # decode all frames using new decoder + tv_result = torch.ops.video_reader.read_video_from_file( + full_path, + SEEK_FRAME_MARGIN, + 0, # getPtsOnly + read_video_stream, + width, + height, + min_dimension, + max_dimension, + video_start_pts, + video_end_pts, + video_timebase_num, + 
video_timebase_den,
+            read_audio_stream,
+            samples,
+            channels,
+            audio_start_pts,
+            audio_end_pts,
+            audio_timebase_num,
+            audio_timebase_den,
+        )
+
+        (
+            vframes,
+            vframe_pts,
+            vtimebase,
+            vfps,
+            vduration,
+            aframes,
+            aframe_pts,
+            atimebase,
+            asample_rate,
+            aduration,
+        ) = tv_result
+
+        assert (vframes.numel() > 0) is bool(read_video_stream)
+        assert (vframe_pts.numel() > 0) is bool(read_video_stream)
+        assert (vtimebase.numel() > 0) is bool(read_video_stream)
+        assert (vfps.numel() > 0) is bool(read_video_stream)
+
+        expect_audio_data = read_audio_stream == 1 and config.audio_sample_rate is not None
+        assert (aframes.numel() > 0) is bool(expect_audio_data)
+        assert (aframe_pts.numel() > 0) is bool(expect_audio_data)
+        assert (atimebase.numel() > 0) is bool(expect_audio_data)
+        assert (asample_rate.numel() > 0) is bool(expect_audio_data)
+
+    @pytest.mark.parametrize("test_video", test_videos.keys())
+    def test_read_video_from_file_rescale_min_dimension(self, test_video):
         """
         Test the case when decoder starts with a video file to decode frames, and
         video min dimension between height and width is set.
         """
         # video related
-        width, height, min_dimension = 0, 0, 128
+        width, height, min_dimension, max_dimension = 0, 0, 128, 0
         video_start_pts, video_end_pts = 0, -1
         video_timebase_num, video_timebase_den = 0, 1
         # audio related
@@ -520,38 +537,79 @@
         audio_start_pts, audio_end_pts = 0, -1
         audio_timebase_num, audio_timebase_den = 0, 1

-        for test_video, _config in test_videos.items():
-            full_path = os.path.join(VIDEO_DIR, test_video)
+        full_path = os.path.join(VIDEO_DIR, test_video)
+
+        tv_result = torch.ops.video_reader.read_video_from_file(
+            full_path,
+            SEEK_FRAME_MARGIN,
+            0,  # getPtsOnly
+            1,  # readVideoStream
+            width,
+            height,
+            min_dimension,
+            max_dimension,
+            video_start_pts,
+            video_end_pts,
+            video_timebase_num,
+            video_timebase_den,
+            1,  # readAudioStream
+            samples,
+            channels,
+            audio_start_pts,
+            audio_end_pts,
+            audio_timebase_num,
+            audio_timebase_den,
+        )
+        assert min_dimension == min(tv_result[0].size(1), tv_result[0].size(2))

-            tv_result = torch.ops.video_reader.read_video_from_file(
-                full_path,
-                seek_frame_margin,
-                0,  # getPtsOnly
-                1,  # readVideoStream
-                width,
-                height,
-                min_dimension,
-                video_start_pts,
-                video_end_pts,
-                video_timebase_num,
-                video_timebase_den,
-                1,  # readAudioStream
-                samples,
-                channels,
-                audio_start_pts,
-                audio_end_pts,
-                audio_timebase_num,
-                audio_timebase_den,
-            )
-            self.assertEqual(min_dimension, min(tv_result[0].size(1), tv_result[0].size(2)))
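+    # min_dimension and max_dimension ask the decoder to rescale the decoded
+    # frames so that the smaller (resp. larger) of height and width matches
+    # the requested value, with 0 meaning "no constraint"; the rescale tests
+    # below assert exactly that on the returned frame sizes, not the aspect
+    # ratio.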
+ """ + # video related + width, height, min_dimension, max_dimension = 0, 0, 0, 85 + video_start_pts, video_end_pts = 0, -1 + video_timebase_num, video_timebase_den = 0, 1 + # audio related + samples, channels = 0, 0 + audio_start_pts, audio_end_pts = 0, -1 + audio_timebase_num, audio_timebase_den = 0, 1 - def test_read_video_from_file_rescale_width(self): + full_path = os.path.join(VIDEO_DIR, test_video) + + tv_result = torch.ops.video_reader.read_video_from_file( + full_path, + SEEK_FRAME_MARGIN, + 0, # getPtsOnly + 1, # readVideoStream + width, + height, + min_dimension, + max_dimension, + video_start_pts, + video_end_pts, + video_timebase_num, + video_timebase_den, + 1, # readAudioStream + samples, + channels, + audio_start_pts, + audio_end_pts, + audio_timebase_num, + audio_timebase_den, + ) + assert max_dimension == max(tv_result[0].size(1), tv_result[0].size(2)) + + @pytest.mark.parametrize("test_video", test_videos.keys()) + def test_read_video_from_file_rescale_both_min_max_dimension(self, test_video): """ Test the case when decoder starts with a video file to decode frames, and - video width is set. + video min dimension between height and width is set. """ # video related - width, height, min_dimension = 256, 0, 0 + width, height, min_dimension, max_dimension = 0, 0, 64, 85 video_start_pts, video_end_pts = 0, -1 video_timebase_num, video_timebase_den = 0, 1 # audio related @@ -559,38 +617,80 @@ def test_read_video_from_file_rescale_width(self): audio_start_pts, audio_end_pts = 0, -1 audio_timebase_num, audio_timebase_den = 0, 1 - for test_video, _config in test_videos.items(): - full_path = os.path.join(VIDEO_DIR, test_video) + full_path = os.path.join(VIDEO_DIR, test_video) + + tv_result = torch.ops.video_reader.read_video_from_file( + full_path, + SEEK_FRAME_MARGIN, + 0, # getPtsOnly + 1, # readVideoStream + width, + height, + min_dimension, + max_dimension, + video_start_pts, + video_end_pts, + video_timebase_num, + video_timebase_den, + 1, # readAudioStream + samples, + channels, + audio_start_pts, + audio_end_pts, + audio_timebase_num, + audio_timebase_den, + ) + assert min_dimension == min(tv_result[0].size(1), tv_result[0].size(2)) + assert max_dimension == max(tv_result[0].size(1), tv_result[0].size(2)) - tv_result = torch.ops.video_reader.read_video_from_file( - full_path, - seek_frame_margin, - 0, # getPtsOnly - 1, # readVideoStream - width, - height, - min_dimension, - video_start_pts, - video_end_pts, - video_timebase_num, - video_timebase_den, - 1, # readAudioStream - samples, - channels, - audio_start_pts, - audio_end_pts, - audio_timebase_num, - audio_timebase_den, - ) - self.assertEqual(tv_result[0].size(2), width) + @pytest.mark.parametrize("test_video", test_videos.keys()) + def test_read_video_from_file_rescale_width(self, test_video): + """ + Test the case when decoder starts with a video file to decode frames, and + video width is set. 
+ """ + # video related + width, height, min_dimension, max_dimension = 256, 0, 0, 0 + video_start_pts, video_end_pts = 0, -1 + video_timebase_num, video_timebase_den = 0, 1 + # audio related + samples, channels = 0, 0 + audio_start_pts, audio_end_pts = 0, -1 + audio_timebase_num, audio_timebase_den = 0, 1 + + full_path = os.path.join(VIDEO_DIR, test_video) + + tv_result = torch.ops.video_reader.read_video_from_file( + full_path, + SEEK_FRAME_MARGIN, + 0, # getPtsOnly + 1, # readVideoStream + width, + height, + min_dimension, + max_dimension, + video_start_pts, + video_end_pts, + video_timebase_num, + video_timebase_den, + 1, # readAudioStream + samples, + channels, + audio_start_pts, + audio_end_pts, + audio_timebase_num, + audio_timebase_den, + ) + assert tv_result[0].size(2) == width - def test_read_video_from_file_rescale_height(self): + @pytest.mark.parametrize("test_video", test_videos.keys()) + def test_read_video_from_file_rescale_height(self, test_video): """ Test the case when decoder starts with a video file to decode frames, and video height is set. """ # video related - width, height, min_dimension = 0, 224, 0 + width, height, min_dimension, max_dimension = 0, 224, 0, 0 video_start_pts, video_end_pts = 0, -1 video_timebase_num, video_timebase_den = 0, 1 # audio related @@ -598,38 +698,39 @@ def test_read_video_from_file_rescale_height(self): audio_start_pts, audio_end_pts = 0, -1 audio_timebase_num, audio_timebase_den = 0, 1 - for test_video, _config in test_videos.items(): - full_path = os.path.join(VIDEO_DIR, test_video) - - tv_result = torch.ops.video_reader.read_video_from_file( - full_path, - seek_frame_margin, - 0, # getPtsOnly - 1, # readVideoStream - width, - height, - min_dimension, - video_start_pts, - video_end_pts, - video_timebase_num, - video_timebase_den, - 1, # readAudioStream - samples, - channels, - audio_start_pts, - audio_end_pts, - audio_timebase_num, - audio_timebase_den, - ) - self.assertEqual(tv_result[0].size(1), height) + full_path = os.path.join(VIDEO_DIR, test_video) + + tv_result = torch.ops.video_reader.read_video_from_file( + full_path, + SEEK_FRAME_MARGIN, + 0, # getPtsOnly + 1, # readVideoStream + width, + height, + min_dimension, + max_dimension, + video_start_pts, + video_end_pts, + video_timebase_num, + video_timebase_den, + 1, # readAudioStream + samples, + channels, + audio_start_pts, + audio_end_pts, + audio_timebase_num, + audio_timebase_den, + ) + assert tv_result[0].size(1) == height - def test_read_video_from_file_rescale_width_and_height(self): + @pytest.mark.parametrize("test_video", test_videos.keys()) + def test_read_video_from_file_rescale_width_and_height(self, test_video): """ Test the case when decoder starts with a video file to decode frames, and both video height and width are set. 
""" # video related - width, height, min_dimension = 320, 240, 0 + width, height, min_dimension, max_dimension = 320, 240, 0, 0 video_start_pts, video_end_pts = 0, -1 video_timebase_num, video_timebase_den = 0, 1 # audio related @@ -637,93 +738,97 @@ def test_read_video_from_file_rescale_width_and_height(self): audio_start_pts, audio_end_pts = 0, -1 audio_timebase_num, audio_timebase_den = 0, 1 - for test_video, _config in test_videos.items(): - full_path = os.path.join(VIDEO_DIR, test_video) - - tv_result = torch.ops.video_reader.read_video_from_file( - full_path, - seek_frame_margin, - 0, # getPtsOnly - 1, # readVideoStream - width, - height, - min_dimension, - video_start_pts, - video_end_pts, - video_timebase_num, - video_timebase_den, - 1, # readAudioStream - samples, - channels, - audio_start_pts, - audio_end_pts, - audio_timebase_num, - audio_timebase_den, - ) - self.assertEqual(tv_result[0].size(1), height) - self.assertEqual(tv_result[0].size(2), width) + full_path = os.path.join(VIDEO_DIR, test_video) + + tv_result = torch.ops.video_reader.read_video_from_file( + full_path, + SEEK_FRAME_MARGIN, + 0, # getPtsOnly + 1, # readVideoStream + width, + height, + min_dimension, + max_dimension, + video_start_pts, + video_end_pts, + video_timebase_num, + video_timebase_den, + 1, # readAudioStream + samples, + channels, + audio_start_pts, + audio_end_pts, + audio_timebase_num, + audio_timebase_den, + ) + assert tv_result[0].size(1) == height + assert tv_result[0].size(2) == width - def test_read_video_from_file_audio_resampling(self): + @pytest.mark.parametrize("test_video", test_videos.keys()) + @pytest.mark.parametrize("samples", [9600, 96000]) + def test_read_video_from_file_audio_resampling(self, test_video, samples): """ Test the case when decoder starts with a video file to decode frames, and audio waveform are resampled """ + # video related + width, height, min_dimension, max_dimension = 0, 0, 0, 0 + video_start_pts, video_end_pts = 0, -1 + video_timebase_num, video_timebase_den = 0, 1 + # audio related + channels = 0 + audio_start_pts, audio_end_pts = 0, -1 + audio_timebase_num, audio_timebase_den = 0, 1 - for samples in [ - 9600, # downsampling - 96000, # upsampling - ]: - # video related - width, height, min_dimension = 0, 0, 0 - video_start_pts, video_end_pts = 0, -1 - video_timebase_num, video_timebase_den = 0, 1 - # audio related - channels = 0 - audio_start_pts, audio_end_pts = 0, -1 - audio_timebase_num, audio_timebase_den = 0, 1 - - for test_video, _config in test_videos.items(): - full_path = os.path.join(VIDEO_DIR, test_video) - - tv_result = torch.ops.video_reader.read_video_from_file( - full_path, - seek_frame_margin, - 0, # getPtsOnly - 1, # readVideoStream - width, - height, - min_dimension, - video_start_pts, - video_end_pts, - video_timebase_num, - video_timebase_den, - 1, # readAudioStream - samples, - channels, - audio_start_pts, - audio_end_pts, - audio_timebase_num, - audio_timebase_den, - ) - vframes, vframe_pts, vtimebase, vfps, vduration, aframes, aframe_pts, \ - atimebase, asample_rate, aduration = tv_result - if aframes.numel() > 0: - self.assertEqual(samples, asample_rate.item()) - self.assertEqual(1, aframes.size(1)) - # when audio stream is found - duration = float(aframe_pts[-1]) * float(atimebase[0]) / float(atimebase[1]) - self.assertAlmostEqual( - aframes.size(0), - int(duration * asample_rate.item()), - delta=0.1 * asample_rate.item(), - ) - - def test_compare_read_video_from_memory_and_file(self): + full_path = os.path.join(VIDEO_DIR, test_video) 
+ + tv_result = torch.ops.video_reader.read_video_from_file( + full_path, + SEEK_FRAME_MARGIN, + 0, # getPtsOnly + 1, # readVideoStream + width, + height, + min_dimension, + max_dimension, + video_start_pts, + video_end_pts, + video_timebase_num, + video_timebase_den, + 1, # readAudioStream + samples, + channels, + audio_start_pts, + audio_end_pts, + audio_timebase_num, + audio_timebase_den, + ) + ( + vframes, + vframe_pts, + vtimebase, + vfps, + vduration, + aframes, + aframe_pts, + atimebase, + asample_rate, + aduration, + ) = tv_result + if aframes.numel() > 0: + assert samples == asample_rate.item() + assert 1 == aframes.size(1) + # when audio stream is found + duration = float(aframe_pts[-1]) * float(atimebase[0]) / float(atimebase[1]) + assert aframes.size(0) == approx(int(duration * asample_rate.item()), abs=0.1 * asample_rate.item()) + + @pytest.mark.parametrize("test_video,config", test_videos.items()) + def test_compare_read_video_from_memory_and_file(self, test_video, config): """ Test the case when video is already in memory, and decoder reads data in memory """ # video related - width, height, min_dimension = 0, 0, 0 + width, height, min_dimension, max_dimension = 0, 0, 0, 0 video_start_pts, video_end_pts = 0, -1 video_timebase_num, video_timebase_den = 0, 1 # audio related @@ -731,63 +836,65 @@ def test_compare_read_video_from_memory_and_file(self): audio_start_pts, audio_end_pts = 0, -1 audio_timebase_num, audio_timebase_den = 0, 1 - for test_video, config in test_videos.items(): - full_path, video_tensor = _get_video_tensor(VIDEO_DIR, test_video) - - # pass 1: decode all frames using cpp decoder - tv_result_memory = torch.ops.video_reader.read_video_from_memory( - video_tensor, - seek_frame_margin, - 0, # getPtsOnly - 1, # readVideoStream - width, - height, - min_dimension, - video_start_pts, - video_end_pts, - video_timebase_num, - video_timebase_den, - 1, # readAudioStream - samples, - channels, - audio_start_pts, - audio_end_pts, - audio_timebase_num, - audio_timebase_den, - ) - self.check_separate_decoding_result(tv_result_memory, config) - # pass 2: decode all frames from file - tv_result_file = torch.ops.video_reader.read_video_from_file( - full_path, - seek_frame_margin, - 0, # getPtsOnly - 1, # readVideoStream - width, - height, - min_dimension, - video_start_pts, - video_end_pts, - video_timebase_num, - video_timebase_den, - 1, # readAudioStream - samples, - channels, - audio_start_pts, - audio_end_pts, - audio_timebase_num, - audio_timebase_den, - ) + full_path, video_tensor = _get_video_tensor(VIDEO_DIR, test_video) + + # pass 1: decode all frames using cpp decoder + tv_result_memory = torch.ops.video_reader.read_video_from_memory( + video_tensor, + SEEK_FRAME_MARGIN, + 0, # getPtsOnly + 1, # readVideoStream + width, + height, + min_dimension, + max_dimension, + video_start_pts, + video_end_pts, + video_timebase_num, + video_timebase_den, + 1, # readAudioStream + samples, + channels, + audio_start_pts, + audio_end_pts, + audio_timebase_num, + audio_timebase_den, + ) + self.check_separate_decoding_result(tv_result_memory, config) + # pass 2: decode all frames from file + tv_result_file = torch.ops.video_reader.read_video_from_file( + full_path, + SEEK_FRAME_MARGIN, + 0, # getPtsOnly + 1, # readVideoStream + width, + height, + min_dimension, + max_dimension, + video_start_pts, + video_end_pts, + video_timebase_num, + video_timebase_den, + 1, # readAudioStream + samples, + channels, + audio_start_pts, + audio_end_pts, + audio_timebase_num, + audio_timebase_den, + ) 
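+        # compare_decoding_result (above) tolerates a mean absolute frame
+        # delta of up to 8 on the uint8 scale and a mean pts delta of up to 1,
+        # so decoding the same bytes from memory and from file is expected to
+        # agree well within those bounds.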
- self.check_separate_decoding_result(tv_result_file, config) - # finally, compare results decoded from memory and file - self.compare_decoding_result(tv_result_memory, tv_result_file) + self.check_separate_decoding_result(tv_result_file, config) + # finally, compare results decoded from memory and file + self.compare_decoding_result(tv_result_memory, tv_result_file) - def test_read_video_from_memory(self): + @pytest.mark.parametrize("test_video,config", test_videos.items()) + def test_read_video_from_memory(self, test_video, config): """ Test the case when video is already in memory, and decoder reads data in memory """ # video related - width, height, min_dimension = 0, 0, 0 + width, height, min_dimension, max_dimension = 0, 0, 0, 0 video_start_pts, video_end_pts = 0, -1 video_timebase_num, video_timebase_den = 0, 1 # audio related @@ -795,44 +902,45 @@ def test_read_video_from_memory(self): audio_start_pts, audio_end_pts = 0, -1 audio_timebase_num, audio_timebase_den = 0, 1 - for test_video, config in test_videos.items(): - full_path, video_tensor = _get_video_tensor(VIDEO_DIR, test_video) - - # pass 1: decode all frames using cpp decoder - tv_result = torch.ops.video_reader.read_video_from_memory( - video_tensor, - seek_frame_margin, - 0, # getPtsOnly - 1, # readVideoStream - width, - height, - min_dimension, - video_start_pts, - video_end_pts, - video_timebase_num, - video_timebase_den, - 1, # readAudioStream - samples, - channels, - audio_start_pts, - audio_end_pts, - audio_timebase_num, - audio_timebase_den, - ) - # pass 2: decode all frames using av - pyav_result = _decode_frames_by_av_module(full_path) + full_path, video_tensor = _get_video_tensor(VIDEO_DIR, test_video) + + # pass 1: decode all frames using cpp decoder + tv_result = torch.ops.video_reader.read_video_from_memory( + video_tensor, + SEEK_FRAME_MARGIN, + 0, # getPtsOnly + 1, # readVideoStream + width, + height, + min_dimension, + max_dimension, + video_start_pts, + video_end_pts, + video_timebase_num, + video_timebase_den, + 1, # readAudioStream + samples, + channels, + audio_start_pts, + audio_end_pts, + audio_timebase_num, + audio_timebase_den, + ) + # pass 2: decode all frames using av + pyav_result = _decode_frames_by_av_module(full_path) - self.check_separate_decoding_result(tv_result, config) - self.compare_decoding_result(tv_result, pyav_result, config) + self.check_separate_decoding_result(tv_result, config) + self.compare_decoding_result(tv_result, pyav_result, config) - def test_read_video_from_memory_get_pts_only(self): + @pytest.mark.parametrize("test_video,config", test_videos.items()) + def test_read_video_from_memory_get_pts_only(self, test_video, config): """ Test the case when video is already in memory, and decoder reads data in memory. 
Compare frame pts between decoding for pts only and full decoding for both pts and frame data """ # video related - width, height, min_dimension = 0, 0, 0 + width, height, min_dimension, max_dimension = 0, 0, 0, 0 video_start_pts, video_end_pts = 0, -1 video_timebase_num, video_timebase_den = 0, 1 # audio related @@ -840,210 +948,307 @@ def test_read_video_from_memory_get_pts_only(self): audio_start_pts, audio_end_pts = 0, -1 audio_timebase_num, audio_timebase_den = 0, 1 - for test_video, config in test_videos.items(): - full_path, video_tensor = _get_video_tensor(VIDEO_DIR, test_video) - - # pass 1: decode all frames using cpp decoder - tv_result = torch.ops.video_reader.read_video_from_memory( - video_tensor, - seek_frame_margin, - 0, # getPtsOnly - 1, # readVideoStream - width, - height, - min_dimension, - video_start_pts, - video_end_pts, - video_timebase_num, - video_timebase_den, - 1, # readAudioStream - samples, - channels, - audio_start_pts, - audio_end_pts, - audio_timebase_num, - audio_timebase_den, - ) - self.assertAlmostEqual(config.video_fps, tv_result[3].item(), delta=0.01) - - # pass 2: decode all frames to get PTS only using cpp decoder - tv_result_pts_only = torch.ops.video_reader.read_video_from_memory( - video_tensor, - seek_frame_margin, - 1, # getPtsOnly - 1, # readVideoStream - width, - height, - min_dimension, - video_start_pts, - video_end_pts, - video_timebase_num, - video_timebase_den, - 1, # readAudioStream - samples, - channels, - audio_start_pts, - audio_end_pts, - audio_timebase_num, - audio_timebase_den, - ) + _, video_tensor = _get_video_tensor(VIDEO_DIR, test_video) + + # pass 1: decode all frames using cpp decoder + tv_result = torch.ops.video_reader.read_video_from_memory( + video_tensor, + SEEK_FRAME_MARGIN, + 0, # getPtsOnly + 1, # readVideoStream + width, + height, + min_dimension, + max_dimension, + video_start_pts, + video_end_pts, + video_timebase_num, + video_timebase_den, + 1, # readAudioStream + samples, + channels, + audio_start_pts, + audio_end_pts, + audio_timebase_num, + audio_timebase_den, + ) + assert abs(config.video_fps - tv_result[3].item()) < 0.01 + + # pass 2: decode all frames to get PTS only using cpp decoder + tv_result_pts_only = torch.ops.video_reader.read_video_from_memory( + video_tensor, + SEEK_FRAME_MARGIN, + 1, # getPtsOnly + 1, # readVideoStream + width, + height, + min_dimension, + max_dimension, + video_start_pts, + video_end_pts, + video_timebase_num, + video_timebase_den, + 1, # readAudioStream + samples, + channels, + audio_start_pts, + audio_end_pts, + audio_timebase_num, + audio_timebase_den, + ) - self.assertEqual(tv_result_pts_only[0].numel(), 0) - self.assertEqual(tv_result_pts_only[5].numel(), 0) - self.compare_decoding_result(tv_result, tv_result_pts_only) + assert not tv_result_pts_only[0].numel() + assert not tv_result_pts_only[5].numel() + self.compare_decoding_result(tv_result, tv_result_pts_only) - def test_read_video_in_range_from_memory(self): + @pytest.mark.parametrize("test_video,config", test_videos.items()) + @pytest.mark.parametrize("num_frames", [4, 8, 16, 32, 64, 128]) + def test_read_video_in_range_from_memory(self, test_video, config, num_frames): """ Test the case when video is already in memory, and decoder reads data in memory. 
In addition, decoder takes meaningful start- and end PTS as input, and decode frames within that interval """ - for test_video, config in test_videos.items(): - full_path, video_tensor = _get_video_tensor(VIDEO_DIR, test_video) - # video related - width, height, min_dimension = 0, 0, 0 - video_start_pts, video_end_pts = 0, -1 - video_timebase_num, video_timebase_den = 0, 1 - # audio related - samples, channels = 0, 0 - audio_start_pts, audio_end_pts = 0, -1 - audio_timebase_num, audio_timebase_den = 0, 1 - # pass 1: decode all frames using new decoder - tv_result = torch.ops.video_reader.read_video_from_memory( - video_tensor, - seek_frame_margin, - 0, # getPtsOnly - 1, # readVideoStream - width, - height, - min_dimension, - video_start_pts, - video_end_pts, - video_timebase_num, - video_timebase_den, - 1, # readAudioStream - samples, - channels, - audio_start_pts, - audio_end_pts, - audio_timebase_num, - audio_timebase_den, + full_path, video_tensor = _get_video_tensor(VIDEO_DIR, test_video) + # video related + width, height, min_dimension, max_dimension = 0, 0, 0, 0 + video_start_pts, video_end_pts = 0, -1 + video_timebase_num, video_timebase_den = 0, 1 + # audio related + samples, channels = 0, 0 + audio_start_pts, audio_end_pts = 0, -1 + audio_timebase_num, audio_timebase_den = 0, 1 + # pass 1: decode all frames using new decoder + tv_result = torch.ops.video_reader.read_video_from_memory( + video_tensor, + SEEK_FRAME_MARGIN, + 0, # getPtsOnly + 1, # readVideoStream + width, + height, + min_dimension, + max_dimension, + video_start_pts, + video_end_pts, + video_timebase_num, + video_timebase_den, + 1, # readAudioStream + samples, + channels, + audio_start_pts, + audio_end_pts, + audio_timebase_num, + audio_timebase_den, + ) + ( + vframes, + vframe_pts, + vtimebase, + vfps, + vduration, + aframes, + aframe_pts, + atimebase, + asample_rate, + aduration, + ) = tv_result + assert abs(config.video_fps - vfps.item()) < 0.01 + + start_pts_ind_max = vframe_pts.size(0) - num_frames + if start_pts_ind_max <= 0: + return + # randomly pick start pts + start_pts_ind = randint(0, start_pts_ind_max) + end_pts_ind = start_pts_ind + num_frames - 1 + video_start_pts = vframe_pts[start_pts_ind] + video_end_pts = vframe_pts[end_pts_ind] + + video_timebase_num, video_timebase_den = vtimebase[0], vtimebase[1] + if len(atimebase) > 0: + # when audio stream is available + audio_timebase_num, audio_timebase_den = atimebase[0], atimebase[1] + audio_start_pts = _pts_convert( + video_start_pts.item(), + Fraction(video_timebase_num.item(), video_timebase_den.item()), + Fraction(audio_timebase_num.item(), audio_timebase_den.item()), + math.floor, + ) + audio_end_pts = _pts_convert( + video_end_pts.item(), + Fraction(video_timebase_num.item(), video_timebase_den.item()), + Fraction(audio_timebase_num.item(), audio_timebase_den.item()), + math.ceil, ) - vframes, vframe_pts, vtimebase, vfps, vduration, aframes, aframe_pts, \ - atimebase, asample_rate, aduration = tv_result - self.assertAlmostEqual(config.video_fps, vfps.item(), delta=0.01) - - for num_frames in [4, 8, 16, 32, 64, 128]: - start_pts_ind_max = vframe_pts.size(0) - num_frames - if start_pts_ind_max <= 0: - continue - # randomly pick start pts - start_pts_ind = randint(0, start_pts_ind_max) - end_pts_ind = start_pts_ind + num_frames - 1 - video_start_pts = vframe_pts[start_pts_ind] - video_end_pts = vframe_pts[end_pts_ind] - - video_timebase_num, video_timebase_den = vtimebase[0], vtimebase[1] - if len(atimebase) > 0: - # when audio stream is available - 
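Note on the timebase math used on both sides of this hunk: `_pts_convert` (defined earlier in this test module) rescales a presentation timestamp from one stream's timebase to another, and the call sites pair it with math.floor for start pts and math.ceil for end pts so the converted range never shrinks. A minimal sketch of that conversion, assuming the helper does nothing more than the rescale:

import math
from fractions import Fraction

def pts_convert(pts: int, timebase_from: Fraction, timebase_to: Fraction, round_func=math.floor) -> int:
    # pts * timebase is an instant in seconds; keep it exact with Fraction,
    # then re-express the same instant in the target timebase and round
    seconds = Fraction(pts, 1) * timebase_from
    return round_func(seconds / timebase_to)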
audio_timebase_num, audio_timebase_den = atimebase[0], atimebase[1] - audio_start_pts = _pts_convert( - video_start_pts.item(), - Fraction(video_timebase_num.item(), video_timebase_den.item()), - Fraction(audio_timebase_num.item(), audio_timebase_den.item()), - math.floor, - ) - audio_end_pts = _pts_convert( - video_end_pts.item(), - Fraction(video_timebase_num.item(), video_timebase_den.item()), - Fraction(audio_timebase_num.item(), audio_timebase_den.item()), - math.ceil, - ) - - # pass 2: decode frames in the randomly generated range - tv_result = torch.ops.video_reader.read_video_from_memory( - video_tensor, - seek_frame_margin, - 0, # getPtsOnly - 1, # readVideoStream - width, - height, - min_dimension, - video_start_pts, - video_end_pts, - video_timebase_num, - video_timebase_den, - 1, # readAudioStream - samples, - channels, - audio_start_pts, - audio_end_pts, - audio_timebase_num, - audio_timebase_den, - ) - - # pass 3: decode frames in range using PyAv - video_timebase_av, audio_timebase_av = _get_timebase_by_av_module(full_path) - - video_start_pts_av = _pts_convert( - video_start_pts.item(), - Fraction(video_timebase_num.item(), video_timebase_den.item()), - Fraction(video_timebase_av.numerator, video_timebase_av.denominator), - math.floor, - ) - video_end_pts_av = _pts_convert( - video_end_pts.item(), - Fraction(video_timebase_num.item(), video_timebase_den.item()), - Fraction(video_timebase_av.numerator, video_timebase_av.denominator), - math.ceil, - ) - if audio_timebase_av: - audio_start_pts = _pts_convert( - video_start_pts.item(), - Fraction(video_timebase_num.item(), video_timebase_den.item()), - Fraction(audio_timebase_av.numerator, audio_timebase_av.denominator), - math.floor, - ) - audio_end_pts = _pts_convert( - video_end_pts.item(), - Fraction(video_timebase_num.item(), video_timebase_den.item()), - Fraction(audio_timebase_av.numerator, audio_timebase_av.denominator), - math.ceil, - ) - - pyav_result = _decode_frames_by_av_module( - full_path, - video_start_pts_av, - video_end_pts_av, - audio_start_pts, - audio_end_pts, - ) - - self.assertEqual(tv_result[0].size(0), num_frames) - if pyav_result.vframes.size(0) == num_frames: - # if PyAv decodes a different number of video frames, skip - # comparing the decoding results between Torchvision video reader - # and PyAv - self.compare_decoding_result(tv_result, pyav_result, config) - - def test_probe_video_from_file(self): + + # pass 2: decode frames in the randomly generated range + tv_result = torch.ops.video_reader.read_video_from_memory( + video_tensor, + SEEK_FRAME_MARGIN, + 0, # getPtsOnly + 1, # readVideoStream + width, + height, + min_dimension, + max_dimension, + video_start_pts, + video_end_pts, + video_timebase_num, + video_timebase_den, + 1, # readAudioStream + samples, + channels, + audio_start_pts, + audio_end_pts, + audio_timebase_num, + audio_timebase_den, + ) + + # pass 3: decode frames in range using PyAv + video_timebase_av, audio_timebase_av = _get_timebase_by_av_module(full_path) + + video_start_pts_av = _pts_convert( + video_start_pts.item(), + Fraction(video_timebase_num.item(), video_timebase_den.item()), + Fraction(video_timebase_av.numerator, video_timebase_av.denominator), + math.floor, + ) + video_end_pts_av = _pts_convert( + video_end_pts.item(), + Fraction(video_timebase_num.item(), video_timebase_den.item()), + Fraction(video_timebase_av.numerator, video_timebase_av.denominator), + math.ceil, + ) + if audio_timebase_av: + audio_start_pts = _pts_convert( + video_start_pts.item(), + 
Fraction(video_timebase_num.item(), video_timebase_den.item()), + Fraction(audio_timebase_av.numerator, audio_timebase_av.denominator), + math.floor, + ) + audio_end_pts = _pts_convert( + video_end_pts.item(), + Fraction(video_timebase_num.item(), video_timebase_den.item()), + Fraction(audio_timebase_av.numerator, audio_timebase_av.denominator), + math.ceil, + ) + + pyav_result = _decode_frames_by_av_module( + full_path, + video_start_pts_av, + video_end_pts_av, + audio_start_pts, + audio_end_pts, + ) + + assert tv_result[0].size(0) == num_frames + if pyav_result.vframes.size(0) == num_frames: + # if PyAv decodes a different number of video frames, skip + # comparing the decoding results between Torchvision video reader + # and PyAv + self.compare_decoding_result(tv_result, pyav_result, config) + + @pytest.mark.parametrize("test_video,config", test_videos.items()) + def test_probe_video_from_file(self, test_video, config): """ Test the case when decoder probes a video file """ - for test_video, config in test_videos.items(): - full_path = os.path.join(VIDEO_DIR, test_video) - probe_result = torch.ops.video_reader.probe_video_from_file(full_path) - self.check_probe_result(probe_result, config) + full_path = os.path.join(VIDEO_DIR, test_video) + probe_result = torch.ops.video_reader.probe_video_from_file(full_path) + self.check_probe_result(probe_result, config) - def test_probe_video_from_memory(self): + @pytest.mark.parametrize("test_video,config", test_videos.items()) + def test_probe_video_from_memory(self, test_video, config): """ Test the case when decoder probes a video in memory """ - for test_video, config in test_videos.items(): - full_path, video_tensor = _get_video_tensor(VIDEO_DIR, test_video) - probe_result = torch.ops.video_reader.probe_video_from_memory(video_tensor) - self.check_probe_result(probe_result, config) + _, video_tensor = _get_video_tensor(VIDEO_DIR, test_video) + probe_result = torch.ops.video_reader.probe_video_from_memory(video_tensor) + self.check_probe_result(probe_result, config) + + @pytest.mark.parametrize("test_video,config", test_videos.items()) + def test_probe_video_from_memory_script(self, test_video, config): + scripted_fun = torch.jit.script(io._probe_video_from_memory) + assert scripted_fun is not None + _, video_tensor = _get_video_tensor(VIDEO_DIR, test_video) + probe_result = scripted_fun(video_tensor) + self.check_meta_result(probe_result, config) -if __name__ == '__main__': - unittest.main() + @pytest.mark.parametrize("test_video", test_videos.keys()) + def test_read_video_from_memory_scripted(self, test_video): + """ + Test the case when video is already in memory, and decoder reads data in memory + """ + # video related + width, height, min_dimension, max_dimension = 0, 0, 0, 0 + video_start_pts, video_end_pts = 0, -1 + video_timebase_num, video_timebase_den = 0, 1 + # audio related + samples, channels = 0, 0 + audio_start_pts, audio_end_pts = 0, -1 + audio_timebase_num, audio_timebase_den = 0, 1 + + scripted_fun = torch.jit.script(io._read_video_from_memory) + assert scripted_fun is not None + + _, video_tensor = _get_video_tensor(VIDEO_DIR, test_video) + + # decode all frames using cpp decoder + scripted_fun( + video_tensor, + SEEK_FRAME_MARGIN, + 1, # readVideoStream + width, + height, + min_dimension, + max_dimension, + [video_start_pts, video_end_pts], + video_timebase_num, + video_timebase_den, + 1, # readAudioStream + samples, + channels, + [audio_start_pts, audio_end_pts], + audio_timebase_num, + audio_timebase_den, + ) + # FUTURE: 
check value of video / audio frames + + def test_invalid_file(self): + set_video_backend("video_reader") + with pytest.raises(RuntimeError): + io.read_video("foo.mp4") + + set_video_backend("pyav") + with pytest.raises(RuntimeError): + io.read_video("foo.mp4") + + @pytest.mark.parametrize("test_video", test_videos.keys()) + @pytest.mark.parametrize("backend", ["video_reader", "pyav"]) + @pytest.mark.parametrize("start_offset", [0, 500]) + @pytest.mark.parametrize("end_offset", [3000, None]) + def test_audio_present_pts(self, test_video, backend, start_offset, end_offset): + """Test if audio frames are returned with pts unit.""" + full_path = os.path.join(VIDEO_DIR, test_video) + container = av.open(full_path) + if container.streams.audio: + set_video_backend(backend) + _, audio, _ = io.read_video(full_path, start_offset, end_offset, pts_unit="pts") + assert all([dimension > 0 for dimension in audio.shape[:2]]) + + @pytest.mark.parametrize("test_video", test_videos.keys()) + @pytest.mark.parametrize("backend", ["video_reader", "pyav"]) + @pytest.mark.parametrize("start_offset", [0, 0.1]) + @pytest.mark.parametrize("end_offset", [0.3, None]) + def test_audio_present_sec(self, test_video, backend, start_offset, end_offset): + """Test if audio frames are returned with sec unit.""" + full_path = os.path.join(VIDEO_DIR, test_video) + container = av.open(full_path) + if container.streams.audio: + set_video_backend(backend) + _, audio, _ = io.read_video(full_path, start_offset, end_offset, pts_unit="sec") + assert all([dimension > 0 for dimension in audio.shape[:2]]) + + +if __name__ == "__main__": + pytest.main([__file__]) diff --git a/test/test_videoapi.py b/test/test_videoapi.py new file mode 100644 index 00000000000..aabcf6407f7 --- /dev/null +++ b/test/test_videoapi.py @@ -0,0 +1,312 @@ +import collections +import os +import urllib + +import pytest +import torch +import torchvision +from pytest import approx +from torchvision.datasets.utils import download_url +from torchvision.io import _HAS_CPU_VIDEO_DECODER, VideoReader + + +# WARNING: these tests have been skipped forever on the CI because the video ops +# are never properly available. This is bad, but things have been in a terrible +# state for a long time already as we write this comment, and we'll hopefully be +# able to get rid of this all soon. + + +try: + import av + + # Do a version test too + torchvision.io.video._check_av_available() +except ImportError: + av = None + + +VIDEO_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "assets", "videos") + +CheckerConfig = ["duration", "video_fps", "audio_sample_rate"] +GroundTruth = collections.namedtuple("GroundTruth", " ".join(CheckerConfig)) + + +def backends(): + backends_ = ["video_reader"] + if av is not None: + backends_.append("pyav") + return backends_ + + +def fate(name, path="."): + """Download and return a path to a sample from the FFmpeg test suite. 
+    See the `FFmpeg Automated Test Environment <https://www.ffmpeg.org/fate.html>`_
+    """
+
+    file_name = name.split("/")[1]
+    download_url("http://fate.ffmpeg.org/fate-suite/" + name, path, file_name)
+    return os.path.join(path, file_name)
+
+
+test_videos = {
+    "RATRACE_wave_f_nm_np1_fr_goo_37.avi": GroundTruth(duration=2.0, video_fps=30.0, audio_sample_rate=None),
+    "SchoolRulesHowTheyHelpUs_wave_f_nm_np1_ba_med_0.avi": GroundTruth(
+        duration=2.0, video_fps=30.0, audio_sample_rate=None
+    ),
+    "TrumanShow_wave_f_nm_np1_fr_med_26.avi": GroundTruth(duration=2.0, video_fps=30.0, audio_sample_rate=None),
+    "v_SoccerJuggling_g23_c01.avi": GroundTruth(duration=8.0, video_fps=29.97, audio_sample_rate=None),
+    "v_SoccerJuggling_g24_c01.avi": GroundTruth(duration=8.0, video_fps=29.97, audio_sample_rate=None),
+    "R6llTwEh07w.mp4": GroundTruth(duration=10.0, video_fps=30.0, audio_sample_rate=44100),
+    "SOX5yA1l24A.mp4": GroundTruth(duration=11.0, video_fps=29.97, audio_sample_rate=48000),
+    "WUzgd7C1pWA.mp4": GroundTruth(duration=11.0, video_fps=29.97, audio_sample_rate=48000),
+}
+
+
+@pytest.mark.skipif(_HAS_CPU_VIDEO_DECODER is False, reason="Didn't compile with ffmpeg")
+class TestVideoApi:
+    @pytest.mark.skipif(av is None, reason="PyAV unavailable")
+    @pytest.mark.parametrize("test_video", test_videos.keys())
+    @pytest.mark.parametrize("backend", backends())
+    def test_frame_reading(self, test_video, backend):
+        torchvision.set_video_backend(backend)
+        full_path = os.path.join(VIDEO_DIR, test_video)
+        with av.open(full_path) as av_reader:
+            if av_reader.streams.video:
+                av_frames, vr_frames = [], []
+                av_pts, vr_pts = [], []
+                # get av frames
+                for av_frame in av_reader.decode(av_reader.streams.video[0]):
+                    av_frames.append(torch.tensor(av_frame.to_rgb().to_ndarray()).permute(2, 0, 1))
+                    av_pts.append(av_frame.pts * av_frame.time_base)
+
+                # get vr frames
+                video_reader = VideoReader(full_path, "video")
+                for vr_frame in video_reader:
+                    vr_frames.append(vr_frame["data"])
+                    vr_pts.append(vr_frame["pts"])
+
+                # same number of frames
+                assert len(vr_frames) == len(av_frames)
+                assert len(vr_pts) == len(av_pts)
+
+                # compare the frames and pts values
+                for i in range(len(vr_frames)):
+                    assert float(av_pts[i]) == approx(vr_pts[i], abs=0.1)
+
+                    mean_delta = torch.mean(torch.abs(av_frames[i].float() - vr_frames[i].float()))
+                    # on average the difference is very small and caused
+                    # by decoding (around 1%)
+                    # TODO: assess empirically how to set this? at the moment it's 1%
+                    # averaged over all frames
+                    assert mean_delta.item() < 2.55
+
+                del vr_frames, av_frames, vr_pts, av_pts
+
+        # test audio reading compared to PYAV
+        with av.open(full_path) as av_reader:
+            if av_reader.streams.audio:
+                av_frames, vr_frames = [], []
+                av_pts, vr_pts = [], []
+                # get av frames
+                for av_frame in av_reader.decode(av_reader.streams.audio[0]):
+                    av_frames.append(torch.tensor(av_frame.to_ndarray()).permute(1, 0))
+                    av_pts.append(av_frame.pts * av_frame.time_base)
+                av_reader.close()
+
+                # get vr frames
+                video_reader = VideoReader(full_path, "audio")
+                for vr_frame in video_reader:
+                    vr_frames.append(vr_frame["data"])
+                    vr_pts.append(vr_frame["pts"])
+
+                # same number of frames
+                assert len(vr_frames) == len(av_frames)
+                assert len(vr_pts) == len(av_pts)
+
+                # compare the frames and pts values
+                for i in range(len(vr_frames)):
+                    assert float(av_pts[i]) == approx(vr_pts[i], abs=0.1)
+                    max_delta = torch.max(torch.abs(av_frames[i].float() - vr_frames[i].float()))
+                    # we ensure the signal never differs by more than 0.001 in absolute value
+                    assert max_delta.item() < 0.001
+
+    @pytest.mark.parametrize("stream", ["video", "audio"])
+    @pytest.mark.parametrize("test_video", test_videos.keys())
+    @pytest.mark.parametrize("backend", backends())
+    def test_frame_reading_mem_vs_file(self, test_video, stream, backend):
+        torchvision.set_video_backend(backend)
+        full_path = os.path.join(VIDEO_DIR, test_video)
+
+        reader = VideoReader(full_path)
+        reader_md = reader.get_metadata()
+
+        if stream in reader_md:
+            # Test video reading from file vs from memory
+            vr_frames, vr_frames_mem = [], []
+            vr_pts, vr_pts_mem = [], []
+            # get vr frames from file
+            video_reader = VideoReader(full_path, stream)
+            for vr_frame in video_reader:
+                vr_frames.append(vr_frame["data"])
+                vr_pts.append(vr_frame["pts"])
+
+            # get vr frames, read from memory
+            f = open(full_path, "rb")
+            fbytes = f.read()
+            f.close()
+            video_reader_from_mem = VideoReader(fbytes, stream)
+
+            for vr_frame_from_mem in video_reader_from_mem:
+                vr_frames_mem.append(vr_frame_from_mem["data"])
+                vr_pts_mem.append(vr_frame_from_mem["pts"])
+
+            # same number of frames
+            assert len(vr_frames) == len(vr_frames_mem)
+            assert len(vr_pts) == len(vr_pts_mem)
+
+            # compare the frames and pts values
+            for i in range(len(vr_frames)):
+                assert vr_pts[i] == vr_pts_mem[i]
+                mean_delta = torch.mean(torch.abs(vr_frames[i].float() - vr_frames_mem[i].float()))
+                # on average the difference is very small and caused
+                # by decoding (around 1%)
+                # TODO: assess empirically how to set this? at the moment it's 1%
+                # averaged over all frames
+                assert mean_delta.item() < 2.55
+
+            del vr_frames, vr_pts, vr_frames_mem, vr_pts_mem
+        else:
+            del reader, reader_md
+
+    @pytest.mark.parametrize("test_video,config", test_videos.items())
+    @pytest.mark.parametrize("backend", backends())
+    def test_metadata(self, test_video, config, backend):
+        """
+        Test that the metadata returned via pyav corresponds to the one returned
+        by the new video decoder API
+        """
+        torchvision.set_video_backend(backend)
+        full_path = os.path.join(VIDEO_DIR, test_video)
+        reader = VideoReader(full_path, "video")
+        reader_md = reader.get_metadata()
+        assert config.video_fps == approx(reader_md["video"]["fps"][0], abs=0.0001)
+        assert config.duration == approx(reader_md["video"]["duration"][0], abs=0.5)
+
+    @pytest.mark.parametrize("test_video", test_videos.keys())
+    @pytest.mark.parametrize("backend", backends())
+    def test_seek_start(self, test_video, backend):
+        torchvision.set_video_backend(backend)
+        full_path = os.path.join(VIDEO_DIR, test_video)
+        video_reader = VideoReader(full_path, "video")
+        num_frames = 0
+        for _ in video_reader:
+            num_frames += 1
+
+        # now seek the container to 0 and do it again
+        # Seeking to the start is often imprecise this way,
+        # and it doesn't always land exactly at 0
+        video_reader.seek(0)
+        start_num_frames = 0
+        for _ in video_reader:
+            start_num_frames += 1
+
+        assert start_num_frames == num_frames
+
+        # now seek the container to < 0 to check for unexpected behaviour
+        video_reader.seek(-1)
+        start_num_frames = 0
+        for _ in video_reader:
+            start_num_frames += 1
+
+        assert start_num_frames == num_frames
+
+    @pytest.mark.parametrize("test_video", test_videos.keys())
+    @pytest.mark.parametrize("backend", ["video_reader"])
+    def test_accurateseek_middle(self, test_video, backend):
+        torchvision.set_video_backend(backend)
+        full_path = os.path.join(VIDEO_DIR, test_video)
+        stream = "video"
+        video_reader = VideoReader(full_path, stream)
+        md = video_reader.get_metadata()
+        duration = md[stream]["duration"][0]
+        if duration is not None:
+            num_frames = 0
+            for _ in video_reader:
+                num_frames += 1
+
+            video_reader.seek(duration / 2)
+            middle_num_frames = 0
+            for _ in video_reader:
+                middle_num_frames += 1
+
+            assert middle_num_frames < num_frames
+            assert middle_num_frames == approx(num_frames // 2, abs=1)
+
+            video_reader.seek(duration / 2)
+            frame = next(video_reader)
+            lb = duration / 2 - 1 / md[stream]["fps"][0]
+            ub = duration / 2 + 1 / md[stream]["fps"][0]
+            assert (lb <= frame["pts"]) and (ub >= frame["pts"])
+
+    def test_fate_suite(self):
+        # TODO: remove the try-except statement once the connectivity issues are resolved
+        try:
+            video_path = fate("sub/MovText_capability_tester.mp4", VIDEO_DIR)
+        except (urllib.error.URLError, ConnectionError) as error:
+            pytest.skip(f"Skipping due to connectivity issues: {error}")
+        vr = VideoReader(video_path)
+        metadata = vr.get_metadata()
+
+        assert metadata["subtitles"]["duration"] is not None
+        os.remove(video_path)
+
+    @pytest.mark.skipif(av is None, reason="PyAV unavailable")
+    @pytest.mark.parametrize("test_video,config", test_videos.items())
+    @pytest.mark.parametrize("backend", backends())
+    def test_keyframe_reading(self, test_video, config, backend):
+        torchvision.set_video_backend(backend)
+        full_path = os.path.join(VIDEO_DIR, test_video)
+
+        av_reader = av.open(full_path)
+        # reduce streams to only keyframes
+        av_stream = av_reader.streams.video[0]
+        av_stream.codec_context.skip_frame = "NONKEY"
+
+        av_keyframes = []
+        vr_keyframes = []
+        if 
av_reader.streams.video: + + # get all keyframes using pyav. Then, seek randomly into video reader + # and assert that all the returned values are in AV_KEYFRAMES + + for av_frame in av_reader.decode(av_stream): + av_keyframes.append(float(av_frame.pts * av_frame.time_base)) + + if len(av_keyframes) > 1: + video_reader = VideoReader(full_path, "video") + for i in range(1, len(av_keyframes)): + seek_val = (av_keyframes[i] + av_keyframes[i - 1]) / 2 + data = next(video_reader.seek(seek_val, True)) + vr_keyframes.append(data["pts"]) + + data = next(video_reader.seek(config.duration, True)) + vr_keyframes.append(data["pts"]) + + assert len(av_keyframes) == len(vr_keyframes) + # NOTE: this video gets different keyframe with different + # loaders (0.333 pyav, 0.666 for us) + if test_video != "TrumanShow_wave_f_nm_np1_fr_med_26.avi": + for i in range(len(av_keyframes)): + assert av_keyframes[i] == approx(vr_keyframes[i], rel=0.001) + + def test_src(self): + with pytest.raises(ValueError, match="src cannot be empty"): + VideoReader(src="") + with pytest.raises(ValueError, match="src must be either string"): + VideoReader(src=2) + with pytest.raises(TypeError, match="unexpected keyword argument"): + VideoReader(path="path") + + +if __name__ == "__main__": + pytest.main([__file__]) diff --git a/torchvision/__init__.py b/torchvision/__init__.py index ca155712671..dd1e4ea6e94 100644 --- a/torchvision/__init__.py +++ b/torchvision/__init__.py @@ -1,20 +1,32 @@ +import os import warnings +from modulefinder import Module -from torchvision import models -from torchvision import datasets -from torchvision import ops -from torchvision import transforms -from torchvision import utils -from torchvision import io +import torch -from .extension import _HAS_OPS +# Don't re-order these, we need to load the _C extension (done when importing +# .extensions) before entering _meta_registrations. +from .extension import _HAS_OPS # usort:skip +from torchvision import _meta_registrations, datasets, io, models, ops, transforms, utils # usort:skip try: - from .version import __version__ # noqa: F401 + from .version import __version__ # @manual=fbcode//pytorch/vision:version except ImportError: pass -_image_backend = 'PIL' + +# Check if torchvision is being imported within the root folder +if not _HAS_OPS and os.path.dirname(os.path.realpath(__file__)) == os.path.join( + os.path.realpath(os.getcwd()), "torchvision" +): + message = ( + "You are importing torchvision within its own root folder ({}). " + "This is not expected to work and may give errors. Please exit the " + "torchvision project source and relaunch your python interpreter." + ) + warnings.warn(message.format(os.getcwd())) + +_image_backend = "PIL" _video_backend = "pyav" @@ -29,9 +41,8 @@ def set_image_backend(backend): generally faster than PIL, but does not support as many operations. """ global _image_backend - if backend not in ['PIL', 'accimage']: - raise ValueError("Invalid backend '{}'. Options are 'PIL' and 'accimage'" - .format(backend)) + if backend not in ["PIL", "accimage"]: + raise ValueError(f"Invalid backend '{backend}'. Options are 'PIL' and 'accimage'") _image_backend = backend @@ -49,26 +60,46 @@ def set_video_backend(backend): Args: backend (string): Name of the video backend. one of {'pyav', 'video_reader'}. The :mod:`pyav` package uses the 3rd party PyAv library. It is a Pythonic - binding for the FFmpeg libraries. 
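For context on the two backends this docstring describes, typical usage looks like the following sketch; "clip.mp4" is a placeholder path, and the "video_reader" backend requires an FFmpeg-enabled build per the error text added below:

import torchvision
from torchvision import io

torchvision.set_video_backend("pyav")  # default: pure-Python decoding via PyAV
frames, audio, info = io.read_video("clip.mp4", pts_unit="sec")

torchvision.set_video_backend("video_reader")  # C++ decoder; needs torchvision built with FFmpeg
assert torchvision.get_video_backend() == "video_reader"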
- The :mod:`video_reader` package includes a native c++ implementation on - top of FFMPEG libraries, and a python API of TorchScript custom operator. - It is generally decoding faster than pyav, but perhaps is less robust. + binding for the FFmpeg libraries. + The :mod:`video_reader` package includes a native C++ implementation on + top of FFMPEG libraries, and a python API of TorchScript custom operator. + It generally decodes faster than :mod:`pyav`, but is perhaps less robust. + + .. note:: + Building with FFMPEG is disabled by default in the latest `main`. If you want to use the 'video_reader' + backend, please compile torchvision from source. """ global _video_backend - if backend not in ["pyav", "video_reader"]: - raise ValueError( - "Invalid video backend '%s'. Options are 'pyav' and 'video_reader'" % backend - ) - if backend == "video_reader" and not io._HAS_VIDEO_OPT: - warnings.warn("video_reader video backend is not available") + if backend not in ["pyav", "video_reader", "cuda"]: + raise ValueError("Invalid video backend '%s'. Options are 'pyav', 'video_reader' and 'cuda'" % backend) + if backend == "video_reader" and not io._HAS_CPU_VIDEO_DECODER: + # TODO: better messages + message = "video_reader video backend is not available. Please compile torchvision from source and try again" + raise RuntimeError(message) + elif backend == "cuda" and not io._HAS_GPU_VIDEO_DECODER: + # TODO: better messages + message = "cuda video backend is not available." + raise RuntimeError(message) else: _video_backend = backend def get_video_backend(): + """ + Returns the currently active video backend used to decode videos. + + Returns: + str: Name of the video backend. one of {'pyav', 'video_reader'}. + """ + return _video_backend def _is_tracing(): - import torch return torch._C._get_tracing_state() + + +def disable_beta_transforms_warning(): + # Noop, only exists to avoid breaking existing code. 
+ # See https://github.com/pytorch/vision/issues/7896 + pass diff --git a/torchvision/_internally_replaced_utils.py b/torchvision/_internally_replaced_utils.py new file mode 100644 index 00000000000..e0fa72489f1 --- /dev/null +++ b/torchvision/_internally_replaced_utils.py @@ -0,0 +1,51 @@ +import importlib.machinery +import os + +from torch.hub import _get_torch_home + + +_HOME = os.path.join(_get_torch_home(), "datasets", "vision") +_USE_SHARDED_DATASETS = False +IN_FBCODE = False + + +def _download_file_from_remote_location(fpath: str, url: str) -> None: + pass + + +def _is_remote_location_available() -> bool: + return False + + +try: + from torch.hub import load_state_dict_from_url # noqa: 401 +except ImportError: + from torch.utils.model_zoo import load_url as load_state_dict_from_url # noqa: 401 + + +def _get_extension_path(lib_name): + + lib_dir = os.path.dirname(__file__) + if os.name == "nt": + # Register the main torchvision library location on the default DLL path + import ctypes + + kernel32 = ctypes.WinDLL("kernel32.dll", use_last_error=True) + with_load_library_flags = hasattr(kernel32, "AddDllDirectory") + prev_error_mode = kernel32.SetErrorMode(0x0001) + + if with_load_library_flags: + kernel32.AddDllDirectory.restype = ctypes.c_void_p + + os.add_dll_directory(lib_dir) + + kernel32.SetErrorMode(prev_error_mode) + + loader_details = (importlib.machinery.ExtensionFileLoader, importlib.machinery.EXTENSION_SUFFIXES) + + extfinder = importlib.machinery.FileFinder(lib_dir, loader_details) + ext_specs = extfinder.find_spec(lib_name) + if ext_specs is None: + raise ImportError + + return ext_specs.origin diff --git a/torchvision/_meta_registrations.py b/torchvision/_meta_registrations.py new file mode 100644 index 00000000000..f75bfb77a7f --- /dev/null +++ b/torchvision/_meta_registrations.py @@ -0,0 +1,225 @@ +import functools + +import torch +import torch._custom_ops +import torch.library + +# Ensure that torch.ops.torchvision is visible +import torchvision.extension # noqa: F401 + + +@functools.lru_cache(None) +def get_meta_lib(): + return torch.library.Library("torchvision", "IMPL", "Meta") + + +def register_meta(op_name, overload_name="default"): + def wrapper(fn): + if torchvision.extension._has_ops(): + get_meta_lib().impl(getattr(getattr(torch.ops.torchvision, op_name), overload_name), fn) + return fn + + return wrapper + + +@register_meta("roi_align") +def meta_roi_align(input, rois, spatial_scale, pooled_height, pooled_width, sampling_ratio, aligned): + torch._check(rois.size(1) == 5, lambda: "rois must have shape as Tensor[K, 5]") + torch._check( + input.dtype == rois.dtype, + lambda: ( + "Expected tensor for input to have the same type as tensor for rois; " + f"but type {input.dtype} does not equal {rois.dtype}" + ), + ) + num_rois = rois.size(0) + channels = input.size(1) + return input.new_empty((num_rois, channels, pooled_height, pooled_width)) + + +@register_meta("_roi_align_backward") +def meta_roi_align_backward( + grad, rois, spatial_scale, pooled_height, pooled_width, batch_size, channels, height, width, sampling_ratio, aligned +): + torch._check( + grad.dtype == rois.dtype, + lambda: ( + "Expected tensor for grad to have the same type as tensor for rois; " + f"but type {grad.dtype} does not equal {rois.dtype}" + ), + ) + return grad.new_empty((batch_size, channels, height, width)) + + +@register_meta("ps_roi_align") +def meta_ps_roi_align(input, rois, spatial_scale, pooled_height, pooled_width, sampling_ratio): + torch._check(rois.size(1) == 5, lambda: "rois 
must have shape as Tensor[K, 5]") + torch._check( + input.dtype == rois.dtype, + lambda: ( + "Expected tensor for input to have the same type as tensor for rois; " + f"but type {input.dtype} does not equal {rois.dtype}" + ), + ) + channels = input.size(1) + torch._check( + channels % (pooled_height * pooled_width) == 0, + "input channels must be a multiple of pooling height * pooling width", + ) + + num_rois = rois.size(0) + out_size = (num_rois, channels // (pooled_height * pooled_width), pooled_height, pooled_width) + return input.new_empty(out_size), torch.empty(out_size, dtype=torch.int32, device="meta") + + +@register_meta("_ps_roi_align_backward") +def meta_ps_roi_align_backward( + grad, + rois, + channel_mapping, + spatial_scale, + pooled_height, + pooled_width, + sampling_ratio, + batch_size, + channels, + height, + width, +): + torch._check( + grad.dtype == rois.dtype, + lambda: ( + "Expected tensor for grad to have the same type as tensor for rois; " + f"but type {grad.dtype} does not equal {rois.dtype}" + ), + ) + return grad.new_empty((batch_size, channels, height, width)) + + +@register_meta("roi_pool") +def meta_roi_pool(input, rois, spatial_scale, pooled_height, pooled_width): + torch._check(rois.size(1) == 5, lambda: "rois must have shape as Tensor[K, 5]") + torch._check( + input.dtype == rois.dtype, + lambda: ( + "Expected tensor for input to have the same type as tensor for rois; " + f"but type {input.dtype} does not equal {rois.dtype}" + ), + ) + num_rois = rois.size(0) + channels = input.size(1) + out_size = (num_rois, channels, pooled_height, pooled_width) + return input.new_empty(out_size), torch.empty(out_size, device="meta", dtype=torch.int32) + + +@register_meta("_roi_pool_backward") +def meta_roi_pool_backward( + grad, rois, argmax, spatial_scale, pooled_height, pooled_width, batch_size, channels, height, width +): + torch._check( + grad.dtype == rois.dtype, + lambda: ( + "Expected tensor for grad to have the same type as tensor for rois; " + f"but type {grad.dtype} does not equal {rois.dtype}" + ), + ) + return grad.new_empty((batch_size, channels, height, width)) + + +@register_meta("ps_roi_pool") +def meta_ps_roi_pool(input, rois, spatial_scale, pooled_height, pooled_width): + torch._check(rois.size(1) == 5, lambda: "rois must have shape as Tensor[K, 5]") + torch._check( + input.dtype == rois.dtype, + lambda: ( + "Expected tensor for input to have the same type as tensor for rois; " + f"but type {input.dtype} does not equal {rois.dtype}" + ), + ) + channels = input.size(1) + torch._check( + channels % (pooled_height * pooled_width) == 0, + "input channels must be a multiple of pooling height * pooling width", + ) + num_rois = rois.size(0) + out_size = (num_rois, channels // (pooled_height * pooled_width), pooled_height, pooled_width) + return input.new_empty(out_size), torch.empty(out_size, device="meta", dtype=torch.int32) + + +@register_meta("_ps_roi_pool_backward") +def meta_ps_roi_pool_backward( + grad, rois, channel_mapping, spatial_scale, pooled_height, pooled_width, batch_size, channels, height, width +): + torch._check( + grad.dtype == rois.dtype, + lambda: ( + "Expected tensor for grad to have the same type as tensor for rois; " + f"but type {grad.dtype} does not equal {rois.dtype}" + ), + ) + return grad.new_empty((batch_size, channels, height, width)) + + +@torch.library.register_fake("torchvision::nms") +def meta_nms(dets, scores, iou_threshold): + torch._check(dets.dim() == 2, lambda: f"boxes should be a 2d tensor, got {dets.dim()}D") + 
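+    # note: this fake/meta kernel runs on "meta" tensors, which carry shapes and
+    # dtypes but no data, so these checks stay cheap under tracing; the unbacked
+    # symint created below stands in for nms's data-dependent output length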
torch._check(dets.size(1) == 4, lambda: f"boxes should have 4 elements in dimension 1, got {dets.size(1)}") + torch._check(scores.dim() == 1, lambda: f"scores should be a 1d tensor, got {scores.dim()}") + torch._check( + dets.size(0) == scores.size(0), + lambda: f"boxes and scores should have same number of elements in dimension 0, got {dets.size(0)} and {scores.size(0)}", + ) + ctx = torch._custom_ops.get_ctx() + num_to_keep = ctx.create_unbacked_symint() + return dets.new_empty(num_to_keep, dtype=torch.long) + + +@register_meta("deform_conv2d") +def meta_deform_conv2d( + input, + weight, + offset, + mask, + bias, + stride_h, + stride_w, + pad_h, + pad_w, + dil_h, + dil_w, + n_weight_grps, + n_offset_grps, + use_mask, +): + + out_height, out_width = offset.shape[-2:] + out_channels = weight.shape[0] + batch_size = input.shape[0] + return input.new_empty((batch_size, out_channels, out_height, out_width)) + + +@register_meta("_deform_conv2d_backward") +def meta_deform_conv2d_backward( + grad, + input, + weight, + offset, + mask, + bias, + stride_h, + stride_w, + pad_h, + pad_w, + dilation_h, + dilation_w, + groups, + offset_groups, + use_mask, +): + + grad_input = input.new_empty(input.shape) + grad_weight = weight.new_empty(weight.shape) + grad_offset = offset.new_empty(offset.shape) + grad_mask = mask.new_empty(mask.shape) + grad_bias = bias.new_empty(bias.shape) + return grad_input, grad_weight, grad_offset, grad_mask, grad_bias diff --git a/torchvision/_utils.py b/torchvision/_utils.py new file mode 100644 index 00000000000..b739ef0966e --- /dev/null +++ b/torchvision/_utils.py @@ -0,0 +1,32 @@ +import enum +from typing import Sequence, Type, TypeVar + +T = TypeVar("T", bound=enum.Enum) + + +class StrEnumMeta(enum.EnumMeta): + auto = enum.auto + + def from_str(self: Type[T], member: str) -> T: # type: ignore[misc] + try: + return self[member] + except KeyError: + # TODO: use `add_suggestion` from torchvision.prototype.utils._internal to improve the error message as + # soon as it is migrated. 
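+            # re-raise as ValueError (rather than KeyError) so an unknown member
+            # name reads like any other invalid-argument error to callers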
+ raise ValueError(f"Unknown value '{member}' for {self.__name__}.") from None + + +class StrEnum(enum.Enum, metaclass=StrEnumMeta): + pass + + +def sequence_to_str(seq: Sequence, separate_last: str = "") -> str: + if not seq: + return "" + if len(seq) == 1: + return f"'{seq[0]}'" + + head = "'" + "', '".join([str(item) for item in seq[:-1]]) + "'" + tail = f"{'' if separate_last and len(seq) == 2 else ','} {separate_last}'{seq[-1]}'" + + return head + tail diff --git a/torchvision/csrc/PSROIAlign.h b/torchvision/csrc/PSROIAlign.h deleted file mode 100644 index a5998df2891..00000000000 --- a/torchvision/csrc/PSROIAlign.h +++ /dev/null @@ -1,150 +0,0 @@ -#pragma once - -#include "cpu/vision_cpu.h" - -#ifdef WITH_CUDA -#include "cuda/vision_cuda.h" -#endif - -#include - -std::tuple PSROIAlign_forward( - const at::Tensor& input, - const at::Tensor& rois, - const float spatial_scale, - const int pooled_height, - const int pooled_width, - const int sampling_ratio) { - if (input.type().is_cuda()) { -#ifdef WITH_CUDA - return PSROIAlign_forward_cuda( - input, - rois, - spatial_scale, - pooled_height, - pooled_width, - sampling_ratio); -#else - AT_ERROR("Not compiled with GPU support"); -#endif - } - return PSROIAlign_forward_cpu( - input, rois, spatial_scale, pooled_height, pooled_width, sampling_ratio); -} - -at::Tensor PSROIAlign_backward( - const at::Tensor& grad, - const at::Tensor& rois, - const at::Tensor& mapping_channel, - const float spatial_scale, - const int pooled_height, - const int pooled_width, - const int sampling_ratio, - const int batch_size, - const int channels, - const int height, - const int width) { - if (grad.type().is_cuda()) { -#ifdef WITH_CUDA - return PSROIAlign_backward_cuda( - grad, - rois, - mapping_channel, - spatial_scale, - pooled_height, - pooled_width, - sampling_ratio, - batch_size, - channels, - height, - width); -#else - AT_ERROR("Not compiled with GPU support"); -#endif - } - return PSROIAlign_backward_cpu( - grad, - rois, - mapping_channel, - spatial_scale, - pooled_height, - pooled_width, - sampling_ratio, - batch_size, - channels, - height, - width); -} - -using namespace at; -using torch::Tensor; -using torch::autograd::AutogradContext; -using torch::autograd::Variable; -using torch::autograd::variable_list; - -class PSROIAlignFunction - : public torch::autograd::Function { - public: - static variable_list forward( - AutogradContext* ctx, - Variable input, - Variable rois, - const double spatial_scale, - const int64_t pooled_height, - const int64_t pooled_width, - const int64_t sampling_ratio) { - ctx->saved_data["spatial_scale"] = spatial_scale; - ctx->saved_data["pooled_height"] = pooled_height; - ctx->saved_data["pooled_width"] = pooled_width; - ctx->saved_data["sampling_ratio"] = sampling_ratio; - ctx->saved_data["input_shape"] = input.sizes(); - auto result = PSROIAlign_forward( - input, - rois, - spatial_scale, - pooled_height, - pooled_width, - sampling_ratio); - auto output = std::get<0>(result); - auto channel_mapping = std::get<1>(result); - ctx->save_for_backward({rois, channel_mapping}); - ctx->mark_non_differentiable({channel_mapping}); - return {output, channel_mapping}; - } - - static variable_list backward( - AutogradContext* ctx, - variable_list grad_output) { - // Use data saved in forward - auto saved = ctx->get_saved_variables(); - auto rois = saved[0]; - auto channel_mapping = saved[1]; - auto input_shape = ctx->saved_data["input_shape"].toIntList(); - auto grad_in = PSROIAlign_backward( - grad_output[0], - rois, - channel_mapping, 
- ctx->saved_data["spatial_scale"].toDouble(), - ctx->saved_data["pooled_height"].toInt(), - ctx->saved_data["pooled_width"].toInt(), - ctx->saved_data["sampling_ratio"].toInt(), - input_shape[0], - input_shape[1], - input_shape[2], - input_shape[3]); - return { - grad_in, Variable(), Variable(), Variable(), Variable(), Variable()}; - } -}; - -std::tuple ps_roi_align( - const Tensor& input, - const Tensor& rois, - const double spatial_scale, - const int64_t pooled_height, - const int64_t pooled_width, - const int64_t sampling_ratio) { - auto result = PSROIAlignFunction::apply( - input, rois, spatial_scale, pooled_height, pooled_width, sampling_ratio); - return std::tuple(result[0], result[1]); -} diff --git a/torchvision/csrc/PSROIPool.h b/torchvision/csrc/PSROIPool.h deleted file mode 100644 index c67ce92f54e..00000000000 --- a/torchvision/csrc/PSROIPool.h +++ /dev/null @@ -1,128 +0,0 @@ -#pragma once - -#include "cpu/vision_cpu.h" - -#ifdef WITH_CUDA -#include "cuda/vision_cuda.h" -#endif - -std::tuple PSROIPool_forward( - const at::Tensor& input, - const at::Tensor& rois, - const float spatial_scale, - const int pooled_height, - const int pooled_width) { - if (input.type().is_cuda()) { -#ifdef WITH_CUDA - return PSROIPool_forward_cuda( - input, rois, spatial_scale, pooled_height, pooled_width); -#else - AT_ERROR("Not compiled with GPU support"); -#endif - } - return PSROIPool_forward_cpu( - input, rois, spatial_scale, pooled_height, pooled_width); -} - -at::Tensor PSROIPool_backward( - const at::Tensor& grad, - const at::Tensor& rois, - const at::Tensor& mapping_channel, - const float spatial_scale, - const int pooled_height, - const int pooled_width, - const int batch_size, - const int channels, - const int height, - const int width) { - if (grad.type().is_cuda()) { -#ifdef WITH_CUDA - return PSROIPool_backward_cuda( - grad, - rois, - mapping_channel, - spatial_scale, - pooled_height, - pooled_width, - batch_size, - channels, - height, - width); -#else - AT_ERROR("Not compiled with GPU support"); -#endif - } - return PSROIPool_backward_cpu( - grad, - rois, - mapping_channel, - spatial_scale, - pooled_height, - pooled_width, - batch_size, - channels, - height, - width); -} - -using namespace at; -using torch::Tensor; -using torch::autograd::AutogradContext; -using torch::autograd::Variable; -using torch::autograd::variable_list; - -class PSROIPoolFunction : public torch::autograd::Function { - public: - static variable_list forward( - AutogradContext* ctx, - Variable input, - Variable rois, - const double spatial_scale, - const int64_t pooled_height, - const int64_t pooled_width) { - ctx->saved_data["spatial_scale"] = spatial_scale; - ctx->saved_data["pooled_height"] = pooled_height; - ctx->saved_data["pooled_width"] = pooled_width; - ctx->saved_data["input_shape"] = input.sizes(); - auto result = PSROIPool_forward( - input, rois, spatial_scale, pooled_height, pooled_width); - auto output = std::get<0>(result); - auto channel_mapping = std::get<1>(result); - ctx->save_for_backward({rois, channel_mapping}); - ctx->mark_non_differentiable({channel_mapping}); - return {output, channel_mapping}; - } - - static variable_list backward( - AutogradContext* ctx, - variable_list grad_output) { - // Use data saved in forward - auto saved = ctx->get_saved_variables(); - auto rois = saved[0]; - auto channel_mapping = saved[1]; - auto input_shape = ctx->saved_data["input_shape"].toIntList(); - auto grad_in = PSROIPool_backward( - grad_output[0], - rois, - channel_mapping, - 
ctx->saved_data["spatial_scale"].toDouble(), - ctx->saved_data["pooled_height"].toInt(), - ctx->saved_data["pooled_width"].toInt(), - input_shape[0], - input_shape[1], - input_shape[2], - input_shape[3]); - return {grad_in, Variable(), Variable(), Variable(), Variable()}; - } -}; - -std::tuple ps_roi_pool( - const Tensor& input, - const Tensor& rois, - const double spatial_scale, - const int64_t pooled_height, - const int64_t pooled_width) { - auto result = PSROIPoolFunction::apply( - input, rois, spatial_scale, pooled_height, pooled_width); - return std::tuple(result[0], result[1]); -} diff --git a/torchvision/csrc/ROIAlign.h b/torchvision/csrc/ROIAlign.h deleted file mode 100644 index 765d4879d99..00000000000 --- a/torchvision/csrc/ROIAlign.h +++ /dev/null @@ -1,147 +0,0 @@ -#pragma once - -#include "cpu/vision_cpu.h" - -#ifdef WITH_CUDA -#include "cuda/vision_cuda.h" -#endif - -// Interface for Python -at::Tensor ROIAlign_forward( - const at::Tensor& input, // Input feature map. - const at::Tensor& rois, // List of ROIs to pool over. - const double spatial_scale, // The scale of the image features. ROIs will be - // scaled to this. - const int64_t pooled_height, // The height of the pooled feature map. - const int64_t pooled_width, // The width of the pooled feature - const int64_t sampling_ratio) // The number of points to sample in each bin -// along each axis. -{ - if (input.type().is_cuda()) { -#ifdef WITH_CUDA - return ROIAlign_forward_cuda( - input, - rois, - spatial_scale, - pooled_height, - pooled_width, - sampling_ratio); -#else - AT_ERROR("Not compiled with GPU support"); -#endif - } - return ROIAlign_forward_cpu( - input, rois, spatial_scale, pooled_height, pooled_width, sampling_ratio); -} - -at::Tensor ROIAlign_backward( - const at::Tensor& grad, - const at::Tensor& rois, - const float spatial_scale, - const int pooled_height, - const int pooled_width, - const int batch_size, - const int channels, - const int height, - const int width, - const int sampling_ratio) { - if (grad.type().is_cuda()) { -#ifdef WITH_CUDA - return ROIAlign_backward_cuda( - grad, - rois, - spatial_scale, - pooled_height, - pooled_width, - batch_size, - channels, - height, - width, - sampling_ratio); -#else - AT_ERROR("Not compiled with GPU support"); -#endif - } - return ROIAlign_backward_cpu( - grad, - rois, - spatial_scale, - pooled_height, - pooled_width, - batch_size, - channels, - height, - width, - sampling_ratio); -} - -using namespace at; -using torch::Tensor; -using torch::autograd::AutogradContext; -using torch::autograd::Variable; -using torch::autograd::variable_list; - -class ROIAlignFunction : public torch::autograd::Function { - public: - static variable_list forward( - AutogradContext* ctx, - Variable input, - Variable rois, - const double spatial_scale, - const int64_t pooled_height, - const int64_t pooled_width, - const int64_t sampling_ratio) { - ctx->saved_data["spatial_scale"] = spatial_scale; - ctx->saved_data["pooled_height"] = pooled_height; - ctx->saved_data["pooled_width"] = pooled_width; - ctx->saved_data["sampling_ratio"] = sampling_ratio; - ctx->saved_data["input_shape"] = input.sizes(); - ctx->save_for_backward({rois}); - auto result = ROIAlign_forward( - input, - rois, - spatial_scale, - pooled_height, - pooled_width, - sampling_ratio); - return {result}; - } - - static variable_list backward( - AutogradContext* ctx, - variable_list grad_output) { - // Use data saved in forward - auto saved = ctx->get_saved_variables(); - auto rois = saved[0]; - auto input_shape = 
ctx->saved_data["input_shape"].toIntList(); - auto grad_in = ROIAlign_backward( - grad_output[0], - rois, - ctx->saved_data["spatial_scale"].toDouble(), - ctx->saved_data["pooled_height"].toInt(), - ctx->saved_data["pooled_width"].toInt(), - input_shape[0], - input_shape[1], - input_shape[2], - input_shape[3], - ctx->saved_data["sampling_ratio"].toInt()); - return { - grad_in, Variable(), Variable(), Variable(), Variable(), Variable()}; - } -}; - -Tensor roi_align( - const Tensor& input, - const Tensor& rois, - const double spatial_scale, - const int64_t pooled_height, - const int64_t pooled_width, - const int64_t sampling_ratio) { - return ROIAlignFunction::apply( - input, - rois, - spatial_scale, - pooled_height, - pooled_width, - sampling_ratio)[0]; -} diff --git a/torchvision/csrc/ROIPool.h b/torchvision/csrc/ROIPool.h deleted file mode 100644 index 79b40293176..00000000000 --- a/torchvision/csrc/ROIPool.h +++ /dev/null @@ -1,128 +0,0 @@ -#pragma once - -#include "cpu/vision_cpu.h" - -#ifdef WITH_CUDA -#include "cuda/vision_cuda.h" -#endif - -std::tuple ROIPool_forward( - const at::Tensor& input, - const at::Tensor& rois, - const double spatial_scale, - const int64_t pooled_height, - const int64_t pooled_width) { - if (input.type().is_cuda()) { -#ifdef WITH_CUDA - return ROIPool_forward_cuda( - input, rois, spatial_scale, pooled_height, pooled_width); -#else - AT_ERROR("Not compiled with GPU support"); -#endif - } - return ROIPool_forward_cpu( - input, rois, spatial_scale, pooled_height, pooled_width); -} - -at::Tensor ROIPool_backward( - const at::Tensor& grad, - const at::Tensor& rois, - const at::Tensor& argmax, - const float spatial_scale, - const int pooled_height, - const int pooled_width, - const int batch_size, - const int channels, - const int height, - const int width) { - if (grad.type().is_cuda()) { -#ifdef WITH_CUDA - return ROIPool_backward_cuda( - grad, - rois, - argmax, - spatial_scale, - pooled_height, - pooled_width, - batch_size, - channels, - height, - width); -#else - AT_ERROR("Not compiled with GPU support"); -#endif - } - return ROIPool_backward_cpu( - grad, - rois, - argmax, - spatial_scale, - pooled_height, - pooled_width, - batch_size, - channels, - height, - width); -} - -using namespace at; -using torch::Tensor; -using torch::autograd::AutogradContext; -using torch::autograd::Variable; -using torch::autograd::variable_list; - -class ROIPoolFunction : public torch::autograd::Function { - public: - static variable_list forward( - AutogradContext* ctx, - Variable input, - Variable rois, - const double spatial_scale, - const int64_t pooled_height, - const int64_t pooled_width) { - ctx->saved_data["spatial_scale"] = spatial_scale; - ctx->saved_data["pooled_height"] = pooled_height; - ctx->saved_data["pooled_width"] = pooled_width; - ctx->saved_data["input_shape"] = input.sizes(); - auto result = ROIPool_forward( - input, rois, spatial_scale, pooled_height, pooled_width); - auto output = std::get<0>(result); - auto argmax = std::get<1>(result); - ctx->save_for_backward({rois, argmax}); - ctx->mark_non_differentiable({argmax}); - return {output, argmax}; - } - - static variable_list backward( - AutogradContext* ctx, - variable_list grad_output) { - // Use data saved in forward - auto saved = ctx->get_saved_variables(); - auto rois = saved[0]; - auto argmax = saved[1]; - auto input_shape = ctx->saved_data["input_shape"].toIntList(); - auto grad_in = ROIPool_backward( - grad_output[0], - rois, - argmax, - ctx->saved_data["spatial_scale"].toDouble(), - 
ctx->saved_data["pooled_height"].toInt(), - ctx->saved_data["pooled_width"].toInt(), - input_shape[0], - input_shape[1], - input_shape[2], - input_shape[3]); - return {grad_in, Variable(), Variable(), Variable(), Variable()}; - } -}; - -std::tuple roi_pool( - const Tensor& input, - const Tensor& rois, - const double spatial_scale, - const int64_t pooled_height, - const int64_t pooled_width) { - auto result = ROIPoolFunction::apply( - input, rois, spatial_scale, pooled_height, pooled_width); - return std::tuple(result[0], result[1]); -} diff --git a/torchvision/csrc/cpu/video_reader/FfmpegAudioSampler.cpp b/torchvision/csrc/cpu/video_reader/FfmpegAudioSampler.cpp deleted file mode 100644 index 24aecacf946..00000000000 --- a/torchvision/csrc/cpu/video_reader/FfmpegAudioSampler.cpp +++ /dev/null @@ -1,118 +0,0 @@ -#include "FfmpegAudioSampler.h" -#include -#include "FfmpegUtil.h" - -using namespace std; - -FfmpegAudioSampler::FfmpegAudioSampler( - const AudioFormat& in, - const AudioFormat& out) - : inFormat_(in), outFormat_(out) {} - -FfmpegAudioSampler::~FfmpegAudioSampler() { - if (swrContext_) { - swr_free(&swrContext_); - } -} - -int FfmpegAudioSampler::init() { - swrContext_ = swr_alloc_set_opts( - nullptr, // we're allocating a new context - av_get_default_channel_layout(outFormat_.channels), // out_ch_layout - static_cast(outFormat_.format), // out_sample_fmt - outFormat_.samples, // out_sample_rate - av_get_default_channel_layout(inFormat_.channels), // in_ch_layout - static_cast(inFormat_.format), // in_sample_fmt - inFormat_.samples, // in_sample_rate - 0, // log_offset - nullptr); // log_ctx - if (swrContext_ == nullptr) { - LOG(ERROR) << "swr_alloc_set_opts fails"; - return -1; - } - int result = 0; - if ((result = swr_init(swrContext_)) < 0) { - LOG(ERROR) << "swr_init failed, err: " << ffmpeg_util::getErrorDesc(result) - << ", in -> format: " << inFormat_.format - << ", channels: " << inFormat_.channels - << ", samples: " << inFormat_.samples - << ", out -> format: " << outFormat_.format - << ", channels: " << outFormat_.channels - << ", samples: " << outFormat_.samples; - return -1; - } - return 0; -} - -int64_t FfmpegAudioSampler::getSampleBytes(const AVFrame* frame) const { - auto outSamples = getOutNumSamples(frame->nb_samples); - - return av_samples_get_buffer_size( - nullptr, - outFormat_.channels, - outSamples, - static_cast(outFormat_.format), - 1); -} - -// https://www.ffmpeg.org/doxygen/3.2/group__lswr.html -unique_ptr FfmpegAudioSampler::sample(const AVFrame* frame) { - if (!frame) { - return nullptr; // no flush for videos - } - - auto inNumSamples = frame->nb_samples; - auto outNumSamples = getOutNumSamples(frame->nb_samples); - - auto outSampleSize = getSampleBytes(frame); - AvDataPtr frameData(static_cast(av_malloc(outSampleSize))); - - uint8_t* outPlanes[AVRESAMPLE_MAX_CHANNELS]; - int result = 0; - if ((result = av_samples_fill_arrays( - outPlanes, - nullptr, // linesize is not needed - frameData.get(), - outFormat_.channels, - outNumSamples, - static_cast(outFormat_.format), - 1)) < 0) { - LOG(ERROR) << "av_samples_fill_arrays failed, err: " - << ffmpeg_util::getErrorDesc(result) - << ", outNumSamples: " << outNumSamples - << ", format: " << outFormat_.format; - return nullptr; - } - - if ((result = swr_convert( - swrContext_, - &outPlanes[0], - outNumSamples, - (const uint8_t**)&frame->data[0], - inNumSamples)) < 0) { - LOG(ERROR) << "swr_convert faield, err: " - << ffmpeg_util::getErrorDesc(result); - return nullptr; - } - // result returned by swr_convert 
is the No. of actual output samples. - // So update the buffer size using av_samples_get_buffer_size - result = av_samples_get_buffer_size( - nullptr, - outFormat_.channels, - result, - static_cast(outFormat_.format), - 1); - - return make_unique(std::move(frameData), result, 0); -} -/* -Because of decoding delay, the returned value is an upper bound of No. of -output samples -*/ -int64_t FfmpegAudioSampler::getOutNumSamples(int inNumSamples) const { - return av_rescale_rnd( - swr_get_delay(swrContext_, inFormat_.samples) + inNumSamples, - outFormat_.samples, - inFormat_.samples, - AV_ROUND_UP); -} diff --git a/torchvision/csrc/cpu/video_reader/FfmpegAudioSampler.h b/torchvision/csrc/cpu/video_reader/FfmpegAudioSampler.h deleted file mode 100644 index 767a5ca6e4f..00000000000 --- a/torchvision/csrc/cpu/video_reader/FfmpegAudioSampler.h +++ /dev/null @@ -1,32 +0,0 @@ -#pragma once - -#include "FfmpegSampler.h" - -#define AVRESAMPLE_MAX_CHANNELS 32 - -/** - * Class transcode audio frames from one format into another - */ -class FfmpegAudioSampler : public FfmpegSampler { - public: - explicit FfmpegAudioSampler(const AudioFormat& in, const AudioFormat& out); - ~FfmpegAudioSampler() override; - - int init() override; - - int64_t getSampleBytes(const AVFrame* frame) const; - // FfmpegSampler overrides - // returns number of bytes of the sampled data - std::unique_ptr sample(const AVFrame* frame) override; - - const AudioFormat& getInFormat() const { - return inFormat_; - } - - private: - int64_t getOutNumSamples(int inNumSamples) const; - - AudioFormat inFormat_; - AudioFormat outFormat_; - SwrContext* swrContext_{nullptr}; -}; diff --git a/torchvision/csrc/cpu/video_reader/FfmpegAudioStream.cpp b/torchvision/csrc/cpu/video_reader/FfmpegAudioStream.cpp deleted file mode 100644 index b5b1e2fbda5..00000000000 --- a/torchvision/csrc/cpu/video_reader/FfmpegAudioStream.cpp +++ /dev/null @@ -1,103 +0,0 @@ -#include "FfmpegAudioStream.h" -#include "FfmpegUtil.h" - -using namespace std; - -namespace { - -bool operator==(const AudioFormat& x, const AVCodecContext& y) { - return x.samples == y.sample_rate && x.channels == y.channels && - x.format == y.sample_fmt; -} - -AudioFormat& toAudioFormat( - AudioFormat& audioFormat, - const AVCodecContext& codecCtx) { - audioFormat.samples = codecCtx.sample_rate; - audioFormat.channels = codecCtx.channels; - audioFormat.format = codecCtx.sample_fmt; - - return audioFormat; -} - -} // namespace - -FfmpegAudioStream::FfmpegAudioStream( - AVFormatContext* inputCtx, - int index, - enum AVMediaType avMediaType, - MediaFormat mediaFormat, - double seekFrameMargin) - : FfmpegStream(inputCtx, index, avMediaType, seekFrameMargin), - mediaFormat_(mediaFormat) {} - -FfmpegAudioStream::~FfmpegAudioStream() {} - -void FfmpegAudioStream::checkStreamDecodeParams() { - auto timeBase = getTimeBase(); - if (timeBase.first > 0) { - CHECK_EQ(timeBase.first, inputCtx_->streams[index_]->time_base.num); - CHECK_EQ(timeBase.second, inputCtx_->streams[index_]->time_base.den); - } -} - -void FfmpegAudioStream::updateStreamDecodeParams() { - auto timeBase = getTimeBase(); - if (timeBase.first == 0) { - mediaFormat_.format.audio.timeBaseNum = - inputCtx_->streams[index_]->time_base.num; - mediaFormat_.format.audio.timeBaseDen = - inputCtx_->streams[index_]->time_base.den; - } - mediaFormat_.format.audio.duration = inputCtx_->streams[index_]->duration; -} - -int FfmpegAudioStream::initFormat() { - AudioFormat& format = mediaFormat_.format.audio; - - if (format.samples == 0) { - 
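// Worked example for FfmpegAudioSampler::getOutNumSamples above
// (illustrative numbers): resampling 44100 Hz input to 16000 Hz output,
// with 1024 pending input samples while swr_get_delay() reports 100
// buffered samples of delay:
//
//   av_rescale_rnd(100 + 1024, 16000, 44100, AV_ROUND_UP)
//     = ceil(1124 * 16000 / 44100) = ceil(407.8...) = 408
//
// so at least 408 output samples are allocated, and the smaller count
// actually produced by swr_convert() is then used to shrink the buffer
// size via av_samples_get_buffer_size().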
format.samples = codecCtx_->sample_rate;
-  }
-  if (format.channels == 0) {
-    format.channels = codecCtx_->channels;
-  }
-  if (format.format == AV_SAMPLE_FMT_NONE) {
-    format.format = codecCtx_->sample_fmt;
-    VLOG(2) << "set stream format sample_fmt: " << format.format;
-  }
-
-  checkStreamDecodeParams();
-
-  updateStreamDecodeParams();
-
-  if (format.samples > 0 && format.channels > 0 &&
-      format.format != AV_SAMPLE_FMT_NONE) {
-    return 0;
-  } else {
-    return -1;
-  }
-}
-
-unique_ptr<DecodedFrame> FfmpegAudioStream::sampleFrameData() {
-  AudioFormat& audioFormat = mediaFormat_.format.audio;
-
-  if (!sampler_ || !(sampler_->getInFormat() == *codecCtx_)) {
-    AudioFormat newInFormat;
-    newInFormat = toAudioFormat(newInFormat, *codecCtx_);
-    sampler_ = make_unique<FfmpegAudioSampler>(newInFormat, audioFormat);
-    VLOG(1) << "Set sampler input audio format"
-            << ", samples: " << newInFormat.samples
-            << ", channels: " << newInFormat.channels
-            << ", format: " << newInFormat.format
-            << " : output audio sampler format"
-            << ", samples: " << audioFormat.samples
-            << ", channels: " << audioFormat.channels
-            << ", format: " << audioFormat.format;
-    int ret = sampler_->init();
-    if (ret < 0) {
-      VLOG(1) << "Failed to initialize audio sampler";
-      return nullptr;
-    }
-  }
-  return sampler_->sample(frame_);
-}
diff --git a/torchvision/csrc/cpu/video_reader/FfmpegAudioStream.h b/torchvision/csrc/cpu/video_reader/FfmpegAudioStream.h
deleted file mode 100644
index 1d4f7a2f2ee..00000000000
--- a/torchvision/csrc/cpu/video_reader/FfmpegAudioStream.h
+++ /dev/null
@@ -1,54 +0,0 @@
-#pragma once
-
-#include
-#include "FfmpegAudioSampler.h"
-#include "FfmpegStream.h"
-
-/**
- * Class uses the FFMPEG library to decode one audio stream.
- */
-class FfmpegAudioStream : public FfmpegStream {
- public:
-  explicit FfmpegAudioStream(
-      AVFormatContext* inputCtx,
-      int index,
-      enum AVMediaType avMediaType,
-      MediaFormat mediaFormat,
-      double seekFrameMargin);
-
-  ~FfmpegAudioStream() override;
-
-  // FfmpegStream overrides
-  MediaType getMediaType() const override {
-    return MediaType::TYPE_AUDIO;
-  }
-
-  FormatUnion getMediaFormat() const override {
-    return mediaFormat_.format;
-  }
-
-  int64_t getStartPts() const override {
-    return mediaFormat_.format.audio.startPts;
-  }
-  int64_t getEndPts() const override {
-    return mediaFormat_.format.audio.endPts;
-  }
-  // return numerator and denominator of time base
-  std::pair<int, int> getTimeBase() const {
-    return std::make_pair(
-        mediaFormat_.format.audio.timeBaseNum,
-        mediaFormat_.format.audio.timeBaseDen);
-  }
-
-  void checkStreamDecodeParams();
-
-  void updateStreamDecodeParams();
-
- protected:
-  int initFormat() override;
-  std::unique_ptr<DecodedFrame> sampleFrameData() override;
-
- private:
-  MediaFormat mediaFormat_;
-  std::unique_ptr<FfmpegAudioSampler> sampler_{nullptr};
-};
diff --git a/torchvision/csrc/cpu/video_reader/FfmpegDecoder.cpp b/torchvision/csrc/cpu/video_reader/FfmpegDecoder.cpp
deleted file mode 100644
index fb4d302cc03..00000000000
--- a/torchvision/csrc/cpu/video_reader/FfmpegDecoder.cpp
+++ /dev/null
@@ -1,412 +0,0 @@
-#include "FfmpegDecoder.h"
-#include "FfmpegAudioStream.h"
-#include "FfmpegUtil.h"
-#include "FfmpegVideoStream.h"
-
-using namespace std;
-
-static AVPacket avPkt;
-
-namespace {
-
-unique_ptr<FfmpegStream> createFfmpegStream(
-    MediaType type,
-    AVFormatContext* ctx,
-    int idx,
-    MediaFormat& mediaFormat,
-    double seekFrameMargin) {
-  enum AVMediaType avType;
-  CHECK(ffmpeg_util::mapMediaType(type, &avType));
-  switch (type) {
-    case MediaType::TYPE_VIDEO:
-      return make_unique<FfmpegVideoStream>(
-          ctx, idx, avType, mediaFormat,
seekFrameMargin); - case MediaType::TYPE_AUDIO: - return make_unique( - ctx, idx, avType, mediaFormat, seekFrameMargin); - default: - return nullptr; - } -} - -} // namespace - -FfmpegAvioContext::FfmpegAvioContext() - : workBuffersize_(VIO_BUFFER_SZ), - workBuffer_((uint8_t*)av_malloc(workBuffersize_)), - inputFile_(nullptr), - inputBuffer_(nullptr), - inputBufferSize_(0) {} - -int FfmpegAvioContext::initAVIOContext(const uint8_t* buffer, int64_t size) { - inputBuffer_ = buffer; - inputBufferSize_ = size; - avioCtx_ = avio_alloc_context( - workBuffer_, - workBuffersize_, - 0, - reinterpret_cast(this), - &FfmpegAvioContext::readMemory, - nullptr, // no write function - &FfmpegAvioContext::seekMemory); - return 0; -} - -FfmpegAvioContext::~FfmpegAvioContext() { - /* note: the internal buffer could have changed, and be != workBuffer_ */ - if (avioCtx_) { - av_freep(&avioCtx_->buffer); - av_freep(&avioCtx_); - } else { - av_freep(&workBuffer_); - } - if (inputFile_) { - fclose(inputFile_); - } -} - -int FfmpegAvioContext::read(uint8_t* buf, int buf_size) { - if (inputBuffer_) { - return readMemory(this, buf, buf_size); - } else { - return -1; - } -} - -int FfmpegAvioContext::readMemory(void* opaque, uint8_t* buf, int buf_size) { - FfmpegAvioContext* h = static_cast(opaque); - if (buf_size < 0) { - return -1; - } - - int reminder = h->inputBufferSize_ - h->offset_; - int r = buf_size < reminder ? buf_size : reminder; - if (r < 0) { - return AVERROR_EOF; - } - - memcpy(buf, h->inputBuffer_ + h->offset_, r); - h->offset_ += r; - return r; -} - -int64_t FfmpegAvioContext::seek(int64_t offset, int whence) { - if (inputBuffer_) { - return seekMemory(this, offset, whence); - } else { - return -1; - } -} - -int64_t FfmpegAvioContext::seekMemory( - void* opaque, - int64_t offset, - int whence) { - FfmpegAvioContext* h = static_cast(opaque); - switch (whence) { - case SEEK_CUR: // from current position - h->offset_ += offset; - break; - case SEEK_END: // from eof - h->offset_ = h->inputBufferSize_ + offset; - break; - case SEEK_SET: // from beginning of file - h->offset_ = offset; - break; - case AVSEEK_SIZE: - return h->inputBufferSize_; - } - return h->offset_; -} - -int FfmpegDecoder::init( - const std::string& filename, - bool isDecodeFile, - FfmpegAvioContext& ioctx, - DecoderOutput& decoderOutput) { - cleanUp(); - - int ret = 0; - if (!isDecodeFile) { - formatCtx_ = avformat_alloc_context(); - if (!formatCtx_) { - LOG(ERROR) << "avformat_alloc_context failed"; - return -1; - } - formatCtx_->pb = ioctx.get_avio(); - formatCtx_->flags |= AVFMT_FLAG_CUSTOM_IO; - - // Determining the input format: - int probeSz = AVPROBE_SIZE + AVPROBE_PADDING_SIZE; - uint8_t* probe((uint8_t*)av_malloc(probeSz)); - memset(probe, 0, probeSz); - int len = ioctx.read(probe, probeSz - AVPROBE_PADDING_SIZE); - if (len < probeSz - AVPROBE_PADDING_SIZE) { - LOG(ERROR) << "Insufficient data to determine video format"; - av_freep(&probe); - return -1; - } - // seek back to start of stream - ioctx.seek(0, SEEK_SET); - - unique_ptr probeData(new AVProbeData()); - probeData->buf = probe; - probeData->buf_size = len; - probeData->filename = ""; - // Determine the input-format: - formatCtx_->iformat = av_probe_input_format(probeData.get(), 1); - // this is to avoid the double-free error - if (formatCtx_->iformat == nullptr) { - LOG(ERROR) << "av_probe_input_format fails"; - return -1; - } - VLOG(1) << "av_probe_input_format succeeds"; - av_freep(&probe); - - ret = avformat_open_input(&formatCtx_, "", nullptr, nullptr); - } else { 
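// A caller-side sketch of the in-memory path handled above: the encoded
// bytes never touch the filesystem; FfmpegAvioContext serves them to
// FFmpeg through the readMemory/seekMemory callbacks. The buffer
// variables are assumptions standing in for caller-provided data.
//
//   const uint8_t* encodedBytes = ...;  // a whole compressed video
//   int64_t encodedByteCount = ...;     // its length in bytes
//   FfmpegDecoder decoder;
//   DecoderOutput output;
//   decoder.decodeMemory(std::move(params), encodedBytes,
//                        encodedByteCount, output);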
- ret = avformat_open_input(&formatCtx_, filename.c_str(), nullptr, nullptr); - } - - if (ret < 0) { - LOG(ERROR) << "avformat_open_input failed, error: " - << ffmpeg_util::getErrorDesc(ret); - cleanUp(); - return ret; - } - ret = avformat_find_stream_info(formatCtx_, nullptr); - if (ret < 0) { - LOG(ERROR) << "avformat_find_stream_info failed, error: " - << ffmpeg_util::getErrorDesc(ret); - cleanUp(); - return ret; - } - if (!initStreams()) { - LOG(ERROR) << "Cannot activate streams"; - cleanUp(); - return -1; - } - - for (auto& stream : streams_) { - MediaType mediaType = stream.second->getMediaType(); - decoderOutput.initMediaType(mediaType, stream.second->getMediaFormat()); - } - VLOG(1) << "FfmpegDecoder initialized"; - return 0; -} - -int FfmpegDecoder::decodeFile( - unique_ptr params, - const string& fileName, - DecoderOutput& decoderOutput) { - VLOG(1) << "decode file: " << fileName; - FfmpegAvioContext ioctx; - int ret = decodeLoop(std::move(params), fileName, true, ioctx, decoderOutput); - return ret; -} - -int FfmpegDecoder::decodeMemory( - unique_ptr params, - const uint8_t* buffer, - int64_t size, - DecoderOutput& decoderOutput) { - VLOG(1) << "decode video data in memory"; - FfmpegAvioContext ioctx; - int ret = ioctx.initAVIOContext(buffer, size); - if (ret == 0) { - ret = - decodeLoop(std::move(params), string(""), false, ioctx, decoderOutput); - } - return ret; -} - -int FfmpegDecoder::probeFile( - unique_ptr params, - const string& fileName, - DecoderOutput& decoderOutput) { - VLOG(1) << "probe file: " << fileName; - FfmpegAvioContext ioctx; - return probeVideo(std::move(params), fileName, true, ioctx, decoderOutput); -} - -int FfmpegDecoder::probeMemory( - unique_ptr params, - const uint8_t* buffer, - int64_t size, - DecoderOutput& decoderOutput) { - VLOG(1) << "probe video data in memory"; - FfmpegAvioContext ioctx; - int ret = ioctx.initAVIOContext(buffer, size); - if (ret == 0) { - ret = - probeVideo(std::move(params), string(""), false, ioctx, decoderOutput); - } - return ret; -} - -void FfmpegDecoder::cleanUp() { - if (formatCtx_) { - for (auto& stream : streams_) { - // Drain stream buffers. - DecoderOutput decoderOutput; - stream.second->flush(1, decoderOutput); - stream.second.reset(); - } - streams_.clear(); - avformat_close_input(&formatCtx_); - } -} - -FfmpegStream* FfmpegDecoder::findStreamByIndex(int streamIndex) const { - auto it = streams_.find(streamIndex); - return it != streams_.end() ? it->second.get() : nullptr; -} - -/* -Reference implementation: -https://ffmpeg.org/doxygen/3.4/demuxing_decoding_8c-example.html -*/ -int FfmpegDecoder::decodeLoop( - unique_ptr params, - const std::string& filename, - bool isDecodeFile, - FfmpegAvioContext& ioctx, - DecoderOutput& decoderOutput) { - params_ = std::move(params); - - int ret = init(filename, isDecodeFile, ioctx, decoderOutput); - if (ret < 0) { - return ret; - } - // init package - av_init_packet(&avPkt); - avPkt.data = nullptr; - avPkt.size = 0; - - int result = 0; - bool ptsInRange = true; - while (ptsInRange) { - result = av_read_frame(formatCtx_, &avPkt); - if (result == AVERROR(EAGAIN)) { - VLOG(1) << "Decoder is busy"; - ret = 0; - break; - } else if (result == AVERROR_EOF) { - VLOG(1) << "Stream decoding is completed"; - ret = 0; - break; - } else if (result < 0) { - VLOG(1) << "av_read_frame fails. Break decoder loop. 
Error: " - << ffmpeg_util::getErrorDesc(result); - ret = result; - break; - } - - ret = 0; - auto stream = findStreamByIndex(avPkt.stream_index); - if (stream == nullptr) { - // the packet is from a stream the caller is not interested. Ignore it - VLOG(2) << "avPkt ignored. stream index: " << avPkt.stream_index; - // Need to free the memory of AVPacket. Otherwise, memory leak happens - av_packet_unref(&avPkt); - continue; - } - - do { - result = stream->sendPacket(&avPkt); - if (result == AVERROR(EAGAIN)) { - VLOG(2) << "avcodec_send_packet returns AVERROR(EAGAIN)"; - // start to recevie available frames from internal buffer - stream->receiveAvailFrames(params_->getPtsOnly, decoderOutput); - if (isPtsExceedRange()) { - // exit the most-outer while loop - VLOG(1) << "In all streams, exceed the end pts. Exit decoding loop"; - ret = 0; - ptsInRange = false; - break; - } - } else if (result < 0) { - LOG(WARNING) << "avcodec_send_packet failed. Error: " - << ffmpeg_util::getErrorDesc(result); - ret = result; - break; - } else { - VLOG(2) << "avcodec_send_packet succeeds"; - // succeed. Read the next AVPacket and send out it - break; - } - } while (ptsInRange); - // Need to free the memory of AVPacket. Otherwise, memory leak happens - av_packet_unref(&avPkt); - } - /* flush cached frames */ - flushStreams(decoderOutput); - return ret; -} - -int FfmpegDecoder::probeVideo( - unique_ptr params, - const std::string& filename, - bool isDecodeFile, - FfmpegAvioContext& ioctx, - DecoderOutput& decoderOutput) { - params_ = std::move(params); - return init(filename, isDecodeFile, ioctx, decoderOutput); -} - -bool FfmpegDecoder::initStreams() { - for (auto it = params_->formats.begin(); it != params_->formats.end(); ++it) { - AVMediaType mediaType; - if (!ffmpeg_util::mapMediaType(it->first, &mediaType)) { - LOG(ERROR) << "Unknown media type: " << it->first; - return false; - } - int streamIdx = - av_find_best_stream(formatCtx_, mediaType, -1, -1, nullptr, 0); - - if (streamIdx >= 0) { - VLOG(2) << "find stream index: " << streamIdx; - auto stream = createFfmpegStream( - it->first, - formatCtx_, - streamIdx, - it->second, - params_->seekFrameMargin); - - CHECK(stream); - if (stream->openCodecContext() < 0) { - LOG(ERROR) << "Cannot open codec. 
Stream index: " << streamIdx; - return false; - } - streams_.emplace(streamIdx, move(stream)); - } else { - VLOG(1) << "Cannot open find stream of type " << it->first; - } - } - // Seek frames in each stream - int ret = 0; - for (auto& stream : streams_) { - auto startPts = stream.second->getStartPts(); - VLOG(1) << "stream: " << stream.first << " startPts: " << startPts; - if (startPts > 0 && (ret = stream.second->seekFrame(startPts)) < 0) { - LOG(WARNING) << "seekFrame in stream fails"; - return false; - } - } - VLOG(1) << "initStreams succeeds"; - return true; -} - -bool FfmpegDecoder::isPtsExceedRange() { - bool exceed = true; - for (auto& stream : streams_) { - exceed = exceed && stream.second->isFramePtsExceedRange(); - } - return exceed; -} - -void FfmpegDecoder::flushStreams(DecoderOutput& decoderOutput) { - for (auto& stream : streams_) { - stream.second->flush(params_->getPtsOnly, decoderOutput); - } -} diff --git a/torchvision/csrc/cpu/video_reader/FfmpegDecoder.h b/torchvision/csrc/cpu/video_reader/FfmpegDecoder.h deleted file mode 100644 index a0a564a4214..00000000000 --- a/torchvision/csrc/cpu/video_reader/FfmpegDecoder.h +++ /dev/null @@ -1,127 +0,0 @@ -#pragma once - -#include -#include - -#include "FfmpegHeaders.h" -#include "FfmpegStream.h" -#include "Interface.h" - -#define VIO_BUFFER_SZ 81920 -#define AVPROBE_SIZE 8192 - -class DecoderParameters { - public: - std::unordered_map formats; - // av_seek_frame is imprecise so seek to a timestamp earlier by a margin - // The unit of margin is second - double seekFrameMargin{1.0}; - // When getPtsOnly is set to 1, we only get pts of each frame and don not - // output frame data. It will be much faster - int64_t getPtsOnly{0}; -}; - -class FfmpegAvioContext { - public: - FfmpegAvioContext(); - - int initAVIOContext(const uint8_t* buffer, int64_t size); - - ~FfmpegAvioContext(); - - int read(uint8_t* buf, int buf_size); - - static int readMemory(void* opaque, uint8_t* buf, int buf_size); - - int64_t seek(int64_t offset, int whence); - - static int64_t seekMemory(void* opaque, int64_t offset, int whence); - - AVIOContext* get_avio() { - return avioCtx_; - } - - private: - int workBuffersize_; - uint8_t* workBuffer_; - // for file mode - FILE* inputFile_; - // for memory mode - const uint8_t* inputBuffer_; - int inputBufferSize_; - int offset_ = 0; - - AVIOContext* avioCtx_{nullptr}; -}; - -class FfmpegDecoder { - public: - FfmpegDecoder() { - av_register_all(); - } - ~FfmpegDecoder() { - cleanUp(); - } - // return 0 on success - // return negative number on failure - int decodeFile( - std::unique_ptr params, - const std::string& filename, - DecoderOutput& decoderOutput); - // return 0 on success - // return negative number on failure - int decodeMemory( - std::unique_ptr params, - const uint8_t* buffer, - int64_t size, - DecoderOutput& decoderOutput); - // return 0 on success - // return negative number on failure - int probeFile( - std::unique_ptr params, - const std::string& filename, - DecoderOutput& decoderOutput); - // return 0 on success - // return negative number on failure - int probeMemory( - std::unique_ptr params, - const uint8_t* buffer, - int64_t size, - DecoderOutput& decoderOutput); - - void cleanUp(); - - private: - FfmpegStream* findStreamByIndex(int streamIndex) const; - - int init( - const std::string& filename, - bool isDecodeFile, - FfmpegAvioContext& ioctx, - DecoderOutput& decoderOutput); - // return 0 on success - // return negative number on failure - int decodeLoop( - std::unique_ptr params, - const 
std::string& filename, - bool isDecodeFile, - FfmpegAvioContext& ioctx, - DecoderOutput& decoderOutput); - - int probeVideo( - std::unique_ptr params, - const std::string& filename, - bool isDecodeFile, - FfmpegAvioContext& ioctx, - DecoderOutput& decoderOutput); - - bool initStreams(); - - void flushStreams(DecoderOutput& decoderOutput); - // whether in all streams, the pts of most recent frame exceeds range - bool isPtsExceedRange(); - - std::unordered_map> streams_; - AVFormatContext* formatCtx_{nullptr}; - std::unique_ptr params_{nullptr}; -}; diff --git a/torchvision/csrc/cpu/video_reader/FfmpegHeaders.h b/torchvision/csrc/cpu/video_reader/FfmpegHeaders.h deleted file mode 100644 index ff26aa30a8d..00000000000 --- a/torchvision/csrc/cpu/video_reader/FfmpegHeaders.h +++ /dev/null @@ -1,13 +0,0 @@ -#pragma once - -extern "C" { -#include -#include -#include -#include -#include -#include -#include -#include -#include -} diff --git a/torchvision/csrc/cpu/video_reader/FfmpegSampler.h b/torchvision/csrc/cpu/video_reader/FfmpegSampler.h deleted file mode 100644 index 3d00be3486f..00000000000 --- a/torchvision/csrc/cpu/video_reader/FfmpegSampler.h +++ /dev/null @@ -1,16 +0,0 @@ -#pragma once - -#include "FfmpegHeaders.h" -#include "Interface.h" - -/** - * Class sample data from AVFrame - */ -class FfmpegSampler { - public: - virtual ~FfmpegSampler() = default; - // return 0 on success and negative number on failure - virtual int init() = 0; - // sample from the given frame - virtual std::unique_ptr sample(const AVFrame* frame) = 0; -}; diff --git a/torchvision/csrc/cpu/video_reader/FfmpegStream.cpp b/torchvision/csrc/cpu/video_reader/FfmpegStream.cpp deleted file mode 100644 index b745170baf4..00000000000 --- a/torchvision/csrc/cpu/video_reader/FfmpegStream.cpp +++ /dev/null @@ -1,188 +0,0 @@ -#include "FfmpegStream.h" -#include "FfmpegUtil.h" - -using namespace std; - -// (TODO) Currently, disable the use of refCount -static int refCount = 0; - -FfmpegStream::FfmpegStream( - AVFormatContext* inputCtx, - int index, - enum AVMediaType avMediaType, - double seekFrameMargin) - : inputCtx_(inputCtx), - index_(index), - avMediaType_(avMediaType), - seekFrameMargin_(seekFrameMargin) {} - -FfmpegStream::~FfmpegStream() { - if (frame_) { - av_frame_free(&frame_); - } - avcodec_free_context(&codecCtx_); -} - -int FfmpegStream::openCodecContext() { - VLOG(2) << "stream start_time: " << inputCtx_->streams[index_]->start_time; - - auto typeString = av_get_media_type_string(avMediaType_); - AVStream* st = inputCtx_->streams[index_]; - auto codec_id = st->codecpar->codec_id; - VLOG(1) << "codec_id: " << codec_id; - AVCodec* codec = avcodec_find_decoder(codec_id); - if (!codec) { - LOG(ERROR) << "avcodec_find_decoder failed for codec_id: " << int(codec_id); - return AVERROR(EINVAL); - } - VLOG(1) << "Succeed to find decoder"; - - codecCtx_ = avcodec_alloc_context3(codec); - if (!codecCtx_) { - LOG(ERROR) << "avcodec_alloc_context3 fails"; - return AVERROR(ENOMEM); - } - - int ret; - /* Copy codec parameters from input stream to output codec context */ - if ((ret = avcodec_parameters_to_context(codecCtx_, st->codecpar)) < 0) { - LOG(ERROR) << "Failed to copy " << typeString - << " codec parameters to decoder context"; - return ret; - } - - AVDictionary* opts = nullptr; - av_dict_set(&opts, "refcounted_frames", refCount ? 
"1" : "0", 0); - - // after avcodec_open2, value of codecCtx_->time_base is NOT meaningful - // But inputCtx_->streams[index_]->time_base has meaningful values - if ((ret = avcodec_open2(codecCtx_, codec, &opts)) < 0) { - LOG(ERROR) << "avcodec_open2 failed. " << ffmpeg_util::getErrorDesc(ret); - return ret; - } - VLOG(1) << "Succeed to open codec"; - - frame_ = av_frame_alloc(); - return initFormat(); -} - -unique_ptr FfmpegStream::getFrameData(int getPtsOnly) { - if (!codecCtx_) { - LOG(ERROR) << "Codec is not initialized"; - return nullptr; - } - if (getPtsOnly) { - unique_ptr decodedFrame = make_unique(); - decodedFrame->pts_ = frame_->pts; - return decodedFrame; - } else { - unique_ptr decodedFrame = sampleFrameData(); - if (decodedFrame) { - decodedFrame->pts_ = frame_->pts; - } - return decodedFrame; - } -} - -void FfmpegStream::flush(int getPtsOnly, DecoderOutput& decoderOutput) { - VLOG(1) << "Media Type: " << getMediaType() << ", flush stream."; - // need to receive frames before entering draining mode - receiveAvailFrames(getPtsOnly, decoderOutput); - - VLOG(2) << "send nullptr packet"; - sendPacket(nullptr); - // receive remaining frames after entering draining mode - receiveAvailFrames(getPtsOnly, decoderOutput); - - avcodec_flush_buffers(codecCtx_); -} - -bool FfmpegStream::isFramePtsInRange() { - CHECK(frame_); - auto pts = frame_->pts; - auto startPts = this->getStartPts(); - auto endPts = this->getEndPts(); - VLOG(2) << "isPtsInRange. pts: " << pts << ", startPts: " << startPts - << ", endPts: " << endPts; - return (pts == AV_NOPTS_VALUE) || - (pts >= startPts && (endPts >= 0 ? pts <= endPts : true)); -} - -bool FfmpegStream::isFramePtsExceedRange() { - if (frame_) { - auto endPts = this->getEndPts(); - VLOG(2) << "isFramePtsExceedRange. last_pts_: " << last_pts_ - << ", endPts: " << endPts; - return endPts >= 0 ? last_pts_ >= endPts : false; - } else { - return true; - } -} - -// seek a frame -int FfmpegStream::seekFrame(int64_t seekPts) { - // translate margin from second to pts - int64_t margin = (int64_t)( - seekFrameMargin_ * (double)inputCtx_->streams[index_]->time_base.den / - (double)inputCtx_->streams[index_]->time_base.num); - int64_t real_seekPts = (seekPts - margin) > 0 ? (seekPts - margin) : 0; - VLOG(2) << "seek margin: " << margin; - VLOG(2) << "real seekPts: " << real_seekPts; - int ret = av_seek_frame( - inputCtx_, - index_, - (seekPts - margin) > 0 ? (seekPts - margin) : 0, - AVSEEK_FLAG_BACKWARD); - if (ret < 0) { - LOG(WARNING) << "av_seek_frame fails. Stream index: " << index_; - return ret; - } - return 0; -} - -// send/receive encoding and decoding API overview -// https://ffmpeg.org/doxygen/3.4/group__lavc__encdec.html -int FfmpegStream::sendPacket(const AVPacket* packet) { - return avcodec_send_packet(codecCtx_, packet); -} - -int FfmpegStream::receiveFrame() { - int ret = avcodec_receive_frame(codecCtx_, frame_); - if (ret >= 0) { - // succeed - frame_->pts = av_frame_get_best_effort_timestamp(frame_); - if (frame_->pts == AV_NOPTS_VALUE) { - // Trick: if we can not figure out pts, we just set it to be (last_pts + - // 1) - frame_->pts = last_pts_ + 1; - } - last_pts_ = frame_->pts; - - VLOG(2) << "avcodec_receive_frame succeed"; - } else if (ret == AVERROR(EAGAIN)) { - VLOG(2) << "avcodec_receive_frame fails and returns AVERROR(EAGAIN). "; - } else if (ret == AVERROR_EOF) { - // no more frame to read - VLOG(2) << "avcodec_receive_frame returns AVERROR_EOF"; - } else { - LOG(WARNING) << "avcodec_receive_frame failed. 
Error: " - << ffmpeg_util::getErrorDesc(ret); - } - return ret; -} - -void FfmpegStream::receiveAvailFrames( - int getPtsOnly, - DecoderOutput& decoderOutput) { - int result = 0; - while ((result = receiveFrame()) >= 0) { - unique_ptr decodedFrame = getFrameData(getPtsOnly); - - if (decodedFrame && - ((!getPtsOnly && decodedFrame->frameSize_ > 0) || getPtsOnly)) { - if (isFramePtsInRange()) { - decoderOutput.addMediaFrame(getMediaType(), std::move(decodedFrame)); - } - } // end-if - } // end-while -} diff --git a/torchvision/csrc/cpu/video_reader/FfmpegStream.h b/torchvision/csrc/cpu/video_reader/FfmpegStream.h deleted file mode 100644 index b66a36977ec..00000000000 --- a/torchvision/csrc/cpu/video_reader/FfmpegStream.h +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. -#pragma once - -#include -#include -#include -#include "FfmpegHeaders.h" -#include "Interface.h" - -/* -Class uses FFMPEG library to decode one media stream (audio or video). -*/ -class FfmpegStream { - public: - FfmpegStream( - AVFormatContext* inputCtx, - int index, - enum AVMediaType avMediaType, - double seekFrameMargin); - virtual ~FfmpegStream(); - - // returns 0 - on success or negative error - int openCodecContext(); - // returns stream index - int getIndex() const { - return index_; - } - // returns number decoded/sampled bytes - std::unique_ptr getFrameData(int getPtsOnly); - // flush the stream at the end of decoding. - // Return 0 on success and -1 when cache is drained - void flush(int getPtsOnly, DecoderOutput& decoderOutput); - // seek a frame - int seekFrame(int64_t ts); - // send an AVPacket - int sendPacket(const AVPacket* packet); - // receive AVFrame - int receiveFrame(); - // receive all available frames from the internal buffer - void receiveAvailFrames(int getPtsOnly, DecoderOutput& decoderOutput); - // return media type - virtual MediaType getMediaType() const = 0; - // return media format - virtual FormatUnion getMediaFormat() const = 0; - // return start presentation timestamp - virtual int64_t getStartPts() const = 0; - // return end presentation timestamp - virtual int64_t getEndPts() const = 0; - // is the pts of most recent frame within range? - bool isFramePtsInRange(); - // does the pts of most recent frame exceed range? 
- bool isFramePtsExceedRange(); - - protected: - virtual int initFormat() = 0; - // returns a decoded frame - virtual std::unique_ptr sampleFrameData() = 0; - - protected: - AVFormatContext* const inputCtx_; - const int index_; - enum AVMediaType avMediaType_; - - AVCodecContext* codecCtx_{nullptr}; - AVFrame* frame_{nullptr}; - // pts of last decoded frame - int64_t last_pts_{0}; - double seekFrameMargin_{1.0}; -}; diff --git a/torchvision/csrc/cpu/video_reader/FfmpegUtil.cpp b/torchvision/csrc/cpu/video_reader/FfmpegUtil.cpp deleted file mode 100644 index 9e804ee67c0..00000000000 --- a/torchvision/csrc/cpu/video_reader/FfmpegUtil.cpp +++ /dev/null @@ -1,111 +0,0 @@ -#include "FfmpegUtil.h" - -using namespace std; - -namespace ffmpeg_util { - -bool mapFfmpegType(AVMediaType media, MediaType* type) { - switch (media) { - case AVMEDIA_TYPE_VIDEO: - *type = MediaType::TYPE_VIDEO; - return true; - case AVMEDIA_TYPE_AUDIO: - *type = MediaType::TYPE_AUDIO; - return true; - default: - return false; - } -} - -bool mapMediaType(MediaType type, AVMediaType* media) { - switch (type) { - case MediaType::TYPE_VIDEO: - *media = AVMEDIA_TYPE_VIDEO; - return true; - case MediaType::TYPE_AUDIO: - *media = AVMEDIA_TYPE_AUDIO; - return true; - default: - return false; - } -} - -void setFormatDimensions( - int& destW, - int& destH, - int userW, - int userH, - int srcW, - int srcH, - int minDimension) { - // rounding rules - // int -> double -> round - // round up if fraction is >= 0.5 or round down if fraction is < 0.5 - // int result = double(value) + 0.5 - // here we rounding double to int according to the above rule - if (userW == 0 && userH == 0) { - if (minDimension > 0) { // #2 - if (srcW > srcH) { - // landscape - destH = minDimension; - destW = round(double(srcW * minDimension) / srcH); - } else { - // portrait - destW = minDimension; - destH = round(double(srcH * minDimension) / srcW); - } - } else { // #1 - destW = srcW; - destH = srcH; - } - } else if (userW != 0 && userH == 0) { // #3 - destW = userW; - destH = round(double(srcH * userW) / srcW); - } else if (userW == 0 && userH != 0) { // #4 - destW = round(double(srcW * userH) / srcH); - destH = userH; - } else { - // userW != 0 && userH != 0. 
#5 - destW = userW; - destH = userH; - } - // prevent zeros - destW = std::max(destW, 1); - destH = std::max(destH, 1); -} - -bool validateVideoFormat(const VideoFormat& f) { - /* - Valid parameters values for decoder - ___________________________________________________ - | W | H | minDimension | algorithm | - |_________________________________________________| - | 0 | 0 | 0 | original | - |_________________________________________________| - | 0 | 0 | >0 |scale to min dimension| - |_____|_____|____________________________________ | - | >0 | 0 | 0 | scale keeping W | - |_________________________________________________| - | 0 | >0 | 0 | scale keeping H | - |_________________________________________________| - | >0 | >0 | 0 | stretch/scale | - |_________________________________________________| - - */ - return (f.width == 0 && f.height == 0) || // #1 and #2 - (f.width != 0 && f.height != 0 && f.minDimension == 0) || // # 5 - (((f.width != 0 && f.height == 0) || // #3 and #4 - (f.width == 0 && f.height != 0)) && - f.minDimension == 0); -} - -string getErrorDesc(int errnum) { - array buffer; - if (av_strerror(errnum, buffer.data(), buffer.size()) < 0) { - return string("Unknown error code"); - } - buffer.back() = 0; - return string(buffer.data()); -} - -} // namespace ffmpeg_util diff --git a/torchvision/csrc/cpu/video_reader/FfmpegUtil.h b/torchvision/csrc/cpu/video_reader/FfmpegUtil.h deleted file mode 100644 index 9f42eb53c97..00000000000 --- a/torchvision/csrc/cpu/video_reader/FfmpegUtil.h +++ /dev/null @@ -1,27 +0,0 @@ -#pragma once - -#include -#include -#include "FfmpegHeaders.h" -#include "Interface.h" - -namespace ffmpeg_util { - -bool mapFfmpegType(AVMediaType media, enum MediaType* type); - -bool mapMediaType(MediaType type, enum AVMediaType* media); - -void setFormatDimensions( - int& destW, - int& destH, - int userW, - int userH, - int srcW, - int srcH, - int minDimension); - -bool validateVideoFormat(const VideoFormat& f); - -std::string getErrorDesc(int errnum); - -} // namespace ffmpeg_util diff --git a/torchvision/csrc/cpu/video_reader/FfmpegVideoSampler.cpp b/torchvision/csrc/cpu/video_reader/FfmpegVideoSampler.cpp deleted file mode 100644 index d87b3104dd5..00000000000 --- a/torchvision/csrc/cpu/video_reader/FfmpegVideoSampler.cpp +++ /dev/null @@ -1,90 +0,0 @@ -#include "FfmpegVideoSampler.h" -#include "FfmpegUtil.h" - -using namespace std; - -FfmpegVideoSampler::FfmpegVideoSampler( - const VideoFormat& in, - const VideoFormat& out, - int swsFlags) - : inFormat_(in), outFormat_(out), swsFlags_(swsFlags) {} - -FfmpegVideoSampler::~FfmpegVideoSampler() { - if (scaleContext_) { - sws_freeContext(scaleContext_); - scaleContext_ = nullptr; - } -} - -int FfmpegVideoSampler::init() { - VLOG(1) << "Input format: width " << inFormat_.width << ", height " - << inFormat_.height << ", format " << inFormat_.format - << ", minDimension " << inFormat_.minDimension; - VLOG(1) << "Scale format: width " << outFormat_.width << ", height " - << outFormat_.height << ", format " << outFormat_.format - << ", minDimension " << outFormat_.minDimension; - - scaleContext_ = sws_getContext( - inFormat_.width, - inFormat_.height, - (AVPixelFormat)inFormat_.format, - outFormat_.width, - outFormat_.height, - static_cast(outFormat_.format), - swsFlags_, - nullptr, - nullptr, - nullptr); - if (scaleContext_) { - return 0; - } else { - return -1; - } -} - -int32_t FfmpegVideoSampler::getImageBytes() const { - return av_image_get_buffer_size( - (AVPixelFormat)outFormat_.format, outFormat_.width, 
outFormat_.height, 1); -} - -// https://ffmpeg.org/doxygen/3.4/scaling_video_8c-example.html#a10 -unique_ptr FfmpegVideoSampler::sample(const AVFrame* frame) { - if (!frame) { - return nullptr; // no flush for videos - } - // scaled and cropped image - auto outImageSize = getImageBytes(); - AvDataPtr frameData(static_cast(av_malloc(outImageSize))); - - uint8_t* scalePlanes[4] = {nullptr}; - int scaleLines[4] = {0}; - - int result; - if ((result = av_image_fill_arrays( - scalePlanes, - scaleLines, - frameData.get(), - static_cast(outFormat_.format), - outFormat_.width, - outFormat_.height, - 1)) < 0) { - LOG(ERROR) << "av_image_fill_arrays failed, err: " - << ffmpeg_util::getErrorDesc(result); - return nullptr; - } - - if ((result = sws_scale( - scaleContext_, - frame->data, - frame->linesize, - 0, - inFormat_.height, - scalePlanes, - scaleLines)) < 0) { - LOG(ERROR) << "sws_scale failed, err: " - << ffmpeg_util::getErrorDesc(result); - return nullptr; - } - - return make_unique(std::move(frameData), outImageSize, 0); -} diff --git a/torchvision/csrc/cpu/video_reader/FfmpegVideoSampler.h b/torchvision/csrc/cpu/video_reader/FfmpegVideoSampler.h deleted file mode 100644 index 1fd6862f537..00000000000 --- a/torchvision/csrc/cpu/video_reader/FfmpegVideoSampler.h +++ /dev/null @@ -1,32 +0,0 @@ -#pragma once - -#include "FfmpegSampler.h" - -/** - * Class transcode video frames from one format into another - */ - -class FfmpegVideoSampler : public FfmpegSampler { - public: - explicit FfmpegVideoSampler( - const VideoFormat& in, - const VideoFormat& out, - int swsFlags = SWS_AREA); - ~FfmpegVideoSampler() override; - - int init() override; - - int32_t getImageBytes() const; - // returns number of bytes of the sampled data - std::unique_ptr sample(const AVFrame* frame) override; - - const VideoFormat& getInFormat() const { - return inFormat_; - } - - private: - VideoFormat inFormat_; - VideoFormat outFormat_; - int swsFlags_; - SwsContext* scaleContext_{nullptr}; -}; diff --git a/torchvision/csrc/cpu/video_reader/FfmpegVideoStream.cpp b/torchvision/csrc/cpu/video_reader/FfmpegVideoStream.cpp deleted file mode 100644 index 7a429249a71..00000000000 --- a/torchvision/csrc/cpu/video_reader/FfmpegVideoStream.cpp +++ /dev/null @@ -1,115 +0,0 @@ -#include "FfmpegVideoStream.h" -#include "FfmpegUtil.h" - -using namespace std; - -namespace { - -bool operator==(const VideoFormat& x, const AVFrame& y) { - return x.width == y.width && x.height == y.height && - x.format == static_cast(y.format); -} - -VideoFormat toVideoFormat(const AVFrame& frame) { - VideoFormat videoFormat; - videoFormat.width = frame.width; - videoFormat.height = frame.height; - videoFormat.format = static_cast(frame.format); - - return videoFormat; -} - -} // namespace - -FfmpegVideoStream::FfmpegVideoStream( - AVFormatContext* inputCtx, - int index, - enum AVMediaType avMediaType, - MediaFormat mediaFormat, - double seekFrameMargin) - : FfmpegStream(inputCtx, index, avMediaType, seekFrameMargin), - mediaFormat_(mediaFormat) {} - -FfmpegVideoStream::~FfmpegVideoStream() {} - -void FfmpegVideoStream::checkStreamDecodeParams() { - auto timeBase = getTimeBase(); - if (timeBase.first > 0) { - CHECK_EQ(timeBase.first, inputCtx_->streams[index_]->time_base.num); - CHECK_EQ(timeBase.second, inputCtx_->streams[index_]->time_base.den); - } -} - -void FfmpegVideoStream::updateStreamDecodeParams() { - auto timeBase = getTimeBase(); - if (timeBase.first == 0) { - mediaFormat_.format.video.timeBaseNum = - inputCtx_->streams[index_]->time_base.num; - 
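// Worked examples of the rescaling rules in
// ffmpeg_util::setFormatDimensions (see the table in FfmpegUtil.cpp
// above), assuming an illustrative 1920x1080 source:
//
//   userW=0,   userH=0,   minDimension=0   -> 1920x1080 (original)
//   userW=0,   userH=0,   minDimension=360 -> 640x360   (scale short side)
//   userW=960, userH=0,   minDimension=0   -> 960x540   (keep aspect, fix W)
//   userW=0,   userH=540, minDimension=0   -> 960x540   (keep aspect, fix H)
//   userW=640, userH=640, minDimension=0   -> 640x640   (stretch)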
mediaFormat_.format.video.timeBaseDen = - inputCtx_->streams[index_]->time_base.den; - } - mediaFormat_.format.video.duration = inputCtx_->streams[index_]->duration; -} - -int FfmpegVideoStream::initFormat() { - // set output format - VideoFormat& format = mediaFormat_.format.video; - if (!ffmpeg_util::validateVideoFormat(format)) { - LOG(ERROR) << "Invalid video format"; - return -1; - } - - format.fps = av_q2d( - av_guess_frame_rate(inputCtx_, inputCtx_->streams[index_], nullptr)); - - // keep aspect ratio - ffmpeg_util::setFormatDimensions( - format.width, - format.height, - format.width, - format.height, - codecCtx_->width, - codecCtx_->height, - format.minDimension); - - VLOG(1) << "After adjusting, video format" - << ", width: " << format.width << ", height: " << format.height - << ", format: " << format.format - << ", minDimension: " << format.minDimension; - - if (format.format == AV_PIX_FMT_NONE) { - format.format = codecCtx_->pix_fmt; - VLOG(1) << "Set pixel format: " << format.format; - } - - checkStreamDecodeParams(); - - updateStreamDecodeParams(); - - return format.width != 0 && format.height != 0 && - format.format != AV_PIX_FMT_NONE - ? 0 - : -1; -} - -unique_ptr FfmpegVideoStream::sampleFrameData() { - VideoFormat& format = mediaFormat_.format.video; - if (!sampler_ || !(sampler_->getInFormat() == *frame_)) { - VideoFormat newInFormat = toVideoFormat(*frame_); - sampler_ = make_unique(newInFormat, format, SWS_AREA); - VLOG(1) << "Set input video sampler format" - << ", width: " << newInFormat.width - << ", height: " << newInFormat.height - << ", format: " << newInFormat.format - << " : output video sampler format" - << ", width: " << format.width << ", height: " << format.height - << ", format: " << format.format - << ", minDimension: " << format.minDimension; - int ret = sampler_->init(); - if (ret < 0) { - VLOG(1) << "Fail to initialize video sampler"; - return nullptr; - } - } - return sampler_->sample(frame_); -} diff --git a/torchvision/csrc/cpu/video_reader/FfmpegVideoStream.h b/torchvision/csrc/cpu/video_reader/FfmpegVideoStream.h deleted file mode 100644 index 9bfbc9f665b..00000000000 --- a/torchvision/csrc/cpu/video_reader/FfmpegVideoStream.h +++ /dev/null @@ -1,54 +0,0 @@ -#pragma once - -#include -#include "FfmpegStream.h" -#include "FfmpegVideoSampler.h" - -/** - * Class uses FFMPEG library to decode one video stream. 
- */ -class FfmpegVideoStream : public FfmpegStream { - public: - explicit FfmpegVideoStream( - AVFormatContext* inputCtx, - int index, - enum AVMediaType avMediaType, - MediaFormat mediaFormat, - double seekFrameMargin); - - ~FfmpegVideoStream() override; - - // FfmpegStream overrides - MediaType getMediaType() const override { - return MediaType::TYPE_VIDEO; - } - - FormatUnion getMediaFormat() const override { - return mediaFormat_.format; - } - - int64_t getStartPts() const override { - return mediaFormat_.format.video.startPts; - } - int64_t getEndPts() const override { - return mediaFormat_.format.video.endPts; - } - // return numerator and denominator of time base - std::pair getTimeBase() const { - return std::make_pair( - mediaFormat_.format.video.timeBaseNum, - mediaFormat_.format.video.timeBaseDen); - } - - void checkStreamDecodeParams(); - - void updateStreamDecodeParams(); - - protected: - int initFormat() override; - std::unique_ptr sampleFrameData() override; - - private: - MediaFormat mediaFormat_; - std::unique_ptr sampler_{nullptr}; -}; diff --git a/torchvision/csrc/cpu/video_reader/Interface.cpp b/torchvision/csrc/cpu/video_reader/Interface.cpp deleted file mode 100644 index 0ec9f155821..00000000000 --- a/torchvision/csrc/cpu/video_reader/Interface.cpp +++ /dev/null @@ -1,22 +0,0 @@ -#include "Interface.h" - -void DecoderOutput::initMediaType(MediaType mediaType, FormatUnion format) { - MediaData mediaData(format); - media_data_.emplace(mediaType, std::move(mediaData)); -} - -void DecoderOutput::addMediaFrame( - MediaType mediaType, - std::unique_ptr frame) { - if (media_data_.find(mediaType) != media_data_.end()) { - VLOG(1) << "media type: " << mediaType - << " add frame with pts: " << frame->pts_; - media_data_[mediaType].frames_.push_back(std::move(frame)); - } else { - VLOG(1) << "media type: " << mediaType << " not found. Skip the frame."; - } -} - -void DecoderOutput::clear() { - media_data_.clear(); -} diff --git a/torchvision/csrc/cpu/video_reader/Interface.h b/torchvision/csrc/cpu/video_reader/Interface.h deleted file mode 100644 index e137008ce7b..00000000000 --- a/torchvision/csrc/cpu/video_reader/Interface.h +++ /dev/null @@ -1,127 +0,0 @@ -#pragma once - -#include -#include -#include -#include - -extern "C" { - -#include -#include -void av_free(void* ptr); -} - -struct avDeleter { - void operator()(uint8_t* p) const { - av_free(p); - } -}; - -const AVPixelFormat defaultVideoPixelFormat = AV_PIX_FMT_RGB24; -const AVSampleFormat defaultAudioSampleFormat = AV_SAMPLE_FMT_FLT; - -using AvDataPtr = std::unique_ptr; - -enum MediaType : uint32_t { - TYPE_VIDEO = 1, - TYPE_AUDIO = 2, -}; - -struct EnumClassHash { - template - uint32_t operator()(T t) const { - return static_cast(t); - } -}; - -struct VideoFormat { - // fields are initialized for the auto detection - // caller can specify some/all of field values if specific output is desirable - - int width{0}; // width in pixels - int height{0}; // height in pixels - int minDimension{0}; // choose min dimension and rescale accordingly - // Output image pixel format. 
data type AVPixelFormat - AVPixelFormat format{defaultVideoPixelFormat}; // type AVPixelFormat - int64_t startPts{0}, endPts{0}; // Start and end presentation timestamp - int timeBaseNum{0}; - int timeBaseDen{1}; // numerator and denominator of time base - float fps{0.0}; - int64_t duration{0}; // duration of the stream, in stream time base -}; - -struct AudioFormat { - // fields are initialized for the auto detection - // caller can specify some/all of field values if specific output is desirable - - int samples{0}; // number samples per second (frequency) - int channels{0}; // number of channels - AVSampleFormat format{defaultAudioSampleFormat}; // type AVSampleFormat - int64_t startPts{0}, endPts{0}; // Start and end presentation timestamp - int timeBaseNum{0}; - int timeBaseDen{1}; // numerator and denominator of time base - int64_t duration{0}; // duration of the stream, in stream time base -}; - -union FormatUnion { - FormatUnion() {} - VideoFormat video; - AudioFormat audio; -}; - -struct MediaFormat { - MediaFormat() {} - - MediaFormat(const MediaFormat& mediaFormat) : type(mediaFormat.type) { - if (type == MediaType::TYPE_VIDEO) { - format.video = mediaFormat.format.video; - } else if (type == MediaType::TYPE_AUDIO) { - format.audio = mediaFormat.format.audio; - } - } - - MediaFormat(MediaType mediaType) : type(mediaType) { - if (mediaType == MediaType::TYPE_VIDEO) { - format.video = VideoFormat(); - } else if (mediaType == MediaType::TYPE_AUDIO) { - format.audio = AudioFormat(); - } - } - // media type - MediaType type; - // format data - FormatUnion format; -}; - -class DecodedFrame { - public: - explicit DecodedFrame() : frame_(nullptr), frameSize_(0), pts_(0) {} - explicit DecodedFrame(AvDataPtr frame, int frameSize, int64_t pts) - : frame_(std::move(frame)), frameSize_(frameSize), pts_(pts) {} - AvDataPtr frame_{nullptr}; - int frameSize_{0}; - int64_t pts_{0}; -}; - -struct MediaData { - MediaData() {} - MediaData(FormatUnion format) : format_(format) {} - FormatUnion format_; - std::vector> frames_; -}; - -class DecoderOutput { - public: - explicit DecoderOutput() {} - - ~DecoderOutput() {} - - void initMediaType(MediaType mediaType, FormatUnion format); - - void addMediaFrame(MediaType mediaType, std::unique_ptr frame); - - void clear(); - - std::unordered_map media_data_; -}; diff --git a/torchvision/csrc/cpu/video_reader/VideoReader.cpp b/torchvision/csrc/cpu/video_reader/VideoReader.cpp deleted file mode 100644 index dfe7f46bf39..00000000000 --- a/torchvision/csrc/cpu/video_reader/VideoReader.cpp +++ /dev/null @@ -1,500 +0,0 @@ -#include "VideoReader.h" -#include -#include -#include -#include -#include "FfmpegDecoder.h" -#include "FfmpegHeaders.h" -#include "util.h" - -using namespace std; - -// If we are in a Windows environment, we need to define -// initialization functions for the _custom_ops extension -#ifdef _WIN32 -#if PY_MAJOR_VERSION < 3 -PyMODINIT_FUNC init_video_reader(void) { - // No need to do anything. - return NULL; -} -#else -PyMODINIT_FUNC PyInit_video_reader(void) { - // No need to do anything. 
- return NULL; -} -#endif -#endif - -namespace video_reader { - -class UnknownPixelFormatException : public exception { - const char* what() const throw() override { - return "Unknown pixel format"; - } -}; - -int getChannels(AVPixelFormat format) { - int numChannels = 0; - switch (format) { - case AV_PIX_FMT_BGR24: - case AV_PIX_FMT_RGB24: - numChannels = 3; - break; - default: - LOG(ERROR) << "Unknown format: " << format; - throw UnknownPixelFormatException(); - } - return numChannels; -} - -void fillVideoTensor( - std::vector>& frames, - torch::Tensor& videoFrame, - torch::Tensor& videoFramePts) { - int frameSize = 0; - if (videoFrame.numel() > 0) { - frameSize = videoFrame.numel() / frames.size(); - } - - int frameCount = 0; - - uint8_t* videoFrameData = - videoFrame.numel() > 0 ? videoFrame.data_ptr() : nullptr; - int64_t* videoFramePtsData = videoFramePts.data_ptr(); - - for (size_t i = 0; i < frames.size(); ++i) { - const auto& frame = frames[i]; - if (videoFrameData) { - memcpy( - videoFrameData + (size_t)(frameCount++) * (size_t)frameSize, - frame->frame_.get(), - frameSize * sizeof(uint8_t)); - } - videoFramePtsData[i] = frame->pts_; - } -} - -void getVideoMeta( - DecoderOutput& decoderOutput, - int& numFrames, - int& height, - int& width, - int& numChannels) { - auto& videoFrames = decoderOutput.media_data_[TYPE_VIDEO].frames_; - numFrames = videoFrames.size(); - - FormatUnion& videoFormat = decoderOutput.media_data_[TYPE_VIDEO].format_; - height = videoFormat.video.height; - width = videoFormat.video.width; - numChannels = getChannels(videoFormat.video.format); -} - -void fillAudioTensor( - std::vector>& frames, - torch::Tensor& audioFrame, - torch::Tensor& audioFramePts) { - if (frames.size() == 0) { - return; - } - - float* audioFrameData = - audioFrame.numel() > 0 ? 
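// fillVideoTensor above relies on every decoded frame having the same byte
// size, so the per-frame stride is derived as numel / frames.size(). For
// an illustrative 10-frame RGB24 clip decoded at 640x360:
//
//   videoFrame    : uint8 tensor [10, 360, 640, 3]
//   frameSize     : 360 * 640 * 3 = 691200 bytes per frame
//   videoFramePts : int64 tensor [10], one pts per frame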
audioFrame.data_ptr() : nullptr; - CHECK_EQ(audioFramePts.size(0), frames.size()); - int64_t* audioFramePtsData = audioFramePts.data_ptr(); - - int bytesPerSample = av_get_bytes_per_sample(defaultAudioSampleFormat); - - int64_t frameDataOffset = 0; - for (size_t i = 0; i < frames.size(); ++i) { - audioFramePtsData[i] = frames[i]->pts_; - if (audioFrameData) { - memcpy( - audioFrameData + frameDataOffset, - frames[i]->frame_.get(), - frames[i]->frameSize_); - frameDataOffset += (frames[i]->frameSize_ / bytesPerSample); - } - } -} - -void getAudioMeta( - DecoderOutput& decoderOutput, - int64_t& numSamples, - int64_t& channels, - int64_t& numFrames) { - FormatUnion& audioFormat = decoderOutput.media_data_[TYPE_AUDIO].format_; - - channels = audioFormat.audio.channels; - CHECK_EQ(audioFormat.audio.format, AV_SAMPLE_FMT_FLT); - int bytesPerSample = av_get_bytes_per_sample( - static_cast(audioFormat.audio.format)); - - // auto& audioFrames = decoderOutput.media_frames_[TYPE_AUDIO]; - auto& audioFrames = decoderOutput.media_data_[TYPE_AUDIO].frames_; - numFrames = audioFrames.size(); - int64_t frameSizeTotal = 0; - for (auto const& decodedFrame : audioFrames) { - frameSizeTotal += static_cast(decodedFrame->frameSize_); - } - VLOG(2) << "numFrames: " << numFrames; - VLOG(2) << "frameSizeTotal: " << frameSizeTotal; - VLOG(2) << "channels: " << channels; - VLOG(2) << "bytesPerSample: " << bytesPerSample; - CHECK_EQ(frameSizeTotal % (channels * bytesPerSample), 0); - numSamples = frameSizeTotal / (channels * bytesPerSample); -} - -torch::List readVideo( - bool isReadFile, - const torch::Tensor& input_video, - std::string videoPath, - double seekFrameMargin, - int64_t getPtsOnly, - int64_t readVideoStream, - int64_t width, - int64_t height, - int64_t minDimension, - int64_t videoStartPts, - int64_t videoEndPts, - int64_t videoTimeBaseNum, - int64_t videoTimeBaseDen, - int64_t readAudioStream, - int64_t audioSamples, - int64_t audioChannels, - int64_t audioStartPts, - int64_t audioEndPts, - int64_t audioTimeBaseNum, - int64_t audioTimeBaseDen) { - unique_ptr params = util::getDecoderParams( - seekFrameMargin, - getPtsOnly, - readVideoStream, - width, - height, - minDimension, - videoStartPts, - videoEndPts, - videoTimeBaseNum, - videoTimeBaseDen, - readAudioStream, - audioSamples, - audioChannels, - audioStartPts, - audioEndPts, - audioTimeBaseNum, - audioTimeBaseDen); - - FfmpegDecoder decoder; - DecoderOutput decoderOutput; - - if (isReadFile) { - decoder.decodeFile(std::move(params), videoPath, decoderOutput); - } else { - decoder.decodeMemory( - std::move(params), - input_video.data_ptr(), - input_video.size(0), - decoderOutput); - } - - // video section - torch::Tensor videoFrame = torch::zeros({0}, torch::kByte); - torch::Tensor videoFramePts = torch::zeros({0}, torch::kLong); - torch::Tensor videoTimeBase = torch::zeros({0}, torch::kInt); - torch::Tensor videoFps = torch::zeros({0}, torch::kFloat); - torch::Tensor videoDuration = torch::zeros({0}, torch::kLong); - - if (readVideoStream == 1) { - auto it = decoderOutput.media_data_.find(TYPE_VIDEO); - if (it != decoderOutput.media_data_.end()) { - int numVideoFrames, outHeight, outWidth, numChannels; - getVideoMeta( - decoderOutput, numVideoFrames, outHeight, outWidth, numChannels); - - if (getPtsOnly == 0) { - videoFrame = torch::zeros( - {numVideoFrames, outHeight, outWidth, numChannels}, torch::kByte); - } - - videoFramePts = torch::zeros({numVideoFrames}, torch::kLong); - - fillVideoTensor( - decoderOutput.media_data_[TYPE_VIDEO].frames_, - 
videoFrame, - videoFramePts); - - videoTimeBase = torch::zeros({2}, torch::kInt); - int* videoTimeBaseData = videoTimeBase.data_ptr(); - videoTimeBaseData[0] = it->second.format_.video.timeBaseNum; - videoTimeBaseData[1] = it->second.format_.video.timeBaseDen; - - videoFps = torch::zeros({1}, torch::kFloat); - float* videoFpsData = videoFps.data_ptr(); - videoFpsData[0] = it->second.format_.video.fps; - - videoDuration = torch::zeros({1}, torch::kLong); - int64_t* videoDurationData = videoDuration.data_ptr(); - videoDurationData[0] = it->second.format_.video.duration; - } else { - VLOG(1) << "Miss video stream"; - } - } - - // audio section - torch::Tensor audioFrame = torch::zeros({0}, torch::kFloat); - torch::Tensor audioFramePts = torch::zeros({0}, torch::kLong); - torch::Tensor audioTimeBase = torch::zeros({0}, torch::kInt); - torch::Tensor audioSampleRate = torch::zeros({0}, torch::kInt); - torch::Tensor audioDuration = torch::zeros({0}, torch::kLong); - if (readAudioStream == 1) { - auto it = decoderOutput.media_data_.find(TYPE_AUDIO); - if (it != decoderOutput.media_data_.end()) { - VLOG(1) << "Find audio stream"; - int64_t numAudioSamples = 0, outAudioChannels = 0, numAudioFrames = 0; - getAudioMeta( - decoderOutput, numAudioSamples, outAudioChannels, numAudioFrames); - VLOG(2) << "numAudioSamples: " << numAudioSamples; - VLOG(2) << "outAudioChannels: " << outAudioChannels; - VLOG(2) << "numAudioFrames: " << numAudioFrames; - - if (getPtsOnly == 0) { - audioFrame = - torch::zeros({numAudioSamples, outAudioChannels}, torch::kFloat); - } - audioFramePts = torch::zeros({numAudioFrames}, torch::kLong); - fillAudioTensor( - decoderOutput.media_data_[TYPE_AUDIO].frames_, - audioFrame, - audioFramePts); - - audioTimeBase = torch::zeros({2}, torch::kInt); - int* audioTimeBaseData = audioTimeBase.data_ptr(); - audioTimeBaseData[0] = it->second.format_.audio.timeBaseNum; - audioTimeBaseData[1] = it->second.format_.audio.timeBaseDen; - - audioSampleRate = torch::zeros({1}, torch::kInt); - int* audioSampleRateData = audioSampleRate.data_ptr(); - audioSampleRateData[0] = it->second.format_.audio.samples; - - audioDuration = torch::zeros({1}, torch::kLong); - int64_t* audioDurationData = audioDuration.data_ptr(); - audioDurationData[0] = it->second.format_.audio.duration; - } else { - VLOG(1) << "Miss audio stream"; - } - } - - torch::List result; - result.push_back(std::move(videoFrame)); - result.push_back(std::move(videoFramePts)); - result.push_back(std::move(videoTimeBase)); - result.push_back(std::move(videoFps)); - result.push_back(std::move(videoDuration)); - result.push_back(std::move(audioFrame)); - result.push_back(std::move(audioFramePts)); - result.push_back(std::move(audioTimeBase)); - result.push_back(std::move(audioSampleRate)); - result.push_back(std::move(audioDuration)); - - return result; -} - -torch::List readVideoFromMemory( - torch::Tensor input_video, - double seekFrameMargin, - int64_t getPtsOnly, - int64_t readVideoStream, - int64_t width, - int64_t height, - int64_t minDimension, - int64_t videoStartPts, - int64_t videoEndPts, - int64_t videoTimeBaseNum, - int64_t videoTimeBaseDen, - int64_t readAudioStream, - int64_t audioSamples, - int64_t audioChannels, - int64_t audioStartPts, - int64_t audioEndPts, - int64_t audioTimeBaseNum, - int64_t audioTimeBaseDen) { - return readVideo( - false, - input_video, - "", // videoPath - seekFrameMargin, - getPtsOnly, - readVideoStream, - width, - height, - minDimension, - videoStartPts, - videoEndPts, - videoTimeBaseNum, - 
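// The 10-element list assembled above is positional; callers unpack it in
// exactly this order:
//
//   [0] videoFrame      uint8 [N, H, W, C]
//   [1] videoFramePts   int64 [N]
//   [2] videoTimeBase   int32 [2] (num, den)
//   [3] videoFps        float [1]
//   [4] videoDuration   int64 [1]
//   [5] audioFrame      float [S, C]
//   [6] audioFramePts   int64 [M]
//   [7] audioTimeBase   int32 [2] (num, den)
//   [8] audioSampleRate int32 [1]
//   [9] audioDuration   int64 [1]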
videoTimeBaseDen, - readAudioStream, - audioSamples, - audioChannels, - audioStartPts, - audioEndPts, - audioTimeBaseNum, - audioTimeBaseDen); -} - -torch::List readVideoFromFile( - std::string videoPath, - double seekFrameMargin, - int64_t getPtsOnly, - int64_t readVideoStream, - int64_t width, - int64_t height, - int64_t minDimension, - int64_t videoStartPts, - int64_t videoEndPts, - int64_t videoTimeBaseNum, - int64_t videoTimeBaseDen, - int64_t readAudioStream, - int64_t audioSamples, - int64_t audioChannels, - int64_t audioStartPts, - int64_t audioEndPts, - int64_t audioTimeBaseNum, - int64_t audioTimeBaseDen) { - torch::Tensor dummy_input_video = torch::ones({0}); - return readVideo( - true, - dummy_input_video, - videoPath, - seekFrameMargin, - getPtsOnly, - readVideoStream, - width, - height, - minDimension, - videoStartPts, - videoEndPts, - videoTimeBaseNum, - videoTimeBaseDen, - readAudioStream, - audioSamples, - audioChannels, - audioStartPts, - audioEndPts, - audioTimeBaseNum, - audioTimeBaseDen); -} - -torch::List probeVideo( - bool isReadFile, - const torch::Tensor& input_video, - std::string videoPath) { - unique_ptr params = util::getDecoderParams( - 0, // seekFrameMargin - 0, // getPtsOnly - 1, // readVideoStream - 0, // width - 0, // height - 0, // minDimension - 0, // videoStartPts - 0, // videoEndPts - 0, // videoTimeBaseNum - 1, // videoTimeBaseDen - 1, // readAudioStream - 0, // audioSamples - 0, // audioChannels - 0, // audioStartPts - 0, // audioEndPts - 0, // audioTimeBaseNum - 1 // audioTimeBaseDen - ); - - FfmpegDecoder decoder; - DecoderOutput decoderOutput; - if (isReadFile) { - decoder.probeFile(std::move(params), videoPath, decoderOutput); - } else { - decoder.probeMemory( - std::move(params), - input_video.data_ptr(), - input_video.size(0), - decoderOutput); - } - // video section - torch::Tensor videoTimeBase = torch::zeros({0}, torch::kInt); - torch::Tensor videoFps = torch::zeros({0}, torch::kFloat); - torch::Tensor videoDuration = torch::zeros({0}, torch::kLong); - - auto it = decoderOutput.media_data_.find(TYPE_VIDEO); - if (it != decoderOutput.media_data_.end()) { - VLOG(1) << "Find video stream"; - videoTimeBase = torch::zeros({2}, torch::kInt); - int* videoTimeBaseData = videoTimeBase.data_ptr(); - videoTimeBaseData[0] = it->second.format_.video.timeBaseNum; - videoTimeBaseData[1] = it->second.format_.video.timeBaseDen; - - videoFps = torch::zeros({1}, torch::kFloat); - float* videoFpsData = videoFps.data_ptr(); - videoFpsData[0] = it->second.format_.video.fps; - - videoDuration = torch::zeros({1}, torch::kLong); - int64_t* videoDurationData = videoDuration.data_ptr(); - videoDurationData[0] = it->second.format_.video.duration; - } else { - VLOG(1) << "Miss video stream"; - } - - // audio section - torch::Tensor audioTimeBase = torch::zeros({0}, torch::kInt); - torch::Tensor audioSampleRate = torch::zeros({0}, torch::kInt); - torch::Tensor audioDuration = torch::zeros({0}, torch::kLong); - - it = decoderOutput.media_data_.find(TYPE_AUDIO); - if (it != decoderOutput.media_data_.end()) { - VLOG(1) << "Find audio stream"; - audioTimeBase = torch::zeros({2}, torch::kInt); - int* audioTimeBaseData = audioTimeBase.data_ptr(); - audioTimeBaseData[0] = it->second.format_.audio.timeBaseNum; - audioTimeBaseData[1] = it->second.format_.audio.timeBaseDen; - - audioSampleRate = torch::zeros({1}, torch::kInt); - int* audioSampleRateData = audioSampleRate.data_ptr(); - audioSampleRateData[0] = it->second.format_.audio.samples; - - audioDuration = torch::zeros({1}, 
torch::kLong); - int64_t* audioDurationData = audioDuration.data_ptr(); - audioDurationData[0] = it->second.format_.audio.duration; - } else { - VLOG(1) << "Missing audio stream"; - } - - torch::List result; - result.push_back(std::move(videoTimeBase)); - result.push_back(std::move(videoFps)); - result.push_back(std::move(videoDuration)); - result.push_back(std::move(audioTimeBase)); - result.push_back(std::move(audioSampleRate)); - result.push_back(std::move(audioDuration)); - - return result; -} - -torch::List probeVideoFromMemory(torch::Tensor input_video) { - return probeVideo(false, input_video, ""); -} - -torch::List probeVideoFromFile(std::string videoPath) { - torch::Tensor dummy_input_video = torch::ones({0}); - return probeVideo(true, dummy_input_video, videoPath); -} - -} // namespace video_reader - -static auto registry = torch::RegisterOperators() - .op("video_reader::read_video_from_memory", - &video_reader::readVideoFromMemory) - .op("video_reader::read_video_from_file", - &video_reader::readVideoFromFile) - .op("video_reader::probe_video_from_memory", - &video_reader::probeVideoFromMemory) - .op("video_reader::probe_video_from_file", - &video_reader::probeVideoFromFile); diff --git a/torchvision/csrc/cpu/video_reader/VideoReader.h b/torchvision/csrc/cpu/video_reader/VideoReader.h deleted file mode 100644 index efc2e4709a6..00000000000 --- a/torchvision/csrc/cpu/video_reader/VideoReader.h +++ /dev/null @@ -1,99 +0,0 @@ -#pragma once - -#include - -// Interface for Python - -/* - return: - videoFrame: tensor (N, H, W, C) kByte - videoFramePts: tensor (N) kLong - videoTimeBase: tensor (2) kInt - videoFps: tensor (1) kFloat - audioFrame: tensor (N, C) kFloat - audioFramePts: tensor (N) kLong - audioTimeBase: tensor (2) kInt - audioSampleRate: tensor (1) kInt -*/ -torch::List readVideoFromMemory( - // 1D tensor of data type uint8, storing the compressed video data - torch::Tensor input_video, - // seeking a frame in the video/audio stream is imprecise, so seek to a - // timestamp earlier by a margin. The unit of the margin is seconds - double seekFrameMargin, - // If only pts is needed and video/audio frames are not needed, set it - // to 1 - int64_t getPtsOnly, - // bool variable. Set it to 1 if video stream should be read. Otherwise, set - // it to 0 - int64_t readVideoStream, - /* - Valid parameter values for rescaling video frames - ___________________________________________________ - | width | height | min_dimension | algorithm | - |_________________________________________________| - | 0 | 0 | 0 | original | - |_________________________________________________| - | 0 | 0 | >0 |scale to min dimension| - |_________________________________________________| - | >0 | 0 | 0 | scale keeping W | - |_________________________________________________| - | 0 | >0 | 0 | scale keeping H | - |_________________________________________________| - | >0 | >0 | 0 | stretch/scale | - |_________________________________________________| - */ - int64_t width, - int64_t height, - int64_t minDimension, - // video frames with pts in [videoStartPts, videoEndPts] will be decoded - // For decoding all video frames, use [0, -1] - int64_t videoStartPts, - int64_t videoEndPts, - // numerator and denominator of time base of video stream. - // For decoding all video frames, supply dummy 0 (numerator) and 1 - // (denominator). For decoding localized video frames, need to supply - // them which will be checked during decoding - int64_t videoTimeBaseNum, - int64_t videoTimeBaseDen, - // bool variable.
Set it to 1 if audio stream should be read. Otherwise, set - // it to 0 - int64_t readAudioStream, - // audio stream sampling rate. - // If not resampling audio waveform, supply 0 - // Otherwise, supply a positive integer. - int64_t audioSamples, - // audio stream channels - // Supply 0 to use the same number of channels as in the original audio - // stream - int64_t audioChannels, - // audio frames with pts in [audioStartPts, audioEndPts] will be decoded - // For decoding all audio frames, use [0, -1] - int64_t audioStartPts, - int64_t audioEndPts, - // numerator and denominator of time base of audio stream. - // For decoding all audio frames, supply dummy 0 (numerator) and 1 - // (denominator). For decoding localized audio frames, need to supply - // them which will be checked during decoding - int64_t audioTimeBaseNum, - int64_t audioTimeBaseDen); - -torch::List readVideoFromFile( - std::string videoPath, - double seekFrameMargin, - int64_t getPtsOnly, - int64_t readVideoStream, - int64_t width, - int64_t height, - int64_t minDimension, - int64_t videoStartPts, - int64_t videoEndPts, - int64_t videoTimeBaseNum, - int64_t videoTimeBaseDen, - int64_t readAudioStream, - int64_t audioSamples, - int64_t audioChannels, - int64_t audioStartPts, - int64_t audioEndPts, - int64_t audioTimeBaseNum, - int64_t audioTimeBaseDen); diff --git a/torchvision/csrc/cpu/video_reader/util.cpp b/torchvision/csrc/cpu/video_reader/util.cpp deleted file mode 100644 index ae3c3df0f0a..00000000000 --- a/torchvision/csrc/cpu/video_reader/util.cpp +++ /dev/null @@ -1,60 +0,0 @@ -#include "util.h" - -using namespace std; - -namespace util { - -unique_ptr getDecoderParams( - double seekFrameMargin, - int64_t getPtsOnly, - int64_t readVideoStream, - int videoWidth, - int videoHeight, - int videoMinDimension, - int64_t videoStartPts, - int64_t videoEndPts, - int videoTimeBaseNum, - int videoTimeBaseDen, - int64_t readAudioStream, - int audioSamples, - int audioChannels, - int64_t audioStartPts, - int64_t audioEndPts, - int audioTimeBaseNum, - int audioTimeBaseDen) { - unique_ptr params = make_unique(); - - if (readVideoStream == 1) { - params->formats.emplace( - MediaType::TYPE_VIDEO, MediaFormat(MediaType::TYPE_VIDEO)); - MediaFormat& videoFormat = params->formats[MediaType::TYPE_VIDEO]; - - videoFormat.format.video.width = videoWidth; - videoFormat.format.video.height = videoHeight; - videoFormat.format.video.minDimension = videoMinDimension; - videoFormat.format.video.startPts = videoStartPts; - videoFormat.format.video.endPts = videoEndPts; - videoFormat.format.video.timeBaseNum = videoTimeBaseNum; - videoFormat.format.video.timeBaseDen = videoTimeBaseDen; - } - - if (readAudioStream == 1) { - params->formats.emplace( - MediaType::TYPE_AUDIO, MediaFormat(MediaType::TYPE_AUDIO)); - MediaFormat& audioFormat = params->formats[MediaType::TYPE_AUDIO]; - - audioFormat.format.audio.samples = audioSamples; - audioFormat.format.audio.channels = audioChannels; - audioFormat.format.audio.startPts = audioStartPts; - audioFormat.format.audio.endPts = audioEndPts; - audioFormat.format.audio.timeBaseNum = audioTimeBaseNum; - audioFormat.format.audio.timeBaseDen = audioTimeBaseDen; - } - - params->seekFrameMargin = seekFrameMargin; - params->getPtsOnly = getPtsOnly; - - return params; -} - -} // namespace util diff --git a/torchvision/csrc/cpu/video_reader/util.h b/torchvision/csrc/cpu/video_reader/util.h deleted file mode 100644 index 6b5fd55388b..00000000000 --- a/torchvision/csrc/cpu/video_reader/util.h +++ /dev/null @@ -1,26 
+0,0 @@ -#pragma once -#include -#include "FfmpegDecoder.h" - -namespace util { - -std::unique_ptr getDecoderParams( - double seekFrameMargin, - int64_t getPtsOnly, - int64_t readVideoStream, - int videoWidth, - int videoHeight, - int videoMinDimension, - int64_t videoStartPts, - int64_t videoEndPts, - int videoTimeBaseNum, - int videoTimeBaseDen, - int64_t readAudioStream, - int audioSamples, - int audioChannels, - int64_t audioStartPts, - int64_t audioEndPts, - int audioTimeBaseNum, - int audioTimeBaseDen); - -} // namespace util diff --git a/torchvision/csrc/cpu/vision_cpu.h b/torchvision/csrc/cpu/vision_cpu.h deleted file mode 100644 index d84b172ba49..00000000000 --- a/torchvision/csrc/cpu/vision_cpu.h +++ /dev/null @@ -1,86 +0,0 @@ -#pragma once -#include - -std::tuple ROIPool_forward_cpu( - const at::Tensor& input, - const at::Tensor& rois, - const float spatial_scale, - const int pooled_height, - const int pooled_width); - -at::Tensor ROIPool_backward_cpu( - const at::Tensor& grad, - const at::Tensor& rois, - const at::Tensor& argmax, - const float spatial_scale, - const int pooled_height, - const int pooled_width, - const int batch_size, - const int channels, - const int height, - const int width); - -at::Tensor ROIAlign_forward_cpu( - const at::Tensor& input, - const at::Tensor& rois, - const float spatial_scale, - const int pooled_height, - const int pooled_width, - const int sampling_ratio); - -at::Tensor ROIAlign_backward_cpu( - const at::Tensor& grad, - const at::Tensor& rois, - const float spatial_scale, - const int pooled_height, - const int pooled_width, - const int batch_size, - const int channels, - const int height, - const int width, - const int sampling_ratio); - -std::tuple PSROIPool_forward_cpu( - const at::Tensor& input, - const at::Tensor& rois, - const float spatial_scale, - const int pooled_height, - const int pooled_width); - -at::Tensor PSROIPool_backward_cpu( - const at::Tensor& grad, - const at::Tensor& rois, - const at::Tensor& mapping_channel, - const float spatial_scale, - const int pooled_height, - const int pooled_width, - const int batch_size, - const int channels, - const int height, - const int width); - -std::tuple PSROIAlign_forward_cpu( - const at::Tensor& input, - const at::Tensor& rois, - const float spatial_scale, - const int pooled_height, - const int pooled_width, - const int sampling_ratio); - -at::Tensor PSROIAlign_backward_cpu( - const at::Tensor& grad, - const at::Tensor& rois, - const at::Tensor& mapping_channel, - const float spatial_scale, - const int pooled_height, - const int pooled_width, - const int sampling_ratio, - const int batch_size, - const int channels, - const int height, - const int width); - -at::Tensor nms_cpu( - const at::Tensor& dets, - const at::Tensor& scores, - const float iou_threshold); diff --git a/torchvision/csrc/cuda/cuda_helpers.h b/torchvision/csrc/cuda/cuda_helpers.h deleted file mode 100644 index af32f60e815..00000000000 --- a/torchvision/csrc/cuda/cuda_helpers.h +++ /dev/null @@ -1,5 +0,0 @@ -#pragma once - -#define CUDA_1D_KERNEL_LOOP(i, n) \ - for (int i = (blockIdx.x * blockDim.x) + threadIdx.x; i < (n); \ - i += (blockDim.x * gridDim.x)) diff --git a/torchvision/csrc/cuda/vision_cuda.h b/torchvision/csrc/cuda/vision_cuda.h deleted file mode 100644 index b35c4c909c1..00000000000 --- a/torchvision/csrc/cuda/vision_cuda.h +++ /dev/null @@ -1,87 +0,0 @@ -#pragma once -#include -#include - -at::Tensor ROIAlign_forward_cuda( - const at::Tensor& input, - const at::Tensor& rois, - const float spatial_scale, - 
const int pooled_height, - const int pooled_width, - const int sampling_ratio); - -at::Tensor ROIAlign_backward_cuda( - const at::Tensor& grad, - const at::Tensor& rois, - const float spatial_scale, - const int pooled_height, - const int pooled_width, - const int batch_size, - const int channels, - const int height, - const int width, - const int sampling_ratio); - -std::tuple ROIPool_forward_cuda( - const at::Tensor& input, - const at::Tensor& rois, - const float spatial_scale, - const int pooled_height, - const int pooled_width); - -at::Tensor ROIPool_backward_cuda( - const at::Tensor& grad, - const at::Tensor& rois, - const at::Tensor& argmax, - const float spatial_scale, - const int pooled_height, - const int pooled_width, - const int batch_size, - const int channels, - const int height, - const int width); - -std::tuple PSROIPool_forward_cuda( - const at::Tensor& input, - const at::Tensor& rois, - const float spatial_scale, - const int pooled_height, - const int pooled_width); - -at::Tensor PSROIPool_backward_cuda( - const at::Tensor& grad, - const at::Tensor& rois, - const at::Tensor& mapping_channel, - const float spatial_scale, - const int pooled_height, - const int pooled_width, - const int batch_size, - const int channels, - const int height, - const int width); - -std::tuple PSROIAlign_forward_cuda( - const at::Tensor& input, - const at::Tensor& rois, - const float spatial_scale, - const int pooled_height, - const int pooled_width, - const int sampling_ratio); - -at::Tensor PSROIAlign_backward_cuda( - const at::Tensor& grad, - const at::Tensor& rois, - const at::Tensor& mapping_channel, - const float spatial_scale, - const int pooled_height, - const int pooled_width, - const int sampling_ratio, - const int batch_size, - const int channels, - const int height, - const int width); - -at::Tensor nms_cuda( - const at::Tensor& dets, - const at::Tensor& scores, - const float iou_threshold); diff --git a/torchvision/csrc/io/decoder/audio_sampler.cpp b/torchvision/csrc/io/decoder/audio_sampler.cpp new file mode 100644 index 00000000000..d46b93ddc69 --- /dev/null +++ b/torchvision/csrc/io/decoder/audio_sampler.cpp @@ -0,0 +1,251 @@ +#include "audio_sampler.h" +#include +#include "util.h" + +#define AVRESAMPLE_MAX_CHANNELS 32 + +// www.ffmpeg.org/doxygen/1.1/doc_2examples_2resampling_audio_8c-example.html#a24 +namespace ffmpeg { + +namespace { +int preparePlanes( + const AudioFormat& fmt, + const uint8_t* buffer, + int numSamples, + uint8_t** planes) { + int result; + if ((result = av_samples_fill_arrays( + planes, + nullptr, // linesize is not needed + buffer, + fmt.channels, + numSamples, + (AVSampleFormat)fmt.format, + 1)) < 0) { + LOG(ERROR) << "av_samples_fill_arrays failed, err: " + << Util::generateErrorDesc(result) + << ", numSamples: " << numSamples << ", fmt: " << fmt.format; + } + return result; +} +} // namespace + +AudioSampler::AudioSampler(void* logCtx) : logCtx_(logCtx) {} + +AudioSampler::~AudioSampler() { + cleanUp(); +} + +void AudioSampler::shutdown() { + cleanUp(); +} + +bool AudioSampler::init(const SamplerParameters& params) { + cleanUp(); + + if (params.type != MediaType::TYPE_AUDIO) { + LOG(ERROR) << "Invalid media type, expected MediaType::TYPE_AUDIO"; + return false; + } + +#if LIBAVUTIL_VERSION_INT >= AV_VERSION_INT(57, 28, 100) + swrContext_ = nullptr; // assign the member; a local declaration here would shadow it + AVChannelLayout channel_out; + AVChannelLayout channel_in; + av_channel_layout_default(&channel_out, params.out.audio.channels); + av_channel_layout_default(&channel_in, params.in.audio.channels); +
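// FFmpeg >= 5.1 (libavutil >= 57.28.100, per the guard above) replaced the + // uint64_t channel-mask based API with AVChannelLayout; swr_alloc_set_opts2 + // allocates and configures the SwrContext in a single call. +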
int ret = swr_alloc_set_opts2( + &swrContext_, + &channel_out, + (AVSampleFormat)params.out.audio.format, + params.out.audio.samples, + &channel_in, + (AVSampleFormat)params.in.audio.format, + params.in.audio.samples, + 0, + logCtx_); +#else + swrContext_ = swr_alloc_set_opts( + nullptr, + av_get_default_channel_layout(params.out.audio.channels), + (AVSampleFormat)params.out.audio.format, + params.out.audio.samples, + av_get_default_channel_layout(params.in.audio.channels), + (AVSampleFormat)params.in.audio.format, + params.in.audio.samples, + 0, + logCtx_); +#endif + if (swrContext_ == nullptr) { + LOG(ERROR) << "Cannot allocate SwrContext"; + return false; + } + + int result; + if ((result = swr_init(swrContext_)) < 0) { + LOG(ERROR) << "swr_init failed, err: " << Util::generateErrorDesc(result) + << ", in -> format: " << params.in.audio.format + << ", channels: " << params.in.audio.channels + << ", samples: " << params.in.audio.samples + << ", out -> format: " << params.out.audio.format + << ", channels: " << params.out.audio.channels + << ", samples: " << params.out.audio.samples; + return false; + } + + // set formats + params_ = params; + return true; +} + +int AudioSampler::numOutputSamples(int inSamples) const { + return swr_get_out_samples(swrContext_, inSamples); +} + +int AudioSampler::sample( + const uint8_t* inPlanes[], + int inNumSamples, + ByteStorage* out, + int outNumSamples) { + int result; + int outBufferBytes = av_samples_get_buffer_size( + nullptr, + params_.out.audio.channels, + outNumSamples, + (AVSampleFormat)params_.out.audio.format, + 1); + + if (out) { + out->ensure(outBufferBytes); + + uint8_t* outPlanes[AVRESAMPLE_MAX_CHANNELS] = {nullptr}; + + if ((result = preparePlanes( + params_.out.audio, + out->writableTail(), + outNumSamples, + outPlanes)) < 0) { + return result; + } + + if ((result = swr_convert( + swrContext_, + &outPlanes[0], + outNumSamples, + inPlanes, + inNumSamples)) < 0) { + LOG(ERROR) << "swr_convert failed, err: " + << Util::generateErrorDesc(result); + return result; + } + + TORCH_CHECK_LE(result, outNumSamples); + + if (result) { + if ((result = av_samples_get_buffer_size( + nullptr, + params_.out.audio.channels, + result, + (AVSampleFormat)params_.out.audio.format, + 1)) >= 0) { + out->append(result); + } else { + LOG(ERROR) << "av_samples_get_buffer_size failed, err: " + << Util::generateErrorDesc(result); + } + } + } else { + // allocate a temporary buffer + auto* tmpBuffer = static_cast(av_malloc(outBufferBytes)); + if (!tmpBuffer) { + LOG(ERROR) << "av_alloc failed, for size: " << outBufferBytes; + return -1; + } + + uint8_t* outPlanes[AVRESAMPLE_MAX_CHANNELS] = {nullptr}; + + if ((result = preparePlanes( + params_.out.audio, tmpBuffer, outNumSamples, outPlanes)) < 0) { + av_free(tmpBuffer); + return result; + } + + if ((result = swr_convert( + swrContext_, + &outPlanes[0], + outNumSamples, + inPlanes, + inNumSamples)) < 0) { + LOG(ERROR) << "swr_convert failed, err: " + << Util::generateErrorDesc(result); + av_free(tmpBuffer); + return result; + } + + av_free(tmpBuffer); + + TORCH_CHECK_LE(result, outNumSamples); + + if (result) { + result = av_samples_get_buffer_size( + nullptr, + params_.out.audio.channels, + result, + (AVSampleFormat)params_.out.audio.format, + 1); + } + } + + return result; +} + +int AudioSampler::sample(AVFrame* frame, ByteStorage* out) { + const auto outNumSamples = numOutputSamples(frame ? frame->nb_samples : 0); + + if (!outNumSamples) { + return 0; + } + + return sample( + frame ? 
(const uint8_t**)&frame->data[0] : nullptr, + frame ? frame->nb_samples : 0, + out, + outNumSamples); +} + +int AudioSampler::sample(const ByteStorage* in, ByteStorage* out) { + const auto inSampleSize = + av_get_bytes_per_sample((AVSampleFormat)params_.in.audio.format); + + const auto inNumSamples = + !in ? 0 : in->length() / inSampleSize / params_.in.audio.channels; + + const auto outNumSamples = numOutputSamples(inNumSamples); + + if (!outNumSamples) { + return 0; + } + + uint8_t* inPlanes[AVRESAMPLE_MAX_CHANNELS] = {nullptr}; + int result; + if (in && + (result = preparePlanes( + params_.in.audio, in->data(), inNumSamples, inPlanes)) < 0) { + return result; + } + + return sample( + in ? (const uint8_t**)inPlanes : nullptr, + inNumSamples, + out, + outNumSamples); +} + +void AudioSampler::cleanUp() { + if (swrContext_) { + swr_free(&swrContext_); + swrContext_ = nullptr; + } +} + +} // namespace ffmpeg diff --git a/torchvision/csrc/io/decoder/audio_sampler.h b/torchvision/csrc/io/decoder/audio_sampler.h new file mode 100644 index 00000000000..e105bbe4de2 --- /dev/null +++ b/torchvision/csrc/io/decoder/audio_sampler.h @@ -0,0 +1,39 @@ +#pragma once + +#include "defs.h" + +namespace ffmpeg { + +/** + * Class transcodes audio frames from one format into another + */ + +class AudioSampler : public MediaSampler { + public: + explicit AudioSampler(void* logCtx); + ~AudioSampler() override; + + // MediaSampler overrides + bool init(const SamplerParameters& params) override; + int sample(const ByteStorage* in, ByteStorage* out) override; + void shutdown() override; + + int sample(AVFrame* frame, ByteStorage* out); + + private: + // close resources + void cleanUp(); + // helper functions for resampling + int numOutputSamples(int inSamples) const; + int sample( + const uint8_t* inPlanes[], + int inNumSamples, + ByteStorage* out, + int outNumSamples); + + private: + SwrContext* swrContext_{nullptr}; + void* logCtx_{nullptr}; +}; + +} // namespace ffmpeg diff --git a/torchvision/csrc/io/decoder/audio_stream.cpp b/torchvision/csrc/io/decoder/audio_stream.cpp new file mode 100644 index 00000000000..9d7354e02f5 --- /dev/null +++ b/torchvision/csrc/io/decoder/audio_stream.cpp @@ -0,0 +1,120 @@ +#include "audio_stream.h" +#include +#include +#include "util.h" + +namespace ffmpeg { + +namespace { +static int get_nb_channels(const AVFrame* frame, const AVCodecContext* codec) { +#if LIBAVUTIL_VERSION_INT >= AV_VERSION_INT(57, 28, 100) + return frame ? frame->ch_layout.nb_channels : codec->ch_layout.nb_channels; +#else + return frame ?
frame->channels : codec->channels; +#endif +} + +bool operator==(const AudioFormat& x, const AVFrame& y) { + return x.samples == static_cast(y.sample_rate) && + x.channels == static_cast(get_nb_channels(&y, nullptr)) && + x.format == y.format; +} + +bool operator==(const AudioFormat& x, const AVCodecContext& y) { + return x.samples == static_cast(y.sample_rate) && + x.channels == static_cast(get_nb_channels(nullptr, &y)) && + x.format == y.sample_fmt; +} + +AudioFormat& toAudioFormat(AudioFormat& x, const AVFrame& y) { + x.samples = y.sample_rate; + x.channels = get_nb_channels(&y, nullptr); + x.format = y.format; + return x; +} + +AudioFormat& toAudioFormat(AudioFormat& x, const AVCodecContext& y) { + x.samples = y.sample_rate; + x.channels = get_nb_channels(nullptr, &y); + x.format = y.sample_fmt; + return x; +} +} // namespace + +AudioStream::AudioStream( + AVFormatContext* inputCtx, + int index, + bool convertPtsToWallTime, + const AudioFormat& format) + : Stream( + inputCtx, + MediaFormat::makeMediaFormat(format, index), + convertPtsToWallTime, + 0) {} + +AudioStream::~AudioStream() { + if (sampler_) { + sampler_->shutdown(); + sampler_.reset(); + } +} + +int AudioStream::initFormat() { + // set output format + if (format_.format.audio.samples == 0) { + format_.format.audio.samples = codecCtx_->sample_rate; + } +#if LIBAVUTIL_VERSION_INT >= AV_VERSION_INT(57, 28, 100) + if (format_.format.audio.channels == 0) { + format_.format.audio.channels = codecCtx_->ch_layout.nb_channels; + } +#else + if (format_.format.audio.channels == 0) { + format_.format.audio.channels = codecCtx_->channels; + } +#endif + if (format_.format.audio.format == AV_SAMPLE_FMT_NONE) { + format_.format.audio.format = codecCtx_->sample_fmt; + } + + return format_.format.audio.samples != 0 && + format_.format.audio.channels != 0 && + format_.format.audio.format != AV_SAMPLE_FMT_NONE + ? 0 + : -1; +} + +// copies audio sample bytes via swr_convert call in audio_sampler.cpp +int AudioStream::copyFrameBytes(ByteStorage* out, bool flush) { + if (!sampler_) { + sampler_ = std::make_unique(codecCtx_); + } + // check if input format gets changed + if (flush ? !(sampler_->getInputFormat().audio == *codecCtx_) + : !(sampler_->getInputFormat().audio == *frame_)) { + // - reinit sampler + SamplerParameters params; + params.type = format_.type; + params.out = format_.format; + params.in = FormatUnion(); + flush ? toAudioFormat(params.in.audio, *codecCtx_) + : toAudioFormat(params.in.audio, *frame_); + if (!sampler_->init(params)) { + return -1; + } + + VLOG(1) << "Set input audio sampler format" + << ", samples: " << params.in.audio.samples + << ", channels: " << params.in.audio.channels + << ", format: " << params.in.audio.format + << " : output audio sampler format" + << ", samples: " << format_.format.audio.samples + << ", channels: " << format_.format.audio.channels + << ", format: " << format_.format.audio.format; + } + // calls to a sampler that converts the audio samples and copies them to the + // out buffer via ffmpeg::swr_convert + return sampler_->sample(flush ? nullptr : frame_, out); +} + +} // namespace ffmpeg diff --git a/torchvision/csrc/io/decoder/audio_stream.h b/torchvision/csrc/io/decoder/audio_stream.h new file mode 100644 index 00000000000..2d6457b68f5 --- /dev/null +++ b/torchvision/csrc/io/decoder/audio_stream.h @@ -0,0 +1,29 @@ +#pragma once + +#include "audio_sampler.h" +#include "stream.h" + +namespace ffmpeg { + +/** + * Class uses FFMPEG library to decode one audio stream. 
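+ * + * A minimal configuration sketch (illustrative only; assumes a valid + * AVFormatContext* ctx and a stream index): + * + *   AudioFormat fmt;                 // zero/default fields => auto-detect from codec + *   fmt.samples = 16000;             // resample to 16 kHz + *   fmt.channels = 1;                // downmix to mono + *   fmt.format = AV_SAMPLE_FMT_FLT;  // 32-bit float samples + *   AudioStream stream(ctx, index, false, fmt);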
+ */ + +class AudioStream : public Stream { + public: + AudioStream( + AVFormatContext* inputCtx, + int index, + bool convertPtsToWallTime, + const AudioFormat& format); + ~AudioStream() override; + + private: + int initFormat() override; + int copyFrameBytes(ByteStorage* out, bool flush) override; + + private: + std::unique_ptr sampler_; +}; + +} // namespace ffmpeg diff --git a/torchvision/csrc/io/decoder/cc_stream.cpp b/torchvision/csrc/io/decoder/cc_stream.cpp new file mode 100644 index 00000000000..89174c396fd --- /dev/null +++ b/torchvision/csrc/io/decoder/cc_stream.cpp @@ -0,0 +1,24 @@ +#include "cc_stream.h" + +namespace ffmpeg { + +CCStream::CCStream( + AVFormatContext* inputCtx, + int index, + bool convertPtsToWallTime, + const SubtitleFormat& format) + : SubtitleStream(inputCtx, index, convertPtsToWallTime, format) { + format_.type = TYPE_CC; +} + +AVCodec* CCStream::findCodec(AVCodecParameters* params) { + if (params->codec_id == AV_CODEC_ID_BIN_DATA && + params->codec_type == AVMEDIA_TYPE_DATA) { + // obtain subtitles codec + params->codec_id = AV_CODEC_ID_MOV_TEXT; + params->codec_type = AVMEDIA_TYPE_SUBTITLE; + } + return Stream::findCodec(params); +} + +} // namespace ffmpeg diff --git a/torchvision/csrc/io/decoder/cc_stream.h b/torchvision/csrc/io/decoder/cc_stream.h new file mode 100644 index 00000000000..3a1d169f014 --- /dev/null +++ b/torchvision/csrc/io/decoder/cc_stream.h @@ -0,0 +1,22 @@ +#pragma once + +#include "subtitle_stream.h" + +namespace ffmpeg { + +/** + * Class uses FFMPEG library to decode one closed captions stream. + */ +class CCStream : public SubtitleStream { + public: + CCStream( + AVFormatContext* inputCtx, + int index, + bool convertPtsToWallTime, + const SubtitleFormat& format); + + private: + AVCodec* findCodec(AVCodecParameters* params) override; +}; + +} // namespace ffmpeg diff --git a/torchvision/csrc/io/decoder/decoder.cpp b/torchvision/csrc/io/decoder/decoder.cpp new file mode 100644 index 00000000000..cfe762bbc6e --- /dev/null +++ b/torchvision/csrc/io/decoder/decoder.cpp @@ -0,0 +1,763 @@ +#include "decoder.h" +#include +#include +#include +#include +#include +#include "audio_stream.h" +#include "cc_stream.h" +#include "subtitle_stream.h" +#include "util.h" +#include "video_stream.h" + +namespace ffmpeg { + +namespace { + +constexpr size_t kIoBufferSize = 96 * 1024; +constexpr size_t kIoPaddingSize = AV_INPUT_BUFFER_PADDING_SIZE; +constexpr size_t kLogBufferSize = 1024; + +bool mapFfmpegType(AVMediaType media, MediaType* type) { + switch (media) { + case AVMEDIA_TYPE_AUDIO: + *type = TYPE_AUDIO; + return true; + case AVMEDIA_TYPE_VIDEO: + *type = TYPE_VIDEO; + return true; + case AVMEDIA_TYPE_SUBTITLE: + *type = TYPE_SUBTITLE; + return true; + case AVMEDIA_TYPE_DATA: + *type = TYPE_CC; + return true; + default: + return false; + } +} + +std::unique_ptr createStream( + MediaType type, + AVFormatContext* ctx, + int idx, + bool convertPtsToWallTime, + const FormatUnion& format, + int64_t loggingUuid) { + switch (type) { + case TYPE_AUDIO: + return std::make_unique( + ctx, idx, convertPtsToWallTime, format.audio); + case TYPE_VIDEO: + return std::make_unique( + // negative loggingUuid indicates video streams. 
+ ctx, + idx, + convertPtsToWallTime, + format.video, + -loggingUuid); + case TYPE_SUBTITLE: + return std::make_unique( + ctx, idx, convertPtsToWallTime, format.subtitle); + case TYPE_CC: + return std::make_unique( + ctx, idx, convertPtsToWallTime, format.subtitle); + default: + return nullptr; + } +} + +} // Namespace + +/* static */ +void Decoder::logFunction(void* avcl, int level, const char* cfmt, va_list vl) { + if (!avcl) { + // Nothing can be done here + return; + } + + AVClass* avclass = *reinterpret_cast(avcl); + if (!avclass) { + // Nothing can be done here + return; + } + Decoder* decoder = nullptr; + if (strcmp(avclass->class_name, "AVFormatContext") == 0) { + AVFormatContext* context = reinterpret_cast(avcl); + if (context) { + decoder = reinterpret_cast(context->opaque); + } + } else if (strcmp(avclass->class_name, "AVCodecContext") == 0) { + AVCodecContext* context = reinterpret_cast(avcl); + if (context) { + decoder = reinterpret_cast(context->opaque); + } + } else if (strcmp(avclass->class_name, "AVIOContext") == 0) { + AVIOContext* context = reinterpret_cast(avcl); + // only if opaque was assigned to Decoder pointer + if (context && context->read_packet == Decoder::readFunction) { + decoder = reinterpret_cast(context->opaque); + } + } else if (strcmp(avclass->class_name, "SWResampler") == 0) { + // expect AVCodecContext as parent + if (avclass->parent_log_context_offset) { + AVClass** parent = + *(AVClass***)(((uint8_t*)avcl) + avclass->parent_log_context_offset); + AVCodecContext* context = reinterpret_cast(parent); + if (context) { + decoder = reinterpret_cast(context->opaque); + } + } + } else if (strcmp(avclass->class_name, "SWScaler") == 0) { + // cannot find a way to pass context pointer through SwsContext struct + } else { + VLOG(2) << "Unknown context class: " << avclass->class_name; + } + + if (decoder != nullptr && decoder->enableLogLevel(level)) { + char buf[kLogBufferSize] = {0}; + // Format the line + int* prefix = decoder->getPrintPrefix(); + *prefix = 1; + av_log_format_line(avcl, level, cfmt, vl, buf, sizeof(buf) - 1, prefix); + // pass message to the decoder instance + std::string msg(buf); + decoder->logCallback(level, msg); + } +} + +bool Decoder::enableLogLevel(int level) const { + return ssize_t(level) <= params_.logLevel; +} + +void Decoder::logCallback(int level, const std::string& message) { + LOG(INFO) << "Msg, uuid=" << params_.loggingUuid << " level=" << level + << " msg=" << message; +} + +/* static */ +int Decoder::shutdownFunction(void* ctx) { + Decoder* decoder = (Decoder*)ctx; + if (decoder == nullptr) { + return 1; + } + return decoder->shutdownCallback(); +} + +int Decoder::shutdownCallback() { + return interrupted_ ? 
1 : 0; +} + +/* static */ +int Decoder::readFunction(void* opaque, uint8_t* buf, int size) { + Decoder* decoder = reinterpret_cast(opaque); + if (decoder == nullptr) { + return 0; + } + return decoder->readCallback(buf, size); +} + +/* static */ +int64_t Decoder::seekFunction(void* opaque, int64_t offset, int whence) { + Decoder* decoder = reinterpret_cast(opaque); + if (decoder == nullptr) { + return -1; + } + return decoder->seekCallback(offset, whence); +} + +int Decoder::readCallback(uint8_t* buf, int size) { + return seekableBuffer_.read(buf, size, params_.timeoutMs); +} + +int64_t Decoder::seekCallback(int64_t offset, int whence) { + return seekableBuffer_.seek(offset, whence, params_.timeoutMs); +} + +/* static */ +void Decoder::initOnce() { + static std::once_flag flagInit; + std::call_once(flagInit, []() { +#if LIBAVUTIL_VERSION_MAJOR < 56 // Before FFMPEG 4.0 + av_register_all(); + avcodec_register_all(); +#endif + avformat_network_init(); + av_log_set_callback(Decoder::logFunction); + av_log_set_level(AV_LOG_ERROR); + VLOG(1) << "Registered ffmpeg libs"; + }); +} + +Decoder::Decoder() { + initOnce(); +} + +Decoder::~Decoder() { + cleanUp(); +} + +// Initialise the format context that holds information about the container and +// fill it with minimal information about the format (codecs are not opened +// here). Function reads in information about the streams from the container +// into inputCtx and then passes it to decoder::openStreams. Finally, if seek is +// specified within the decoder parameters, it seeks into the correct frame +// (note, the seek defined here is "precise" seek). +bool Decoder::init( + const DecoderParameters& params, + DecoderInCallback&& in, + std::vector* metadata) { + cleanUp(); + + if ((params.uri.empty() || in) && (!params.uri.empty() || !in)) { + LOG(ERROR) + << "uuid=" << params_.loggingUuid + << " exactly one of an external URI or an input callback must be provided"; + return false; + } + + // set callback and params + params_ = params; + + if (!(inputCtx_ = avformat_alloc_context())) { + LOG(ERROR) << "uuid=" << params_.loggingUuid + << " cannot allocate format context"; + return false; + } + + AVInputFormat* fmt = nullptr; + int result = 0; + if (in) { + ImageType type = ImageType::UNKNOWN; + if ((result = seekableBuffer_.init( + std::forward(in), + params_.timeoutMs, + params_.maxSeekableBytes, + params_.isImage ? &type : nullptr)) < 0) { + LOG(ERROR) << "uuid=" << params_.loggingUuid + << " can't initiate seekable buffer"; + cleanUp(); + return false; + } + + if (params_.isImage) { + const char* fmtName = "image2"; + switch (type) { + case ImageType::JPEG: + fmtName = "jpeg_pipe"; + break; + case ImageType::PNG: + fmtName = "png_pipe"; + break; + case ImageType::TIFF: + fmtName = "tiff_pipe"; + break; + default: + break; + } + + fmt = (AVInputFormat*)av_find_input_format(fmtName); + } + + const size_t avioCtxBufferSize = kIoBufferSize; + uint8_t* avioCtxBuffer = + (uint8_t*)av_malloc(avioCtxBufferSize + kIoPaddingSize); + if (!avioCtxBuffer) { + LOG(ERROR) << "uuid=" << params_.loggingUuid + << " av_malloc cannot allocate " << avioCtxBufferSize + << " bytes"; + cleanUp(); + return false; + } + + if (!(avioCtx_ = avio_alloc_context( + avioCtxBuffer, + avioCtxBufferSize, + 0, + reinterpret_cast(this), + &Decoder::readFunction, + nullptr, + result == 1 ?
&Decoder::seekFunction : nullptr))) { + LOG(ERROR) << "uuid=" << params_.loggingUuid + << " avio_alloc_context failed"; + av_free(avioCtxBuffer); + cleanUp(); + return false; + } + + avioCtx_->max_packet_size = params.maxEncodedBufferSize; + + inputCtx_->pb = avioCtx_; + inputCtx_->flags |= AVFMT_FLAG_CUSTOM_IO; + } + + inputCtx_->opaque = reinterpret_cast(this); + inputCtx_->interrupt_callback.callback = Decoder::shutdownFunction; + inputCtx_->interrupt_callback.opaque = reinterpret_cast(this); + + // add network timeout + inputCtx_->flags |= AVFMT_FLAG_NONBLOCK; + + AVDictionary* options = nullptr; + if (params_.listen) { + av_dict_set_int(&options, "listen", 1, 0); + } + if (params_.timeoutMs > 0) { + av_dict_set_int(&options, "analyzeduration", params_.timeoutMs * 1000, 0); + av_dict_set_int(&options, "stimeout", params_.timeoutMs * 1000, 0); + av_dict_set_int(&options, "rw_timeout", params_.timeoutMs * 1000, 0); + if (!params_.tlsCertFile.empty()) { + av_dict_set(&options, "cert_file", params_.tlsCertFile.data(), 0); + } + if (!params_.tlsKeyFile.empty()) { + av_dict_set(&options, "key_file", params_.tlsKeyFile.data(), 0); + } + } + + av_dict_set_int(&options, "probesize", params_.probeSize, 0); + + interrupted_ = false; + + // ffmpeg avformat_open_input call can hang if media source doesn't respond + // set a guard to handle such situations, if requested + std::promise p; + std::future f = p.get_future(); + std::unique_ptr guard; + if (params_.preventStaleness) { + guard = std::make_unique([&f, this]() { + auto timeout = std::chrono::milliseconds(params_.timeoutMs); + if (std::future_status::timeout == f.wait_for(timeout)) { + LOG(ERROR) << "uuid=" << params_.loggingUuid + << " cannot open stream within " << params_.timeoutMs + << " ms"; + interrupted_ = true; + } + }); + } + + if (fmt) { + result = avformat_open_input(&inputCtx_, nullptr, fmt, &options); + } else { + result = + avformat_open_input(&inputCtx_, params_.uri.c_str(), nullptr, &options); + } + + av_dict_free(&options); + + if (guard) { + p.set_value(true); + guard->join(); + guard.reset(); + } + + if (result < 0 || interrupted_) { + LOG(ERROR) << "uuid=" << params_.loggingUuid + << " avformat_open_input failed, error=" + << Util::generateErrorDesc(result); + cleanUp(); + return false; + } + + result = avformat_find_stream_info(inputCtx_, nullptr); + + if (result < 0) { + LOG(ERROR) << "uuid=" << params_.loggingUuid + << " avformat_find_stream_info failed, error=" + << Util::generateErrorDesc(result); + cleanUp(); + return false; + } + + if (!openStreams(metadata)) { + LOG(ERROR) << "uuid=" << params_.loggingUuid << " cannot activate streams"; + cleanUp(); + return false; + } + // SyncDecoder inherits from Decoder and overrides onInit. + onInit(); + + if (params.startOffset != 0) { + auto offset = params.startOffset <= params.seekAccuracy + ?
0 + : params.startOffset - params.seekAccuracy; + + av_seek_frame(inputCtx_, -1, offset, AVSEEK_FLAG_BACKWARD); + } + + for (unsigned int i = 0; i < inputCtx_->nb_streams; i++) { + if ( +#if LIBAVUTIL_VERSION_MAJOR < 56 // Before FFMPEG 4.0 + inputCtx_->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO +#else // FFMPEG 4.0+ + inputCtx_->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO +#endif + && inputCtx_->streams[i]->duration > 0) { + // There are at least two 1/r_frame_rates from the frame before the last + // one until the video duration, let's prefer to set duration after the + // frame before the last one, but as early as possible + double correction = 2 * inputCtx_->streams[i]->r_frame_rate.den / + (double)inputCtx_->streams[i]->r_frame_rate.num - + 1 / (double)AV_TIME_BASE; + videoDurationMs_ = 1000 * inputCtx_->streams[i]->duration * + inputCtx_->streams[i]->time_base.num / + (double)inputCtx_->streams[i]->time_base.den - + 1000 * correction; + break; + } + } + + VLOG(1) << "Decoder initialized, log level: " << params_.logLevel; + VLOG(1) << "Video duration: " << videoDurationMs_; + return true; +} + +// open appropriate CODEC for every type of stream and move it to the class +// variable `streams_` and make sure it is in range for decoding +bool Decoder::openStreams(std::vector* metadata) { + for (unsigned int i = 0; i < inputCtx_->nb_streams; i++) { + // - find the corresponding format in the params_.formats set + MediaFormat format; +#if LIBAVUTIL_VERSION_MAJOR < 56 // Before FFMPEG 4.0 + const auto media = inputCtx_->streams[i]->codec->codec_type; +#else // FFMPEG 4.0+ + const auto media = inputCtx_->streams[i]->codecpar->codec_type; +#endif + if (!mapFfmpegType(media, &format.type)) { + VLOG(1) << "Stream media: " << media << " at index " << i + << " gets ignored, unknown type"; + + continue; // unsupported type + } + + // check format + auto it = params_.formats.find(format); + if (it == params_.formats.end()) { + VLOG(1) << "Stream type: " << format.type << " at index: " << i + << " gets ignored, caller is not interested"; + continue; // clients don't care about this media format + } + + // do we already have a stream of this type? + auto stream = findByType(format); + + // should we process this stream? + + if (it->stream == -2 || // all streams of this type are welcome + (!stream && (it->stream == -1 || it->stream == i))) { // new stream + VLOG(1) << "Stream type: " << format.type << " found, at index: " << i; + auto stream_2 = createStream( + format.type, + inputCtx_, + i, + params_.convertPtsToWallTime, + it->format, + params_.loggingUuid); + CHECK(stream_2); + if (stream_2->openCodec(metadata, params_.numThreads) < 0) { + LOG(ERROR) << "uuid=" << params_.loggingUuid + << " open codec failed, stream_idx=" << i; + return false; + } + streams_.emplace(i, std::move(stream_2)); + inRange_.set(i, true); + } + } + + return true; +} + +void Decoder::shutdown() { + cleanUp(); +} + +void Decoder::interrupt() { + interrupted_ = true; +} + +void Decoder::cleanUp() { + if (!interrupted_) { + interrupted_ = true; + } + + if (inputCtx_) { + for (auto& stream : streams_) { + // Drain stream buffers.
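+ // The comma expression resets msg.payload before every flush() call, so + // each drained frame is dropped rather than kept; flush() returns > 0 while + // the codec still has buffered frames.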
+ DecoderOutputMessage msg; + while (msg.payload = nullptr, stream.second->flush(&msg, true) > 0) { + } + stream.second.reset(); + } + streams_.clear(); + avformat_close_input(&inputCtx_); + } + if (avioCtx_) { + av_freep(&avioCtx_->buffer); + av_freep(&avioCtx_); + } + + // reset callback + seekableBuffer_.shutdown(); +} + +// function does actual work, derived class calls it in working thread +// periodically. On success method returns 0, ENODATA on EOF, ETIMEDOUT if +// no frames got decoded in the specified timeout time, AVERROR_BUFFER_TOO_SMALL +// when unable to allocate packet and error on unrecoverable error +int Decoder::getFrame(size_t workingTimeInMs) { + if (inRange_.none()) { + return ENODATA; + } + // decode frames until cache is full and leave thread + // once the decode() method gets called and grabs some bytes + // run this method again + // init packet + // update 03/22: moving memory management to ffmpeg + AVPacket* avPacket; + avPacket = av_packet_alloc(); + if (avPacket == nullptr) { + LOG(ERROR) << "uuid=" << params_.loggingUuid + << " decoder was not able to allocate the packet."; + return AVERROR_BUFFER_TOO_SMALL; + } + avPacket->data = nullptr; + avPacket->size = 0; + + auto end = std::chrono::steady_clock::now() + + std::chrono::milliseconds(workingTimeInMs); + // return true if elapsed time is less than the timeout + auto watcher = [end]() -> bool { + return std::chrono::steady_clock::now() <= end; + }; + + int result = 0; + size_t decodingErrors = 0; + bool decodedFrame = false; + while (!interrupted_ && inRange_.any() && !decodedFrame) { + if (watcher() == false) { + LOG(ERROR) << "uuid=" << params_.loggingUuid << " hit ETIMEDOUT"; + result = ETIMEDOUT; + break; + } + result = av_read_frame(inputCtx_, avPacket); + if (result == AVERROR(EAGAIN)) { + VLOG(4) << "Decoder is busy..."; + std::this_thread::yield(); + result = 0; // reset error, EAGAIN is not an error at all + // reset the packet to default settings + av_packet_unref(avPacket); + continue; + } else if (result == AVERROR_EOF) { + flushStreams(); + VLOG(1) << "End of stream"; + result = ENODATA; + break; + } else if ( + result == AVERROR(EPERM) && params_.skipOperationNotPermittedPackets) { + // reset error, let's skip packets with EPERM + result = 0; + // reset the packet to default settings + av_packet_unref(avPacket); + continue; + } else if (result < 0) { + flushStreams(); + LOG(ERROR) << "uuid=" << params_.loggingUuid + << " error detected: " << Util::generateErrorDesc(result); + break; + } + + // get stream; if stream cannot be found reset the packet to + // default settings + auto stream = findByIndex(avPacket->stream_index); + if (stream == nullptr || !inRange_.test(stream->getIndex())) { + av_packet_unref(avPacket); + continue; + } + + size_t numConsecutiveNoBytes = 0; + // the packet bytes may only get partially decoded in one step + do { + // decode packet + bool gotFrame = false; + bool hasMsg = false; + // packet either got consumed completely or not at all + if ((result = processPacket( + stream, avPacket, &gotFrame, &hasMsg, params_.fastSeek)) < 0) { + LOG(ERROR) << "uuid=" << params_.loggingUuid + << " processPacket failed with code: " << result; + break; + } + + if (!gotFrame && params_.maxProcessNoBytes != 0 && + ++numConsecutiveNoBytes > params_.maxProcessNoBytes) { + LOG(ERROR) << "uuid=" << params_.loggingUuid + << " exceeding max amount of consecutive no bytes"; + break; + } + if (result > 0) { + numConsecutiveNoBytes = 0; + } + + decodedFrame |= hasMsg; + } while (result == 0); + + // post-loop check
+ if (result < 0) { + if (params_.maxPackageErrors != 0 && // check errors + ++decodingErrors >= params_.maxPackageErrors) { // reached the limit + LOG(ERROR) << "uuid=" << params_.loggingUuid + << " exceeding max amount of consecutive package errors"; + break; + } + } else { + decodingErrors = 0; // reset on success + } + + result = 0; + + av_packet_unref(avPacket); + + if (params_.uniformSampling > 1) { + if (doSeek_) { + double duration = + videoDurationMs_ > 0 ? videoDurationMs_ : params_.expectedDuration; + double step = + (duration * AV_TIME_BASE) / (1000 * (params_.uniformSampling - 1)); + avformat_seek_file( + inputCtx_, + -1, + static_cast(step * kFramesDecoded_) + 1, + static_cast(step * (kFramesDecoded_ + 1)), + static_cast(step * (kFramesDecoded_ + 1)), + 0); + ++kFramesDecoded_; + doSeek_ = false; + } + } + } + + av_packet_free(&avPacket); + VLOG(2) << "Interrupted loop" << ", interrupted_ " << interrupted_ + << ", inRange_.any() " << inRange_.any() << ", decodedFrame " + << decodedFrame << ", result " << result; + + // loop can be terminated, either by: + // 1. explicit interrupt + // 2. unrecoverable error or ENODATA (end of stream) or ETIMEDOUT (timeout) + // 3. decoded frames pts are out of the specified range + // 4. successfully decoded frame + if (interrupted_) { + return EINTR; + } + if (result != 0) { + return result; + } + if (inRange_.none()) { + return ENODATA; + } + return 0; +} + +// find stream by stream index +Stream* Decoder::findByIndex(int streamIndex) const { + auto it = streams_.find(streamIndex); + return it != streams_.end() ? it->second.get() : nullptr; +} + +// find stream by type; note finds only the first stream of a given type +Stream* Decoder::findByType(const MediaFormat& format) const { + for (auto& stream : streams_) { + if (stream.second->getMediaFormat().type == format.type) { + return stream.second.get(); + } + } + return nullptr; +} + +// given the stream and packet, decode the frame buffers into the +// DecoderOutputMessage data structure via stream::decodePacket function. +int Decoder::processPacket( + Stream* stream, + AVPacket* packet, + bool* gotFrame, + bool* hasMsg, + bool fastSeek) { + // decode packet + int result; + DecoderOutputMessage msg; + msg.payload = params_.headerOnly ? nullptr : createByteStorage(0); + *hasMsg = false; + if ((result = stream->decodePacket( + packet, &msg, params_.headerOnly, gotFrame)) >= 0 && + *gotFrame) { + // check end offset + bool endInRange = + params_.endOffset <= 0 || msg.header.pts <= params_.endOffset; + inRange_.set(stream->getIndex(), endInRange); + // if fastseek is enabled, we're returning the first + // frame that we decode after (potential) seek. + // By default, we perform an accurate seek to the closest + // following frame + bool startCondition = true; + if (!fastSeek) { + startCondition = msg.header.pts >= params_.startOffset; + } + if (endInRange && startCondition) { + *hasMsg = pushMsg(std::move(msg)); + } + } + return result; +} + +bool Decoder::pushMsg(DecoderOutputMessage&& msg) { + pastDecodedPTS_ = currentDecodedPTS_; + currentDecodedPTS_ = msg.header.pts; + + if (params_.uniformSampling <= 1) { + push(std::move(msg)); + return true; + } + + double duration = + videoDurationMs_ > 0 ?
videoDurationMs_ : params_.expectedDuration; + double step = + (duration * AV_TIME_BASE) / (1000 * (params_.uniformSampling - 1)); + if (pastDecodedPTS_ < step * kFramesDecoded_ && + step * kFramesDecoded_ <= currentDecodedPTS_) { + push(std::move(msg)); + doSeek_ = true; + return true; + } + + return false; +} + +void Decoder::flushStreams() { + VLOG(1) << "Flushing streams..."; + for (auto& stream : streams_) { + DecoderOutputMessage msg; + while (msg.payload = (params_.headerOnly ? nullptr : createByteStorage(0)), + stream.second->flush(&msg, params_.headerOnly) > 0) { + // check end offset + bool endInRange = + params_.endOffset <= 0 || msg.header.pts <= params_.endOffset; + inRange_.set(stream.second->getIndex(), endInRange); + if (endInRange && msg.header.pts >= params_.startOffset) { + pushMsg(std::move(msg)); + } else { + msg.payload.reset(); + } + } + } +} + +int Decoder::decode_all(const DecoderOutCallback& callback) { + int result; + do { + DecoderOutputMessage out; + if (0 == (result = decode(&out, params_.timeoutMs))) { + callback(std::move(out)); + } + } while (result == 0); + return result; +} +} // namespace ffmpeg diff --git a/torchvision/csrc/io/decoder/decoder.h b/torchvision/csrc/io/decoder/decoder.h new file mode 100644 index 00000000000..172a011f93e --- /dev/null +++ b/torchvision/csrc/io/decoder/decoder.h @@ -0,0 +1,100 @@ +#pragma once + +#include +#include +#include "seekable_buffer.h" +#include "stream.h" + +#if defined(_MSC_VER) +#include +using ssize_t = SSIZE_T; +#endif + +namespace ffmpeg { + +/** + * Class uses FFMPEG library to decode media streams. + * Media bytes can be explicitly provided through read-callback + * or fetched internally by FFMPEG library + */ +class Decoder : public MediaDecoder { + public: + Decoder(); + ~Decoder() override; + + // MediaDecoder overrides + bool init( + const DecoderParameters& params, + DecoderInCallback&& in, + std::vector* metadata) override; + int decode_all(const DecoderOutCallback& callback) override; + void shutdown() override; + void interrupt() override; + + protected: + // function does actual work, derived class calls it in working thread + // periodically. On success method returns 0, ENODATA on EOF, ETIMEDOUT if + // no frames got decoded in the specified timeout time, and error on + // unrecoverable error.
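+ // With params_.uniformSampling > 1 frames are emitted roughly uniformly + // across the timeline; the sampling step in pts units (microseconds) is + //   step = (durationMs * AV_TIME_BASE) / (1000 * (uniformSampling - 1)), + // i.e. durationUs / (uniformSampling - 1), and after each emitted frame + // getFrame() seeks forward to the next step boundary (see pushMsg).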
+ int getFrame(size_t workingTimeInMs = 100); + + // Derived class must override method and consume the provided message + virtual void push(DecoderOutputMessage&& buffer) = 0; + + // Fires on init call + virtual void onInit() {} + + public: + // C-style FFMPEG API requires C/static methods for callbacks + static void logFunction(void* avcl, int level, const char* cfmt, va_list vl); + static int shutdownFunction(void* ctx); + static int readFunction(void* opaque, uint8_t* buf, int size); + static int64_t seekFunction(void* opaque, int64_t offset, int whence); + // can be called by any classes or API + static void initOnce(); + + int* getPrintPrefix() { + return &printPrefix; + } + double videoDurationMs_ = -1; + + private: + // mark below function for a proper invocation + bool enableLogLevel(int level) const; + void logCallback(int level, const std::string& message); + int readCallback(uint8_t* buf, int size); + int64_t seekCallback(int64_t offset, int whence); + int shutdownCallback(); + + bool openStreams(std::vector* metadata); + Stream* findByIndex(int streamIndex) const; + Stream* findByType(const MediaFormat& format) const; + int processPacket( + Stream* stream, + AVPacket* packet, + bool* gotFrame, + bool* hasMsg, + bool fastSeek = false); + void flushStreams(); + void cleanUp(); + bool pushMsg(DecoderOutputMessage&& + msg); // returns whether frame is passed to downstream + + protected: + DecoderParameters params_; + + private: + SeekableBuffer seekableBuffer_; + int printPrefix{1}; + + std::atomic interrupted_{false}; + AVFormatContext* inputCtx_{nullptr}; + AVIOContext* avioCtx_{nullptr}; + std::unordered_map> streams_; + std::bitset<64> inRange_; + int kFramesDecoded_{0}; + int64_t pastDecodedPTS_{-1}; + int64_t currentDecodedPTS_{-1}; + bool doSeek_{false}; +}; +} // namespace ffmpeg diff --git a/torchvision/csrc/io/decoder/defs.h b/torchvision/csrc/io/decoder/defs.h new file mode 100644 index 00000000000..d2dc5c7935b --- /dev/null +++ b/torchvision/csrc/io/decoder/defs.h @@ -0,0 +1,415 @@ +#pragma once + +#include +#include +#include +#include +#include +#include +#include + +extern "C" { +#include +#include +#include +#include +#include +#include +#include "libswscale/swscale.h" +} + +namespace ffmpeg { + +// bit mask of formats, keep them in form 2^n +enum MediaType : size_t { + TYPE_AUDIO = 1, + TYPE_VIDEO = 2, + TYPE_SUBTITLE = 4, + TYPE_CC = 8, // closed captions from transport streams +}; + +// audio +struct AudioFormat { + // fields are initialized for the auto detection + // caller can specify some/all of field values if specific output is desirable + bool operator==(const AudioFormat& x) const { + return x.format == format && x.samples == samples && x.channels == channels; + } + + size_t samples{0}; // number samples per second (frequency) + size_t channels{0}; // number of channels + long format{-1}; // AVSampleFormat, auto AV_SAMPLE_FMT_NONE + size_t padding[2]; + // -- alignment 40 bytes +}; + +// video +struct VideoFormat { + // fields are initialized for the auto detection + // caller can specify some/all of field values if specific output is desirable + bool operator==(const VideoFormat& x) const { + return x.format == format && x.width == width && x.height == height; + } + /* + When width = 0, height = 0, minDimension = 0, and maxDimension = 0, + keep the original frame resolution + When width = 0, height = 0, minDimension != 0, and maxDimension = 0, + keep the aspect ratio and resize the frame so that shorter edge size is + minDimension + When width = 0, 
height = 0, minDimension = 0, and maxDimension != 0, + keep the aspect ratio and resize the frame so that longer edge size is + maxDimension + When width = 0, height = 0, minDimension != 0, and maxDimension != 0, + resize the frame so that shorter edge size is minDimension, and + longer edge size is maxDimension. The aspect ratio may not be preserved + When width = 0, height != 0, minDimension = 0, and maxDimension = 0, + keep the aspect ratio and resize the frame so that frame height is $height + When width != 0, height = 0, minDimension = 0, and maxDimension = 0, + keep the aspect ratio and resize the frame so that frame width is $width + When width != 0, height != 0, minDimension = 0, and maxDimension = 0, + resize the frame so that frame width and height are set to $width and + $height, + respectively + */ + size_t width{0}; // width in pixels + size_t height{0}; // height in pixels + long format{-1}; // AVPixelFormat, auto AV_PIX_FMT_NONE + size_t minDimension{0}; // choose min dimension and rescale accordingly + size_t maxDimension{0}; // choose max dimension and rescale accordingly + size_t cropImage{0}; // request image crop + // -- alignment 40 bytes +}; + +// subtitle/cc +struct SubtitleFormat { + long type{0}; // AVSubtitleType, auto SUBTITLE_NONE + size_t padding[4]; + // -- alignment 40 bytes +}; + +union FormatUnion { + FormatUnion() : audio() {} + explicit FormatUnion(int) : video() {} + explicit FormatUnion(char) : subtitle() {} + explicit FormatUnion(double) : subtitle() {} + AudioFormat audio; + VideoFormat video; + SubtitleFormat subtitle; + // -- alignment 40 bytes +}; + +/* + MediaFormat data structure serves as input/output parameter. + Caller assigns values for input formats + or leave default values for auto detection + For output formats all fields will be set to the specific values +*/ +struct MediaFormat { + // for using map/set data structures + bool operator<(const MediaFormat& x) const { + return type < x.type; + } + bool operator==(const MediaFormat& x) const { + if (type != x.type) { + return false; + } + switch (type) { + case TYPE_AUDIO: + return format.audio == x.format.audio; + case TYPE_VIDEO: + return format.video == x.format.video; + case TYPE_SUBTITLE: + case TYPE_CC: + return true; + default: + return false; + } + } + + explicit MediaFormat(long s = -1) : type(TYPE_AUDIO), stream(s), format() {} + explicit MediaFormat(int x, long s = -1) + : type(TYPE_VIDEO), stream(s), format(x) {} + explicit MediaFormat(char x, long s = -1) + : type(TYPE_SUBTITLE), stream(s), format(x) {} + explicit MediaFormat(double x, long s = -1) + : type(TYPE_CC), stream(s), format(x) {} + + static MediaFormat makeMediaFormat(AudioFormat format, long stream) { + MediaFormat result(stream); + result.format.audio = format; + return result; + } + + static MediaFormat makeMediaFormat(VideoFormat format, long stream) { + MediaFormat result(0, stream); + result.format.video = format; + return result; + } + + static MediaFormat makeMediaFormat(SubtitleFormat format, long stream) { + MediaFormat result('0', stream); + result.format.subtitle = format; + return result; + } + + // format type + MediaType type; + // stream index: + // set -1 for one stream auto detection, -2 for all streams auto detection, + // >= 0, specified stream, if caller knows the stream index (unlikely) + long stream; + // union keeps one of the possible formats, defined by MediaType + FormatUnion format; +}; + +struct DecoderParameters { + // local file, remote file, http url, rtmp stream uri, etc. 
anything that + // ffmpeg can recognize + std::string uri{std::string()}; + // timeout on getting bytes for decoding + size_t timeoutMs{1000}; + // logging level, default AV_LOG_PANIC + long logLevel{0}; + // max consecutive packet decoding errors before the decoder gives up, 0 means never + size_t maxPackageErrors{0}; + // max allowed consecutive times no bytes are processed. 0 means infinite. + size_t maxProcessNoBytes{0}; + // start offset (us) + long startOffset{0}; + // end offset (us) + long endOffset{-1}; + // logging id + int64_t loggingUuid{0}; + // internal max seekable buffer size + size_t maxSeekableBytes{0}; + // adjust header pts to the epoch time + bool convertPtsToWallTime{false}; + // indicate if input stream is an encoded image + bool isImage{false}; + // listen and wait for new rtmp stream + bool listen{false}; + // don't copy frame body, only header + bool headerOnly{false}; + // enable fast seek (seek only to keyframes) + bool fastSeek{false}; + // interrupt init method on timeout + bool preventStaleness{true}; + // seek tolerated accuracy (us) + double seekAccuracy{1000000.0}; + // Allow multithreaded decoding for numThreads > 1; + // numThreads=0 sets up sensible defaults + int numThreads{1}; + // what media types should be processed, default none + std::set formats; + + // can be used for asynchronous decoders + size_t cacheSize{8192}; // how many bytes to cache before we stop reading + size_t cacheTimeoutMs{1000}; // timeout on bytes writing + bool enforceCacheSize{false}; // drop output frames if cache is full + bool mergeAudioMessages{false}; // combine collocated audio messages together + + std::string tlsCertFile; + std::string tlsKeyFile; + + // Skip packets that fail with EPERM errors and continue decoding. + bool skipOperationNotPermittedPackets{false}; + + // probing size in bytes, i.e. the size of the data to analyze to get stream + // information. A higher value will enable detecting more information in case + // it is dispersed into the stream, but will increase latency. Must be an + // integer not less than 32. It is 5000000 by default. + int64_t probeSize{5000000}; + + // Expected duration of the video to be decoded, mainly used with uniform + // sampling + float expectedDuration{0.0f}; + + // Sample N key-frames from the video roughly uniformly across the timeline + int uniformSampling{0}; + + // with 0, ffmpeg allocates buffers of size 32768 bytes for encoded frames. + // Override this with bigger buffer size if needed. + int64_t maxEncodedBufferSize{0}; +}; + +struct DecoderHeader { + // message id, from 0 till ... + size_t seqno{0}; + // decoded timestamp in microseconds from either beginning of the stream or + // from epoch time, see DecoderParameters::convertPtsToWallTime + long pts{0}; + // decoded key frame + size_t keyFrame{0}; + // frames per second, valid only for video streams + double fps{0}; + // format specifies what kind of frame is in a payload + MediaFormat format; +}; + +// Abstract interface ByteStorage class +class ByteStorage { + public: + virtual ~ByteStorage() = default; + // makes sure that buffer has at least n bytes available for writing, if not + // storage must reallocate memory.
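+  // Typical cycle (illustrative): the producer calls ensure(n), writes at + // most tail() bytes at writableTail() and commits them with append(written); + // the consumer reads from data()/length() and releases bytes with trim(n).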
+// Abstract interface ByteStorage class
+class ByteStorage {
+ public:
+  virtual ~ByteStorage() = default;
+  // makes sure that buffer has at least n bytes available for writing; if not,
+  // storage must reallocate memory.
+  virtual void ensure(size_t n) = 0;
+  // caller must not write more than available bytes
+  virtual uint8_t* writableTail() = 0;
+  // caller confirms that n bytes were written to the writable tail
+  virtual void append(size_t n) = 0;
+  // caller confirms that n bytes were read from the read buffer
+  virtual void trim(size_t n) = 0;
+  // gives access to the beginning of the read buffer
+  virtual const uint8_t* data() const = 0;
+  // returns the stored size in bytes
+  virtual size_t length() const = 0;
+  // returns available capacity for the writable tail
+  virtual size_t tail() const = 0;
+  // clears content, keeps capacity
+  virtual void clear() = 0;
+};
+
+struct DecoderOutputMessage {
+  DecoderHeader header;
+  std::unique_ptr<ByteStorage> payload;
+};
+
+/*
+ * External provider of the encoded bytes; the specific implementation is left
+ * for different use cases, like file, memory, external network end-points, etc.
+ * Normally the input/output parameter @out is set to a valid, non-null buffer
+ * pointer, which indicates a "read" call; however, there are "seek" modes as
+ * well.
+
+ * @out != nullptr => read from the current offset, @whence is ignored,
+ * @size bytes to read => returns the number of bytes read, 0 if no more bytes
+ * are available, < 0 on error.
+
+ * @out == nullptr, @timeoutMs == 0 => does the provider support the "seek"
+ * capability in the first place? @size & @whence are ignored, returns 0 on
+ * success, < 0 if "seek" mode is not supported.
+
+ * @out == nullptr, @timeoutMs != 0 => normal seek call,
+ * offset == @size, i.e. @whence = [SEEK_SET, SEEK_CUR, SEEK_END, AVSEEK_SIZE),
+ * returns < 0 on error, the position if @whence = [SEEK_SET, SEEK_CUR,
+ * SEEK_END], the length of the buffer if @whence = [AVSEEK_SIZE].
+ */
+using DecoderInCallback =
+    std::function<int(uint8_t* out, int size, int whence, uint64_t timeoutMs)>;
+
+using DecoderOutCallback = std::function<void(DecoderOutputMessage&&)>;
+
+struct DecoderMetadata {
+  // time base numerator
+  long num{0};
+  // time base denominator
+  long den{1};
+  // duration of the stream, in microseconds, if available
+  long duration{-1};
+  // frames per second, valid only for video streams
+  double fps{0};
+  // format specifies what kind of frame is in the payload
+  MediaFormat format;
+};
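Since the whole read/probe/seek contract lives in this one callback type, here is a sketch of a conforming in-memory provider (the helper name is made up; AVSEEK_SIZE comes from FFmpeg's avio.h, only SEEK_SET is handled in seek mode, and MemoryBuffer further below is the real implementation):

    #include <algorithm>
    #include <cstring>
    #include <memory>
    #include <vector>

    ffmpeg::DecoderInCallback makeMemoryCallback(
        std::shared_ptr<std::vector<uint8_t>> data) {
      auto pos = std::make_shared<size_t>(0);
      return [data, pos](uint8_t* out, int size, int whence, uint64_t timeoutMs)
                 -> int {
        if (out) { // read mode: copy up to @size bytes from the current offset
          int available = std::min(size, int(data->size() - *pos));
          memcpy(out, data->data() + *pos, available);
          *pos += available;
          return available; // 0 signals EOF
        }
        if (timeoutMs == 0) { // probe mode: report that seeking is supported
          return 0;
        }
        if (whence == AVSEEK_SIZE) { // seek mode: report the total length
          return int(data->size());
        }
        if (whence == SEEK_SET) { // seek mode: the offset arrives via @size
          *pos = size_t(size);
        }
        return int(*pos);
      };
    }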
+
+/**
+ * Abstract class for decoding media bytes.
+ * It has two different modes: internal media bytes retrieval for a given uri,
+ * and an external media bytes provider in the case of memory streams.
+ */
+class MediaDecoder {
+ public:
+  virtual ~MediaDecoder() = default;
+
+  /**
+   * Initializes the media decoder with parameters and
+   * calls the callback when media bytes are available.
+   * Media bytes get fetched internally from the provided URI,
+   * or the provided input callback is invoked to get media bytes.
+   * The input callback must be empty for the internal media provider.
+   * The caller can provide a non-null pointer for the metadata container
+   * to obtain the streams' metadata (optional).
+   */
+  virtual bool init(
+      const DecoderParameters& params,
+      DecoderInCallback&& in,
+      std::vector<DecoderMetadata>* metadata) = 0;
+
+  /**
+   * Polls one available decoded frame from the decoder.
+   * Returns an error code, 0 - for success
+   */
+  virtual int decode(DecoderOutputMessage* out, uint64_t timeoutMs) = 0;
+
+  /**
+   * Polls available decoded bytes from the decoder, until EOF or error
+   */
+  virtual int decode_all(const DecoderOutCallback& callback) = 0;
+
+  /**
+   * Stops calling the callback, releases resources
+   */
+  virtual void shutdown() = 0;
+
+  /**
+   * Interrupts whatever the decoder is doing at any time
+   */
+  virtual void interrupt() = 0;
+
+  /**
+   * Factory to create ByteStorage class instances; the particular
+   * implementation is left to the derived class. Caller provides the initially
+   * allocated size
+   */
+  virtual std::unique_ptr<ByteStorage> createByteStorage(size_t n) = 0;
+};
+
+struct SamplerParameters {
+  MediaType type{TYPE_AUDIO};
+  FormatUnion in;
+  FormatUnion out;
+  int64_t loggingUuid{0};
+};
+
+/**
+ * Abstract class for sampling media bytes
+ */
+class MediaSampler {
+ public:
+  virtual ~MediaSampler() = default;
+
+  /**
+   * Initializes the media sampler with parameters
+   */
+  virtual bool init(const SamplerParameters& params) = 0;
+
+  /**
+   * Samples media bytes.
+   * Returns an error code < 0, or >= 0 for success, indicating the number of
+   * bytes processed.
+   * Set @in to null to flush the data
+   */
+  virtual int sample(const ByteStorage* in, ByteStorage* out) = 0;
+
+  /**
+   * Releases resources
+   */
+  virtual void shutdown() = 0;
+
+  /*
+   * Returns media type
+   */
+  MediaType getMediaType() const {
+    return params_.type;
+  }
+  /*
+   * Returns formats
+   */
+  FormatUnion getInputFormat() const {
+    return params_.in;
+  }
+  FormatUnion getOutFormat() const {
+    return params_.out;
+  }
+
+ protected:
+  SamplerParameters params_;
+};
+} // namespace ffmpeg
diff --git a/torchvision/csrc/io/decoder/gpu/README.rst b/torchvision/csrc/io/decoder/gpu/README.rst
new file mode 100644
index 00000000000..cebd31cb557
--- /dev/null
+++ b/torchvision/csrc/io/decoder/gpu/README.rst
@@ -0,0 +1,21 @@
+GPU Decoder
+===========
+
+The GPU decoder depends on ffmpeg for demuxing, uses the NVDECODE APIs from the nvidia-video-codec sdk, and uses CUDA for processing on the GPU. In order to use it, follow these steps:
+
+* Download the latest `nvidia-video-codec-sdk <https://developer.nvidia.com/nvidia-video-codec-sdk>`_
+* Extract the zipped file.
+* Set the TORCHVISION_INCLUDE environment variable to the location of the video codec headers (`nvcuvid.h` and `cuviddec.h`), which are under the `Interface` directory.
+* Set the TORCHVISION_LIBRARY environment variable to the location of the video codec library (`libnvcuvid.so`), which is under the `Lib/linux/stubs/x86_64` directory.
+* Install the latest ffmpeg from the `conda-forge` channel.
+
+.. code:: bash
+
+   conda install -c conda-forge ffmpeg
+
+* Set the CUDA_HOME environment variable to the CUDA root directory.
+* Build torchvision from source:
+
+.. code:: bash
+
+   python setup.py install
diff --git a/torchvision/csrc/io/decoder/gpu/decoder.cpp b/torchvision/csrc/io/decoder/gpu/decoder.cpp
new file mode 100644
index 00000000000..f7377ede38b
--- /dev/null
+++ b/torchvision/csrc/io/decoder/gpu/decoder.cpp
@@ -0,0 +1,405 @@
+#include "decoder.h"
+#include <c10/util/Logging.h>
+#include <nppi_color_conversion.h>
+#include <cmath>
+#include <cstdint>
+#include <cstring>
+
+static float chroma_height_factor(cudaVideoSurfaceFormat surface_format) {
+  return (surface_format == cudaVideoSurfaceFormat_YUV444 ||
+          surface_format == cudaVideoSurfaceFormat_YUV444_16Bit)
+      ? 1.0
+      : 0.5;
+}
+
+static int chroma_plane_count(cudaVideoSurfaceFormat surface_format) {
+  return (surface_format == cudaVideoSurfaceFormat_YUV444 ||
+          surface_format == cudaVideoSurfaceFormat_YUV444_16Bit)
+      ? 2
+      : 1;
+}
+
+/* Initialise cu_context and video_codec, create context lock and create parser
+ * object.
+ */
+void Decoder::init(CUcontext context, cudaVideoCodec codec) {
+  cu_context = context;
+  video_codec = codec;
+  check_for_cuda_errors(
+      cuvidCtxLockCreate(&ctx_lock, cu_context), __LINE__, __FILE__);
+
+  CUVIDPARSERPARAMS parser_params = {};
+  parser_params.CodecType = codec;
+  parser_params.ulMaxNumDecodeSurfaces = 1;
+  parser_params.ulClockRate = 1000;
+  parser_params.ulMaxDisplayDelay = 0u;
+  parser_params.pUserData = this;
+  parser_params.pfnSequenceCallback = video_sequence_handler;
+  parser_params.pfnDecodePicture = picture_decode_handler;
+  parser_params.pfnDisplayPicture = picture_display_handler;
+  parser_params.pfnGetOperatingPoint = operating_point_handler;
+
+  check_for_cuda_errors(
+      cuvidCreateVideoParser(&parser, &parser_params), __LINE__, __FILE__);
+}
+
+/* Destroy parser object and context lock.
+ */
+Decoder::~Decoder() {
+  if (parser) {
+    cuvidDestroyVideoParser(parser);
+  }
+  cuvidCtxLockDestroy(ctx_lock);
+}
+
+/* Destroy CUvideodecoder object and free up all the unreturned decoded frames.
+ */
+void Decoder::release() {
+  cuCtxPushCurrent(cu_context);
+  if (decoder) {
+    cuvidDestroyDecoder(decoder);
+  }
+  cuCtxPopCurrent(nullptr);
+}
+
+/* Trigger video decoding.
+ */
+void Decoder::decode(const uint8_t* data, unsigned long size) {
+  CUVIDSOURCEDATAPACKET pkt = {};
+  pkt.flags = CUVID_PKT_TIMESTAMP;
+  pkt.payload_size = size;
+  pkt.payload = data;
+  pkt.timestamp = 0;
+  if (!data || size == 0) {
+    pkt.flags |= CUVID_PKT_ENDOFSTREAM;
+  }
+  check_for_cuda_errors(cuvidParseVideoData(parser, &pkt), __LINE__, __FILE__);
+  cuvidStream = 0;
+}
+
+/* Fetch a decoded frame and remove it from the queue.
+ */
+torch::Tensor Decoder::fetch_frame() {
+  if (decoded_frames.empty()) {
+    auto options =
+        torch::TensorOptions().dtype(torch::kU8).device(torch::kCUDA);
+    return torch::zeros({0}, options);
+  }
+  torch::Tensor frame = decoded_frames.front();
+  decoded_frames.pop();
+  return frame;
+}
+
+/* Called when a picture is ready to be decoded.
+ */
+int Decoder::handle_picture_decode(CUVIDPICPARAMS* pic_params) {
+  if (!decoder) {
+    TORCH_CHECK(false, "Uninitialised decoder");
+  }
+  pic_num_in_decode_order[pic_params->CurrPicIdx] = decode_pic_count++;
+  check_for_cuda_errors(cuCtxPushCurrent(cu_context), __LINE__, __FILE__);
+  check_for_cuda_errors(
+      cuvidDecodePicture(decoder, pic_params), __LINE__, __FILE__);
+  check_for_cuda_errors(cuCtxPopCurrent(nullptr), __LINE__, __FILE__);
+  return 1;
+}
+
+/* Process the decoded data and copy it to a cuda memory location.
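+ *
+ * A layout sketch (illustrative, for the NV12 path this function takes): the
+ * mapped source frame is a single pitched allocation holding the luma plane
+ * followed by the interleaved chroma plane, so the two NPP source pointers
+ * below amount to:
+ *
+ *   const uint8_t* luma = (const uint8_t*)source_frame;
+ *   const uint8_t* chroma =
+ *       luma + source_pitch * ((surface_height + 1) & ~1); // even row count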
+ */ +int Decoder::handle_picture_display(CUVIDPARSERDISPINFO* disp_info) { + CUVIDPROCPARAMS proc_params = {}; + proc_params.progressive_frame = disp_info->progressive_frame; + proc_params.second_field = disp_info->repeat_first_field + 1; + proc_params.top_field_first = disp_info->top_field_first; + proc_params.unpaired_field = disp_info->repeat_first_field < 0; + proc_params.output_stream = cuvidStream; + + CUdeviceptr source_frame = 0; + unsigned int source_pitch = 0; + check_for_cuda_errors(cuCtxPushCurrent(cu_context), __LINE__, __FILE__); + check_for_cuda_errors( + cuvidMapVideoFrame( + decoder, + disp_info->picture_index, + &source_frame, + &source_pitch, + &proc_params), + __LINE__, + __FILE__); + + CUVIDGETDECODESTATUS decode_status; + memset(&decode_status, 0, sizeof(decode_status)); + CUresult result = + cuvidGetDecodeStatus(decoder, disp_info->picture_index, &decode_status); + if (result == CUDA_SUCCESS && + (decode_status.decodeStatus == cuvidDecodeStatus_Error || + decode_status.decodeStatus == cuvidDecodeStatus_Error_Concealed)) { + VLOG(1) << "Decode Error occurred for picture " + << pic_num_in_decode_order[disp_info->picture_index]; + } + + auto options = torch::TensorOptions().dtype(torch::kU8).device(torch::kCUDA); + torch::Tensor decoded_frame = torch::empty({get_height(), width, 3}, options); + uint8_t* frame_ptr = decoded_frame.data_ptr(); + const uint8_t* const source_arr[] = { + (const uint8_t* const)source_frame, + (const uint8_t* const)(source_frame + + source_pitch * ((surface_height + 1) & ~1))}; + + auto err = nppiNV12ToRGB_709CSC_8u_P2C3R( + source_arr, + source_pitch, + frame_ptr, + width * 3, + {(int)decoded_frame.size(1), (int)decoded_frame.size(0)}); + + TORCH_CHECK( + err == NPP_NO_ERROR, + "Failed to convert from NV12 to RGB. Error code:", + err); + + check_for_cuda_errors(cuStreamSynchronize(cuvidStream), __LINE__, __FILE__); + decoded_frames.push(decoded_frame); + check_for_cuda_errors(cuCtxPopCurrent(nullptr), __LINE__, __FILE__); + + check_for_cuda_errors( + cuvidUnmapVideoFrame(decoder, source_frame), __LINE__, __FILE__); + return 1; +} + +/* Query the capabilities of the underlying hardware video decoder and + * verify if the hardware supports decoding the passed video. 
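+ *
+ * As a worked example of the macroblock check below (numbers illustrative):
+ * a 3840x2160 stream contains (3840 >> 4) * (2160 >> 4) = 240 * 135 = 32400
+ * 16x16 macroblocks, which must not exceed decode_caps.nMaxMBCount, just as
+ * the coded width/height must fit within nMaxWidth x nMaxHeight.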
+ */ +void Decoder::query_hardware(CUVIDEOFORMAT* video_format) { + CUVIDDECODECAPS decode_caps = {}; + decode_caps.eCodecType = video_format->codec; + decode_caps.eChromaFormat = video_format->chroma_format; + decode_caps.nBitDepthMinus8 = video_format->bit_depth_luma_minus8; + + check_for_cuda_errors(cuCtxPushCurrent(cu_context), __LINE__, __FILE__); + check_for_cuda_errors(cuvidGetDecoderCaps(&decode_caps), __LINE__, __FILE__); + check_for_cuda_errors(cuCtxPopCurrent(nullptr), __LINE__, __FILE__); + + if (!decode_caps.bIsSupported) { + TORCH_CHECK(false, "Codec not supported on this GPU"); + } + if ((video_format->coded_width > decode_caps.nMaxWidth) || + (video_format->coded_height > decode_caps.nMaxHeight)) { + TORCH_CHECK( + false, + "Resolution : ", + video_format->coded_width, + "x", + video_format->coded_height, + "\nMax Supported (wxh) : ", + decode_caps.nMaxWidth, + "x", + decode_caps.nMaxHeight, + "\nResolution not supported on this GPU"); + } + if ((video_format->coded_width >> 4) * (video_format->coded_height >> 4) > + decode_caps.nMaxMBCount) { + TORCH_CHECK( + false, + "MBCount : ", + (video_format->coded_width >> 4) * (video_format->coded_height >> 4), + "\nMax Supported mbcnt : ", + decode_caps.nMaxMBCount, + "\nMBCount not supported on this GPU"); + } + // Check if output format supported. If not, check fallback options + if (!(decode_caps.nOutputFormatMask & (1 << video_output_format))) { + if (decode_caps.nOutputFormatMask & (1 << cudaVideoSurfaceFormat_NV12)) { + video_output_format = cudaVideoSurfaceFormat_NV12; + } else if ( + decode_caps.nOutputFormatMask & (1 << cudaVideoSurfaceFormat_P016)) { + video_output_format = cudaVideoSurfaceFormat_P016; + } else if ( + decode_caps.nOutputFormatMask & (1 << cudaVideoSurfaceFormat_YUV444)) { + video_output_format = cudaVideoSurfaceFormat_YUV444; + } else if ( + decode_caps.nOutputFormatMask & + (1 << cudaVideoSurfaceFormat_YUV444_16Bit)) { + video_output_format = cudaVideoSurfaceFormat_YUV444_16Bit; + } else { + TORCH_CHECK(false, "No supported output format found"); + } + } +} + +/* Called before decoding frames and/or whenever there is a configuration + * change. + */ +int Decoder::handle_video_sequence(CUVIDEOFORMAT* video_format) { + // video_codec has been set in init(). Here it's set + // again for potential correction. + video_codec = video_format->codec; + video_chroma_format = video_format->chroma_format; + bit_depth_minus8 = video_format->bit_depth_luma_minus8; + bytes_per_pixel = bit_depth_minus8 > 0 ? 2 : 1; + // Set the output surface format same as chroma format + switch (video_chroma_format) { + case cudaVideoChromaFormat_Monochrome: + case cudaVideoChromaFormat_420: + video_output_format = video_format->bit_depth_luma_minus8 + ? cudaVideoSurfaceFormat_P016 + : cudaVideoSurfaceFormat_NV12; + break; + case cudaVideoChromaFormat_444: + video_output_format = video_format->bit_depth_luma_minus8 + ? cudaVideoSurfaceFormat_YUV444_16Bit + : cudaVideoSurfaceFormat_YUV444; + break; + case cudaVideoChromaFormat_422: + video_output_format = cudaVideoSurfaceFormat_NV12; + } + + query_hardware(video_format); + + if (width && luma_height && chroma_height) { + // cuvidCreateDecoder() has been called before and now there's possible + // config change. 
+ return reconfigure_decoder(video_format); + } + + cu_video_format = *video_format; + unsigned long decode_surface = video_format->min_num_decode_surfaces; + cudaVideoDeinterlaceMode deinterlace_mode = cudaVideoDeinterlaceMode_Adaptive; + + if (video_format->progressive_sequence) { + deinterlace_mode = cudaVideoDeinterlaceMode_Weave; + } + + CUVIDDECODECREATEINFO video_decode_create_info = {}; + video_decode_create_info.ulWidth = video_format->coded_width; + video_decode_create_info.ulHeight = video_format->coded_height; + video_decode_create_info.ulNumDecodeSurfaces = decode_surface; + video_decode_create_info.CodecType = video_format->codec; + video_decode_create_info.ChromaFormat = video_format->chroma_format; + // With PreferCUVID, JPEG is still decoded by CUDA while video is decoded + // by NVDEC hardware + video_decode_create_info.ulCreationFlags = cudaVideoCreate_PreferCUVID; + video_decode_create_info.bitDepthMinus8 = video_format->bit_depth_luma_minus8; + video_decode_create_info.OutputFormat = video_output_format; + video_decode_create_info.DeinterlaceMode = deinterlace_mode; + video_decode_create_info.ulNumOutputSurfaces = 2; + video_decode_create_info.vidLock = ctx_lock; + + // AV1 has max width/height of sequence in sequence header + if (video_format->codec == cudaVideoCodec_AV1 && + video_format->seqhdr_data_length > 0) { + CUVIDEOFORMATEX* video_format_ex = (CUVIDEOFORMATEX*)video_format; + max_width = video_format_ex->av1.max_width; + max_height = video_format_ex->av1.max_height; + } + if (max_width < video_format->coded_width) { + max_width = video_format->coded_width; + } + if (max_height < video_format->coded_height) { + max_height = video_format->coded_height; + } + video_decode_create_info.ulMaxWidth = max_width; + video_decode_create_info.ulMaxHeight = max_height; + width = video_format->display_area.right - video_format->display_area.left; + luma_height = + video_format->display_area.bottom - video_format->display_area.top; + video_decode_create_info.ulTargetWidth = video_format->coded_width; + video_decode_create_info.ulTargetHeight = video_format->coded_height; + chroma_height = + (int)(ceil(luma_height * chroma_height_factor(video_output_format))); + num_chroma_planes = chroma_plane_count(video_output_format); + surface_height = video_decode_create_info.ulTargetHeight; + surface_width = video_decode_create_info.ulTargetWidth; + display_rect.bottom = video_decode_create_info.display_area.bottom; + display_rect.top = video_decode_create_info.display_area.top; + display_rect.left = video_decode_create_info.display_area.left; + display_rect.right = video_decode_create_info.display_area.right; + + check_for_cuda_errors(cuCtxPushCurrent(cu_context), __LINE__, __FILE__); + check_for_cuda_errors( + cuvidCreateDecoder(&decoder, &video_decode_create_info), + __LINE__, + __FILE__); + check_for_cuda_errors(cuCtxPopCurrent(nullptr), __LINE__, __FILE__); + return decode_surface; +} + +int Decoder::reconfigure_decoder(CUVIDEOFORMAT* video_format) { + if (video_format->bit_depth_luma_minus8 != + cu_video_format.bit_depth_luma_minus8 || + video_format->bit_depth_chroma_minus8 != + cu_video_format.bit_depth_chroma_minus8) { + TORCH_CHECK(false, "Reconfigure not supported for bit depth change"); + } + if (video_format->chroma_format != cu_video_format.chroma_format) { + TORCH_CHECK(false, "Reconfigure not supported for chroma format change"); + } + + bool decode_res_change = + !(video_format->coded_width == cu_video_format.coded_width && + video_format->coded_height == 
cu_video_format.coded_height); + bool display_rect_change = + !(video_format->display_area.bottom == + cu_video_format.display_area.bottom && + video_format->display_area.top == cu_video_format.display_area.top && + video_format->display_area.left == cu_video_format.display_area.left && + video_format->display_area.right == cu_video_format.display_area.right); + + unsigned int decode_surface = video_format->min_num_decode_surfaces; + + if ((video_format->coded_width > max_width) || + (video_format->coded_height > max_height)) { + // For VP9, let driver handle the change if new width/height > + // maxwidth/maxheight + if (video_codec != cudaVideoCodec_VP9) { + TORCH_CHECK( + false, + "Reconfigure not supported when width/height > maxwidth/maxheight"); + } + return 1; + } + + if (!decode_res_change) { + // If the coded_width/coded_height hasn't changed but display resolution has + // changed, then need to update width/height for correct output without + // cropping. Example : 1920x1080 vs 1920x1088. + if (display_rect_change) { + width = + video_format->display_area.right - video_format->display_area.left; + luma_height = + video_format->display_area.bottom - video_format->display_area.top; + chroma_height = + (int)ceil(luma_height * chroma_height_factor(video_output_format)); + num_chroma_planes = chroma_plane_count(video_output_format); + } + return 1; + } + cu_video_format.coded_width = video_format->coded_width; + cu_video_format.coded_height = video_format->coded_height; + CUVIDRECONFIGUREDECODERINFO reconfig_params = {}; + reconfig_params.ulWidth = video_format->coded_width; + reconfig_params.ulHeight = video_format->coded_height; + reconfig_params.ulTargetWidth = surface_width; + reconfig_params.ulTargetHeight = surface_height; + reconfig_params.ulNumDecodeSurfaces = decode_surface; + reconfig_params.display_area.bottom = display_rect.bottom; + reconfig_params.display_area.top = display_rect.top; + reconfig_params.display_area.left = display_rect.left; + reconfig_params.display_area.right = display_rect.right; + + check_for_cuda_errors(cuCtxPushCurrent(cu_context), __LINE__, __FILE__); + check_for_cuda_errors( + cuvidReconfigureDecoder(decoder, &reconfig_params), __LINE__, __FILE__); + check_for_cuda_errors(cuCtxPopCurrent(nullptr), __LINE__, __FILE__); + + return decode_surface; +} + +/* Called from AV1 sequence header to get operating point of an AV1 bitstream. + */ +int Decoder::get_operating_point(CUVIDOPERATINGPOINTINFO* oper_point_info) { + return oper_point_info->codec == cudaVideoCodec_AV1 && + oper_point_info->av1.operating_points_cnt > 1 + ? 
0
+      : -1;
+}
diff --git a/torchvision/csrc/io/decoder/gpu/decoder.h b/torchvision/csrc/io/decoder/gpu/decoder.h
new file mode 100644
index 00000000000..5ad685ec746
--- /dev/null
+++ b/torchvision/csrc/io/decoder/gpu/decoder.h
@@ -0,0 +1,89 @@
+#include <cstdint>
+#include <queue>
+#include <string>
+#include <cuda.h>
+#include <cuviddec.h>
+#include <nvcuvid.h>
+#include <torch/torch.h>
+
+static auto check_for_cuda_errors =
+    [](CUresult result, int line_num, std::string file_name) {
+      if (CUDA_SUCCESS != result) {
+        const char* error_name = nullptr;
+
+        TORCH_CHECK(
+            CUDA_SUCCESS != cuGetErrorName(result, &error_name),
+            "CUDA error: ",
+            error_name,
+            " in ",
+            file_name,
+            " at line ",
+            line_num)
+        TORCH_CHECK(
+            false, "Error: ", result, " in ", file_name, " at line ", line_num);
+      }
+    };
+
+struct Rect {
+  int left, top, right, bottom;
+};
+
+class Decoder {
+ public:
+  Decoder() {}
+  ~Decoder();
+  void init(CUcontext, cudaVideoCodec);
+  void release();
+  void decode(const uint8_t*, unsigned long);
+  torch::Tensor fetch_frame();
+  int get_height() const {
+    return luma_height;
+  }
+
+ private:
+  unsigned int width = 0, luma_height = 0, chroma_height = 0;
+  unsigned int surface_height = 0, surface_width = 0;
+  unsigned int max_width = 0, max_height = 0;
+  unsigned int num_chroma_planes = 0;
+  int bit_depth_minus8 = 0, bytes_per_pixel = 1;
+  int decode_pic_count = 0, pic_num_in_decode_order[32];
+  std::queue<torch::Tensor> decoded_frames;
+  CUcontext cu_context = NULL;
+  CUvideoctxlock ctx_lock;
+  CUvideoparser parser = NULL;
+  CUvideodecoder decoder = NULL;
+  CUstream cuvidStream = 0;
+  cudaVideoCodec video_codec = cudaVideoCodec_NumCodecs;
+  cudaVideoChromaFormat video_chroma_format = cudaVideoChromaFormat_420;
+  cudaVideoSurfaceFormat video_output_format = cudaVideoSurfaceFormat_NV12;
+  CUVIDEOFORMAT cu_video_format = {};
+  Rect display_rect = {};
+
+  static int video_sequence_handler(
+      void* user_data,
+      CUVIDEOFORMAT* video_format) {
+    return ((Decoder*)user_data)->handle_video_sequence(video_format);
+  }
+  static int picture_decode_handler(
+      void* user_data,
+      CUVIDPICPARAMS* pic_params) {
+    return ((Decoder*)user_data)->handle_picture_decode(pic_params);
+  }
+  static int picture_display_handler(
+      void* user_data,
+      CUVIDPARSERDISPINFO* disp_info) {
+    return ((Decoder*)user_data)->handle_picture_display(disp_info);
+  }
+  static int operating_point_handler(
+      void* user_data,
+      CUVIDOPERATINGPOINTINFO* operating_info) {
+    return ((Decoder*)user_data)->get_operating_point(operating_info);
+  }
+
+  void query_hardware(CUVIDEOFORMAT*);
+  int reconfigure_decoder(CUVIDEOFORMAT*);
+  int handle_video_sequence(CUVIDEOFORMAT*);
+  int handle_picture_decode(CUVIDPICPARAMS*);
+  int handle_picture_display(CUVIDPARSERDISPINFO*);
+  int get_operating_point(CUVIDOPERATINGPOINTINFO*);
+};
diff --git a/torchvision/csrc/io/decoder/gpu/demuxer.h b/torchvision/csrc/io/decoder/gpu/demuxer.h
new file mode 100644
index 00000000000..f6e72dceee1
--- /dev/null
+++ b/torchvision/csrc/io/decoder/gpu/demuxer.h
@@ -0,0 +1,257 @@
+extern "C" {
+#include <libavcodec/avcodec.h>
+#include <libavformat/avformat.h>
+#include <libavformat/avio.h>
+#include <libavutil/avutil.h>
+}
+
+class Demuxer {
+ private:
+  AVFormatContext* fmtCtx = NULL;
+  AVBSFContext* bsfCtx = NULL;
+  AVPacket pkt, pktFiltered;
+  AVCodecID eVideoCodec;
+  uint8_t* dataWithHeader = NULL;
+  bool bMp4H264, bMp4HEVC, bMp4MPEG4;
+  unsigned int frameCount = 0;
+  int iVideoStream;
+  double timeBase = 0.0;
+
+ public:
+  Demuxer(const char* filePath) {
+    avformat_network_init();
+    TORCH_CHECK(
+        0 <= avformat_open_input(&fmtCtx, filePath, NULL, NULL),
+        "avformat_open_input() failed at line ",
+        __LINE__,
+        " in demuxer.h\n");
if (!fmtCtx) { + TORCH_CHECK( + false, + "Encountered NULL AVFormatContext at line ", + __LINE__, + " in demuxer.h\n"); + } + + TORCH_CHECK( + 0 <= avformat_find_stream_info(fmtCtx, NULL), + "avformat_find_stream_info() failed at line ", + __LINE__, + " in demuxer.h\n"); + iVideoStream = + av_find_best_stream(fmtCtx, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0); + if (iVideoStream < 0) { + TORCH_CHECK( + false, + "av_find_best_stream() failed at line ", + __LINE__, + " in demuxer.h\n"); + } + + eVideoCodec = fmtCtx->streams[iVideoStream]->codecpar->codec_id; + AVRational rTimeBase = fmtCtx->streams[iVideoStream]->time_base; + timeBase = av_q2d(rTimeBase); + + bMp4H264 = eVideoCodec == AV_CODEC_ID_H264 && + (!strcmp(fmtCtx->iformat->long_name, "QuickTime / MOV") || + !strcmp(fmtCtx->iformat->long_name, "FLV (Flash Video)") || + !strcmp(fmtCtx->iformat->long_name, "Matroska / WebM")); + bMp4HEVC = eVideoCodec == AV_CODEC_ID_HEVC && + (!strcmp(fmtCtx->iformat->long_name, "QuickTime / MOV") || + !strcmp(fmtCtx->iformat->long_name, "FLV (Flash Video)") || + !strcmp(fmtCtx->iformat->long_name, "Matroska / WebM")); + bMp4MPEG4 = eVideoCodec == AV_CODEC_ID_MPEG4 && + (!strcmp(fmtCtx->iformat->long_name, "QuickTime / MOV") || + !strcmp(fmtCtx->iformat->long_name, "FLV (Flash Video)") || + !strcmp(fmtCtx->iformat->long_name, "Matroska / WebM")); + + av_init_packet(&pkt); + pkt.data = NULL; + pkt.size = 0; + av_init_packet(&pktFiltered); + pktFiltered.data = NULL; + pktFiltered.size = 0; + + if (bMp4H264) { + const AVBitStreamFilter* bsf = av_bsf_get_by_name("h264_mp4toannexb"); + if (!bsf) { + TORCH_CHECK( + false, + "av_bsf_get_by_name() failed at line ", + __LINE__, + " in demuxer.h\n"); + } + TORCH_CHECK( + 0 <= av_bsf_alloc(bsf, &bsfCtx), + "av_bsf_alloc() failed at line ", + __LINE__, + " in demuxer.h\n"); + avcodec_parameters_copy( + bsfCtx->par_in, fmtCtx->streams[iVideoStream]->codecpar); + TORCH_CHECK( + 0 <= av_bsf_init(bsfCtx), + "av_bsf_init() failed at line ", + __LINE__, + " in demuxer.h\n"); + } + if (bMp4HEVC) { + const AVBitStreamFilter* bsf = av_bsf_get_by_name("hevc_mp4toannexb"); + if (!bsf) { + TORCH_CHECK( + false, + "av_bsf_get_by_name() failed at line ", + __LINE__, + " in demuxer.h\n"); + } + TORCH_CHECK( + 0 <= av_bsf_alloc(bsf, &bsfCtx), + "av_bsf_alloc() failed at line ", + __LINE__, + " in demuxer.h\n"); + avcodec_parameters_copy( + bsfCtx->par_in, fmtCtx->streams[iVideoStream]->codecpar); + TORCH_CHECK( + 0 <= av_bsf_init(bsfCtx), + "av_bsf_init() failed at line ", + __LINE__, + " in demuxer.h\n"); + } + } + + ~Demuxer() { + if (!fmtCtx) { + return; + } + if (pkt.data) { + av_packet_unref(&pkt); + } + if (pktFiltered.data) { + av_packet_unref(&pktFiltered); + } + if (bsfCtx) { + av_bsf_free(&bsfCtx); + } + avformat_close_input(&fmtCtx); + if (dataWithHeader) { + av_free(dataWithHeader); + } + } + + AVCodecID get_video_codec() { + return eVideoCodec; + } + + double get_duration() const { + return (double)fmtCtx->duration / AV_TIME_BASE; + } + + double get_fps() const { + return av_q2d(fmtCtx->streams[iVideoStream]->r_frame_rate); + } + + bool demux(uint8_t** video, unsigned long* videoBytes) { + if (!fmtCtx) { + return false; + } + *videoBytes = 0; + + if (pkt.data) { + av_packet_unref(&pkt); + } + int e = 0; + while ((e = av_read_frame(fmtCtx, &pkt)) >= 0 && + pkt.stream_index != iVideoStream) { + av_packet_unref(&pkt); + } + if (e < 0) { + return false; + } + + if (bMp4H264 || bMp4HEVC) { + if (pktFiltered.data) { + av_packet_unref(&pktFiltered); + } + TORCH_CHECK( + 0 <= 
av_bsf_send_packet(bsfCtx, &pkt),
+          "av_bsf_send_packet() failed at line ",
+          __LINE__,
+          " in demuxer.h\n");
+      TORCH_CHECK(
+          0 <= av_bsf_receive_packet(bsfCtx, &pktFiltered),
+          "av_bsf_receive_packet() failed at line ",
+          __LINE__,
+          " in demuxer.h\n");
+      *video = pktFiltered.data;
+      *videoBytes = pktFiltered.size;
+    } else {
+      if (bMp4MPEG4 && (frameCount == 0)) {
+        int extraDataSize =
+            fmtCtx->streams[iVideoStream]->codecpar->extradata_size;
+
+        if (extraDataSize > 0) {
+          dataWithHeader = (uint8_t*)av_malloc(
+              extraDataSize + pkt.size - 3 * sizeof(uint8_t));
+          if (!dataWithHeader) {
+            TORCH_CHECK(
+                false,
+                "av_malloc() failed at line ",
+                __LINE__,
+                " in demuxer.h\n");
+          }
+          memcpy(
+              dataWithHeader,
+              fmtCtx->streams[iVideoStream]->codecpar->extradata,
+              extraDataSize);
+          memcpy(
+              dataWithHeader + extraDataSize,
+              pkt.data + 3,
+              pkt.size - 3 * sizeof(uint8_t));
+          *video = dataWithHeader;
+          *videoBytes = extraDataSize + pkt.size - 3 * sizeof(uint8_t);
+        }
+      } else {
+        *video = pkt.data;
+        *videoBytes = pkt.size;
+      }
+    }
+    frameCount++;
+    return true;
+  }
+
+  void seek(double timestamp, int flag) {
+    int64_t time = timestamp * AV_TIME_BASE;
+    TORCH_CHECK(
+        0 <= av_seek_frame(fmtCtx, -1, time, flag),
+        "av_seek_frame() failed at line ",
+        __LINE__,
+        " in demuxer.h\n");
+  }
+};
+
+inline cudaVideoCodec ffmpeg_to_codec(AVCodecID id) {
+  switch (id) {
+    case AV_CODEC_ID_MPEG1VIDEO:
+      return cudaVideoCodec_MPEG1;
+    case AV_CODEC_ID_MPEG2VIDEO:
+      return cudaVideoCodec_MPEG2;
+    case AV_CODEC_ID_MPEG4:
+      return cudaVideoCodec_MPEG4;
+    case AV_CODEC_ID_WMV3:
+    case AV_CODEC_ID_VC1:
+      return cudaVideoCodec_VC1;
+    case AV_CODEC_ID_H264:
+      return cudaVideoCodec_H264;
+    case AV_CODEC_ID_HEVC:
+      return cudaVideoCodec_HEVC;
+    case AV_CODEC_ID_VP8:
+      return cudaVideoCodec_VP8;
+    case AV_CODEC_ID_VP9:
+      return cudaVideoCodec_VP9;
+    case AV_CODEC_ID_MJPEG:
+      return cudaVideoCodec_JPEG;
+    case AV_CODEC_ID_AV1:
+      return cudaVideoCodec_AV1;
+    default:
+      return cudaVideoCodec_NumCodecs;
+  }
+}
diff --git a/torchvision/csrc/io/decoder/gpu/gpu_decoder.cpp b/torchvision/csrc/io/decoder/gpu/gpu_decoder.cpp
new file mode 100644
index 00000000000..aef1ef93b09
--- /dev/null
+++ b/torchvision/csrc/io/decoder/gpu/gpu_decoder.cpp
@@ -0,0 +1,65 @@
+#include "gpu_decoder.h"
+#include <c10/cuda/CUDAGuard.h>
+
+/* Set cuda device, create cuda context and initialise the demuxer and decoder.
+ */
+GPUDecoder::GPUDecoder(std::string src_file, torch::Device dev)
+    : demuxer(src_file.c_str()) {
+  at::cuda::CUDAGuard device_guard(dev);
+  device = device_guard.current_device().index();
+  check_for_cuda_errors(
+      cuDevicePrimaryCtxRetain(&ctx, device), __LINE__, __FILE__);
+  decoder.init(ctx, ffmpeg_to_codec(demuxer.get_video_codec()));
+  initialised = true;
+}
+
+GPUDecoder::~GPUDecoder() {
+  at::cuda::CUDAGuard device_guard(device);
+  decoder.release();
+  if (initialised) {
+    check_for_cuda_errors(
+        cuDevicePrimaryCtxRelease(device), __LINE__, __FILE__);
+  }
+}
+
+/* Fetch a decoded frame tensor after demuxing and decoding.
+ */
+torch::Tensor GPUDecoder::decode() {
+  torch::Tensor frameTensor;
+  unsigned long videoBytes = 0;
+  uint8_t* video = nullptr;
+  at::cuda::CUDAGuard device_guard(device);
+  torch::Tensor frame;
+  do {
+    demuxer.demux(&video, &videoBytes);
+    decoder.decode(video, videoBytes);
+    frame = decoder.fetch_frame();
+  } while (frame.numel() == 0 && videoBytes > 0);
+  return frame;
+}
+
+/* Seek to a passed timestamp. The second argument controls whether to seek to a
+ * keyframe.
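+ *
+ * A usage sketch from C++ (the file path is hypothetical):
+ *
+ *   GPUDecoder dec("/tmp/example.mp4", torch::Device(torch::kCUDA, 0));
+ *   dec.seek(5.0, true);                // keyframes_only: keyframe at/before 5s
+ *   torch::Tensor frame = dec.decode(); // next decoded RGB frame on the GPU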
+ */
+void GPUDecoder::seek(double timestamp, bool keyframes_only) {
+  int flag = keyframes_only ? 0 : AVSEEK_FLAG_ANY;
+  demuxer.seek(timestamp, flag);
+}
+
+c10::Dict<std::string, c10::Dict<std::string, double>>
+GPUDecoder::get_metadata() const {
+  c10::Dict<std::string, c10::Dict<std::string, double>> metadata;
+  c10::Dict<std::string, double> video_metadata;
+  video_metadata.insert("duration", demuxer.get_duration());
+  video_metadata.insert("fps", demuxer.get_fps());
+  metadata.insert("video", video_metadata);
+  return metadata;
+}
+
+TORCH_LIBRARY(torchvision, m) {
+  m.class_<GPUDecoder>("GPUDecoder")
+      .def(torch::init<std::string, torch::Device>())
+      .def("seek", &GPUDecoder::seek)
+      .def("get_metadata", &GPUDecoder::get_metadata)
+      .def("next", &GPUDecoder::decode);
+}
diff --git a/torchvision/csrc/io/decoder/gpu/gpu_decoder.h b/torchvision/csrc/io/decoder/gpu/gpu_decoder.h
new file mode 100644
index 00000000000..22bf680a982
--- /dev/null
+++ b/torchvision/csrc/io/decoder/gpu/gpu_decoder.h
@@ -0,0 +1,20 @@
+#include <torch/custom_class.h>
+#include <torch/torch.h>
+#include "decoder.h"
+#include "demuxer.h"
+
+class GPUDecoder : public torch::CustomClassHolder {
+ public:
+  GPUDecoder(std::string, torch::Device);
+  ~GPUDecoder();
+  torch::Tensor decode();
+  void seek(double, bool);
+  c10::Dict<std::string, c10::Dict<std::string, double>> get_metadata() const;
+
+ private:
+  Demuxer demuxer;
+  CUcontext ctx;
+  Decoder decoder;
+  int64_t device;
+  bool initialised = false;
+};
diff --git a/torchvision/csrc/io/decoder/memory_buffer.cpp b/torchvision/csrc/io/decoder/memory_buffer.cpp
new file mode 100644
index 00000000000..4e420c3b3cd
--- /dev/null
+++ b/torchvision/csrc/io/decoder/memory_buffer.cpp
@@ -0,0 +1,71 @@
+#include "memory_buffer.h"
+#include <c10/util/Logging.h>
+
+namespace ffmpeg {
+
+MemoryBuffer::MemoryBuffer(const uint8_t* buffer, size_t size)
+    : buffer_(buffer), len_(size) {}
+
+int MemoryBuffer::read(uint8_t* buf, int size) {
+  if (pos_ < len_) {
+    auto available = std::min(int(len_ - pos_), size);
+    memcpy(buf, buffer_ + pos_, available);
+    pos_ += available;
+    return available;
+  }
+
+  return 0;
+}
+
+int64_t MemoryBuffer::seek(int64_t offset, int whence) {
+  if (whence & AVSEEK_SIZE) {
+    return len_;
+  }
+
+  // remove force flag
+  whence &= ~AVSEEK_FORCE;
+
+  switch (whence) {
+    case SEEK_SET:
+      if (offset >= 0 && offset <= len_) {
+        pos_ = offset;
+      }
+      break;
+    case SEEK_END:
+      if (len_ + offset >= 0 && len_ + offset <= len_) {
+        pos_ = len_ + offset;
+      }
+      break;
+    case SEEK_CUR:
+      if (pos_ + offset > 0 && pos_ + offset <= len_) {
+        pos_ += offset;
+      }
+      break;
+    default:
+      LOG(ERROR) << "Unknown whence flag gets provided: " << whence;
+  }
+  return pos_;
+}
+
+/* static */
+DecoderInCallback MemoryBuffer::getCallback(
+    const uint8_t* buffer,
+    size_t size) {
+  MemoryBuffer object(buffer, size);
+  return
+      [object](uint8_t* out, int size, int whence, uint64_t timeoutMs) mutable
+      -> int {
+        if (out) { // see defs.h file
+          // read mode
+          return object.read(out, size);
+        }
+        // seek mode
+        if (!timeoutMs) {
+          // seek capability, yes - supported
+          return 0;
+        }
+        return object.seek(size, whence);
+      };
+}
+
+} // namespace ffmpeg
diff --git a/torchvision/csrc/io/decoder/memory_buffer.h b/torchvision/csrc/io/decoder/memory_buffer.h
new file mode 100644
index 00000000000..909626d3cae
--- /dev/null
+++ b/torchvision/csrc/io/decoder/memory_buffer.h
@@ -0,0 +1,25 @@
+#pragma once
+
+#include "defs.h"
+
+namespace ffmpeg {
+
+/**
+ * Class uses an external memory buffer and implements a seekable interface.
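+ *
+ * A usage sketch (the buffer would typically be a media file read into
+ * memory, as in sync_decoder_test.cpp; params is a DecoderParameters
+ * instance, and readFileIntoMemory is a hypothetical helper):
+ *
+ *   std::vector<uint8_t> data = readFileIntoMemory(path);
+ *   SyncDecoder decoder;
+ *   std::vector<DecoderMetadata> metadata;
+ *   decoder.init(
+ *       params, MemoryBuffer::getCallback(data.data(), data.size()), &metadata);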
+ */
+class MemoryBuffer {
+ public:
+  explicit MemoryBuffer(const uint8_t* buffer, size_t size);
+  int64_t seek(int64_t offset, int whence);
+  int read(uint8_t* buf, int size);
+
+  // static constructor for decoder callback.
+  static DecoderInCallback getCallback(const uint8_t* buffer, size_t size);
+
+ private:
+  const uint8_t* buffer_; // set at construction time
+  long pos_{0}; // current position
+  long len_{0}; // bytes in buffer
+};
+
+} // namespace ffmpeg
diff --git a/torchvision/csrc/io/decoder/seekable_buffer.cpp b/torchvision/csrc/io/decoder/seekable_buffer.cpp
new file mode 100644
index 00000000000..41e3e689c7b
--- /dev/null
+++ b/torchvision/csrc/io/decoder/seekable_buffer.cpp
@@ -0,0 +1,139 @@
+#include "seekable_buffer.h"
+#include <chrono>
+#include <cstring>
+#include "memory_buffer.h"
+
+namespace ffmpeg {
+
+int SeekableBuffer::init(
+    DecoderInCallback&& in,
+    uint64_t timeoutMs,
+    size_t maxSeekableBytes,
+    ImageType* type) {
+  shutdown();
+  isSeekable_ = in(nullptr, 0, 0, 0) == 0;
+  if (isSeekable_) { // seekable
+    if (type) {
+      if (!readBytes(in, 8, timeoutMs)) {
+        return -1;
+      }
+      setImageType(type);
+      end_ = 0;
+      eof_ = false;
+      std::vector<uint8_t>().swap(buffer_);
+      // reset callback
+      if (in(nullptr, 0, SEEK_SET, timeoutMs)) {
+        return -1;
+      }
+    }
+    inCallback_ = std::forward<DecoderInCallback>(in);
+    return 1;
+  }
+
+  if (!readBytes(in, maxSeekableBytes + (type ? 8 : 0), timeoutMs)) {
+    return -1;
+  }
+
+  if (type) {
+    setImageType(type);
+  }
+
+  if (eof_) {
+    end_ = 0;
+    eof_ = false;
+    // reuse MemoryBuffer functionality
+    inCallback_ = MemoryBuffer::getCallback(buffer_.data(), buffer_.size());
+    isSeekable_ = true;
+    return 1;
+  }
+  inCallback_ = std::forward<DecoderInCallback>(in);
+  return 0;
+}
+
+bool SeekableBuffer::readBytes(
+    DecoderInCallback& in,
+    size_t maxBytes,
+    uint64_t timeoutMs) {
+  // Resize to the minimum 4K page or less
+  buffer_.resize(std::min(maxBytes, size_t(4 * 1024UL)));
+  end_ = 0;
+  eof_ = false;
+
+  auto end =
+      std::chrono::steady_clock::now() + std::chrono::milliseconds(timeoutMs);
+  auto watcher = [end]() -> bool {
+    return std::chrono::steady_clock::now() <= end;
+  };
+
+  bool hasTime = true;
+  while (!eof_ && end_ < maxBytes && (hasTime = watcher())) {
+    // let's read all bytes into the available buffer
+    auto res = in(buffer_.data() + end_, buffer_.size() - end_, 0, timeoutMs);
+    if (res > 0) {
+      end_ += res;
+      if (end_ == buffer_.size()) {
+        buffer_.resize(std::min(size_t(end_ * 4UL), maxBytes));
+      }
+    } else if (res == 0) {
+      eof_ = true;
+    } else {
+      // error
+      return false;
+    }
+  }
+
+  buffer_.resize(end_);
+
+  return hasTime;
+}
+
+void SeekableBuffer::setImageType(ImageType* type) {
+  if (buffer_.size() > 2 && buffer_[0] == 0xFF && buffer_[1] == 0xD8 &&
+      buffer_[2] == 0xFF) {
+    *type = ImageType::JPEG;
+  } else if (
+      buffer_.size() > 3 && buffer_[1] == 'P' && buffer_[2] == 'N' &&
+      buffer_[3] == 'G') {
+    *type = ImageType::PNG;
+  } else if (
+      buffer_.size() > 1 &&
+      ((buffer_[0] == 0x49 && buffer_[1] == 0x49) ||
+       (buffer_[0] == 0x4D && buffer_[1] == 0x4D))) {
+    *type = ImageType::TIFF;
+  } else {
+    *type = ImageType::UNKNOWN;
+  }
+}
+
+int SeekableBuffer::read(uint8_t* buf, int size, uint64_t timeoutMs) {
+  if (isSeekable_) {
+    return inCallback_(buf, size, 0, timeoutMs);
+  }
+  if (pos_ < end_) {
+    // read cached bytes for non-seekable callback
+    auto available = std::min(int(end_ - pos_), size);
+    memcpy(buf, buffer_.data() + pos_, available);
+    pos_ += available;
+    return available;
+  } else if (!eof_) {
+    // normal sequential read (see defs.h file), i.e. @buf != null
+    auto res = inCallback_(buf, size, 0, timeoutMs); // read through
+    eof_ = res == 0;
+    return res;
+  } else {
+    return 0;
+  }
+}
+
+int64_t SeekableBuffer::seek(int64_t offset, int whence, uint64_t timeoutMs) {
+  return inCallback_(nullptr, offset, whence, timeoutMs);
+}
+
+void SeekableBuffer::shutdown() {
+  pos_ = end_ = 0;
+  eof_ = false;
+  std::vector<uint8_t>().swap(buffer_);
+  inCallback_ = nullptr;
+}
+
+} // namespace ffmpeg
diff --git a/torchvision/csrc/io/decoder/seekable_buffer.h b/torchvision/csrc/io/decoder/seekable_buffer.h
new file mode 100644
index 00000000000..9d5729f5306
--- /dev/null
+++ b/torchvision/csrc/io/decoder/seekable_buffer.h
@@ -0,0 +1,45 @@
+#pragma once
+
+#include "defs.h"
+
+namespace ffmpeg {
+
+/**
+ * Class uses an internal buffer to store the initial bytes as a seekable cache
+ * from the media provider and lets ffmpeg seek and read bytes from the cache,
+ * and beyond that - read bytes directly from the media provider
+ */
+enum class ImageType {
+  UNKNOWN = 0,
+  JPEG = 1,
+  PNG = 2,
+  TIFF = 3,
+};
+
+class SeekableBuffer {
+ public:
+  // @type is optional, non-null only if image detection is required
+  // \returns 1 if the buffer is seekable, 0 - if not seekable, < 0 on error
+  int init(
+      DecoderInCallback&& in,
+      uint64_t timeoutMs,
+      size_t maxSeekableBytes,
+      ImageType* type);
+  int read(uint8_t* buf, int size, uint64_t timeoutMs);
+  int64_t seek(int64_t offset, int whence, uint64_t timeoutMs);
+  void shutdown();
+
+ private:
+  bool readBytes(DecoderInCallback& in, size_t maxBytes, uint64_t timeoutMs);
+  void setImageType(ImageType* type);
+
+ private:
+  DecoderInCallback inCallback_;
+  std::vector<uint8_t> buffer_; // resized at init time
+  long pos_{0}; // current position (SEEK_CUR iff pos_ < end_)
+  long end_{0}; // current buffer size
+  bool eof_{false}; // indicates the EOF
+  bool isSeekable_{false}; // is callback seekable
+};
+
+} // namespace ffmpeg
diff --git a/torchvision/csrc/io/decoder/stream.cpp b/torchvision/csrc/io/decoder/stream.cpp
new file mode 100644
index 00000000000..8c914050587
--- /dev/null
+++ b/torchvision/csrc/io/decoder/stream.cpp
@@ -0,0 +1,289 @@
+#include "stream.h"
+#include <c10/util/Logging.h>
+#include <limits>
+#include "util.h"
+
+namespace ffmpeg {
+const AVRational timeBaseQ = AVRational{1, AV_TIME_BASE};
+
+Stream::Stream(
+    AVFormatContext* inputCtx,
+    MediaFormat format,
+    bool convertPtsToWallTime,
+    int64_t loggingUuid)
+    : inputCtx_(inputCtx),
+      format_(format),
+      convertPtsToWallTime_(convertPtsToWallTime),
+      loggingUuid_(loggingUuid) {}
+
+Stream::~Stream() {
+  if (frame_) {
+    av_free(frame_);
+  }
+  if (codecCtx_) {
+    avcodec_free_context(&codecCtx_);
+  }
+}
+
+// look up the proper CODEC querying the function
+AVCodec* Stream::findCodec(AVCodecParameters* params) {
+  return (AVCodec*)avcodec_find_decoder(params->codec_id);
+}
+
+// Allocate memory for the AVCodecContext, which will hold the context for
+// the decode/encode process. Then fill this codec context with the CODEC
+// parameters defined in the stream parameters. Open the codec, and allocate
+// the global frame defined in the header file.
+int Stream::openCodec(std::vector<DecoderMetadata>* metadata, int num_threads) {
+  AVStream* stream = inputCtx_->streams[format_.stream];
+
+  AVCodec* codec = findCodec(stream->codecpar);
+  if (!codec) {
+    LOG(ERROR) << "LoggingUuid #" << loggingUuid_
+               << ", avcodec_find_decoder failed for codec_id: "
+               << int(stream->codecpar->codec_id);
+    return AVERROR(EINVAL);
+  }
+
+  if (!(codecCtx_ = avcodec_alloc_context3(codec))) {
+    LOG(ERROR) << "LoggingUuid #" << loggingUuid_
+               << ", avcodec_alloc_context3 failed";
+    return AVERROR(ENOMEM);
+  }
+  // multithreading heuristics
+  // if user defined, cap the requested number of threads
+  if (num_threads > max_threads) {
+    num_threads = max_threads;
+  }
+
+  if (num_threads > 0) {
+    // if user defined, respect that
+    // note that the default thread_type will be used
+    codecCtx_->thread_count = num_threads;
+  } else {
+    // otherwise set sensible defaults
+    codecCtx_->thread_count = 8;
+    codecCtx_->thread_type = FF_THREAD_SLICE;
+  }
+
+  int ret;
+  // Copy codec parameters from input stream to output codec context
+  if ((ret = avcodec_parameters_to_context(codecCtx_, stream->codecpar)) < 0) {
+    LOG(ERROR) << "LoggingUuid #" << loggingUuid_
+               << ", avcodec_parameters_to_context failed";
+    return ret;
+  }
+
+  // after avcodec_open2, the value of codecCtx_->time_base is NOT meaningful
+  if ((ret = avcodec_open2(codecCtx_, codec, nullptr)) < 0) {
+    LOG(ERROR) << "LoggingUuid #" << loggingUuid_
+               << ", avcodec_open2 failed: " << Util::generateErrorDesc(ret);
+    avcodec_free_context(&codecCtx_);
+    codecCtx_ = nullptr;
+    return ret;
+  }
+
+  frame_ = av_frame_alloc();
+
+  switch (format_.type) {
+    case TYPE_VIDEO:
+      fps_ = av_q2d(av_guess_frame_rate(inputCtx_, stream, nullptr));
+      break;
+    case TYPE_AUDIO:
+      fps_ = codecCtx_->sample_rate;
+      break;
+    default:
+      fps_ = 30.0;
+  }
+
+  if ((ret = initFormat())) {
+    LOG(ERROR) << "initFormat failed, type: " << format_.type;
+  }
+
+  if (metadata) {
+    DecoderMetadata header;
+    header.format = format_;
+    header.fps = fps_;
+    header.num = stream->time_base.num;
+    header.den = stream->time_base.den;
+    header.duration =
+        av_rescale_q(stream->duration, stream->time_base, timeBaseQ);
+    metadata->push_back(header);
+  }
+
+  return ret;
+}
+
+// send the raw data packet (compressed frame) to the decoder, through the
+// codec context, and receive the raw data frame (uncompressed frame) from the
+// decoder, through the same codec context
+int Stream::analyzePacket(const AVPacket* packet, bool* gotFrame) {
+  int consumed = 0;
+  int result = avcodec_send_packet(codecCtx_, packet);
+  if (result == AVERROR(EAGAIN)) {
+    *gotFrame = false; // no bytes get consumed, fetch frame
+  } else if (result == AVERROR_EOF) {
+    *gotFrame = false; // more than one flush packet
+    if (packet) {
+      // got packet after flush, this is an error
+      return result;
+    }
+  } else if (result < 0) {
+    LOG(ERROR) << "avcodec_send_packet failed, err: "
+               << Util::generateErrorDesc(result);
+    return result; // error
+  } else {
+    consumed = packet ? packet->size : 0; // all bytes get consumed
+  }
+
+  result = avcodec_receive_frame(codecCtx_, frame_);
+
+  if (result >= 0) {
+    *gotFrame = true; // frame is available
+  } else if (result == AVERROR(EAGAIN)) {
+    *gotFrame = false; // no frames at this time, needs more packets
+    if (!consumed) {
+      // precaution, if no packets got consumed and no frames are available
+      return result;
+    }
+  } else if (result == AVERROR_EOF) {
+    *gotFrame = false; // the last frame has been flushed
+    // precaution, if no more frames are available assume we consume all bytes
+    consumed = 0;
+  } else { // error
+    LOG(ERROR) << "avcodec_receive_frame failed, err: "
+               << Util::generateErrorDesc(result);
+    return result;
+  }
+  return consumed;
+}
+
+// General decoding function:
+// given the packet, analyse the metadata, and write the
+// metadata and the buffer to the DecoderOutputMessage.
+int Stream::decodePacket(
+    const AVPacket* packet,
+    DecoderOutputMessage* out,
+    bool headerOnly,
+    bool* hasMsg) {
+  int consumed;
+  bool gotFrame = false;
+  *hasMsg = false;
+  if ((consumed = analyzePacket(packet, &gotFrame)) >= 0 &&
+      (packet == nullptr || gotFrame)) {
+    int result;
+    if ((result = getMessage(out, !gotFrame, headerOnly)) < 0) {
+      return result; // report error
+    }
+    *hasMsg = result > 0;
+  }
+  return consumed;
+}
+
+int Stream::flush(DecoderOutputMessage* out, bool headerOnly) {
+  bool hasMsg = false;
+  int result = decodePacket(nullptr, out, headerOnly, &hasMsg);
+  if (result < 0) {
+    avcodec_flush_buffers(codecCtx_);
+    return result;
+  }
+  if (!hasMsg) {
+    avcodec_flush_buffers(codecCtx_);
+    return 0;
+  }
+  return 1;
+}
+
+// Sets the header and payload via stream::setHeader and copyFrameBytes
+// functions that are defined in the typed stream subclass (VideoStream,
+// AudioStream, ...)
+int Stream::getMessage(DecoderOutputMessage* out, bool flush, bool headerOnly) {
+  if (flush) {
+    // only flush of audio frames makes sense
+    if (format_.type == TYPE_AUDIO) {
+      int processed = 0;
+      size_t total = 0;
+      // grab all audio bytes by chunks
+      do {
+        if ((processed = copyFrameBytes(out->payload.get(), flush)) < 0) {
+          return processed;
+        }
+        total += processed;
+      } while (processed);
+
+      if (total) {
+        // set header if message bytes are available
+        setHeader(&out->header, flush);
+        return 1;
+      }
+    }
+    return 0;
+  } else {
+    if (format_.type == TYPE_AUDIO) {
+      int processed = 0;
+      if ((processed = copyFrameBytes(out->payload.get(), flush)) < 0) {
+        return processed;
+      }
+      if (processed) {
+        // set header if message bytes are available
+        setHeader(&out->header, flush);
+        return 1;
+      }
+      return 0;
+    } else {
+      // set header
+      setHeader(&out->header, flush);
+
+      if (headerOnly) {
+        // Only the header is requested
+        return 1;
+      }
+
+      return copyFrameBytes(out->payload.get(), flush);
+    }
+  }
+}
+
+void Stream::setHeader(DecoderHeader* header, bool flush) {
+  header->seqno = numGenerator_++;
+
+  setFramePts(header, flush);
+
+  if (convertPtsToWallTime_) {
+    keeper_.adjust(header->pts);
+  }
+
+  header->format = format_;
+  header->keyFrame = 0;
+  header->fps = std::numeric_limits<double>::quiet_NaN();
+}
+
+void Stream::setFramePts(DecoderHeader* header, bool flush) {
+  if (flush) {
+    header->pts = nextPts_; // already in us
+  } else {
+    header->pts = frame_->best_effort_timestamp;
+    if (header->pts == AV_NOPTS_VALUE) {
+      header->pts = nextPts_;
+    } else {
+      header->pts = av_rescale_q(
+          header->pts,
+          inputCtx_->streams[format_.stream]->time_base,
+          timeBaseQ);
+    }
+
+    switch (format_.type) {
+      case TYPE_AUDIO:
+        nextPts_ = header->pts + frame_->nb_samples * AV_TIME_BASE / fps_;
+        break;
+      case TYPE_VIDEO:
+        nextPts_ = header->pts + AV_TIME_BASE / fps_;
+        break;
+      default:
+        nextPts_ = header->pts;
+    }
+  }
+}
+
+} // namespace ffmpeg
diff --git a/torchvision/csrc/io/decoder/stream.h b/torchvision/csrc/io/decoder/stream.h
new file mode 100644
index 00000000000..6250dd9ecd2
--- /dev/null
+++ b/torchvision/csrc/io/decoder/stream.h
@@ -0,0 +1,80 @@
+#pragma once
+
+#include <atomic>
+#include "defs.h"
+#include "time_keeper.h"
+
+namespace ffmpeg {
+
+/**
+ * Class uses the FFMPEG library to decode one media stream (audio or video).
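+ *
+ * A consumption sketch (error handling and payload allocation elided;
+ * nextPacket() and consume() are hypothetical helpers standing in for the
+ * demuxing loop and the caller):
+ *
+ *   stream.openCodec(&metadata, 1);                 // returns 0 on success
+ *   DecoderOutputMessage out;
+ *   bool hasMsg = false;
+ *   while (const AVPacket* packet = nextPacket()) {
+ *     stream.decodePacket(packet, &out, false, &hasMsg);
+ *     if (hasMsg) { consume(std::move(out)); }
+ *   }
+ *   while (stream.flush(&out, false) > 0) { consume(std::move(out)); }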
+ */ + +class Stream { + public: + Stream( + AVFormatContext* inputCtx, + MediaFormat format, + bool convertPtsToWallTime, + int64_t loggingUuid); + virtual ~Stream(); + + // returns 0 - on success or negative error + // num_threads sets up the codec context for multithreading if needed + // default is set to single thread in order to not break BC + int openCodec(std::vector* metadata, int num_threads = 1); + // returns 1 - if packet got consumed, 0 - if it's not, and < 0 on error + int decodePacket( + const AVPacket* packet, + DecoderOutputMessage* out, + bool headerOnly, + bool* hasMsg); + // returns stream index + int getIndex() const { + return format_.stream; + } + // returns 1 - if message got a payload, 0 - if it's not, and < 0 on error + int flush(DecoderOutputMessage* out, bool headerOnly); + // return media format + MediaFormat getMediaFormat() const { + return format_; + } + + protected: + virtual int initFormat() = 0; + // returns number processed bytes from packet, or negative error + virtual int analyzePacket(const AVPacket* packet, bool* gotFrame); + // returns number processed bytes from packet, or negative error + virtual int copyFrameBytes(ByteStorage* out, bool flush) = 0; + // sets output format + virtual void setHeader(DecoderHeader* header, bool flush); + // set frame pts + virtual void setFramePts(DecoderHeader* header, bool flush); + // finds codec + virtual AVCodec* findCodec(AVCodecParameters* params); + + private: + // returns 1 - if message got a payload, 0 - if it's not, and < 0 on error + int getMessage(DecoderOutputMessage* out, bool flush, bool headerOnly); + + protected: + AVFormatContext* const inputCtx_; + MediaFormat format_; + const bool convertPtsToWallTime_; + int64_t loggingUuid_; + + AVCodecContext* codecCtx_{nullptr}; + AVFrame* frame_{nullptr}; + + std::atomic numGenerator_{0}; + TimeKeeper keeper_; + // estimated next frame pts for flushing the last frame + int64_t nextPts_{0}; + double fps_{30.}; + // this is a dumb conservative limit; ideally we'd use + // int max_threads = at::get_num_threads(); but this would cause + // fb sync to fail as it would add dependency to ATen to the decoder API + const int max_threads = 12; +}; + +} // namespace ffmpeg diff --git a/torchvision/csrc/io/decoder/subtitle_sampler.cpp b/torchvision/csrc/io/decoder/subtitle_sampler.cpp new file mode 100644 index 00000000000..d0df24d3e35 --- /dev/null +++ b/torchvision/csrc/io/decoder/subtitle_sampler.cpp @@ -0,0 +1,46 @@ +#include "subtitle_sampler.h" +#include +#include "util.h" + +namespace ffmpeg { + +SubtitleSampler::~SubtitleSampler() { + cleanUp(); +} + +void SubtitleSampler::shutdown() { + cleanUp(); +} + +bool SubtitleSampler::init(const SamplerParameters& params) { + cleanUp(); + // set formats + params_ = params; + return true; +} + +int SubtitleSampler::sample(AVSubtitle* sub, ByteStorage* out) { + if (!sub || !out) { + return 0; // flush + } + + out->ensure(Util::size(*sub)); + + return Util::serialize(*sub, out); +} + +int SubtitleSampler::sample(const ByteStorage* in, ByteStorage* out) { + if (in && out) { + // Get a writable copy + if (size_t len = in->length()) { + out->ensure(len); + memcpy(out->writableTail(), in->data(), len); + } + return out->length(); + } + return 0; +} + +void SubtitleSampler::cleanUp() {} + +} // namespace ffmpeg diff --git a/torchvision/csrc/io/decoder/subtitle_sampler.h b/torchvision/csrc/io/decoder/subtitle_sampler.h new file mode 100644 index 00000000000..4aee811ed56 --- /dev/null +++ 
b/torchvision/csrc/io/decoder/subtitle_sampler.h @@ -0,0 +1,32 @@ +#pragma once + +#include "defs.h" + +namespace ffmpeg { + +/** + * Class transcode audio frames from one format into another + */ + +class SubtitleSampler : public MediaSampler { + public: + SubtitleSampler() = default; + ~SubtitleSampler() override; + + bool init(const SamplerParameters& params) override; + int sample(const ByteStorage* in, ByteStorage* out) override; + void shutdown() override; + + // returns number processed/scaling bytes + int sample(AVSubtitle* sub, ByteStorage* out); + + // helper serialization/deserialization methods + static void serialize(const AVSubtitle& sub, ByteStorage* out); + static bool deserialize(const ByteStorage& buf, AVSubtitle* sub); + + private: + // close resources + void cleanUp(); +}; + +} // namespace ffmpeg diff --git a/torchvision/csrc/io/decoder/subtitle_stream.cpp b/torchvision/csrc/io/decoder/subtitle_stream.cpp new file mode 100644 index 00000000000..27c61d4dbd9 --- /dev/null +++ b/torchvision/csrc/io/decoder/subtitle_stream.cpp @@ -0,0 +1,97 @@ +#include "subtitle_stream.h" +#include +#include +#include "util.h" + +namespace ffmpeg { +const AVRational timeBaseQ = AVRational{1, AV_TIME_BASE}; + +SubtitleStream::SubtitleStream( + AVFormatContext* inputCtx, + int index, + bool convertPtsToWallTime, + const SubtitleFormat& format) + : Stream( + inputCtx, + MediaFormat::makeMediaFormat(format, index), + convertPtsToWallTime, + 0) { + memset(&sub_, 0, sizeof(sub_)); +} + +void SubtitleStream::releaseSubtitle() { + if (sub_.release) { + avsubtitle_free(&sub_); + memset(&sub_, 0, sizeof(sub_)); + } +} + +SubtitleStream::~SubtitleStream() { + releaseSubtitle(); + sampler_.shutdown(); +} + +int SubtitleStream::initFormat() { + if (!codecCtx_->subtitle_header) { + LOG(ERROR) << "No subtitle header found"; + } else { + VLOG(1) << "Subtitle header found!"; + } + return 0; +} + +int SubtitleStream::analyzePacket(const AVPacket* packet, bool* gotFrame) { + // clean-up + releaseSubtitle(); + + // FIXME: should this even be created? + AVPacket* avPacket; + avPacket = av_packet_alloc(); + if (avPacket == nullptr) { + LOG(ERROR) + << "decoder as not able to allocate the subtitle-specific packet."; + // alternative to ENOMEM + return AVERROR_BUFFER_TOO_SMALL; + } + avPacket->data = nullptr; + avPacket->size = 0; + // check flush packet + auto pkt = packet ? packet : avPacket; + + int gotFramePtr = 0; + // is these a better way than cast from const? + int result = + avcodec_decode_subtitle2(codecCtx_, &sub_, &gotFramePtr, (AVPacket*)pkt); + + if (result < 0) { + LOG(ERROR) << "avcodec_decode_subtitle2 failed, err: " + << Util::generateErrorDesc(result); + // free the packet we've created + av_packet_free(&avPacket); + return result; + } else if (result == 0) { + result = pkt->size; // discard the rest of the package + } + + sub_.release = gotFramePtr; + *gotFrame = gotFramePtr > 0; + + // set proper pts in us + if (gotFramePtr) { + sub_.pts = av_rescale_q( + pkt->pts, inputCtx_->streams[format_.stream]->time_base, timeBaseQ); + } + + av_packet_free(&avPacket); + return result; +} + +int SubtitleStream::copyFrameBytes(ByteStorage* out, bool flush) { + return sampler_.sample(flush ? 
nullptr : &sub_, out); +} + +void SubtitleStream::setFramePts(DecoderHeader* header, bool) { + header->pts = sub_.pts; // already in us +} + +} // namespace ffmpeg diff --git a/torchvision/csrc/io/decoder/subtitle_stream.h b/torchvision/csrc/io/decoder/subtitle_stream.h new file mode 100644 index 00000000000..6c366e11f50 --- /dev/null +++ b/torchvision/csrc/io/decoder/subtitle_stream.h @@ -0,0 +1,38 @@ +#pragma once + +#include "stream.h" +#include "subtitle_sampler.h" + +namespace ffmpeg { + +/** + * Class uses FFMPEG library to decode one subtitle stream. + */ +struct AVSubtitleKeeper : AVSubtitle { + int64_t release{0}; +}; + +class SubtitleStream : public Stream { + public: + SubtitleStream( + AVFormatContext* inputCtx, + int index, + bool convertPtsToWallTime, + const SubtitleFormat& format); + ~SubtitleStream() override; + + protected: + void setFramePts(DecoderHeader* header, bool flush) override; + + private: + int initFormat() override; + int analyzePacket(const AVPacket* packet, bool* gotFrame) override; + int copyFrameBytes(ByteStorage* out, bool flush) override; + void releaseSubtitle(); + + private: + SubtitleSampler sampler_; + AVSubtitleKeeper sub_; +}; + +} // namespace ffmpeg diff --git a/torchvision/csrc/io/decoder/sync_decoder.cpp b/torchvision/csrc/io/decoder/sync_decoder.cpp new file mode 100644 index 00000000000..1f03ef8eb95 --- /dev/null +++ b/torchvision/csrc/io/decoder/sync_decoder.cpp @@ -0,0 +1,97 @@ +#include "sync_decoder.h" +#include + +namespace ffmpeg { + +SyncDecoder::AVByteStorage::AVByteStorage(size_t n) { + ensure(n); +} + +SyncDecoder::AVByteStorage::~AVByteStorage() { + av_free(buffer_); +} + +void SyncDecoder::AVByteStorage::ensure(size_t n) { + if (tail() < n) { + capacity_ = offset_ + length_ + n; + buffer_ = static_cast(av_realloc(buffer_, capacity_)); + } +} + +uint8_t* SyncDecoder::AVByteStorage::writableTail() { + TORCH_CHECK_LE(offset_ + length_, capacity_); + return buffer_ + offset_ + length_; +} + +void SyncDecoder::AVByteStorage::append(size_t n) { + TORCH_CHECK_LE(n, tail()); + length_ += n; +} + +void SyncDecoder::AVByteStorage::trim(size_t n) { + TORCH_CHECK_LE(n, length_); + offset_ += n; + length_ -= n; +} + +const uint8_t* SyncDecoder::AVByteStorage::data() const { + return buffer_ + offset_; +} + +size_t SyncDecoder::AVByteStorage::length() const { + return length_; +} + +size_t SyncDecoder::AVByteStorage::tail() const { + TORCH_CHECK_LE(offset_ + length_, capacity_); + return capacity_ - offset_ - length_; +} + +void SyncDecoder::AVByteStorage::clear() { + offset_ = 0; + length_ = 0; +} + +std::unique_ptr SyncDecoder::createByteStorage(size_t n) { + return std::make_unique(n); +} + +void SyncDecoder::onInit() { + eof_ = false; + queue_.clear(); +} + +int SyncDecoder::decode(DecoderOutputMessage* out, uint64_t timeoutMs) { + if (eof_ && queue_.empty()) { + return ENODATA; + } + + if (queue_.empty()) { + int result = getFrame(timeoutMs); + // assign EOF + eof_ = result == ENODATA; + // check unrecoverable error, any error but ENODATA + if (result && result != ENODATA) { + return result; + } + + // still empty + if (queue_.empty()) { + if (eof_) { + return ENODATA; + } else { + LOG(INFO) << "Queue is empty"; + return ETIMEDOUT; + } + } + } + + *out = std::move(queue_.front()); + queue_.pop_front(); + return 0; +} + +void SyncDecoder::push(DecoderOutputMessage&& buffer) { + queue_.push_back(std::move(buffer)); +} +} // namespace ffmpeg diff --git a/torchvision/csrc/io/decoder/sync_decoder.h b/torchvision/csrc/io/decoder/sync_decoder.h 
diff --git a/torchvision/csrc/io/decoder/sync_decoder.h b/torchvision/csrc/io/decoder/sync_decoder.h
new file mode 100644
index 00000000000..b7cf7b625ac
--- /dev/null
+++ b/torchvision/csrc/io/decoder/sync_decoder.h
@@ -0,0 +1,48 @@
+#pragma once
+
+#include <list>
+#include "decoder.h"
+
+namespace ffmpeg {
+
+/**
+ * Class uses FFMPEG library to decode media streams.
+ * Media bytes can be explicitly provided through read-callback
+ * or fetched internally by FFMPEG library
+ */
+class SyncDecoder : public Decoder {
+ public:
+  // Allocation of memory must be done with a proper alignment.
+  class AVByteStorage : public ByteStorage {
+   public:
+    explicit AVByteStorage(size_t n);
+    ~AVByteStorage() override;
+    void ensure(size_t n) override;
+    uint8_t* writableTail() override;
+    void append(size_t n) override;
+    void trim(size_t n) override;
+    const uint8_t* data() const override;
+    size_t length() const override;
+    size_t tail() const override;
+    void clear() override;
+
+   private:
+    size_t offset_{0};
+    size_t length_{0};
+    size_t capacity_{0};
+    uint8_t* buffer_{nullptr};
+  };
+
+ public:
+  int decode(DecoderOutputMessage* out, uint64_t timeoutMs) override;
+
+ private:
+  void push(DecoderOutputMessage&& buffer) override;
+  void onInit() override;
+  std::unique_ptr<ByteStorage> createByteStorage(size_t n) override;
+
+ private:
+  std::list<DecoderOutputMessage> queue_;
+  bool eof_{false};
+};
+} // namespace ffmpeg
diff --git a/torchvision/csrc/io/decoder/sync_decoder_test.cpp b/torchvision/csrc/io/decoder/sync_decoder_test.cpp
new file mode 100644
index 00000000000..085966ce687
--- /dev/null
+++ b/torchvision/csrc/io/decoder/sync_decoder_test.cpp
@@ -0,0 +1,416 @@
+#include <dirent.h>
+#include <gtest/gtest.h>
+#include <c10/util/Logging.h>
+#include "memory_buffer.h"
+#include "sync_decoder.h"
+#include "util.h"
+
+using namespace ffmpeg;
+
+namespace {
+struct VideoFileStats {
+  std::string name;
+  size_t durationPts{0};
+  int num{0};
+  int den{0};
+  int fps{0};
+};
+
+void gotAllTestFiles(
+    const std::string& folder,
+    std::vector<VideoFileStats>* stats) {
+  DIR* d = opendir(folder.c_str());
+  CHECK(d);
+  struct dirent* dir;
+  while ((dir = readdir(d))) {
+    if (dir->d_type != DT_DIR && 0 != strcmp(dir->d_name, "README")) {
+      VideoFileStats item;
+      item.name = folder + '/' + dir->d_name;
+      LOG(INFO) << "Found video file: " << item.name;
+      stats->push_back(std::move(item));
+    }
+  }
+  closedir(d);
+}
+
+void gotFilesStats(std::vector<VideoFileStats>& stats) {
+  DecoderParameters params;
+  params.timeoutMs = 10000;
+  params.startOffset = 1000000;
+  params.seekAccuracy = 100000;
+  params.formats = {MediaFormat(0)};
+  params.headerOnly = true;
+  params.preventStaleness = false;
+  size_t avgProvUs = 0;
+  const size_t rounds = 100;
+  for (auto& item : stats) {
+    LOG(INFO) << "Decoding video file in memory: " << item.name;
+    FILE* f = fopen(item.name.c_str(), "rb");
+    CHECK(f != nullptr);
+    fseek(f, 0, SEEK_END);
+    std::vector<uint8_t> buffer(ftell(f));
+    rewind(f);
+    size_t s = fread(buffer.data(), 1, buffer.size(), f);
+    TORCH_CHECK_EQ(buffer.size(), s);
+    fclose(f);
+
+    for (size_t i = 0; i < rounds; ++i) {
+      SyncDecoder decoder;
+      std::vector<DecoderMetadata> metadata;
+      const auto now = std::chrono::steady_clock::now();
+      CHECK(decoder.init(
+          params,
+          MemoryBuffer::getCallback(buffer.data(), buffer.size()),
+          &metadata));
+      const auto then = std::chrono::steady_clock::now();
+      decoder.shutdown();
+      avgProvUs +=
+          std::chrono::duration_cast<std::chrono::microseconds>(then - now)
+              .count();
+      TORCH_CHECK_EQ(metadata.size(), 1);
+      item.num = metadata[0].num;
+      item.den = metadata[0].den;
+      item.fps = metadata[0].fps;
+      item.durationPts =
+          av_rescale_q(metadata[0].duration, AV_TIME_BASE_Q, {1, item.fps});
+    }
+  }
+  LOG(INFO) << "Probing (us) " << avgProvUs
+      / stats.size() / rounds;
+}
+
+size_t measurePerformanceUs(
+    const std::vector<VideoFileStats>& stats,
+    size_t rounds,
+    size_t num,
+    size_t stride) {
+  size_t avgClipDecodingUs = 0;
+  std::srand(time(nullptr));
+  for (const auto& item : stats) {
+    FILE* f = fopen(item.name.c_str(), "rb");
+    CHECK(f != nullptr);
+    fseek(f, 0, SEEK_END);
+    std::vector<uint8_t> buffer(ftell(f));
+    rewind(f);
+    size_t s = fread(buffer.data(), 1, buffer.size(), f);
+    TORCH_CHECK_EQ(buffer.size(), s);
+    fclose(f);
+
+    for (size_t i = 0; i < rounds; ++i) {
+      // randomly select a clip
+      size_t rOffset = std::rand();
+      size_t fOffset = rOffset % item.durationPts;
+      size_t clipFrames = num + (num - 1) * stride;
+      if (fOffset + clipFrames > item.durationPts) {
+        fOffset = item.durationPts - clipFrames;
+      }
+
+      DecoderParameters params;
+      params.timeoutMs = 10000;
+      params.startOffset = 1000000;
+      params.seekAccuracy = 100000;
+      params.preventStaleness = false;
+
+      for (size_t n = 0; n < num; ++n) {
+        std::list<DecoderOutputMessage> msgs;
+
+        params.startOffset =
+            av_rescale_q(fOffset, {1, item.fps}, AV_TIME_BASE_Q);
+        params.endOffset = params.startOffset + 100;
+
+        auto now = std::chrono::steady_clock::now();
+        SyncDecoder decoder;
+        CHECK(decoder.init(
+            params,
+            MemoryBuffer::getCallback(buffer.data(), buffer.size()),
+            nullptr));
+        DecoderOutputMessage out;
+        while (0 == decoder.decode(&out, params.timeoutMs)) {
+          msgs.push_back(std::move(out));
+        }
+
+        decoder.shutdown();
+
+        const auto then = std::chrono::steady_clock::now();
+
+        fOffset += 1 + stride;
+
+        avgClipDecodingUs +=
+            std::chrono::duration_cast<std::chrono::microseconds>(then - now)
+                .count();
+      }
+    }
+  }
+
+  return avgClipDecodingUs / rounds / num / stats.size();
+}
+
+void runDecoder(SyncDecoder& decoder) {
+  DecoderOutputMessage out;
+  size_t audioFrames = 0, videoFrames = 0, totalBytes = 0;
+  while (0 == decoder.decode(&out, 10000)) {
+    if (out.header.format.type == TYPE_AUDIO) {
+      ++audioFrames;
+    } else if (out.header.format.type == TYPE_VIDEO) {
+      ++videoFrames;
+    } else if (out.header.format.type == TYPE_SUBTITLE && out.payload) {
+      // deserialize
+      LOG(INFO) << "Deserializing subtitle";
+      AVSubtitle sub;
+      memset(&sub, 0, sizeof(sub));
+      EXPECT_TRUE(Util::deserialize(*out.payload, &sub));
+      LOG(INFO) << "Found subtitles" << ", num rects: " << sub.num_rects;
+      for (unsigned i = 0; i < sub.num_rects; ++i) {
+        std::string text = "picture";
+        if (sub.rects[i]->type == SUBTITLE_TEXT) {
+          text = sub.rects[i]->text;
+        } else if (sub.rects[i]->type == SUBTITLE_ASS) {
+          text = sub.rects[i]->ass;
+        }
+
+        LOG(INFO) << "Rect num: " << i << ", type:" << sub.rects[i]->type
+                  << ", text: " << text;
+      }
+
+      avsubtitle_free(&sub);
+    }
+    if (out.payload) {
+      totalBytes += out.payload->length();
+    }
+  }
+  LOG(INFO) << "Decoded audio frames: " << audioFrames
+            << ", video frames: " << videoFrames
+            << ", total bytes: " << totalBytes;
+}
+} // namespace
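runDecoder() above shows the full consumption pattern; the return-code contract of SyncDecoder::decode() in isolation looks like this (sketch; consume() is a hypothetical sink):

    DecoderOutputMessage msg;
    int ret;
    while ((ret = decoder.decode(&msg, /*timeoutMs=*/10000)) == 0) {
      consume(std::move(msg)); // hypothetical
    }
    // ret == ENODATA: end of stream; ret == ETIMEDOUT: queue stayed empty
    // within the timeout; any other non-zero value is an unrecoverable error.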
+
+TEST(SyncDecoder, TestSyncDecoderPerformance) {
+  // Measure the average time of decoding per clip:
+  // 1. list the videos in the testing directory
+  // 2. for each video, get the number of frames with timestamps
+  // 3. randomly select a frame offset
+  // 4. adjust the offset for the number of frames and the stride,
+  //    if it's out of the upper boundary
+  // 5. repeat multiple times, measuring and accumulating decoding time
+  //    per clip.
+  /*
+    1) 4 x 2
+    2) 8 x 8
+    3) 16 x 8
+    4) 32 x 4
+  */
+  const std::string kFolder = "pytorch/vision/test/assets/videos";
+  std::vector<VideoFileStats> stats;
+  gotAllTestFiles(kFolder, &stats);
+  gotFilesStats(stats);
+
+  const size_t kRounds = 10;
+
+  auto new4x2 = measurePerformanceUs(stats, kRounds, 4, 2);
+  auto new8x8 = measurePerformanceUs(stats, kRounds, 8, 8);
+  auto new16x8 = measurePerformanceUs(stats, kRounds, 16, 8);
+  auto new32x4 = measurePerformanceUs(stats, kRounds, 32, 4);
+  LOG(INFO) << "Clip decoding (us)" << ", new(4x2): " << new4x2
+            << ", new(8x8): " << new8x8 << ", new(16x8): " << new16x8
+            << ", new(32x4): " << new32x4;
+}
+
+TEST(SyncDecoder, Test) {
+  SyncDecoder decoder;
+  DecoderParameters params;
+  params.timeoutMs = 10000;
+  params.startOffset = 1000000;
+  params.seekAccuracy = 100000;
+  params.formats = {MediaFormat(), MediaFormat(0), MediaFormat('0')};
+  params.uri = "pytorch/vision/test/assets/videos/R6llTwEh07w.mp4";
+  CHECK(decoder.init(params, nullptr, nullptr));
+  runDecoder(decoder);
+  decoder.shutdown();
+}
+
+TEST(SyncDecoder, TestSubtitles) {
+  SyncDecoder decoder;
+  DecoderParameters params;
+  params.timeoutMs = 10000;
+  params.formats = {MediaFormat(), MediaFormat(0), MediaFormat('0')};
+  params.uri = "vue/synergy/data/robotsub.mp4";
+  CHECK(decoder.init(params, nullptr, nullptr));
+  runDecoder(decoder);
+  decoder.shutdown();
+}
+
+TEST(SyncDecoder, TestHeadersOnly) {
+  SyncDecoder decoder;
+  DecoderParameters params;
+  params.timeoutMs = 10000;
+  params.startOffset = 1000000;
+  params.seekAccuracy = 100000;
+  params.headerOnly = true;
+  params.formats = {MediaFormat(), MediaFormat(0), MediaFormat('0')};
+
+  params.uri = "pytorch/vision/test/assets/videos/R6llTwEh07w.mp4";
+  CHECK(decoder.init(params, nullptr, nullptr));
+  runDecoder(decoder);
+  decoder.shutdown();
+
+  params.uri = "pytorch/vision/test/assets/videos/SOX5yA1l24A.mp4";
+  CHECK(decoder.init(params, nullptr, nullptr));
+  runDecoder(decoder);
+  decoder.shutdown();
+
+  params.uri = "pytorch/vision/test/assets/videos/WUzgd7C1pWA.mp4";
+  CHECK(decoder.init(params, nullptr, nullptr));
+  runDecoder(decoder);
+  decoder.shutdown();
+}
+
+TEST(SyncDecoder, TestHeadersOnlyDownSampling) {
+  SyncDecoder decoder;
+  DecoderParameters params;
+  params.timeoutMs = 10000;
+  params.startOffset = 1000000;
+  params.seekAccuracy = 100000;
+  params.headerOnly = true;
+  MediaFormat format;
+  format.type = TYPE_AUDIO;
+  format.format.audio.samples = 8000;
+  params.formats.insert(format);
+
+  format.type = TYPE_VIDEO;
+  format.format.video.width = 224;
+  format.format.video.height = 224;
+  params.formats.insert(format);
+
+  params.uri = "pytorch/vision/test/assets/videos/R6llTwEh07w.mp4";
+  CHECK(decoder.init(params, nullptr, nullptr));
+  runDecoder(decoder);
+  decoder.shutdown();
+
+  params.uri = "pytorch/vision/test/assets/videos/SOX5yA1l24A.mp4";
+  CHECK(decoder.init(params, nullptr, nullptr));
+  runDecoder(decoder);
+  decoder.shutdown();
+
+  params.uri = "pytorch/vision/test/assets/videos/WUzgd7C1pWA.mp4";
+  CHECK(decoder.init(params, nullptr, nullptr));
+  runDecoder(decoder);
+  decoder.shutdown();
+}
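The memory-buffer tests below all repeat the same fopen/fseek/fread dance to load a file into memory; a hypothetical helper capturing that boilerplate would be:

    // Read a whole file into a byte vector (returns empty on open failure).
    std::vector<uint8_t> readFileBytes(const char* path) {
      std::vector<uint8_t> buffer;
      if (FILE* f = fopen(path, "rb")) {
        fseek(f, 0, SEEK_END);
        buffer.resize(ftell(f));
        rewind(f);
        size_t n = fread(buffer.data(), 1, buffer.size(), f);
        buffer.resize(n); // shrink on a short read
        fclose(f);
      }
      return buffer;
    }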
+
+TEST(SyncDecoder, TestInitOnlyNoShutdown) {
+  SyncDecoder decoder;
+  DecoderParameters params;
+  params.timeoutMs = 10000;
+  params.startOffset = 1000000;
+  params.seekAccuracy = 100000;
+  params.headerOnly = false;
+  params.formats = {MediaFormat(), MediaFormat(0), MediaFormat('0')};
+  params.uri = "pytorch/vision/test/assets/videos/R6llTwEh07w.mp4";
+  std::vector<DecoderMetadata> metadata;
+  CHECK(decoder.init(params, nullptr, &metadata));
+}
+
+TEST(SyncDecoder, TestMemoryBuffer) {
+  SyncDecoder decoder;
+  DecoderParameters params;
+  params.timeoutMs = 10000;
+  params.startOffset = 1000000;
+  params.endOffset = 9000000;
+  params.seekAccuracy = 10000;
+  params.formats = {MediaFormat(), MediaFormat(0), MediaFormat('0')};
+
+  FILE* f = fopen(
+      "pytorch/vision/test/assets/videos/RATRACE_wave_f_nm_np1_fr_goo_37.avi",
+      "rb");
+  CHECK(f != nullptr);
+  fseek(f, 0, SEEK_END);
+  std::vector<uint8_t> buffer(ftell(f));
+  rewind(f);
+  size_t s = fread(buffer.data(), 1, buffer.size(), f);
+  TORCH_CHECK_EQ(buffer.size(), s);
+  fclose(f);
+  CHECK(decoder.init(
+      params,
+      MemoryBuffer::getCallback(buffer.data(), buffer.size()),
+      nullptr));
+  LOG(INFO) << "Decoding from memory bytes: " << buffer.size();
+  runDecoder(decoder);
+  decoder.shutdown();
+}
+
+TEST(SyncDecoder, TestMemoryBufferNoSeekableWithFullRead) {
+  SyncDecoder decoder;
+  DecoderParameters params;
+  params.timeoutMs = 10000;
+  params.startOffset = 1000000;
+  params.endOffset = 9000000;
+  params.seekAccuracy = 10000;
+  params.formats = {MediaFormat(), MediaFormat(0), MediaFormat('0')};
+
+  FILE* f = fopen("pytorch/vision/test/assets/videos/R6llTwEh07w.mp4", "rb");
+  CHECK(f != nullptr);
+  fseek(f, 0, SEEK_END);
+  std::vector<uint8_t> buffer(ftell(f));
+  rewind(f);
+  size_t s = fread(buffer.data(), 1, buffer.size(), f);
+  TORCH_CHECK_EQ(buffer.size(), s);
+  fclose(f);
+
+  params.maxSeekableBytes = buffer.size() + 1;
+  MemoryBuffer object(buffer.data(), buffer.size());
+  CHECK(decoder.init(
+      params,
+      [object](uint8_t* out, int size, int whence, uint64_t timeoutMs) mutable
+          -> int {
+        if (out) { // see defs.h file
+          // read mode
+          return object.read(out, size);
+        }
+        // seek mode
+        if (!timeoutMs) {
+          // seek capability: yes/no
+          return -1;
+        }
+        return object.seek(size, whence);
+      },
+      nullptr));
+  runDecoder(decoder);
+  decoder.shutdown();
+}
+
+TEST(SyncDecoder, TestMemoryBufferNoSeekableWithPartialRead) {
+  SyncDecoder decoder;
+  DecoderParameters params;
+  params.timeoutMs = 10000;
+  params.startOffset = 1000000;
+  params.endOffset = 9000000;
+  params.seekAccuracy = 10000;
+  params.formats = {MediaFormat(), MediaFormat(0), MediaFormat('0')};
+
+  FILE* f = fopen("pytorch/vision/test/assets/videos/R6llTwEh07w.mp4", "rb");
+  CHECK(f != nullptr);
+  fseek(f, 0, SEEK_END);
+  std::vector<uint8_t> buffer(ftell(f));
+  rewind(f);
+  size_t s = fread(buffer.data(), 1, buffer.size(), f);
+  TORCH_CHECK_EQ(buffer.size(), s);
+  fclose(f);
+
+  params.maxSeekableBytes = buffer.size() / 2;
+  MemoryBuffer object(buffer.data(), buffer.size());
+  CHECK(!decoder.init(
+      params,
+      [object](uint8_t* out, int size, int whence, uint64_t timeoutMs) mutable
+          -> int {
+        if (out) { // see defs.h file
+          // read mode
+          return object.read(out, size);
+        }
+        // seek mode
+        if (!timeoutMs) {
+          // seek capability: yes/no
+          return -1;
+        }
+        return object.seek(size, whence);
+      },
+      nullptr));
+}
diff --git a/torchvision/csrc/io/decoder/time_keeper.cpp b/torchvision/csrc/io/decoder/time_keeper.cpp
new file mode 100644
index 00000000000..845c76cddc8
--- /dev/null
+++ b/torchvision/csrc/io/decoder/time_keeper.cpp
@@ -0,0 +1,35 @@
+#include "time_keeper.h"
+#include "defs.h"
+
+namespace ffmpeg {
+
+namespace {
+const long kMaxTimeBaseDifference = 10;
+}
+
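TimeKeeper::adjust() below remaps decoder timestamps onto the wall-clock timeline and returns a suggested sleep. A usage sketch from a hypothetical playback loop (assumes <thread> and <chrono>; field names follow DecoderHeader above):

    long ts = msg.header.pts;              // decoded timestamp, us
    long sleepUs = timeKeeper.adjust(ts);  // ts is shifted in place
    if (sleepUs > 0) {
      std::this_thread::sleep_for(std::chrono::microseconds(sleepUs));
    }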
+long TimeKeeper::adjust(long& decoderTimestamp) {
+  const long now = std::chrono::duration_cast<std::chrono::microseconds>(
+                       std::chrono::system_clock::now().time_since_epoch())
+                       .count();
+
+  if (startTime_ == 0) {
+    startTime_ = now;
+  }
+  if (streamTimestamp_ == 0) {
+    streamTimestamp_ = decoderTimestamp;
+  }
+
+  const auto runOut = startTime_ + decoderTimestamp - streamTimestamp_;
+
+  if (std::labs((now - runOut) / AV_TIME_BASE) > kMaxTimeBaseDifference) {
+    streamTimestamp_ = startTime_ - now + decoderTimestamp;
+  }
+
+  const auto sleepAdvised = runOut - now;
+
+  decoderTimestamp += startTime_ - streamTimestamp_;
+
+  return sleepAdvised > 0 ? sleepAdvised : 0;
+}
+
+} // namespace ffmpeg
diff --git a/torchvision/csrc/io/decoder/time_keeper.h b/torchvision/csrc/io/decoder/time_keeper.h
new file mode 100644
index 00000000000..e4d4718c705
--- /dev/null
+++ b/torchvision/csrc/io/decoder/time_keeper.h
@@ -0,0 +1,25 @@
+#pragma once
+
+#include <stdlib.h>
+#include <chrono>
+
+namespace ffmpeg {
+
+/**
+ * Class keeps track of the decoded timestamps (us) for media streams.
+ */
+
+class TimeKeeper {
+ public:
+  TimeKeeper() = default;
+
+  // adjust provided @timestamp to the corrected value
+  // return advised sleep time before next frame processing in (us)
+  long adjust(long& decoderTimestamp);
+
+ private:
+  long startTime_{0};
+  long streamTimestamp_{0};
+};
+
+} // namespace ffmpeg
diff --git a/torchvision/csrc/io/decoder/util.cpp b/torchvision/csrc/io/decoder/util.cpp
new file mode 100644
index 00000000000..149f402c5dc
--- /dev/null
+++ b/torchvision/csrc/io/decoder/util.cpp
@@ -0,0 +1,389 @@
+#include "util.h"
+#include <c10/util/Logging.h>
+
+namespace ffmpeg {
+
+namespace Serializer {
+
+// fixed size types
+template <typename T>
+inline size_t getSize(const T& x) {
+  return sizeof(x);
+}
+
+template <typename T>
+inline bool
+serializeItem(uint8_t* dest, size_t len, size_t& pos, const T& src) {
+  VLOG(6) << "Generic serializeItem";
+  const auto required = sizeof(src);
+  if (len < pos + required) {
+    return false;
+  }
+  memcpy(dest + pos, &src, required);
+  pos += required;
+  return true;
+}
+
+template <typename T>
+inline bool
+deserializeItem(const uint8_t* src, size_t len, size_t& pos, T& dest) {
+  const auto required = sizeof(dest);
+  if (len < pos + required) {
+    return false;
+  }
+  memcpy(&dest, src + pos, required);
+  pos += required;
+  return true;
+}
+
+// AVSubtitleRect specialization
+inline size_t getSize(const AVSubtitleRect& x) {
+  auto rectBytes = [](const AVSubtitleRect& y) -> size_t {
+    size_t s = 0;
+    switch (y.type) {
+      case SUBTITLE_BITMAP:
+        for (int i = 0; i < y.nb_colors; ++i) {
+          s += sizeof(y.linesize[i]);
+          s += y.linesize[i];
+        }
+        break;
+      case SUBTITLE_TEXT:
+        s += sizeof(size_t);
+        s += strlen(y.text);
+        break;
+      case SUBTITLE_ASS:
+        s += sizeof(size_t);
+        s += strlen(y.ass);
+        break;
+      default:
+        break;
+    }
+    return s;
+  };
+  return getSize(x.x) + getSize(x.y) + getSize(x.w) + getSize(x.h) +
+      getSize(x.nb_colors) + getSize(x.type) + getSize(x.flags) + rectBytes(x);
+}
+
+// AVSubtitle specialization
+inline size_t getSize(const AVSubtitle& x) {
+  auto rectBytes = [](const AVSubtitle& y) -> size_t {
+    size_t s = getSize(y.num_rects);
+    for (unsigned i = 0; i < y.num_rects; ++i) {
+      s += getSize(*y.rects[i]);
+    }
+    return s;
+  };
+  return getSize(x.format) + getSize(x.start_display_time) +
+      getSize(x.end_display_time) + getSize(x.pts) + rectBytes(x);
+}
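For orientation, the byte layout the serializer below emits for a single SUBTITLE_TEXT rect, derived from the field order in getSize()/serializeItem() (fixed-size fields are memcpy'd raw, so the encoding is only readable by the matching deserializer on the same architecture):

    // [x][y][w][h][nb_colors][type][flags][strlen (size_t)][text bytes ...]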
+inline bool serializeItem(
+    uint8_t* dest,
+    size_t len,
+    size_t& pos,
+    const AVSubtitleRect& src) {
+  auto rectSerialize =
+      [](uint8_t* d, size_t l, size_t& p, const AVSubtitleRect& x) -> bool {
+    switch (x.type) {
+      case SUBTITLE_BITMAP:
+        for (int i = 0; i < x.nb_colors; ++i) {
+          if (!serializeItem(d, l, p, x.linesize[i])) {
+            return false;
+          }
+          if (p + x.linesize[i] > l) {
+            return false;
+          }
+          memcpy(d + p, x.data[i], x.linesize[i]);
+          p += x.linesize[i];
+        }
+        return true;
+      case SUBTITLE_TEXT: {
+        const size_t s = strlen(x.text);
+        if (!serializeItem(d, l, p, s)) {
+          return false;
+        }
+        if (p + s > l) {
+          return false;
+        }
+        memcpy(d + p, x.text, s);
+        p += s;
+        return true;
+      }
+      case SUBTITLE_ASS: {
+        const size_t s = strlen(x.ass);
+        if (!serializeItem(d, l, p, s)) {
+          return false;
+        }
+        if (p + s > l) {
+          return false;
+        }
+        memcpy(d + p, x.ass, s);
+        p += s;
+        return true;
+      }
+      default:
+        return true;
+    }
+  };
+  return serializeItem(dest, len, pos, src.x) &&
+      serializeItem(dest, len, pos, src.y) &&
+      serializeItem(dest, len, pos, src.w) &&
+      serializeItem(dest, len, pos, src.h) &&
+      serializeItem(dest, len, pos, src.nb_colors) &&
+      serializeItem(dest, len, pos, src.type) &&
+      serializeItem(dest, len, pos, src.flags) &&
+      rectSerialize(dest, len, pos, src);
+}
+
+inline bool
+serializeItem(uint8_t* dest, size_t len, size_t& pos, const AVSubtitle& src) {
+  auto rectSerialize =
+      [](uint8_t* d, size_t l, size_t& p, const AVSubtitle& x) -> bool {
+    bool res = serializeItem(d, l, p, x.num_rects);
+    for (unsigned i = 0; res && i < x.num_rects; ++i) {
+      res = serializeItem(d, l, p, *(x.rects[i]));
+    }
+    return res;
+  };
+  VLOG(6) << "AVSubtitle serializeItem";
+  return serializeItem(dest, len, pos, src.format) &&
+      serializeItem(dest, len, pos, src.start_display_time) &&
+      serializeItem(dest, len, pos, src.end_display_time) &&
+      serializeItem(dest, len, pos, src.pts) &&
+      rectSerialize(dest, len, pos, src);
+}
+
+inline bool deserializeItem(
+    const uint8_t* src,
+    size_t len,
+    size_t& pos,
+    AVSubtitleRect& dest) {
+  auto rectDeserialize =
+      [](const uint8_t* y, size_t l, size_t& p, AVSubtitleRect& x) -> bool {
+    switch (x.type) {
+      case SUBTITLE_BITMAP:
+        for (int i = 0; i < x.nb_colors; ++i) {
+          if (!deserializeItem(y, l, p, x.linesize[i])) {
+            return false;
+          }
+          if (p + x.linesize[i] > l) {
+            return false;
+          }
+          x.data[i] = (uint8_t*)av_malloc(x.linesize[i]);
+          memcpy(x.data[i], y + p, x.linesize[i]);
+          p += x.linesize[i];
+        }
+        return true;
+      case SUBTITLE_TEXT: {
+        size_t s = 0;
+        if (!deserializeItem(y, l, p, s)) {
+          return false;
+        }
+        if (p + s > l) {
+          return false;
+        }
+        x.text = (char*)av_malloc(s + 1);
+        memcpy(x.text, y + p, s);
+        x.text[s] = 0;
+        p += s;
+        return true;
+      }
+      case SUBTITLE_ASS: {
+        size_t s = 0;
+        if (!deserializeItem(y, l, p, s)) {
+          return false;
+        }
+        if (p + s > l) {
+          return false;
+        }
+        x.ass = (char*)av_malloc(s + 1);
+        memcpy(x.ass, y + p, s);
+        x.ass[s] = 0;
+        p += s;
+        return true;
+      }
+      default:
+        return true;
+    }
+  };
+
+  return deserializeItem(src, len, pos, dest.x) &&
+      deserializeItem(src, len, pos, dest.y) &&
+      deserializeItem(src, len, pos, dest.w) &&
+      deserializeItem(src, len, pos, dest.h) &&
+      deserializeItem(src, len, pos, dest.nb_colors) &&
+      deserializeItem(src, len, pos, dest.type) &&
+      deserializeItem(src, len, pos, dest.flags) &&
+      rectDeserialize(src, len, pos, dest);
+}
+
+inline bool
+deserializeItem(const uint8_t* src, size_t len, size_t& pos, AVSubtitle& dest) {
+  auto rectDeserialize =
+      [](const uint8_t* y, size_t l, size_t& p, AVSubtitle& x) -> bool {
+    bool res = deserializeItem(y, l, p, x.num_rects);
+    if (res && x.num_rects) {
+      x.rects =
+          (AVSubtitleRect**)av_malloc(x.num_rects * sizeof(AVSubtitleRect*));
+    }
+    for (unsigned i = 0; res && i < x.num_rects; ++i) {
+      x.rects[i] = (AVSubtitleRect*)av_malloc(sizeof(AVSubtitleRect));
+      memset(x.rects[i], 0, sizeof(AVSubtitleRect));
+      res = deserializeItem(y, l, p, *x.rects[i]);
+    }
+    return res;
+  };
+
+  return deserializeItem(src, len, pos, dest.format) &&
+      deserializeItem(src, len, pos, dest.start_display_time) &&
+      deserializeItem(src, len, pos, dest.end_display_time) &&
+      deserializeItem(src, len, pos, dest.pts) &&
+      rectDeserialize(src, len, pos, dest);
+}
+} // namespace Serializer
+
+namespace Util {
+std::string generateErrorDesc(int errorCode) {
+  std::array<char, 1024> buffer;
+  if (av_strerror(errorCode, buffer.data(), buffer.size()) < 0) {
+    return std::string("Unknown error code: ") + std::to_string(errorCode);
+  }
+  buffer.back() = 0;
+  return std::string(buffer.data());
+}
+
+size_t serialize(const AVSubtitle& sub, ByteStorage* out) {
+  const auto len = size(sub);
+  size_t pos = 0;
+  if (!Serializer::serializeItem(out->writableTail(), len, pos, sub)) {
+    return 0;
+  }
+  out->append(len);
+  return len;
+}
+
+bool deserialize(const ByteStorage& buf, AVSubtitle* sub) {
+  size_t pos = 0;
+  return Serializer::deserializeItem(buf.data(), buf.length(), pos, *sub);
+}
+
+size_t size(const AVSubtitle& sub) {
+  return Serializer::getSize(sub);
+}
+
+bool validateVideoFormat(const VideoFormat& f) {
+  // clang-format off
+  /*
+  Valid parameter values for the decoder
+  _____________________________________________________________________________
+  |  W  |  H  | minDimension | maxDimension | cropImage | algorithm            |
+  |_____|_____|______________|______________|___________|______________________|
+  |  0  |  0  |      0       |      0       |    N/A    | original             |
+  |_____|_____|______________|______________|___________|______________________|
+  | >0  |  0  |     N/A      |     N/A      |    N/A    | scale keeping W      |
+  |_____|_____|______________|______________|___________|______________________|
+  |  0  | >0  |     N/A      |     N/A      |    N/A    | scale keeping H      |
+  |_____|_____|______________|______________|___________|______________________|
+  | >0  | >0  |     N/A      |     N/A      |     0     | stretch/scale        |
+  |_____|_____|______________|______________|___________|______________________|
+  | >0  | >0  |     N/A      |     N/A      |    >0     | scale/crop           |
+  |_____|_____|______________|______________|___________|______________________|
+  |  0  |  0  |     >0       |      0       |    N/A    | scale to min dim.    |
+  |_____|_____|______________|______________|___________|______________________|
+  |  0  |  0  |      0       |     >0       |    N/A    | scale to max dim.    |
+  |_____|_____|______________|______________|___________|______________________|
+  |  0  |  0  |     >0       |     >0       |    N/A    | stretch to min/max   |
+  |_____|_____|______________|______________|___________|______________________|
+  */
+  // clang-format on
+  return (f.width == 0 && // #1, #6, #7 and #8
+          f.height == 0 && f.cropImage == 0) ||
+      (f.width != 0 && // #4 and #5
+       f.height != 0 && f.minDimension == 0 && f.maxDimension == 0) ||
+      (((f.width != 0 && // #2
+         f.height == 0) ||
+        (f.width == 0 && // #3
+         f.height != 0)) &&
+       f.minDimension == 0 && f.maxDimension == 0 && f.cropImage == 0);
+}
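A worked instance of case #6 handled by setFormatDimensions() below (it matches a row in util_test.cpp):

    // Case #6, landscape: src 172x128, minDimension 256
    //   destH = 256
    //   destW = round(172.0 * 256 / 128) = 344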
+void setFormatDimensions(
+    size_t& destW,
+    size_t& destH,
+    size_t userW,
+    size_t userH,
+    size_t srcW,
+    size_t srcH,
+    size_t minDimension,
+    size_t maxDimension,
+    size_t cropImage) {
+  // Rounding rule: round half up, i.e. result = int(double(value) + 0.5);
+  // fractions >= 0.5 round up, fractions < 0.5 round down.
+
+  // #1, #6, #7 and #8
+  if (userW == 0 && userH == 0) {
+    if (minDimension > 0 && maxDimension == 0) { // #6
+      if (srcW > srcH) {
+        // landscape
+        destH = minDimension;
+        destW = round(double(srcW * minDimension) / srcH);
+      } else {
+        // portrait
+        destW = minDimension;
+        destH = round(double(srcH * minDimension) / srcW);
+      }
+    } else if (minDimension == 0 && maxDimension > 0) { // #7
+      if (srcW > srcH) {
+        // landscape
+        destW = maxDimension;
+        destH = round(double(srcH * maxDimension) / srcW);
+      } else {
+        // portrait
+        destH = maxDimension;
+        destW = round(double(srcW * maxDimension) / srcH);
+      }
+    } else if (minDimension > 0 && maxDimension > 0) { // #8
+      if (srcW > srcH) {
+        // landscape
+        destW = maxDimension;
+        destH = minDimension;
+      } else {
+        // portrait
+        destW = minDimension;
+        destH = maxDimension;
+      }
+    } else { // #1
+      destW = srcW;
+      destH = srcH;
+    }
+  } else if (userW != 0 && userH == 0) { // #2
+    destW = userW;
+    destH = round(double(srcH * userW) / srcW);
+  } else if (userW == 0 && userH != 0) { // #3
+    destW = round(double(srcW * userH) / srcH);
+    destH = userH;
+  } else { // userW != 0 && userH != 0
+    if (cropImage == 0) { // #4
+      destW = userW;
+      destH = userH;
+    } else { // #5
+      double userSlope = double(userH) / userW;
+      double srcSlope = double(srcH) / srcW;
+      if (srcSlope < userSlope) {
+        destW = round(double(srcW * userH) / srcH);
+        destH = userH;
+      } else {
+        destW = userW;
+        destH = round(double(srcH * userW) / srcW);
+      }
+    }
+  }
+  // prevent zeros
+  destW = std::max(destW, size_t(1UL));
+  destH = std::max(destH, size_t(1UL));
+}
+} // namespace Util
+} // namespace ffmpeg
diff --git a/torchvision/csrc/io/decoder/util.h b/torchvision/csrc/io/decoder/util.h
new file mode 100644
index 00000000000..01b550e5bbc
--- /dev/null
+++ b/torchvision/csrc/io/decoder/util.h
@@ -0,0 +1,28 @@
+#pragma once
+
+#include "defs.h"
+
+namespace ffmpeg {
+
+/**
+ * FFMPEG library utility functions.
+ */
+
+namespace Util {
+std::string generateErrorDesc(int errorCode);
+size_t serialize(const AVSubtitle& sub, ByteStorage* out);
+bool deserialize(const ByteStorage& buf, AVSubtitle* sub);
+size_t size(const AVSubtitle& sub);
+void setFormatDimensions(
+    size_t& destW,
+    size_t& destH,
+    size_t userW,
+    size_t userH,
+    size_t srcW,
+    size_t srcH,
+    size_t minDimension,
+    size_t maxDimension,
+    size_t cropImage);
+bool validateVideoFormat(const VideoFormat& format);
+} // namespace Util
+} // namespace ffmpeg
diff --git a/torchvision/csrc/io/decoder/util_test.cpp b/torchvision/csrc/io/decoder/util_test.cpp
new file mode 100644
index 00000000000..78de08b7139
--- /dev/null
+++ b/torchvision/csrc/io/decoder/util_test.cpp
@@ -0,0 +1,35 @@
+#include <gtest/gtest.h>
+#include <c10/util/Logging.h>
+#include <cstddef>
+#include "util.h"
+
+TEST(Util, TestSetFormatDimensions) {
+  // clang-format off
+  const size_t test_cases[][9] = {
+      // (userW, userH, srcW, srcH, minDimension, maxDimension, cropImage, destW, destH)
+      {0,  0,  172, 128, 0,   0,   0, 172, 128}, // #1
+      {86, 0,  172, 128, 0,   0,   0, 86,  64},  // #2
+      {64, 0,  128, 172, 0,   0,   0, 64,  86},  // #2
+      {0,  32, 172, 128, 0,   0,   0, 43,  32},  // #3
+      {32, 0,  128, 172, 0,   0,   0, 32,  43},  // #3
+      {60, 50, 172, 128, 0,   0,   0, 60,  50},  // #4
+      {50, 60, 128, 172, 0,   0,   0, 50,  60},  // #4
+      {86, 40, 172, 128, 0,   0,   1, 86,  64},  // #5
+      {86, 92, 172, 128, 0,   0,   1, 124, 92},  // #5
+      {0,  0,  172, 128, 256, 0,   0, 344, 256}, // #6
+      {0,  0,  128, 172, 256, 0,   0, 256, 344}, // #6
+      {0,  0,  128, 172, 0,   344, 0, 256, 344}, // #7
+      {0,  0,  172, 128, 0,   344, 0, 344, 256}, // #7
+      {0,  0,  172, 128, 100, 344, 0, 344, 100}, // #8
+      {0,  0,  128, 172, 100, 344, 0, 100, 344}  // #8
+  };
+  // clang-format on
+
+  for (const auto& tc : test_cases) {
+    size_t destW = 0;
+    size_t destH = 0;
+    ffmpeg::Util::setFormatDimensions(
+        destW, destH, tc[0], tc[1], tc[2], tc[3], tc[4], tc[5], tc[6]);
+    CHECK(destW
+        == tc[7]);
+    CHECK(destH == tc[8]);
+  }
+}
diff --git a/torchvision/csrc/io/decoder/video_sampler.cpp b/torchvision/csrc/io/decoder/video_sampler.cpp
new file mode 100644
index 00000000000..8b712609e34
--- /dev/null
+++ b/torchvision/csrc/io/decoder/video_sampler.cpp
@@ -0,0 +1,337 @@
+#include "video_sampler.h"
+#include <c10/util/Logging.h>
+#include "util.h"
+
+// www.ffmpeg.org/doxygen/0.5/swscale-example_8c-source.html
+
+namespace ffmpeg {
+
+namespace {
+
+// Set up the data pointers and linesizes based on the specified image
+// parameters and the provided array. This sets up "planes" to point into
+// "buffer".
+// NOTE: this is most likely the culprit behind #3534
+//
+// Args:
+// fmt: desired output video format
+// buffer: source constant image buffer (in a different format) that will
+//     contain the final image after SWScale
+// planes: destination data pointers to be filled
+// lineSize: target destination linesize (always {0})
+int preparePlanes(
+    const VideoFormat& fmt,
+    const uint8_t* buffer,
+    uint8_t** planes,
+    int* lineSize) {
+  int result;
+
+  // NOTE: the 1 at the end of av_image_fill_arrays is the alignment value
+  if ((result = av_image_fill_arrays(
+           planes,
+           lineSize,
+           buffer,
+           (AVPixelFormat)fmt.format,
+           fmt.width,
+           fmt.height,
+           1)) < 0) {
+    LOG(ERROR) << "av_image_fill_arrays failed, err: "
+               << Util::generateErrorDesc(result);
+  }
+  return result;
+}
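To make the plane mapping concrete, a sketch of what av_image_fill_arrays() produces for two common formats (values follow from the standard FFmpeg plane layout with alignment 1, not from this file):

    // Packed RGB24 at 4x2:  planes[0] = buffer, lineSize[0] = 4 * 3 = 12,
    //                       planes[1..3] = nullptr.
    // Planar YUV420P at 4x2: planes[0] = buffer       (Y, 4x2)
    //                        planes[1] = buffer + 8   (U, 2x1)
    //                        planes[2] = buffer + 10  (V, 2x1)
    //                        lineSize = {4, 2, 2, 0}.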
+
+// Scale (and crop) the image slice in srcSlice and put the resulting scaled
+// slice into the `planes` buffer, which is mapped to `out` via preparePlanes,
+// as `sws_scale` cannot access buffers directly.
+//
+// Args:
+// context: SwsContext allocated on line 119 (if crop, optional) or 163 (if
+//     scale)
+// srcSlice: frame data in YUV420P
+// srcStride: the array containing the strides for each plane of the source
+//     image (from AVFrame->linesize[0])
+// out: destination buffer
+// planes: indirect destination buffer (mapped to "out" via preparePlanes)
+// lines: destination linesize; constant {0}
+int transformImage(
+    SwsContext* context,
+    const uint8_t* const srcSlice[],
+    int srcStride[],
+    VideoFormat inFormat,
+    VideoFormat outFormat,
+    uint8_t* out,
+    uint8_t* planes[],
+    int lines[]) {
+  int result;
+  if ((result = preparePlanes(outFormat, out, planes, lines)) < 0) {
+    return result;
+  }
+  if (context) {
+    // NOTE: the third sws_scale argument (srcSliceY) is always 0 here:
+    // we convert the full frame
+    if ((result = sws_scale(
+             context, srcSlice, srcStride, 0, inFormat.height, planes, lines)) <
+        0) {
+      LOG(ERROR) << "sws_scale failed, err: "
+                 << Util::generateErrorDesc(result);
+      return result;
+    }
+  } else if (
+      inFormat.width == outFormat.width &&
+      inFormat.height == outFormat.height &&
+      inFormat.format == outFormat.format) {
+    // Copy planes without using sws_scale if sws_getContext failed.
+    av_image_copy(
+        planes,
+        lines,
+        (const uint8_t**)srcSlice,
+        srcStride,
+        (AVPixelFormat)inFormat.format,
+        inFormat.width,
+        inFormat.height);
+  } else {
+    LOG(ERROR) << "Invalid scale context format " << inFormat.format;
+    return AVERROR(EINVAL);
+  }
+  return 0;
+}
+} // namespace
+
+VideoSampler::VideoSampler(int swsFlags, int64_t loggingUuid)
+    : swsFlags_(swsFlags), loggingUuid_(loggingUuid) {}
+
+VideoSampler::~VideoSampler() {
+  cleanUp();
+}
+
+void VideoSampler::shutdown() {
+  cleanUp();
+}
+
+bool VideoSampler::init(const SamplerParameters& params) {
+  cleanUp();
+
+  if (params.out.video.cropImage != 0) {
+    if (!Util::validateVideoFormat(params.out.video)) {
+      LOG(ERROR) << "Invalid video format"
+                 << ", width: " << params.out.video.width
+                 << ", height: " << params.out.video.height
+                 << ", format: " << params.out.video.format
+                 << ", minDimension: " << params.out.video.minDimension
+                 << ", crop: " << params.out.video.cropImage;
+
+      return false;
+    }
+
+    scaleFormat_.format = params.out.video.format;
+    Util::setFormatDimensions(
+        scaleFormat_.width,
+        scaleFormat_.height,
+        params.out.video.width,
+        params.out.video.height,
+        params.in.video.width,
+        params.in.video.height,
+        0,
+        0,
+        1);
+
+    if (!(scaleFormat_ == params_.out.video)) { // crop required
+      cropContext_ = sws_getContext(
+          params.out.video.width,
+          params.out.video.height,
+          (AVPixelFormat)params.out.video.format,
+          params.out.video.width,
+          params.out.video.height,
+          (AVPixelFormat)params.out.video.format,
+          swsFlags_,
+          nullptr,
+          nullptr,
+          nullptr);
+
+      if (!cropContext_) {
+        LOG(ERROR) << "sws_getContext failed for crop context";
+        return false;
+      }
+
+      const auto scaleImageSize = av_image_get_buffer_size(
+          (AVPixelFormat)scaleFormat_.format,
+          scaleFormat_.width,
+          scaleFormat_.height,
+          1);
+      scaleBuffer_.resize(scaleImageSize);
+    }
+  } else {
+    scaleFormat_ = params.out.video;
+  }
+
+  VLOG(1) << "Input format #" << loggingUuid_ << ", width "
+          << params.in.video.width << ", height " << params.in.video.height
+          << ", format " << params.in.video.format << ", minDimension "
+          << params.in.video.minDimension << ", cropImage "
+          << params.in.video.cropImage;
+  VLOG(1) << "Scale format #" << loggingUuid_ << ", width "
+          << scaleFormat_.width << ", height " << scaleFormat_.height
+          << ", format " << scaleFormat_.format << ", minDimension "
+          << scaleFormat_.minDimension << ", cropImage "
+          << scaleFormat_.cropImage;
+  VLOG(1) << "Crop format #" << loggingUuid_ << ", width "
+          << params.out.video.width << ", height " << params.out.video.height
+          << ", format " << params.out.video.format << ", minDimension "
+          << params.out.video.minDimension << ", cropImage "
+          << params.out.video.cropImage;
+
+  // set output format
+  params_ = params;
+
+  if (params.in.video.format == AV_PIX_FMT_YUV420P) {
+    /* When the video width and height are not multiples of 8,
+     * and there is no size change in the conversion,
+     * a blurry screen will appear on the right side.
+     * This problem was discovered in 2012 and
+     * continues to exist in version 4.1.3 in 2019.
+     * This problem can be avoided by adding SWS_ACCURATE_RND.
+     * Details: https://trac.ffmpeg.org/ticket/1582
+     */
+    if ((params.in.video.width & 0x7) || (params.in.video.height & 0x7)) {
+      VLOG(1) << "The width " << params.in.video.width << " and height "
+              << params.in.video.height
+              << " of the image are not multiples of 8; "
+              << "the decoding speed may be reduced";
+      swsFlags_ |= SWS_ACCURATE_RND;
+    }
+  }
+
+  scaleContext_ = sws_getContext(
+      params.in.video.width,
+      params.in.video.height,
+      (AVPixelFormat)params.in.video.format,
+      scaleFormat_.width,
+      scaleFormat_.height,
+      (AVPixelFormat)scaleFormat_.format,
+      swsFlags_,
+      nullptr,
+      nullptr,
+      nullptr);
+  // sws_getContext might fail if in/out format == AV_PIX_FMT_PAL8 (png format)
+  // Return true if input and output formats/width/height are identical
+  // Check scaleContext_ for nullptr in transformImage to copy planes directly
+
+  if (params.in.video.width == scaleFormat_.width &&
+      params.in.video.height == scaleFormat_.height &&
+      params.in.video.format == scaleFormat_.format) {
+    return true;
+  }
+  return scaleContext_ != nullptr;
+}
+
+// Main body of the sample function called from one of the overloads below
+//
+// Args:
+// srcSlice: prepared buffer of decoded AVFrame->data
+// srcStride: linesize (usually obtained from AVFrame->linesize)
+// out: return buffer (ByteStorage*)
+int VideoSampler::sample(
+    const uint8_t* const srcSlice[],
+    int srcStride[],
+    ByteStorage* out) {
+  int result;
+  // scaled and cropped image
+  int outImageSize = av_image_get_buffer_size(
+      (AVPixelFormat)params_.out.video.format,
+      params_.out.video.width,
+      params_.out.video.height,
+      1);
+
+  out->ensure(outImageSize);
+
+  uint8_t* scalePlanes[4] = {nullptr};
+  int scaleLines[4] = {0};
+  // perform scale first
+  if ((result = transformImage(
+           scaleContext_,
+           srcSlice,
+           srcStride,
+           params_.in.video,
+           scaleFormat_,
+           // for crop use internal buffer
+           cropContext_ ? scaleBuffer_.data() : out->writableTail(),
+           scalePlanes,
+           scaleLines))) {
+    return result;
+  }
+
+  // is crop required?
+  if (cropContext_) {
+    uint8_t* cropPlanes[4] = {nullptr};
+    int cropLines[4] = {0};
+
+    if (params_.out.video.height < scaleFormat_.height) {
+      // Destination image is wider than the source image: cut top and bottom
+      for (size_t i = 0; i < 4 && scalePlanes[i] != nullptr; ++i) {
+        scalePlanes[i] += scaleLines[i] *
+            (scaleFormat_.height - params_.out.video.height) / 2;
+      }
+    } else {
+      // Source image is wider than the destination image: cut the sides
+      for (size_t i = 0; i < 4 && scalePlanes[i] != nullptr; ++i) {
+        scalePlanes[i] += scaleLines[i] *
+            (scaleFormat_.width - params_.out.video.width) / 2 /
+            scaleFormat_.width;
+      }
+    }
+
+    // crop image
+    if ((result = transformImage(
+             cropContext_,
+             scalePlanes,
+             scaleLines,
+             params_.out.video,
+             params_.out.video,
+             out->writableTail(),
+             cropPlanes,
+             cropLines))) {
+      return result;
+    }
+  }
+
+  out->append(outImageSize);
+  return outImageSize;
+}
+
+// Call from `video_stream.cpp::114` - occurs during file reads
+int VideoSampler::sample(AVFrame* frame, ByteStorage* out) {
+  if (!frame) {
+    return 0; // no flush for videos
+  }
+
+  return sample(frame->data, frame->linesize, out);
+}
+
+// Call from `video_stream.cpp::114` - not sure when this occurs
+int VideoSampler::sample(const ByteStorage* in, ByteStorage* out) {
+  if (!in) {
+    return 0; // no flush for videos
+  }
+
+  int result;
+  uint8_t* inPlanes[4] = {nullptr};
+  int inLineSize[4] = {0};
+
+  if ((result = preparePlanes(
+           params_.in.video, in->data(), inPlanes, inLineSize)) < 0) {
+    return result;
+  }
+
+  return sample(inPlanes, inLineSize, out);
+}
+
+void VideoSampler::cleanUp() {
+  if (scaleContext_) {
+    sws_freeContext(scaleContext_);
+    scaleContext_ = nullptr;
+  }
+  if (cropContext_) {
+    sws_freeContext(cropContext_);
+    cropContext_ = nullptr;
+    scaleBuffer_.clear();
+  }
+}
+
+} // namespace ffmpeg
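A usage sketch for the sampler above (sketch only: the concrete field values are assumptions; frame and storage would come from the surrounding decoder):

    ffmpeg::VideoSampler sampler;
    ffmpeg::SamplerParameters p;
    p.type = ffmpeg::TYPE_VIDEO;
    p.in.video = inputFormat;              // e.g. taken from the codec context
    p.out.video.format = AV_PIX_FMT_RGB24;
    p.out.video.width = 224;
    p.out.video.height = 224;
    if (sampler.init(p)) {
      // writes 224 * 224 * 3 bytes into storage and returns that count
      int n = sampler.sample(frame, storage);
    }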
diff --git a/torchvision/csrc/io/decoder/video_sampler.h b/torchvision/csrc/io/decoder/video_sampler.h
new file mode 100644
index 00000000000..47247f2c0c5
--- /dev/null
+++ b/torchvision/csrc/io/decoder/video_sampler.h
@@ -0,0 +1,44 @@
+#pragma once
+
+#include "defs.h"
+
+namespace ffmpeg {
+
+/**
+ * Class transcodes video frames from one format into another
+ */
+
+class VideoSampler : public MediaSampler {
+ public:
+  VideoSampler(int swsFlags = SWS_AREA, int64_t loggingUuid = 0);
+
+  ~VideoSampler() override;
+
+  // MediaSampler overrides
+  bool init(const SamplerParameters& params) override;
+  int sample(const ByteStorage* in, ByteStorage* out) override;
+  void shutdown() override;
+
+  // returns the number of processed/scaled bytes
+  int sample(AVFrame* frame, ByteStorage* out);
+  int getImageBytes() const;
+
+ private:
+  // close resources
+  void cleanUp();
+  // helper functions for rescaling, cropping, etc.
+  int sample(
+      const uint8_t* const srcSlice[],
+      int srcStride[],
+      ByteStorage* out);
+
+ private:
+  VideoFormat scaleFormat_;
+  SwsContext* scaleContext_{nullptr};
+  SwsContext* cropContext_{nullptr};
+  int swsFlags_{SWS_AREA};
+  std::vector<uint8_t> scaleBuffer_;
+  int64_t loggingUuid_{0};
+};
+
+} // namespace ffmpeg
diff --git a/torchvision/csrc/io/decoder/video_stream.cpp b/torchvision/csrc/io/decoder/video_stream.cpp
new file mode 100644
index 00000000000..fa08c65cac1
--- /dev/null
+++ b/torchvision/csrc/io/decoder/video_stream.cpp
@@ -0,0 +1,131 @@
+#include "video_stream.h"
+#include <c10/util/Logging.h>
+#include "util.h"
+
+namespace ffmpeg {
+
+namespace {
+bool operator==(const VideoFormat& x, const AVFrame& y) {
+  return x.width == static_cast<size_t>(y.width) &&
+      x.height == static_cast<size_t>(y.height) && x.format == y.format;
+}
+
+bool operator==(const VideoFormat& x, const AVCodecContext& y) {
+  return x.width == static_cast<size_t>(y.width) &&
+      x.height == static_cast<size_t>(y.height) && x.format == y.pix_fmt;
+}
+
+VideoFormat& toVideoFormat(VideoFormat& x, const AVFrame& y) {
+  x.width = y.width;
+  x.height = y.height;
+  x.format = y.format;
+  return x;
+}
+
+VideoFormat& toVideoFormat(VideoFormat& x, const AVCodecContext& y) {
+  x.width = y.width;
+  x.height = y.height;
+  x.format = y.pix_fmt;
+  return x;
+}
+} // namespace
+
+VideoStream::VideoStream(
+    AVFormatContext* inputCtx,
+    int index,
+    bool convertPtsToWallTime,
+    const VideoFormat& format,
+    int64_t loggingUuid)
+    : Stream(
+          inputCtx,
+          MediaFormat::makeMediaFormat(format, index),
+          convertPtsToWallTime,
+          loggingUuid) {}
+
+VideoStream::~VideoStream() {
+  if (sampler_) {
+    sampler_->shutdown();
+    sampler_.reset();
+  }
+}
+
+int VideoStream::initFormat() {
+  // set output format
+  if (!Util::validateVideoFormat(format_.format.video)) {
+    LOG(ERROR) << "Invalid video format"
+               << ", width: " << format_.format.video.width
+               << ", height: " << format_.format.video.height
+               << ", format: " << format_.format.video.format
+               << ", minDimension: " << format_.format.video.minDimension
+               << ", crop: " << format_.format.video.cropImage;
+    return -1;
+  }
+
+  // keep aspect ratio
+  Util::setFormatDimensions(
+      format_.format.video.width,
+      format_.format.video.height,
+      format_.format.video.width,
+      format_.format.video.height,
+      codecCtx_->width,
+      codecCtx_->height,
+      format_.format.video.minDimension,
+      format_.format.video.maxDimension,
+      0);
+
+  if (format_.format.video.format == AV_PIX_FMT_NONE) {
+    format_.format.video.format = codecCtx_->pix_fmt;
+  }
+  return format_.format.video.width != 0 && format_.format.video.height != 0 &&
+          format_.format.video.format != AV_PIX_FMT_NONE
+      ?
+      0
+      : -1;
+}
+
+// copies frame bytes via the sws_scale call in video_sampler.cpp
+int VideoStream::copyFrameBytes(ByteStorage* out, bool flush) {
+  if (!sampler_) {
+    sampler_ = std::make_unique<VideoSampler>(SWS_AREA, loggingUuid_);
+  }
+
+  // check if the input format has changed
+  if (flush ? !(sampler_->getInputFormat().video == *codecCtx_)
+            : !(sampler_->getInputFormat().video == *frame_)) {
+    // - reinit sampler
+    SamplerParameters params;
+    params.type = format_.type;
+    params.out = format_.format;
+    params.in = FormatUnion(0);
+    flush ? toVideoFormat(params.in.video, *codecCtx_)
+          : toVideoFormat(params.in.video, *frame_);
+    if (!sampler_->init(params)) {
+      return -1;
+    }
+
+    VLOG(1) << "Set input video sampler format"
+            << ", width: " << params.in.video.width
+            << ", height: " << params.in.video.height
+            << ", format: " << params.in.video.format
+            << " : output video sampler format"
+            << ", width: " << format_.format.video.width
+            << ", height: " << format_.format.video.height
+            << ", format: " << format_.format.video.format
+            << ", minDimension: " << format_.format.video.minDimension
+            << ", crop: " << format_.format.video.cropImage;
+  }
+  // call the sampler, which converts the frame from the input pixel format
+  // (e.g. YUV420P) to RGB24 and optionally crops and resizes it. Frame bytes
+  // are copied from frame_->data to the out buffer
+  return sampler_->sample(flush ? nullptr : frame_, out);
+}
+
+void VideoStream::setHeader(DecoderHeader* header, bool flush) {
+  Stream::setHeader(header, flush);
+  if (!flush) { // no frames for video flush
+    header->keyFrame = frame_->key_frame;
+    header->fps = av_q2d(av_guess_frame_rate(
+        inputCtx_, inputCtx_->streams[format_.stream], nullptr));
+  }
+}
+
+} // namespace ffmpeg
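For reference, header->fps in setHeader() above is just the stream's rational frame rate flattened to a double; a worked instance with assumed values:

    // AVRational r = av_guess_frame_rate(ctx, stream, nullptr); // e.g. {30000, 1001}
    // double fps = av_q2d(r);                                   // 30000.0 / 1001 ≈ 29.97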
diff --git a/torchvision/csrc/io/decoder/video_stream.h b/torchvision/csrc/io/decoder/video_stream.h
new file mode 100644
index 00000000000..e6a8bf02b65
--- /dev/null
+++ b/torchvision/csrc/io/decoder/video_stream.h
@@ -0,0 +1,31 @@
+#pragma once
+
+#include "stream.h"
+#include "video_sampler.h"
+
+namespace ffmpeg {
+
+/**
+ * Class uses FFMPEG library to decode one video stream.
+ */
+
+class VideoStream : public Stream {
+ public:
+  VideoStream(
+      AVFormatContext* inputCtx,
+      int index,
+      bool convertPtsToWallTime,
+      const VideoFormat& format,
+      int64_t loggingUuid);
+  ~VideoStream() override;
+
+ private:
+  int initFormat() override;
+  int copyFrameBytes(ByteStorage* out, bool flush) override;
+  void setHeader(DecoderHeader* header, bool flush) override;
+
+ private:
+  std::unique_ptr<VideoSampler> sampler_;
+};
+
+} // namespace ffmpeg
diff --git a/torchvision/csrc/io/image/common.cpp b/torchvision/csrc/io/image/common.cpp
new file mode 100644
index 00000000000..16b7ac2f91e
--- /dev/null
+++ b/torchvision/csrc/io/image/common.cpp
@@ -0,0 +1,43 @@
+
+#include "common.h"
+#include <torch/types.h>
+
+namespace vision {
+namespace image {
+
+void validate_encoded_data(const torch::Tensor& encoded_data) {
+  TORCH_CHECK(encoded_data.is_contiguous(), "Input tensor must be contiguous.");
+  TORCH_CHECK(
+      encoded_data.dtype() == torch::kU8,
+      "Input tensor must have uint8 data type, got ",
+      encoded_data.dtype());
+  TORCH_CHECK(
+      encoded_data.dim() == 1 && encoded_data.numel() > 0,
+      "Input tensor must be 1-dimensional and non-empty, got ",
+      encoded_data.dim(),
+      " dims and ",
+      encoded_data.numel(),
+      " numels.");
+}
+
+bool should_this_return_rgb_or_rgba_let_me_know_in_the_comments_down_below_guys_see_you_in_the_next_video(
+    ImageReadMode mode,
+    bool has_alpha) {
+  // Return true if the calling decoding function should return a 3D RGB
+  // tensor, and false if it should return a 4D RGBA tensor.
+  // This function ignores the requested "grayscale" modes and treats them as
+  // "unchanged", so it should only be used in decoders that don't support
+  // grayscale outputs.
+
+  if (mode == IMAGE_READ_MODE_RGB) {
+    return true;
+  }
+  if (mode == IMAGE_READ_MODE_RGB_ALPHA) {
+    return false;
+  }
+  // From here we assume mode is "unchanged", even for grayscale ones.
+  return !has_alpha;
+}
+
+} // namespace image
+} // namespace vision
diff --git a/torchvision/csrc/io/image/common.h b/torchvision/csrc/io/image/common.h
new file mode 100644
index 00000000000..d81acfda7d4
--- /dev/null
+++ b/torchvision/csrc/io/image/common.h
@@ -0,0 +1,24 @@
+#pragma once
+
+#include <cstdint>
+#include <torch/types.h>
+
+namespace vision {
+namespace image {
+
+/* Should be kept in-sync with Python ImageReadMode enum */
+using ImageReadMode = int64_t;
+const ImageReadMode IMAGE_READ_MODE_UNCHANGED = 0;
+const ImageReadMode IMAGE_READ_MODE_GRAY = 1;
+const ImageReadMode IMAGE_READ_MODE_GRAY_ALPHA = 2;
+const ImageReadMode IMAGE_READ_MODE_RGB = 3;
+const ImageReadMode IMAGE_READ_MODE_RGB_ALPHA = 4;
+
+void validate_encoded_data(const torch::Tensor& encoded_data);
+
+bool should_this_return_rgb_or_rgba_let_me_know_in_the_comments_down_below_guys_see_you_in_the_next_video(
+    ImageReadMode mode,
+    bool has_alpha);
+
+} // namespace image
+} // namespace vision
diff --git a/torchvision/csrc/io/image/cpu/common_jpeg.cpp b/torchvision/csrc/io/image/cpu/common_jpeg.cpp
new file mode 100644
index 00000000000..4c993106b45
--- /dev/null
+++ b/torchvision/csrc/io/image/cpu/common_jpeg.cpp
@@ -0,0 +1,26 @@
+#include "common_jpeg.h"
+
+namespace vision {
+namespace image {
+namespace detail {
+
+#if JPEG_FOUND
+void torch_jpeg_error_exit(j_common_ptr cinfo) {
+  /* cinfo->err really points to a torch_jpeg_error_mgr struct, so coerce
+   * pointer */
+  torch_jpeg_error_ptr myerr = (torch_jpeg_error_ptr)cinfo->err;
+
+  /* Always display the message. */
+  /* We could postpone this until after returning, if we chose.
+   */
+  // (*cinfo->err->output_message)(cinfo);
+  /* Create the message */
+  (*(cinfo->err->format_message))(cinfo, myerr->jpegLastErrorMsg);
+
+  /* Return control to the setjmp point */
+  longjmp(myerr->setjmp_buffer, 1);
+}
+#endif
+
+} // namespace detail
+} // namespace image
+} // namespace vision
diff --git a/torchvision/csrc/io/image/cpu/common_jpeg.h b/torchvision/csrc/io/image/cpu/common_jpeg.h
new file mode 100644
index 00000000000..7f7f9f0ccf1
--- /dev/null
+++ b/torchvision/csrc/io/image/cpu/common_jpeg.h
@@ -0,0 +1,27 @@
+#pragma once
+
+#if JPEG_FOUND
+#include <stdio.h>
+
+#include <jpeglib.h>
+#include <setjmp.h>
+
+namespace vision {
+namespace image {
+namespace detail {
+
+static const JOCTET EOI_BUFFER[1] = {JPEG_EOI};
+struct torch_jpeg_error_mgr {
+  struct jpeg_error_mgr pub; /* "public" fields */
+  char jpegLastErrorMsg[JMSG_LENGTH_MAX]; /* error messages */
+  jmp_buf setjmp_buffer; /* for return to caller */
+};
+
+using torch_jpeg_error_ptr = struct torch_jpeg_error_mgr*;
+void torch_jpeg_error_exit(j_common_ptr cinfo);
+
+} // namespace detail
+} // namespace image
+} // namespace vision
+
+#endif
diff --git a/torchvision/csrc/io/image/cpu/common_png.h b/torchvision/csrc/io/image/cpu/common_png.h
new file mode 100644
index 00000000000..68400d48e05
--- /dev/null
+++ b/torchvision/csrc/io/image/cpu/common_png.h
@@ -0,0 +1,6 @@
+#pragma once
+
+#if PNG_FOUND
+#include <png.h>
+#include <setjmp.h>
+#endif
diff --git a/torchvision/csrc/io/image/cpu/decode_gif.cpp b/torchvision/csrc/io/image/cpu/decode_gif.cpp
new file mode 100644
index 00000000000..f26d37950e3
--- /dev/null
+++ b/torchvision/csrc/io/image/cpu/decode_gif.cpp
@@ -0,0 +1,165 @@
+#include "decode_gif.h"
+#include <cstring>
+#include "../common.h"
+#include "giflib/gif_lib.h"
+
+namespace vision {
+namespace image {
+
+typedef struct reader_helper_t {
+  uint8_t const* encoded_data; // input tensor data pointer
+  size_t encoded_data_size; // size of input tensor in bytes
+  size_t num_bytes_read; // number of bytes read so far in the tensor
+} reader_helper_t;
+
+// This function is used by GIFLIB routines to read the encoded bytes.
+// It reads up to `len` bytes and writes them into `buf`. The data is read
+// from the input tensor passed to decode_gif() starting at the
+// `num_bytes_read` position.
+int read_from_tensor(GifFileType* gifFile, GifByteType* buf, int len) {
+  // the UserData field was set in DGifOpen()
+  reader_helper_t* reader_helper =
+      static_cast<reader_helper_t*>(gifFile->UserData);
+
+  size_t num_bytes_to_read = std::min(
+      (size_t)len,
+      reader_helper->encoded_data_size - reader_helper->num_bytes_read);
+  std::memcpy(
+      buf,
+      reader_helper->encoded_data + reader_helper->num_bytes_read,
+      num_bytes_to_read); // copy only the clamped count, not `len`
+  reader_helper->num_bytes_read += num_bytes_to_read;
+  return num_bytes_to_read;
+}
+
+torch::Tensor decode_gif(const torch::Tensor& encoded_data) {
+  // LibGif docs: https://giflib.sourceforge.net/intro.html
+  // Refer over there for more details on the libgif API, API ref, and a
+  // detailed description of the GIF format.
+
+  validate_encoded_data(encoded_data);
+
+  int error = D_GIF_SUCCEEDED;
+
+  // We're using DGifOpen. The other entrypoints of libgif are
+  // DGifOpenFileName and DGifOpenFileHandle but we don't want to use those,
+  // since we need to read the encoded bytes from a tensor of encoded bytes,
+  // not from a file (for consistency with existing jpeg and png decoders).
+  // Using DGifOpen is the only way to read from a custom source.
+  // For that we need to provide a reader function `read_from_tensor` that
+  // reads from the tensor, and we have to keep track of the number of bytes
+  // read so far: this is why we need the reader_helper struct.
+
+  // TODO: We are potentially doing an unnecessary copy of the encoded bytes:
+  // - 1 copy in from file to tensor (in read_file())
+  // - 1 copy from tensor to GIFLIB buffers (in read_from_tensor())
+  // Since we're vendoring GIFLIB we can potentially modify the calls to
+  // InternalRead() and just set the `buf` pointer to the tensor data directly.
+  // That might even save allocation of those buffers.
+  // If we do that, we'd have to make sure the buffers are never written to by
+  // GIFLIB, otherwise we'd be overwriting the tensor data.
+  reader_helper_t reader_helper;
+  reader_helper.encoded_data = encoded_data.data_ptr<uint8_t>();
+  reader_helper.encoded_data_size = encoded_data.numel();
+  reader_helper.num_bytes_read = 0;
+  GifFileType* gifFile =
+      DGifOpen(static_cast<void*>(&reader_helper), read_from_tensor, &error);
+
+  TORCH_CHECK(
+      (gifFile != nullptr) && (error == D_GIF_SUCCEEDED),
+      "DGifOpen() failed - ",
+      error);
+
+  if (DGifSlurp(gifFile) == GIF_ERROR) {
+    auto gifFileError = gifFile->Error;
+    DGifCloseFile(gifFile, &error);
+    TORCH_CHECK(false, "DGifSlurp() failed - ", gifFileError);
+  }
+  auto num_images = gifFile->ImageCount;
+
+  // This check should already be done within DGifSlurp(); just to be safe
+  TORCH_CHECK(num_images > 0, "GIF file should contain at least one image!");
+
+  GifColorType bg = {0, 0, 0};
+  if (gifFile->SColorMap) {
+    bg = gifFile->SColorMap->Colors[gifFile->SBackGroundColor];
+  }
+
+  // The GIFLIB docs say that the canvas's height and width are potentially
+  // ignored by modern viewers, so to be on the safe side we set the output
+  // height to max(canvas_height, first_image_height). Same for width.
+  // https://giflib.sourceforge.net/whatsinagif/bits_and_bytes.html
+  auto out_h =
+      std::max(gifFile->SHeight, gifFile->SavedImages[0].ImageDesc.Height);
+  auto out_w =
+      std::max(gifFile->SWidth, gifFile->SavedImages[0].ImageDesc.Width);
+
+  // We output a channels-last tensor for consistency with other image
+  // decoders. Torchvision's resize tends to be faster on uint8 channels-last
+  // tensors.
+  auto options = torch::TensorOptions()
+                     .dtype(torch::kU8)
+                     .memory_format(torch::MemoryFormat::ChannelsLast);
+  auto out = torch::empty(
+      {int64_t(num_images), 3, int64_t(out_h), int64_t(out_w)}, options);
+  auto out_a = out.accessor<uint8_t, 4>();
+  for (int i = 0; i < num_images; i++) {
+    const SavedImage& img = gifFile->SavedImages[i];
+
+    GraphicsControlBlock gcb;
+    DGifSavedExtensionToGCB(gifFile, i, &gcb);
+
+    const GifImageDesc& desc = img.ImageDesc;
+    const ColorMapObject* cmap =
+        desc.ColorMap ? desc.ColorMap : gifFile->SColorMap;
+    TORCH_CHECK(
+        cmap != nullptr,
+        "Global and local color maps are missing. This should never happen!");
+
+    // When going from one image to another, there is a "disposal method"
+    // which specifies how to handle the transition. E.g. DISPOSE_DO_NOT means
+    // that the current image should essentially be drawn on top of the
+    // previous canvas. The pixels of that previous canvas will appear on the
+    // new one if either:
+    // - a pixel is transparent in the current image
+    // - the current image is smaller than the canvas, hence exposing its
+    //   pixels
+    // The "background" disposal method means that the current canvas should
+    // be set to the background color.
+    // We only support these 2 modes and default to "background" when the
+    // disposal method is unspecified, or when it's set to "DISPOSE_PREVIOUS"
+    // which according to GIFLIB is not widely supported.
+    // (https://giflib.sourceforge.net/whatsinagif/animation_and_transparency.html).
+    if (i > 0 && gcb.DisposalMode == DISPOSE_DO_NOT) {
+      out[i] = out[i - 1];
+    } else {
+      // Background. If bg wasn't defined, it will be (0, 0, 0)
+      for (int h = 0; h < gifFile->SHeight; h++) {
+        for (int w = 0; w < gifFile->SWidth; w++) {
+          out_a[i][0][h][w] = bg.Red;
+          out_a[i][1][h][w] = bg.Green;
+          out_a[i][2][h][w] = bg.Blue;
+        }
+      }
+    }
+
+    for (int h = 0; h < desc.Height; h++) {
+      for (int w = 0; w < desc.Width; w++) {
+        auto c = img.RasterBits[h * desc.Width + w];
+        if (c == gcb.TransparentColor) {
+          continue;
+        }
+        GifColorType rgb = cmap->Colors[c];
+        out_a[i][0][h + desc.Top][w + desc.Left] = rgb.Red;
+        out_a[i][1][h + desc.Top][w + desc.Left] = rgb.Green;
+        out_a[i][2][h + desc.Top][w + desc.Left] = rgb.Blue;
+      }
+    }
+  }
+
+  out = out.squeeze(0); // remove batch dim if there's only one image
+
+  DGifCloseFile(gifFile, &error);
+  TORCH_CHECK(error == D_GIF_SUCCEEDED, "DGifCloseFile() failed - ", error);
+
+  return out;
+}
+
+} // namespace image
+} // namespace vision
diff --git a/torchvision/csrc/io/image/cpu/decode_gif.h b/torchvision/csrc/io/image/cpu/decode_gif.h
new file mode 100644
index 00000000000..68d5073c91b
--- /dev/null
+++ b/torchvision/csrc/io/image/cpu/decode_gif.h
@@ -0,0 +1,12 @@
+#pragma once
+
+#include <torch/types.h>
+
+namespace vision {
+namespace image {
+
+// encoded_data tensor must be 1D uint8 and contiguous
+C10_EXPORT torch::Tensor decode_gif(const torch::Tensor& encoded_data);
+
+} // namespace image
+} // namespace vision
diff --git a/torchvision/csrc/io/image/cpu/decode_image.cpp b/torchvision/csrc/io/image/cpu/decode_image.cpp
new file mode 100644
index 00000000000..43a688604f6
--- /dev/null
+++ b/torchvision/csrc/io/image/cpu/decode_image.cpp
@@ -0,0 +1,64 @@
+#include "decode_image.h"
+
+#include "decode_gif.h"
+#include "decode_jpeg.h"
+#include "decode_png.h"
+#include "decode_webp.h"
+
+namespace vision {
+namespace image {
+
+torch::Tensor decode_image(
+    const torch::Tensor& data,
+    ImageReadMode mode,
+    bool apply_exif_orientation) {
+  // Check that tensor is a CPU tensor
+  TORCH_CHECK(data.device() == torch::kCPU, "Expected a CPU tensor");
+  // Check that the input tensor dtype is uint8
+  TORCH_CHECK(data.dtype() == torch::kU8, "Expected a torch.uint8 tensor");
+  // Check that the input tensor is 1-dimensional
+  TORCH_CHECK(
+      data.dim() == 1 && data.numel() > 0,
+      "Expected a non-empty 1-dimensional tensor");
+
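The dispatch below sniffs magic bytes at the start of the buffer; for reference, the signatures as used in the checks that follow:

    // FF D8 FF                             -> JPEG
    // 89 50 4E 47                          -> PNG  ("\211PNG")
    // 47 49 46 38 37|39 61                 -> GIF  ("GIF87a" / "GIF89a")
    // 52 49 46 46 .. .. .. .. 57 45 42 50  -> WEBP ("RIFF....WEBP", size at bytes 4-7)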
+  auto err_msg =
+      "Unsupported image file. Only jpeg, png, webp and gif are currently "
+      "supported. For avif and heic formats, please rely on `decode_avif` and "
+      "`decode_heic` directly.";
+
+  auto datap = data.data_ptr<uint8_t>();
+
+  const uint8_t jpeg_signature[3] = {255, 216, 255}; // == "\xFF\xD8\xFF"
+  TORCH_CHECK(data.numel() >= 3, err_msg);
+  if (memcmp(jpeg_signature, datap, 3) == 0) {
+    return decode_jpeg(data, mode, apply_exif_orientation);
+  }
+
+  const uint8_t png_signature[4] = {137, 80, 78, 71}; // == "\211PNG"
+  TORCH_CHECK(data.numel() >= 4, err_msg);
+  if (memcmp(png_signature, datap, 4) == 0) {
+    return decode_png(data, mode, apply_exif_orientation);
+  }
+
+  const uint8_t gif_signature_1[6] = {
+      0x47, 0x49, 0x46, 0x38, 0x39, 0x61}; // == "GIF89a"
+  const uint8_t gif_signature_2[6] = {
+      0x47, 0x49, 0x46, 0x38, 0x37, 0x61}; // == "GIF87a"
+  TORCH_CHECK(data.numel() >= 6, err_msg);
+  if (memcmp(gif_signature_1, datap, 6) == 0 ||
+      memcmp(gif_signature_2, datap, 6) == 0) {
+    return decode_gif(data);
+  }
+
+  const uint8_t webp_signature_begin[4] = {0x52, 0x49, 0x46, 0x46}; // "RIFF"
+  const uint8_t webp_signature_end[7] = {
+      0x57, 0x45, 0x42, 0x50, 0x56, 0x50, 0x38}; // == "WEBPVP8"
+  TORCH_CHECK(data.numel() >= 15, err_msg);
+  if ((memcmp(webp_signature_begin, datap, 4) == 0) &&
+      (memcmp(webp_signature_end, datap + 8, 7) == 0)) {
+    return decode_webp(data, mode);
+  }
+
+  TORCH_CHECK(false, err_msg);
+}
+
+} // namespace image
+} // namespace vision
diff --git a/torchvision/csrc/io/image/cpu/decode_image.h b/torchvision/csrc/io/image/cpu/decode_image.h
new file mode 100644
index 00000000000..f66d47eccd4
--- /dev/null
+++ b/torchvision/csrc/io/image/cpu/decode_image.h
@@ -0,0 +1,15 @@
+#pragma once
+
+#include <torch/types.h>
+#include "../common.h"
+
+namespace vision {
+namespace image {
+
+C10_EXPORT torch::Tensor decode_image(
+    const torch::Tensor& data,
+    ImageReadMode mode = IMAGE_READ_MODE_UNCHANGED,
+    bool apply_exif_orientation = false);
+
+} // namespace image
+} // namespace vision
diff --git a/torchvision/csrc/io/image/cpu/decode_jpeg.cpp b/torchvision/csrc/io/image/cpu/decode_jpeg.cpp
new file mode 100644
index 00000000000..052b98e1be9
--- /dev/null
+++ b/torchvision/csrc/io/image/cpu/decode_jpeg.cpp
@@ -0,0 +1,268 @@
+#include "decode_jpeg.h"
+#include "../common.h"
+#include "common_jpeg.h"
+#include "exif.h"
+
+namespace vision {
+namespace image {
+
+#if !JPEG_FOUND
+torch::Tensor decode_jpeg(
+    const torch::Tensor& data,
+    ImageReadMode mode,
+    bool apply_exif_orientation) {
+  TORCH_CHECK(
+      false, "decode_jpeg: torchvision not compiled with libjpeg support");
+}
+#else
+
+using namespace detail;
+using namespace exif_private;
+
+namespace {
+
+struct torch_jpeg_mgr {
+  struct jpeg_source_mgr pub;
+  const JOCTET* data;
+  size_t len;
+};
+
+static void torch_jpeg_init_source(j_decompress_ptr cinfo) {}
+
+static boolean torch_jpeg_fill_input_buffer(j_decompress_ptr cinfo) {
+  // No more data. Probably an incomplete image; raise an exception.
+  torch_jpeg_error_ptr myerr = (torch_jpeg_error_ptr)cinfo->err;
+  strcpy(myerr->jpegLastErrorMsg, "Image is incomplete or truncated");
+  longjmp(myerr->setjmp_buffer, 1);
+}
+
+static void torch_jpeg_skip_input_data(j_decompress_ptr cinfo, long num_bytes) {
+  torch_jpeg_mgr* src = (torch_jpeg_mgr*)cinfo->src;
+  if (src->pub.bytes_in_buffer < (size_t)num_bytes) {
+    // Skipping over all of the remaining data; output EOI.
+    src->pub.next_input_byte = EOI_BUFFER;
+    src->pub.bytes_in_buffer = 1;
+  } else {
+    // Skipping over only some of the remaining data.
diff --git a/torchvision/csrc/io/image/cpu/decode_image.h b/torchvision/csrc/io/image/cpu/decode_image.h
new file mode 100644
index 00000000000..f66d47eccd4
--- /dev/null
+++ b/torchvision/csrc/io/image/cpu/decode_image.h
@@ -0,0 +1,15 @@
+#pragma once
+
+#include <torch/types.h>
+#include "../common.h"
+
+namespace vision {
+namespace image {
+
+C10_EXPORT torch::Tensor decode_image(
+    const torch::Tensor& data,
+    ImageReadMode mode = IMAGE_READ_MODE_UNCHANGED,
+    bool apply_exif_orientation = false);
+
+} // namespace image
+} // namespace vision
diff --git a/torchvision/csrc/io/image/cpu/decode_jpeg.cpp b/torchvision/csrc/io/image/cpu/decode_jpeg.cpp
new file mode 100644
index 00000000000..052b98e1be9
--- /dev/null
+++ b/torchvision/csrc/io/image/cpu/decode_jpeg.cpp
@@ -0,0 +1,268 @@
+#include "decode_jpeg.h"
+#include "../common.h"
+#include "common_jpeg.h"
+#include "exif.h"
+
+namespace vision {
+namespace image {
+
+#if !JPEG_FOUND
+torch::Tensor decode_jpeg(
+    const torch::Tensor& data,
+    ImageReadMode mode,
+    bool apply_exif_orientation) {
+  TORCH_CHECK(
+      false, "decode_jpeg: torchvision not compiled with libjpeg support");
+}
+#else
+
+using namespace detail;
+using namespace exif_private;
+
+namespace {
+
+struct torch_jpeg_mgr {
+  struct jpeg_source_mgr pub;
+  const JOCTET* data;
+  size_t len;
+};
+
+static void torch_jpeg_init_source(j_decompress_ptr cinfo) {}
+
+static boolean torch_jpeg_fill_input_buffer(j_decompress_ptr cinfo) {
+  // No more data. Probably an incomplete image; raise an exception.
+  torch_jpeg_error_ptr myerr = (torch_jpeg_error_ptr)cinfo->err;
+  strcpy(myerr->jpegLastErrorMsg, "Image is incomplete or truncated");
+  longjmp(myerr->setjmp_buffer, 1);
+}
+
+static void torch_jpeg_skip_input_data(j_decompress_ptr cinfo, long num_bytes) {
+  torch_jpeg_mgr* src = (torch_jpeg_mgr*)cinfo->src;
+  if (src->pub.bytes_in_buffer < (size_t)num_bytes) {
+    // Skipping over all of the remaining data; output EOI.
+    src->pub.next_input_byte = EOI_BUFFER;
+    src->pub.bytes_in_buffer = 1;
+  } else {
+    // Skipping over only some of the remaining data.
+    src->pub.next_input_byte += num_bytes;
+    src->pub.bytes_in_buffer -= num_bytes;
+  }
+}
+
+static void torch_jpeg_term_source(j_decompress_ptr cinfo) {}
+
+static void torch_jpeg_set_source_mgr(
+    j_decompress_ptr cinfo,
+    const unsigned char* data,
+    size_t len) {
+  torch_jpeg_mgr* src;
+  if (cinfo->src == 0) { // if this is the first time: allocate memory
+    cinfo->src = (struct jpeg_source_mgr*)(*cinfo->mem->alloc_small)(
+        (j_common_ptr)cinfo, JPOOL_PERMANENT, sizeof(torch_jpeg_mgr));
+  }
+  src = (torch_jpeg_mgr*)cinfo->src;
+  src->pub.init_source = torch_jpeg_init_source;
+  src->pub.fill_input_buffer = torch_jpeg_fill_input_buffer;
+  src->pub.skip_input_data = torch_jpeg_skip_input_data;
+  src->pub.resync_to_restart = jpeg_resync_to_restart; // default
+  src->pub.term_source = torch_jpeg_term_source;
+  // fill the buffers
+  src->data = (const JOCTET*)data;
+  src->len = len;
+  src->pub.bytes_in_buffer = len;
+  src->pub.next_input_byte = src->data;
+
+  jpeg_save_markers(cinfo, APP1, 0xffff);
+}
+
+inline unsigned char clamped_cmyk_rgb_convert(
+    unsigned char k,
+    unsigned char cmy) {
+  // Inspired from Pillow:
+  // https://github.com/python-pillow/Pillow/blob/07623d1a7cc65206a5355fba2ae256550bfcaba6/src/libImaging/Convert.c#L568-L569
+  int v = k * cmy + 128;
+  v = ((v >> 8) + v) >> 8;
+  return std::clamp(k - v, 0, 255);
+}
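The two shifts above are the classic fixed-point substitute for dividing by 255: for v = k * cmy + 128, the expression ((v >> 8) + v) >> 8 equals round(k * cmy / 255) for all 8-bit inputs, avoiding an integer division per pixel. A self-contained check (illustrative only, not part of the patch) that verifies this exhaustively:

#include <cassert>
#include <cmath>

int muldiv255(int a, int b) {
  int v = a * b + 128;
  return ((v >> 8) + v) >> 8; // fast integer form of round(a * b / 255)
}

int main() {
  // a * b / 255 never lands exactly on .5, so round-to-nearest is unambiguous.
  for (int a = 0; a < 256; ++a) {
    for (int b = 0; b < 256; ++b) {
      assert(muldiv255(a, b) == (int)std::lround(a * b / 255.0));
    }
  }
}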
+void convert_line_cmyk_to_rgb(
+    j_decompress_ptr cinfo,
+    const unsigned char* cmyk_line,
+    unsigned char* rgb_line) {
+  int width = cinfo->output_width;
+  for (int i = 0; i < width; ++i) {
+    int c = cmyk_line[i * 4 + 0];
+    int m = cmyk_line[i * 4 + 1];
+    int y = cmyk_line[i * 4 + 2];
+    int k = cmyk_line[i * 4 + 3];
+
+    rgb_line[i * 3 + 0] = clamped_cmyk_rgb_convert(k, 255 - c);
+    rgb_line[i * 3 + 1] = clamped_cmyk_rgb_convert(k, 255 - m);
+    rgb_line[i * 3 + 2] = clamped_cmyk_rgb_convert(k, 255 - y);
+  }
+}
+
+inline unsigned char rgb_to_gray(int r, int g, int b) {
+  // Inspired from Pillow:
+  // https://github.com/python-pillow/Pillow/blob/07623d1a7cc65206a5355fba2ae256550bfcaba6/src/libImaging/Convert.c#L226
+  return (r * 19595 + g * 38470 + b * 7471 + 0x8000) >> 16;
+}
+
+void convert_line_cmyk_to_gray(
+    j_decompress_ptr cinfo,
+    const unsigned char* cmyk_line,
+    unsigned char* gray_line) {
+  int width = cinfo->output_width;
+  for (int i = 0; i < width; ++i) {
+    int c = cmyk_line[i * 4 + 0];
+    int m = cmyk_line[i * 4 + 1];
+    int y = cmyk_line[i * 4 + 2];
+    int k = cmyk_line[i * 4 + 3];
+
+    int r = clamped_cmyk_rgb_convert(k, 255 - c);
+    int g = clamped_cmyk_rgb_convert(k, 255 - m);
+    int b = clamped_cmyk_rgb_convert(k, 255 - y);
+
+    gray_line[i] = rgb_to_gray(r, g, b);
+  }
+}
+
+} // namespace
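The magic numbers in rgb_to_gray correspond to the ITU-R BT.601 luma weights (0.299, 0.587, 0.114) scaled by 2^16, with 0x8000 added for rounding. A quick sanity check (illustrative only, not part of the patch):

#include <cstdio>

int main() {
  printf("%f %f %f\n", 19595 / 65536.0, 38470 / 65536.0, 7471 / 65536.0);
  // prints ~0.299, ~0.587, ~0.114; the three weights sum to exactly 65536,
  // so a gray input (r == g == b) maps to itself.
  printf("%d\n", 19595 + 38470 + 7471); // 65536
}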
+torch::Tensor decode_jpeg(
+    const torch::Tensor& data,
+    ImageReadMode mode,
+    bool apply_exif_orientation) {
+  C10_LOG_API_USAGE_ONCE(
+      "torchvision.csrc.io.image.cpu.decode_jpeg.decode_jpeg");
+
+  validate_encoded_data(data);
+
+  struct jpeg_decompress_struct cinfo;
+  struct torch_jpeg_error_mgr jerr;
+
+  auto datap = data.data_ptr<uint8_t>();
+  // Setup decompression structure
+  cinfo.err = jpeg_std_error(&jerr.pub);
+  jerr.pub.error_exit = torch_jpeg_error_exit;
+  /* Establish the setjmp return context for my_error_exit to use. */
+  if (setjmp(jerr.setjmp_buffer)) {
+    /* If we get here, the JPEG code has signaled an error.
+     * We need to clean up the JPEG object.
+     */
+    jpeg_destroy_decompress(&cinfo);
+    TORCH_CHECK(false, jerr.jpegLastErrorMsg);
+  }
+
+  jpeg_create_decompress(&cinfo);
+  torch_jpeg_set_source_mgr(&cinfo, datap, data.numel());
+
+  // read info from header.
+  jpeg_read_header(&cinfo, TRUE);
+
+  int channels = cinfo.num_components;
+  bool cmyk_to_rgb_or_gray = false;
+
+  if (mode != IMAGE_READ_MODE_UNCHANGED) {
+    switch (mode) {
+      case IMAGE_READ_MODE_GRAY:
+        if (cinfo.jpeg_color_space == JCS_CMYK ||
+            cinfo.jpeg_color_space == JCS_YCCK) {
+          cinfo.out_color_space = JCS_CMYK;
+          cmyk_to_rgb_or_gray = true;
+        } else {
+          cinfo.out_color_space = JCS_GRAYSCALE;
+        }
+        channels = 1;
+        break;
+      case IMAGE_READ_MODE_RGB:
+        if (cinfo.jpeg_color_space == JCS_CMYK ||
+            cinfo.jpeg_color_space == JCS_YCCK) {
+          cinfo.out_color_space = JCS_CMYK;
+          cmyk_to_rgb_or_gray = true;
+        } else {
+          cinfo.out_color_space = JCS_RGB;
+        }
+        channels = 3;
+        break;
+      /*
+       * Libjpeg does not support converting from CMYK to grayscale etc. There
+       * is a way to do this but it involves converting it manually to RGB:
+       * https://github.com/tensorflow/tensorflow/blob/86871065265b04e0db8ca360c046421efb2bdeb4/tensorflow/core/lib/jpeg/jpeg_mem.cc#L284-L313
+       */
+      default:
+        jpeg_destroy_decompress(&cinfo);
+        TORCH_CHECK(false, "The provided mode is not supported for JPEG files");
+    }
+
+    jpeg_calc_output_dimensions(&cinfo);
+  }
+
+  int exif_orientation = -1;
+  if (apply_exif_orientation) {
+    exif_orientation = fetch_jpeg_exif_orientation(&cinfo);
+  }
+
+  jpeg_start_decompress(&cinfo);
+
+  int height = cinfo.output_height;
+  int width = cinfo.output_width;
+
+  int stride = width * channels;
+  auto tensor =
+      torch::empty({int64_t(height), int64_t(width), channels}, torch::kU8);
+  auto ptr = tensor.data_ptr<uint8_t>();
+  torch::Tensor cmyk_line_tensor;
+  if (cmyk_to_rgb_or_gray) {
+    cmyk_line_tensor = torch::empty({int64_t(width), 4}, torch::kU8);
+  }
+
+  while (cinfo.output_scanline < cinfo.output_height) {
+    /* jpeg_read_scanlines expects an array of pointers to scanlines.
+     * Here the array is only one element long, but you could ask for
+     * more than one scanline at a time if that's more convenient.
+     */
+    if (cmyk_to_rgb_or_gray) {
+      auto cmyk_line_ptr = cmyk_line_tensor.data_ptr<uint8_t>();
+      jpeg_read_scanlines(&cinfo, &cmyk_line_ptr, 1);
+
+      if (channels == 3) {
+        convert_line_cmyk_to_rgb(&cinfo, cmyk_line_ptr, ptr);
+      } else if (channels == 1) {
+        convert_line_cmyk_to_gray(&cinfo, cmyk_line_ptr, ptr);
+      }
+    } else {
+      jpeg_read_scanlines(&cinfo, &ptr, 1);
+    }
+    ptr += stride;
+  }
+
+  jpeg_finish_decompress(&cinfo);
+  jpeg_destroy_decompress(&cinfo);
+  auto output = tensor.permute({2, 0, 1});
+
+  if (apply_exif_orientation) {
+    return exif_orientation_transform(output, exif_orientation);
+  }
+  return output;
+}
+#endif // #if !JPEG_FOUND
+
+int64_t _jpeg_version() {
+#if JPEG_FOUND
+  return JPEG_LIB_VERSION;
+#else
+  return -1;
+#endif
+}
+
+bool _is_compiled_against_turbo() {
+#ifdef LIBJPEG_TURBO_VERSION
+  return true;
+#else
+  return false;
+#endif
+}
+
+} // namespace image
+} // namespace vision
diff --git a/torchvision/csrc/io/image/cpu/decode_jpeg.h b/torchvision/csrc/io/image/cpu/decode_jpeg.h
new file mode 100644
index 00000000000..7412a46d2ea
--- /dev/null
+++ b/torchvision/csrc/io/image/cpu/decode_jpeg.h
@@ -0,0 +1,18 @@
+#pragma once
+
+#include <torch/types.h>
+#include "../common.h"
+
+namespace vision {
+namespace image {
+
+C10_EXPORT torch::Tensor decode_jpeg(
+    const torch::Tensor& data,
+    ImageReadMode mode = IMAGE_READ_MODE_UNCHANGED,
+    bool apply_exif_orientation = false);
+
+C10_EXPORT int64_t _jpeg_version();
+C10_EXPORT bool _is_compiled_against_turbo();
+
+} // namespace image
+} // namespace vision
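Both the libjpeg path above and the libpng path below recover from library errors with setjmp/longjmp, since these C libraries cannot throw C++ exceptions. A minimal, self-contained sketch of the pattern (illustrative only, not part of the patch; fail() stands in for the library's error callback):

#include <csetjmp>
#include <cstdio>

static std::jmp_buf env;

void fail() {
  // A C library with no exceptions reports errors by jumping back to the
  // caller's setjmp point (this is what torch_jpeg_error_exit does).
  std::longjmp(env, 1);
}

int main() {
  if (setjmp(env)) {
    // Control resumes here after longjmp: clean up and report.
    std::puts("decoder signaled an error; cleaned up");
    return 1;
  }
  fail(); // never returns
}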
diff --git a/torchvision/csrc/io/image/cpu/decode_png.cpp b/torchvision/csrc/io/image/cpu/decode_png.cpp
new file mode 100644
index 00000000000..ede14c1e94a
--- /dev/null
+++ b/torchvision/csrc/io/image/cpu/decode_png.cpp
@@ -0,0 +1,229 @@
+#include "decode_png.h"
+#include "../common.h"
+#include "common_png.h"
+#include "exif.h"
+
+namespace vision {
+namespace image {
+
+using namespace exif_private;
+
+#if !PNG_FOUND
+torch::Tensor decode_png(
+    const torch::Tensor& data,
+    ImageReadMode mode,
+    bool apply_exif_orientation) {
+  TORCH_CHECK(
+      false, "decode_png: torchvision not compiled with libPNG support");
+}
+#else
+
+bool is_little_endian() {
+  uint32_t x = 1;
+  return *(uint8_t*)&x;
+}
+
+torch::Tensor decode_png(
+    const torch::Tensor& data,
+    ImageReadMode mode,
+    bool apply_exif_orientation) {
+  C10_LOG_API_USAGE_ONCE("torchvision.csrc.io.image.cpu.decode_png.decode_png");
+
+  validate_encoded_data(data);
+
+  auto png_ptr =
+      png_create_read_struct(PNG_LIBPNG_VER_STRING, nullptr, nullptr, nullptr);
+  TORCH_CHECK(png_ptr, "libpng read structure allocation failed!")
+  auto info_ptr = png_create_info_struct(png_ptr);
+  if (!info_ptr) {
+    png_destroy_read_struct(&png_ptr, nullptr, nullptr);
+    // Seems redundant with the if statement. Done here to avoid leaking memory.
+    TORCH_CHECK(info_ptr, "libpng info structure allocation failed!")
+  }
+
+  auto accessor = data.accessor<unsigned char, 1>();
+  auto datap = accessor.data();
+  auto datap_len = accessor.size(0);
+
+  if (setjmp(png_jmpbuf(png_ptr)) != 0) {
+    png_destroy_read_struct(&png_ptr, &info_ptr, nullptr);
+    TORCH_CHECK(false, "Internal error.");
+  }
+  TORCH_CHECK(datap_len >= 8, "Content is too small for png!")
+  auto is_png = !png_sig_cmp(datap, 0, 8);
+  TORCH_CHECK(is_png, "Content is not png!")
+
+  struct Reader {
+    png_const_bytep ptr;
+    png_size_t count;
+  } reader;
+  reader.ptr = png_const_bytep(datap) + 8;
+  reader.count = datap_len - 8;
+
+  auto read_callback = [](png_structp png_ptr,
+                          png_bytep output,
+                          png_size_t bytes) {
+    auto reader = static_cast<Reader*>(png_get_io_ptr(png_ptr));
+    TORCH_CHECK(
+        reader->count >= bytes,
+        "Out of bound read in decode_png. Probably, the input image is corrupted");
+    std::copy(reader->ptr, reader->ptr + bytes, output);
+    reader->ptr += bytes;
+    reader->count -= bytes;
+  };
+  png_set_sig_bytes(png_ptr, 8);
+  png_set_read_fn(png_ptr, &reader, read_callback);
+  png_read_info(png_ptr, info_ptr);
+
+  png_uint_32 width, height;
+  int bit_depth, color_type;
+  int interlace_type;
+  auto retval = png_get_IHDR(
+      png_ptr,
+      info_ptr,
+      &width,
+      &height,
+      &bit_depth,
+      &color_type,
+      &interlace_type,
+      nullptr,
+      nullptr);
+
+  if (retval != 1) {
+    png_destroy_read_struct(&png_ptr, &info_ptr, nullptr);
+    TORCH_CHECK(retval == 1, "Could not read image metadata from content.")
+  }
+
+  if (bit_depth > 8 && bit_depth != 16) {
+    png_destroy_read_struct(&png_ptr, &info_ptr, nullptr);
+    TORCH_CHECK(
+        false,
+        "bit depth of png image is " + std::to_string(bit_depth) +
+            ". Only <=8 and 16 are supported.")
+  }
+
+  int channels = png_get_channels(png_ptr, info_ptr);
+
+  if (color_type == PNG_COLOR_TYPE_GRAY && bit_depth < 8)
+    png_set_expand_gray_1_2_4_to_8(png_ptr);
+
+  int number_of_passes;
+  if (interlace_type == PNG_INTERLACE_ADAM7) {
+    number_of_passes = png_set_interlace_handling(png_ptr);
+  } else {
+    number_of_passes = 1;
+  }
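// Annotation (not part of the patch): the block below maps the requested
// ImageReadMode onto libpng's transform pipeline. Palette images are first
// expanded to RGB (png_set_palette_to_rgb, which may introduce an alpha
// channel), then alpha is stripped or synthesized (png_set_strip_alpha /
// png_set_add_alpha) and color is converted (png_set_rgb_to_gray /
// png_set_gray_to_rgb), so that the row layout libpng produces matches the
// `channels` count chosen for the output tensor once
// png_read_update_info() commits the transforms.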
+  if (mode != IMAGE_READ_MODE_UNCHANGED) {
+    // TODO: consider supporting PNG_INFO_tRNS
+    bool is_palette = (color_type & PNG_COLOR_MASK_PALETTE) != 0;
+    bool has_color = (color_type & PNG_COLOR_MASK_COLOR) != 0;
+    bool has_alpha = (color_type & PNG_COLOR_MASK_ALPHA) != 0;
+
+    switch (mode) {
+      case IMAGE_READ_MODE_GRAY:
+        if (color_type != PNG_COLOR_TYPE_GRAY) {
+          if (is_palette) {
+            png_set_palette_to_rgb(png_ptr);
+            has_alpha = true;
+          }
+
+          if (has_alpha) {
+            png_set_strip_alpha(png_ptr);
+          }
+
+          if (has_color) {
+            png_set_rgb_to_gray(png_ptr, 1, 0.2989, 0.587);
+          }
+          channels = 1;
+        }
+        break;
+      case IMAGE_READ_MODE_GRAY_ALPHA:
+        if (color_type != PNG_COLOR_TYPE_GRAY_ALPHA) {
+          if (is_palette) {
+            png_set_palette_to_rgb(png_ptr);
+            has_alpha = true;
+          }
+
+          if (!has_alpha) {
+            png_set_add_alpha(png_ptr, (1 << bit_depth) - 1, PNG_FILLER_AFTER);
+          }
+
+          if (has_color) {
+            png_set_rgb_to_gray(png_ptr, 1, 0.2989, 0.587);
+          }
+          channels = 2;
+        }
+        break;
+      case IMAGE_READ_MODE_RGB:
+        if (color_type != PNG_COLOR_TYPE_RGB) {
+          if (is_palette) {
+            png_set_palette_to_rgb(png_ptr);
+            has_alpha = true;
+          } else if (!has_color) {
+            png_set_gray_to_rgb(png_ptr);
+          }
+
+          if (has_alpha) {
+            png_set_strip_alpha(png_ptr);
+          }
+          channels = 3;
+        }
+        break;
+      case IMAGE_READ_MODE_RGB_ALPHA:
+        if (color_type != PNG_COLOR_TYPE_RGB_ALPHA) {
+          if (is_palette) {
+            png_set_palette_to_rgb(png_ptr);
+            has_alpha = true;
+          } else if (!has_color) {
+            png_set_gray_to_rgb(png_ptr);
+          }
+
+          if (!has_alpha) {
+            png_set_add_alpha(png_ptr, (1 << bit_depth) - 1, PNG_FILLER_AFTER);
+          }
+          channels = 4;
+        }
+        break;
+      default:
+        png_destroy_read_struct(&png_ptr, &info_ptr, nullptr);
+        TORCH_CHECK(false, "The provided mode is not supported for PNG files");
+    }
+
+    png_read_update_info(png_ptr, info_ptr);
+  }
+
+  auto num_pixels_per_row = width * channels;
+  auto is_16_bits = bit_depth == 16;
+  auto tensor = torch::empty(
+      {int64_t(height), int64_t(width), channels},
+      is_16_bits ? at::kUInt16 : torch::kU8);
+  if (is_little_endian()) {
+    png_set_swap(png_ptr);
+  }
+  auto t_ptr = (uint8_t*)tensor.data_ptr();
+  for (int pass = 0; pass < number_of_passes; pass++) {
+    for (png_uint_32 i = 0; i < height; ++i) {
+      png_read_row(png_ptr, t_ptr, nullptr);
+      t_ptr += num_pixels_per_row * (is_16_bits ? 2 : 1);
+    }
+    t_ptr = (uint8_t*)tensor.data_ptr();
+  }
+
+  int exif_orientation = -1;
+  if (apply_exif_orientation) {
+    exif_orientation = fetch_png_exif_orientation(png_ptr, info_ptr);
+  }
+
+  png_destroy_read_struct(&png_ptr, &info_ptr, nullptr);
+
+  auto output = tensor.permute({2, 0, 1});
+  if (apply_exif_orientation) {
+    return exif_orientation_transform(output, exif_orientation);
+  }
+  return output;
+}
+#endif
+
+} // namespace image
+} // namespace vision
diff --git a/torchvision/csrc/io/image/cpu/decode_png.h b/torchvision/csrc/io/image/cpu/decode_png.h
new file mode 100644
index 00000000000..faaffa7ae49
--- /dev/null
+++ b/torchvision/csrc/io/image/cpu/decode_png.h
@@ -0,0 +1,15 @@
+#pragma once
+
+#include <torch/types.h>
+#include "../common.h"
+
+namespace vision {
+namespace image {
+
+C10_EXPORT torch::Tensor decode_png(
+    const torch::Tensor& data,
+    ImageReadMode mode = IMAGE_READ_MODE_UNCHANGED,
+    bool apply_exif_orientation = false);
+
+} // namespace image
+} // namespace vision
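A hedged usage note (illustrative only, not part of the patch): decode_png returns a CHW tensor whose dtype follows the bit depth of the file, uint8 for <=8-bit inputs and uint16 for 16-bit inputs (rows are byte-swapped to native order on little-endian hosts via png_set_swap above). The `encoded` argument is assumed to hold the raw bytes of a PNG file as a 1D uint8 tensor:

#include <torch/types.h>
#include "decode_png.h"

void inspect(const torch::Tensor& encoded) {
  auto img = vision::image::decode_png(encoded);
  TORCH_CHECK(img.dim() == 3); // CHW
  TORCH_CHECK(img.dtype() == torch::kU8 || img.dtype() == at::kUInt16);
}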
diff --git a/torchvision/csrc/io/image/cpu/decode_webp.cpp b/torchvision/csrc/io/image/cpu/decode_webp.cpp
new file mode 100644
index 00000000000..4c13c5c2b1a
--- /dev/null
+++ b/torchvision/csrc/io/image/cpu/decode_webp.cpp
@@ -0,0 +1,60 @@
+#include "decode_webp.h"
+#include "../common.h"
+
+#if WEBP_FOUND
+#include "webp/decode.h"
+#include "webp/types.h"
+#endif // WEBP_FOUND
+
+namespace vision {
+namespace image {
+
+#if !WEBP_FOUND
+torch::Tensor decode_webp(
+    const torch::Tensor& encoded_data,
+    ImageReadMode mode) {
+  TORCH_CHECK(
+      false, "decode_webp: torchvision not compiled with libwebp support");
+}
+#else
+
+torch::Tensor decode_webp(
+    const torch::Tensor& encoded_data,
+    ImageReadMode mode) {
+  validate_encoded_data(encoded_data);
+
+  auto encoded_data_p = encoded_data.data_ptr<uint8_t>();
+  auto encoded_data_size = encoded_data.numel();
+
+  WebPBitstreamFeatures features;
+  auto res = WebPGetFeatures(encoded_data_p, encoded_data_size, &features);
+  TORCH_CHECK(
+      res == VP8_STATUS_OK, "WebPGetFeatures failed with error code ", res);
+  TORCH_CHECK(
+      !features.has_animation, "Animated webp files are not supported.");
+
+  auto return_rgb =
+      should_this_return_rgb_or_rgba_let_me_know_in_the_comments_down_below_guys_see_you_in_the_next_video(
+          mode, features.has_alpha);
+
+  auto decoding_func = return_rgb ? WebPDecodeRGB : WebPDecodeRGBA;
+  auto num_channels = return_rgb ? 3 : 4;
+
+  int width = 0;
+  int height = 0;
+
+  auto decoded_data =
+      decoding_func(encoded_data_p, encoded_data_size, &width, &height);
+
+  TORCH_CHECK(decoded_data != nullptr, "WebPDecodeRGB[A] failed.");
+
+  auto deleter = [decoded_data](void*) { WebPFree(decoded_data); };
+  auto out = torch::from_blob(
+      decoded_data, {height, width, num_channels}, deleter, torch::kUInt8);
+
+  return out.permute({2, 0, 1});
+}
+#endif // WEBP_FOUND
+
+} // namespace image
+} // namespace vision
diff --git a/torchvision/csrc/io/image/cpu/decode_webp.h b/torchvision/csrc/io/image/cpu/decode_webp.h
new file mode 100644
index 00000000000..d5c81547c42
--- /dev/null
+++ b/torchvision/csrc/io/image/cpu/decode_webp.h
@@ -0,0 +1,14 @@
+#pragma once
+
+#include <torch/types.h>
+#include "../common.h"
+
+namespace vision {
+namespace image {
+
+C10_EXPORT torch::Tensor decode_webp(
+    const torch::Tensor& encoded_data,
+    ImageReadMode mode = IMAGE_READ_MODE_UNCHANGED);
+
+} // namespace image
+} // namespace vision
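decode_webp above avoids a copy by handing libwebp's malloc'd output buffer to torch::from_blob with a deleter, so the tensor frees it (via WebPFree) when the last reference dies. The same zero-copy ownership pattern, self-contained (illustrative only, not part of the patch; std::free stands in for WebPFree):

#include <cstdlib>
#include <torch/types.h>

torch::Tensor wrap_external_buffer(int64_t n) {
  auto* buf = static_cast<uint8_t*>(std::malloc(n));
  TORCH_CHECK(buf != nullptr, "allocation failed");
  auto deleter = [buf](void*) { std::free(buf); };
  // The tensor now owns `buf`; the deleter runs on destruction.
  return torch::from_blob(buf, {n}, deleter, torch::kUInt8);
}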
diff --git a/torchvision/csrc/io/image/cpu/encode_jpeg.cpp b/torchvision/csrc/io/image/cpu/encode_jpeg.cpp
new file mode 100644
index 00000000000..d2ed73071a2
--- /dev/null
+++ b/torchvision/csrc/io/image/cpu/encode_jpeg.cpp
@@ -0,0 +1,113 @@
+#include "encode_jpeg.h"
+
+#include "common_jpeg.h"
+
+namespace vision {
+namespace image {
+
+#if !JPEG_FOUND
+
+torch::Tensor encode_jpeg(const torch::Tensor& data, int64_t quality) {
+  TORCH_CHECK(
+      false, "encode_jpeg: torchvision not compiled with libjpeg support");
+}
+
+#else
+// For libjpeg versions <= 9b, the out_size parameter of jpeg_mem_dest() is
+// defined as unsigned long, whereas in later versions it is defined as size_t.
+#if !defined(JPEG_LIB_VERSION_MAJOR) || JPEG_LIB_VERSION_MAJOR < 9 || \
+    (JPEG_LIB_VERSION_MAJOR == 9 && JPEG_LIB_VERSION_MINOR <= 2)
+using JpegSizeType = unsigned long;
+#else
+using JpegSizeType = size_t;
+#endif
+
+using namespace detail;
+
+torch::Tensor encode_jpeg(const torch::Tensor& data, int64_t quality) {
+  C10_LOG_API_USAGE_ONCE(
+      "torchvision.csrc.io.image.cpu.encode_jpeg.encode_jpeg");
+  // Define compression structures and error handling
+  struct jpeg_compress_struct cinfo {};
+  struct torch_jpeg_error_mgr jerr {};
+
+  // Define buffer to write JPEG information to and its size
+  JpegSizeType jpegSize = 0;
+  uint8_t* jpegBuf = nullptr;
+
+  cinfo.err = jpeg_std_error(&jerr.pub);
+  jerr.pub.error_exit = torch_jpeg_error_exit;
+
+  /* Establish the setjmp return context for my_error_exit to use. */
+  if (setjmp(jerr.setjmp_buffer)) {
+    /* If we get here, the JPEG code has signaled an error.
+     * We need to clean up the JPEG object and the buffer.
+     */
+    jpeg_destroy_compress(&cinfo);
+    if (jpegBuf != nullptr) {
+      free(jpegBuf);
+    }
+
+    TORCH_CHECK(false, (const char*)jerr.jpegLastErrorMsg);
+  }
+
+  // Check that the input tensor is on CPU
+  TORCH_CHECK(data.device() == torch::kCPU, "Input tensor should be on CPU");
+
+  // Check that the input tensor dtype is uint8
+  TORCH_CHECK(data.dtype() == torch::kU8, "Input tensor dtype should be uint8");
+
+  // Check that the input tensor is 3-dimensional
+  TORCH_CHECK(data.dim() == 3, "Input data should be a 3-dimensional tensor");
+
+  // Get image info
+  int channels = data.size(0);
+  int height = data.size(1);
+  int width = data.size(2);
+  auto input = data.permute({1, 2, 0}).contiguous();
+
+  TORCH_CHECK(
+      channels == 1 || channels == 3,
+      "The number of channels should be 1 or 3, got: ",
+      channels);
+
+  // Initialize JPEG structure
+  jpeg_create_compress(&cinfo);
+
+  // Set output image information
+  cinfo.image_width = width;
+  cinfo.image_height = height;
+  cinfo.input_components = channels;
+  cinfo.in_color_space = channels == 1 ? JCS_GRAYSCALE : JCS_RGB;
+
+  jpeg_set_defaults(&cinfo);
+  jpeg_set_quality(&cinfo, quality, TRUE);
+
+  // Save JPEG output to a buffer
+  jpeg_mem_dest(&cinfo, &jpegBuf, &jpegSize);
+
+  // Start JPEG compression
+  jpeg_start_compress(&cinfo, TRUE);
+
+  auto stride = width * channels;
+  auto ptr = input.data_ptr<uint8_t>();
+
+  // Encode JPEG file
+  while (cinfo.next_scanline < cinfo.image_height) {
+    jpeg_write_scanlines(&cinfo, &ptr, 1);
+    ptr += stride;
+  }
+
+  jpeg_finish_compress(&cinfo);
+  jpeg_destroy_compress(&cinfo);
+
+  torch::TensorOptions options = torch::TensorOptions{torch::kU8};
+  auto out_tensor =
+      torch::from_blob(jpegBuf, {(long)jpegSize}, ::free, options);
+  jpegBuf = nullptr;
+  return out_tensor;
+}
+#endif
+
+} // namespace image
+} // namespace vision
diff --git a/torchvision/csrc/io/image/cpu/encode_jpeg.h b/torchvision/csrc/io/image/cpu/encode_jpeg.h
new file mode 100644
index 00000000000..25084e154d6
--- /dev/null
+++ b/torchvision/csrc/io/image/cpu/encode_jpeg.h
@@ -0,0 +1,13 @@
+#pragma once
+
+#include <torch/types.h>
+
+namespace vision {
+namespace image {
+
+C10_EXPORT torch::Tensor encode_jpeg(
+    const torch::Tensor& data,
+    int64_t quality);
+
+} // namespace image
+} // namespace vision
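A hedged round-trip sketch (illustrative only, not part of the patch): encoding a tensor with encode_jpeg and writing the result to disk. The helper name, path handling, and quality value 75 are all hypothetical; it assumes in-tree compilation with libtorch.

#include <fstream>
#include <string>
#include <torch/types.h>
#include "encode_jpeg.h"

void save_jpeg(const torch::Tensor& chw_uint8, const std::string& path) {
  // encode_jpeg expects a CPU, uint8, CHW tensor with 1 or 3 channels and
  // returns a 1D uint8 tensor holding the encoded JPEG stream.
  auto encoded = vision::image::encode_jpeg(chw_uint8, /*quality=*/75);
  std::ofstream f(path, std::ios::binary);
  f.write(
      reinterpret_cast<const char*>(encoded.data_ptr<uint8_t>()),
      encoded.numel());
}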
diff --git a/torchvision/csrc/io/image/cpu/encode_png.cpp b/torchvision/csrc/io/image/cpu/encode_png.cpp
new file mode 100644
index 00000000000..5596d3a6789
--- /dev/null
+++ b/torchvision/csrc/io/image/cpu/encode_png.cpp
@@ -0,0 +1,180 @@
+#include "encode_png.h"
+
+#include "common_png.h"
+
+namespace vision {
+namespace image {
+
+#if !PNG_FOUND
+
+torch::Tensor encode_png(const torch::Tensor& data, int64_t compression_level) {
+  TORCH_CHECK(
+      false, "encode_png: torchvision not compiled with libpng support");
+}
+
+#else
+
+namespace {
+
+struct torch_mem_encode {
+  char* buffer;
+  size_t size;
+};
+
+struct torch_png_error_mgr {
+  const char* pngLastErrorMsg; /* error messages */
+  jmp_buf setjmp_buffer; /* for return to caller */
+};
+
+using torch_png_error_mgr_ptr = torch_png_error_mgr*;
+
+void torch_png_error(png_structp png_ptr, png_const_charp error_msg) {
+  /* png_ptr->err really points to a torch_png_error_mgr struct, so coerce
+   * pointer */
+  auto error_ptr = (torch_png_error_mgr_ptr)png_get_error_ptr(png_ptr);
+  /* Replace the error message on the error structure */
+  error_ptr->pngLastErrorMsg = error_msg;
+  /* Return control to the setjmp point */
+  longjmp(error_ptr->setjmp_buffer, 1);
+}
+
+void torch_png_write_data(
+    png_structp png_ptr,
+    png_bytep data,
+    png_size_t length) {
+  struct torch_mem_encode* p =
+      (struct torch_mem_encode*)png_get_io_ptr(png_ptr);
+  size_t nsize = p->size + length;
+
+  /* allocate or grow buffer */
+  if (p->buffer)
+    p->buffer = (char*)realloc(p->buffer, nsize);
+  else
+    p->buffer = (char*)malloc(nsize);
+
+  if (!p->buffer)
+    png_error(png_ptr, "Write Error");
+
+  /* copy new bytes to end of buffer */
+  memcpy(p->buffer + p->size, data, length);
+  p->size += length;
+}
+
+} // namespace
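torch_png_write_data grows the buffer by exactly `length` bytes on every callback, so a stream written in many small chunks reallocates (and potentially copies) on each call, which is quadratic in the worst case. A sketch of the usual geometric-growth alternative (illustrative only, not part of the patch; assumes trading some slack memory for fewer reallocations is acceptable):

#include <cstdlib>
#include <cstring>

struct growable_buf {
  char* data = nullptr;
  size_t size = 0;
  size_t cap = 0;
};

bool append(growable_buf* b, const char* src, size_t n) {
  if (b->size + n > b->cap) {
    size_t new_cap = b->cap ? b->cap : 64;
    while (new_cap < b->size + n) {
      new_cap *= 2; // double instead of growing by exactly n
    }
    char* p = (char*)realloc(b->data, new_cap);
    if (!p) {
      return false;
    }
    b->data = p;
    b->cap = new_cap;
  }
  memcpy(b->data + b->size, src, n);
  b->size += n;
  return true;
}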
+torch::Tensor encode_png(const torch::Tensor& data, int64_t compression_level) {
+  C10_LOG_API_USAGE_ONCE("torchvision.csrc.io.image.cpu.encode_png.encode_png");
+  // Define compression structures and error handling
+  png_structp png_write;
+  png_infop info_ptr;
+  struct torch_png_error_mgr err_ptr;
+
+  // Define output buffer
+  struct torch_mem_encode buf_info;
+  buf_info.buffer = nullptr;
+  buf_info.size = 0;
+
+  /* Establish the setjmp return context for my_error_exit to use. */
+  if (setjmp(err_ptr.setjmp_buffer)) {
+    /* If we get here, the PNG code has signaled an error.
+     * We need to clean up the PNG object and the buffer.
+     */
+    if (info_ptr != nullptr) {
+      png_destroy_info_struct(png_write, &info_ptr);
+    }
+
+    if (png_write != nullptr) {
+      png_destroy_write_struct(&png_write, nullptr);
+    }
+
+    if (buf_info.buffer != nullptr) {
+      free(buf_info.buffer);
+    }
+
+    TORCH_CHECK(false, err_ptr.pngLastErrorMsg);
+  }
+
+  // Check that the compression level is between 0 and 9
+  TORCH_CHECK(
+      compression_level >= 0 && compression_level <= 9,
+      "Compression level should be between 0 and 9");
+
+  // Check that the input tensor is on CPU
+  TORCH_CHECK(data.device() == torch::kCPU, "Input tensor should be on CPU");
+
+  // Check that the input tensor dtype is uint8
+  TORCH_CHECK(data.dtype() == torch::kU8, "Input tensor dtype should be uint8");
+
+  // Check that the input tensor is 3-dimensional
+  TORCH_CHECK(data.dim() == 3, "Input data should be a 3-dimensional tensor");
+
+  // Get image info
+  int channels = data.size(0);
+  int height = data.size(1);
+  int width = data.size(2);
+  auto input = data.permute({1, 2, 0}).contiguous();
+
+  TORCH_CHECK(
+      channels == 1 || channels == 3,
+      "The number of channels should be 1 or 3, got: ",
+      channels);
+
+  // Initialize PNG structures
+  png_write = png_create_write_struct(
+      PNG_LIBPNG_VER_STRING, &err_ptr, torch_png_error, nullptr);
+
+  info_ptr = png_create_info_struct(png_write);
+
+  // Define custom buffer output
+  png_set_write_fn(png_write, &buf_info, torch_png_write_data, nullptr);
+
+  // Set output image information
+  auto color_type = channels == 1 ? PNG_COLOR_TYPE_GRAY : PNG_COLOR_TYPE_RGB;
+  png_set_IHDR(
+      png_write,
+      info_ptr,
+      width,
+      height,
+      8,
+      color_type,
+      PNG_INTERLACE_NONE,
+      PNG_COMPRESSION_TYPE_DEFAULT,
+      PNG_FILTER_TYPE_DEFAULT);
+
+  // Set image compression level
+  png_set_compression_level(png_write, compression_level);
+
+  // Write file header
+  png_write_info(png_write, info_ptr);
+
+  auto stride = width * channels;
+  auto ptr = input.data_ptr<uint8_t>();
+
+  // Encode PNG file
+  for (int y = 0; y < height; ++y) {
+    png_write_row(png_write, ptr);
+    ptr += stride;
+  }
+
+  // Write EOF
+  png_write_end(png_write, info_ptr);
+
+  // Destroy structures
+  png_destroy_write_struct(&png_write, &info_ptr);
+
+  torch::TensorOptions options = torch::TensorOptions{torch::kU8};
+  auto outTensor = torch::empty({(long)buf_info.size}, options);
+
+  // Copy memory from png buffer, since torch cannot get ownership of it via
+  // `from_blob`
+  auto outPtr = outTensor.data_ptr<uint8_t>();
+  std::memcpy(outPtr, buf_info.buffer, sizeof(uint8_t) * outTensor.numel());
+  free(buf_info.buffer);
+
+  return outTensor;
+}
+
+#endif
+
+} // namespace image
+} // namespace vision
diff --git a/torchvision/csrc/io/image/cpu/encode_png.h b/torchvision/csrc/io/image/cpu/encode_png.h
new file mode 100644
index 00000000000..86a67c8706e
--- /dev/null
+++ b/torchvision/csrc/io/image/cpu/encode_png.h
@@ -0,0 +1,13 @@
+#pragma once
+
+#include <torch/types.h>
+
+namespace vision {
+namespace image {
+
+C10_EXPORT torch::Tensor encode_png(
+    const torch::Tensor& data,
+    int64_t compression_level);
+
+} // namespace image
+} // namespace vision
diff --git a/torchvision/csrc/io/image/cpu/exif.h b/torchvision/csrc/io/image/cpu/exif.h
new file mode 100644
index 00000000000..7680737f8c0
--- /dev/null
+++ b/torchvision/csrc/io/image/cpu/exif.h
@@ -0,0 +1,257 @@
+// @nolint (improperly imported third-party code)
+/*M///////////////////////////////////////////////////////////////////////////////////////
+//
+// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
+//
+// By downloading, copying, installing or using the software you agree to this license.
+// If you do not agree to this license, do not download, install,
+// copy or use the software.
+//
+//
+// License Agreement
+// For Open Source Computer Vision Library
+//
+// Copyright (C) 2000-2008, Intel Corporation, all rights reserved.
+// Copyright (C) 2009, Willow Garage Inc., all rights reserved.
+// Third party copyrights are property of their respective owners.
+//
+// Redistribution and use in source and binary forms, with or without modification,
+// are permitted provided that the following conditions are met:
+//
+// * Redistribution's of source code must retain the above copyright notice,
+// this list of conditions and the following disclaimer.
+//
+// * Redistribution's in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+//
+// * The name of the copyright holders may not be used to endorse or promote products
+// derived from this software without specific prior written permission.
+//
+// This software is provided by the copyright holders and contributors "as is" and
+// any express or implied warranties, including, but not limited to, the implied
+// warranties of merchantability and fitness for a particular purpose are disclaimed.
+// In no event shall the Intel Corporation or contributors be liable for any direct,
+// indirect, incidental, special, exemplary, or consequential damages
+// (including, but not limited to, procurement of substitute goods or services;
+// loss of use, data, or profits; or business interruption) however caused
+// and on any theory of liability, whether in contract, strict liability,
+// or tort (including negligence or otherwise) arising in any way out of
+// the use of this software, even if advised of the possibility of such damage.
+//
+//M*/
+#pragma once
+// Functions in this module are taken from OpenCV
+// https://github.com/opencv/opencv/blob/097891e311fae1d8354eb092a0fd0171e630d78c/modules/imgcodecs/src/exif.cpp
+
+#if JPEG_FOUND
+#include <jpeglib.h>
+#endif
+#if PNG_FOUND
+#include <png.h>
+#endif
+
+#include <torch/types.h>
+
+namespace vision {
+namespace image {
+namespace exif_private {
+
+constexpr uint16_t APP1 = 0xe1;
+constexpr uint16_t ENDIANNESS_INTEL = 0x49;
+constexpr uint16_t ENDIANNESS_MOTO = 0x4d;
+constexpr uint16_t REQ_EXIF_TAG_MARK = 0x2a;
+constexpr uint16_t ORIENTATION_EXIF_TAG = 0x0112;
+constexpr uint16_t INCORRECT_TAG = -1;
+
+class ExifDataReader {
+ public:
+  ExifDataReader(unsigned char* p, size_t s) : _ptr(p), _size(s) {}
+  size_t size() const {
+    return _size;
+  }
+  const unsigned char& operator[](size_t index) const {
+    TORCH_CHECK(index >= 0 && index < _size);
+    return _ptr[index];
+  }
+
+ protected:
+  unsigned char* _ptr;
+  size_t _size;
+};
+
+inline uint16_t get_endianness(const ExifDataReader& exif_data) {
+  if ((exif_data.size() < 1) ||
+      (exif_data.size() > 1 && exif_data[0] != exif_data[1])) {
+    return 0;
+  }
+  if (exif_data[0] == 'I') {
+    return ENDIANNESS_INTEL;
+  }
+  if (exif_data[0] == 'M') {
+    return ENDIANNESS_MOTO;
+  }
+  return 0;
+}
+
+inline uint16_t get_uint16(
+    const ExifDataReader& exif_data,
+    uint16_t endianness,
+    const size_t offset) {
+  if (offset + 1 >= exif_data.size()) {
+    return INCORRECT_TAG;
+  }
+
+  if (endianness == ENDIANNESS_INTEL) {
+    return exif_data[offset] + (exif_data[offset + 1] << 8);
+  }
+  return (exif_data[offset] << 8) + exif_data[offset + 1];
+}
+
+inline uint32_t get_uint32(
+    const ExifDataReader& exif_data,
+    uint16_t endianness,
+    const size_t offset) {
+  if (offset + 3 >= exif_data.size()) {
+    return INCORRECT_TAG;
+  }
+
+  if (endianness == ENDIANNESS_INTEL) {
+    return exif_data[offset] + (exif_data[offset + 1] << 8) +
+        (exif_data[offset + 2] << 16) + (exif_data[offset + 3] << 24);
+  }
+  return (exif_data[offset] << 24) + (exif_data[offset + 1] << 16) +
+      (exif_data[offset + 2] << 8) + exif_data[offset + 3];
+}
+
+inline int fetch_exif_orientation(unsigned char* exif_data_ptr, size_t size) {
+  int exif_orientation = -1;
+
+  // Exif binary structure looks like this
+  // First 6 bytes: [E, x, i, f, 0, 0]
+  // Endianness, 2 bytes : [M, M] or [I, I]
+  // Tag mark, 2 bytes: [0, 0x2a]
+  // Offset, 4 bytes
+  // Num entries, 2 bytes
+  // Tag entries and data, tag has 2 bytes and its data has 10 bytes
+  // For more details:
+  // http://www.media.mit.edu/pia/Research/deepview/exif.html
+
+  ExifDataReader exif_data(exif_data_ptr, size);
+  auto endianness = get_endianness(exif_data);
+
+  // Checking whether the Tag Mark (0x002A) corresponds to the one contained
+  // in the Jpeg file
+  uint16_t tag_mark = get_uint16(exif_data, endianness, 2);
+  if (tag_mark == REQ_EXIF_TAG_MARK) {
+    auto offset = get_uint32(exif_data, endianness, 4);
+    size_t num_entry = get_uint16(exif_data, endianness, offset);
+    offset += 2; // go to start of tag fields
+    constexpr 
size_t tiff_field_size = 12;
+    for (size_t entry = 0; entry < num_entry; entry++) {
+      // Here we just search for the orientation tag and parse it
+      auto tag_num = get_uint16(exif_data, endianness, offset);
+      if (tag_num == INCORRECT_TAG) {
+        break;
+      }
+      if (tag_num == ORIENTATION_EXIF_TAG) {
+        exif_orientation = get_uint16(exif_data, endianness, offset + 8);
+        break;
+      }
+      offset += tiff_field_size;
+    }
+  }
+  return exif_orientation;
+}
+
+#if JPEG_FOUND
+inline int fetch_jpeg_exif_orientation(j_decompress_ptr cinfo) {
+  // Check for Exif marker APP1
+  jpeg_saved_marker_ptr exif_marker = 0;
+  jpeg_saved_marker_ptr cmarker = cinfo->marker_list;
+  while (cmarker && exif_marker == 0) {
+    if (cmarker->marker == APP1) {
+      exif_marker = cmarker;
+    }
+    cmarker = cmarker->next;
+  }
+
+  if (!exif_marker) {
+    return -1;
+  }
+
+  constexpr size_t start_offset = 6;
+  if (exif_marker->data_length <= start_offset) {
+    return -1;
+  }
+
+  auto* exif_data_ptr = exif_marker->data + start_offset;
+  auto size = exif_marker->data_length - start_offset;
+
+  return fetch_exif_orientation(exif_data_ptr, size);
+}
+#endif // #if JPEG_FOUND
+
+#if PNG_FOUND && defined(PNG_eXIf_SUPPORTED)
+inline int fetch_png_exif_orientation(png_structp png_ptr, png_infop info_ptr) {
+  png_uint_32 num_exif = 0;
+  png_bytep exif = 0;
+
+  // Exif info could be in info_ptr
+  if (png_get_valid(png_ptr, info_ptr, PNG_INFO_eXIf)) {
+    png_get_eXIf_1(png_ptr, info_ptr, &num_exif, &exif);
+  }
+
+  if (exif && num_exif > 0) {
+    return fetch_exif_orientation(exif, num_exif);
+  }
+  return -1;
+}
+#endif // #if PNG_FOUND && defined(PNG_eXIf_SUPPORTED)
+
+constexpr uint16_t IMAGE_ORIENTATION_TL = 1; // normal orientation
+constexpr uint16_t IMAGE_ORIENTATION_TR = 2; // needs horizontal flip
+constexpr uint16_t IMAGE_ORIENTATION_BR = 3; // needs 180 rotation
+constexpr uint16_t IMAGE_ORIENTATION_BL = 4; // needs vertical flip
+constexpr uint16_t IMAGE_ORIENTATION_LT =
+    5; // mirrored horizontal & rotate 270 CW
+constexpr uint16_t IMAGE_ORIENTATION_RT = 6; // rotate 90 CW
+constexpr uint16_t IMAGE_ORIENTATION_RB =
+    7; // mirrored horizontal & rotate 90 CW
+constexpr uint16_t IMAGE_ORIENTATION_LB = 8; // needs 270 CW rotation
+
+inline torch::Tensor exif_orientation_transform(
+    const torch::Tensor& image,
+    int orientation) {
+  if (orientation == IMAGE_ORIENTATION_TL) {
+    return image;
+  } else if (orientation == IMAGE_ORIENTATION_TR) {
+    return image.flip(-1);
+  } else if (orientation == IMAGE_ORIENTATION_BR) {
+    // needs 180 rotation, equivalent to
+    // flipping both horizontally and vertically
+    return image.flip({-2, -1});
+  } else if (orientation == IMAGE_ORIENTATION_BL) {
+    return image.flip(-2);
+  } else if (orientation == IMAGE_ORIENTATION_LT) {
+    return image.transpose(-1, -2);
+  } else if (orientation == IMAGE_ORIENTATION_RT) {
+    return image.transpose(-1, -2).flip(-1);
+  } else if (orientation == IMAGE_ORIENTATION_RB) {
+    return image.transpose(-1, -2).flip({-2, -1});
+  } else if (orientation == IMAGE_ORIENTATION_LB) {
+    return image.transpose(-1, -2).flip(-2);
+  }
+  return image;
+}
+
+} // namespace exif_private
+} // namespace image
+} // namespace vision
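A self-contained exercise of the EXIF layout documented in fetch_exif_orientation (illustrative only, not part of the patch; assumes in-tree compilation with this header and libtorch). The buffer is the portion after the "Exif\0\0" prefix: little-endian marker "II", tag mark 0x2A, one IFD entry for tag 0x0112 whose value (read at entry offset + 8) is 6, i.e. "rotate 90 CW":

#include <cassert>
#include "exif.h"

int main() {
  unsigned char exif[] = {
      'I',  'I',  0x2A, 0x00, // endianness marker + tag mark
      0x08, 0x00, 0x00, 0x00, // offset of the first IFD
      0x01, 0x00, // number of entries
      0x12, 0x01, // tag 0x0112 (orientation)
      0x03, 0x00, // type: SHORT
      0x01, 0x00, 0x00, 0x00, // count
      0x06, 0x00, 0x00, 0x00, // value: 6
  };
  assert(
      vision::image::exif_private::fetch_exif_orientation(
          exif, sizeof(exif)) == 6);
}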
diff --git a/travis-scripts/run-clang-format/LICENSE b/torchvision/csrc/io/image/cpu/giflib/README
similarity index 59%
rename from travis-scripts/run-clang-format/LICENSE
rename to torchvision/csrc/io/image/cpu/giflib/README
index e728f248889..7353453e32e 100644
--- a/travis-scripts/run-clang-format/LICENSE
+++ b/torchvision/csrc/io/image/cpu/giflib/README
@@ -1,6 +1,13 @@
-MIT License
+These files come from the GIFLIB project (https://giflib.sourceforge.net/) and
+are licensed under the MIT license.
-Copyright (c) 2017 Guillaume Papin
+Some modifications have been made to the original files:
+- Remove use of "register" keyword in gifalloc.c for C++17 compatibility.
+- Declare loop variable i in DGifGetImageHeader as int instead of unsigned int.
+
+Below is the original license text from the COPYING file of the GIFLIB project:
+
+= MIT LICENSE
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
@@ -9,13 +16,13 @@ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 copies of the Software, and to permit persons to whom the Software is
 furnished to do so, subject to the following conditions:
 
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
 
 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL THE
 AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/torchvision/csrc/io/image/cpu/giflib/dgif_lib.c b/torchvision/csrc/io/image/cpu/giflib/dgif_lib.c
new file mode 100644
index 00000000000..7d35fff87ee
--- /dev/null
+++ b/torchvision/csrc/io/image/cpu/giflib/dgif_lib.c
@@ -0,0 +1,1313 @@
+// @nolint (improperly imported third-party code)
+/******************************************************************************
+
+dgif_lib.c - GIF decoding
+
+The functions here and in egif_lib.c are partitioned carefully so that
+if you only require one of read and write capability, only one of these
+two modules will be linked. Preserve this property!
+
+*****************************************************************************/
+// SPDX-License-Identifier: MIT
+// SPDX-FileCopyrightText: Copyright (C) Eric S. Raymond
+
+#include <fcntl.h>
+#include <limits.h>
+#include <stdint.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#ifdef _WIN32
+#include <io.h>
+#else
+#include <unistd.h>
+#endif /* _WIN32 */
+
+#include "gif_lib.h"
+#include "gif_lib_private.h"
+
+/* compose unsigned little endian value */
+#define UNSIGNED_LITTLE_ENDIAN(lo, hi) ((lo) | ((hi) << 8))
+
+/* avoid extra function call in case we use fread (TVT) */
+static int InternalRead(GifFileType *gif, GifByteType *buf, int len) {
+        // fprintf(stderr, "### Read: %d\n", len);
+        return (((GifFilePrivateType *)gif->Private)->Read
+                    ? 
((GifFilePrivateType *)gif->Private)->Read(gif, buf, len) + : fread(buf, 1, len, + ((GifFilePrivateType *)gif->Private)->File)); +} + +static int DGifGetWord(GifFileType *GifFile, GifWord *Word); +static int DGifSetupDecompress(GifFileType *GifFile); +static int DGifDecompressLine(GifFileType *GifFile, GifPixelType *Line, + int LineLen); +static int DGifGetPrefixChar(const GifPrefixType *Prefix, int Code, + int ClearCode); +static int DGifDecompressInput(GifFileType *GifFile, int *Code); +static int DGifBufferedInput(GifFileType *GifFile, GifByteType *Buf, + GifByteType *NextByte); + +/****************************************************************************** + Open a new GIF file for read, given by its name. + Returns dynamically allocated GifFileType pointer which serves as the GIF + info record. +******************************************************************************/ +GifFileType *DGifOpenFileName(const char *FileName, int *Error) { + int FileHandle; + GifFileType *GifFile; + + if ((FileHandle = open(FileName, O_RDONLY)) == -1) { + if (Error != NULL) { + *Error = D_GIF_ERR_OPEN_FAILED; + } + return NULL; + } + + GifFile = DGifOpenFileHandle(FileHandle, Error); + return GifFile; +} + +/****************************************************************************** + Update a new GIF file, given its file handle. + Returns dynamically allocated GifFileType pointer which serves as the GIF + info record. +******************************************************************************/ +GifFileType *DGifOpenFileHandle(int FileHandle, int *Error) { + char Buf[GIF_STAMP_LEN + 1]; + GifFileType *GifFile; + GifFilePrivateType *Private; + FILE *f; + + GifFile = (GifFileType *)malloc(sizeof(GifFileType)); + if (GifFile == NULL) { + if (Error != NULL) { + *Error = D_GIF_ERR_NOT_ENOUGH_MEM; + } + (void)close(FileHandle); + return NULL; + } + + /*@i1@*/ memset(GifFile, '\0', sizeof(GifFileType)); + + /* Belt and suspenders, in case the null pointer isn't zero */ + GifFile->SavedImages = NULL; + GifFile->SColorMap = NULL; + + Private = (GifFilePrivateType *)calloc(1, sizeof(GifFilePrivateType)); + if (Private == NULL) { + if (Error != NULL) { + *Error = D_GIF_ERR_NOT_ENOUGH_MEM; + } + (void)close(FileHandle); + free((char *)GifFile); + return NULL; + } + + /*@i1@*/ memset(Private, '\0', sizeof(GifFilePrivateType)); + +#ifdef _WIN32 + _setmode(FileHandle, O_BINARY); /* Make sure it is in binary mode. 
*/ +#endif /* _WIN32 */ + + f = fdopen(FileHandle, "rb"); /* Make it into a stream: */ + + /*@-mustfreeonly@*/ + GifFile->Private = (void *)Private; + Private->FileHandle = FileHandle; + Private->File = f; + Private->FileState = FILE_STATE_READ; + Private->Read = NULL; /* don't use alternate input method (TVT) */ + GifFile->UserData = NULL; /* TVT */ + /*@=mustfreeonly@*/ + + /* Let's see if this is a GIF file: */ + /* coverity[check_return] */ + if (InternalRead(GifFile, (unsigned char *)Buf, GIF_STAMP_LEN) != + GIF_STAMP_LEN) { + if (Error != NULL) { + *Error = D_GIF_ERR_READ_FAILED; + } + (void)fclose(f); + free((char *)Private); + free((char *)GifFile); + return NULL; + } + + /* Check for GIF prefix at start of file */ + Buf[GIF_STAMP_LEN] = 0; + if (strncmp(GIF_STAMP, Buf, GIF_VERSION_POS) != 0) { + if (Error != NULL) { + *Error = D_GIF_ERR_NOT_GIF_FILE; + } + (void)fclose(f); + free((char *)Private); + free((char *)GifFile); + return NULL; + } + + if (DGifGetScreenDesc(GifFile) == GIF_ERROR) { + (void)fclose(f); + free((char *)Private); + free((char *)GifFile); + return NULL; + } + + GifFile->Error = 0; + + /* What version of GIF? */ + Private->gif89 = (Buf[GIF_VERSION_POS + 1] == '9'); + + return GifFile; +} + +/****************************************************************************** + GifFileType constructor with user supplied input function (TVT) +******************************************************************************/ +GifFileType *DGifOpen(void *userData, InputFunc readFunc, int *Error) { + char Buf[GIF_STAMP_LEN + 1]; + GifFileType *GifFile; + GifFilePrivateType *Private; + + GifFile = (GifFileType *)malloc(sizeof(GifFileType)); + if (GifFile == NULL) { + if (Error != NULL) { + *Error = D_GIF_ERR_NOT_ENOUGH_MEM; + } + return NULL; + } + + memset(GifFile, '\0', sizeof(GifFileType)); + + /* Belt and suspenders, in case the null pointer isn't zero */ + GifFile->SavedImages = NULL; + GifFile->SColorMap = NULL; + + Private = (GifFilePrivateType *)calloc(1, sizeof(GifFilePrivateType)); + if (!Private) { + if (Error != NULL) { + *Error = D_GIF_ERR_NOT_ENOUGH_MEM; + } + free((char *)GifFile); + return NULL; + } + /*@i1@*/ memset(Private, '\0', sizeof(GifFilePrivateType)); + + GifFile->Private = (void *)Private; + Private->FileHandle = 0; + Private->File = NULL; + Private->FileState = FILE_STATE_READ; + + Private->Read = readFunc; /* TVT */ + GifFile->UserData = userData; /* TVT */ + + /* Lets see if this is a GIF file: */ + /* coverity[check_return] */ + if (InternalRead(GifFile, (unsigned char *)Buf, GIF_STAMP_LEN) != + GIF_STAMP_LEN) { + if (Error != NULL) { + *Error = D_GIF_ERR_READ_FAILED; + } + free((char *)Private); + free((char *)GifFile); + return NULL; + } + + /* Check for GIF prefix at start of file */ + Buf[GIF_STAMP_LEN] = '\0'; + if (strncmp(GIF_STAMP, Buf, GIF_VERSION_POS) != 0) { + if (Error != NULL) { + *Error = D_GIF_ERR_NOT_GIF_FILE; + } + free((char *)Private); + free((char *)GifFile); + return NULL; + } + + if (DGifGetScreenDesc(GifFile) == GIF_ERROR) { + free((char *)Private); + free((char *)GifFile); + if (Error != NULL) { + *Error = D_GIF_ERR_NO_SCRN_DSCR; + } + return NULL; + } + + GifFile->Error = 0; + + /* What version of GIF? */ + Private->gif89 = (Buf[GIF_VERSION_POS + 1] == '9'); + + return GifFile; +} + +/****************************************************************************** + This routine should be called before any other DGif calls. Note that + this routine is called automatically from DGif file open routines. 
+******************************************************************************/ +int DGifGetScreenDesc(GifFileType *GifFile) { + int BitsPerPixel; + bool SortFlag; + GifByteType Buf[3]; + GifFilePrivateType *Private = (GifFilePrivateType *)GifFile->Private; + + if (!IS_READABLE(Private)) { + /* This file was NOT open for reading: */ + GifFile->Error = D_GIF_ERR_NOT_READABLE; + return GIF_ERROR; + } + + /* Put the screen descriptor into the file: */ + if (DGifGetWord(GifFile, &GifFile->SWidth) == GIF_ERROR || + DGifGetWord(GifFile, &GifFile->SHeight) == GIF_ERROR) { + return GIF_ERROR; + } + + if (InternalRead(GifFile, Buf, 3) != 3) { + GifFile->Error = D_GIF_ERR_READ_FAILED; + GifFreeMapObject(GifFile->SColorMap); + GifFile->SColorMap = NULL; + return GIF_ERROR; + } + GifFile->SColorResolution = (((Buf[0] & 0x70) + 1) >> 4) + 1; + SortFlag = (Buf[0] & 0x08) != 0; + BitsPerPixel = (Buf[0] & 0x07) + 1; + GifFile->SBackGroundColor = Buf[1]; + GifFile->AspectByte = Buf[2]; + if (Buf[0] & 0x80) { /* Do we have global color map? */ + int i; + + GifFile->SColorMap = GifMakeMapObject(1 << BitsPerPixel, NULL); + if (GifFile->SColorMap == NULL) { + GifFile->Error = D_GIF_ERR_NOT_ENOUGH_MEM; + return GIF_ERROR; + } + + /* Get the global color map: */ + GifFile->SColorMap->SortFlag = SortFlag; + for (i = 0; i < GifFile->SColorMap->ColorCount; i++) { + /* coverity[check_return] */ + if (InternalRead(GifFile, Buf, 3) != 3) { + GifFreeMapObject(GifFile->SColorMap); + GifFile->SColorMap = NULL; + GifFile->Error = D_GIF_ERR_READ_FAILED; + return GIF_ERROR; + } + GifFile->SColorMap->Colors[i].Red = Buf[0]; + GifFile->SColorMap->Colors[i].Green = Buf[1]; + GifFile->SColorMap->Colors[i].Blue = Buf[2]; + } + } else { + GifFile->SColorMap = NULL; + } + + /* + * No check here for whether the background color is in range for the + * screen color map. Possibly there should be. + */ + + return GIF_OK; +} + +const char *DGifGetGifVersion(GifFileType *GifFile) { + GifFilePrivateType *Private = (GifFilePrivateType *)GifFile->Private; + + if (Private->gif89) { + return GIF89_STAMP; + } else { + return GIF87_STAMP; + } +} + +/****************************************************************************** + This routine should be called before any attempt to read an image. 
+******************************************************************************/ +int DGifGetRecordType(GifFileType *GifFile, GifRecordType *Type) { + GifByteType Buf; + GifFilePrivateType *Private = (GifFilePrivateType *)GifFile->Private; + + if (!IS_READABLE(Private)) { + /* This file was NOT open for reading: */ + GifFile->Error = D_GIF_ERR_NOT_READABLE; + return GIF_ERROR; + } + + /* coverity[check_return] */ + if (InternalRead(GifFile, &Buf, 1) != 1) { + GifFile->Error = D_GIF_ERR_READ_FAILED; + return GIF_ERROR; + } + + // fprintf(stderr, "### DGifGetRecordType: %02x\n", Buf); + switch (Buf) { + case DESCRIPTOR_INTRODUCER: + *Type = IMAGE_DESC_RECORD_TYPE; + break; + case EXTENSION_INTRODUCER: + *Type = EXTENSION_RECORD_TYPE; + break; + case TERMINATOR_INTRODUCER: + *Type = TERMINATE_RECORD_TYPE; + break; + default: + *Type = UNDEFINED_RECORD_TYPE; + GifFile->Error = D_GIF_ERR_WRONG_RECORD; + return GIF_ERROR; + } + + return GIF_OK; +} + +int DGifGetImageHeader(GifFileType *GifFile) { + unsigned int BitsPerPixel; + GifByteType Buf[3]; + GifFilePrivateType *Private = (GifFilePrivateType *)GifFile->Private; + + if (!IS_READABLE(Private)) { + /* This file was NOT open for reading: */ + GifFile->Error = D_GIF_ERR_NOT_READABLE; + return GIF_ERROR; + } + + if (DGifGetWord(GifFile, &GifFile->Image.Left) == GIF_ERROR || + DGifGetWord(GifFile, &GifFile->Image.Top) == GIF_ERROR || + DGifGetWord(GifFile, &GifFile->Image.Width) == GIF_ERROR || + DGifGetWord(GifFile, &GifFile->Image.Height) == GIF_ERROR) { + return GIF_ERROR; + } + if (InternalRead(GifFile, Buf, 1) != 1) { + GifFile->Error = D_GIF_ERR_READ_FAILED; + GifFreeMapObject(GifFile->Image.ColorMap); + GifFile->Image.ColorMap = NULL; + return GIF_ERROR; + } + BitsPerPixel = (Buf[0] & 0x07) + 1; + GifFile->Image.Interlace = (Buf[0] & 0x40) ? true : false; + + /* Setup the colormap */ + if (GifFile->Image.ColorMap) { + GifFreeMapObject(GifFile->Image.ColorMap); + GifFile->Image.ColorMap = NULL; + } + /* Does this image have local color map? */ + if (Buf[0] & 0x80) { + int i; + + GifFile->Image.ColorMap = + GifMakeMapObject(1 << BitsPerPixel, NULL); + if (GifFile->Image.ColorMap == NULL) { + GifFile->Error = D_GIF_ERR_NOT_ENOUGH_MEM; + return GIF_ERROR; + } + + /* Get the image local color map: */ + for (i = 0; i < GifFile->Image.ColorMap->ColorCount; i++) { + /* coverity[check_return] */ + if (InternalRead(GifFile, Buf, 3) != 3) { + GifFreeMapObject(GifFile->Image.ColorMap); + GifFile->Error = D_GIF_ERR_READ_FAILED; + GifFile->Image.ColorMap = NULL; + return GIF_ERROR; + } + GifFile->Image.ColorMap->Colors[i].Red = Buf[0]; + GifFile->Image.ColorMap->Colors[i].Green = Buf[1]; + GifFile->Image.ColorMap->Colors[i].Blue = Buf[2]; + } + } + + Private->PixelCount = + (long)GifFile->Image.Width * (long)GifFile->Image.Height; + + /* Reset decompress algorithm parameters. */ + return DGifSetupDecompress(GifFile); +} + +/****************************************************************************** + This routine should be called before any attempt to read an image. + Note it is assumed the Image desc. header has been read. 
+******************************************************************************/ +int DGifGetImageDesc(GifFileType *GifFile) { + GifFilePrivateType *Private = (GifFilePrivateType *)GifFile->Private; + SavedImage *sp; + + if (!IS_READABLE(Private)) { + /* This file was NOT open for reading: */ + GifFile->Error = D_GIF_ERR_NOT_READABLE; + return GIF_ERROR; + } + + if (DGifGetImageHeader(GifFile) == GIF_ERROR) { + return GIF_ERROR; + } + + if (GifFile->SavedImages) { + SavedImage *new_saved_images = (SavedImage *)reallocarray( + GifFile->SavedImages, (GifFile->ImageCount + 1), + sizeof(SavedImage)); + if (new_saved_images == NULL) { + GifFile->Error = D_GIF_ERR_NOT_ENOUGH_MEM; + return GIF_ERROR; + } + GifFile->SavedImages = new_saved_images; + } else { + if ((GifFile->SavedImages = + (SavedImage *)malloc(sizeof(SavedImage))) == NULL) { + GifFile->Error = D_GIF_ERR_NOT_ENOUGH_MEM; + return GIF_ERROR; + } + } + + sp = &GifFile->SavedImages[GifFile->ImageCount]; + memcpy(&sp->ImageDesc, &GifFile->Image, sizeof(GifImageDesc)); + if (GifFile->Image.ColorMap != NULL) { + sp->ImageDesc.ColorMap = + GifMakeMapObject(GifFile->Image.ColorMap->ColorCount, + GifFile->Image.ColorMap->Colors); + if (sp->ImageDesc.ColorMap == NULL) { + GifFile->Error = D_GIF_ERR_NOT_ENOUGH_MEM; + return GIF_ERROR; + } + } + sp->RasterBits = (unsigned char *)NULL; + sp->ExtensionBlockCount = 0; + sp->ExtensionBlocks = (ExtensionBlock *)NULL; + + GifFile->ImageCount++; + + return GIF_OK; +} + +/****************************************************************************** + Get one full scanned line (Line) of length LineLen from GIF file. +******************************************************************************/ +int DGifGetLine(GifFileType *GifFile, GifPixelType *Line, int LineLen) { + GifByteType *Dummy; + GifFilePrivateType *Private = (GifFilePrivateType *)GifFile->Private; + + if (!IS_READABLE(Private)) { + /* This file was NOT open for reading: */ + GifFile->Error = D_GIF_ERR_NOT_READABLE; + return GIF_ERROR; + } + + if (!LineLen) { + LineLen = GifFile->Image.Width; + } + + if ((Private->PixelCount -= LineLen) > 0xffff0000UL) { + GifFile->Error = D_GIF_ERR_DATA_TOO_BIG; + return GIF_ERROR; + } + + if (DGifDecompressLine(GifFile, Line, LineLen) == GIF_OK) { + if (Private->PixelCount == 0) { + /* We probably won't be called any more, so let's clean + * up everything before we return: need to flush out all + * the rest of image until an empty block (size 0) + * detected. We use GetCodeNext. + */ + do { + if (DGifGetCodeNext(GifFile, &Dummy) == + GIF_ERROR) { + return GIF_ERROR; + } + } while (Dummy != NULL); + } + return GIF_OK; + } else { + return GIF_ERROR; + } +} + +/****************************************************************************** + Put one pixel (Pixel) into GIF file. 
+******************************************************************************/ +int DGifGetPixel(GifFileType *GifFile, GifPixelType Pixel) { + GifByteType *Dummy; + GifFilePrivateType *Private = (GifFilePrivateType *)GifFile->Private; + + if (!IS_READABLE(Private)) { + /* This file was NOT open for reading: */ + GifFile->Error = D_GIF_ERR_NOT_READABLE; + return GIF_ERROR; + } + if (--Private->PixelCount > 0xffff0000UL) { + GifFile->Error = D_GIF_ERR_DATA_TOO_BIG; + return GIF_ERROR; + } + + if (DGifDecompressLine(GifFile, &Pixel, 1) == GIF_OK) { + if (Private->PixelCount == 0) { + /* We probably won't be called any more, so let's clean + * up everything before we return: need to flush out all + * the rest of image until an empty block (size 0) + * detected. We use GetCodeNext. + */ + do { + if (DGifGetCodeNext(GifFile, &Dummy) == + GIF_ERROR) { + return GIF_ERROR; + } + } while (Dummy != NULL); + } + return GIF_OK; + } else { + return GIF_ERROR; + } +} + +/****************************************************************************** + Get an extension block (see GIF manual) from GIF file. This routine only + returns the first data block, and DGifGetExtensionNext should be called + after this one until NULL extension is returned. + The Extension should NOT be freed by the user (not dynamically allocated). + Note it is assumed the Extension description header has been read. +******************************************************************************/ +int DGifGetExtension(GifFileType *GifFile, int *ExtCode, + GifByteType **Extension) { + GifByteType Buf; + GifFilePrivateType *Private = (GifFilePrivateType *)GifFile->Private; + + // fprintf(stderr, "### -> DGifGetExtension:\n"); + if (!IS_READABLE(Private)) { + /* This file was NOT open for reading: */ + GifFile->Error = D_GIF_ERR_NOT_READABLE; + return GIF_ERROR; + } + + /* coverity[check_return] */ + if (InternalRead(GifFile, &Buf, 1) != 1) { + GifFile->Error = D_GIF_ERR_READ_FAILED; + return GIF_ERROR; + } + *ExtCode = Buf; + // fprintf(stderr, "### <- DGifGetExtension: %02x, about to call + // next\n", Buf); + + return DGifGetExtensionNext(GifFile, Extension); +} + +/****************************************************************************** + Get a following extension block (see GIF manual) from GIF file. This + routine should be called until NULL Extension is returned. + The Extension should NOT be freed by the user (not dynamically allocated). +******************************************************************************/ +int DGifGetExtensionNext(GifFileType *GifFile, GifByteType **Extension) { + GifByteType Buf; + GifFilePrivateType *Private = (GifFilePrivateType *)GifFile->Private; + + // fprintf(stderr, "### -> DGifGetExtensionNext\n"); + if (InternalRead(GifFile, &Buf, 1) != 1) { + GifFile->Error = D_GIF_ERR_READ_FAILED; + return GIF_ERROR; + } + // fprintf(stderr, "### DGifGetExtensionNext sees %d\n", Buf); + + if (Buf > 0) { + *Extension = Private->Buf; /* Use private unused buffer. */ + (*Extension)[0] = + Buf; /* Pascal strings notation (pos. 0 is len.). 
*/ + /* coverity[tainted_data,check_return] */ + if (InternalRead(GifFile, &((*Extension)[1]), Buf) != Buf) { + GifFile->Error = D_GIF_ERR_READ_FAILED; + return GIF_ERROR; + } + } else { + *Extension = NULL; + } + // fprintf(stderr, "### <- DGifGetExtensionNext: %p\n", Extension); + + return GIF_OK; +} + +/****************************************************************************** + Extract a Graphics Control Block from raw extension data +******************************************************************************/ + +int DGifExtensionToGCB(const size_t GifExtensionLength, + const GifByteType *GifExtension, + GraphicsControlBlock *GCB) { + if (GifExtensionLength != 4) { + return GIF_ERROR; + } + + GCB->DisposalMode = (GifExtension[0] >> 2) & 0x07; + GCB->UserInputFlag = (GifExtension[0] & 0x02) != 0; + GCB->DelayTime = + UNSIGNED_LITTLE_ENDIAN(GifExtension[1], GifExtension[2]); + if (GifExtension[0] & 0x01) { + GCB->TransparentColor = (int)GifExtension[3]; + } else { + GCB->TransparentColor = NO_TRANSPARENT_COLOR; + } + + return GIF_OK; +} + +/****************************************************************************** + Extract the Graphics Control Block for a saved image, if it exists. +******************************************************************************/ + +int DGifSavedExtensionToGCB(GifFileType *GifFile, int ImageIndex, + GraphicsControlBlock *GCB) { + int i; + + if (ImageIndex < 0 || ImageIndex > GifFile->ImageCount - 1) { + return GIF_ERROR; + } + + GCB->DisposalMode = DISPOSAL_UNSPECIFIED; + GCB->UserInputFlag = false; + GCB->DelayTime = 0; + GCB->TransparentColor = NO_TRANSPARENT_COLOR; + + for (i = 0; i < GifFile->SavedImages[ImageIndex].ExtensionBlockCount; + i++) { + ExtensionBlock *ep = + &GifFile->SavedImages[ImageIndex].ExtensionBlocks[i]; + if (ep->Function == GRAPHICS_EXT_FUNC_CODE) { + return DGifExtensionToGCB(ep->ByteCount, ep->Bytes, + GCB); + } + } + + return GIF_ERROR; +} + +/****************************************************************************** + This routine should be called last, to close the GIF file. 
+******************************************************************************/ +int DGifCloseFile(GifFileType *GifFile, int *ErrorCode) { + GifFilePrivateType *Private; + + if (GifFile == NULL || GifFile->Private == NULL) { + return GIF_ERROR; + } + + if (GifFile->Image.ColorMap) { + GifFreeMapObject(GifFile->Image.ColorMap); + GifFile->Image.ColorMap = NULL; + } + + if (GifFile->SColorMap) { + GifFreeMapObject(GifFile->SColorMap); + GifFile->SColorMap = NULL; + } + + if (GifFile->SavedImages) { + GifFreeSavedImages(GifFile); + GifFile->SavedImages = NULL; + } + + GifFreeExtensions(&GifFile->ExtensionBlockCount, + &GifFile->ExtensionBlocks); + + Private = (GifFilePrivateType *)GifFile->Private; + + if (!IS_READABLE(Private)) { + /* This file was NOT open for reading: */ + if (ErrorCode != NULL) { + *ErrorCode = D_GIF_ERR_NOT_READABLE; + } + free((char *)GifFile->Private); + free(GifFile); + return GIF_ERROR; + } + + if (Private->File && (fclose(Private->File) != 0)) { + if (ErrorCode != NULL) { + *ErrorCode = D_GIF_ERR_CLOSE_FAILED; + } + free((char *)GifFile->Private); + free(GifFile); + return GIF_ERROR; + } + + free((char *)GifFile->Private); + free(GifFile); + if (ErrorCode != NULL) { + *ErrorCode = D_GIF_SUCCEEDED; + } + return GIF_OK; +} + +/****************************************************************************** + Get 2 bytes (word) from the given file: +******************************************************************************/ +static int DGifGetWord(GifFileType *GifFile, GifWord *Word) { + unsigned char c[2]; + + /* coverity[check_return] */ + if (InternalRead(GifFile, c, 2) != 2) { + GifFile->Error = D_GIF_ERR_READ_FAILED; + return GIF_ERROR; + } + + *Word = (GifWord)UNSIGNED_LITTLE_ENDIAN(c[0], c[1]); + return GIF_OK; +} + +/****************************************************************************** + Get the image code in compressed form. This routine can be called if the + information needed to be piped out as is. Obviously this is much faster + than decoding and encoding again. This routine should be followed by calls + to DGifGetCodeNext, until NULL block is returned. + The block should NOT be freed by the user (not dynamically allocated). +******************************************************************************/ +int DGifGetCode(GifFileType *GifFile, int *CodeSize, GifByteType **CodeBlock) { + GifFilePrivateType *Private = (GifFilePrivateType *)GifFile->Private; + + if (!IS_READABLE(Private)) { + /* This file was NOT open for reading: */ + GifFile->Error = D_GIF_ERR_NOT_READABLE; + return GIF_ERROR; + } + + *CodeSize = Private->BitsPerPixel; + + return DGifGetCodeNext(GifFile, CodeBlock); +} + +/****************************************************************************** + Continue to get the image code in compressed form. This routine should be + called until NULL block is returned. + The block should NOT be freed by the user (not dynamically allocated). +******************************************************************************/ +int DGifGetCodeNext(GifFileType *GifFile, GifByteType **CodeBlock) { + GifByteType Buf; + GifFilePrivateType *Private = (GifFilePrivateType *)GifFile->Private; + + /* coverity[tainted_data_argument] */ + /* coverity[check_return] */ + if (InternalRead(GifFile, &Buf, 1) != 1) { + GifFile->Error = D_GIF_ERR_READ_FAILED; + return GIF_ERROR; + } + + /* coverity[lower_bounds] */ + if (Buf > 0) { + *CodeBlock = Private->Buf; /* Use private unused buffer. */ + (*CodeBlock)[0] = + Buf; /* Pascal strings notation (pos. 
0 is len.). */ + /* coverity[tainted_data] */ + if (InternalRead(GifFile, &((*CodeBlock)[1]), Buf) != Buf) { + GifFile->Error = D_GIF_ERR_READ_FAILED; + return GIF_ERROR; + } + } else { + *CodeBlock = NULL; + Private->Buf[0] = 0; /* Make sure the buffer is empty! */ + Private->PixelCount = + 0; /* And local info. indicate image read. */ + } + + return GIF_OK; +} + +/****************************************************************************** + Setup the LZ decompression for this image: +******************************************************************************/ +static int DGifSetupDecompress(GifFileType *GifFile) { + int i, BitsPerPixel; + GifByteType CodeSize; + GifPrefixType *Prefix; + GifFilePrivateType *Private = (GifFilePrivateType *)GifFile->Private; + + /* coverity[check_return] */ + if (InternalRead(GifFile, &CodeSize, 1) < + 1) { /* Read Code size from file. */ + GifFile->Error = D_GIF_ERR_READ_FAILED; + return GIF_ERROR; /* Failed to read Code size. */ + } + BitsPerPixel = CodeSize; + + /* this can only happen on a severely malformed GIF */ + if (BitsPerPixel > 8) { + GifFile->Error = + D_GIF_ERR_READ_FAILED; /* somewhat bogus error code */ + return GIF_ERROR; /* Failed to read Code size. */ + } + + Private->Buf[0] = 0; /* Input Buffer empty. */ + Private->BitsPerPixel = BitsPerPixel; + Private->ClearCode = (1 << BitsPerPixel); + Private->EOFCode = Private->ClearCode + 1; + Private->RunningCode = Private->EOFCode + 1; + Private->RunningBits = BitsPerPixel + 1; /* Number of bits per code. */ + Private->MaxCode1 = 1 << Private->RunningBits; /* Max. code + 1. */ + Private->StackPtr = 0; /* No pixels on the pixel stack. */ + Private->LastCode = NO_SUCH_CODE; + Private->CrntShiftState = 0; /* No information in CrntShiftDWord. */ + Private->CrntShiftDWord = 0; + + Prefix = Private->Prefix; + for (i = 0; i <= LZ_MAX_CODE; i++) { + Prefix[i] = NO_SUCH_CODE; + } + + return GIF_OK; +} + +/****************************************************************************** + The LZ decompression routine: + This version decompress the given GIF file into Line of length LineLen. + This routine can be called few times (one per scan line, for example), in + order the complete the whole image. +******************************************************************************/ +static int DGifDecompressLine(GifFileType *GifFile, GifPixelType *Line, + int LineLen) { + int i = 0; + int j, CrntCode, EOFCode, ClearCode, CrntPrefix, LastCode, StackPtr; + GifByteType *Stack, *Suffix; + GifPrefixType *Prefix; + GifFilePrivateType *Private = (GifFilePrivateType *)GifFile->Private; + + StackPtr = Private->StackPtr; + Prefix = Private->Prefix; + Suffix = Private->Suffix; + Stack = Private->Stack; + EOFCode = Private->EOFCode; + ClearCode = Private->ClearCode; + LastCode = Private->LastCode; + + if (StackPtr > LZ_MAX_CODE) { + return GIF_ERROR; + } + + if (StackPtr != 0) { + /* Let pop the stack off before continueing to read the GIF + * file: */ + while (StackPtr != 0 && i < LineLen) { + Line[i++] = Stack[--StackPtr]; + } + } + + while (i < LineLen) { /* Decode LineLen items. */ + if (DGifDecompressInput(GifFile, &CrntCode) == GIF_ERROR) { + return GIF_ERROR; + } + + if (CrntCode == EOFCode) { + /* Note however that usually we will not be here as we + * will stop decoding as soon as we got all the pixel, + * or EOF code will not be read at all, and + * DGifGetLine/Pixel clean everything. 
*/ + GifFile->Error = D_GIF_ERR_EOF_TOO_SOON; + return GIF_ERROR; + } else if (CrntCode == ClearCode) { + /* We need to start over again: */ + for (j = 0; j <= LZ_MAX_CODE; j++) { + Prefix[j] = NO_SUCH_CODE; + } + Private->RunningCode = Private->EOFCode + 1; + Private->RunningBits = Private->BitsPerPixel + 1; + Private->MaxCode1 = 1 << Private->RunningBits; + LastCode = Private->LastCode = NO_SUCH_CODE; + } else { + /* Its regular code - if in pixel range simply add it to + * output stream, otherwise trace to codes linked list + * until the prefix is in pixel range: */ + if (CrntCode < ClearCode) { + /* This is simple - its pixel scalar, so add it + * to output: */ + Line[i++] = CrntCode; + } else { + /* Its a code to needed to be traced: trace the + * linked list until the prefix is a pixel, + * while pushing the suffix pixels on our stack. + * If we done, pop the stack in reverse (thats + * what stack is good for!) order to output. */ + if (Prefix[CrntCode] == NO_SUCH_CODE) { + CrntPrefix = LastCode; + + /* Only allowed if CrntCode is exactly + * the running code: In that case + * CrntCode = XXXCode, CrntCode or the + * prefix code is last code and the + * suffix char is exactly the prefix of + * last code! */ + if (CrntCode == + Private->RunningCode - 2) { + Suffix[Private->RunningCode - + 2] = Stack[StackPtr++] = + DGifGetPrefixChar( + Prefix, LastCode, + ClearCode); + } else { + Suffix[Private->RunningCode - + 2] = Stack[StackPtr++] = + DGifGetPrefixChar( + Prefix, CrntCode, + ClearCode); + } + } else { + CrntPrefix = CrntCode; + } + + /* Now (if image is O.K.) we should not get a + * NO_SUCH_CODE during the trace. As we might + * loop forever, in case of defective image, we + * use StackPtr as loop counter and stop before + * overflowing Stack[]. */ + while (StackPtr < LZ_MAX_CODE && + CrntPrefix > ClearCode && + CrntPrefix <= LZ_MAX_CODE) { + Stack[StackPtr++] = Suffix[CrntPrefix]; + CrntPrefix = Prefix[CrntPrefix]; + } + if (StackPtr >= LZ_MAX_CODE || + CrntPrefix > LZ_MAX_CODE) { + GifFile->Error = D_GIF_ERR_IMAGE_DEFECT; + return GIF_ERROR; + } + /* Push the last character on stack: */ + Stack[StackPtr++] = CrntPrefix; + + /* Now lets pop all the stack into output: */ + while (StackPtr != 0 && i < LineLen) { + Line[i++] = Stack[--StackPtr]; + } + } + if (LastCode != NO_SUCH_CODE && + Private->RunningCode - 2 < (LZ_MAX_CODE + 1) && + Prefix[Private->RunningCode - 2] == NO_SUCH_CODE) { + Prefix[Private->RunningCode - 2] = LastCode; + + if (CrntCode == Private->RunningCode - 2) { + /* Only allowed if CrntCode is exactly + * the running code: In that case + * CrntCode = XXXCode, CrntCode or the + * prefix code is last code and the + * suffix char is exactly the prefix of + * last code! */ + Suffix[Private->RunningCode - 2] = + DGifGetPrefixChar(Prefix, LastCode, + ClearCode); + } else { + Suffix[Private->RunningCode - 2] = + DGifGetPrefixChar(Prefix, CrntCode, + ClearCode); + } + } + LastCode = CrntCode; + } + } + + Private->LastCode = LastCode; + Private->StackPtr = StackPtr; + + return GIF_OK; +} + +/****************************************************************************** + Routine to trace the Prefixes linked list until we get a prefix which is + not code, but a pixel value (less than ClearCode). Returns that pixel value. + If image is defective, we might loop here forever, so we limit the loops to + the maximum possible if image O.k. - LZ_MAX_CODE times. 
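(Editorial aside: the stack-and-trace loop above is easier to see on a toy alphabet. The following self-contained illustration, with made-up tables rather than giflib state, expands one code by walking a Prefix/Suffix chain and popping the stack in reverse, which is exactly the shape of the logic above:)

    #include <stdio.h>

    #define TOY_CLEAR 4 /* toy alphabet: pixels 0..3, clear=4, eof=5 */

    int main(void) {
        /* code 6 expands to "01"; code 7 = "010" (its prefix chains to 6) */
        int prefix[8] = {0, 0, 0, 0, 0, 0, 0, 6};
        int suffix[8] = {0, 0, 0, 0, 0, 0, 1, 0};
        int stack[8], sp = 0, code = 7;

        while (code > TOY_CLEAR) { /* trace until a literal pixel */
            stack[sp++] = suffix[code];
            code = prefix[code];
        }
        stack[sp++] = code; /* the leading pixel itself */

        while (sp) /* pop in reverse: emits 0, 1, 0 */
            printf("%d", stack[--sp]);
        printf("\n");
        return 0;
    }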
+******************************************************************************/ +static int DGifGetPrefixChar(const GifPrefixType *Prefix, int Code, + int ClearCode) { + int i = 0; + + while (Code > ClearCode && i++ <= LZ_MAX_CODE) { + if (Code > LZ_MAX_CODE) { + return NO_SUCH_CODE; + } + Code = Prefix[Code]; + } + return Code; +} + +/****************************************************************************** + Interface for accessing the LZ codes directly. Set Code to the real code + (12bits), or to -1 if EOF code is returned. +******************************************************************************/ +int DGifGetLZCodes(GifFileType *GifFile, int *Code) { + GifByteType *CodeBlock; + GifFilePrivateType *Private = (GifFilePrivateType *)GifFile->Private; + + if (!IS_READABLE(Private)) { + /* This file was NOT open for reading: */ + GifFile->Error = D_GIF_ERR_NOT_READABLE; + return GIF_ERROR; + } + + if (DGifDecompressInput(GifFile, Code) == GIF_ERROR) { + return GIF_ERROR; + } + + if (*Code == Private->EOFCode) { + /* Skip rest of codes (hopefully only NULL terminating block): + */ + do { + if (DGifGetCodeNext(GifFile, &CodeBlock) == GIF_ERROR) { + return GIF_ERROR; + } + } while (CodeBlock != NULL); + + *Code = -1; + } else if (*Code == Private->ClearCode) { + /* We need to start over again: */ + Private->RunningCode = Private->EOFCode + 1; + Private->RunningBits = Private->BitsPerPixel + 1; + Private->MaxCode1 = 1 << Private->RunningBits; + } + + return GIF_OK; +} + +/****************************************************************************** + The LZ decompression input routine: + This routine is responsable for the decompression of the bit stream from + 8 bits (bytes) packets, into the real codes. + Returns GIF_OK if read successfully. +******************************************************************************/ +static int DGifDecompressInput(GifFileType *GifFile, int *Code) { + static const unsigned short CodeMasks[] = { + 0x0000, 0x0001, 0x0003, 0x0007, 0x000f, 0x001f, 0x003f, + 0x007f, 0x00ff, 0x01ff, 0x03ff, 0x07ff, 0x0fff}; + + GifFilePrivateType *Private = (GifFilePrivateType *)GifFile->Private; + + GifByteType NextByte; + + /* The image can't contain more than LZ_BITS per code. */ + if (Private->RunningBits > LZ_BITS) { + GifFile->Error = D_GIF_ERR_IMAGE_DEFECT; + return GIF_ERROR; + } + + while (Private->CrntShiftState < Private->RunningBits) { + /* Needs to get more bytes from input stream for next code: */ + if (DGifBufferedInput(GifFile, Private->Buf, &NextByte) == + GIF_ERROR) { + return GIF_ERROR; + } + Private->CrntShiftDWord |= ((unsigned long)NextByte) + << Private->CrntShiftState; + Private->CrntShiftState += 8; + } + *Code = Private->CrntShiftDWord & CodeMasks[Private->RunningBits]; + + Private->CrntShiftDWord >>= Private->RunningBits; + Private->CrntShiftState -= Private->RunningBits; + + /* If code cannot fit into RunningBits bits, must raise its size. Note + * however that codes above 4095 are used for special signaling. + * If we're using LZ_BITS bits already and we're at the max code, just + * keep using the table as it is, don't increment Private->RunningCode. 
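(Editorial aside: DGifGetLZCodes() is mostly a debugging aid. A sketch of the calling convention it implies, assuming the image descriptor has just been read with DGifGetImageDesc(), which in normal use has already set up the decompressor:)

    #include <stdio.h>
    #include "gif_lib.h"

    /* Dump the raw 12-bit LZ codes of the current image; -1 marks EOF. */
    static int dump_lz_codes(GifFileType *gif) {
        int code;
        do {
            if (DGifGetLZCodes(gif, &code) == GIF_ERROR) {
                return GIF_ERROR;
            }
            printf("%d ", code);
        } while (code != -1);
        putchar('\n');
        return GIF_OK;
    }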
+ */ + if (Private->RunningCode < LZ_MAX_CODE + 2 && + ++Private->RunningCode > Private->MaxCode1 && + Private->RunningBits < LZ_BITS) { + Private->MaxCode1 <<= 1; + Private->RunningBits++; + } + return GIF_OK; +} + +/****************************************************************************** + This routines read one GIF data block at a time and buffers it internally + so that the decompression routine could access it. + The routine returns the next byte from its internal buffer (or read next + block in if buffer empty) and returns GIF_OK if succesful. +******************************************************************************/ +static int DGifBufferedInput(GifFileType *GifFile, GifByteType *Buf, + GifByteType *NextByte) { + if (Buf[0] == 0) { + /* Needs to read the next buffer - this one is empty: */ + /* coverity[check_return] */ + if (InternalRead(GifFile, Buf, 1) != 1) { + GifFile->Error = D_GIF_ERR_READ_FAILED; + return GIF_ERROR; + } + /* There shouldn't be any empty data blocks here as the LZW spec + * says the LZW termination code should come first. Therefore + * we shouldn't be inside this routine at that point. + */ + if (Buf[0] == 0) { + GifFile->Error = D_GIF_ERR_IMAGE_DEFECT; + return GIF_ERROR; + } + if (InternalRead(GifFile, &Buf[1], Buf[0]) != Buf[0]) { + GifFile->Error = D_GIF_ERR_READ_FAILED; + return GIF_ERROR; + } + *NextByte = Buf[1]; + Buf[1] = 2; /* We use now the second place as last char read! */ + Buf[0]--; + } else { + *NextByte = Buf[Buf[1]++]; + Buf[0]--; + } + + return GIF_OK; +} + +/****************************************************************************** + This routine is called in case of error during parsing image. We need to + decrease image counter and reallocate memory for saved images. Not decreasing + ImageCount may lead to null pointer dereference, because the last element in + SavedImages may point to the spoilt image and null pointer buffers. +*******************************************************************************/ +void DGifDecreaseImageCounter(GifFileType *GifFile) { + GifFile->ImageCount--; + if (GifFile->SavedImages[GifFile->ImageCount].RasterBits != NULL) { + free(GifFile->SavedImages[GifFile->ImageCount].RasterBits); + } + + // Realloc array according to the new image counter. + SavedImage *correct_saved_images = (SavedImage *)reallocarray( + GifFile->SavedImages, GifFile->ImageCount, sizeof(SavedImage)); + if (correct_saved_images != NULL) { + GifFile->SavedImages = correct_saved_images; + } +} + +/****************************************************************************** + This routine reads an entire GIF into core, hanging all its state info off + the GifFileType pointer. Call DGifOpenFileName() or DGifOpenFileHandle() + first to initialize I/O. Its inverse is EGifSpew(). 
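(Editorial aside: the shift-register scheme in DGifDecompressInput() above is worth a standalone demonstration. Bytes enter a little-endian accumulator and fixed-width codes are masked off the bottom; this toy, not giflib code, pulls three 5-bit codes from two bytes:)

    #include <stdio.h>

    int main(void) {
        const unsigned char stream[] = {0x44, 0x32}; /* example bytes */
        unsigned long shift_dword = 0; /* like CrntShiftDWord */
        int shift_state = 0, pos = 0, running_bits = 5;

        for (int n = 0; n < 3; n++) { /* extract three 5-bit codes */
            while (shift_state < running_bits) { /* refill from the stream */
                shift_dword |= (unsigned long)stream[pos++] << shift_state;
                shift_state += 8;
            }
            printf("code %d = %lu\n",
                   n, shift_dword & ((1UL << running_bits) - 1));
            shift_dword >>= running_bits;
            shift_state -= running_bits;
        }
        return 0;
    }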
+*******************************************************************************/ +int DGifSlurp(GifFileType *GifFile) { + size_t ImageSize; + GifRecordType RecordType; + SavedImage *sp; + GifByteType *ExtData; + int ExtFunction; + + GifFile->ExtensionBlocks = NULL; + GifFile->ExtensionBlockCount = 0; + + do { + if (DGifGetRecordType(GifFile, &RecordType) == GIF_ERROR) { + return (GIF_ERROR); + } + + switch (RecordType) { + case IMAGE_DESC_RECORD_TYPE: + if (DGifGetImageDesc(GifFile) == GIF_ERROR) { + return (GIF_ERROR); + } + + sp = &GifFile->SavedImages[GifFile->ImageCount - 1]; + /* Allocate memory for the image */ + if (sp->ImageDesc.Width <= 0 || + sp->ImageDesc.Height <= 0 || + sp->ImageDesc.Width > + (INT_MAX / sp->ImageDesc.Height)) { + DGifDecreaseImageCounter(GifFile); + return GIF_ERROR; + } + ImageSize = sp->ImageDesc.Width * sp->ImageDesc.Height; + + if (ImageSize > (SIZE_MAX / sizeof(GifPixelType))) { + DGifDecreaseImageCounter(GifFile); + return GIF_ERROR; + } + sp->RasterBits = (unsigned char *)reallocarray( + NULL, ImageSize, sizeof(GifPixelType)); + + if (sp->RasterBits == NULL) { + DGifDecreaseImageCounter(GifFile); + return GIF_ERROR; + } + + if (sp->ImageDesc.Interlace) { + int i, j; + /* + * The way an interlaced image should be read - + * offsets and jumps... + */ + static const int InterlacedOffset[] = {0, 4, 2, + 1}; + static const int InterlacedJumps[] = {8, 8, 4, + 2}; + /* Need to perform 4 passes on the image */ + for (i = 0; i < 4; i++) { + for (j = InterlacedOffset[i]; + j < sp->ImageDesc.Height; + j += InterlacedJumps[i]) { + if (DGifGetLine( + GifFile, + sp->RasterBits + + j * sp->ImageDesc + .Width, + sp->ImageDesc.Width) == + GIF_ERROR) { + DGifDecreaseImageCounter( + GifFile); + return GIF_ERROR; + } + } + } + } else { + if (DGifGetLine(GifFile, sp->RasterBits, + ImageSize) == GIF_ERROR) { + DGifDecreaseImageCounter(GifFile); + return GIF_ERROR; + } + } + + if (GifFile->ExtensionBlocks) { + sp->ExtensionBlocks = GifFile->ExtensionBlocks; + sp->ExtensionBlockCount = + GifFile->ExtensionBlockCount; + + GifFile->ExtensionBlocks = NULL; + GifFile->ExtensionBlockCount = 0; + } + break; + + case EXTENSION_RECORD_TYPE: + if (DGifGetExtension(GifFile, &ExtFunction, &ExtData) == + GIF_ERROR) { + return (GIF_ERROR); + } + /* Create an extension block with our data */ + if (ExtData != NULL) { + if (GifAddExtensionBlock( + &GifFile->ExtensionBlockCount, + &GifFile->ExtensionBlocks, ExtFunction, + ExtData[0], &ExtData[1]) == GIF_ERROR) { + return (GIF_ERROR); + } + } + for (;;) { + if (DGifGetExtensionNext(GifFile, &ExtData) == + GIF_ERROR) { + return (GIF_ERROR); + } + if (ExtData == NULL) { + break; + } + /* Continue the extension block */ + if (GifAddExtensionBlock( + &GifFile->ExtensionBlockCount, + &GifFile->ExtensionBlocks, + CONTINUE_EXT_FUNC_CODE, ExtData[0], + &ExtData[1]) == GIF_ERROR) { + return (GIF_ERROR); + } + } + break; + + case TERMINATE_RECORD_TYPE: + break; + + default: /* Should be trapped by DGifGetRecordType */ + break; + } + } while (RecordType != TERMINATE_RECORD_TYPE); + + /* Sanity check for corrupted file */ + if (GifFile->ImageCount == 0) { + GifFile->Error = D_GIF_ERR_NO_IMAG_DSCR; + return (GIF_ERROR); + } + + return (GIF_OK); +} + +/* end */ diff --git a/torchvision/csrc/io/image/cpu/giflib/gif_hash.c b/torchvision/csrc/io/image/cpu/giflib/gif_hash.c new file mode 100644 index 00000000000..42efbe8de68 --- /dev/null +++ b/torchvision/csrc/io/image/cpu/giflib/gif_hash.c @@ -0,0 +1,129 @@ +// @nolint (improperly imported third-party code) 
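(Editorial aside: the gif_hash.c module below serves the encoder; on the decode side, everything above comes together in DGifSlurp(). A minimal caller looks roughly like this sketch, with error handling abbreviated:)

    #include <stdio.h>
    #include "gif_lib.h"

    int main(int argc, char **argv) {
        int err = 0;
        if (argc < 2) {
            return 1;
        }
        GifFileType *gif = DGifOpenFileName(argv[1], &err);
        if (gif == NULL) {
            fprintf(stderr, "open failed: %s\n", GifErrorString(err));
            return 1;
        }
        if (DGifSlurp(gif) == GIF_ERROR) {
            fprintf(stderr, "decode failed: %s\n", GifErrorString(gif->Error));
        } else {
            printf("%dx%d canvas, %d frame(s)\n", gif->SWidth, gif->SHeight,
                   gif->ImageCount);
        }
        DGifCloseFile(gif, &err);
        return 0;
    }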
+/*****************************************************************************
+
+gif_hash.c -- module to support the following operations:
+
+1. InitHashTable - initialize hash table.
+2. ClearHashTable - clear the hash table to an empty state.
+3. InsertHashTable - insert one item into data structure.
+4. ExistsHashTable - test if item exists in data structure.
+
+This module is used to hash the GIF codes during encoding.
+
+*****************************************************************************/
+// SPDX-License-Identifier: MIT
+// SPDX-File-Copyright-Txt: (C) Copyright 1989 Gershon Elber
+
+#include <fcntl.h>
+#include <stdint.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include "gif_hash.h"
+#include "gif_lib.h"
+#include "gif_lib_private.h"
+
+/* #define DEBUG_HIT_RATE Debug number of misses per hash Insert/Exists. */
+
+#ifdef DEBUG_HIT_RATE
+static long NumberOfTests = 0, NumberOfMisses = 0;
+#endif /* DEBUG_HIT_RATE */
+
+static int KeyItem(uint32_t Item);
+
+/******************************************************************************
+ Initialize HashTable - allocate the memory needed and clear it. *
+******************************************************************************/
+GifHashTableType *_InitHashTable(void) {
+        GifHashTableType *HashTable;
+
+        if ((HashTable = (GifHashTableType *)malloc(
+                 sizeof(GifHashTableType))) == NULL) {
+                return NULL;
+        }
+
+        _ClearHashTable(HashTable);
+
+        return HashTable;
+}
+
+/******************************************************************************
+ Routine to clear the HashTable to an empty state. *
+ This part is a little machine dependent. Use the commented part otherwise. *
+******************************************************************************/
+void _ClearHashTable(GifHashTableType *HashTable) {
+        memset(HashTable->HTable, 0xFF, HT_SIZE * sizeof(uint32_t));
+}
+
+/******************************************************************************
+ Routine to insert a new Item into the HashTable. The data is assumed to be *
+ new one. *
+******************************************************************************/
+void _InsertHashTable(GifHashTableType *HashTable, uint32_t Key, int Code) {
+        int HKey = KeyItem(Key);
+        uint32_t *HTable = HashTable->HTable;
+
+#ifdef DEBUG_HIT_RATE
+        NumberOfTests++;
+        NumberOfMisses++;
+#endif /* DEBUG_HIT_RATE */
+
+        while (HT_GET_KEY(HTable[HKey]) != 0xFFFFFL) {
+#ifdef DEBUG_HIT_RATE
+                NumberOfMisses++;
+#endif /* DEBUG_HIT_RATE */
+                HKey = (HKey + 1) & HT_KEY_MASK;
+        }
+        HTable[HKey] = HT_PUT_KEY(Key) | HT_PUT_CODE(Code);
+}
+
+/******************************************************************************
+ Routine to test if given Key exists in HashTable and if so returns its code *
+ Returns the Code if key was found, -1 if not. *
+******************************************************************************/
+int _ExistsHashTable(GifHashTableType *HashTable, uint32_t Key) {
+        int HKey = KeyItem(Key);
+        uint32_t *HTable = HashTable->HTable, HTKey;
+
+#ifdef DEBUG_HIT_RATE
+        NumberOfTests++;
+        NumberOfMisses++;
+#endif /* DEBUG_HIT_RATE */
+
+        while ((HTKey = HT_GET_KEY(HTable[HKey])) != 0xFFFFFL) {
+#ifdef DEBUG_HIT_RATE
+                NumberOfMisses++;
+#endif /* DEBUG_HIT_RATE */
+                if (Key == HTKey) {
+                        return HT_GET_CODE(HTable[HKey]);
+                }
+                HKey = (HKey + 1) & HT_KEY_MASK;
+        }
+
+        return -1;
+}
+
+/******************************************************************************
+ Routine to generate an HKey for the hashtable out of the given unique key.
* + The given Key is assumed to be 20 bits as follows: lower 8 bits are the * + new postfix character, while the upper 12 bits are the prefix code. * + Because the average hit ratio is only 2 (2 hash references per entry), * + evaluating more complex keys (such as twin prime keys) is not worth it! * +******************************************************************************/ +static int KeyItem(uint32_t Item) { + return ((Item >> 12) ^ Item) & HT_KEY_MASK; +} + +#ifdef DEBUG_HIT_RATE +/****************************************************************************** + Debugging routine to print the hit ratio - number of times the hash table * + was tested per operation. This routine was used to test the KeyItem routine * +******************************************************************************/ +void HashTablePrintHitRatio(void) { + printf("Hash Table Hit Ratio is %ld/%ld = %ld%%.\n", NumberOfMisses, + NumberOfTests, NumberOfMisses * 100 / NumberOfTests); +} +#endif /* DEBUG_HIT_RATE */ + +/* end */ diff --git a/torchvision/csrc/io/image/cpu/giflib/gif_hash.h b/torchvision/csrc/io/image/cpu/giflib/gif_hash.h new file mode 100644 index 00000000000..3066fb14592 --- /dev/null +++ b/torchvision/csrc/io/image/cpu/giflib/gif_hash.h @@ -0,0 +1,43 @@ +// @nolint (improperly imported third-party code) +/****************************************************************************** + +gif_hash.h - magic constants and declarations for GIF LZW + +******************************************************************************/ +// SPDX-License-Identifier: MIT + +#ifndef _GIF_HASH_H_ +#define _GIF_HASH_H_ + +#ifndef _WIN32 +#include <unistd.h> +#endif /* _WIN32 */ +#include <stdint.h> + +#define HT_SIZE 8192 /* 12bits = 4096 or twice as big! */ +#define HT_KEY_MASK 0x1FFF /* 13bits keys */ +#define HT_KEY_NUM_BITS 13 /* 13bits keys */ +#define HT_MAX_KEY 8191 /* 13bits - 1, maximal code possible */ +#define HT_MAX_CODE 4095 /* Biggest code possible in 12 bits. */ + +/* The 32 bits of the long are divided into two parts for the key & code: */ +/* 1. The code is 12 bits as our compression algorithm is limited to 12bits */ +/* 2. The key is 12 bits Prefix code + 8 bit new char or 20 bits. */ +/* The key is the upper 20 bits. The code is the lower 12. */
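(Editorial aside: for intuition, here is a standalone illustration of the 20-bit key packing described above and encoded by the HT_* macros just below; it mirrors the macros rather than calling giflib internals:)

    #include <stdio.h>

    int main(void) {
        unsigned prefix = 0x123, suffix = 0xAB; /* 12-bit prefix code, 8-bit char */
        unsigned key = (prefix << 8) | suffix;  /* the 20-bit hash key */
        unsigned slot = ((key >> 12) ^ key) & 0x1FFF; /* KeyItem(): fold to 13 bits */
        printf("key=0x%05X -> initial slot=0x%04X\n", key, slot);
        return 0;
    }

(Collisions then probe linearly, (HKey + 1) & HT_KEY_MASK, until a free 0xFFFFF key slot is found.)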
+#define HT_GET_KEY(l) (l >> 12) +#define HT_GET_CODE(l) (l & 0x0FFF) +#define HT_PUT_KEY(l) (l << 12) +#define HT_PUT_CODE(l) (l & 0x0FFF) + +typedef struct GifHashTableType { + uint32_t HTable[HT_SIZE]; +} GifHashTableType; + +GifHashTableType *_InitHashTable(void); +void _ClearHashTable(GifHashTableType *HashTable); +void _InsertHashTable(GifHashTableType *HashTable, uint32_t Key, int Code); +int _ExistsHashTable(GifHashTableType *HashTable, uint32_t Key); + +#endif /* _GIF_HASH_H_ */ + +/* end */ diff --git a/torchvision/csrc/io/image/cpu/giflib/gif_lib.h b/torchvision/csrc/io/image/cpu/giflib/gif_lib.h new file mode 100644 index 00000000000..7bed0430450 --- /dev/null +++ b/torchvision/csrc/io/image/cpu/giflib/gif_lib.h @@ -0,0 +1,292 @@ +// @nolint (improperly imported third-party code) +/****************************************************************************** + +gif_lib.h - service library for decoding and encoding GIF images + +SPDX-License-Identifier: MIT + +*****************************************************************************/ + +#ifndef _GIF_LIB_H_ +#define _GIF_LIB_H_ 1 + +#ifdef __cplusplus +extern "C" { +#endif /* __cplusplus */ + +#define GIFLIB_MAJOR 5 +#define GIFLIB_MINOR 2 +#define GIFLIB_RELEASE 2 + +#define GIF_ERROR 0 +#define GIF_OK 1 + +#include <stdbool.h> +#include <stddef.h> + +#define GIF_STAMP "GIFVER" /* First chars in file - GIF stamp. */ +#define GIF_STAMP_LEN sizeof(GIF_STAMP) - 1 +#define GIF_VERSION_POS 3 /* Version first character in stamp. */ +#define GIF87_STAMP "GIF87a" /* First chars in file - GIF stamp. */ +#define GIF89_STAMP "GIF89a" /* First chars in file - GIF stamp. */ + +typedef unsigned char GifPixelType; +typedef unsigned char *GifRowType; +typedef unsigned char GifByteType; +typedef unsigned int GifPrefixType; +typedef int GifWord; + +typedef struct GifColorType { + GifByteType Red, Green, Blue; +} GifColorType; + +typedef struct ColorMapObject { + int ColorCount; + int BitsPerPixel; + bool SortFlag; + GifColorType *Colors; /* on malloc(3) heap */ +} ColorMapObject; + +typedef struct GifImageDesc { + GifWord Left, Top, Width, Height; /* Current image dimensions. */ + bool Interlace; /* Sequential/Interlaced lines. */ + ColorMapObject *ColorMap; /* The local color map */ +} GifImageDesc; + +typedef struct ExtensionBlock { + int ByteCount; + GifByteType *Bytes; /* on malloc(3) heap */ + int Function; /* The block function code */ +#define CONTINUE_EXT_FUNC_CODE 0x00 /* continuation subblock */ +#define COMMENT_EXT_FUNC_CODE 0xfe /* comment */ +#define GRAPHICS_EXT_FUNC_CODE 0xf9 /* graphics control (GIF89) */ +#define PLAINTEXT_EXT_FUNC_CODE 0x01 /* plaintext */ +#define APPLICATION_EXT_FUNC_CODE 0xff /* application block (GIF89) */ +} ExtensionBlock; + +typedef struct SavedImage { + GifImageDesc ImageDesc; + GifByteType *RasterBits; /* on malloc(3) heap */ + int ExtensionBlockCount; /* Count of extensions before image */ + ExtensionBlock *ExtensionBlocks; /* Extensions before image */ +} SavedImage; + +typedef struct GifFileType { + GifWord SWidth, SHeight; /* Size of virtual canvas */ + GifWord SColorResolution; /* How many colors can we generate? */ + GifWord SBackGroundColor; /* Background color for virtual canvas */ + GifByteType AspectByte; /* Used to compute pixel aspect ratio */ + ColorMapObject *SColorMap; /* Global colormap, NULL if nonexistent.
*/ + int ImageCount; /* Number of current image (both APIs) */ + GifImageDesc Image; /* Current image (low-level API) */ + SavedImage *SavedImages; /* Image sequence (high-level API) */ + int ExtensionBlockCount; /* Count extensions past last image */ + ExtensionBlock *ExtensionBlocks; /* Extensions past last image */ + int Error; /* Last error condition reported */ + void *UserData; /* hook to attach user data (TVT) */ + void *Private; /* Don't mess with this! */ +} GifFileType; + +#define GIF_ASPECT_RATIO(n) ((n) + 15.0 / 64.0) + +typedef enum { + UNDEFINED_RECORD_TYPE, + SCREEN_DESC_RECORD_TYPE, + IMAGE_DESC_RECORD_TYPE, /* Begin with ',' */ + EXTENSION_RECORD_TYPE, /* Begin with '!' */ + TERMINATE_RECORD_TYPE /* Begin with ';' */ +} GifRecordType; + +/* func type to read gif data from arbitrary sources (TVT) */ +typedef int (*InputFunc)(GifFileType *, GifByteType *, int); + +/* func type to write gif data to arbitrary targets. + * Returns count of bytes written. (MRB) + */ +typedef int (*OutputFunc)(GifFileType *, const GifByteType *, int); + +/****************************************************************************** + GIF89 structures +******************************************************************************/ + +typedef struct GraphicsControlBlock { + int DisposalMode; +#define DISPOSAL_UNSPECIFIED 0 /* No disposal specified. */ +#define DISPOSE_DO_NOT 1 /* Leave image in place */ +#define DISPOSE_BACKGROUND 2 /* Set area too background color */ +#define DISPOSE_PREVIOUS 3 /* Restore to previous content */ + bool UserInputFlag; /* User confirmation required before disposal */ + int DelayTime; /* pre-display delay in 0.01sec units */ + int TransparentColor; /* Palette index for transparency, -1 if none */ +#define NO_TRANSPARENT_COLOR -1 +} GraphicsControlBlock; + +/****************************************************************************** + GIF encoding routines +******************************************************************************/ + +/* Main entry points */ +GifFileType *EGifOpenFileName(const char *GifFileName, + const bool GifTestExistence, int *Error); +GifFileType *EGifOpenFileHandle(const int GifFileHandle, int *Error); +GifFileType *EGifOpen(void *userPtr, OutputFunc writeFunc, int *Error); +int EGifSpew(GifFileType *GifFile); +const char *EGifGetGifVersion(GifFileType *GifFile); /* new in 5.x */ +int EGifCloseFile(GifFileType *GifFile, int *ErrorCode); + +#define E_GIF_SUCCEEDED 0 +#define E_GIF_ERR_OPEN_FAILED 1 /* And EGif possible errors. */ +#define E_GIF_ERR_WRITE_FAILED 2 +#define E_GIF_ERR_HAS_SCRN_DSCR 3 +#define E_GIF_ERR_HAS_IMAG_DSCR 4 +#define E_GIF_ERR_NO_COLOR_MAP 5 +#define E_GIF_ERR_DATA_TOO_BIG 6 +#define E_GIF_ERR_NOT_ENOUGH_MEM 7 +#define E_GIF_ERR_DISK_IS_FULL 8 +#define E_GIF_ERR_CLOSE_FAILED 9 +#define E_GIF_ERR_NOT_WRITEABLE 10 + +/* These are legacy. 
You probably do not want to call them directly */ +int EGifPutScreenDesc(GifFileType *GifFile, const int GifWidth, + const int GifHeight, const int GifColorRes, + const int GifBackGround, + const ColorMapObject *GifColorMap); +int EGifPutImageDesc(GifFileType *GifFile, const int GifLeft, const int GifTop, + const int GifWidth, const int GifHeight, + const bool GifInterlace, + const ColorMapObject *GifColorMap); +void EGifSetGifVersion(GifFileType *GifFile, const bool gif89); +int EGifPutLine(GifFileType *GifFile, GifPixelType *GifLine, int GifLineLen); +int EGifPutPixel(GifFileType *GifFile, const GifPixelType GifPixel); +int EGifPutComment(GifFileType *GifFile, const char *GifComment); +int EGifPutExtensionLeader(GifFileType *GifFile, const int GifExtCode); +int EGifPutExtensionBlock(GifFileType *GifFile, const int GifExtLen, + const void *GifExtension); +int EGifPutExtensionTrailer(GifFileType *GifFile); +int EGifPutExtension(GifFileType *GifFile, const int GifExtCode, + const int GifExtLen, const void *GifExtension); +int EGifPutCode(GifFileType *GifFile, int GifCodeSize, + const GifByteType *GifCodeBlock); +int EGifPutCodeNext(GifFileType *GifFile, const GifByteType *GifCodeBlock); + +/****************************************************************************** + GIF decoding routines +******************************************************************************/ + +/* Main entry points */ +GifFileType *DGifOpenFileName(const char *GifFileName, int *Error); +GifFileType *DGifOpenFileHandle(int GifFileHandle, int *Error); +int DGifSlurp(GifFileType *GifFile); +GifFileType *DGifOpen(void *userPtr, InputFunc readFunc, + int *Error); /* new one (TVT) */ +int DGifCloseFile(GifFileType *GifFile, int *ErrorCode); + +#define D_GIF_SUCCEEDED 0 +#define D_GIF_ERR_OPEN_FAILED 101 /* And DGif possible errors. */ +#define D_GIF_ERR_READ_FAILED 102 +#define D_GIF_ERR_NOT_GIF_FILE 103 +#define D_GIF_ERR_NO_SCRN_DSCR 104 +#define D_GIF_ERR_NO_IMAG_DSCR 105 +#define D_GIF_ERR_NO_COLOR_MAP 106 +#define D_GIF_ERR_WRONG_RECORD 107 +#define D_GIF_ERR_DATA_TOO_BIG 108 +#define D_GIF_ERR_NOT_ENOUGH_MEM 109 +#define D_GIF_ERR_CLOSE_FAILED 110 +#define D_GIF_ERR_NOT_READABLE 111 +#define D_GIF_ERR_IMAGE_DEFECT 112 +#define D_GIF_ERR_EOF_TOO_SOON 113 + +/* These are legacy. You probably do not want to call them directly */ +int DGifGetScreenDesc(GifFileType *GifFile); +int DGifGetRecordType(GifFileType *GifFile, GifRecordType *GifType); +int DGifGetImageHeader(GifFileType *GifFile); +int DGifGetImageDesc(GifFileType *GifFile); +int DGifGetLine(GifFileType *GifFile, GifPixelType *GifLine, int GifLineLen); +int DGifGetPixel(GifFileType *GifFile, GifPixelType GifPixel); +int DGifGetExtension(GifFileType *GifFile, int *GifExtCode, + GifByteType **GifExtension); +int DGifGetExtensionNext(GifFileType *GifFile, GifByteType **GifExtension); +int DGifGetCode(GifFileType *GifFile, int *GifCodeSize, + GifByteType **GifCodeBlock); +int DGifGetCodeNext(GifFileType *GifFile, GifByteType **GifCodeBlock); +int DGifGetLZCodes(GifFileType *GifFile, int *GifCode); +const char *DGifGetGifVersion(GifFileType *GifFile); + +/****************************************************************************** + Error handling and reporting. +******************************************************************************/ +extern const char *GifErrorString(int ErrorCode); /* new in 2012 - ESR */ + +/***************************************************************************** + Everything below this point is new after version 1.2, supporting `slurp + mode' for doing I/O in two big belts with all the image-bearing data in core.
+******************************************************************************/ + +/****************************************************************************** + Color map handling from gif_alloc.c +******************************************************************************/ + +extern ColorMapObject *GifMakeMapObject(int ColorCount, + const GifColorType *ColorMap); +extern void GifFreeMapObject(ColorMapObject *Object); +extern ColorMapObject *GifUnionColorMap(const ColorMapObject *ColorIn1, + const ColorMapObject *ColorIn2, + GifPixelType ColorTransIn2[]); +extern int GifBitSize(int n); + +/****************************************************************************** + Support for the in-core structures allocation (slurp mode). +******************************************************************************/ + +extern void GifApplyTranslation(SavedImage *Image, + const GifPixelType Translation[]); +extern int GifAddExtensionBlock(int *ExtensionBlock_Count, + ExtensionBlock **ExtensionBlocks, int Function, + unsigned int Len, unsigned char ExtData[]); +extern void GifFreeExtensions(int *ExtensionBlock_Count, + ExtensionBlock **ExtensionBlocks); +extern SavedImage *GifMakeSavedImage(GifFileType *GifFile, + const SavedImage *CopyFrom); +extern void GifFreeSavedImages(GifFileType *GifFile); + +/****************************************************************************** + 5.x functions for GIF89 graphics control blocks +******************************************************************************/ + +int DGifExtensionToGCB(const size_t GifExtensionLength, + const GifByteType *GifExtension, + GraphicsControlBlock *GCB); +size_t EGifGCBToExtension(const GraphicsControlBlock *GCB, + GifByteType *GifExtension); + +int DGifSavedExtensionToGCB(GifFileType *GifFile, int ImageIndex, + GraphicsControlBlock *GCB); +int EGifGCBToSavedExtension(const GraphicsControlBlock *GCB, + GifFileType *GifFile, int ImageIndex); + +/****************************************************************************** + The library's internal utility font +******************************************************************************/ + +#define GIF_FONT_WIDTH 8 +#define GIF_FONT_HEIGHT 8 +extern const unsigned char GifAsciiTable8x8[][GIF_FONT_WIDTH]; + +extern void GifDrawText8x8(SavedImage *Image, const int x, const int y, + const char *legend, const int color); + +extern void GifDrawBox(SavedImage *Image, const int x, const int y, const int w, + const int d, const int color); + +extern void GifDrawRectangle(SavedImage *Image, const int x, const int y, + const int w, const int d, const int color); + +extern void GifDrawBoxedText8x8(SavedImage *Image, const int x, const int y, + const char *legend, const int border, + const int bg, const int fg); + +#ifdef __cplusplus +} +#endif /* __cplusplus */ +#endif /* _GIF_LIB_H */ + +/* end */ diff --git a/torchvision/csrc/io/image/cpu/giflib/gif_lib_private.h b/torchvision/csrc/io/image/cpu/giflib/gif_lib_private.h new file mode 100644 index 00000000000..04987150321 --- /dev/null +++ b/torchvision/csrc/io/image/cpu/giflib/gif_lib_private.h @@ -0,0 +1,73 @@ +// @nolint (improperly imported third-party code) +/**************************************************************************** + +gif_lib_private.h - internal giflib routines and structures + +SPDX-License-Identifier: MIT + +****************************************************************************/ + +#ifndef _GIF_LIB_PRIVATE_H +#define _GIF_LIB_PRIVATE_H + +#include "gif_hash.h" +#include "gif_lib.h" + +#ifndef 
SIZE_MAX +#define SIZE_MAX UINTPTR_MAX +#endif + +#define EXTENSION_INTRODUCER 0x21 +#define DESCRIPTOR_INTRODUCER 0x2c +#define TERMINATOR_INTRODUCER 0x3b + +#define LZ_MAX_CODE 4095 /* Biggest code possible in 12 bits. */ +#define LZ_BITS 12 + +#define FLUSH_OUTPUT 4096 /* Impossible code, to signal flush. */ +#define FIRST_CODE 4097 /* Impossible code, to signal first. */ +#define NO_SUCH_CODE 4098 /* Impossible code, to signal empty. */ + +#define FILE_STATE_WRITE 0x01 +#define FILE_STATE_SCREEN 0x02 +#define FILE_STATE_IMAGE 0x04 +#define FILE_STATE_READ 0x08 + +#define IS_READABLE(Private) (Private->FileState & FILE_STATE_READ) +#define IS_WRITEABLE(Private) (Private->FileState & FILE_STATE_WRITE) + +typedef struct GifFilePrivateType { + GifWord FileState, FileHandle, /* Where all this data goes to! */ + BitsPerPixel, /* Bits per pixel (Codes uses at least this + 1). */ + ClearCode, /* The CLEAR LZ code. */ + EOFCode, /* The EOF LZ code. */ + RunningCode, /* The next code algorithm can generate. */ + RunningBits, /* The number of bits required to represent + RunningCode. */ + MaxCode1, /* 1 bigger than max. possible code, in RunningBits bits. + */ + LastCode, /* The code before the current code. */ + CrntCode, /* Current algorithm code. */ + StackPtr, /* For character stack (see below). */ + CrntShiftState; /* Number of bits in CrntShiftDWord. */ + unsigned long CrntShiftDWord; /* For bytes decomposition into codes. */ + unsigned long PixelCount; /* Number of pixels in image. */ + FILE *File; /* File as stream. */ + InputFunc Read; /* function to read gif input (TVT) */ + OutputFunc Write; /* function to write gif output (MRB) */ + GifByteType Buf[256]; /* Compressed input is buffered here. */ + GifByteType Stack[LZ_MAX_CODE]; /* Decoded pixels are stacked here. */ + GifByteType Suffix[LZ_MAX_CODE + 1]; /* So we can trace the codes. */ + GifPrefixType Prefix[LZ_MAX_CODE + 1]; + GifHashTableType *HashTable; + bool gif89; +} GifFilePrivateType; + +#ifndef HAVE_REALLOCARRAY +extern void *openbsd_reallocarray(void *optr, size_t nmemb, size_t size); +#define reallocarray openbsd_reallocarray +#endif + +#endif /* _GIF_LIB_PRIVATE_H */ + +/* end */ diff --git a/torchvision/csrc/io/image/cpu/giflib/gifalloc.c b/torchvision/csrc/io/image/cpu/giflib/gifalloc.c new file mode 100644 index 00000000000..65679d22804 --- /dev/null +++ b/torchvision/csrc/io/image/cpu/giflib/gifalloc.c @@ -0,0 +1,426 @@ +// @nolint (improperly imported third-party code) +/***************************************************************************** + + GIF construction tools + +****************************************************************************/ +// SPDX-License-Identifier: MIT +// SPDX-FileCopyrightText: Copyright (C) Eric S. Raymond + +#include <stdio.h> +#include <stdlib.h> +#include <string.h> + +#include "gif_lib.h" +#include "gif_lib_private.h" + +#define MAX(x, y) (((x) > (y)) ?
(x) : (y)) + +/****************************************************************************** + Miscellaneous utility functions +******************************************************************************/ + +/* return smallest bitfield size n will fit in */ +int GifBitSize(int n) { + int i; + + for (i = 1; i <= 8; i++) { + if ((1 << i) >= n) { + break; + } + } + return (i); +} + +/****************************************************************************** + Color map object functions +******************************************************************************/ + +/* + * Allocate a color map of given size; initialize with contents of + * ColorMap if that pointer is non-NULL. + */ +ColorMapObject *GifMakeMapObject(int ColorCount, const GifColorType *ColorMap) { + ColorMapObject *Object; + + /*** FIXME: Our ColorCount has to be a power of two. Is it necessary to + * make the user know that or should we automatically round up instead? + */ + if (ColorCount != (1 << GifBitSize(ColorCount))) { + return ((ColorMapObject *)NULL); + } + + Object = (ColorMapObject *)malloc(sizeof(ColorMapObject)); + if (Object == (ColorMapObject *)NULL) { + return ((ColorMapObject *)NULL); + } + + Object->Colors = + (GifColorType *)calloc(ColorCount, sizeof(GifColorType)); + if (Object->Colors == (GifColorType *)NULL) { + free(Object); + return ((ColorMapObject *)NULL); + } + + Object->ColorCount = ColorCount; + Object->BitsPerPixel = GifBitSize(ColorCount); + Object->SortFlag = false; + + if (ColorMap != NULL) { + memcpy((char *)Object->Colors, (char *)ColorMap, + ColorCount * sizeof(GifColorType)); + } + + return (Object); +} + +/******************************************************************************* + Free a color map object +*******************************************************************************/ +void GifFreeMapObject(ColorMapObject *Object) { + if (Object != NULL) { + (void)free(Object->Colors); + (void)free(Object); + } +} + +#ifdef DEBUG +void DumpColorMap(ColorMapObject *Object, FILE *fp) { + if (Object != NULL) { + int i, j, Len = Object->ColorCount; + + for (i = 0; i < Len; i += 4) { + for (j = 0; j < 4 && j < Len; j++) { + (void)fprintf(fp, "%3d: %02x %02x %02x ", + i + j, Object->Colors[i + j].Red, + Object->Colors[i + j].Green, + Object->Colors[i + j].Blue); + } + (void)fprintf(fp, "\n"); + } + } +} +#endif /* DEBUG */ + +/******************************************************************************* + Compute the union of two given color maps and return it. If result can't + fit into 256 colors, NULL is returned, the allocated union otherwise. + ColorIn1 is copied as is to ColorUnion, while colors from ColorIn2 are + copied iff they didn't exist before. ColorTransIn2 maps the old + ColorIn2 into the ColorUnion color map table./ +*******************************************************************************/ +ColorMapObject *GifUnionColorMap(const ColorMapObject *ColorIn1, + const ColorMapObject *ColorIn2, + GifPixelType ColorTransIn2[]) { + int i, j, CrntSlot, RoundUpTo, NewGifBitSize; + ColorMapObject *ColorUnion; + + /* + * We don't worry about duplicates within either color map; if + * the caller wants to resolve those, he can perform unions + * with an empty color map. + */ + + /* Allocate table which will hold the result for sure. */ + ColorUnion = GifMakeMapObject( + MAX(ColorIn1->ColorCount, ColorIn2->ColorCount) * 2, NULL); + + if (ColorUnion == NULL) { + return (NULL); + } + + /* + * Copy ColorIn1 to ColorUnion. 
+ */ + for (i = 0; i < ColorIn1->ColorCount; i++) { + ColorUnion->Colors[i] = ColorIn1->Colors[i]; + } + CrntSlot = ColorIn1->ColorCount; + + /* + * Potentially obnoxious hack: + * + * Back CrntSlot down past all contiguous {0, 0, 0} slots at the end + * of table 1. This is very useful if your display is limited to + * 16 colors. + */ + while (ColorIn1->Colors[CrntSlot - 1].Red == 0 && + ColorIn1->Colors[CrntSlot - 1].Green == 0 && + ColorIn1->Colors[CrntSlot - 1].Blue == 0) { + CrntSlot--; + } + + /* Copy ColorIn2 to ColorUnion (use old colors if they exist): */ + for (i = 0; i < ColorIn2->ColorCount && CrntSlot <= 256; i++) { + /* Let's see if this color already exists: */ + for (j = 0; j < ColorIn1->ColorCount; j++) { + if (memcmp(&ColorIn1->Colors[j], &ColorIn2->Colors[i], + sizeof(GifColorType)) == 0) { + break; + } + } + + if (j < ColorIn1->ColorCount) { + ColorTransIn2[i] = j; /* color exists in Color1 */ + } else { + /* Color is new - copy it to a new slot: */ + ColorUnion->Colors[CrntSlot] = ColorIn2->Colors[i]; + ColorTransIn2[i] = CrntSlot++; + } + } + + if (CrntSlot > 256) { + GifFreeMapObject(ColorUnion); + return ((ColorMapObject *)NULL); + } + + NewGifBitSize = GifBitSize(CrntSlot); + RoundUpTo = (1 << NewGifBitSize); + + if (RoundUpTo != ColorUnion->ColorCount) { + GifColorType *Map = ColorUnion->Colors; + + /* + * Zero out slots up to next power of 2. + * We know these slots exist because of the way ColorUnion's + * start dimension was computed. + */ + for (j = CrntSlot; j < RoundUpTo; j++) { + Map[j].Red = Map[j].Green = Map[j].Blue = 0; + } + + /* perhaps we can shrink the map? */ + if (RoundUpTo < ColorUnion->ColorCount) { + GifColorType *new_map = (GifColorType *)reallocarray( + Map, RoundUpTo, sizeof(GifColorType)); + if (new_map == NULL) { + GifFreeMapObject(ColorUnion); + return ((ColorMapObject *)NULL); + } + ColorUnion->Colors = new_map; + } + } + + ColorUnion->ColorCount = RoundUpTo; + ColorUnion->BitsPerPixel = NewGifBitSize; + + return (ColorUnion); +} + +/******************************************************************************* + Apply a given color translation to the raster bits of an image +*******************************************************************************/ +void GifApplyTranslation(SavedImage *Image, const GifPixelType Translation[]) { + int i; + int RasterSize = + Image->ImageDesc.Height * Image->ImageDesc.Width; + + for (i = 0; i < RasterSize; i++) { + Image->RasterBits[i] = Translation[Image->RasterBits[i]]; + } +} + +/****************************************************************************** + Extension record functions +******************************************************************************/ +int GifAddExtensionBlock(int *ExtensionBlockCount, + ExtensionBlock **ExtensionBlocks, int Function, + unsigned int Len, unsigned char ExtData[]) { + ExtensionBlock *ep; + + if (*ExtensionBlocks == NULL) { + *ExtensionBlocks = + (ExtensionBlock *)malloc(sizeof(ExtensionBlock)); + } else { + ExtensionBlock *ep_new = (ExtensionBlock *)reallocarray( + *ExtensionBlocks, (*ExtensionBlockCount + 1), + sizeof(ExtensionBlock)); + if (ep_new == NULL) { + return (GIF_ERROR); + } + *ExtensionBlocks = ep_new; + } + + if (*ExtensionBlocks == NULL) { + return (GIF_ERROR); + } + + ep = &(*ExtensionBlocks)[(*ExtensionBlockCount)++]; + + ep->Function = Function; + ep->ByteCount = Len; + ep->Bytes = (GifByteType *)malloc(ep->ByteCount); + if (ep->Bytes == NULL) { + return (GIF_ERROR); + } + + if (ExtData != NULL) { + memcpy(ep->Bytes, ExtData, Len); + 
} + + return (GIF_OK); +} + +void GifFreeExtensions(int *ExtensionBlockCount, + ExtensionBlock **ExtensionBlocks) { + ExtensionBlock *ep; + + if (*ExtensionBlocks == NULL) { + return; + } + + for (ep = *ExtensionBlocks; + ep < (*ExtensionBlocks + *ExtensionBlockCount); ep++) { + (void)free((char *)ep->Bytes); + } + (void)free((char *)*ExtensionBlocks); + *ExtensionBlocks = NULL; + *ExtensionBlockCount = 0; +} + +/****************************************************************************** + Image block allocation functions +******************************************************************************/ + +/* Private Function: + * Frees the last image in the GifFile->SavedImages array + */ +void FreeLastSavedImage(GifFileType *GifFile) { + SavedImage *sp; + + if ((GifFile == NULL) || (GifFile->SavedImages == NULL)) { + return; + } + + /* Remove one SavedImage from the GifFile */ + GifFile->ImageCount--; + sp = &GifFile->SavedImages[GifFile->ImageCount]; + + /* Deallocate its Colormap */ + if (sp->ImageDesc.ColorMap != NULL) { + GifFreeMapObject(sp->ImageDesc.ColorMap); + sp->ImageDesc.ColorMap = NULL; + } + + /* Deallocate the image data */ + if (sp->RasterBits != NULL) { + free((char *)sp->RasterBits); + } + + /* Deallocate any extensions */ + GifFreeExtensions(&sp->ExtensionBlockCount, &sp->ExtensionBlocks); + + /*** FIXME: We could realloc the GifFile->SavedImages structure but is + * there a point to it? Saves some memory but we'd have to do it every + * time. If this is used in GifFreeSavedImages then it would be + * inefficient (The whole array is going to be deallocated.) If we just + * use it when we want to free the last Image it's convenient to do it + * here. + */ +} + +/* + * Append an image block to the SavedImages array + */ +SavedImage *GifMakeSavedImage(GifFileType *GifFile, + const SavedImage *CopyFrom) { + // cppcheck-suppress ctunullpointer + if (GifFile->SavedImages == NULL) { + GifFile->SavedImages = (SavedImage *)malloc(sizeof(SavedImage)); + } else { + SavedImage *newSavedImages = (SavedImage *)reallocarray( + GifFile->SavedImages, (GifFile->ImageCount + 1), + sizeof(SavedImage)); + if (newSavedImages == NULL) { + return ((SavedImage *)NULL); + } + GifFile->SavedImages = newSavedImages; + } + if (GifFile->SavedImages == NULL) { + return ((SavedImage *)NULL); + } else { + SavedImage *sp = &GifFile->SavedImages[GifFile->ImageCount++]; + + if (CopyFrom != NULL) { + memcpy((char *)sp, CopyFrom, sizeof(SavedImage)); + + /* + * Make our own allocated copies of the heap fields in + * the copied record. This guards against potential + * aliasing problems. 
+ */ + + /* first, the local color map */ + if (CopyFrom->ImageDesc.ColorMap != NULL) { + sp->ImageDesc.ColorMap = GifMakeMapObject( + CopyFrom->ImageDesc.ColorMap->ColorCount, + CopyFrom->ImageDesc.ColorMap->Colors); + if (sp->ImageDesc.ColorMap == NULL) { + FreeLastSavedImage(GifFile); + return (SavedImage *)(NULL); + } + } + + /* next, the raster */ + sp->RasterBits = (unsigned char *)reallocarray( + NULL, + (CopyFrom->ImageDesc.Height * + CopyFrom->ImageDesc.Width), + sizeof(GifPixelType)); + if (sp->RasterBits == NULL) { + FreeLastSavedImage(GifFile); + return (SavedImage *)(NULL); + } + memcpy(sp->RasterBits, CopyFrom->RasterBits, + sizeof(GifPixelType) * + CopyFrom->ImageDesc.Height * + CopyFrom->ImageDesc.Width); + + /* finally, the extension blocks */ + if (CopyFrom->ExtensionBlocks != NULL) { + sp->ExtensionBlocks = + (ExtensionBlock *)reallocarray( + NULL, CopyFrom->ExtensionBlockCount, + sizeof(ExtensionBlock)); + if (sp->ExtensionBlocks == NULL) { + FreeLastSavedImage(GifFile); + return (SavedImage *)(NULL); + } + memcpy(sp->ExtensionBlocks, + CopyFrom->ExtensionBlocks, + sizeof(ExtensionBlock) * + CopyFrom->ExtensionBlockCount); + } + } else { + memset((char *)sp, '\0', sizeof(SavedImage)); + } + + return (sp); + } +} + +void GifFreeSavedImages(GifFileType *GifFile) { + SavedImage *sp; + + if ((GifFile == NULL) || (GifFile->SavedImages == NULL)) { + return; + } + for (sp = GifFile->SavedImages; + sp < GifFile->SavedImages + GifFile->ImageCount; sp++) { + if (sp->ImageDesc.ColorMap != NULL) { + GifFreeMapObject(sp->ImageDesc.ColorMap); + sp->ImageDesc.ColorMap = NULL; + } + + if (sp->RasterBits != NULL) { + free((char *)sp->RasterBits); + } + + GifFreeExtensions(&sp->ExtensionBlockCount, + &sp->ExtensionBlocks); + } + free((char *)GifFile->SavedImages); + GifFile->SavedImages = NULL; +} + +/* end */ diff --git a/torchvision/csrc/io/image/cpu/giflib/openbsd-reallocarray.c b/torchvision/csrc/io/image/cpu/giflib/openbsd-reallocarray.c new file mode 100644 index 00000000000..7d5f1e73a7d --- /dev/null +++ b/torchvision/csrc/io/image/cpu/giflib/openbsd-reallocarray.c @@ -0,0 +1,74 @@ +// @nolint (improperly imported third-party code) +/* + * SPDX-FileCopyrightText: Copyright (C) 2008 Otto Moerbeek + * SPDX-License-Identifier: MIT + */ + +#include <errno.h> +#include <stdint.h> +#include <stdlib.h> +#include <string.h> + +#ifndef SIZE_MAX +#define SIZE_MAX UINTPTR_MAX +#endif + +/* + * This is sqrt(SIZE_MAX+1), as s1*s2 <= SIZE_MAX + * if both s1 < MUL_NO_OVERFLOW and s2 < MUL_NO_OVERFLOW + */ +#define MUL_NO_OVERFLOW ((size_t)1 << (sizeof(size_t) * 4)) + +void *openbsd_reallocarray(void *optr, size_t nmemb, size_t size) { + if ((nmemb >= MUL_NO_OVERFLOW || size >= MUL_NO_OVERFLOW) && + nmemb > 0 && SIZE_MAX / nmemb < size) { + errno = ENOMEM; + return NULL; + } + /* + * Head off variations in realloc behavior on different + * platforms (reported by MarkR) + * + * The behaviour of reallocarray is implementation-defined if + * nmemb or size is zero. It can return NULL or non-NULL + * depending on the platform. + * https://www.securecoding.cert.org/confluence/display/c/MEM04-C.Beware+of+zero-lengthallocations + * + * Here are some extracts from realloc man pages on different platforms. + * + * void *realloc( void *memblock, size_t size ); + * + * Windows: + * + * If there is not enough available memory to expand the block + * to the given size, the original block is left unchanged, + * and NULL is returned. If size is zero, then the block + * pointed to by memblock is freed; the return value is NULL, + * and memblock is left pointing at a freed block. + * + * OpenBSD: + * + * If size or nmemb is equal to 0, a unique pointer to an + * access protected, zero sized object is returned. Access via + * this pointer will generate a SIGSEGV exception. + * + * Linux: + * + * If size was equal to 0, either NULL or a pointer suitable + * to be passed to free() is returned. + * + * OS X: + * + * If size is zero and ptr is not NULL, a new, minimum sized + * object is allocated and the original object is freed. + * + * It looks like images with zero width or height can trigger + * this, and fuzzing behaviour will differ by platform, so + * fuzzing on one platform may not detect zero-size allocation + * problems on other platforms. + */ + if (size == 0 || nmemb == 0) { + return NULL; + } + return realloc(optr, size * nmemb); +}
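(Editorial aside: the guard at the top of openbsd_reallocarray() deserves a note. MUL_NO_OVERFLOW is 2^(half the bits of size_t), so the division only runs when both operands are large enough that nmemb * size could actually wrap. A self-contained check of that predicate:)

    #include <stdint.h>
    #include <stdio.h>

    #define MUL_NO_OVERFLOW ((size_t)1 << (sizeof(size_t) * 4))

    /* Same predicate as the guard in openbsd_reallocarray(). */
    static int would_overflow(size_t nmemb, size_t size) {
        return (nmemb >= MUL_NO_OVERFLOW || size >= MUL_NO_OVERFLOW) &&
               nmemb > 0 && SIZE_MAX / nmemb < size;
    }

    int main(void) {
        printf("%d\n", would_overflow(SIZE_MAX / 2, 3)); /* 1: wraps */
        printf("%d\n", would_overflow(1024, 1024));      /* 0: fits */
        return 0;
    }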
diff --git a/torchvision/csrc/io/image/cpu/read_write_file.cpp b/torchvision/csrc/io/image/cpu/read_write_file.cpp new file mode 100644 index 00000000000..06de72a5053 --- /dev/null +++ b/torchvision/csrc/io/image/cpu/read_write_file.cpp @@ -0,0 +1,108 @@ +#include "read_write_file.h" + +#include <sys/stat.h> + +#ifdef _WIN32 +#define WIN32_LEAN_AND_MEAN +#include <Windows.h> +#endif + +namespace vision { +namespace image { + +#ifdef _WIN32 +namespace { +std::wstring utf8_decode(const std::string& str) { + if (str.empty()) { + return std::wstring(); + } + int size_needed = MultiByteToWideChar( + CP_UTF8, 0, str.c_str(), static_cast<int>(str.size()), nullptr, 0); + TORCH_CHECK(size_needed > 0, "Error converting the content to Unicode"); + std::wstring wstrTo(size_needed, 0); + MultiByteToWideChar( + CP_UTF8, + 0, + str.c_str(), + static_cast<int>(str.size()), + &wstrTo[0], + size_needed); + return wstrTo; +} +} // namespace +#endif + +torch::Tensor read_file(const std::string& filename) { + C10_LOG_API_USAGE_ONCE( + "torchvision.csrc.io.image.cpu.read_write_file.read_file"); +#ifdef _WIN32 + // According to + // https://docs.microsoft.com/en-us/cpp/c-runtime-library/reference/stat-functions?view=vs-2019, + // we should use struct __stat64 and _wstat64 for 64-bit file size on Windows. + struct __stat64 stat_buf; + auto fileW = utf8_decode(filename); + int rc = _wstat64(fileW.c_str(), &stat_buf); +#else + struct stat stat_buf; + int rc = stat(filename.c_str(), &stat_buf); +#endif + // errno is a variable defined in errno.h + TORCH_CHECK( + rc == 0, "[Errno ", errno, "] ", strerror(errno), ": '", filename, "'"); + + int64_t size = stat_buf.st_size; + + TORCH_CHECK(size > 0, "Expected a non empty file"); + +#ifdef _WIN32 + // TODO: Once torch::from_file handles UTF-8 paths correctly, we should move + // back to use the following implementation since it uses file mapping.
+ // auto data = + // torch::from_file(filename, /*shared=*/false, /*size=*/size, + // torch::kU8).clone() + FILE* infile = _wfopen(fileW.c_str(), L"rb"); + + TORCH_CHECK(infile != nullptr, "Error opening input file"); + + auto data = torch::empty({size}, torch::kU8); + auto dataBytes = data.data_ptr<uint8_t>(); + + fread(dataBytes, sizeof(uint8_t), size, infile); + fclose(infile); +#else + auto data = + torch::from_file(filename, /*shared=*/false, /*size=*/size, torch::kU8); +#endif + + return data; +} + +void write_file(const std::string& filename, torch::Tensor& data) { + C10_LOG_API_USAGE_ONCE( + "torchvision.csrc.io.image.cpu.read_write_file.write_file"); + // Check that the input tensor is on CPU + TORCH_CHECK(data.device() == torch::kCPU, "Input tensor should be on CPU"); + + // Check that the input tensor dtype is uint8 + TORCH_CHECK(data.dtype() == torch::kU8, "Input tensor dtype should be uint8"); + + // Check that the input tensor is 1-dimensional + TORCH_CHECK(data.dim() == 1, "Input data should be a 1-dimensional tensor"); + + auto fileBytes = data.data_ptr<uint8_t>(); + auto fileCStr = filename.c_str(); +#ifdef _WIN32 + auto fileW = utf8_decode(filename); + FILE* outfile = _wfopen(fileW.c_str(), L"wb"); +#else + FILE* outfile = fopen(fileCStr, "wb"); +#endif + + TORCH_CHECK(outfile != nullptr, "Error opening output file"); + + fwrite(fileBytes, sizeof(uint8_t), data.numel(), outfile); + fclose(outfile); +} + +} // namespace image +} // namespace vision diff --git a/torchvision/csrc/io/image/cpu/read_write_file.h b/torchvision/csrc/io/image/cpu/read_write_file.h new file mode 100644 index 00000000000..a5a712dd8e2 --- /dev/null +++ b/torchvision/csrc/io/image/cpu/read_write_file.h @@ -0,0 +1,13 @@ +#pragma once + +#include <torch/types.h> + +namespace vision { +namespace image { + +C10_EXPORT torch::Tensor read_file(const std::string& filename); + +C10_EXPORT void write_file(const std::string& filename, torch::Tensor& data); + +} // namespace image +} // namespace vision diff --git a/torchvision/csrc/io/image/cuda/decode_jpegs_cuda.cpp b/torchvision/csrc/io/image/cuda/decode_jpegs_cuda.cpp new file mode 100644 index 00000000000..2079ca5f919 --- /dev/null +++ b/torchvision/csrc/io/image/cuda/decode_jpegs_cuda.cpp @@ -0,0 +1,603 @@ +#include "decode_jpegs_cuda.h" +#if !NVJPEG_FOUND +namespace vision { +namespace image { +std::vector<torch::Tensor> decode_jpegs_cuda( + const std::vector<torch::Tensor>& encoded_images, + vision::image::ImageReadMode mode, + torch::Device device) { + TORCH_CHECK( + false, "decode_jpegs_cuda: torchvision not compiled with nvJPEG support"); +} +} // namespace image +} // namespace vision + +#else +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +namespace vision { +namespace image { + +std::mutex decoderMutex; +std::unique_ptr<CUDAJpegDecoder> cudaJpegDecoder; + +std::vector<torch::Tensor> decode_jpegs_cuda( + const std::vector<torch::Tensor>& encoded_images, + vision::image::ImageReadMode mode, + torch::Device device) { + C10_LOG_API_USAGE_ONCE( + "torchvision.csrc.io.image.cuda.decode_jpegs_cuda.decode_jpegs_cuda"); + + std::lock_guard<std::mutex> lock(decoderMutex); + std::vector<torch::Tensor> contig_images; + contig_images.reserve(encoded_images.size()); + + TORCH_CHECK( + device.is_cuda(), "Expected the device parameter to be a cuda device"); + + for (auto& encoded_image : encoded_images) { + TORCH_CHECK( + encoded_image.dtype() == torch::kU8, "Expected a torch.uint8 tensor"); + + TORCH_CHECK( + !encoded_image.is_cuda(), + "The input tensor must be on CPU when decoding with nvjpeg") + + TORCH_CHECK(
+    TORCH_CHECK(
+        encoded_image.dim() == 1 && encoded_image.numel() > 0,
+        "Expected a non empty 1-dimensional tensor");
+
+    // nvjpeg requires images to be contiguous
+    if (encoded_image.is_contiguous()) {
+      contig_images.push_back(encoded_image);
+    } else {
+      contig_images.push_back(encoded_image.contiguous());
+    }
+  }
+
+  int major_version;
+  int minor_version;
+  nvjpegStatus_t get_major_property_status =
+      nvjpegGetProperty(MAJOR_VERSION, &major_version);
+  nvjpegStatus_t get_minor_property_status =
+      nvjpegGetProperty(MINOR_VERSION, &minor_version);
+
+  TORCH_CHECK(
+      get_major_property_status == NVJPEG_STATUS_SUCCESS,
+      "nvjpegGetProperty failed: ",
+      get_major_property_status);
+  TORCH_CHECK(
+      get_minor_property_status == NVJPEG_STATUS_SUCCESS,
+      "nvjpegGetProperty failed: ",
+      get_minor_property_status);
+  if ((major_version < 11) || ((major_version == 11) && (minor_version < 6))) {
+    TORCH_WARN_ONCE(
+        "There is a memory leak issue in the nvjpeg library for CUDA versions < 11.6. "
+        "Make sure to rely on CUDA 11.6 or above before using decode_jpeg(..., device='cuda').");
+  }
+
+  at::cuda::CUDAGuard device_guard(device);
+
+  if (cudaJpegDecoder == nullptr || device != cudaJpegDecoder->target_device) {
+    if (cudaJpegDecoder != nullptr)
+      cudaJpegDecoder.reset(new CUDAJpegDecoder(device));
+    else {
+      cudaJpegDecoder = std::make_unique<CUDAJpegDecoder>(device);
+      std::atexit([]() { cudaJpegDecoder.reset(); });
+    }
+  }
+
+  nvjpegOutputFormat_t output_format;
+
+  switch (mode) {
+    case vision::image::IMAGE_READ_MODE_UNCHANGED:
+      // Using NVJPEG_OUTPUT_UNCHANGED can produce output channels of
+      // different sizes, which appears to be related to the chroma
+      // subsampling used; we are not sure why this is the case. For now we
+      // decode as RGB and strip the extra channels from grayscale images
+      // afterwards.
+      output_format = NVJPEG_OUTPUT_UNCHANGED;
+      break;
+    case vision::image::IMAGE_READ_MODE_GRAY:
+      output_format = NVJPEG_OUTPUT_Y;
+      break;
+    case vision::image::IMAGE_READ_MODE_RGB:
+      output_format = NVJPEG_OUTPUT_RGB;
+      break;
+    default:
+      TORCH_CHECK(
+          false, "The provided mode is not supported for JPEG decoding on GPU");
+  }
+
+  try {
+    at::cuda::CUDAEvent event;
+    auto result = cudaJpegDecoder->decode_images(contig_images, output_format);
+    auto current_stream{
+        device.has_index() ? at::cuda::getCurrentCUDAStream(
+                                 cudaJpegDecoder->original_device.index())
+                           : at::cuda::getCurrentCUDAStream()};
+    event.record(cudaJpegDecoder->stream);
+    event.block(current_stream);
+    return result;
+  } catch (const std::exception& e) {
+    if (typeid(e) != typeid(std::runtime_error)) {
+      TORCH_CHECK(false, "Error while decoding JPEG images: ", e.what());
+    } else {
+      throw;
+    }
+  }
+}
+
+CUDAJpegDecoder::CUDAJpegDecoder(const torch::Device& target_device)
+    : original_device{torch::kCUDA, c10::cuda::current_device()},
+      target_device{target_device},
+      stream{
+          target_device.has_index()
+              ? at::cuda::getStreamFromPool(false, target_device.index())
+              : at::cuda::getStreamFromPool(false)} {
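+  // Note on the initializer list above: the decoder deliberately pulls a
+  // dedicated stream from the pool instead of using the caller's current
+  // stream; decode_jpegs_cuda() then orders the two streams with a CUDAEvent
+  // rather than a host-side synchronize.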
+  nvjpegStatus_t status;
+
+  hw_decode_available = true;
+  status = nvjpegCreateEx(
+      NVJPEG_BACKEND_HARDWARE,
+      NULL,
+      NULL,
+      NVJPEG_FLAGS_DEFAULT,
+      &nvjpeg_handle);
+  if (status == NVJPEG_STATUS_ARCH_MISMATCH) {
+    status = nvjpegCreateEx(
+        NVJPEG_BACKEND_DEFAULT,
+        NULL,
+        NULL,
+        NVJPEG_FLAGS_DEFAULT,
+        &nvjpeg_handle);
+    TORCH_CHECK(
+        status == NVJPEG_STATUS_SUCCESS,
+        "Failed to initialize nvjpeg with default backend: ",
+        status);
+    hw_decode_available = false;
+  } else {
+    TORCH_CHECK(
+        status == NVJPEG_STATUS_SUCCESS,
+        "Failed to initialize nvjpeg with hardware backend: ",
+        status);
+  }
+
+  status = nvjpegJpegStateCreate(nvjpeg_handle, &nvjpeg_state);
+  TORCH_CHECK(
+      status == NVJPEG_STATUS_SUCCESS,
+      "Failed to create nvjpeg state: ",
+      status);
+
+  status = nvjpegDecoderCreate(
+      nvjpeg_handle, NVJPEG_BACKEND_DEFAULT, &nvjpeg_decoder);
+  TORCH_CHECK(
+      status == NVJPEG_STATUS_SUCCESS,
+      "Failed to create nvjpeg decoder: ",
+      status);
+
+  status = nvjpegDecoderStateCreate(
+      nvjpeg_handle, nvjpeg_decoder, &nvjpeg_decoupled_state);
+  TORCH_CHECK(
+      status == NVJPEG_STATUS_SUCCESS,
+      "Failed to create nvjpeg decoder state: ",
+      status);
+
+  status = nvjpegBufferPinnedCreate(nvjpeg_handle, NULL, &pinned_buffers[0]);
+  TORCH_CHECK(
+      status == NVJPEG_STATUS_SUCCESS,
+      "Failed to create pinned buffer: ",
+      status);
+
+  status = nvjpegBufferPinnedCreate(nvjpeg_handle, NULL, &pinned_buffers[1]);
+  TORCH_CHECK(
+      status == NVJPEG_STATUS_SUCCESS,
+      "Failed to create pinned buffer: ",
+      status);
+
+  status = nvjpegBufferDeviceCreate(nvjpeg_handle, NULL, &device_buffer);
+  TORCH_CHECK(
+      status == NVJPEG_STATUS_SUCCESS,
+      "Failed to create device buffer: ",
+      status);
+
+  status = nvjpegJpegStreamCreate(nvjpeg_handle, &jpeg_streams[0]);
+  TORCH_CHECK(
+      status == NVJPEG_STATUS_SUCCESS,
+      "Failed to create jpeg stream: ",
+      status);
+
+  status = nvjpegJpegStreamCreate(nvjpeg_handle, &jpeg_streams[1]);
+  TORCH_CHECK(
+      status == NVJPEG_STATUS_SUCCESS,
+      "Failed to create jpeg stream: ",
+      status);
+
+  status = nvjpegDecodeParamsCreate(nvjpeg_handle, &nvjpeg_decode_params);
+  TORCH_CHECK(
+      status == NVJPEG_STATUS_SUCCESS,
+      "Failed to create decode params: ",
+      status);
+}
+
+CUDAJpegDecoder::~CUDAJpegDecoder() {
+  /*
+  The below code works on Mac and Linux, but fails on Windows.
+  This is because on Windows, the atexit hook which calls this
+  destructor executes after cuda is already shut down causing SIGSEGV.
+  We do not have a solution to this problem at the moment, so we'll
+  just leak the libnvjpeg & cuda variables for the time being and hope
+  that the CUDA runtime handles cleanup for us.
+  Please send a PR if you have a solution for this problem.
+  */
+
+  // nvjpegStatus_t status;
+
+  // status = nvjpegDecodeParamsDestroy(nvjpeg_decode_params);
+  // TORCH_CHECK(
+  //     status == NVJPEG_STATUS_SUCCESS,
+  //     "Failed to destroy nvjpeg decode params: ",
+  //     status);
+
+  // status = nvjpegJpegStreamDestroy(jpeg_streams[0]);
+  // TORCH_CHECK(
+  //     status == NVJPEG_STATUS_SUCCESS,
+  //     "Failed to destroy jpeg stream: ",
+  //     status);
+
+  // status = nvjpegJpegStreamDestroy(jpeg_streams[1]);
+  // TORCH_CHECK(
+  //     status == NVJPEG_STATUS_SUCCESS,
+  //     "Failed to destroy jpeg stream: ",
+  //     status);
+
+  // status = nvjpegBufferPinnedDestroy(pinned_buffers[0]);
+  // TORCH_CHECK(
+  //     status == NVJPEG_STATUS_SUCCESS,
+  //     "Failed to destroy pinned buffer[0]: ",
+  //     status);
+
+  // status = nvjpegBufferPinnedDestroy(pinned_buffers[1]);
+  // TORCH_CHECK(
+  //     status == NVJPEG_STATUS_SUCCESS,
+  //     "Failed to destroy pinned buffer[1]: ",
+  //     status);
+
+  // status = nvjpegBufferDeviceDestroy(device_buffer);
+  // TORCH_CHECK(
+  //     status == NVJPEG_STATUS_SUCCESS,
+  //     "Failed to destroy device buffer: ",
+  //     status);
+
+  // status = nvjpegJpegStateDestroy(nvjpeg_decoupled_state);
+  // TORCH_CHECK(
+  //     status == NVJPEG_STATUS_SUCCESS,
+  //     "Failed to destroy nvjpeg decoupled state: ",
+  //     status);
+
+  // status = nvjpegDecoderDestroy(nvjpeg_decoder);
+  // TORCH_CHECK(
+  //     status == NVJPEG_STATUS_SUCCESS,
+  //     "Failed to destroy nvjpeg decoder: ",
+  //     status);
+
+  // status = nvjpegJpegStateDestroy(nvjpeg_state);
+  // TORCH_CHECK(
+  //     status == NVJPEG_STATUS_SUCCESS,
+  //     "Failed to destroy nvjpeg state: ",
+  //     status);
+
+  // status = nvjpegDestroy(nvjpeg_handle);
+  // TORCH_CHECK(
+  //     status == NVJPEG_STATUS_SUCCESS, "nvjpegDestroy failed: ", status);
+}
+
+std::tuple<
+    std::vector<nvjpegImage_t>,
+    std::vector<torch::Tensor>,
+    std::vector<int>>
+CUDAJpegDecoder::prepare_buffers(
+    const std::vector<torch::Tensor>& encoded_images,
+    const nvjpegOutputFormat_t& output_format) {
+  /*
+  This function scans the encoded images' jpeg headers and
+  allocates decoding buffers based on the metadata found
+
+  Args:
+  - encoded_images (std::vector<torch::Tensor>): a vector of tensors
+    containing the jpeg bitstreams to be decoded. Each tensor must have dtype
+    torch.uint8 and device cpu
+  - output_format (nvjpegOutputFormat_t): NVJPEG_OUTPUT_RGB, NVJPEG_OUTPUT_Y
+    or NVJPEG_OUTPUT_UNCHANGED
+
+  Returns:
+  - decoded_images (std::vector<nvjpegImage_t>): a vector of nvjpegImages
+    containing pointers to the memory of the decoded images
+  - output_tensors (std::vector<torch::Tensor>): a vector of Tensors
+    containing the decoded images. `decoded_images` points to the memory of
+    output_tensors
+  - channels (std::vector<int>): a vector of ints containing the number of
+    output image channels for every image
+  */
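+  // An nvjpegImage_t is a fixed array of NVJPEG_MAX_COMPONENT (channel
+  // pointer, pitch) pairs; the loop below points each used channel at one
+  // plane of the freshly allocated output tensor and nulls out the rest.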
+  int width[NVJPEG_MAX_COMPONENT];
+  int height[NVJPEG_MAX_COMPONENT];
+  std::vector<int> channels(encoded_images.size());
+  nvjpegChromaSubsampling_t subsampling;
+  nvjpegStatus_t status;
+
+  std::vector<torch::Tensor> output_tensors{encoded_images.size()};
+  std::vector<nvjpegImage_t> decoded_images{encoded_images.size()};
+
+  for (std::vector<torch::Tensor>::size_type i = 0;
+       i < encoded_images.size();
+       i++) {
+    // extract bitstream meta data to figure out the number of channels,
+    // height and width for every image
+    status = nvjpegGetImageInfo(
+        nvjpeg_handle,
+        (unsigned char*)encoded_images[i].data_ptr(),
+        encoded_images[i].numel(),
+        &channels[i],
+        &subsampling,
+        width,
+        height);
+    TORCH_CHECK(
+        status == NVJPEG_STATUS_SUCCESS, "Failed to get image info: ", status);
+
+    TORCH_CHECK(
+        subsampling != NVJPEG_CSS_UNKNOWN, "Unknown chroma subsampling");
+
+    // output channels may be different from the actual number of channels in
+    // the image, e.g. we decode a grayscale image as RGB and slice off the
+    // extra channels later
+    int output_channels = 3;
+    if (output_format == NVJPEG_OUTPUT_RGB ||
+        output_format == NVJPEG_OUTPUT_UNCHANGED) {
+      output_channels = 3;
+    } else if (output_format == NVJPEG_OUTPUT_Y) {
+      output_channels = 1;
+    }
+
+    // reserve output buffer
+    auto output_tensor = torch::empty(
+        {int64_t(output_channels), int64_t(height[0]), int64_t(width[0])},
+        torch::dtype(torch::kU8).device(target_device));
+    output_tensors[i] = output_tensor;
+
+    // fill nvjpegImage_t struct
+    for (int c = 0; c < output_channels; c++) {
+      decoded_images[i].channel[c] = output_tensor[c].data_ptr<uint8_t>();
+      decoded_images[i].pitch[c] = width[0];
+    }
+    for (int c = output_channels; c < NVJPEG_MAX_COMPONENT; c++) {
+      decoded_images[i].channel[c] = NULL;
+      decoded_images[i].pitch[c] = 0;
+    }
+  }
+  return {decoded_images, output_tensors, channels};
+}
+
+std::vector<torch::Tensor> CUDAJpegDecoder::decode_images(
+    const std::vector<torch::Tensor>& encoded_images,
+    const nvjpegOutputFormat_t& output_format) {
+  /*
+  This function decodes a batch of jpeg bitstreams.
+  We scan all encoded bitstreams and sort them into two groups:
+  1. Baseline JPEGs: Can be decoded with hardware support on A100+ GPUs.
+  2. Other JPEGs (e.g. progressive JPEGs): Can also be decoded on the
+  GPU (albeit with software support only) but need some preprocessing on the
+  host first.
+
+  See
+  https://github.com/NVIDIA/CUDALibrarySamples/blob/f17940ac4e705bf47a8c39f5365925c1665f6c98/nvJPEG/nvJPEG-Decoder/nvjpegDecoder.cpp#L33
+  for reference.
+
+  Args:
+  - encoded_images (std::vector<torch::Tensor>): a vector of tensors
+    containing the jpeg bitstreams to be decoded
+  - output_format (nvjpegOutputFormat_t): NVJPEG_OUTPUT_RGB, NVJPEG_OUTPUT_Y
+    or NVJPEG_OUTPUT_UNCHANGED
+
+  Returns:
+  - output_tensors (std::vector<torch::Tensor>): a vector of Tensors
+    containing the decoded images
+  */
+
+  auto [decoded_imgs_buf, output_tensors, channels] =
+      prepare_buffers(encoded_images, output_format);
+
+  nvjpegStatus_t status;
+  cudaError_t cudaStatus;
+
+  cudaStatus = cudaStreamSynchronize(stream);
+  TORCH_CHECK(
+      cudaStatus == cudaSuccess,
+      "Failed to synchronize CUDA stream: ",
+      cudaStatus);
+
+  // baseline JPEGs can be batch decoded with hardware support on A100+ GPUs
+  // ultra fast!
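+  // (For intuition: a plain sequential/baseline JPEG, the most common kind,
+  // is eligible for the batched hardware path, while e.g. a progressive JPEG
+  // falls through to the one-by-one software path below.)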
+  std::vector<const unsigned char*> hw_input_buffer;
+  std::vector<size_t> hw_input_buffer_size;
+  std::vector<nvjpegImage_t> hw_output_buffer;
+
+  // other JPEG types such as progressive JPEGs can be decoded one-by-one in
+  // software only; slow :(
+  std::vector<const unsigned char*> sw_input_buffer;
+  std::vector<size_t> sw_input_buffer_size;
+  std::vector<nvjpegImage_t> sw_output_buffer;
+
+  if (hw_decode_available) {
+    for (std::vector<torch::Tensor>::size_type i = 0;
+         i < encoded_images.size();
+         ++i) {
+      // extract bitstream meta data to figure out whether a bit-stream can be
+      // decoded
+      nvjpegJpegStreamParseHeader(
+          nvjpeg_handle,
+          encoded_images[i].data_ptr<uint8_t>(),
+          encoded_images[i].numel(),
+          jpeg_streams[0]);
+      int isSupported = -1;
+      nvjpegDecodeBatchedSupported(
+          nvjpeg_handle, jpeg_streams[0], &isSupported);
+
+      if (isSupported == 0) {
+        hw_input_buffer.push_back(encoded_images[i].data_ptr<uint8_t>());
+        hw_input_buffer_size.push_back(encoded_images[i].numel());
+        hw_output_buffer.push_back(decoded_imgs_buf[i]);
+      } else {
+        sw_input_buffer.push_back(encoded_images[i].data_ptr<uint8_t>());
+        sw_input_buffer_size.push_back(encoded_images[i].numel());
+        sw_output_buffer.push_back(decoded_imgs_buf[i]);
+      }
+    }
+  } else {
+    for (std::vector<torch::Tensor>::size_type i = 0;
+         i < encoded_images.size();
+         ++i) {
+      sw_input_buffer.push_back(encoded_images[i].data_ptr<uint8_t>());
+      sw_input_buffer_size.push_back(encoded_images[i].numel());
+      sw_output_buffer.push_back(decoded_imgs_buf[i]);
+    }
+  }
+
+  if (hw_input_buffer.size() > 0) {
+    // UNCHANGED behaves weird, so we use RGB instead
+    status = nvjpegDecodeBatchedInitialize(
+        nvjpeg_handle,
+        nvjpeg_state,
+        hw_input_buffer.size(),
+        1,
+        output_format == NVJPEG_OUTPUT_UNCHANGED ? NVJPEG_OUTPUT_RGB
+                                                 : output_format);
+    TORCH_CHECK(
+        status == NVJPEG_STATUS_SUCCESS,
+        "Failed to initialize batch decoding: ",
+        status);
+
+    status = nvjpegDecodeBatched(
+        nvjpeg_handle,
+        nvjpeg_state,
+        hw_input_buffer.data(),
+        hw_input_buffer_size.data(),
+        hw_output_buffer.data(),
+        stream);
+    TORCH_CHECK(
+        status == NVJPEG_STATUS_SUCCESS, "Failed to decode batch: ", status);
+  }
+
+  if (sw_input_buffer.size() > 0) {
+    status =
+        nvjpegStateAttachDeviceBuffer(nvjpeg_decoupled_state, device_buffer);
+    TORCH_CHECK(
+        status == NVJPEG_STATUS_SUCCESS,
+        "Failed to attach device buffer: ",
+        status);
+    int buffer_index = 0;
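+    // The two pinned buffers and jpeg streams are used in a ping-pong
+    // fashion below: buffer_index flips each iteration so that the host-side
+    // parse of image i+1 can overlap the device-side work on image i.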
+    // UNCHANGED behaves weird, so we use RGB instead
+    status = nvjpegDecodeParamsSetOutputFormat(
+        nvjpeg_decode_params,
+        output_format == NVJPEG_OUTPUT_UNCHANGED ? NVJPEG_OUTPUT_RGB
+                                                 : output_format);
+    TORCH_CHECK(
+        status == NVJPEG_STATUS_SUCCESS,
+        "Failed to set output format: ",
+        status);
+    for (std::vector<const unsigned char*>::size_type i = 0;
+         i < sw_input_buffer.size();
+         ++i) {
+      status = nvjpegJpegStreamParse(
+          nvjpeg_handle,
+          sw_input_buffer[i],
+          sw_input_buffer_size[i],
+          0,
+          0,
+          jpeg_streams[buffer_index]);
+      TORCH_CHECK(
+          status == NVJPEG_STATUS_SUCCESS,
+          "Failed to parse jpeg stream: ",
+          status);
+
+      status = nvjpegStateAttachPinnedBuffer(
+          nvjpeg_decoupled_state, pinned_buffers[buffer_index]);
+      TORCH_CHECK(
+          status == NVJPEG_STATUS_SUCCESS,
+          "Failed to attach pinned buffer: ",
+          status);
+
+      status = nvjpegDecodeJpegHost(
+          nvjpeg_handle,
+          nvjpeg_decoder,
+          nvjpeg_decoupled_state,
+          nvjpeg_decode_params,
+          jpeg_streams[buffer_index]);
+      TORCH_CHECK(
+          status == NVJPEG_STATUS_SUCCESS,
+          "Failed to decode jpeg stream: ",
+          status);
+
+      cudaStatus = cudaStreamSynchronize(stream);
+      TORCH_CHECK(
+          cudaStatus == cudaSuccess,
+          "Failed to synchronize CUDA stream: ",
+          cudaStatus);
+
+      status = nvjpegDecodeJpegTransferToDevice(
+          nvjpeg_handle,
+          nvjpeg_decoder,
+          nvjpeg_decoupled_state,
+          jpeg_streams[buffer_index],
+          stream);
+      TORCH_CHECK(
+          status == NVJPEG_STATUS_SUCCESS,
+          "Failed to transfer jpeg to device: ",
+          status);
+
+      buffer_index = 1 - buffer_index; // switch pinned buffer in pipeline
+                                       // mode to avoid an extra sync
+
+      status = nvjpegDecodeJpegDevice(
+          nvjpeg_handle,
+          nvjpeg_decoder,
+          nvjpeg_decoupled_state,
+          &sw_output_buffer[i],
+          stream);
+      TORCH_CHECK(
+          status == NVJPEG_STATUS_SUCCESS,
+          "Failed to decode jpeg stream: ",
+          status);
+    }
+  }
+
+  cudaStatus = cudaStreamSynchronize(stream);
+  TORCH_CHECK(
+      cudaStatus == cudaSuccess,
+      "Failed to synchronize CUDA stream: ",
+      cudaStatus);
+
+  // prune extraneous channels from single channel images
+  if (output_format == NVJPEG_OUTPUT_UNCHANGED) {
+    for (std::vector<torch::Tensor>::size_type i = 0;
+         i < output_tensors.size();
+         ++i) {
+      if (channels[i] == 1) {
+        output_tensors[i] = output_tensors[i][0].unsqueeze(0).clone();
+      }
+    }
+  }
+
+  return output_tensors;
+}
+
+} // namespace image
+} // namespace vision
+
+#endif
diff --git a/torchvision/csrc/io/image/cuda/decode_jpegs_cuda.h b/torchvision/csrc/io/image/cuda/decode_jpegs_cuda.h
new file mode 100644
index 00000000000..6f72d9e35b2
--- /dev/null
+++ b/torchvision/csrc/io/image/cuda/decode_jpegs_cuda.h
@@ -0,0 +1,45 @@
+#pragma once
+#include
+#include
+#include "../common.h"
+
+#if NVJPEG_FOUND
+#include
+#include
+
+namespace vision {
+namespace image {
+class CUDAJpegDecoder {
+ public:
+  CUDAJpegDecoder(const torch::Device& target_device);
+  ~CUDAJpegDecoder();
+
+  std::vector<torch::Tensor> decode_images(
+      const std::vector<torch::Tensor>& encoded_images,
+      const nvjpegOutputFormat_t& output_format);
+
+  const torch::Device original_device;
+  const torch::Device target_device;
+  const c10::cuda::CUDAStream stream;
+
+ private:
+  std::tuple<
+      std::vector<nvjpegImage_t>,
+      std::vector<torch::Tensor>,
+      std::vector<int>>
+  prepare_buffers(
+      const std::vector<torch::Tensor>& encoded_images,
+      const nvjpegOutputFormat_t& output_format);
+  nvjpegJpegState_t nvjpeg_state;
+  nvjpegJpegState_t nvjpeg_decoupled_state;
+  nvjpegBufferPinned_t pinned_buffers[2];
+  nvjpegBufferDevice_t device_buffer;
+  nvjpegJpegStream_t jpeg_streams[2];
+  nvjpegDecodeParams_t nvjpeg_decode_params;
+  nvjpegJpegDecoder_t nvjpeg_decoder;
+  bool hw_decode_available{false};
+  nvjpegHandle_t nvjpeg_handle;
+};
+} // namespace image
+} // namespace vision
+#endif
diff --git a/torchvision/csrc/io/image/cuda/encode_decode_jpegs_cuda.h b/torchvision/csrc/io/image/cuda/encode_decode_jpegs_cuda.h
new file mode 100644
index 00000000000..8c3ad8f9a9d
--- /dev/null
+++ b/torchvision/csrc/io/image/cuda/encode_decode_jpegs_cuda.h
@@ -0,0 +1,59 @@
+#pragma once
+
+#include
+#include "../common.h"
+#include "decode_jpegs_cuda.h"
+#include "encode_jpegs_cuda.h"
+
+namespace vision {
+namespace image {
+
+/*
+
+Fast jpeg decoding with CUDA.
+A100+ GPUs have dedicated hardware support for jpeg decoding.
+
+Args:
+  - encoded_images (const std::vector<torch::Tensor>&): a vector of tensors
+    containing the jpeg bitstreams to be decoded. Each tensor must have dtype
+    torch.uint8 and device cpu
+  - mode (ImageReadMode): IMAGE_READ_MODE_UNCHANGED, IMAGE_READ_MODE_GRAY and
+    IMAGE_READ_MODE_RGB are supported
+  - device (torch::Device): The desired CUDA device to run the decoding on and
+    which will contain the output tensors
+
+Returns:
+  - decoded_images (std::vector<torch::Tensor>): a vector of torch::Tensors of
+    dtype torch.uint8 on the specified device containing the decoded images
+
+Notes:
+  - If a single image fails, the whole batch fails.
+  - This function is thread-safe
+*/
+C10_EXPORT std::vector<torch::Tensor> decode_jpegs_cuda(
+    const std::vector<torch::Tensor>& encoded_images,
+    vision::image::ImageReadMode mode,
+    torch::Device device);
+
+/*
+Fast jpeg encoding with CUDA.
+
+Args:
+  - decoded_images (const std::vector<torch::Tensor>&): a vector of contiguous
+    CUDA tensors of dtype torch.uint8 to be encoded.
+  - quality (int64_t): 0-100, 75 is the default
+
+Returns:
+  - encoded_images (std::vector<torch::Tensor>): a vector of CUDA
+    torch::Tensors of dtype torch.uint8 containing the encoded images
+
+Notes:
+  - If a single image fails, the whole batch fails.
+  - This function is thread-safe
+*/
+C10_EXPORT std::vector<torch::Tensor> encode_jpegs_cuda(
+    const std::vector<torch::Tensor>& decoded_images,
+    const int64_t quality);
+
+} // namespace image
+} // namespace vision
diff --git a/torchvision/csrc/io/image/cuda/encode_jpegs_cuda.cpp b/torchvision/csrc/io/image/cuda/encode_jpegs_cuda.cpp
new file mode 100644
index 00000000000..1f10327ddbf
--- /dev/null
+++ b/torchvision/csrc/io/image/cuda/encode_jpegs_cuda.cpp
@@ -0,0 +1,274 @@
+#include "encode_jpegs_cuda.h"
+#if !NVJPEG_FOUND
+namespace vision {
+namespace image {
+std::vector<torch::Tensor> encode_jpegs_cuda(
+    const std::vector<torch::Tensor>& decoded_images,
+    const int64_t quality) {
+  TORCH_CHECK(
+      false, "encode_jpegs_cuda: torchvision not compiled with nvJPEG support");
+}
+} // namespace image
+} // namespace vision
+#else
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include "c10/core/ScalarType.h"
+
+namespace vision {
+namespace image {
+
+// We use global variables to cache the encoder and decoder instances and
+// reuse them across calls to the corresponding pytorch functions
+std::mutex encoderMutex;
+std::unique_ptr<CUDAJpegEncoder> cudaJpegEncoder;
+
+std::vector<torch::Tensor> encode_jpegs_cuda(
+    const std::vector<torch::Tensor>& decoded_images,
+    const int64_t quality) {
+  C10_LOG_API_USAGE_ONCE(
+      "torchvision.csrc.io.image.cuda.encode_jpegs_cuda.encode_jpegs_cuda");
+
+  // Some nvjpeg structures are not thread safe so we're keeping it single
+  // threaded for now. In the future this may be an opportunity to unlock
+  // further speedups
+  std::lock_guard<std::mutex> lock(encoderMutex);
+  TORCH_CHECK(decoded_images.size() > 0, "Empty input tensor list");
+  torch::Device device = decoded_images[0].device();
+  at::cuda::CUDAGuard device_guard(device);
+
+  // lazy init of the encoder class
+  // the encoder object holds on to a lot of state and is expensive to create,
+  // so we reuse it across calls. NB: the cached structures are device
+  // specific and cannot be reused across devices
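+  // (E.g. a call on cuda:1 following a call on cuda:0 drops the cached
+  // encoder and builds a fresh one, as the check below implements.)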
+  if (cudaJpegEncoder == nullptr || device != cudaJpegEncoder->target_device) {
+    if (cudaJpegEncoder != nullptr)
+      delete cudaJpegEncoder.release();
+
+    cudaJpegEncoder = std::make_unique<CUDAJpegEncoder>(device);
+
+    // Unfortunately, we cannot rely on the smart pointer releasing the
+    // encoder object correctly upon program exit. This is because, when
+    // cudaJpegEncoder gets destroyed, the CUDA runtime may already be shut
+    // down, rendering all destroy* calls in the encoder destructor invalid.
+    // Instead, we use an atexit hook which executes after main() finishes,
+    // but hopefully before CUDA shuts down when the program exits. If CUDA is
+    // already shut down the destructor will detect this and will not attempt
+    // to destroy any encoder structures.
+    std::atexit([]() { delete cudaJpegEncoder.release(); });
+  }
+
+  std::vector<torch::Tensor> contig_images;
+  contig_images.reserve(decoded_images.size());
+  for (const auto& image : decoded_images) {
+    TORCH_CHECK(
+        image.dtype() == torch::kU8, "Input tensor dtype should be uint8");
+
+    TORCH_CHECK(
+        image.device() == device,
+        "All input tensors must be on the same CUDA device when encoding with nvjpeg");
+
+    TORCH_CHECK(
+        image.dim() == 3 && image.numel() > 0,
+        "Input data should be a 3-dimensional tensor");
+
+    TORCH_CHECK(
+        image.size(0) == 3,
+        "The number of channels should be 3, got: ",
+        image.size(0));
+
+    // nvjpeg requires images to be contiguous
+    if (image.is_contiguous()) {
+      contig_images.push_back(image);
+    } else {
+      contig_images.push_back(image.contiguous());
+    }
+  }
+
+  cudaJpegEncoder->set_quality(quality);
+  std::vector<torch::Tensor> encoded_images;
+  at::cuda::CUDAEvent event;
+  event.record(cudaJpegEncoder->stream);
+  for (const auto& image : contig_images) {
+    auto encoded_image = cudaJpegEncoder->encode_jpeg(image);
+    encoded_images.push_back(encoded_image);
+  }
+
+  // We use a dedicated stream to do the encoding and even though the results
+  // may be ready on that stream we cannot assume that they are also available
+  // on the current stream of the calling context when this function returns.
+  // We use a blocking event to ensure that this is indeed the case.
+  // Crucially, we do not want to block the host at this particular point
+  // (which is what cudaStreamSynchronize would do.) Events allow us to
+  // synchronize the streams without blocking the host.
+  event.block(at::cuda::getCurrentCUDAStream(
+      cudaJpegEncoder->original_device.has_index()
+          ? cudaJpegEncoder->original_device.index()
+          : 0));
+  return encoded_images;
+}
+
+CUDAJpegEncoder::CUDAJpegEncoder(const torch::Device& target_device)
+    : original_device{torch::kCUDA, torch::cuda::current_device()},
+      target_device{target_device},
+      stream{
+          target_device.has_index()
+              ? at::cuda::getStreamFromPool(false, target_device.index())
+              : at::cuda::getStreamFromPool(false)} {
+  nvjpegStatus_t status;
+  status = nvjpegCreateSimple(&nvjpeg_handle);
+  TORCH_CHECK(
+      status == NVJPEG_STATUS_SUCCESS,
+      "Failed to create nvjpeg handle: ",
+      status);
+
+  status = nvjpegEncoderStateCreate(nvjpeg_handle, &nv_enc_state, stream);
+  TORCH_CHECK(
+      status == NVJPEG_STATUS_SUCCESS,
+      "Failed to create nvjpeg encoder state: ",
+      status);
+
+  status = nvjpegEncoderParamsCreate(nvjpeg_handle, &nv_enc_params, stream);
+  TORCH_CHECK(
+      status == NVJPEG_STATUS_SUCCESS,
+      "Failed to create nvjpeg encoder params: ",
+      status);
+}
+
+CUDAJpegEncoder::~CUDAJpegEncoder() {
+  /*
+  The below code works on Mac and Linux, but fails on Windows.
+  This is because on Windows, the atexit hook which calls this
+  destructor executes after cuda is already shut down causing SIGSEGV.
+  We do not have a solution to this problem at the moment, so we'll
+  just leak the libnvjpeg & cuda variables for the time being and hope
+  that the CUDA runtime handles cleanup for us.
+  Please send a PR if you have a solution for this problem.
+  */
+
+  // // We run cudaGetDeviceCount as a dummy to test if the CUDA runtime is
+  // // still initialized. If it is not, we can skip the rest of this function
+  // // as it is unsafe to execute.
+  // int deviceCount = 0;
+  // cudaError_t error = cudaGetDeviceCount(&deviceCount);
+  // if (error != cudaSuccess)
+  //   return; // CUDA runtime has already shut down. There's nothing we can
+  //           // do now.
+
+  // nvjpegStatus_t status;
+
+  // status = nvjpegEncoderParamsDestroy(nv_enc_params);
+  // TORCH_CHECK(
+  //     status == NVJPEG_STATUS_SUCCESS,
+  //     "Failed to destroy nvjpeg encoder params: ",
+  //     status);
+
+  // status = nvjpegEncoderStateDestroy(nv_enc_state);
+  // TORCH_CHECK(
+  //     status == NVJPEG_STATUS_SUCCESS,
+  //     "Failed to destroy nvjpeg encoder state: ",
+  //     status);
+
+  // cudaStreamSynchronize(stream);
+
+  // status = nvjpegDestroy(nvjpeg_handle);
+  // TORCH_CHECK(
+  //     status == NVJPEG_STATUS_SUCCESS, "nvjpegDestroy failed: ", status);
+}
+
+torch::Tensor CUDAJpegEncoder::encode_jpeg(const torch::Tensor& src_image) {
+  int channels = src_image.size(0);
+  int height = src_image.size(1);
+  int width = src_image.size(2);
+
+  nvjpegStatus_t status;
+  cudaError_t cudaStatus;
+  status = nvjpegEncoderParamsSetSamplingFactors(
+      nv_enc_params, NVJPEG_CSS_444, stream);
+  TORCH_CHECK(
+      status == NVJPEG_STATUS_SUCCESS,
+      "Failed to set nvjpeg encoder params sampling factors: ",
+      status);
+
+  nvjpegImage_t target_image;
+  for (int c = 0; c < channels; c++) {
+    target_image.channel[c] = src_image[c].data_ptr<uint8_t>();
+    // this is why we need contiguous tensors
+    target_image.pitch[c] = width;
+  }
+  for (int c = channels; c < NVJPEG_MAX_COMPONENT; c++) {
+    target_image.channel[c] = nullptr;
+    target_image.pitch[c] = 0;
+  }
+  // Encode the image
+  status = nvjpegEncodeImage(
+      nvjpeg_handle,
+      nv_enc_state,
+      nv_enc_params,
+      &target_image,
+      NVJPEG_INPUT_RGB,
+      width,
+      height,
+      stream);
+
+  TORCH_CHECK(
+      status == NVJPEG_STATUS_SUCCESS, "image encoding failed: ", status);
+  // Retrieve length of the encoded image
+  size_t length;
+  status = nvjpegEncodeRetrieveBitstreamDevice(
+      nvjpeg_handle, nv_enc_state, NULL, &length, stream);
+  TORCH_CHECK(
+      status == NVJPEG_STATUS_SUCCESS,
+      "Failed to retrieve encoded image stream state: ",
+      status);
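+  // nvjpegEncodeRetrieveBitstreamDevice is called twice: the call above
+  // passes a null buffer and only queries the bitstream length; the call
+  // further below copies the bitstream into the tensor allocated in between.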
+  // Synchronize the stream to ensure that the encoded image is ready
+  cudaStatus = cudaStreamSynchronize(stream);
+  TORCH_CHECK(cudaStatus == cudaSuccess, "CUDA ERROR: ", cudaStatus);
+
+  // Reserve buffer for the encoded image
+  torch::Tensor encoded_image = torch::empty(
+      {static_cast<long>(length)},
+      torch::TensorOptions()
+          .dtype(torch::kByte)
+          .layout(torch::kStrided)
+          .device(target_device)
+          .requires_grad(false));
+  cudaStatus = cudaStreamSynchronize(stream);
+  TORCH_CHECK(cudaStatus == cudaSuccess, "CUDA ERROR: ", cudaStatus);
+  // Retrieve the encoded image
+  status = nvjpegEncodeRetrieveBitstreamDevice(
+      nvjpeg_handle,
+      nv_enc_state,
+      encoded_image.data_ptr<uint8_t>(),
+      &length,
+      0);
+  TORCH_CHECK(
+      status == NVJPEG_STATUS_SUCCESS,
+      "Failed to retrieve encoded image: ",
+      status);
+  return encoded_image;
+}
+
+void CUDAJpegEncoder::set_quality(const int64_t quality) {
+  nvjpegStatus_t paramsQualityStatus =
+      nvjpegEncoderParamsSetQuality(nv_enc_params, quality, stream);
+  TORCH_CHECK(
+      paramsQualityStatus == NVJPEG_STATUS_SUCCESS,
+      "Failed to set nvjpeg encoder params quality: ",
+      paramsQualityStatus);
+}
+
+} // namespace image
+} // namespace vision
+
+#endif // NVJPEG_FOUND
diff --git a/torchvision/csrc/io/image/cuda/encode_jpegs_cuda.h b/torchvision/csrc/io/image/cuda/encode_jpegs_cuda.h
new file mode 100644
index 00000000000..543940f1585
--- /dev/null
+++ b/torchvision/csrc/io/image/cuda/encode_jpegs_cuda.h
@@ -0,0 +1,33 @@
+#pragma once
+#include
+#include
+#if NVJPEG_FOUND
+
+#include
+#include
+#include
+
+namespace vision {
+namespace image {
+
+class CUDAJpegEncoder {
+ public:
+  CUDAJpegEncoder(const torch::Device& device);
+  ~CUDAJpegEncoder();
+
+  torch::Tensor encode_jpeg(const torch::Tensor& src_image);
+
+  void set_quality(const int64_t quality);
+
+  const torch::Device original_device;
+  const torch::Device target_device;
+  const c10::cuda::CUDAStream stream;
+
+ protected:
+  nvjpegEncoderState_t nv_enc_state;
+  nvjpegEncoderParams_t nv_enc_params;
+  nvjpegHandle_t nvjpeg_handle;
+};
+} // namespace image
+} // namespace vision
+#endif
diff --git a/torchvision/csrc/io/image/image.cpp b/torchvision/csrc/io/image/image.cpp
new file mode 100644
index 00000000000..2ac29e6b1ee
--- /dev/null
+++ b/torchvision/csrc/io/image/image.cpp
@@ -0,0 +1,37 @@
+#include "image.h"
+
+#include
+
+// If we are in a Windows environment, we need to define
+// initialization functions for the _custom_ops extension
+#ifdef _WIN32
+void* PyInit_image(void) {
+  return nullptr;
+}
+#endif
+
+namespace vision {
+namespace image {
+
+static auto registry =
+    torch::RegisterOperators()
+        .op("image::decode_gif", &decode_gif)
+        .op("image::decode_png(Tensor data, int mode, bool apply_exif_orientation=False) -> Tensor",
+            &decode_png)
+        .op("image::encode_png", &encode_png)
+        .op("image::decode_jpeg(Tensor data, int mode, bool apply_exif_orientation=False) -> Tensor",
+            &decode_jpeg)
+        .op("image::decode_webp(Tensor encoded_data, int mode) -> Tensor",
+            &decode_webp)
+        .op("image::encode_jpeg", &encode_jpeg)
+        .op("image::read_file", &read_file)
+        .op("image::write_file", &write_file)
+        .op("image::decode_image(Tensor data, int mode, bool apply_exif_orientation=False) -> Tensor",
+            &decode_image)
+        .op("image::decode_jpegs_cuda", &decode_jpegs_cuda)
+        .op("image::encode_jpegs_cuda", &encode_jpegs_cuda)
+        .op("image::_jpeg_version", &_jpeg_version)
+        .op("image::_is_compiled_against_turbo", &_is_compiled_against_turbo);
+
+} // namespace image
+} // namespace vision
diff --git a/torchvision/csrc/io/image/image.h b/torchvision/csrc/io/image/image.h
new file mode 100644
index 00000000000..3f47fdec65c
--- /dev/null
+++ b/torchvision/csrc/io/image/image.h
@@ -0,0 +1,11 @@
+#pragma once
+
+#include "cpu/decode_gif.h"
+#include "cpu/decode_image.h"
+#include "cpu/decode_jpeg.h"
+#include "cpu/decode_png.h"
+#include "cpu/decode_webp.h"
+#include "cpu/encode_jpeg.h"
+#include "cpu/encode_png.h"
+#include "cpu/read_write_file.h"
+#include "cuda/encode_decode_jpegs_cuda.h"
diff --git a/torchvision/csrc/io/video/video.cpp b/torchvision/csrc/io/video/video.cpp
new file mode 100644
index 00000000000..0340c97794d
--- /dev/null
+++ b/torchvision/csrc/io/video/video.cpp
@@ -0,0 +1,387 @@
+#include "video.h"
+
+#include
+
+using namespace ffmpeg;
+
+namespace vision {
+namespace video {
+
+namespace {
+
+const size_t decoderTimeoutMs = 600000;
+const AVPixelFormat defaultVideoPixelFormat = AV_PIX_FMT_RGB24;
+
+// copies the decoder payload into `frame`; returns the element size in bytes
+template <typename T>
+size_t fillTensorList(DecoderOutputMessage& msgs, torch::Tensor& frame) {
+  const auto& msg = msgs;
+  T* frameData = frame.numel() > 0 ? frame.data_ptr<T>() : nullptr;
+  if (frameData) {
+    auto sizeInBytes = msg.payload->length();
+    memcpy(frameData, msg.payload->data(), sizeInBytes);
+  }
+  return sizeof(T);
+}
+
+size_t fillVideoTensor(DecoderOutputMessage& msgs, torch::Tensor& videoFrame) {
+  return fillTensorList<uint8_t>(msgs, videoFrame);
+}
+
+size_t fillAudioTensor(DecoderOutputMessage& msgs, torch::Tensor& audioFrame) {
+  return fillTensorList<float>(msgs, audioFrame);
+}
+
+std::array<std::pair<std::string, MediaType>, 4>::const_iterator
+_parse_type(const std::string& stream_string) {
+  static const std::array<std::pair<std::string, MediaType>, 4> types = {{
+      {"video", TYPE_VIDEO},
+      {"audio", TYPE_AUDIO},
+      {"subtitle", TYPE_SUBTITLE},
+      {"cc", TYPE_CC},
+  }};
+  auto device = std::find_if(
+      types.begin(),
+      types.end(),
+      [stream_string](const std::pair<std::string, MediaType>& p) {
+        return p.first == stream_string;
+      });
+  if (device != types.end()) {
+    return device;
+  }
+  TORCH_CHECK(
+      false, "Expected one of [audio, video, subtitle, cc] ", stream_string);
+}
+
+std::string parse_type_to_string(const std::string& stream_string) {
+  auto device = _parse_type(stream_string);
+  return device->first;
+}
+
+MediaType parse_type_to_mt(const std::string& stream_string) {
+  auto device = _parse_type(stream_string);
+  return device->second;
+}
+
+std::tuple<std::string, long> _parseStream(const std::string& streamString) {
+  TORCH_CHECK(!streamString.empty(), "Stream string must not be empty");
+  static const std::regex regex("([a-zA-Z_]+)(?::([1-9]\\d*|0))?");
+  std::smatch match;
+
+  TORCH_CHECK(
+      std::regex_match(streamString, match, regex),
+      "Invalid stream string: '",
+      streamString,
+      "'");
+
+  std::string type_ = parse_type_to_string(match[1].str());
+  long index_ = -1;
+  if (match[2].matched) {
+    try {
+      index_ = std::stoi(match[2].str());
+    } catch (const std::exception&) {
+      TORCH_CHECK(
+          false,
+          "Could not parse device index '",
+          match[2].str(),
+          "' in device string '",
+          streamString,
+          "'");
+    }
+  }
+  return std::make_tuple(type_, index_);
+}
+
+} // namespace
+
+void Video::_getDecoderParams(
+    double videoStartS,
+    int64_t getPtsOnly,
+    std::string stream,
+    long stream_id = -1,
+    bool fastSeek = true,
+    bool all_streams = false,
+    int64_t num_threads = 1,
+    double seekFrameMarginUs = 10) {
+  int64_t videoStartUs = int64_t(videoStartS * 1e6);
+
+  params.timeoutMs = decoderTimeoutMs;
+  params.startOffset = videoStartUs;
+  params.seekAccuracy = seekFrameMarginUs;
+  params.fastSeek = fastSeek;
+  params.headerOnly = false;
+  params.numThreads = num_threads;
+
+  params.preventStaleness = false; // not sure what this is about
+
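+  // all_streams == true registers a sentinel stream id of -2 for every media
+  // type so the decoder opens everything; otherwise only the single
+  // requested (type, id) pair is registered below.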
+  if (all_streams) {
+    MediaFormat format;
+    format.stream = -2;
+    format.type = TYPE_AUDIO;
+    params.formats.insert(format);
+
+    format.type = TYPE_VIDEO;
+    format.stream = -2;
+    format.format.video.width = 0;
+    format.format.video.height = 0;
+    format.format.video.cropImage = 0;
+    format.format.video.format = defaultVideoPixelFormat;
+    params.formats.insert(format);
+
+    format.type = TYPE_SUBTITLE;
+    format.stream = -2;
+    params.formats.insert(format);
+
+    format.type = TYPE_CC;
+    format.stream = -2;
+    params.formats.insert(format);
+  } else {
+    // parse stream type
+    MediaType stream_type = parse_type_to_mt(stream);
+
+    // reset params.formats
+    std::set<MediaFormat> formats;
+    params.formats = formats;
+    // Define new format
+    MediaFormat format;
+    format.type = stream_type;
+    format.stream = stream_id;
+    if (stream_type == TYPE_VIDEO) {
+      format.format.video.width = 0;
+      format.format.video.height = 0;
+      format.format.video.cropImage = 0;
+      format.format.video.format = defaultVideoPixelFormat;
+    }
+    params.formats.insert(format);
+  }
+
+} // _getDecoderParams
+
+void Video::initFromFile(
+    std::string videoPath,
+    std::string stream,
+    int64_t numThreads) {
+  TORCH_CHECK(!initialized, "Video object can only be initialized once");
+  initialized = true;
+  params.uri = videoPath;
+  _init(stream, numThreads);
+}
+
+void Video::initFromMemory(
+    torch::Tensor videoTensor,
+    std::string stream,
+    int64_t numThreads) {
+  TORCH_CHECK(!initialized, "Video object can only be initialized once");
+  initialized = true;
+  callback = MemoryBuffer::getCallback(
+      videoTensor.data_ptr<uint8_t>(), videoTensor.size(0));
+  _init(stream, numThreads);
+}
+
+void Video::_init(std::string stream, int64_t numThreads) {
+  // set the global number of decoding threads
+  numThreads_ = numThreads;
+  // parse stream information
+  current_stream = _parseStream(stream);
+  // note that in the initial call we want to get all streams
+  _getDecoderParams(
+      0, // video start
+      0, // headerOnly
+      std::get<0>(current_stream), // stream info - remove that
+      long(-1), // stream_id parsed from info above change to -2
+      false, // fastseek: we're using the default param here
+      true, // read all streams
+      numThreads_ // global number of Threads for decoding
+  );
+
+  std::string logMessage, logType;
+
+  // locals
+  std::vector<double> audioFPS, videoFPS;
+  std::vector<double> audioDuration, videoDuration, ccDuration, subsDuration;
+  std::vector<double> audioTB, videoTB, ccTB, subsTB;
+  c10::Dict<std::string, std::vector<double>> audioMetadata;
+  c10::Dict<std::string, std::vector<double>> videoMetadata;
+  c10::Dict<std::string, std::vector<double>> ccMetadata;
+  c10::Dict<std::string, std::vector<double>> subsMetadata;
+
+  // callback and metadata defined in struct
+  DecoderInCallback tmp_callback = callback;
+  succeeded = decoder.init(params, std::move(tmp_callback), &metadata);
+  if (succeeded) {
+    for (const auto& header : metadata) {
+      double fps = double(header.fps);
+      double duration = double(header.duration) * 1e-6; // * timeBase;
+
+      if (header.format.type == TYPE_VIDEO) {
+        videoFPS.push_back(fps);
+        videoDuration.push_back(duration);
+      } else if (header.format.type == TYPE_AUDIO) {
+        audioFPS.push_back(fps);
+        audioDuration.push_back(duration);
+      } else if (header.format.type == TYPE_CC) {
+        ccDuration.push_back(duration);
+      } else if (header.format.type == TYPE_SUBTITLE) {
+        subsDuration.push_back(duration);
+      }
+    }
+  }
+  // audio
+  audioMetadata.insert("duration", audioDuration);
+  audioMetadata.insert("framerate", audioFPS);
+  // video
+  videoMetadata.insert("duration", videoDuration);
+  videoMetadata.insert("fps", videoFPS);
+  // subs
+  subsMetadata.insert("duration", subsDuration);
+  // cc
+  ccMetadata.insert("duration", ccDuration);
+  // put everything into a single dict
+  streamsMetadata.insert("video", videoMetadata);
+  streamsMetadata.insert("audio", audioMetadata);
+  streamsMetadata.insert("subtitles", subsMetadata);
+  streamsMetadata.insert("cc", ccMetadata);
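+  // Resulting layout (illustrative): streamsMetadata["video"]["fps"] holds
+  // the per-stream frame rates and streamsMetadata["video"]["duration"] the
+  // per-stream durations in seconds; note that audio uses the key
+  // "framerate" rather than "fps".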
+
+  succeeded = setCurrentStream(stream);
+  if (std::get<1>(current_stream) != -1) {
+    LOG(INFO)
+        << "Stream index set to " << std::get<1>(current_stream)
+        << ". If you encounter trouble, consider switching it to automatic stream discovery. \n";
+  }
+}
+
+Video::Video(std::string videoPath, std::string stream, int64_t numThreads) {
+  C10_LOG_API_USAGE_ONCE("torchvision.csrc.io.video.video.Video");
+  if (!videoPath.empty()) {
+    initFromFile(videoPath, stream, numThreads);
+  }
+} // video
+
+bool Video::setCurrentStream(std::string stream = "video") {
+  TORCH_CHECK(initialized, "Video object has to be initialized first");
+  if ((!stream.empty()) && (_parseStream(stream) != current_stream)) {
+    current_stream = _parseStream(stream);
+  }
+
+  double ts = 0;
+  if (seekTS > 0) {
+    ts = seekTS;
+  }
+
+  _getDecoderParams(
+      ts, // video start
+      0, // headerOnly
+      std::get<0>(current_stream), // stream
+      long(std::get<1>(
+          current_stream)), // stream_id parsed from info above change to -2
+      false, // fastseek is false by default (changed in Seek)
+      false, // read all streams
+      numThreads_ // global number of threads
+  );
+
+  // callback and metadata defined in Video.h
+  DecoderInCallback tmp_callback = callback;
+  return (decoder.init(params, std::move(tmp_callback), &metadata));
+}
+
+std::tuple<std::string, long> Video::getCurrentStream() const {
+  TORCH_CHECK(initialized, "Video object has to be initialized first");
+  return current_stream;
+}
+
+c10::Dict<std::string, c10::Dict<std::string, std::vector<double>>>
+Video::getStreamMetadata() const {
+  TORCH_CHECK(initialized, "Video object has to be initialized first");
+  return streamsMetadata;
+}
+
+void Video::Seek(double ts, bool fastSeek = false) {
+  TORCH_CHECK(initialized, "Video object has to be initialized first");
+  // initialize the class variables used for seeking and return
+  _getDecoderParams(
+      ts, // video start
+      0, // headerOnly
+      std::get<0>(current_stream), // stream
+      long(std::get<1>(
+          current_stream)), // stream_id parsed from info above change to -2
+      fastSeek, // fastseek
+      false, // read all streams
+      numThreads_ // global number of threads
+  );
+
+  // callback and metadata defined in Video.h
+  DecoderInCallback tmp_callback = callback;
+  succeeded = decoder.init(params, std::move(tmp_callback), &metadata);
+}
+
+std::tuple<torch::Tensor, double> Video::Next() {
+  TORCH_CHECK(initialized, "Video object has to be initialized first");
+  // if we fail to decode, we simply return a zero-element tensor
+  // (note: should we raise an exception instead?)
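+  // A minimal caller sketch (illustrative only; assumes a readable
+  // "video.mp4" and the default video stream):
+  //
+  //   Video v("video.mp4", "video", /*numThreads=*/1);
+  //   auto [frame, pts] = v.Next();
+  //   while (frame.numel() > 0) {
+  //     // ... consume the CHW uint8 frame decoded at time `pts` ...
+  //     std::tie(frame, pts) = v.Next();
+  //   }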
+  double frame_pts_s = 0; // initialized so a failed decode returns a defined pts
+  torch::Tensor outFrame = torch::zeros({0}, torch::kByte);
+
+  // decode single frame
+  DecoderOutputMessage out;
+  int64_t res = decoder.decode(&out, decoderTimeoutMs);
+  // if successful
+  if (res == 0) {
+    frame_pts_s = double(out.header.pts) * 1e-6;
+
+    auto header = out.header;
+    const auto& format = header.format;
+
+    // initialize the output variables based on type
+
+    if (format.type == TYPE_VIDEO) {
+      // note: this can potentially be optimized
+      // by having the global tensor that we fill at decode time
+      // (would avoid allocations)
+      int outHeight = format.format.video.height;
+      int outWidth = format.format.video.width;
+      int numChannels = 3;
+      outFrame =
+          torch::zeros({outHeight, outWidth, numChannels}, torch::kByte);
+      fillVideoTensor(out, outFrame);
+      outFrame = outFrame.permute({2, 0, 1});
+
+    } else if (format.type == TYPE_AUDIO) {
+      int outAudioChannels = format.format.audio.channels;
+      int bytesPerSample = av_get_bytes_per_sample(
+          static_cast<AVSampleFormat>(format.format.audio.format));
+      int frameSizeTotal = out.payload->length();
+
+      TORCH_CHECK_EQ(frameSizeTotal % (outAudioChannels * bytesPerSample), 0);
+      int numAudioSamples =
+          frameSizeTotal / (outAudioChannels * bytesPerSample);
+
+      outFrame =
+          torch::zeros({numAudioSamples, outAudioChannels}, torch::kFloat);
+
+      fillAudioTensor(out, outFrame);
+    }
+    // currently not supporting other formats (will do soon)
+
+    out.payload.reset();
+  } else if (res == ENODATA) {
+    LOG(INFO) << "Decoder ran out of frames (ENODATA)\n";
+  } else {
+    LOG(ERROR) << "Decoder failed with ERROR_CODE " << res;
+  }
+
+  return std::make_tuple(outFrame, frame_pts_s);
+}
+
+static auto registerVideo =
+    torch::class_