diff --git a/BehavioralClustering/0_video2image.py b/BehavioralClustering/0_video2image.py
index c9d0a3e..74d25dd 100644
--- a/BehavioralClustering/0_video2image.py
+++ b/BehavioralClustering/0_video2image.py
@@ -3,40 +3,39 @@
import json
import math
import copy
-import pandas as pd
+
import os
-import matplotlib.pyplot as plt
-from scipy.ndimage.filters import gaussian_filter
-from scipy.cluster.hierarchy import dendrogram, linkage
-from scipy.cluster import hierarchy
+
from tqdm import tqdm
from contour_utils import mkdir_p
import setting
-def get_frames(video_path, pose_track_vis_path):
- cap = cv2.VideoCapture(video_path)
- read_flag, frame = cap.read()
- if not read_flag:
- print("read " + video_path + " failed!")
- width, height, depth = np.asarray(frame).shape
-
- i = 0
- if not os.path.exists(pose_track_vis_path):
- mkdir_p(pose_track_vis_path)
-
- while read_flag:
- cv2.imwrite(pose_track_vis_path + "/frame_{}.png".format(i), frame)
- print("\r {}/frame_{}.png".format(pose_track_vis_path, i), end="")
-
- read_flag, frame = cap.read()
- i = i + 1
-
-
if __name__ == "__main__":
arg = setting.args_class()
-
- for video_path, pose_track_vis_path in zip(arg.videodir, arg.imgdir):
- print("generating %s" % (video_path))
- get_frames(video_path, pose_track_vis_path)
+
+    for video_path, pose_track_vis_path, start_frame, end_frame in zip(arg.videodir, arg.imgdir, arg.start_frame, arg.end_frame):
+        print('generating %s' % video_path)
+ cap = cv2.VideoCapture(video_path)
+ if cap.isOpened():
+ success = True
+ else:
+ success = False
+ print(" read failed!make sure that the video format is supported by cv2.VideoCapture")
+
+ if not os.path.exists(pose_track_vis_path):
+ mkdir_p(pose_track_vis_path)
+
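+        # Frames before start_frame are read but skipped (so frame_index stays aligned
+        # with the tracking results); extraction stops once end_frame frames have been read.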
+ for frame_index in tqdm(range(end_frame)):
+ success, frame = cap.read()
+ if not success:
+ print('read frame failed!')
+ break
+ if frame_index < start_frame:
+ continue
+ cv2.imwrite(pose_track_vis_path + '/frame_{}.png'.format(frame_index), frame)
+
+ cap.release()
+
+
diff --git a/BehavioralClustering/1_2_get_video_contour_includeOtherMouse.py b/BehavioralClustering/1_2_get_video_contour_includeOtherMouse.py
index b12d3e7..ad3bf97 100644
--- a/BehavioralClustering/1_2_get_video_contour_includeOtherMouse.py
+++ b/BehavioralClustering/1_2_get_video_contour_includeOtherMouse.py
@@ -203,10 +203,8 @@ def get_samples(video_path, json_path, contour_path, arg, targetMouseID):
)
-if __name__ == "__main__":
- # dir_name = '/disk1/zexin/project/mice/clustering_sequencial/forZexin/results/0603/1411_black_two/'
- # video_name = '1411_black_two.mov'
- # json_name= '/disk1/zexin/project/mice/clustering_sequencial/forZexin/results/0603/1411_black_two/alphapose-results-forvis-tracked.json'
+
+if __name__ == '__main__':
arg = setting.args_class()
if len(sys.argv) != 1:
diff --git a/BehavioralClustering/fft_utils.py b/BehavioralClustering/fft_utils.py
index 59e6ac5..aca4165 100644
--- a/BehavioralClustering/fft_utils.py
+++ b/BehavioralClustering/fft_utils.py
@@ -352,11 +352,8 @@ def retrieve_poses_Mice(arg):
frame_ind_inClip += 1
except Exception as e:
- print(
- "failed to process the {}th frame with error:{}. will restart the clip".format(
- frame_id, e
- )
- )
+ print('failed to process the {}th frame with warning:{}. will restart the clip'.format(frame_id,e))
+
frame_id += 1
pose_clip = []
poseTheOther_clip = []
diff --git a/BehavioralClustering/setting.py b/BehavioralClustering/setting.py
index 09f9438..5fe13d6 100644
--- a/BehavioralClustering/setting.py
+++ b/BehavioralClustering/setting.py
@@ -24,8 +24,17 @@ def __init__(self):
]
## videodir is a list of videos to be analysed
self.videodir = [
- "../Tracking/AlphaTracker/data/demo.mp4",
- ]
+ '../Tracking/AlphaTracker/data/demo.mp4',
+ ]
+ ## start_frame is a list of start frame numbers to be extracted. Same as the start_frame in tracking.
+ self.start_frame = [
+ 0,
+ ]
+
+ ## end_frame is a list of end frame numbers to be extracted. Same as the end_frame in tracking.
+ self.end_frame = [
+ 300,
+ ]
## mice_num: the max number of the mice in the videos
self.mice_num = 2
diff --git a/README.md b/README.md
index 582da59..91b0829 100644
--- a/README.md
+++ b/README.md
@@ -3,7 +3,7 @@
-[AlphaTracker](https://github.com/ZexinChen/AlphaTracker) is a multi-animal tracking and behavioral analysis tool which incorporates **multi-animal tracking**, **pose estimation** and **unsupervised behavioral clustering** to empower system neuroscience research. Alphatracker achieves the state-of-art accuracy of multi-animal tracking which lays the foundation for stringent biological studies. Moreover, the minimum requirement for hardware (regular webcams) and efficient training procedure allows readily adoption by most neuroscience labs.
+[AlphaTracker](https://github.com/MVIG-SJTU/AlphaTracker) is a multi-animal tracking and behavioral analysis tool which incorporates **multi-animal tracking**, **pose estimation** and **unsupervised behavioral clustering** to empower systems neuroscience research. AlphaTracker achieves state-of-the-art multi-animal tracking accuracy, which lays the foundation for stringent biological studies. Moreover, its minimal hardware requirements (regular webcams) and efficient training procedure allow ready adoption by most neuroscience labs.
diff --git a/Tracking/AlphaTracker/download.py b/Tracking/AlphaTracker/download.py
index 972a1f7..12343ca 100644
--- a/Tracking/AlphaTracker/download.py
+++ b/Tracking/AlphaTracker/download.py
@@ -2,34 +2,28 @@
import zipfile
-sppe_pretrain_weight = "1OPORTWB2cwd5YTVBX-NE8fsauZJWsrtW"
-yolo_pretrain_weight = "1g8uJjK7EOlqrUCmjZTtCegwnNsBig6zn"
-sppe_trained_weight = "1_BwtYySpX9uWDgdwqw0UEppyMYYv1gkJ"
-yolo_trained_weight = "13zXkuZ4dNm3ZOwstr1sSWKOOzJ19XZpN"
-demo_data = "1N0JjazqW6JmBheLrn6RoDTSRXSPp1t4K"
-sample_training_data = "15dR-vVCEsg2z7mEVzJOF9YDW6YioEU3N"
-scipy_data = "1c6vJQbAm_TcGyTCr1ah-x_R-iIYmT9TM"
-gdd.download_file_from_google_drive(
- file_id=sppe_pretrain_weight, dest_path="./models/sppe/duc_se.pth"
-)
-gdd.download_file_from_google_drive(
- file_id=yolo_pretrain_weight, dest_path="./train_yolo/darknet/darknet53.conv.74"
-)
-gdd.download_file_from_google_drive(
- file_id=sppe_trained_weight, dest_path="./train_sppe/exp/coco/demo/model_10.pkl"
-)
-gdd.download_file_from_google_drive(
- file_id=yolo_trained_weight,
- dest_path="./train_yolo/darknet/backup/demo/yolov3-mice_final.weights",
-)
-gdd.download_file_from_google_drive(file_id=demo_data, dest_path="./data/demo.mp4")
-gdd.download_file_from_google_drive(
- file_id=sample_training_data, dest_path="./data/sample_annotated_data.zip"
-)
-gdd.download_file_from_google_drive(
- file_id=scipy_data, dest_path="../../UI/data/scipy.data"
-)
+# You can prepend https://drive.google.com/file/d/ to any of the following file IDs and download the files manually in a web browser.
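+# e.g. https://drive.google.com/file/d/1OPORTWB2cwd5YTVBX-NE8fsauZJWsrtW for the sppe pretrained weights.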
+sppe_pretrain_weight = '1OPORTWB2cwd5YTVBX-NE8fsauZJWsrtW'
+yolo_pretrain_weight = '1g8uJjK7EOlqrUCmjZTtCegwnNsBig6zn'
+sppe_trained_weight = '1_BwtYySpX9uWDgdwqw0UEppyMYYv1gkJ'
+yolo_trained_weight = '13zXkuZ4dNm3ZOwstr1sSWKOOzJ19XZpN'
+demo_data = '1N0JjazqW6JmBheLrn6RoDTSRXSPp1t4K'
+sample_training_data='15dR-vVCEsg2z7mEVzJOF9YDW6YioEU3N'
+scipy_data = '1c6vJQbAm_TcGyTCr1ah-x_R-iIYmT9TM'
+
+# If you download the files manually, remember to place each one at its corresponding dest_path below.
+gdd.download_file_from_google_drive(file_id=sppe_pretrain_weight,dest_path='./models/sppe/duc_se.pth')
+gdd.download_file_from_google_drive(file_id=yolo_pretrain_weight,dest_path='./train_yolo/darknet/darknet53.conv.74')
+gdd.download_file_from_google_drive(file_id=sppe_trained_weight,dest_path='./train_sppe/exp/coco/demo/model_10.pkl')
+gdd.download_file_from_google_drive(file_id=yolo_trained_weight,dest_path='./train_yolo/darknet/backup/demo/yolov3-mice_final.weights')
+gdd.download_file_from_google_drive(file_id=demo_data,dest_path='./data/demo.mp4')
+gdd.download_file_from_google_drive(file_id=sample_training_data,dest_path='./data/sample_annotated_data.zip')
+gdd.download_file_from_google_drive(file_id=scipy_data,dest_path='../../UI/data/scipy.data')
+
+# The following commands unzip the sample training data. You can also unzip the files manually.
+with zipfile.ZipFile('./data/sample_annotated_data.zip', 'r') as zip_ref:
+ zip_ref.extractall('./data/sample_annotated_data/')
with zipfile.ZipFile("./data/sample_annotated_data.zip", "r") as zip_ref:
zip_ref.extractall("./data/sample_annotated_data/")
diff --git a/Tracking/AlphaTracker/setting.py b/Tracking/AlphaTracker/setting.py
index 6ac1001..dd78663 100644
--- a/Tracking/AlphaTracker/setting.py
+++ b/Tracking/AlphaTracker/setting.py
@@ -1,5 +1,5 @@
import os
-
+import numpy as np
# general setting
gpu_id = 0 # the id of gpu that will be used
@@ -45,15 +45,13 @@
# video_full_path is the path to the video that will be tracked
video_full_path = "./data/demo.mp4"
video_paths = [
- "./data/demo.mp4",
-] # make sure video names are different from each other
-start_frame = 0 # id of the start frame of the video
-end_frame = 9737 # id of the last frame of the video
-max_pid_id_setting = 2 # number of mice in the video
-result_folder = "./track_result/" # path to the folder used to save the result
-remove_oriFrame = (
- False # whether to remove the original frame that generated from video
-)
+ './data/demo.mp4',
+ ] # make sure video names are different from each other
+start_frame = 0 # id of the start frame of the video
+end_frame = 300 # id of the last frame of the video
+max_pid_id_setting = 2 # number of mice in the video
+result_folder = './track_result/' # path to the folder used to save the result
+remove_oriFrame = False # whether to remove the original frames extracted from the video
vis_track_result = 1
# weights and match are parameter of tracking algorithm
@@ -72,3 +70,71 @@
AlphaTracker_root = os.path.abspath(AlphaTracker_root)
result_folder = os.path.abspath(result_folder)
+
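+# train.cfg (added in this change) contains a single Python dict literal; eval() below
+# turns it into a dict whose values override the default training settings defined above.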
+with open('train.cfg', 'r') as f:
+ dat = f.read()
+ if not dat:
+ print(f'error, train.cfg is empty')
+ try:
+ dict_state = eval(dat)
+ except Exception as e:
+ print(f'load train.cfg Exception: {e}')
+ print(dict_state)
+
+gpu_id = int(dict_state['gpu_id']) # the id of gpu that will be used
+
+# data related settings
+image_root_list = [dict_state['image_root_list']] # list of image folder paths to the RGB images for training
+json_file_list = [dict_state['json_file_list']] # list of paths to the json files that contain labels of the images for training
+num_mouse = [int(dict_state['num_mouse'])] # the number of mouse in the images in each image folder path
+exp_name = dict_state['exp_name'] # the name of the experiment
+num_pose = int(dict_state['num_pose']) # number of the pose that is labeled, remember to change self.nJoints in train_sppe/src/utils/dataset/coco.py
+
+pose_pair = np.array([[float(j) for j in i.split('-')] for i in dict_state['pose_pair'].split(',')])
+print('pose pair is:',pose_pair)
+train_val_split = float(dict_state['train_val_split']) # ratio of data that used to train model, the rest will be used for validation
+image_suffix = dict_state['image_suffix'] # suffix of the image, png or jpg
+
+
+# training hyperparameter setting
+# Protip: if your training does not give good enough tracking you can lower lr and increase epoch number
+# but lowering the lr too much can be bad for tracking quality as well.
+sppe_lr = float(dict_state['sppe_lr'])
+sppe_epoch = int(dict_state['sppe_epoch'])
+sppe_pretrain = dict_state['sppe_pretrain']
+sppe_batchSize = int(dict_state['sppe_batchSize'])
+yolo_lr = float(dict_state['yolo_lr'])
+yolo_iter = int(dict_state['yolo_iter']) ## if use pretrained model please make sure yolo_iter to be large enough to guarantee finetune is done
+yolo_pretrain = dict_state['yolo_pretrain'] # './train_yolo/darknet/darknet53.conv.74'
+yolo_batchSize = int(dict_state['yolo_batchSize'])
+
+
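+# track.cfg uses the same dict-literal format as train.cfg and overrides the demo video
+# and tracking settings defined above.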
+with open('track.cfg', 'r') as f:
+ dat = f.read()
+ if not dat:
+ print(f'error, track.cfg is empty')
+ try:
+ dict_state2 = eval(dat)
+ except Exception as e:
+ print(f'load track.cfg Exception: {e}')
+ print(dict_state2)
+
+
+# demo video setting
+# note video_full_path is for track.py, video_paths is for track_batch.py
+# video_full_path is the path to the video that will be tracked
+video_full_path = dict_state2['video_full_path']
+video_paths = [
+ dict_state2['video_full_path'],
+ ] # make sure video names are different from each other
+start_frame = int(dict_state2['start_frame']) # id of the start frame of the video
+end_frame = int(dict_state2['end_frame']) # id of the last frame of the video
+max_pid_id_setting = int(dict_state2['max_pid_id_setting']) # number of mice in the video
+result_folder = dict_state2['result_folder'] # path to the folder used to save the result
+remove_oriFrame = int(dict_state2['remove_oriFrame']) # whether to remove the original frames extracted from the video
+vis_track_result = int(dict_state2['vis_track_result'])
+
+# weights and match are parameter of tracking algorithm
+# following setting should work fine, no need to change
+weights = dict_state2['weights']
+match = int(dict_state2['match'])
diff --git a/Tracking/AlphaTracker/track.cfg b/Tracking/AlphaTracker/track.cfg
new file mode 100644
index 0000000..ecb81f4
--- /dev/null
+++ b/Tracking/AlphaTracker/track.cfg
@@ -0,0 +1 @@
+{'video_full_path': '/home/flexiv/AlphaTracker/Tracking/AlphaTracker/data/demo.mp4', 'start_frame': '0', 'end_frame': '300', 'max_pid_id_setting': '2', 'result_folder': './track_result/', 'remove_oriFrame': '0', 'vis_track_result': '1', 'weights': '0 6 0 0 0 0 ', 'match': '0'}
\ No newline at end of file
diff --git a/Tracking/AlphaTracker/train.cfg b/Tracking/AlphaTracker/train.cfg
new file mode 100644
index 0000000..a4efbb1
--- /dev/null
+++ b/Tracking/AlphaTracker/train.cfg
@@ -0,0 +1 @@
+{'image_root_list': '/home/flexiv/AlphaTracker/Tracking/AlphaTracker/data/sample_annotated_data/demo', 'json_file_list': '/home/flexiv/AlphaTracker/Tracking/AlphaTracker/data/sample_annotated_data/demo/train9.json', 'gpu_id': '0', 'num_mouse': '2', 'exp_name': 'demo1', 'num_pose': '4', 'pose_pair': '0-1,0-2,0-3', 'train_val_split': '0.9', 'image_suffix': 'jpg', 'sppe_lr': '0.0001', 'sppe_epoch': '10', 'sppe_pretrain': '', 'sppe_batchSize': '1', 'yolo_lr': '0.0005', 'yolo_iter': '60000', 'yolo_pretrain': '', 'yolo_batchSize': '1'}
\ No newline at end of file
diff --git a/Tracking/AlphaTracker/train_yolo/darknet/src/gemm.c b/Tracking/AlphaTracker/train_yolo/darknet/src/gemm.c
index 648027f..9e8b1d5 100644
--- a/Tracking/AlphaTracker/train_yolo/darknet/src/gemm.c
+++ b/Tracking/AlphaTracker/train_yolo/darknet/src/gemm.c
@@ -229,7 +229,7 @@ void time_gpu(int TA, int TB, int m, int k, int n)
clock_t start = clock(), end;
     for(i = 0; i<iter; ++i){
+====>Checked nvidia driver installation.
+
+====>Checked anaconda installation.
diff --git a/res/1_120x74.png b/res/1_120x74.png
new file mode 100644
index 0000000..0ba219d
Binary files /dev/null and b/res/1_120x74.png differ
diff --git a/res/2_120x74.jpg b/res/2_120x74.jpg
new file mode 100644
index 0000000..c715449
Binary files /dev/null and b/res/2_120x74.jpg differ
diff --git a/res/3_120x74.png b/res/3_120x74.png
new file mode 100644
index 0000000..38f26c9
Binary files /dev/null and b/res/3_120x74.png differ
diff --git a/res/4_120x74.png b/res/4_120x74.png
new file mode 100644
index 0000000..f73a2ca
Binary files /dev/null and b/res/4_120x74.png differ
diff --git a/res/5_120x74.png b/res/5_120x74.png
new file mode 100644
index 0000000..9024d41
Binary files /dev/null and b/res/5_120x74.png differ
diff --git a/res/About_72px.ico b/res/About_72px.ico
new file mode 100644
index 0000000..916715e
Binary files /dev/null and b/res/About_72px.ico differ
diff --git a/scripts/behavior.sh b/scripts/behavior.sh
new file mode 100644
index 0000000..479200f
--- /dev/null
+++ b/scripts/behavior.sh
@@ -0,0 +1,20 @@
+#!/bin/bash
+
+# if behavior button is clicked, this script will be called
+
+echo behavior start
+
+
+. ~/anaconda3/etc/profile.d/conda.sh
+
+conda activate alphatracker
+
+cd ./BehavioralClustering
+
+bash run_all.sh
+
+python fft_main_sep_twoMiceInteract.py
+
+
+echo behavior over
+
diff --git a/install.sh b/scripts/install.sh
similarity index 74%
rename from install.sh
rename to scripts/install.sh
index 8bac63f..532cd01 100644
--- a/install.sh
+++ b/scripts/install.sh
@@ -1,18 +1,18 @@
+#!/bin/bash
# if install button is clicked, this script will be called
echo install start
-conda create -n alphatracker python=3.8 -y
+. ~/anaconda3/etc/profile.d/conda.sh
-echo ". ~/anaconda3/etc/profile.d/conda.sh" >> ~/.bashrc
-source ~/.bashrc
+conda create -n alphatracker python=3.8 -y
conda activate alphatracker
# conda env update --file environment.yml
# Install pytorch
-conda install pytorch==1.9.0 torchvision==0.10.0 torchaudio==0.9.0 cudatoolkit=11.1 -c pytorch -c conda-forge -y
+conda install pytorch==1.8.0 torchvision==0.9.0 torchaudio==0.8.0 cudatoolkit=11.1 -c pytorch -c conda-forge -y
export PATH=/usr/local/cuda/bin/:$PATH
export LD_LIBRARY_PATH=/usr/local/cuda/lib64/:$LD_LIBRARY_PATH
diff --git a/scripts/install1.sh b/scripts/install1.sh
new file mode 100755
index 0000000..4c8a2a5
--- /dev/null
+++ b/scripts/install1.sh
@@ -0,0 +1,10 @@
+#!/bin/bash
+nvidia-smi
+echo '====>Checked nvidia driver installation.'
+echo
+. ~/anaconda3/etc/profile.d/conda.sh
+echo '====>Checked anaconda installation.'
+
+
+
+
diff --git a/scripts/install2.sh b/scripts/install2.sh
new file mode 100755
index 0000000..1575cf3
--- /dev/null
+++ b/scripts/install2.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+. ~/anaconda3/etc/profile.d/conda.sh
+
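+# Returns grep's exit status: 0 if a conda environment matching the pattern already exists.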
+find_in_conda_env(){
+ conda env list | grep "${@}" >/dev/null 2>/dev/null
+}
+
+conda config --append channels http://mirrors.tuna.tsinghua.edu.cn/anaconda/cloud/pytorch/
+conda clean -i
+
+if find_in_conda_env ".*alphatracker.*" ; then
+ conda activate alphatracker
+else
+ conda upgrade -n base -c defaults --override-channels conda
+ conda create -n alphatracker python=3.8 -y
+ conda activate alphatracker
+fi;
+
+
+# Install pytorch
+# conda install pytorch==1.8.0 torchvision==0.9.0 torchaudio==0.8.0 cudatoolkit=11.1 -c pytorch -c conda-forge -y
+
+# Replace the above command with the following two commands if you are located in mainland China.
+
+conda config --add channels http://mirrors.tuna.tsinghua.edu.cn/anaconda/cloud/pytorch/
+conda install pytorch==1.8.0 torchvision==0.9.0 torchaudio==0.8.0 cudatoolkit=11.1 -c conda-forge -y
diff --git a/scripts/install3.sh b/scripts/install3.sh
new file mode 100755
index 0000000..68966e2
--- /dev/null
+++ b/scripts/install3.sh
@@ -0,0 +1,10 @@
+#!/bin/bash
+. ~/anaconda3/etc/profile.d/conda.sh
+
+conda activate alphatracker
+
+export PATH=/usr/local/cuda/bin/:$PATH
+export LD_LIBRARY_PATH=/usr/local/cuda/lib64/:$LD_LIBRARY_PATH
+pip uninstall alphatracker -y
+pip uninstall alphatracker -y
+python setup.py build develop
diff --git a/scripts/install4.sh b/scripts/install4.sh
new file mode 100755
index 0000000..03797f6
--- /dev/null
+++ b/scripts/install4.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+. ~/anaconda3/etc/profile.d/conda.sh
+
+conda activate alphatracker
+
+export PATH=/usr/local/cuda/bin/:$PATH
+export LD_LIBRARY_PATH=/usr/local/cuda/lib64/:$LD_LIBRARY_PATH
+
+cd ./Tracking/AlphaTracker/train_yolo/darknet/
+make clean
+make -s
+cd ../../../../
diff --git a/scripts/install5.sh b/scripts/install5.sh
new file mode 100755
index 0000000..76cc93f
--- /dev/null
+++ b/scripts/install5.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+. ~/anaconda3/etc/profile.d/conda.sh
+
+conda activate alphatracker
+
+cd ./Tracking/AlphaTracker/
+python3 download.py
\ No newline at end of file
diff --git a/scripts/track.sh b/scripts/track.sh
new file mode 100644
index 0000000..ca158c7
--- /dev/null
+++ b/scripts/track.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+
+# if track button is clicked, this script will be called
+
+echo track start
+
+
+. ~/anaconda3/etc/profile.d/conda.sh
+
+conda activate alphatracker
+
+cd ./Tracking/AlphaTracker/
+
+python track.py
+
+
+echo track over
+
+
diff --git a/scripts/train.sh b/scripts/train.sh
new file mode 100644
index 0000000..d558d47
--- /dev/null
+++ b/scripts/train.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+
+# if train button is clicked, this script will be called
+
+echo train start
+
+
+. ~/anaconda3/etc/profile.d/conda.sh
+
+conda activate alphatracker
+
+cd ./Tracking/AlphaTracker/
+
+python train.py
+
+
+echo train over
+
+
diff --git a/scripts/uninstall.sh b/scripts/uninstall.sh
new file mode 100644
index 0000000..cf29742
--- /dev/null
+++ b/scripts/uninstall.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+# if uninstall button is clicked, this script will be called
+
+. ~/anaconda3/etc/profile.d/conda.sh
+
+conda remove --name alphatracker --all -y
+
+
+echo uninstall over
+
+
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..479a1aa
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,2 @@
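+# Use the Douban PyPI mirror for easy_install (helpful for installs from mainland China, cf. scripts/install2.sh).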
+[easy_install]
+index_url = https://pypi.doubanio.com/simple
diff --git a/setup.py b/setup.py
index 860ddcc..59a7308 100644
--- a/setup.py
+++ b/setup.py
@@ -112,26 +112,13 @@ def get_version():
setup_requires=["pytest-runner", "numpy"],
tests_require=["pytest"],
install_requires=[
- "numpy",
- "scipy",
- "opencv-python",
- "matplotlib",
- "tqdm",
- "googledrivedownloader",
- "h5py",
- "pandas",
- "nibabel",
- "pycocotools",
- "tensorboardx",
- "munkres",
- "visdom",
- "scikit-learn",
- "seaborn",
- "umap",
+ 'numpy', 'scipy', 'opencv-python', 'matplotlib',
+ 'tqdm', 'googledrivedownloader', 'h5py', 'pandas==1.4.2',
+ 'nibabel', 'pycocotools', 'tensorboardx', 'munkres',
+ 'visdom', 'scikit-learn', 'seaborn', 'umap', 'requests'
],
- zip_safe=False,
- )
-
- print("\n Install `torchsample` ...")
- cmd = "python -m pip install git+https://github.com/ZexinChen/torchsample"
+ zip_safe=False)
+
+ print('\n Install `torchsample` ...')
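+    # install the local copy bundled under torchsample/ in this repo instead of fetching it from GitHub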
+ cmd = 'python -m pip install ./torchsample'
os.system(cmd)
diff --git a/state.txt b/state.txt
new file mode 100644
index 0000000..fd9ba2d
--- /dev/null
+++ b/state.txt
@@ -0,0 +1 @@
+{'btn0': 1, 'btn1': 1, 'btn2': 1, 'btn3': 1, 'btn4': 0}
\ No newline at end of file
diff --git a/torchsample/.gitattributes b/torchsample/.gitattributes
new file mode 100644
index 0000000..dfe0770
--- /dev/null
+++ b/torchsample/.gitattributes
@@ -0,0 +1,2 @@
+# Auto detect text files and perform LF normalization
+* text=auto
diff --git a/torchsample/.gitignore b/torchsample/.gitignore
new file mode 100755
index 0000000..d4011b9
--- /dev/null
+++ b/torchsample/.gitignore
@@ -0,0 +1,22 @@
+.git/
+sandbox/
+
+*.DS_Store
+*__pycache__*
+__pycache__
+*.pyc
+.ipynb_checkpoints/
+*.ipynb_checkpoints/
+*.bkbn
+.spyderworkspace
+.spyderproject
+
+# setup.py working directory
+build
+# sphinx build directory
+doc/_build
+# setup.py dist directory
+dist
+# Egg metadata
+*.egg-info
+.eggs
diff --git a/torchsample/LICENSE.txt b/torchsample/LICENSE.txt
new file mode 100755
index 0000000..6de4ea3
--- /dev/null
+++ b/torchsample/LICENSE.txt
@@ -0,0 +1,40 @@
+COPYRIGHT
+
+Some contributions by Nicholas Cullen:
+Copyright (c) 2017, Nicholas Cullen:
+All rights reserved.
+
+Some contributions by François Chollet:
+Copyright (c) 2015, François Chollet.
+All rights reserved.
+
+Some contributions by Google:
+Copyright (c) 2015, Google, Inc.
+All rights reserved.
+
+All other contributions:
+Copyright (c) 2015, the respective contributors.
+All rights reserved.
+
+
+LICENSE
+
+The MIT License (MIT)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/torchsample/README.md b/torchsample/README.md
new file mode 100755
index 0000000..31b9d02
--- /dev/null
+++ b/torchsample/README.md
@@ -0,0 +1,189 @@
+# High-Level Training, Data Augmentation, and Utilities for Pytorch
+
+[v0.1.3](https://github.com/ncullen93/torchsample/releases) JUST RELEASED - contains significant improvements, bug fixes, and additional
+support. Get it from the releases, or pull the master branch.
+
+This package provides a few things:
+- A high-level module for Keras-like training with callbacks, constraints, and regularizers.
+- Comprehensive data augmentation, transforms, sampling, and loading
+- Utility tensor and variable functions so you don't need numpy as often
+
+Have any feature requests? Submit an issue! I'll make it happen. Specifically,
+any data augmentation, data loading, or sampling functions.
+
+Want to contribute? Check the [issues page](https://github.com/ncullen93/torchsample/issues)
+ for those tagged with [contributions welcome].
+
+## ModuleTrainer
+The `ModuleTrainer` class provides a high-level training interface which abstracts
+away the training loop while providing callbacks, constraints, initializers, regularizers,
+and more.
+
+Example:
+```python
+from torchsample.modules import ModuleTrainer
+
+# Define your model EXACTLY as normal
+class Network(nn.Module):
+ def __init__(self):
+ super(Network, self).__init__()
+ self.conv1 = nn.Conv2d(1, 32, kernel_size=3)
+ self.conv2 = nn.Conv2d(32, 64, kernel_size=3)
+ self.fc1 = nn.Linear(1600, 128)
+ self.fc2 = nn.Linear(128, 10)
+
+ def forward(self, x):
+ x = F.relu(F.max_pool2d(self.conv1(x), 2))
+ x = F.relu(F.max_pool2d(self.conv2(x), 2))
+ x = x.view(-1, 1600)
+ x = F.relu(self.fc1(x))
+ x = F.dropout(x, training=self.training)
+ x = self.fc2(x)
+ return F.log_softmax(x)
+
+model = Network()
+trainer = ModuleTrainer(model)
+
+trainer.compile(loss='nll_loss',
+ optimizer='adadelta')
+
+trainer.fit(x_train, y_train,
+ val_data=(x_test, y_test),
+ num_epoch=20,
+ batch_size=128,
+ verbose=1)
+```
+You also have access to the standard evaluation and prediction functions:
+
+```python
+loss = model.evaluate(x_train, y_train)
+y_pred = model.predict(x_train)
+```
+Torchsample provides a wide range of callbacks, generally mimicking the interface
+found in `Keras`:
+
+- `EarlyStopping`
+- `ModelCheckpoint`
+- `LearningRateScheduler`
+- `ReduceLROnPlateau`
+- `CSVLogger`
+
+```python
+from torchsample.callbacks import EarlyStopping
+
+callbacks = [EarlyStopping(monitor='val_loss', patience=5)]
+model.set_callbacks(callbacks)
+```
+
+Torchsample also provides regularizers:
+
+- `L1Regularizer`
+- `L2Regularizer`
+- `L1L2Regularizer`
+
+
+and constraints:
+- `UnitNorm`
+- `MaxNorm`
+- `NonNeg`
+
+Both regularizers and constraints can be selectively applied on layers using regular expressions and the `module_filter`
+argument. Constraints can be explicit (hard) constraints applied at an arbitrary batch or
+epoch frequency, or they can be implicit (soft) constraints similar to regularizers
+where the constraint deviation is added as a penalty to the total model loss.
+
+```python
+from torchsample.constraints import MaxNorm, NonNeg
+from torchsample.regularizers import L1Regularizer
+
+# hard constraint applied every 5 batches
+hard_constraint = MaxNorm(value=2., frequency=5, unit='batch', module_filter='*fc*')
+# implicit constraint added as a penalty term to model loss
+soft_constraint = NonNeg(lagrangian=True, scale=1e-3, module_filter='*fc*')
+constraints = [hard_constraint, soft_constraint]
+model.set_constraints(constraints)
+
+regularizers = [L1Regularizer(scale=1e-4, module_filter='*conv*')]
+model.set_regularizers(regularizers)
+```
+
+You can also fit directly on a `torch.utils.data.DataLoader` and can have
+a validation set as well:
+
+```python
+from torchsample import TensorDataset
+from torch.utils.data import DataLoader
+
+train_dataset = TensorDataset(x_train, y_train)
+train_loader = DataLoader(train_dataset, batch_size=32)
+
+val_dataset = TensorDataset(x_val, y_val)
+val_loader = DataLoader(val_dataset, batch_size=32)
+
+trainer.fit_loader(train_loader, val_loader=val_loader, num_epoch=100)
+```
+
+## Utility Functions
+Finally, torchsample provides a few utility functions not commonly found:
+
+### Tensor Functions
+- `th_iterproduct` (mimics itertools.product)
+- `th_gather_nd` (N-dimensional version of torch.gather)
+- `th_random_choice` (mimics np.random.choice)
+- `th_pearsonr` (mimics scipy.stats.pearsonr)
+- `th_corrcoef` (mimics np.corrcoef)
+- `th_affine2d` and `th_affine3d` (affine transforms on torch.Tensors)
+
+### Variable Functions
+- `F_affine2d` and `F_affine3d`
+- `F_map_coordinates2d` and `F_map_coordinates3d`
+
+## Data Augmentation and Datasets
+The torchsample package provides a ton of good data augmentation and transformation
+tools which can be applied during data loading. The package also provides the flexible
+`TensorDataset` and `FolderDataset` classes to handle most dataset needs.
+
+### Torch Transforms
+These transforms work directly on torch tensors
+
+- `Compose()`
+- `AddChannel()`
+- `SwapDims()`
+- `RangeNormalize()`
+- `StdNormalize()`
+- `Slice2D()`
+- `RandomCrop()`
+- `SpecialCrop()`
+- `Pad()`
+- `RandomFlip()`
+- `ToTensor()`
+
+### Affine Transforms
+![Original](https://github.com/ncullen93/torchsample/blob/master/examples/imgs/orig1.png "Original") ![Transformed](https://github.com/ncullen93/torchsample/blob/master/examples/imgs/tform1.png "Transformed")
+
+The following transforms perform affine (or affine-like) transforms on torch tensors.
+
+- `Rotate()`
+- `Translate()`
+- `Shear()`
+- `Zoom()`
+
+We also provide a class for stringing multiple affine transformations together so that only one interpolation takes place:
+
+- `Affine()`
+- `AffineCompose()`
+
+### Datasets and Sampling
+We provide the following datasets which provide general structure and iterators for sampling from and using transforms on in-memory or out-of-memory data:
+
+- `TensorDataset()`
+
+- `FolderDataset()`
+
+
+## Acknowledgements
+Thank you to the following people and contributors:
+- All Keras contributors
+- @deallynomore
+- @recastrodiaz
+
diff --git a/torchsample/examples/Transforms with Pytorch and Torchsample.ipynb b/torchsample/examples/Transforms with Pytorch and Torchsample.ipynb
new file mode 100755
index 0000000..5236fd3
--- /dev/null
+++ b/torchsample/examples/Transforms with Pytorch and Torchsample.ipynb
@@ -0,0 +1,1118 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Overview\n",
+ "I will go over the following topics using the *pytorch* and *torchsample* packages:\n",
+ "\n",
+ "- **Dataset Creation and Loading**\n",
+ " - How you create pytorch-suitable datasets from arrays or from file in a variety of data formats, including from numpy arrays, from arbitrary data formats stored in folders, and from a list of files in a single CSV or TXT file.\n",
+ "- **Dataset Sampling and Feeding**\n",
+ " - How you create pytorch-suitable dataset iterators and actually sample from these iterators in a variety of ways.\n",
+ "- **Data Transforms and Augmentation**\n",
+ " - How you alter the input and/or target samples in real-time to ensure your model is robust, including how to do augmentation directly on the GPU.\n",
+ "\n",
+ "This tutorial will be almost solely about Transforms.\n",
+ "\n",
+ "I will be using 4 different datasets in this tutorial. They are all unique and show a different side of the process. You can skip any of the datasets if you wish - all code for each dataset will be contained to isolated code cells:\n",
+ "1. MNIST for 2D-Grayscale processing\n",
+ "2. CIFAR-10 for 2D-Color processing\n",
+ "3. Arrays saved in arbitrary file paths with teh file paths and labels stored in a CSV file for kaggle-like processing.\n",
+ "4. A Brain Image and it Segmented brain Mask for 3D-Image + Segmentation processing (NOTE: requires the *nilearn* package).\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Understanding `Datasets`, `DataLoaders`, and `Transforms` in Pytorch\n",
+ "\n",
+ "When it comes to loading and feeding data in pytorch, there are three main concepts: Datasets, DataLoaders, and Transforms.\n",
+ "\n",
+ "**Transforms** are small classes which take in one or more arrays, perform some operation, then return the altered version of the array(s). They almost always belong to a `Dataset` which carries out those transforms, but can belong to more than one dataset or can actually stand on their own. This is where a lot of the custom user-specific code happens. Thankfully, `Transforms` are very easy to build as I will show soon.\n",
+ "\n",
+ "**Datasets** actually *store* your data arrays, or the filepaths to your data if loading from file. If you can load your data completely into memory - such as with MNIST - you should use the `TensorDataset` class. If you **cant** load your complete data into memory - such as with Imagenet - you should use the `FolderDataset`. I will describe these later, including how to create your own dataset (the `CSVDataset` class to load from a csv)\n",
+ "\n",
+ "**DataLoaders** are used to actually *sample* and *iter* through your data. This is where all the multi-processing magic happens to load your data in multiple threads and avoid starving your model. A `DataLoader` **always** takes a `Dataset` as input (this is object composition), along with a few other parameters such as the batch size. You will basically *NEVER* need to alter the DataLoaders - just use the built-in ones.\n",
+ "\n",
+ "The order in which I presented these topics above are usually the order in which you will create the objects! First, make your transforms. Next, make your `Dataset` and pass in the transforms. Next, make your `DataLoader` and pass in your `Dataset`.\n",
+ "\n",
+ "Here is a small pseudo-code example of the process:\n",
+ "\n",
+ "```python\n",
+ "# Create the transforms\n",
+ "my_transform = Compose([SomeTransform(), SomeOtherTransform()])\n",
+ "# Create the dataset - pass in your arrays and the transforms\n",
+ "my_dataset = Dataset(x_array, y_array, transform=my_transform)\n",
+ "# Create the Dataloader - pass in your dataset and some other args\n",
+ "my_loader = DataLoader(my_dataset, batch_size=32)\n",
+ "\n",
+ "# Iterate through the loader\n",
+ "for x, y in my_loader:\n",
+ " do_something(x, y)\n",
+ "```"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# 0. Loading our test data\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "# some imports we will need\n",
+ "import os\n",
+ "import matplotlib.pyplot as plt\n",
+ "\n",
+ "import torch as th\n",
+ "from torchvision import datasets"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### 0a. Load MNIST\n",
+ "MNIST is a collection of 28x28 images of Digits between 0-9, with 60k training images and 10k testing images. The images are grayscale, so there is only a single channel dimension (1x28x28)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {
+ "collapsed": false,
+ "scrolled": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Training Data Size: torch.Size([60000, 28, 28]) - torch.Size([60000])\n",
+ "Testing Data Size: torch.Size([10000, 28, 28]) - torch.Size([10000])\n"
+ ]
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAEICAYAAACQ6CLfAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAEAFJREFUeJzt3X2sVHV+x/HPZ1GaiChSUyT4gBiLVeuyKeLG0HWNsj5U\ng6gxS2JKo5E1ka2btGQNTbqaFmNWoZVotrDxcXfLulm1otmuD6Di1pZ6RVTE+lCDWegF1iIK+FS4\n3/4x57p38c5vLjNn5gz3934lkztzvufM+WbCh/M483NECEB+vlR1AwCqQfiBTBF+IFOEH8gU4Qcy\nRfiBTBF+IFOE/wBne6Ptj23vtL3D9vO2r7X9pQHz3Gv77we8Hmn7b22/YXu37c22/9X2N/Z533Nt\nL7C9q3h8YnvvgNevDaG/g2xHsZ7+5f6p/E8C+4vwDw8XR8RoScdJukXSdyXdlZj/55JmSvpzSUdI\nOl7S7ZL+bN8ZI+LmiDg0Ig6VdK2kf+9/HRGn7EePpwxY7tr9WA5tclDVDaA8EfGBpBW2t0j6D9uL\nImL9wHlsnytphqQTI2LTgNIviwcywZZ/GIqI/5S0SdKfDlI+V9KafYLfkuKQ4a8bzPa87S22f277\nuLLWjeYR/uHrfySNHWT6kZK29L+wPbY4V/CB7U+aWVFEXBARt9Up75X0NUkTJf2RpN+otncyopl1\noTyEf/iaIGn7INP/V9L4/hcRsT0ixkj6E0m/V3YTUfNcRHwWEe9L+ktJf1g8UCHCPwzZPl218P9q\nkPJKSafbPrqzXX2u/2ukrmj9KBD+YcT2YbYvkvRTST+OiFf3nScinpD0tKR/sX1GcdnvYElfbVNP\nf2z7y7ZH2B4t6R8lbZT0ZjvWh6HjbP/w8KjtPZL6JG2QtFhS6lr6LEkLJP1Yvz08eFXSec2s3PYT\nkp6KiO8PUh4n6U5JR0vaLenfJF0UEXuaWRfKY37MA8gTu/1Apgg/kCnCD2SK8AOZ6ujZftucXQTa\nLCKGdA9FS1t+2+cXXwt92/YNrbwXgM5q+lJfcW/2m6p9Q2yTpBckzY6IDYll2PIDbdaJLf80SW9H\nxDsR8Zlqd5XNbOH9AHRQK+GfIOnXA15vKqb9DttzbffY7mlhXQBK1vYTfhGxTNIyid1+oJu0suXf\nLOmYAa+PLqYBOAC0Ev4XJJ1o+3jbIyV9U9KKctoC0G5N7/ZHxB7b8yQ9LmmEpLsjouGvuQLoDh39\nVh/H/ED7deQmHwAHLsIPZIrwA5ki/ECmCD+QKcIPZIrwA5ki/ECmCD+QKcIPZIrwA5ki/ECmCD+Q\nKcIPZIrwA5ki/ECmCD+QKcIPZIrwA5ki/ECmCD+QKcIPZIrwA5ki/ECmCD+QKcIPZIrwA5ki/ECm\nCD+QqaaH6MaBYcSIEcn64Ycf3tb1z5s3r27tkEMOSS47efLkZP26665L1m+77ba6tdmzZyeX/eST\nT5L1W265JVm/6aabkvVu0FL4bW+UtFPSXkl7ImJqGU0BaL8ytvxnR8R7JbwPgA7imB/IVKvhD0lP\n2X7R9tzBZrA913aP7Z4W1wWgRK3u9k+PiM22/0DSk7b/KyJWD5whIpZJWiZJtqPF9QEoSUtb/ojY\nXPzdJulhSdPKaApA+zUdftujbI/ufy7pG5LWl9UYgPZqZbd/nKSHbfe/zz9HxC9L6WqYOfbYY5P1\nkSNHJutnnnlmsj59+vS6tTFjxiSXveyyy5L1Km3atClZX7JkSbI+a9asurWdO3cml3355ZeT9Wef\nfTZZPxA0Hf6IeEfSl0vsBUAHcakPyBThBzJF+IFMEX4gU4QfyJQjOnfT3XC9w2/KlCnJ+qpVq5L1\ndn+ttlv19fUl61dddVWyvmvXrqbX3dvbm6y///77yfobb7zR9LrbLSI8lPnY8gOZIvxApgg/kCnC\nD2SK8AOZIvxApgg/kCmu85dg7NixyfqaNWuS9UmTJpXZTqka9b5jx45k/eyzz65b++yzz5LL5nr/\nQ6u4zg8gifADmSL8QKYIP5Apwg9kivADmSL8QKYYorsE27dvT9bnz5+frF900UXJ+ksvvZSsN/oJ\n65R169Yl6zNmzEjWd+/enayfcsopdWvXX399clm0F1t+IFOEH8gU4QcyRfiBTBF+IFOEH8gU4Qcy\nxff5u8Bhhx2WrDcaTnrp0qV1a1dffXVy2SuvvDJZX758ebKO7lPa9/lt3217m+31A6aNtf2k7beK\nv0e00iyAzhvKbv+9ks7fZ9oNklZGxImSVhavARxAGoY/IlZL2vf+1ZmS7iue3yfpkpL7AtBmzd7b\nPy4i+gc72yJpXL0Zbc+VNLfJ9QBok5a/2BMRkTqRFxHLJC2TOOEHdJNmL/VttT1ekoq/28prCUAn\nNBv+FZLmFM/nSHqknHYAdErD3X7byyV9XdKRtjdJ+p6kWyT9zPbVkt6VdEU7mxzuPvzww5aW/+CD\nD5pe9pprrknWH3jggWS9r6+v6XWjWg3DHxGz65TOKbkXAB3E7b1Apgg/kCnCD2SK8AOZIvxApvhK\n7zAwatSourVHH300uexZZ52VrF9wwQXJ+hNPPJGso/MYohtAEuEHMkX4gUwRfiBThB/IFOEHMkX4\ngUxxnX+YO+GEE5L1tWvXJus7duxI1p9++ulkvaenp27tzjvvTC7byX+bwwnX+QEkEX4gU4QfyBTh\nBzJF+IFMEX4gU4QfyBTX+TM3a9asZP2ee+5J1kePHt30uhcsWJCs33///cl6b29vsp4rrvMDSCL8\nQKYIP5Apwg9kivADmSL8QKYIP5AprvMj6dRTT03WFy9enKyfc07zgzkvXbo0WV+4cGGyvnnz5qbX\nfSAr7Tq/7bttb7O9fsC0G21vtr2ueFzYSrMAOm8ou/33Sjp/kOn/EBFTiscvym0LQLs1DH9ErJa0\nvQO9AOigVk74fdv2K8VhwRH1ZrI913aP7fo/5gag45oN/w8kTZI0RVKvpEX1ZoyIZRExNSKmNrku\nAG3QVPgjYmtE7I2IPkk/lDSt3LYAtFtT4bc9fsDLWZLW15sXQHdqeJ3f9nJJX5d0pKStkr5XvJ4i\nKSRtlPStiGj45Wqu8w8/Y8aMSdYvvvjiurVGvxVgpy9Xr1q1KlmfMWNGsj5cDfU6/0FDeKPZg0y+\na787AtBVuL0XyBThBzJF+IFMEX4gU4QfyBRf6UVlPv3002T9oIPSF6P27NmTrJ933nl1a88880xy\n2QMZP90NIInwA5ki/ECmCD+QKcIPZIrwA5ki/ECmGn6rD3k77bTTkvXLL788WT/99NPr1hpdx29k\nw4YNyfrq1atbev/hji0/kCnCD2SK8AOZIvxApgg/kCnCD2SK8AOZ4jr/MDd58uRkfd68ecn6pZde\nmqwfddRR+93TUO3duzdZ7+1N/1p8X19fme0MO2z5gUwRfiBThB/IFOEHMkX4gUwRfiBThB/IVMPr\n/LaPkXS/pHGqDcm9LCJutz1W0gOSJ
qo2TPcVEfF++1rNV6Nr6bNnDzaQck2j6/gTJ05spqVS9PT0\nJOsLFy5M1lesWFFmO9kZypZ/j6S/ioiTJX1V0nW2T5Z0g6SVEXGipJXFawAHiIbhj4jeiFhbPN8p\n6XVJEyTNlHRfMdt9ki5pV5MAyrdfx/y2J0r6iqQ1ksZFRP/9lVtUOywAcIAY8r39tg+V9KCk70TE\nh/ZvhwOLiKg3Dp/tuZLmttoogHINactv+2DVgv+TiHiomLzV9viiPl7StsGWjYhlETE1IqaW0TCA\ncjQMv2ub+LskvR4RiweUVkiaUzyfI+mR8tsD0C4Nh+i2PV3Sc5JeldT/HckFqh33/0zSsZLeVe1S\n3/YG75XlEN3jxqVPh5x88snJ+h133JGsn3TSSfvdU1nWrFmTrN966611a488kt5e8JXc5gx1iO6G\nx/wR8StJ9d7snP1pCkD34A4/IFOEH8gU4QcyRfiBTBF+IFOEH8gUP909RGPHjq1bW7p0aXLZKVOm\nJOuTJk1qqqcyPP/888n6okWLkvXHH388Wf/444/3uyd0Blt+IFOEH8gU4QcyRfiBTBF+IFOEH8gU\n4Qcylc11/jPOOCNZnz9/frI+bdq0urUJEyY01VNZPvroo7q1JUuWJJe9+eabk/Xdu3c31RO6H1t+\nIFOEH8gU4QcyRfiBTBF+IFOEH8gU4Qcylc11/lmzZrVUb8WGDRuS9cceeyxZ37NnT7Ke+s79jh07\nkssiX2z5gUwRfiBThB/IFOEHMkX4gUwRfiBThB/IlCMiPYN9jKT7JY2TFJKWRcTttm+UdI2k3xSz\nLoiIXzR4r/TKALQsIjyU+YYS/vGSxkfEWtujJb0o6RJJV0jaFRG3DbUpwg+031DD3/AOv4joldRb\nPN9p+3VJ1f50DYCW7dcxv+2Jkr4iaU0x6du2X7F9t+0j6iwz13aP7Z6WOgVQqoa7/Z/PaB8q6VlJ\nCyPiIdvjJL2n2nmAv1Pt0OCqBu/Bbj/QZqUd80uS7YMlPSbp8YhYPEh9oqTHIuLUBu9D+IE2G2r4\nG+7227akuyS9PjD4xYnAfrMkrd/fJgFUZyhn+6dLek7Sq5L6iskLJM2WNEW13f6Nkr5VnBxMvRdb\nfqDNSt3tLwvhB9qvtN1+AMMT4QcyRfiBTBF+IFOEH8gU4QcyRfiBTBF+IFOEH8gU4QcyRfiBTBF+\nIFOEH8gU4Qcy1ekhut+T9O6A10cW07pRt/bWrX1J9NasMns7bqgzdvT7/F9Yud0TEVMrayChW3vr\n1r4kemtWVb2x2w9kivADmao6/MsqXn9Kt/bWrX1J9NasSnqr9JgfQHWq3vIDqAjhBzJVSfhtn2/7\nDdtv276hih7qsb3R9qu211U9vmAxBuI22+sHTBtr+0nbbxV/Bx0jsaLebrS9ufjs1tm+sKLejrH9\ntO0Ntl+zfX0xvdLPLtFXJZ9bx4/5bY+Q9KakGZI2SXpB0uyI2NDRRuqwvVHS1Iio/IYQ21+TtEvS\n/f1Dodn+vqTtEXFL8R/nERHx3S7p7Ubt57Dtbeqt3rDyf6EKP7syh7svQxVb/mmS3o6IdyLiM0k/\nlTSzgj66XkSslrR9n8kzJd1XPL9PtX88HVent64QEb0RsbZ4vlNS/7DylX52ib4qUUX4J0j69YDX\nm1ThBzCIkPSU7Rdtz626mUGMGzAs2hZJ46psZhANh23vpH2Gle+az66Z4e7Lxgm/L5oeEVMkXSDp\numL3titF7Zitm67V/kDSJNXGcOyVtKjKZoph5R+U9J2I+HBgrcrPbpC+Kvncqgj/ZknHDHh9dDGt\nK0TE5uLvNkkPq3aY0k229o+QXPzdVnE/n4uIrRGxNyL6JP1QFX52xbDyD0r6SUQ8VEyu/LMbrK+q\nPrcqwv+CpBNtH297pKRvSlpRQR9fYHtUcSJGtkdJ+oa6b+jxFZLmFM/nSHqkwl5+R7cM215vWHlV\n/Nl13XD3EdHxh6QLVTvj/9+S/qaKHur0NUnSy8Xjtap7k7Rctd3A/1Pt3MjVkn5f0kpJb0l6StLY\nLurtR6oN5f6KakEbX1Fv01XbpX9F0rricWHVn12ir0o+N27vBTLFCT8gU4QfyBThBzJF+IFMEX4g\nU4QfyBThBzL1/yEeOmyGHbR5AAAAAElFTkSuQmCC\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "# Change this to where you want to save the data\n",
+ "SAVE_DIR = os.path.expanduser('~/desktop/data/MNIST/')\n",
+ "# train data\n",
+ "mnist_train = datasets.MNIST(SAVE_DIR, train=True, download=True)\n",
+ "x_train_mnist, y_train_mnist = mnist_train.train_data.type(th.FloatTensor), mnist_train.train_labels\n",
+ "# test data\n",
+ "mnist_test = datasets.MNIST(SAVE_DIR, train=False, download=True)\n",
+ "x_test_mnist, y_test_mnist = mnist_test.test_data.type(th.FloatTensor), mnist_test.test_labels\n",
+ "\n",
+ "print('Training Data Size: ' ,x_train_mnist.size(), '-', y_train_mnist.size())\n",
+ "print('Testing Data Size: ' ,x_test_mnist.size(), '-', y_test_mnist.size())\n",
+ "\n",
+ "plt.imshow(x_train_mnist[0].numpy(), cmap='gray')\n",
+ "plt.title('DIGIT: %i' % y_train_mnist[0])\n",
+ "plt.show()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### 0b. CIFAR-10\n",
+ "CIFAR10 is an image recognition dataset, with images of 3x98x98 size."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Files already downloaded and verified\n",
+ "Files already downloaded and verified\n",
+ "Training Data Size: (50000, 32, 32, 3) - (50000,)\n",
+ "Testing Data Size: (10000, 32, 32, 3) - (10000,)\n"
+ ]
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAEICAYAAACQ6CLfAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztnW2QXGd15/+n36Z73jWaF8kj2UK27MWALUBxOYbNwpJQ\nxpsCU1vFhqQoPrBrthaosJBKudit4OTDFmEXsnxISMTixOwSAhteiyKbgJesk8CyyGD8ji3LMtJI\nGo00M5ru6Z5+Pfuhr3fl4fk/05Y0PZLv/1elmtZz+rn3uU/fc2/f59/nHHN3CCHSR2arByCE2Brk\n/EKkFDm/EClFzi9ESpHzC5FS5PxCpBQ5fwows3vM7L9t9TjE5YWc/yWCmf26mR0ys4qZnTSzvzKz\n12/1uJ7HzH7TzJ41s1Uze8LMrt/qMaUdOf9LADP7EID/DOA/AJgBcDWAPwTw1q0c1/OY2b8E8B4A\n/wzAMIBfBXBmSwcl5PxXOmY2BuD3ALzP3b/i7qvu3nT3b7r7b5M+/93MTpnZOTN7wMxecZ7tDjN7\n3MzKZjZnZr+VtE+a2TfNbNnMFs3s78xsw/Mnec9HAfxbd3/cuzzj7ouXZgbEhSLnv/L5RQBFAF99\nEX3+CsA+ANMAfgTg8+fZPgvgve4+AuCVAP5n0v5hAMcBTKH77eIjABwAzOyPzOyPyL52Jf9eaWbH\nkq/+v9vLhUNsLrmtHoC4aLYDOOPurV47uPu9z782s3sALJnZmLufA9AEcKOZ/cTdlwAsJW9tAtgJ\n4Bp3Pwzg787b3r+J7G5X8vfNAF4FYBzA36B7IflMr2MWlx5dfa98zgKYNLOeLuRmljWzj5nZM2a2\nAuBoYppM/v5zAHcAeM7M/peZ/WLS/h8BHAbwN2Z2xMzu7nF8teTvx9192d2PAviTZB9iC5HzX/l8\nH0AdwJ09vv/XAbwNwC8DGAOwJ2k3AHD3H7r729B9JPgagC8l7WV3/7C770V3IfFDZvamHvb3UwAN\nJI8ICQolvQyQ81/hJF/VfwfAH5rZnWY2aGZ5M3uLmX080GUE3YvFWQCD6CoEAAAzK5jZbySPAE0A\nKwA6ie1Xzew6MzMA5wC0n7dtML4qgC8C+G0zGzGzXQDuAvDNizlucfHI+V8CuPsnAHwIwL8HsADg\nGID3o3vnXs/nADwHYA7A4wD+9zr7uwAcTR4J/jWA30ja9wH4DoAKut82/sjdvwsAZvbHZvbHkSG+\nP+l3Iun75wDujbxf9AFTMg8h0onu/EKkFDm/EClFzi9ESpHzC5FS+voLv3w+7wPFYtDWbrdpvwyR\nhbPG91XI8etaPmLLZbPU1lW5Qu2Ra2hkjK0WP+bYMmw2NkaygNtxrsp5h+/NMpEDiNDphI8tNvbo\n9iLjt8gkM1smMo5shn+e7BwAgE5k8dxjJwLrE91emMXlMirVtZ52dlHOb2a3A/gUgCyA/+LuH4u9\nf6BYxP7XvDZoW17mcR4DmfAHP1Hgk3P19kFqm5oYorbJ8WFqK2TzwfbcQIn2QZZP8eLSMrU1WvzY\nto2PUVum3Qy21+t12mdtbY3aiqXwxRoA2uAXr2qtEmwfGx+lfeB8e416g9qyCH8uAL/YjAzzz3lo\niJ8f+Tyfj1pkjB67QWTC50jsmFse9u/f/+yX+X7W77bnd67DzLLoho2+BcCNAN5pZjde6PaEEP3l\nYp75bwFw2N2PuHsDwF+g+7NRIcQVwMU4/yy6vyR7nuNJ2wsws7uSDDOHWs3wV1IhRP/Z9NV+dz/o\n7gfc/UAuz5/NhBD95WKcfw7A7vP+vytpE0JcAVzMav8PAewzs5eh6/S/hm64KGVtbQ2PPf5Y0LZ8\nhqd0myALrLadr7xOtkeozUrT1Lba4apDpR1egXcr0D7VNb5iW63xFfhmm0tbZyIaZzEXHmOrxbeX\nJavNADAwMEBt1bVVamt1wsdta9tpn0xEBWxG1IpSjp8HFbJivtjmuU8GB/lqv2X4t1cjahAAICIf\nVtfCj8Oxx+RsLvy5NNdqwfYQF+z87t4ys/cD+Gt0pb573T3s2UKIy46L0vnd/VsAvnWJxiKE6CP6\nea8QKUXOL0RKkfMLkVLk/EKklL5G9WUAlHJEpuKKEq4hkt6eGR7gMj01QW2lmJQTidqq1cMBMGtN\nLkN5ZHuFUiQgKBLY4x2+v7GJcEBTq8m3V8jzcUSCLZEt8A+t3gjPVbPF52Mwsr3cEB9jMdKvZWE5\nMhOJEmxFIvBikaTDQzyYrLJapbZmKyzpxQIqyyvngu2d2Ae2fvs9v1MI8ZJCzi9ESpHzC5FS5PxC\npBQ5vxAppa+r/WaOooUDKkZG+FCun90WbN9e4pEg+Q5PTVVZ5ME27Q6/Htaq4bFneFwPRiNpwXKR\nVerlc2XeL/KpTYyEV5zLKzwIpxEJ0KmRoBMgnpdumKTCajZ44EmmzQ8sHwkwapPUZQCQI8vz9Trv\nU8jzDzTT4QFB9coStYEEhQHAADmNWx2uSJxbDSs+7Ug+xvXozi9ESpHzC5FS5PxCpBQ5vxApRc4v\nREqR8wuRUvoq9eXMsG0gvMtSRMoZI0EdU6M8Z1qblIsCEKkzA2RzkURyJA9bvRORmiK6XC4SXNKu\nc0nMs/yaffp0uApQu8mPulzlQSfVNpdFh0uR6jt1Uq4L/JgzxmWq7ECkUs4ql3UH8+Ex5iKlsNYi\neRdrTS71dSJF1pYrfIzL1fD5UyHSMgCsNcPnQCOSq3E9uvMLkVLk/EKkFDm/EClFzi9ESpHzC5FS\n5PxCpJT+Sn1Zw9R4WLIZyXOJrVgM2zJZLq2UIvnxmi0ue3UikWrdSuQ/TyOSb6/d4DJgxyMRcxGJ\nzXM86qzcCEfotdt8fquR0mCtiK28ysc/txgeRz7Dtzda4XPfPMXLudXOcany6snrgu3T07toHxsJ\n58cDgPrSWWqrVHh05Lkyl/rOnAvLukeP8XG0s2HXrTe4PLiei3J+MzsKoIyudN5y9wMXsz0hRP+4\nFHf+N7o7vywLIS5L9MwvREq5WOd3AN8xswfN7K7QG8zsLjM7ZGaHXsxPD4UQm8vFfu1/vbvPmdk0\ngG+b2ZPu/sD5b3D3gwAOAsDYYKH3HENCiE3lou787j6X/D0N4KsAbrkUgxJCbD4XfOc3syEAGXcv\nJ6/fDOD3Yn3yuSyumgondhwtcIlieDAsbVlEKkMkwsoi0XT1GpeNMkQG3D7Cy4YNDfFotJVzfJ10\nbJRHzJUjSTWfmwtvs1LnUl8h8jQ2OxiJSszzyMOjZ8PRhXWPJF2NRPWNjY5Q2203cpFp5WRY1vVq\nZF+TPFq0XuXzUanwe+lAnm9z947wsU1Pz9A+8yth6fDsU6don/VczNf+GQBfTWrb5QD8ubv/j4vY\nnhCij1yw87v7EQA3X8KxCCH6iKQ+IVKKnF+IlCLnFyKlyPmFSCl9j+qbGAlH2+UaYWkIAAby4WEO\nDoTr0gFAvcblsGak3tr4eLguIAA4S
frYaPNraLMZSS45zOv4nVgI12IDgGee49FeC+XwsUVyQeKa\nSM3DO//xfmrbtZOP/y8fPBJs//5hLkW1OjySMZfh0lx5eYHaqpXwPI6McOkNbR5dWCzyfgUSfQoA\ng8b7tdrhD+fq3VfRPiOL4VqODz/L52I9uvMLkVLk/EKkFDm/EClFzi9ESpHzC5FS+rvan8themJ7\n0FZb5KviGQsPs0LKHAFALZLLLGeRfHaRslbsSllr8lXq8W08QKfR5ivYR46foLbFFT5Glt8vGynx\nNVrk25vOhVeVAaC4yBWJfaM7gu0nJ/g45pdPU1u9yuf4x089RW0ZkkOiORQpNTbGA2qQ4S4zNsbV\np5FOpDwYyfPojRXaZw8JkBvI934/151fiJQi5xcipcj5hUgpcn4hUoqcX4iUIucXIqX0WerLY9vk\nVNC2bZiX18pkwkERyytLtE9ztcK3146V6+IJ7ZwEGA0P8zx9TXDbE0e4RLVa56WfisUBbiuEx1ga\n4jLUtiyXRR88PE9trQY/fepjYalvahufDwOX35otLgVXGzyX4CrJ1ddo8WO2iHQbqeaGfCZS6i0T\nyV2YC89jq86lVCcyMYk9C6I7vxApRc4vREqR8wuRUuT8QqQUOb8QKUXOL0RK6avUBxhAZDuLlDNi\nDETyqQ0iHPUEALnINS+TieTjIzLgQImX6zpzikfFVc9wqXLvBJfE6lz1QpFIejdcO0v7ZCIbbGX5\nHK9EpNZcNpxncKTAP5ft266ltmv3XU1tz/7sh9T25FNzwfZCLiKjOZeJWy3uMhkSUQkA+QKfx04n\nfF51IrqiWfg8jSiRP8eGd34zu9fMTpvZo+e1TZjZt83s6eQvz3ophLgs6eVr/58BuH1d290A7nf3\nfQDuT/4vhLiC2ND53f0BAIvrmt8G4L7k9X0A7rzE4xJCbDIXuuA34+4nk9en0K3YG8TM7jKzQ2Z2\nqFyNPKwKIfrKRa/2e7eSBf1FsbsfdPcD7n5gZJAvYgkh+suFOv+8me0EgOQvT74mhLgsuVCp7xsA\n3g3gY8nfr/fSqeOO2lo4WaE1eWQWEI7AWl3lCQ4bTX5da2X4N5BKlUtzK8Q2u5tPo7f49q6Z5MLM\ntVdxaai6xvvNXn9zsL3g/JFr6RxPhFoaDydcBQCc5ZFqu3fsDLYvr/Joxb3/aB+1jW7jUYmj215O\nbUsL4flfOsdLnuUjcmTGeURlsxOJFuXBomg3w+d3JEiQlo57EUF9PUl9XwDwfQA3mNlxM3sPuk7/\nK2b2NIBfTv4vhLiC2PDO7+7vJKY3XeKxCCH6iH7eK0RKkfMLkVLk/EKkFDm/ECmlr1F9DkfbwnKI\nt3lCRSZrlIo86efwCJeGTixwWfHZ4wvUlsuHx1GY53X11ub59vZNcznvTW/gstczc+t/bf3/GZkN\nJ0id3B5OqAkApxd4ks7x8Yjs1eHjL5CElacXwlF2AJArLlPbwvJJaps7yaPw8vnweTA+yrW3Wo0L\nZp7j90uLaHOdiAyYsXA/i0SYRso89ozu/EKkFDm/EClFzi9ESpHzC5FS5PxCpBQ5vxAppa9SXzab\nwfj4cNDWynGpr1IJR6R5k8sn58o8auu5n3Fpq1LhslGpGL5WnnyWRxfOFHlSx9nZa6ht/KqXUVu+\nHAkRI0lNd918C+9yistvpRaXKtvgkYKrq2HbzsGwFAkAjTY/LhsKnzcAsGvoKmobGQ9LnOWzp2if\n0/Nnqa1pXN5ca/CkoMhwbW5oIBxl2qhFJEySENSIbBgcUs/vFEK8pJDzC5FS5PxCpBQ5vxApRc4v\nRErp62p/p91CeTm8kppr8Fx3eVKaCDyFHHJZbqxWuBKwbYQHsowPhVdla0t8tX/6Kp4Db/amf0Jt\njx5vUNtTh7nttp0TwfblZd5n5tpw3j8AyKBKbY06VwLGPbxyv3Kar6SXGjyX4M6J8HEBwHKb59XL\n3xQuJlWLBAr9w7e+QW3Hj/FjzkZKcsUKabE4omasrFwzPFcsCC64jZ7fKYR4SSHnFyKlyPmFSCly\nfiFSipxfiJQi5xcipfRV6gOALFE82pEgBicySYaU8QKAtnGpb4krSlhZieRvq4flsp1jXB78hTe+\nkdp23XArtX3lT++lth2RIJdsI5yfcO7IM3x7e2+ktuL266htyLk8W10Ml28sdcLSGwA0alxWPFPm\ntvEpHgS1fceeYHutMkr7ZLgJ7QIPZorl8Gs2udRqrXCAmjkPXGu1wq57SaU+M7vXzE6b2aPntd1j\nZnNm9lDy746e9yiEuCzo5Wv/nwG4PdD+B+6+P/n3rUs7LCHEZrOh87v7AwB4rmghxBXJxSz4fcDM\nHk4eC+iDnJndZWaHzOxQpcqfe4QQ/eVCnf/TAPYC2A/gJIBPsDe6+0F3P+DuB4YHeVYbIUR/uSDn\nd/d5d2+7ewfAZwDwHFFCiMuSC5L6zGynuz8fFvV2AI/G3v//+gEwokS0SZQSwMsWRSonwWuR7UVS\n4E1s52W+dgyGpcXXHLie9nn5bVzOWzrN5c2BFo883LtrF7V1yMHtmOa581prXDKtRqIBGy3er1kL\nn1ptcJnymbnj1PbIo4eo7bZb+Ri37whHVa6Uw1IkAJAKXwCAyT1c1u3Eyms1IrIdkZDPLfDyZfVy\neJAdEk0ZYkPnN7MvAHgDgEkzOw7gowDeYGb7ATiAowDe2/MehRCXBRs6v7u/M9D82U0YixCij+jn\nvUKkFDm/EClFzi9ESpHzC5FS+hrV5w50SARTrc4ligKJYsvleMLEbIbLP9ft4JFlxRK/Hu65Znew\n/ebX88i9nTfcRG0Pff9Pqe3q3XyMO17xKmorTF0bbM8NjtE+1TUuOdZWeOTe/Ilj1LY0H5bt2k0e\nnVcaCSdIBYDJSf5ZHzvxY2qb2TkbbG9VI1GkNV52y1aXqK3t4YhKAHCmcQMoDYSPrbCDH/PKAIl0\nfREerTu/EClFzi9ESpHzC5FS5PxCpBQ5vxApRc4vRErpq9RnZshnw7tciiRobK+FZY3SYIn2yWa4\ntDIdidw7dpJHUl37mlA2M2DXq8LtXbhk1yyvUtvYCJfmpq7fT22ruXBNu8d+/EPap17j41hZ4fNx\nZu5n1JZth6XWYpGfcrMvC8tyAHDT9TyRaCvLI+3y2fFwe4FHfebWeJLO6nNz1MZkbABoRW6zFVJX\ncnA7P64ZUgMyn+/9fq47vxApRc4vREqR8wuRUuT8QqQUOb8QKaW/gT2dDuq18Erq4AAfihXDq6H5\nDM8h521uKw3zUl5v/Rdvpbbb3vKmYPvo5AztM3/kCWrLRsa/XOY5/BaO/pTaTpTDK85/+7Wv0T7D\nJR5AslbnATA7ZrgiMToSXql+9jgPBmpE5mPiqj3Udv2rXkttaA8EmxeXeb7AKlGXAGCpxsdozs/h\ntRoPXKuQElte4arDy8MiBjq9V+vSnV+ItCLnFyKlyPmFSClyfiFSipxfiJQi5xcipfRSsWc3gM
8B\nmEG3Qs9Bd/+UmU0A+CKAPehW7XmHu/MEZwAcjo6T3HodHhRhrbBM0vJISa5IzrTiwCi17X8tl40G\n8mFJ7PGHeA65pRPPUFu9zqWc8hKvin7s8OPUVvFwsFO+zfc1nOPS52iRB5dMbeNS38n5U8H2VqQs\nW7XMZcVjz/IgIuAxaqlUwjkIizl+frQGpqntbIufO6USz0E4OMKD0Eq5sBxZrq7QPq1OWHJ8EUpf\nT3f+FoAPu/uNAG4F8D4zuxHA3QDud/d9AO5P/i+EuELY0Pnd/aS7/yh5XQbwBIBZAG8DcF/ytvsA\n3LlZgxRCXHpe1DO/me0B8GoAPwAwc16l3lPoPhYIIa4QenZ+MxsG8GUAH3T3FzyMuLuDPG6Y2V1m\ndsjMDq3WeC59IUR/6cn5zSyPruN/3t2/kjTPm9nOxL4TQLDgubsfdPcD7n5gqFS4FGMWQlwCNnR+\nMzN0S3I/4e6fPM/0DQDvTl6/G8DXL/3whBCbRS9Rfa8D8C4Aj5jZQ0nbRwB8DMCXzOw9AJ4D8I6N\nN+UAwrJdp8UfCXL5cM69diRnWgM8+mpmjOfV++tvfJPaJmbCktL0znAZLwBoVHl0Xj4flngAYHiI\nS0q5DJfmhogcuWM6nPMNAGplrtCWsnyMZxfOUFuzEf5sRopc8mpUuNT39I8PUdvJJ5+itnqLlNDK\n8zlsx+Z3F5c+McTP4cwAl1qLRLbbBj5XL3/Fy4LtpeIR2mc9Gzq/u/89ABbjGI5xFUJc9ugXfkKk\nFDm/EClFzi9ESpHzC5FS5PxCpJS+JvCEGzqdsHBQiESWFXMk+WGGJ1r0SAmnToNHlp05E45GA4DK\nQthWavLoqw74cU1s4/Lb+FVT1NZq16lt7kR4jB6J98pk+GnQaHHJNGs88edQMSzPkgDN7vZixkiU\nZrvB5dQMOd9WqlzebAwQeRDAyFV87ldLvLRZucNlwLXV8D14++he2meSSLe5fO8urTu/EClFzi9E\nSpHzC5FS5PxCpBQ5vxApRc4vRErpr9QHQ8bCUWLFAR7B5CRCb6gUlpMAYGhkktqqTR5htX2E5xzI\nkXE0zs3TPp0M3141z6WtmZlw1BYAdBpcNrrhpl3B9u99937ap+FVassbl1NrFd5vdCQclVjI8VMu\na5F6dmv8M3v2JJftlpfDn1ndVmmfqev5PXF2PBKV6PyzXjrD56qwFpZMh2YjkZjVcNRkJ6KWrkd3\nfiFSipxfiJQi5xcipcj5hUgpcn4hUkpfV/szBhRy4etNtc4DJrKkZFQnkl+u2uTBGdk8DxIZKPDV\n3Hw+PI7CIC9bNTbKA4xOLXCVoDobXrUHgOnd11Hb3OlwXr1X/MLraJ/KwglqO/IUL4W1WuGBLLls\neP7HxnhuQiP5HQHg5Bwf48+eiwT2DITnf3SGK0VTE5ExRlQHW+Sf9bYl7mqz0xPB9l3j/Bw4/Hg4\ngKte40Fr69GdX4iUIucXIqXI+YVIKXJ+IVKKnF+IlCLnFyKlbCj1mdluAJ9DtwS3Azjo7p8ys3sA\n/CsAC8lbP+Lu34ruLGeYmQpfb5pnz9J+tXZYAlrlsRnwDC/llYsEl4yO8mCKAimFVVvlOfxKsZxq\nDW479L3vUdveG7hEePx4WALKRPIdDg7wXHzZiJxaKnFpa7USlvpqNS7BtiIl24ZLfBy3vfp6aiuS\nAKNWlucmbDd5EE7tGJf6MuUitU0PjlDbq69/RbjPOK96/+DJZ4PtrSY/rvX0ovO3AHzY3X9kZiMA\nHjSzbye2P3D3/9Tz3oQQlw291Oo7CeBk8rpsZk8AmN3sgQkhNpcX9cxvZnsAvBrAD5KmD5jZw2Z2\nr5nx0rdCiMuOnp3fzIYBfBnAB919BcCnAewFsB/dbwafIP3uMrNDZnZopcqf6YQQ/aUn5zezPLqO\n/3l3/woAuPu8u7fdvQPgMwBuCfV194PufsDdD4wO8kwnQoj+sqHzm5kB+CyAJ9z9k+e17zzvbW8H\n8OilH54QYrPoZbX/dQDeBeARM3soafsIgHea2X505b+jAN670YYKBcPVu8N3/zHjMsnhY2HpZX6B\nR+c12lwaGh7mh71a5RFi7U4l2J6NXEMXF7iEWa5wWWatyceRdW4bGQ4vvcyfWqR9jq9y+arjXCKc\nmeKyqHXC0WVLyzzf3sAQ/8zGx7hUVsjy+a83iOSb4/Lmap1vr1GJlCjr8H7X7d5BbVftCM/jseNc\n0j27EPaJVqzk2Tp6We3/ewChMyCq6QshLm/0Cz8hUoqcX4iUIucXIqXI+YVIKXJ+IVJKXxN4ZnOG\n0W0kMo5IFwCwbTobNgzxJIxn5nlC0LVIuatcgSdvZN06TR5B2GzzcZyrcdlrKBLFtlbl0lxtLZzA\nsxEZYzticydzD6CyEinXNRpOhDo6ypOd1mp8e2fO8rkaHubRhZYJ39+sxWXiQo4ncR3gijQKBT5X\ne67bQ221angsDzzwOO3z8FOnw9ta6z2qT3d+IVKKnF+IlCLnFyKlyPmFSClyfiFSipxfiJTSV6nP\nzJArhndZHOWx/hPD4WtUrsZltHyJRzetROqmoc2vh6XidLhLnu+rXef17AqDfBz5HJ+PbJZLnHUP\nj6XR5PKmRyL3jCti8AaXHNvElI9E06HA5c3lJS711Rq8Pt3YeFi6zREJEAAykbmvgktp82fK1LYU\nieAsr4ajNL/zt0/yfRFVdK0hqU8IsQFyfiFSipxfiJQi5xcipcj5hUgpcn4hUkpfpb5Ox1BhCRCz\nw7Tf8FBYN8qXuA41FAm/Ghvj0lxlhdeSq6yEEypWqpGovjVuGynwBJhFUhcQAFp1LnHmcuHreSFy\nmc8P8Gg0M95xMJIINUNMrTaXogqlSA3FcS5vLi5yia1MpM/RCT731UjNwKeP8oSsTz5yjNpmJni0\n6MwucmwZfp5OkoSm82Uue/7c5nt+pxDiJYWcX4iUIucXIqXI+YVIKXJ+IVLKhqv9ZlYE8ACAgeT9\nf+nuHzWzCQBfBLAH3XJd73B3Hn2Bbg6848+FbfVlvjo/MhVeIS6WIgEdXDzAxAQ/7MoqzyO3vBy2\nLZ3lgSBLfHEY2Q5fZe84VzLaba4goBO2xa7yluGBPdkcn6taJAjKyaJ+npTxAoBWlZcUa0fy+7Uj\nwULLlXA/VsULABYjis/Rw/wDXT67Sm2NVb7DHWPhUl4vv2aW9mFDfPrUCu2znl7u/HUA/9Tdb0a3\nHPftZnYrgLsB3O/u+wDcn/xfCHGFsKHze5fnK1Tmk38O4G0A7kva7wNw56aMUAixKfT0zG9m2aRC\n72kA33b3HwCYcfeTyVtOAZjZpDEKITaBnpzf3dvuvh/ALgC3mNkr19kd3W8DP4eZ3WVmh8zs0LkK\nT/4ghOgvL2q1392XAXwXwO0A5s1sJwAkf4NVBNz9oLsfcPcDY8ORigdCiL6yofOb2ZSZjSevSwB+\nBcCTAL4B4N3J294N4OubNUghxKWnl8CenQDuM7Msu
heLL7n7N83s+wC+ZGbvAfAcgHdstCG3HNr5\nyaCtWThA+9U74UCWTCtcmgoAimNcvhqf4t9AtmV44MlENRxosbzIyzstn+FyXm2VT3+7xeVDOL9m\nd1rhMa7V+CNXoRDJF5jj4y+v8cCTGnnEyzsPmhnJhINVAKCT4RJWs8nncWAoLJkW8zxf4HiBj3Ev\nxqntVTfzsmE33HQzte257rpg+y23cnnz+IlKsP0fnuE+sZ4Nnd/dHwbw6kD7WQBv6nlPQojLCv3C\nT4iUIucXIqXI+YVIKXJ+IVKKnF+IlGIeiR675DszW0BXFgSASQC96xKbh8bxQjSOF3KljeMad5/q\nZYN9df4X7NjskLtzcV/j0Dg0jk0dh772C5FS5PxCpJStdP6DW7jv89E4XojG8UJesuPYsmd+IcTW\noq/9QqQUOb8QKWVLnN/Mbjezn5rZYTPbssSfZnbUzB4xs4fM7FAf93uvmZ02s0fPa5sws2+b2dPJ\n321bNI57zGwumZOHzOyOPoxjt5l918weN7PHzOw3k/a+zklkHH2dEzMrmtn/MbOfJOP43aT90s6H\nu/f1H4AsgGcA7AVQAPATADf2exzJWI4CmNyC/f4SgNcAePS8to8DuDt5fTeA39+icdwD4Lf6PB87\nAbwmeT3n0bcsAAAB9UlEQVQC4CkAN/Z7TiLj6OucADAAw8nrPIAfALj1Us/HVtz5bwFw2N2PuHsD\nwF+gmwk4Nbj7AwDWJ6nvezZkMo6+4+4n3f1HyesygCcAzKLPcxIZR1/xLpueMXsrnH8WwPm1jI9j\nCyY4wQF8x8weNLO7tmgMz3M5ZUP+gJk9nDwWbPrjx/mY2R50k8dsaYbodeMA+jwn/ciYnfYFv9d7\nNyvxWwC8z8x+aasHBMSzIfeBT6P7SLYfwEkAn+jXjs1sGMCXAXzQ3V+Qt6ufcxIYR9/nxC8iY3av\nbIXzzwHYfd7/dyVtfcfd55K/pwF8Fd1Hkq2ip2zIm427zycnXgfAZ9CnOTGzPLoO93l3/0rS3Pc5\nCY1jq+Yk2feLzpjdK1vh/D8EsM/MXmZmBQC/hm4m4L5iZkNmNvL8awBvBvBovNemcllkQ37+5Ep4\nO/owJ2ZmAD4L4Al3/+R5pr7OCRtHv+ekbxmz+7WCuW418w50V1KfAfDvtmgMe9FVGn4C4LF+jgPA\nF9D9+thEd83jPQC2o1vz8GkA3wEwsUXj+K8AHgHwcHKy7ezDOF6P7lfYhwE8lPy7o99zEhlHX+cE\nwE0Afpzs71EAv5O0X9L50M97hUgpaV/wEyK1yPmFSClyfiFSipxfiJQi5xcipcj5hUgpcn4hUsr/\nBenCT1Q9NgoeAAAAAElFTkSuQmCC\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "import numpy as np\n",
+ "# Change this to where you want to save the data\n",
+ "SAVE_DIR = os.path.expanduser('~/desktop/data/CIFAR/')\n",
+ "# train data\n",
+ "cifar_train = datasets.CIFAR10(SAVE_DIR, train=True, download=True)\n",
+ "x_train_cifar, y_train_cifar = cifar_train.train_data, np.array(cifar_train.train_labels)\n",
+ "# test data\n",
+ "cifar_test = datasets.CIFAR10(SAVE_DIR, train=False, download=True)\n",
+ "x_test_cifar, y_test_cifar = cifar_test.test_data, np.array(cifar_test.test_labels)\n",
+ "\n",
+ "print('Training Data Size: ' ,x_train_cifar.shape, '-', y_train_cifar.shape)\n",
+ "print('Testing Data Size: ' ,x_test_cifar.shape, '-', y_test_cifar.shape)\n",
+ "\n",
+ "plt.imshow(x_train_cifar[0], cmap='gray')\n",
+ "plt.title('Class: %i' % y_train_cifar[0])\n",
+ "plt.show()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### 0c. A CSV File of arbitrary Filepaths to 2D arrays\n",
+ "\n",
+ "For the third dataset, I will create some random 2D arrays and save them to disk without any real order. I will then write the file-paths to each of these images to a CSV file and create a dataset from that CSV file. This is a common feature request in the pytorch community, I think because many Kaggle competitions and the like provide input data in this format.\n",
+ "\n",
+ "Here, I will just generate a random string for each of the file names to show just how arbitrary this is."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAACuhJREFUeJzt3V/I3YV9x/H3ZxojVQvJuoXUhllBBmWwCA/ZoDI6nK3z\nRr0pzUXJQJZedFKhFxN3MS9lVEuvhDil6XCWgYpeyEoMBSkMMUqm0XSNk5Qmi0mLF9rB/PvdxfML\nPGbJ85w858/vyb7vFxzO7/zO7zy/Lz/yzjm/cw6cVBWS+vmdsQeQNA7jl5oyfqkp45eaMn6pKeOX\nmjJ+qSnjl5oyfqmpy6d5cJJbgR8AlwH/WFUPrLb9FdlcV3LVNLuUtIr/4b/5oN7PJNtmvV/vTXIZ\n8AvgFuAE8BKwu6reuNBjPput9Se5eV37k7S2F+sg79Y7E8U/zcv+XcCbVfVWVX0A/Bi4fYq/J2mB\npon/WuBXK26fGNZJugRMdc4/iSR7gb0AV/KZee9O0oSmeeY/CexYcfsLw7pPqap9VbVUVUub2DzF\n7iTN0jTxvwTckOSLSa4AvgE8O5uxJM3bul/2V9VHSf4G+AnLH/U9VlWvz2wySXM11Tl/VT0HPDej\nWSQtkN/wk5oyfqkp45eaMn6pKeOXmjJ+qSnjl5oyfqkp45eaMn6pKeOXmjJ+qSnjl5oyfqkp45ea\nMn6pKeOXmjJ+qSnjl5oyfqkp45eaMn6pKeOXmjJ+qSnjl5oyfqkp45eamuq3+i51P/mvw2OPoP8H\nvvb5nWOPsC5TxZ/kOPAe8DHwUVUtzWIoSfM3i2f+P6+q38zg70haIM/5paamjb+A55O8nGTv+TZI\nsjfJoSSHPuT9KXcnaVamfdl/U1WdTPL7wIEkP6+qF1ZuUFX7gH0An83WmnJ/kmZkqmf+qjo5XJ8B\nngZ2zWIoSfO37viTXJXkmrPLwFeBI7MaTNJ8TfOyfxvwdJKzf+efq+pfZzKVpLlbd/xV9RbwxzOc\nRdIC+VGf1JTxS00Zv9SU8UtNGb/UlPFLTRm/1JTxS00Zv9SU8UtNGb/UlPFLTRm/1JTxS00Zv9SU\n8UtNGb/UlPFLTRm/1JTxS00Zv9SU8UtNGb/UlPFLTRm/1JTxS00Zv9TUmvEneSzJmSRHVqzbmuRA\nkmPD9Zb5jilp1iZ55v8hcOs56+4FDlbVDcDB4bakS8ia8VfVC8A756y+Hdg/LO8H7pjxXJLmbL0/\n0b2tqk4Ny28D2y60YZK9wF6AK/nMOncnadamfsOvqgqoVe7fV1VLVbW0ic3T7k7SjKw3/tNJtgMM\n12dmN5KkRVhv/M8Ce4blPcAzsxlH0qJM8lHfE8C/AX+Y5ESSu4AHgFuSHAP+Yrgt6RKy5ht+VbX7\nAnfdPONZJC2Q3/CTmjJ+qSnjl5oyfqkp45eaMn6pKeOXmjJ+qSnjl5oyfqkp45eaMn6pKeOXmjJ+\nqSnjl5oyfqkp45eaMn6pKeOXmjJ+qSnjl5oyfqkp45eaMn6pKeOXmjJ+qSnjl5qa5Ic6H0tyJsmR\nFevuT3IyyeHhctt8x5Q0a5M88/8QuPU8679fVTuHy3OzHUvSvK0Zf1W9ALyzgFkkLdA05/x3J3l1\nOC3YMrOJJC3EeuN/GLge2AmcAh680IZJ9iY5lOTQh7y/zt1JmrV1xV9Vp6vq46r6BHgE2LXKtvuq\naqmqljaxeb1zSpqxdcWfZPuKm3cCRy60raSN6fK1NkjyBPAV4HNJTgB/D3wlyU6ggOPAt+Y4o6Q5\nWDP+qtp9ntWPzmEWSQvkN/ykpoxfasr4paaMX2rK+KWmjF9qyvilpoxfasr4paaMX2rK+KWmjF9q\nyvilpoxfasr4paaMX2rK+KWmjF9qyvilpoxfasr4paaMX2rK+KWmjF9qyvilpoxfasr4pabWjD/J\njiQ/TfJGkteTfGdYvzXJgSTHhust8x9X0qxM8sz/EfDdqvoS8KfAt5N8CbgXOFhVNwAHh9uSLhFr\nxl9Vp6rqlWH5PeAocC1wO7B/2Gw/cMe8hpQ0exd1zp/kOuBG4EVgW1WdGu56G9g208kkzdXE8Se5\nGngSuKeq3l15X1UVUBd43N4kh5Ic+pD3pxpW0uxMFH+STSyH/3hVPTWsPp1k+3D/duDM+R5bVfuq\naqmqljaxeRYzS5qBSd7tD/AocLSqHlpx17PAnmF5D/DM7MeTNC+XT7DNl4FvAq8lOTysuw94APiX\nJHcBvwS+Pp8RJc3DmvFX1c+AXODum2c7jqRF8Rt+UlPGLzVl/FJTxi81ZfxSU8YvNWX8UlPGLzVl\n/FJTxi81ZfxSU8YvNWX8UlPGLzVl/FJTxi81ZfxSU8YvNWX8UlPGLzVl/FJTxi81ZfxSU8YvNWX8\nUlPGLzVl/FJTxi81teYPdSbZAfwI2AYUsK+qfpDkfuCvgV8Pm95XVc/Na9B5+Nrnd449gjSaSX6i\n+yPgu1X1SpJrgJeTHBju+35VfW9+40mal0l+ovsUcGpYfi/JUeDaeQ8mab4u6pw/yXXAjcCLw6q7\nk7ya5LEkWy7wmL1JDiU59CHvTzWspNmZOP4kVwNPAvdU1bvAw8D1wE6WXxk8eL7HVdW+qlqqqqVN\nbJ7ByJJmYaL4k2xiOfzHq+opgKo6XVUfV9UnwCPArvmNKWnW1ow/SYBHgaNV9dCK9dtXbHYncGT2\n40mal0ne7f8y8E3gtSSHh3X3AbuT7GT547/jwLfmMqGkuZjk3f6fATnPXZfUZ/qSPs1v+ElNGb/U\nlPFLTRm/1JTxS00Zv9SU8UtNGb/UlPFLTRm/1JTxS00Zv9SU8UtNGb/UlPFLTRm/1JTxS00Zv9SU\n8UtNGb/UlPFLTRm/1JTxS00Zv9SU8UtNGb/UlPFLTaWqFrez5NfAL1es+hzwm4UNsDbnWd1Gmwc2\n3kxjz/MHVfV7k2y40Pj/z86TQ1W1NNoA53Ce1W20eWDjzbTR5lmNL/ulpoxfamrs+PeNvP9zOc/q\nNto8sPFm2mjzXNCo5/ySxjP2M7+kkYwSf5Jbk/xHkjeT3DvGDOfMczzJa0kOJzk00gyPJTmT5MiK\ndVuTHEhybLjeMvI89yc5ORynw0luW+A8O5L8NMkbSV5P8p1h/SjHaJV5RjtGF2vhL/uTXAb8ArgF\nOAG8BOyuqjcWOsinZzoOLFXVaJ/PJvkz4LfAj6rqj4Z1/wC8U1UPDP9Jbqmqvx1xnvuB31bV9xYx\nwznzbAe2V9UrSa4BXgbuAP6KEY7RKvN8nZGO0cUa45l/F/BmVb1VVR8APwZuH2GODaWqXgDeOWf1\n7cD+YXk/y/+4xpxnNFV1qqpeGZbfA44C1zLSMVplnkvGGPFfC/xqxe0TjH/QCng+yctJ9o48y0rb\nqurUsPw2sG3MYQZ3J3l1OC1Y2GnISkmuA24EXmQDHKNz5oENcIwm4Rt+y26qqp3AXwLfHl7ybii1\nfH429kczDwPXAzuBU8CDix4gydXAk
8A9VfXuyvvGOEbnmWf0YzSpMeI/CexYcfsLw7rRVNXJ4foM\n8DTLpyYbwenh3PLsOeaZMYepqtNV9XFVfQI8woKPU5JNLIf2eFU9Nawe7Ridb56xj9HFGCP+l4Ab\nknwxyRXAN4BnR5gDgCRXDW/YkOQq4KvAkdUftTDPAnuG5T3AMyPOcjaus+5kgccpSYBHgaNV9dCK\nu0Y5RheaZ8xjdNGqauEX4DaW3/H/T+DvxphhxSzXA/8+XF4fax7gCZZfJn7I8vsgdwG/CxwEjgHP\nA1tHnuefgNeAV1mObvsC57mJ5Zf0rwKHh8ttYx2jVeYZ7Rhd7MVv+ElN+Yaf1JTxS00Zv9SU8UtN\nGb/UlPFLTRm/1JTxS039L+9xXbCVcjiuAAAAAElFTkSuQmCC\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ " files labels\n",
+ "0 /Users/ncullen/desktop/data/CSV/YZL2M7.npy 0\n",
+ "1 /Users/ncullen/desktop/data/CSV/XHAAGV.npy 1\n",
+ "2 /Users/ncullen/desktop/data/CSV/0ZZRST.npy 2\n",
+ "3 /Users/ncullen/desktop/data/CSV/DWKH5L.npy 3\n",
+ "4 /Users/ncullen/desktop/data/CSV/0IHOQ6.npy 4\n"
+ ]
+ }
+ ],
+ "source": [
+ "import numpy as np\n",
+ "import pandas as pd\n",
+ "import os\n",
+ "import random\n",
+ "import string\n",
+ "# create data\n",
+ "X = np.zeros((10,1,30,30))\n",
+ "for i in range(10):\n",
+ " X[i,:,5:25,5:25] = i+1\n",
+ "Y = [i for i in range(10)]\n",
+ "\n",
+ "plt.imshow(X[0,0,:,:])\n",
+ "plt.show()\n",
+ "\n",
+ "# save to file\n",
+ "SAVE_DIR = os.path.expanduser('~/desktop/data/CSV/')\n",
+ "if not os.path.exists(SAVE_DIR):\n",
+ " os.mkdir(SAVE_DIR)\n",
+ "else:\n",
+ " import shutil\n",
+ " shutil.rmtree(SAVE_DIR)\n",
+ " os.mkdir(SAVE_DIR)\n",
+ "paths = []\n",
+ "for x in X:\n",
+ " \n",
+ " file_path = os.path.join(SAVE_DIR,''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(6))\n",
+ ")\n",
+ " #print(file_path+'.npy')\n",
+ " np.save('%s.npy' % file_path, x)\n",
+ " paths.append(file_path+'.npy')\n",
+ "\n",
+ "# create data frame from file paths and labels\n",
+ "df = pd.DataFrame(data={'files':paths, 'labels':Y})\n",
+ "print(df.head())\n",
+ "\n",
+ "# save data frame as CSV file\n",
+ "df.to_csv(os.path.join(SAVE_DIR, '_DATA.csv'), index=False)"
+ ]
+ },
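+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "As a rough sketch (my own addition, using only pandas and numpy rather than any particular dataset class, and assuming `SAVE_DIR` from the cell above is still defined), a row of the CSV written above can be read back like this:\n",
+ "\n",
+ "```python\n",
+ "import os\n",
+ "import numpy as np\n",
+ "import pandas as pd\n",
+ "\n",
+ "df = pd.read_csv(os.path.join(SAVE_DIR, '_DATA.csv'))\n",
+ "# illustrative names for the first row\n",
+ "sample_path, sample_label = df['files'][0], df['labels'][0]\n",
+ "sample_array = np.load(sample_path)   # the (1, 30, 30) array saved earlier\n",
+ "print(sample_array.shape, sample_label)\n",
+ "```"
+ ]
+ },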
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### 0d. 3D Brain Images\n",
+ "\n",
+ "Finally, I'll grab a standard structural MRI scan and the brain binary mask from the *nilearn* package to show how the processing you can do with 3D images using *pytorch* and *torchsample*. The MRI scan will include the skull and head, and the mask will be for just the brain. This is a common task in processing neuroimages -- to segment just the brain from the head.\n",
+ "\n",
+ "This data will also be useful to show the processing step involved when **BOTH** the input and target tensors are images."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Image Sizes: (197, 233, 189) , (197, 233, 189)\n"
+ ]
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAACBCAYAAADOgnH+AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvVmMXed1LvjtM9aZ53Oq6tQ8cRZJzZRlS7KjjowEduIE\nTtIw0g4uEiTpToI8Jf3UQAMXuMhDgCTdCfo+XHR3gNuRnMCIMzhxHEmWGYkSTcqcVZyKNdepOvM8\n734ofov/2dynqiiTEi3UAgp1pj39w/rX+ta31q/puo592Zd92Zd9+eyK5dO+gX3Zl33Zl315tLKv\n6PdlX/ZlXz7jsq/o92Vf9mVfPuOyr+j3ZV/2ZV8+47Kv6PdlX/ZlXz7jsq/o92Vf9mVfPuOyr+j3\n5YFE07TXNE2b1zTtpqZpf/xp38++7Mu+7C7aPo9+X/YqmqZZAVwH8CqAFQBnAfyarutXP9Ub25d9\n2ZcdZd+i35cHkWcB3NR1/bau600AfwPgq5/yPe3LvuzLLmL7tG9gX36qJAlgWXm/AuC5fj/WNE1X\nXj/C29oXMzF462ld12Of1r3sy6cr+4p+Xx6qaJr2WwB+i+8HBgZgs9lgtVphsVhE4auKX9M0dLtd\n08VA13VomiZKS32tHr8bBMnz7PU742d7+c1ev9vpvs2etV+7WCyWnuP5Wtd1dLtddDodtNttNJtN\ndLvdxR1vZl8+07Kv6PflQWQVwKjyfuTuZyK6rv9XAP8V6LXoKUYFb/ba7Pe7/VZVimbK0ahU1d/x\nt+r3VKL8nVGpGo8znt9iuR8VNbu/fs+y07Oqnxvvwfgc+7IvwD5Gvy8PJmcBzGqaNqlpmgPArwL4\nzk4HGJUfrXqjouJn/OPvjF5AP8Xf75zG/2bnU6+p/tEL4X/1z3hsv/P3+68eZ/ad8Xw7XVM9jvdn\n1v47yT6b6rMtj0zR7w+cz57out4G8L8A+FcA1wC8oev6lZ2O6Wex9lPgyrV6/u+mFHeDc8ws/H5K\n0mq13qfcjZ+ZHd/vufvBPmb3YTze2A797l9tz3730U+0bTbV/wngywAOA/g1TdMO73rgvvzUyCOB\nbpSBIzQ8TdO+s0/D++kXXdf/GcA/P8gxZjCD2XsjPGJy7b7fGaEW9fzqfVApGj/TNA02m62v8tZ1\nvUfptttt+cwMJzc+u3pOM6jHKDspaLNn5H30Wzh2EWFT3T2ObKr9+foZkUeF0e8PnH3pkZ2sVSou\nM5y5XzDWqCh3U55mFrjNZuux2DVN6/lv9Ag6nQ50XUen04HD4UCn00G320W32xXFb3weo/S7T+Nz\n7YTn9wtMm7XxHmVPbCqtN9D+1Me4zkOV3a5ttuD185A+jbjGXhf9nY69+7drJzwqRf9ANLx9+WyL\navmaTU6jgjOTfpa60aLt971Rudtstp7XDocDDoejhyFks92bHu12+z4mS7PZlM/b7XbPa6MHYGwL\n4733u29jO5m1h9nvdmrLjyvGQLvdbr8PguvHrDK7x73Cbv0+V89vhNO4eNvtdvnM2E87eWRmxsVe\n7vGTlmq1uqfffWqsGzPrQPnO+Nu+3+10HGUnqpzRcvo0O++TvHa/Qazr+kPnW6vwyG5WqfEzo1Vr\nphR3s2otFgvsdjusViscDgfsdjvsdjsGBgbg8XjgcrngcrmECupwOABAoBxCNQBEudfrddRqNdRq\nNVQqFdTrdbRaLbRaLTSbTXQ6HbRaLaGNmj2v2bhUxfj9XtrK2N57lF3ZVEahUjcGi40Kt999m4nZ\nvNQ0c+qt2fmN44yLuXF+WywWtNttObeZst+rojcbn2a65VHJXvv4USn6B6bhETdVB4qRadDvO4o6\n0H/SxjVORuME3e044/vdLLC9fGb8fqfzmQ3Ene6fn9VqtYfKtzYqMrP2NN5TP0XI8+2lTWmR06pz\nOp1wOBxwu93w+/3w+/3weDzweDxwOp1wuVzyewBi7fMatAYBiFVfq9XQaDRQqVRQqVRQLBZRLBZR\nrVbRbDbRaDTQarXk991u97773Isy7wdVmbWzcVzsUREImwrb8/RXAfyPux1kVKxGxW+2KBufaafn\n4WfsE+MzmY0N9V6MnqSmabDb7eh2u7DZbKLszeZFt9uVWA4Xh92ewfgd78FisfScz+y36jObLd4P\ncm0zeVSK/mMPHKOoncTfmP3OTMmbWfH93N9+KzTf7zR5+n1nNol3O8Z4b/2sAjPreKc2MfvMbDF7\nVGKchKql209h79RfO7nw/ZR7OBxGIBCA3++H2+2G2+2G0+mE0+mEpmliyQPbytwoqsJpNpvQdR2N\nRgONRgPVahXVahXFYhGFQgHZbLav0lcVfj9rst97s/bh92q7Pkhf6rre1jSNbCorgP+m78Km4vX5\nX32tGm3GZ+j3PMaFYS/PbbwHNb6ijgMuPoyx0ENjHEaFcniP6kJhNt9VY2Wn++R3vFa/Y9Q+3MsY\n6LfY9ZNHoug/7sAxk50Uu9lAM7P29qJs+1m/e21IM8Wpiq7rPZivam0AuM/FV69tZgH/JGIcTLsN\n2J9EjFaf+nk/JW92b+px/I3xOoRmqOC9Xi8ikQhisRhCoRB8Ph/8fj+cTicGBgZgtVphs9mg67pY\n3eVyWc7ZbrdFeap95/V65Xifz4d2u41Op4N6vY5Go4FisYhSqYRcLoetrS1kMhmUy2VR+IR2+lnl\n/T7bSQEY27Zfu/cT/WOwqdRrAeZ5Emb3ZrSQzby83Yw5LiiE4RwOB3w+H7xeL1wuF3w+H9xuNxwO\nh1Bjef5ut4tqtYpGo4F6vY5isSgLNvtJjb8Y4Z1+49FsHqnPZLTo+ylrs/c/6fx/ZBj9Tzpw+lmh\n6n++7jc4jL9T3Se+3knZ97sf/lY9xng/xs9pDaoDnYMOMLdo1Ourz2DWZsb3ZsrT2IaPSrmb3V+/\nQW1scxWPNVMAZuclTDMwMICBgQH4/X4kEgnE43EEg0GEw2Gx4NXgXKvVQqFQEGucWDsnfavVknug\nh8BFwuPxyDk9Ho9cW9d1hEIhOWcikUA+n8fm5iZSqRSKxSLq9Trq9fqOQUG1rYztYKSS9ltAH3Xf\n9lPmu4nZ/Zvds9lCxr7ggh4KhQSKGxwchM/nkwV9YGAALpdLjCxCb+12W/q31WqhXC6jVCqhUqkg\nk8mgUqmgVquhUCigXC6jXC5LIJ6enpni79d/ZuN3NyqxUcw82wdp88emBMJOK7hqAZgpjb0+sDrA\nzM5nFgPg50ZRO5ruOBW32WA1KvJ+ixaPUY83nquf9IMz9uJq/qQWQz9Rr2Vm0ajXNVNaO52XVjyV\nr9/vx9DQEAYHBxGLxRCJRBAIBGC32yUoR/gknU4jnU6jUCigUqmgUCj0TP5mswlgG6IhpKMGcp1O\nJwKBADweDwKBAKLRKKLRKKxWqywC7XYbsVgMhUIBsVgM8XgcGxsbWF9fF4XfaDR6lMdubWjsJ7Ox\nSS9ktzZ81NJvrO1kGavHqXOE3pfD4
UA4HEYwGEQwGMTw8LC8D4VCPYF19pvT6RQPSjUmOF+LxSKa\nzaZY97VaDaVSCVtbWygUCshkMiiVSiiVSigWixJkN857VcwWaePne223h+F9PzaKHuiPJe+kmNTv\ndxpYxgYy8qep2BmpZ2CO36l4IzuWNDq6/XytDgIOhN0UOi1+o2LvZy0Yn223dt0LFPUolIKxvVWP\nRxUjbq9Kv/4jc8bv9yMej4uSHxwcRCAQ6KHXtdttFAoFpNNprK6uCrxSq9VE0RKqUfF5r9crSp+f\n0+UvFAqwWq0CFfj9fiSTSUSjUQQCgZ6FIRQKIRaLIRwOIxwOY319HZubm6LwqTz6Pa/anqpVb/xO\nDQCq7f9JiNm9GseemWVqdh7ORcJxg4ODspiOj4/Layp3zuV2u41qtYpyuSyemarg1SxnMrGcTieC\nwSCsViuGh4eFldNsNsXaL5VKyOfzWF1dRS6XQ7FYRCqVQqVSEQPBDOYx82DNZKd5/KDWv5k8Vop+\nNzFapf1cW7MBB9yz2FUFzoFks9ngcrnE+nO5XD0TVT0HO7PVaokbSPyVLjkHCt09Lg5mA6BfB+/F\neu93HjMlsJv7+CgsevXaZvdE5cZJuFOsQr1vh8MBj8eDeDyORCKBZDKJ4eFhJBIJsfw48ba2trC6\nuopUKoXNzU1Wc0Sn04HNZoPX64XNZpOJr/LoeU8qj57USdIsy+UyarUa0uk0VlZWeu4pGo3C4XDI\nwkTLMxAIwOfzyT1VKhW5r37PrSp5tZ3UflRhoE/amt9t/JiNNTN4jm3lcrkQDAaRSCQwNjaGRCKB\nwcFBjIyMwOfziWdVLBaxtbWF9fV15PN5VCoVpFKpHoXf6XR6jDufzweXywWPx4NoNCrwTzAYFK+A\nnmIsFhO4bWxsTKz8xcVF5HI5ZDIZpNNpWbDVoK/6jP3mZD99Zbbg72QE7CSPpaLvZ9kbv1NlJ1xS\ntco5gZ1OJ+x2OwKBALxeLwKBAMLhMLxeL5xOpwRyOPnV6wCQDqWS51+1WhV+dS6Xk9eNRkMWhW63\ni1arBcDc9Tbeu7Gj+7WDmYu313M+KqWgDngVUjC7rgp79VuYLBYLBgYGRAGMjIxgZGQEo6Oj8Hq9\nspg3m00UCgUsLy9jZWUF2WwWjUZDzk82DscC6ZVUMPQUnE6nBFjJnaeSV/u1Wq3K4r68vIzNzU2s\nrq7KvakW/tjYWA8DyOVyIZVKIZ/Po16v9xgEFLVdjMwao9dn9D4/DTFTbntRYBaLRebf1NQUYrEY\nBgcHMTc3h0QiIZ5TrVbD+vo6Ll++jI2NDWnvSqXSA9NwnlK4+KvC+I7X68XY2BiGhoYQDocF8w8G\ng7JI+3w+tFotFItFJJNJpNNppFIp3Lx5E/l8Xix/1cLv59UYF+S9Qpf9Fo6d5LFT9HuBb/opczNR\nrXi6ax6PB16vFz6fD6OjowiHw2I9cAEgJmgW/FQpcip0Q2VPlzGfz6PZbKJYLCKdTqNWq4llQBzY\njF9tBteYKfjdICF1MqlKoJ+1/Cihm51ceOOz9XPnOSGj0ShisRgmJiYwOTmJWCwm/dZut5HP57Gy\nsoLbt28jnU6L5U6qHd11l8sl40KF41jigMFbq9WKer2OSqXSE0Alo4PWIi1HTvCNjQ1sbW1heXkZ\nU1NTGBkZkZiB1+vF1NQUfD5fT6IWLUO1pIKxjXZSDEbl8UlZ9f0WJuO9Gb/n5+wTtsvw8DAOHDiA\nkZERJBIJeDwelMtlLCws4PTp01hcXEQ6nZaFttvtwu12Y2RkBIODgwiFQgLzNBoNpFIpDA8PIxAI\nQNd1/PjHP0Y+nxcFv7m5icXFRVy4cAHvv/++eAvUEZFIROA3Wv5jY2MYGxtDs9nE4cOHkUqlROlv\nbW1JIFdlWBnn415gVbPfPOgC/tgpejPh4DUWo1L/G4UTglYgO87lcmFoaAiJRAKhUAjJZBJer1cs\neBWaUTF3ADL5eB/EBIklulyunqSaaDQqbI5oNIpyuYx0Oi3YMBUHLUTV2ug3cXbCsfu1g9oelJ0U\n/sMUMy9LHbRmz2tcoKjkvV6vWFqTk5OYmppCMBiEx+NBp9NBuVzG5uYmbt68ibW1NdTrdQwMDMgC\n0O12hR0zMDBw33hif9MiGxgYQDgcRqFQEHhF5UOzHbl42Gw21Ot1VKtV8TwAIJVKoVAoIJVKYWZm\nBvF4HG63Gz6fTyxYWvsWiwXZbBblclnGm9mCrd6zyvnere0flezmYZqNV7UNPR4PgsEgRkZGcOTI\nESSTSczNzSEQCKBareKDDz7AlStXMD8/j83NTei6jmazCb/fj4mJCRw4cAAvvviisGs2NzdlbJAz\nPzo6KjGYo0eP4sqVKzh+/Dii0agwsBqNBi5duoQf/OAHAqvlcjm4XC54vV6Js/j9fgmwRyIRjIyM\nIBqNIpFIwOVyYX19HWtra1hfXxcqJwBTb2036afkfyqhm50G5U7f9cO4qORJxYpEIojH44jH45ia\nmkIkEkEwGBSrDIC45+VyGfl8Xji1dNFJt1OvSUXidrsxMDAAn88n1iVx/nA4jGQyiU6nIyyPUqkk\nwbhKpYJsNivYPuEdAPcxMnbD9fq1yW6Y/qNU9CpurAoXLWPylCpUorSiRkZGMDs7i7GxMYRCIVit\nVoFWbt26hdu3b6NUKgmGz77z+XwIhULweDzSHlzELRYLfD4fxsbGEIvFUKlUcPnyZaTTabGydV3H\ns88+C4/Hg62tLSwtLaFYLIo1z8AwAFQqFeRyOZRKJTidTlmI7ty5g0wmg6mpKUxPTwsFMBaLibdC\nvr7FYpHz77QIqng95dPC6XcaT2aLlKZtJ6n5/X7Mzs5ienoao6OjOHLkCHw+HzY3N3H27FmcP38e\n8/PzsjDXajUcOXIEn/vc55BMJtFutyUuQqiNGDwDqWNjY5iYmECj0cDS0hKq1Sp8Ph8cDgc0TUMk\nEsHQ0BBsNhump6cxNjaGUqmEoaEhpNNp3L59G3fu3MHCwgKWlpaExUUK58TEBEZHRzE4OIhTp06J\ncXDlyhWsrKxgfX1dxqIxVreT4leVu7E/f6qhG6PspOD74dtU8rSyA4EApqamhJExMjIiwZZmsymK\nl1huoVBAPp/vcQuN1hMVlGrdWywWCeRw9WcCh9PpBACEw2GEQiG0Wi0EAgGhbHm9XlkELBaL0PzM\nrDkzrFP93Exp7oSPPkrZqf+M96O+57FU8tFoVJQ8MW8q+a2tLVy9ehXLy9t19FwuF1qtllhis7Oz\nOHjwoFAdU6kUSqUSms0mrFYr3G43Jicn8fTTT8PpdELXdczNzeGNN96Ax+NBoVDA17/+dQQCAWia\nhkajAbvdjoWFBVSrVYF6fD4fEokEAoEAtra28NFHH+HOnTtIp9MIBAJwuVyoVCq4dOkS8vk8Dh8+\nLLBTOBwWr8Bms2FlZQUA7lP2e2m3vbT/wxbjQt4Pf+Z/KkkylY4fP46xsTEkk0k4HA6srq7iH/
7h\nH3D9+nVkMhkMDAygXC7D7XbjN3/zN3uCqDx/pVIRb4pYeq1WE4/d7XZjY2MDtVoN2WwWwLa3RUps\nMBhEp9OR7OYTJ05I38zMzMBqteIf//EfsbGxgVwuh1wuh3Q6jW63i7W1Nfh8PiSTSZw8eRJDQ0OY\nmJgQI9NmswmcU6lU+rJzjO1mtohTfiotelU4EIywgxkeafxMxeOdTicikQiSySRmZmYQi8UQi8Vk\nwtZqNWQyGaysrGBtbQ0rKyvCfFCZOcye5HsAgvtaLJaeIlasJme1WrGysiKB3ng8Dp/PB4/HIxbg\n0NCQwDuhUAj5fB65XK7H3avVaj11VoztZOYe7+YdmU069fiHKcbFx5iVa2at8nsq+UgkgtHRUczO\nzmJychJerxfdbheVSgWrq6u4dOkSisWilDLI5/PodDqYnZ3FM888g6eeegrBYFCud+vWLZw+fVom\nPy1Kwnu6rsPv9+PQoUNYXFzEoUOHJDDLBX52dlbiL+zzU6dOYXp6Wp7txRdfxLlz53D27FncunUL\nVqsVwWAQuq5jdXUVpVIJx44dQzKZxMDAAEKhkDC8VCVeLBbvUwo7xVmMwdpPckE3u66xX9m3wWAQ\nExMTmJ6eFjxe13VcvXoVZ86cwdmzZwW2azabeOaZZ/C1r31NDDRNu5c81Ww2sbq6KpBpKBSS+kOT\nk5MIBAIAIIFzj8cDTdMEHiPzKpPJYGtrC8ViUYwAemT1eh1f+tKX8O1vfxsulwvxeBzZbFaSrBiM\nTafT8lyjo6MIhULQNA2rq6tYW1uTxYYMLrP2Metbo/xUW/Q7PdhOyl7F+4jBxmIxHD16FGNjYxgf\nHxcrsFwuY3V1FXfu3MGVK1fQarV6aHWadq8uhkqNA+41Lt9zIVBpggB6OLgrKyu4du0anE6nZGvG\nYjHh7NID0HUd1WoVm5ubyGazyGazWFtbE66uWer8g8IvZh6A+lzK7+4AKAHoAGjruv60pmlhAK8D\nmABwB8DXdV3P7XY99bVxUJuJSoFTlfzY2Jjw2pvNJu7cuYPLly+jXq/D4XAIt314eBhHjx7F9PQ0\nTpw40UObbLVaGB0dRSwWQ7vdRjAYlMC8w+FAIBBAq9VCvV7HzMwMrl69ipmZGQCAx+OB3W5Hp9OB\nz+dDLBaDy+VCPp+HzWYTDJi/CQQC+PznPw+fz4fx8XFcvnwZa2trCAQCGBgYQLFYxLlz59BoNMT6\nI/ND7RNd11Eqle6rk6OKOg+MivZRLOD9hF5uv7FFamwkEsHx48dx6NAhHDhwAPF4HOl0Gu+++y6+\n+93vol6vyzGvvfaaJMGFQiEMDQ2hXC4jm82iWq2KEs/lcqhWq/B6vQC25+DAwECP8afWOaKSZ9IV\naZqXL1/G5z73OfEk/X4/qtUqFhcX0Wg0kEwmceLECWxubuLy5csYHx9HrVYTumWj0cCNGzcwPz+P\n0dFRHDx4EMeOHcPBgwdx+/ZtXLt2DSsrKwLbEp41LvBG2YsH108eO0VvJmbKHbj/YdVU+HA4jLGx\nMUxPTwser2ka6vU6Ll68iPn5eaTTaQDbODut5larJanVxNp5XibOcDDTylYTJdhpVEakd9GNvHXr\nFmw2m2RxcpB5vV60Wi2pqhiJRJBOp2G325HNZpHP5yWDT+XpmlnoO03sftBNn0Hziq7raeX9HwP4\nd13X/4u2vT3kHwP4o732o9Fq6QdHWCwWuN1uhEIhDA8PY2ZmBhMTE/D5fFIk7ObNm7h27Ro0TYPb\n7RaG04svvojJyUnU63VEo1HpQ9amqdVqaLfb8Pv9aDQawp92OBzCuuL9ud3unv+sWR8MBtFoNBAM\nBgVmczqdPVRNWoAWiwXRaBTpdBpf+tKXsLCwgPfff18WmWaziQ8//FAWlna7jUAggImJiR7efqfT\nQaVS6bHszTwzs1jIJyn94EN6cw6HA6FQCKOjozh8+DAOHjyISCSCpaUlvP322zh//rwYXiMjI/j1\nX/91uN1uyWbmnBoeHoamaahWq7BarQgEAggGg6hWq0JtpEdcrVbFY4pGo5INS0U/MjIihtnt27dx\n5MgRBAIBDA8PY2BgAM1mE4uLi6jVarh27RqeeeYZRCIR6cMnn3wSDocDqVQKFy5cwHvvvQe32w2X\ny4W1tTXk83kAEANE13W43W7cuXOnh4yxW999XCUPPOaKvp91olrvqnLgRAyHw5iYmMDMzAwSiYRk\nvaXTaczPz+P9999HrVYTV52Dh2nVZHj4fD5omoZUKoVarQafz4cnnngCdrsd77zzDrrdLux2O8Lh\nMLrdrih3Sr1eFwolV+5Op4NmsynVDZeWlhAOhzE3N4fBwUGBhhjZZwQ/k8lI6jxxYbU9zPD6vQR5\ndrLqTeSrAF6++/r/AfA2dlH0ZoOS1zFLkKKSD4fDGBoawvT0NCYmJuB2u4Weev36dYFCHA4H1tfX\nEQwG8Y1vfANPPvkk7HY7MpmMYPZcxGlt67oOj8cjilkthqXeH5PnjN4cPTCfz4dGoyGljnVdl/Or\ntNlCoYBnn30WkUgEL774Io4cOYK/+7u/QyqVwtDQEJrNJubn59FutzE3NyeLy8TExH0bZRgtQLZZ\nPy/pUXlq/fqWYkyAIzvJ5/NhaGgIU1NTOHjwoFjS//Iv/yILHrBtfP3Wb/2WLKzlchmZTAbFYlHa\nf2xsDFtbW+J5kxCh67oYX51OB9VqVXImXC4XRkdHoWlaTzYySRg2m03q5zDjNpvNytgLhUIIhUKw\nWCyoVqs4fPgwBgcHAQCBQACTk5NIJBL44Q9/iGw2i1gshlqthjNnzmBubg4zMzM4cuSIFMYj/Mf8\njL3Ix4HiHhtFb7z5nTBm9T8Vo8PhgMvlQigUwokTJyTZIhQKweFw4IMPPsC5c+eQzWYlUs7JMzQ0\nJLxYm82Gzc1NOBwODA0NiYtdKpXwyiuvCAb73nvvYXx8HC+++CIymYxgq++++y7K5TKGhoYwOzsL\nTdNQKBRw7do1CbbSKmUCzuLiIhYXFzEwMIChoSEcPnwY0WgUbrcbyWQSIyMjKBQKmJ+fx9bWFtbW\n1iTjr9/gMMO+je938QJ0AN/XNK0D4P/St/cPSOi6vn73+w0AiT10bV8owWih0GoOBAI9PHkq0Vqt\nhitXruD27duiUFdXVzE9PY2XX34Zp06dArBd2ySRSEiQLB6PCxPDZrOh0+mIUicrhgweWlhGKETX\ntzOhmUUdCoWE0aFa8Zq2naxVKpUAAJubm0gmk0gkEuIdnjp1Ct1uF2+//TZu3bqFaDQKTdMwPz+P\nVquFI0eOiBEyOTkpsB3vjWn9xr4zwpg7LPYPxVPbBfrrMcJsNhs8Hg9mZmZw9OhRHDlyBMFgELdu\n3cK///u/48yZM0JP/sY3voETJ07A7XZLfGt+fl6C1CwhkUgkZKEkNl+v16UfAYg3wPujccAAOi3/\nRqOBjY0N8RwTiYRQaZlo12g0cOjQIYGmqtWqEDvGx
sZQq9VQLpfx0ksv4dSpU7h27Rpef/11tFot\n2Gw2XLx4EdevX8dLL72Eubk5TExMwOVyYWFhAWtra8hkMj31cx6mPDaKXpW9KH31M3ae1+vF6Oio\nJNGEw2HYbDYsLi7i3XffRT6fFwVPS2x2dlYolocOHZIg0Ycffig1SuhK+v1+YcpYLBa8/PLL8Hq9\ncrzNZsORI0dw48YNzMzMiPIgNLO5uSmVDInBEzeu1WpC6SwUCpiYmMD4+Dj8fr9gwgcPHhSWQSaT\nkep63EDBrI3MIB6j9BlUL+q6vqppWhzAv2ma9pHhGF3TtPsO1Hp3DrvvOkbPTP3MbrdL3fiRkRHM\nzMwI3lqtVnHt2jXcuHFDMPBCoYBnnnkGAETJRyIRodW1220sLi5iZWUFgUAAnU5H2C0cB16vV2i2\nAMSCBtBT2MzpdMp3TG6ipVmv1yX5qtvtIp/Po1qtolAoQNd1jI+PS1VFEgBOnTqFixcvIhwO4/z5\n8wgEArBYLLhx4wYsFguOHTsmyTwzMzMSnG82mwJTmS3YZv25BwvwgT01o+xkmJEY4fP5MDk5iYmJ\nCcTjcaRSKbzzzju4fPkynE4narUaZmZm8NRTTwntUdd1IUO43W6k02m0Wi3Y7XYUCgUJkuu6jlgs\nJn1ID58u+bD1AAAgAElEQVSvOW9UlpzqGVksFilSF4lExCvgb61WKyqVCiKRiBxLD445OJqmicGX\nyWRw8OBBnDp1CvPz81hfX4fX60W9XsePfvQjdDodTE9P4/jx43IMs+qBe7E+Yzxtrx67UR4rRa9a\nI+pnxu/U/1Ty0WgU09PTmJubQzKZlJTz73//+zh//rwodmbQPfnkkzhx4gTC4TDW1tZw5coVybrU\nNA2bm5vY2toSGl44HIbH44HD4UCpVMKhQ4ekfAIVCwM7tPJUXi+VGABx7RcWFnDjxg2kUimsr69D\n07YZE6lUCul0GleuXMGRI0eETsj6KMFgEGtra0ilUlheXhbrXt0xxwjjUMyUrfqd8nr17v9NTdO+\nje0N31Oapg3pur6uadoQgE1jH+qGncOMA9LsnoB7aejBYBCDg4OYmZlBMBiUAX/58mVcunRJFr5S\nqYTjx4+j0+kIPs7JDUCU6tDQEG7evIlGo4FoNColhd1uN6xWq7Ci2PYsWcFrtFotlEolgY6Y4ETF\nQKXLuiisaU6FNDMzI/kVAMQjYI37RqOB48eP49KlS5Ile+nSJWiahieffBIAEAwGMTMzI2WUWSZB\nZWKZKfk+3trH9tR2W8SNQqVKyGRwcBDj4+MYGxsT+PPixYuSiDY0NIRvfvObkl9QLBalneixsx/s\ndrsoRtajCofDkhzHWAfnezgcvi/RkcJxwCRKKm3G4QiNqTVzarWajDky6citj0QiAtMODQ0B2Ob+\nb2xswO/3S35Au93GE088IZ4Dg8GkX5q18ceNwzxWil4VM6Vv/F4NvhLiSCaTkrS0tLSEs2fPol6v\nw+v1otPpwGq14ujRo3j66adlc4KJiQmsr69LmnsikUA4HEa73UYul5PO4znK5TJGRkYA3Ntyjtip\nxWKB1+uFpmlSM4UrttfrFV40mRYej0dW+5WVFfEWmOF39epVbG5uYm5uDqOjoxgYGMDg4KDsjtRo\nNGC1WlEoFO5z+YwWvfG9sT2V1x4AFl3XS3df/w8A/ncA3wHwPwH4L3f///1u/cggXD/hJGLdoWg0\niqmpKSQSCWHfXLt2DVeuXIHL5ZJ4yosvvoinn34amUwG586d61HGnHjhcBi1Wg2JRAKrq6uixF0u\nF2KxGFqtFuLxuMA41WoVuVxOILVbt26h1Wrh1q1bws0nk4MJOclksqfwHa9Rq9Xku3A43LOQkIZb\nrVbx1FNPSTllpt67XC5cuXJFKJ66riORSGBqakoKqDGAt1Pb8poG+Vie2t3vZBG3WCw9v1EXGrXW\nDhPXyFQ5cOAA7HY7zpw5g9OnTwtj6tChQ/jlX/5lSS4kO45G0cjICO7cuYPR0VEsLy/Ls9XrdUQi\nEWjavcqz9KxodZMqzfukJa+y5TifiO3Ti+h2tzcpqdfrguszqZFjx+/39xTSo/FhsVjkuX/+538e\nly5dwuuvv456vY50Oo23334b7XYbx48fx+c//3lYrVbMz89jYWFBjIx+Rpna5nuRx1LRG5RO3/e0\n5rnpwMjIiFhWGxsbeOutt1AulyVDMhgMYnJyEuFwWIojhcNhrK6uYnh4GFtbW1Lzxu/3i9JsNBqS\n/ET2QzgclsbnJgh0u/x+vwRbOLC4mQGTdAKBACqVCoaHhzE9PY1nn30WV69exfnz57G4uAhgGz9u\ntVqSaKFpGoaHh6UWusPhEMuCA19l5ahtZXyvfmai/BMAvn33vQ3Af9d1/V80TTsL4A1N0/4TgEUA\nX3/QfjVaInTNXS6XZCdyQQOA1dVVnDlzRo6tVqv44he/iNdee02s+xs3buCdd97Bz/zMz8jCTOuY\nyt7n80mwnIFUemOsVcTEuXw+j3w+j0wmI274ysqKBOtJ0SNWz2Qetd4RF4JwOCysLi4k7XYb77zz\nDvx+P55//nkpj+twOPDmm28Ky+fMmTMIBoOi+EZHR7G1tSVlNah0+nlsZvJxPbW99i8Xbi7ShE1G\nR0elPMDy8jLOnj0rbJhSqYRf+qVfEgYU4bp4PC5Kl1nRKysriEajYlRxjrG0gbq4qfWqzMY+YTje\nO+eqWsSQ7Kl8Pg+73S7z12aziXdNj9LMq2KiHBW63W7HX/7lX8p4vnDhAmw2G44dO4bp6WmJu928\nebOHTqt66g+i4CmPlaLfzYJXV2W66cFgENPT0zh48KBUNJyfn8f3vvc9rKysIBQKodFo4NixYxgd\nHUWz2UQqlcLi4iIGBweF3liv13HmzBmpScNgjMPhQKPRwNjYGNxuNwqFAgqFglicAIRNwMFGBgC5\n3Zubm6I0SKMaGBgQ/q3FYkGpVMLJkydx8uRJvPXWW7h9+zbW19cFLqrX6/jggw8QCATw3HPPSfzh\niSeeQDQaxfLyMm7fvo1sNnsf+4dtZlQCZjAPAOi6fhvAcWMf6LqeAfClh9Gn6qClNc8ArN/vF/f4\n9OnTojgzmQy++tWv4md/9mdlcW232zh8+DBOnz6NUCiEubk5AOipTlooFKQGDYOCbre7p96PqlBY\nn1zXdaRSKUxNTWFiYqKnyqWKofJa3DPWZrPB7XYLLER6bTabRS6Xw/Xr13H16lW8+OKLAuV4vV58\n5StfgdPpxN///d8jEomgVCrh9OnT+IVf+AWxHCcmJiRzU62l30/BPypPrZ+o/UrPNBQKIZFIIBKJ\nCANlYWEBAwMD4tVwER0cHBTlPTg4KN4erWOv14vFxUXJU4hEInLtdrstORPGPX+5+KjQIaEZdZtI\n4F4hRCpaZl9HIhE0Gg3pXybzqUpe0zSB2KxWqyRvkR566NAhPPPMM/jwww+hadvJfR9++KEoe7J7\n1tbWBC4y
U/Y/9dCNURHxv1HJk4bHMqZMrul2u3jvvfewtrYmrnwgEMDJkyeFq3727Fl89NFHcDqd\nqFariMfjUnhsY2ND6oVzA2hy6qmoma2qDhpytev1ek/dE9a3oDtfrVaFy0v2Dy0EWv3k5brdbnz0\n0Uew2+3CIS8UCrh8+bIsbB6PRxJ1SqWSBAPNqmKqZYLN4h2PUvqxMhgYjUajsnkIYZBz585ha2sL\noVAIqVQKr7zyCl577TVZ4FkFlAHxlZUVWSRY20bTNITDYfGIqIA5memqM+DODMlutyuZqvxPL4EB\ndMJ2tFyZak+6JSEbXdclyL6xsYGVlRUhD/A8zIx+7bXXkMvl8NZbbyGRSGBrawvnzp3DSy+9BIvF\ngqGhIWxsbCCfz6NQKIgHZ+w/I4R3Vx6Jp2YUTbu3I5TL5ZI8iFgshu9///s4d+6clCoYGhrC7/7u\n72J0dBQOhwPVahXdble8JhpTbMd4PC5BVypzWuBkYvE7Yu3EwAnFqJv8MA+CnxH2UYvVEe6jF9Dt\ndhGJRDA8PCwwLQApW7K+vo5cLieeN9k4hH5OnToFv9+Ps2fPotvtolAo4PTp04jFYkIIyeVyuHHj\nhih7o9f9oPP1sVH0/W68n5VCa431azh5b9y4gYWFhZ5s1hMnTiAUCkk9mZGREalTfuDAAQQCAYyP\njyOfz0tZW6Y4Uxmoq75a3Ix/drtd8HJG4judjiTosG51t9tFNBoFAMETyahglqzdbsfU1JRYMrdv\n38bW1hZisRiazSY2NjbQarUwNzeH4eFhqci5ubkpCxAHXb/oPT/7JBS86tKq9YGIq7IqJS02l8uF\nmzdv4sKFC7Db7ZLG/uUvf1ksclromUwG1WoV4XBYIBhi4AyycpPwRqMhQTku2hSWl6bSVDn37AfV\nI9PuxmBUTJdxAWK0zHYmbEOoheyfarWKdDotVinJAl/+8pdx584dbGxswG6348KFC5ienpYMXbUO\nujFwp7Yv21x5/dA8NcpO85Z9G4vFZGG+evWqeD31eh0vvPCCwGM0dLxer2SQc7tFbu4OQOAVm80m\ncxGA4OoARLnz9zQW2V9qMJvwoWoxs79Jvx0eHhZIlKQNxoKowEmI4CYyuq5LUH5zc1POR0Py4MGD\nuHTpkgSQr127JjWdEomE7KHAypdGr/ynjnXTL8hg9l9VpBMTE5I+7XK5cOHCBbz11ltot9twOBwo\nl8t44YUXepIomJQUjUZx584d5PN5UTQnT56U7EMWqSJfmoEWp9OJmZmZHouUg4g1WKjA1Sh9qVSS\nYBHLKmiaJslQKq6eTCbhdruRSCTw3HPPoVqt4s0338R7770nk4eVL5966ikJzh47dkysW7JFaA2o\n0g/rexRKX10MqYR4fdLuQqGQVB+02+1otVqSIckyta+++qpsG0dGzNLSEpaXl1Gv18XjUuMqZMPY\nbDbE43HZ/1XX7+0hwN8S+1Yn/9bWFur1Ora2tuB2u2XC0XUnfEQhZpxIJKSPidvzWCoVq9UqrK7R\n0VGMjY0Jrh8KhfDqq6/ir//6rzEwMIBCoYDz589jfHwcdrtdNj6nZa/uYkavTW33RylGxcNxRaUW\niUQQi8UwMDAgu0CxrchwIS69vr6ObDaLI0eOYGtrS2iqjEOxpj+tbaOh0mw2xRtTE6ZUeIsKnvkU\nhJdarVZPXSsVBjp69KjsJ0HPLRKJ9Ow8p0KATO7id/TyWbOo3W5LctbAwIAsbgsLC0gkEjh58iRG\nRkZkjqvjUm33B5HHQtGrspNromK63CeUtMOtrS2cP38e6XRa+Krc2KFarWJ5eVkq3g0PD0sW3fLy\nsmTOcscfVekyexKAWGqTk5MolUrC1lAHNz8DIMlMmqbB7/fLwOAiUq/XZReqSqUCp9MpUAaVBKl0\nX/ziF5HNZnHjxg3J9Gu1WpifnwcA8WrGxsak5gvdehWv3yVQ9xP2Xn8xUzqapklqOllNXq8Xly5d\nklKwuVwOL7zwApLJpNSgabfbSKVSuH37NqrVquCpLFPBSUzFTned7jcnGy1fKhhd1wViKxQKuHHj\nBpxOJ65cuYJmsymBNZfLJeUNpqamANzDh+mVAfdqj7Mf1DIavD8qOY4xKp1kMomnnnoK7777LhwO\nB5aWlnDnzh0cO3YM7XZbNsJg+dvd2vpRiaro1JgVxzjrtNdqNbz//vuoVqtShOzpp59GNpvF+fPn\nJYs9kUhgYWEBzWZTdnWi9UyoTg24qhi7usgx14ILK3/LOcg2IuuH85ftxvnHBYvwKHCvUi0XDgBy\nDnqUxPBZqpwQbi6Xk01QSBz58Y9/LAyjCxcuCBxLD4eL+WcGutkJn1eDO+RbqxjZuXPnsLi4KLsB\nRSIRPPvsszh69KhQ8Fi6dHBwEKOjo/D5fMhms6hUKkIDU6/FwcugDBU/C2EB91xEDjAOYippWu6B\nQEAmMzP+uPEIWR3cCKXdbkuwlZtpAMDU1BRsNhsuXLgg8FIul8OtW7cAQCrlTUxMYGNjQypxqu7t\nJ6UAVFGVgHp97rTEzRtYJfD69esolUoYGBhAIBDA0aNHpb4MS0fcuXNHkk4ikQgKhQLW1taEAkkL\nnOwY9V5UqKPT6cimIltbW2i1WpI04/P5cPHiRTzxxBOSP8H6Q7FYDN1uF+Pj44LvGgt6USExs5LU\nUNZRCQQCyGQyWFhYwMWLFzFxNxDNkrncHIPZ1NevX8fhw4fl/uLxOJaWloTxo7avqgwfpahWvIp/\nE1plIcGNjQ0sLS3dR2AYGxvD5z73OVGUNKYqlYrQWBmD47mNRcDUvuTnfK1a7yqMSWiF85tjhM+g\nwj28Z/6OngJ1BI/lM7PWEpPx2u02lpaWxOtnQN/hcCCZTKLRaOD8+fPodrtSyPCFF16Q7SZZKsGs\ngu1e5bFR9Kr0C8IymMYtwpjEtLa2hrNnz0qjx2IxfOUrX8H4+Dji8Ti8Xi/8fj9u3LiBUqmETCYD\nm82GRCKB8fFx6VRmwKmThpYfgzYcNGrpYgA9HU+8ngOJ25fR0gsGg3C73VhZWUG9Xkc2m4WmaQIb\nAduWXbFYFCUPAJOTkxgbG4Pf78eVK1eQSqUQjUaFfulwOMStP3jwIG7cuCGKvl6vf+yI/U/aj0Av\ndsy+pNUXDAYl+WxlZQWLi4vCkX7qqacQj8fFi3O73QC268e4XC6MjIwIvXZ6ehr5fP4+DBbotfbY\nV5qmSXwkk8n0BGrdbjdKpRLi8bgoHFqYdM1tNhuq1aoEgNWgKN8D6Nk9bHp6WuopcUzcvHkT5XJZ\ndi3yeDwoFovSj//xH/+BbreLxcVFZLNZ2ZKQe5hms1mpEaPCZGb98Cj7WP2M9MNIJAKLxYLl5WVp\nM13fLiTm8XgwNjYmpT+YQMY+JNmA52T72u126V9eX10EqIgJo/I9+4JjgDEYY0xD1TsqNKsuDOqz\nq7+3WCyYnJwUz4P3yoRKXoN1tCwWC55++mncuXNHDL+NjQ0A23qC9
Fx1L9yPI4+dot/JqicGyvoX\nHBgsVev1elEul/G1r32tZ2NfrrjHjx+XiDg3AyGTgudXObeqEgd6rUEqC3WwqUFb1mzh4IjH48LW\noPvIgCl3TwK2A3J+v1/KGzebTbkvu92OSCSCV199FT6fD2fPnkU2m5UNLe7cuSN17pmhyzrZamr/\nXtr+YYtxgJK6Slqdz+dDu90WGAWAwBO5XE4YLORiv/zyy3j//fd7rDQGQ9knKrtCLQxGZcMFmzQ4\nBlSHhoag67okYQHbReYikYgUUOP2dFz4Vc62UXlQWahjUcV0AeDll19GMpmE3++XYF4ulxNYa2tr\nC9lsFuvr6z1jOxgMYn19/b5szz6sm4cuRjiB84iLVjQa7YEcaZ2+9NJL4m21Wi1UKhWJwbjd7p55\nRnIB8fpoNCpzlgYEq8SqtEoVojEqc8J4HAtk2KhCy55QDY81evvk2jN+yK0hubg7HA6pTMpkK5Vy\nPTc3h1OnTuGf//mfAQAbGxtYW1tDIpHA6Ogorl69KrvdGdt8r/LYKPp+N25U8uRaj4yMYGBgAGfO\nnMGHH34oiRcvvPACDh8+DK/XK/gn4R265dw1XtM0xOPx+2AF1epTJwwHBScVFwVOejUARu+Dg4nW\nCD/T9e1sR13XhWbHwCStt83NTdy+fVuCe8FgEC6XC5qm4Qtf+AK8Xi/efPNNbG5uSjCHeHAikZCg\nMa0JWgX9rPpHZemrcALfW61WWbQ9Ho8wTpaWllCpVAAAs7OzUid+fX0dnU4HY2NjCIfDAIADBw7g\n1q1bePrpp4Wuql6LioTWn1qFkvfgcrkwPj6Ozc1NWK1Wsahv374tE7PdbqNYLGJqakqCip1OR0gA\nantyvDJWwMAtvTUGjQnr3bp1CwcOHEAymZTkLo5Pejyzs7NYX18XCOD48ePSZnTtaUTwHtS+fJQe\nnFlAn2UBvF4vPB6PlPRgtvfo6CgOHDgATdsu+Md4GT2jVqsldFRCKcyFYNby+Ph4T0BVharU9wyy\nsk9YA0nNe+F9k1rJMWNMnFKFMLIKVRm9e55ThatarRauXbsmbCkmZpLCTcg3l8sJo45Zu8Tx2b8P\nouwfG0UP9K9nowZFWVPa5XIhlUrh1q1bssWYz+fD5z//eYTDYeGus+AUrTUybsiKMcIvaiPyv8qz\npVWvRvfZkSqPV8X1VNhAtSyGh4cRiUSkxoXD4UAsFkMqlUKns73HaKvVEsv+lVdekV1vSqUSPB6P\nQE/ZbBZ+vx+pVAoej0c2ashms1L3hZj9J4nTqwrQqAwZiCWOWywWkUqlUK/XEQ6HZU+BwcFBaetK\npSIxksnJSfzoRz/CwsICJiYmUKlUZBw4HA5R/CoUpwZFCdUNDw8L5ZXjglRZ438WywPQgysTW+ai\nD9zDh5mhy0S2SqUCm82GhYUFSebzer1wOp2yNV00GpXxSmWYzWaFUskCe8zMVeGFTzIeY2TdAPdY\nSV6vF1arVTYIYTvOzc0hFovBbreLQk8mk3L/hLLIX6fnnk6n0Wg0hN1m9NqMjCN13HHecXexbDYr\ncb6JiQnJlmfJEjLCjAqd11I9SVV/qJ6aiu/TICRtlkFpj8cjusrr9SKTyQAA0um05PZ4vV4J9KuQ\n3IP072Ol6ClGhU+3ipsWUFHfunULKysrwkWnlUvWDZU0AKk7D0CCrlztVfeO2J7qnvWbQGo9DZWH\nS1xOxfZp8ZPWx8HjdDoxODgou9U7nU4J+nEXIyZa0UJkSYVSqQS/34/x8XGpgMkU88nJSSSTSakN\nwkVP3bmH7fuosHu1vYyQHPnwah14lh+gt0MrjwpbndjMFD1x4gSuXbuGpaUljI+PIxKJYGRkRDB1\nYqVGzJwWn2rZqxYhrTv+qWwoGg08F++NCwrdfZ6fCoMJU5lMBouLi/B4PDhx4oSwMwDIQs5z8vkT\niQQymYxsV8djaDWr409t/086JsMxzzITVPSsEcQNVviMHI+cd9VqFZcvX8bm5qYUI2NGOhPrAoGA\nWNDGOjb02pivQIVLTP75559HLBbDt771LayurmJhYUGYTW63WxT/wYMHZW8ANcZCZcs8AFX5ss9V\n/aUuCg6HA4ODg9jY2ECxWMTGxoZQt4lWlMtldDodpFIpqWdPyDCdTvd4Lj/VFj1FVbIc/PF4HJOT\nk7I70I9+9COx2BOJhLiDLA3MLLt6vQ632y0KmOVf+Vs1gKXCL7quiyVnhm8bj2WwVvUEeE0mRahK\nQY0LMLmGlCq73Y7R0VHBJ1XaYDqdRrFYlE02Go0GTp8+je9///vipVy9ehUOhwPxeBxjY2OSsGNM\nsHnUWK6xPXkdQjesOWO1WkXRdzodsfDIQx8YGBDeOhUxPYInnngC+XxeqHLMblW9L3WCEEIg84LW\nkrqox+NxSexhjEUdL5z4XEhYyoDjhC47+5z5D+TXHzp0SDwTQi8Wi0Uod9yvgOMkmUzKpuKlUkmy\neNVsXVW5qO39KMVszNAoozIul8uSg1Kr1aBp27XdWfuHtWDYR4lEAmNjYygUClhaWpJy1CwHPjIy\nIh416bVksXAxV+veqCyaVquF8fFx/MEf/IFsPUjCBTNZWRSRjCnVe2cshgZcqVTC8vKyJL4NDg6K\np0hmEceXrm+XUmaxu3g8LrTrfD4vJaoJXeXzeSED0MswFjrbqzw2it5o8amrNDuRbr6maVhaWkIu\nl5PkoMOHDwt+y4Hm8Xju2/6PHUq3G+idGJzMZord6DpxUKluGj8zegKq5c/z83wM2HAyBINBsWIZ\nZCLvnufgfqVMIHruuedw/fp13L59G5q2nYhFiiCtQW6QwuQdY7t/kq4+3XtapYwlUIlxspGBQeuf\nBcIY+AIg1hCVH2ms7EN1cqj9QguMCt/lckmZCXpPaqLbxsYGAoGAlNZQMVg+F/tU3byEfeZ2uzE7\nO4uhoaEeXjQnMY9jyjw3tyZpgIshK5zSG2Is4pO03vsJ5646x+hd848bcGvadiBS3ZSdlWg17V5O\nyvLyspSWJgOrVqthdXVVLH8WPQPu1aOnZ8W+59zlPAoGg1JfXhUaVvTeVNaOqsTT6TTS6bQUwGMm\nPJ+DdG5CiTzuwIEDSCQSSKVSQsdmzgcXeeZy0MgxwnMPKo+NojcKJw2Vgt/vRygUgsfjQbVaxc2b\nN3uSJkZGRiRJgUrSZru3DRyVsYqpq8LGU5OLjHCB0Vqg12F05/mZqgCMq7DZQgBs4/zHjx9HrVZD\nsViU3ADSzlhISdM04drzmQYHB1EqlSSQt7i4iEQigWQyiUgkgkgkgo2Njfvc2gcN7DyI8BpA7wJu\nt9t7iosxaaxer8Pj8SAej+PYsWNSOZDxBXLYmZTD+vNUxmrwi9dh6QG13TVNk42k7Xa7QGHlchnX\nrl2DzWZDrVbD9evXMTc3h5s3b6LdbuPQoUMSd/F4PND17To2Pp9Pgr6q56BuIg9AMnWNLCgqpnK5\nLEXLqtUqgsEg4vE4dH17
+8NKpSLtRCXPTcuNbcz2/6SEc5aGGb0vWuuMs3Gv5vHxcdl3d2BgQNpk\ndHRU9mvI5XKyPR/jFqRqLi8vyyJHnjnnZDabRTqdRi6XkzlNWjMXBgBSuoD9o85j4F5cRx3HNA5Y\nnprGyvr6ukB1aumCXC4nwV/V+xsbG0OlUkEmk5GxzfgbKb/lclkWDZWxZezn3eSxUvRmCkHTNHg8\nHlFYuq7jxo0buHbtGtxuN5rNJubm5iQ7kFmjdL0YIDVCM8ZV3rhqU0mTDqVahGaKkdAKf8OSpiok\nwPR7FccjtMM/Tg4OHqa6k3JIK4SLGS3OWq2Gp556CqFQCOfOnZPJdOXKFYyPj2N8fFwWDrWmy6MW\ns2Adn5v1+gFI3KHdbiMUCuHo0aNSEoAuvzGYqyo3tjOvwwxFlUoJ3GNOtVotnDlzBn/xF3+BL3/5\ny/id3/kdlEol+Hw+vPDCC9jY2EAqlcJv/MZvoNVqIZFIYHBwUJSJx+PBX/3VX+G73/0ufu/3fg8v\nv/xyT0wGuAftqYqO44MKxfhMvH/meVCRHT16FKFQSLaj5POwMJ5ZOr7x9cMWdY6q1+P4V5+fix5j\nZdeuXZO6Qz6fD+VyWap7HjhwQMZ0sViUmEu5XEYqleqB3ejJsI4VPTMGwdXqr7lcThhTDGRzy06j\nt04Dr9Vq4erVq1hdXYXdbsfJkyelqBqAnqA/PVDCfIRgVLhPLVVhsVgk2Y1QDWFJXrvRaEjGvJHT\n/yDy2Ch6MwiBD8Ua8Uylnp+fl/omAPDkk09iYGAA6+vrSCaTKJVKCIVC952Pk0OtJkmlSeHCoBYu\nM7r6qtJXo//qe3YmcM+qZfBXLbikeg1U9owLUDmMjIwIt5uLBoO6qqs8PDyMZDKJbreLf/3Xf5Vi\nSplMRmrJeL1esRiM1vyjwuiNQviMCorKkXgpISdik4SzmCxHN5/tSI9LVXSq0lXZEIyb6LqOQCCA\nRCKBf/u3f8M777yD3//938epU6dQKpUQDodltzE1BuPz+fDee+/hz//8z9FoNJBIJCRfQoVs1PGm\nGg68T96fuuA6nU6p3knsmaUuut3tiok3b96UeAsVKhdDjrdyuSxwlVKLx6pp2r/BZANwTdP+VwD/\nCdsbhv++ruv/ute+7TeGVBhLbuDumCbOffPmTSkm6PV6Baoql8tCQyX8Q+iRNWJYGprnL5fLWF5e\nlsG6+ewAACAASURBVHhXJBLp8eLUmkTtdhurq6tIpVKyoQsD2qphVq1Wsbm5KfV5mK/S6XTkO2Bb\nZ3BfjK2tLamxRHq3y+WSzWIajYboG/Z/Pp/H0tJSDzmEBiYNHHX8qO28V3lsFD1FxfJotRG28fv9\nuHnzJjY2NgTOePbZZ3Hy5El0Oh2sra0JG4HYlsp1p3tHihoVJZOvjBg7gB5rW1XEtBjMAo20IGnB\nqdYar0EFpQZ0qYDU+h3qQsDfkYXDQU03tNvtIpfL4emnn8YPfvADSbnf2NjA0NAQYrEYfD4fcrmc\ncHJV+aTwXfYrA3Z0vVnXh7XgWTiMLCnitKpVo9LX1JoydKtV74mTpdVqoVgswuPx4Jvf/Cb+7M/+\nDJ1OB3/yJ3+CWCyGX/zFX8Qrr7wiCyq9gLfeegvf/va3sbW1Jff9zW9+UzJZqfCN7As1QY70S3p3\n7FeODz4rcWQAQsnk1or1el08SLUd1QWDOLYiQwBe1w0bgGuadhjArwI4AmAY29sMzum63kscN+nD\nfspG9a5Ulgqtb03b3him0+ngn/7pn+BwODA8PIxu995WjjabDdevXxfviLTbfD4veHexWAQAJBLb\nOx+m02lYLBYEg0GsrKzIfKVRxEJp5XJZ2np8fByJRKInaMrnq1QqWFpawqVLl4SyzHwLYvSNRkPo\nv+l0GvV6XaqOcqwNDQ3B5XIhm81ia2sLHo8HVqtVKtGS6svtIqvVqmwPWqvVxKLnGP44GbK7KnpN\n0/4bgJ8HsKnr+tG7n4UBvI6HaB2wcdXXDLjFYjFJkLpy5QqKxaIwK77+9a9LPflsNourV69KPXBa\nzwAESy0UCsjlcpKUs7S0hKWlJdnZyAj3WK1WnD9/Hn/7t38rNe6PHz+Ob3zjG7JBM3nUHABm+DwV\nA60ZoJeeqeL1ahE1o7JiBiwnw/j4uLihDNLpuo4nn3wSZ86cQaPRwPz8PAYHBwV+YH0VVuPjbll3\nraNZTdNuPMx+NQ5KFa8lTY2TXIW4NE2TKpRUZipeSgs6nU5jbW1NrC1axmNjY+ICs20ZqPN6vcjl\ncggEAjhw4ADm5+clsP3GG2/gjTfewK/8yq9gZmYGN2/exOuvvw4AAqu1220pcU2mj5p2z/YkWWBp\naQnr6+virbH0bTQa7WFlqJmWfG5+R+uW+yHU6/WedlTjH4Q2FAlie+NvoHcD8K8C+Btd1xsAFjRN\nu4ntXafe22v/7svjL3ux6P9vAP8HgP9X+eyPAfz7w7IOgPtdPr5mIDYQCAgUQSuBu8VzMoTDYWxt\nbQkbx2q1ihXEc6lWeb1eRyaTkYJjrFDH71dWVnDmzBl873vfk3tiqQXSGEmbVIO8tAK5GgP34AQG\nYtSFjMFRKiPVClLhGV6fipGLWKlUEkuI1xgcHEQ0GkU6nZbaLNxNi9il0+kUq4gKBEBJ1/XZh9Wv\nqqgLHz0idUMX7qZDuIXWL+EbekAMeubzeSwvLwu1kkk1XHBJX6QVxOAYzzMyMoJsNovf/u3fxvvv\nv48f/vCHGB8fR7vdxvz8vNAWBwYGYLPZBDteXFzE5z//eTz33HMAIEFxKmku0GqANZVK4aOPPkKn\n05FdzAqFAoLBIEZHR6U8Au+N8BTbptPpCISjbgRPOqZqLfex9my6+QbgSQBnlN+t3P3sPtEMm4P3\nC+JzjvE1A7KMbzC2FIlE8HM/93NS3I75I4cOHYLT6UQsFsPly5cxOzuLWCx2H3STzWYluzoQCJhC\nN/V6XfqENWi4NzTjIMzKJnRC6Ia1eNTNULjNpRG6YSHFra0tDA4OyjagNAyYmLcTdHPlyhUZFyq8\nqeqHj8OhB/ag6HVdf0fTtAnDx18F8PLd1z+xdWC05IF7ASvisqFQSCY1y36eOnVKLCaPx4NYLAZN\n07CysiKBmmAwKHAMmRH5fF649Kxdzy3feP35+Xn86Z/+qbAHOECTySSef/55+P1+AOjpAFUpc1Bz\nYnLhoZXGycxFgc/BAK5K+VKhgHA4LDtJaZomZQ9opQKQQl2Dg4MoFotoNBq4ffs2xsfHJTnL5/OJ\nkr/bz2QnZB5Wv/K8at/SSyK0QusXgCSWlctlWCwWNJtNhMNhseaB7YWOgatisSiMCotle4P2lZUV\nvPzyy7JwqbtJqYlMtOxZ+fLZZ5/Fq6++ikwmgw8//BAvvfQSnnnmGbz55pv44he/CI/Hg5WVFZw8\neVKOsdlsAteozI1OpyPGBwDxVCYnJ/H2228LLZbf3blzR+odkQVE7L3ZbCKbzUrgj
h4K21aFqYww\nEH9jAtH13QB8l77s2RzcLLajxp6MDDfOFS7iMzMzUueIv+eevhwX9EwikYhkfhOGZTlgAPB6vTh6\n9KgEY1utlhgAnIPqHrvJZHLXYKzb7UYymUShUJDN5alnACAWi0m9+GKxKBVXyQRTabEkh5gFYyOR\nCJxOJ65fv95jlFLRMw/BrJ33Kh8Xo088TOtAxeT5nv8dDodMdu7WwixStbIdIR5aYCwFzM7j6un3\n+5FIJGQAZTIZ2Zga6KWjkfalWmqTk5PiKqtbfJHlQRebg5zWKQBJ6KFlpuLGVHqqBcfFDuilZY6M\njCCVSmFtbU0GMjnp6+vryOfzUlvF5/NJIhKLSpHSZmzvu8/e+kn6dS9C+IrWKy15KuJsNissHBV6\nY0BLtXBUT2d+fh4vvPCCLJzscyp3LqCEPHhOtm0qlQIAPPHEEzhz5gzsdju+9a1vIRwO4/r163j+\n+eclxqOynlggi0E4Ki0uLvSyms0mnn/+ebz77ruSPMX+zuVykqBFJc9nIySVzWYl+YfKiwremKZv\nIm3NfAPwVQCjyu9G7n62q/QLxqpeGZUW5weNnkOHDslm4Jxnbrcbg4ODcDgcaLfbkv3caDRQKpUQ\njUbh9/tN92Aul8sYGxsTJcpMcBaIA4BQKNRDr6RyV/cmMDK3rFYrjhw5gkOHDkmcSFW4jC0RZhwc\nHJQ2oafCeWikV5LWy2cJBoPCCmKg3el0Ckdf9fofukW/h87+ia0Dq9V63/GclC6XS0p65vN5mTxk\nJ1Dp8jWzZDnRaSUWi0VRpoODg7JZMycVLSUqhUajAZfLJRY0IZvR0VFEo1FUq1UA91PMVJomOdyc\n5GpUH4AMegonq1qcysjQ4UIWCASQzWalWFcsFpNqmLQ0a7Ua/H6/FIQqFoviqqp884dp9Rnde5Pv\npW854Llg0tWtVqtSvY9bMRoNAS7ILFT10Ucf4ejRo8ItZ8ahmoxGj4rPSreeE4ftUa1W8cwzz+Bv\n/uZvMDk5ie985zv41V/9VdRqNbEgOZmJp1PBk1/N65Hl4/f7ZWwcPXoUH330EQ4dOgQAMgZo0QKQ\nUhBc6FOplOyzSoOC8J6ajGOmAO4+bx7mG4B/B8B/1zTtT7ENy80C+OBB+53XoeLkpipqzILwHOsK\nLS4uYmFhAXa7XbjywHaCFOd5KBTCwsKCQDvxeByjo6PCdmLOCMcSANnPgSQFnov9zsUBgCyUqqh8\nfC5UalEzGmCcb0xwGxoaEu+E3Hd6LGof6boucA37XmWeMU7HzXjq9boUY+PzKP26J/m4ij71sK0D\noJd2RoyKmzCwbrvVakWlUhFsD4BYyWQbqAkpdI9Yr93tdsNqtUpCCq0CWrt0wS9evCjuot2+vUF0\nMBjEF77whftYD2rwldFxFZOnNWOxWMQCJySkDiBanyyORUiAUAeThgBIljCtO13f3p+SHsizzz6L\narWKd999Fzdu3ACAHk6ymhiiwmWdTsd+97OP1a/qAm5cKNTrsJ/uHiPPwO8uXLiA8fFxaT+r1Srl\na1nKgdZUsViUmubcz5O0RJ/Ph0qlIgsulScXC7U+OQDxttLpNF5++WW89dZbeOWVV2TnMipuGgUc\ns6pC0O4ygLhrGEtR8/uhoSHcuXMHP/7xjyXBj+wijqdqtdpTgG1xcbEnaMv2AtAzDoF79Epd18Xa\nBLAO4FXNsAG4rutXNE17A8BVAG0A//ODxl4M/S/jlouqkZ3G7QWr1SrOnz8Pl8uF4eFhYRutrq7C\n6XT2lEBQA/bc6WtqakosaBpJ9Kg5Zsho4bjrdrdLoLRarT2VQODxPFZl43GccSHZrQQC58DVq1ex\nubmJer0u+qVarfZ4By6XSwL9jMuZGWR7lY+r6L+DR2QdqBYJJz0tYmKdNpsNX/jCF6SUQLPZFCZO\nIBAAcI/DXC6Xcfv2bVy5cgVDQ0M4duyYfM5qh8RFVZYE9/OkJUD8lIEmlY7JgCoHDO+R52I9eCZ0\nqBMauJeyzfPxc5XzrSZkAb27NnFhSSQSsNlskiylaRqGhobkGizCROtHtegBMPYRuXuJh271GV1O\no8JlUJvsF6aAk3JG/JVxBirBfD6PY8eOybHE9LlXJzd2sdls4ompY42YMXAv54HK4OjRo2JA8F7V\nYKP6DHSvySSqVCqyOFCZs8xCLBbDpUuXJD8AALLZLDY2NmScM+uVNVxYeE1tKz6X+kyMPfF+7gah\nO7qum24Aruv6fwbwnx+0P836lO1JDF3XdYmTMIhMi5VlTRhM5phOpVJS2iAWi0kZaSYRrqysYGJi\nQthLqoWrQn3qXOKi7HA4cOvWLXzrW9/C+vq6MOf6FTWjMabOFfax1WqVhYHXYB9QDxg90a2tLayu\nrkqRMu6byxpGXExIm+ZirULAnK8PAt/shV75/2E78BrVNG0FwP+GbQX/xqOyDthQZGbY7XZhlgDb\nqdOsxZ3P5wW7J65H5WixbKdLX7x4EUtLS9B1HTMzM0JPo+vF4F42m4XH48Hs7Cyq1SrS6TTi8Tj+\n8A//sAcC4QLECL5KpVQtegZXOdlUzJUp7Mzi07R7yS0cWEb8k+wFvta07XreVIisZDg1NYVisYhi\nsSixh7v9I+26uLgoCxl3V7pbw8WvbdMrH5nVp3ptRo+GyvrYsWMYGxtDt9uVjSs0TcOpU6ckP4IW\nTjweF1iPWw0ODQ3B7XajWCyK16aymYxtS2Ws8uY54dRytUacXz0PFT77yWazCWTITUMIQzEHhIX4\nOObr9Tree+896LqO2dlZyR7lnrEqNKiOFRUS/LSEnka3u71BNscXY2xM4rp58yaeeOIJ2O12yeyl\nBcygqrFMsc/nk9oyrCFjZmUT9qSoir7RaODs2bM4ffo0isUiksnkjmWKObfJvjI+p5qnw+c29oGq\n9MmxJ+NnbGxMmDgbGxvY2toSyDiRSCAcDgszkAFZNS7yUKEbXdd/rc9XD9U6UFc//hm51gywzM7O\nirK1Wq1CY+TqqHa2rm9XjKOLzKQKukrFYlGUPReJXC6HP/qjPwKwPVA++OADtFotnDx5UrBGWlHG\n5BhOvHq9LnBQIBAQCwu4Rz/jwA+FQj1wDzuQqf+kIPJ+aE22223ZX5XCQBMtXlJK1VIOmqZhenoa\n6XRaFiK2FYDruq4//bD61djHFCpMFW/nM8Xj8Z69WEOhkOyxSuyTe8JmMhmMjIzIfqSkWbK+CJkq\n6jVU1gPvgxY3y0N0u12B0JjlSFiGuKoxb0KtXcNJSciQuRv0ForFIo4ePQpN04R1QW/j4MGDKBQK\nMp4ByLaYxmvQuFAVjNHS49h6FKJei68JhbLkLksN5/N5SYRiUTNd14W5xH6ZmZkx3XgkHo+jXq9L\nYFO12jmOVGxdfc/nn56exvj4uJxDtcI5TjhnVOWuLq7AvQVEfX61H9RrAvcYUhN3901gFVnGIcmu\n4TW5ITpr8xNS7Nf2u8ljlxlLYSdQWTGT
VW1A7pzk8/lk/1i1NIDFsl0pbm5uDsA9t4v/W62W1A6p\nVquYn58XSCUej0uqM5UjoSLVZaayNrqK/D2VAzf/IJebVgFxwlAoJBa56iaqbilxR7WNqPjptvLY\nlZUV2a2I52B7quf8OIGdBxHjYKdwUVQHMDFWlq8gu4rWtkpVjcfjUgAK2J6I6+vrAl3wuUhn5bXV\niagG6Yifs18INQCQuA3bWbX6jVRKo4vP6/NazEim4iB8YbFY8P+396WxcWXXmd8rbkVWkayVZJHF\nTRQXiVJracvuttuC4+5Gd9uTZMZwgviHncnEyARwPGMgCOwMEMwPI4CdWTwBPPlhjxM7mEHabXvQ\n0w7sbtlCx251JFm7SIr7zqpiVbFYO7da3vwofoe3Xhe1tEWRUuoAgorFYr377r3v3HO+851zotGo\nBDDNZrPgxvQM7HZ7ETxhZLfsNud7tbb8fuO18/lCgxjCo/X19ZLTYTabsbi4iPHxcdTV1Yk1TYIA\n54RtEjnnTU1NRa0E1cxi6gR6iCrXnN49UDCcVKo0151/awzK8vlVM9iN88q5Z8yG38Pfq2PJ5wvJ\nmEeOHBHmEPcB1561c+x2O3K5nGTBk7BQas7vRw6MoldxLPW1+oBx4hKJBBYWFiRI2dbWJtFulYak\nWqrcDJwsLgpLxkajUcHTqfjJr2Ugz7iRGARTk2WoSFVFQGyZEXqguJom3TW1ZIJ6/7yWyqrgYrM2\n99bWljBO1tbWEAwGsbS0JEkd6v3zOruxNB62GK9hvCdd36kJz7mj5UuX2+l0yrzTwyFLIZ8vNF+e\nmprCiRMnYDKZJHDL9VaT2shMYiMM1eKnFU8oMJFIwO12FzG7KFQMrLzI76UHxQee9FeO69atW4LH\nsm4Nk8eSyaRg2lVVVRJwZ/E67gkVnlTX827zvhdiVPLqMxoOh7GysoL+/n709vZiYWFBMPFf/OIX\n+MAHPgCv1yuVTPkslGoOTpiH+0OlRfI9lRTBcVCf0BPi70hNNX4H9yKw0yFMhUqoA1RlSz3E7ydn\nnwfN1tYWFhcXpYk7jVN6cZOTk7h48aLEllpaWtDa2opAIIDFxUXE4/FdS4vfrxwYRQ+8F74BdrIY\ngR2MmtFtBnHq6urkZOWCqkpzc3MTQ0NDyGYLvSL7+/vFOicWWFVVhaefflow60gkIqwf1rgYGxtD\nT0+PBFZpqTGYRkxPLXXMn5m1qma7cpNlMoV2gYcOHSo6jAgTqMlYPFS4OcktpjtI5aBphRo+d+7c\nKZq7u3Gu90IxlIIRqIAJsalccwCivPVthgYtewa4eSiwU8/c3JwEZHn/ZFzwAOZa0XDI5XISECQs\nw3kipW1jYwN37tzBwMBAUU1w1UpjoJZj5rowgEqrnlna8XgcXV1deOeddzA1NYWuri7EYjG0t7ej\ntbUVbrdbajLxMAoGg3IoACgyOgAU0S1LzfdeSinrkt5nMpmUErzt7e1wOp1YXFxEZWUllpaWcOTI\nESwsLKC9vR0+n0+8Hja1J0zHZujcs9w3qnfPueb7KqGB+51QCa101RBU9QWfQY5HvS8jIUL9BxQM\nuNnZWaFjs5F8NBoVth5RBwZhr169itXVValU2tLSAgASN1RpwO9XDryiV11lLjQfUvbh5Abf3NxE\nIBBARUWFJAo1NjZia2tLMMOKigrEYjHYbDYJkLA4ETPZHA5HUQJVT08PqqqqpLxCQ0ODKIKamhpJ\nilBdPGLyZGuQ9ULmAe+Xrxmg4QNL6wEoxrONvPp0Oo2ZmRmsrq4inU6jurrQUJpNh1mADYAoClog\nPOy46R8FfKP+rAYvOSZN06SuNyl1nF81ycxkMgkjxev1YmpqSjKfuTbE8QEIp5pBdCbv0LLKZrPi\nJqdSKeGBj4yM4MUXXxTqm9VqleQ0Hjy04FTLjiVrNzc3UVdXV1S6gvxqr9crtErVy1E58byurusC\nTXHteGjtBr/tJWRDUa+tKl7CovF4HLlcoexDR0cH5ufnAUC8sJmZGWxubuLw4cOSbT4/Py/5CDab\nTfoj19XVFcVugJ3mMsCOh8Vn0ehhqwF5lfas5rLwHtTnTN2jRriI1+M9p1IpzM7OIpvNoqGhAaur\nq3LoM2bDAm01NTVScp372uFwoLW1FZFIBKurq0IAMcYIHlQOjKJXlTuFClPlnVdUVODmzZv4nd/5\nHaTTacRiMdjtdlHywWAQHo8Hm5ubRZmEAATPjEQikv7vdDrR09MjdVNWVlakQh1P3vb2dnR2dqKq\nqgp+vx8jIyNIJpPo6urCoUOHRAnTGmEgraKiAj6fD01NTXC73RgdHRVrUMWC8/k8BgYGRMGrnGB1\nTlQogpvaarVKE/BIJAKTySSNwGOxGC5evChuPlvTqWwmzoka5H0Ua8t15T3SMwOASCQizaTHx8dl\nbLTIaOWxEcXw8DDOnj2L4eFhHD16FMlkEjabTaw4Gga8Juef1hUtZFrotbW1WFpawurqqiTWsdk6\n4SR6FcakK3pfpOVxzFRsmqZhbGwMZ8+eRXV1NQYGBqTtJXuIqkwa3vfa2po0jjZ2lOI17zXneyG8\nb/XQJv88mUxiaWkJKysr8Hq9+NCHPoRr165JY+yhoSG8/PLLOH36NEwmEwKBAFZXVzE4OFjkKTEj\n2uFwFCWjGeMu3Nsq+8YI2xCSU+FBHgK8F84pn5v19XWsrKyIV85aOszJ4YHM+EpVVRUSiYTAtKT5\nJhIJ0Wl+v1+a3LDhudVqxYkTJ9DX14df/epXGB8fl+qWRnz+QQ/xA6PogR23yRiA5IPDYkALCwtS\n3IwZltFoFNFoVHBQ1njhd9C6pYVEd51FkgAIlY9BMKBwOExOTiKbzRaVP5ibm8OVK1dw9uxZvPLK\nK3Jdl8uF1dVVDA8PS8IDyxHQulQzBQGgpaWlqG+mijOSxqeWoaVlwk3qcrmk2TQLQ42OjiKVSsHv\n9wtrhBuduQmsGbSXrAyuoyo8eLkuhKPIia6ursa1a9dw7NgxSbYBdoK1a2tr2NjYQEdHB2w2GwYG\nBjA0NASXy4VoNAqn0ynWYCQSEW+Kc6/S8ujykwJLBs7hw4clhZ0HPe+F7CUaA0aaH69Fb8lms4nR\nEYlEpLn9wMAAbDYbUqmUJAZxD3CO6HUODw8L/EQPwLiPjFbfo7To+VqFMsiKCofDcLlccLvd8Hg8\nmJ6eFiICy0aQYul0OlFdXQ232y2Jkpubm7Db7cJXV7F39R7VAC2w4wkT1qXSZzIUx6563qrhwd8N\nDw8XHeiMyXi9XjEQ+F08CABITIfFGN1utzReWV1dlVo8TLrq7u5GX18fYrEYlpaWhFZZypp/0Gf2\nQCl6YOcG+PBQ0dPi4+8vX76M3/zN3xQaIXtpEq+ncqVVzgcolUrJQpD1AUCSpHRdRzQaLcJz7XY7\nAoEAPB4PbDabKEqz2Yxr167B5/MVWY8nTpxARUWh7AIz40jjVN0/p9OJpqYmcSPplpOHPD09LWwL\
nBo1DoRBisRiAAjzQ3d2NlpYWnDhxAgDke2w2G95++23E43E4nU6ZB6bR04olfPMoRA0WEldWaacM\nvPX29qK/v19KIWSzWfFAqqqqpCTG7Owsuru74XA4pASEruvweDxobm4Wy85utwuk4vf7pc4MsNNo\nhjDM+vq6rJHVasUXv/hFSXqiAiZsQ+uNioSKrbW1FQCk9n8+n5f6Svys2+0WS352dlZYKslkUqAl\nHvI2mw39/f1IJpO4ceOGzBPXVI13cJ5/XVf/fmU3BgjXNpVKIRwOw+PxoKurC0ePHkU4HEYikUB1\ndTX++Z//GS+99BKcTic8Ho9QkhsaGqTYIFCs2KjIjaUxjIYDP2e0/vmzqsyNFFzew+bmJtLpNILB\noBxGzIHhvuLBwHwL9qpmGQOyaWikBINBxONxjI2Nyb6srKzEkSNHUFVVhdnZWQSDQUQikZKtTSkP\n4rEdGEWvLhjdwHw+LzAEkwi4OD/4wQ/wwgsviFXDYFl9fT3i8TiAQkcYwhVMVGBaNYUuMrFSJk1Q\nKbHmiMlkQiQSkcbdXq8XuVwOhw8fRiwWQ2dnJ2pqavD666/j6tWrOH36tLiJkUgE586dw9GjRwUC\nolJnLRc1J+DOnTtIp9NFRbw4PlqLQKGkwa1btzA3Nwe73V4UxNF1HWNjY0U0QVqHVVVV0m7RGBN5\n2KJaHarbySAlLWJmGZpMJpw+fVqUudPpFFeakABxS5aafuqpp9DX1ycZzuxQNTk5icbGRjidTlRW\nVmJoaAhHjhyB1WqFz+eTshns78l+xAy65nK5ojK39LBYYI90PXoCnZ2d4o4zMJzP5+Hz+dDb2yu0\nwUwmg9bWVuRyOYyOjkr1Q7rwPISNLKvTp0/jhz/8oQSbqdA5j6Xc+kdh1Zdacx5o6+vrmJqaQkND\nAywWCz72sY8hlUrh8uXLyGazCAQC+Ju/+Rt8/vOfh81mk3oxmqZJNrRa+iEej2N+fh65XKEd4OHD\nh2UP0VsFUGTJq/RdI/uOhzVfq9Y9150dogiZVVYWSpD7/X5UVhbKqKuxE4/HA03TimBc7pmqqipc\nvHgRN27cEK+6sbERZ86cQUtLC0ZHRzE6OoqJiQkp86HGtB576EZV8qrQTbfZbJL0xCSUn/3sZ/jk\nJz+J1dVVccmZGETseXl5WVykxsZGOBwOCdBarVY5eamQaTXreiGRo66uDuFwGE1NTVhaWkIulxP3\nkt3qWXulsrISTz/9NC5fvoyamhoJvvGUZwMQk8kkVfv4MGezWayursLn80nvSEJI6kne1NSERCIh\ntEAGlHw+XxHr4+233xbeMul7Kg5fKrj7KDBdihobIF5ORU9oorOzE5lMRgo6UdFXVVUhHo8jEAhI\nbXe73Y6uri4AkPyGlpYWKQfw7rvvIhqN4saNGwiFQujo6MDHP/5xMSKYec35Jr766quv4ktf+pIc\nABUVFVLzntCCyWTC+fPnsbCwIPGYdDqNj3zkI+jq6pKEF6vVWtS2MhqNIh6PY2VlpagcABU9M0Kr\nqqoEmjOZTEK5VBWSMSN0v0Q9mOiRR6NRBINBoQ0+88wzCIVCGBoaQl1dHa5du4ZPf/rTss+peMPh\nMPL5PJqammAyFTo6MXZSUVEh3hqLpKkWeqlxGQ9ANTnKGAuj8BkkG4rsn1wuh0QiIVnlaszEYrHA\nbDa/J2sfAEZHR3HlyhXxDmw2G06dOoUjR45IkbfFxUXhzhsT/XbTk/eSA6HojRYlX9OKS6fTaqdI\nQwAAIABJREFUUo2O2Y4mkwlvvvkmMpkMenp6UFtbi2g0iqqqKmxubiKVSkkzgI6ODmkkruu6lP5U\nO/aQGdHe3o729nYkk0ksLy8jGo3CbDZjfn4eHR0dWFxcRC6XkxR7t9stDX75nc8//3wRHbCiogLP\nPvssrl+/jvHxcRw9elRYHrQGA4GAnOCZTEYoksAOnW52dlYC02pSDy2UcDgsCvDSpUvvYSmpXHyV\ngrbXa6sKHyoqd66xruvSfINzt7GxgebmZnR2dmJrawupVErKCNhsNrl31nZXg3Jc59nZWZw/fx7B\nYBBNTU1wuVySifruu+/iYx/7GLLZLNxut6wBFfLf/d3fIZfL4Qc/+AH+4A/+QCDBiooKuN1u8bT+\n6Z/+SZJ+NK1Qz2RkZASTk5N4/vnn4fV6BcrgGPP5vJQ3ZuMbBpo9Ho9Y9oQNqDQJ5dBqJn1XDQbf\nbf73Uqg8aTwQEkkkElhcXERLSwsOHz4Mr9eLM2fOYHp6GhsbG7BYLPjRj36ET3/606isrBR2XCgU\ngtlsFoiLSYArKyvSepCMJCN1ktY54yTc8+p8UMmriXD8Wc18Z2lvn88nOoNNbmpra9HU1ARgJ2td\nXQPSsCsqKjA0NITvf//7RTDkiRMnMDg4iGw2i+npaczNzWFpaek9mbDG2CXwmEI3wHvLb1IZsF6J\nx+OB1+vF2toaotEoKisrcfPmTVRWVko7vGQyKe45IZu1tTVYLBbBy5golUql0NTUJEkcNptNLEb2\nk2T3mtnZWczNzcHlcmFrawsrKytob29HQ0MDvF4vIpEIQqGQuPIOhwOxWEzq3QDAyZMncf36dZhM\nJulbyYAwCxeRfsfgLnHChYUFhMNhjI+PY2ZmRiwKQgvc2Jw3upiEtRoaGuBwOABAICGVD67+vxfr\nqh7mauxFxVlZzjUcDqO9vR3hcBjhcFiYUU6nU4Lv8XgcdXV18Hg8sFgsRRS36upq+Hw+TE9PQ9d1\nqavCdUskEmhvb8fy8jJu374tDSu8Xq+sxdbWFiKRCKqqqgQrNZlMUjqWFtrt27flYSdE8dRTT0kp\njvn5eSwsLKCnpwdtbW2yv+i59PX1CW2ysbERTU1NkhE6NTUlTBugYOGSPsvDUp1HrifpoQxsb0ur\npmk+AOHtn/+Trus/2V6f990mklIKXlC9XZ/Ph6qqKrhcLhw9ehTPPPOMdHBLJBIYHh5GMBjEH//x\nHwsrjEYBeffr6+tYXFwUeIcEDXLlVe8vm80inU4jGo2irq4Ovb29crDSmFCD2BsbG4hGo+JVEa7l\nc8iYHxU0UCin7HK5JI5mDOwTtrt69Spu3ryJ5eVlQSUcDgdOnTqFp556CisrK7h9+zauX7+OcDgs\n1VZ3m1fjnN+PHBhFr94I3XmVe5rL5SQ12G63C/a+sbGBeDwu9b4HBwflJOcBoNYqYQs9FkwihMOS\nsrQO+KBSIXZ3dyMSiWBmZkaSIHK5nDAJaL0Hg0Houi5NJIwMgerqaiwvL0vdDmL0dO9Ua4RNMVZW\nVjA+Po53330XAKS/rVr5MJ1OIxAICK2LyooHB+uNMEBZipK3V3iu+r28PyMThgq6ubkZQ0NDaGtr\nk3pBVFo9PT3CxuBcGWvNV1RUYHh4GD//+c9x+vRpydJU9wM9ALfbjbm5OczOzqKrq0tgETa74Dow\nIK9m5OZyOczNzWFlZUUgo+bmZoGBeD3GHxhTOnLkiFA6
8/k8Dh06JPEesqu2trYwMzODQCCAVCol\nWbVDQ0PSa4EKi0FbKnpaoGxqYZBv6Lr+X9U3tIfUJrKUkcC9T+91eXkZ8/PzsFqt6O7uxtmzZ7G2\ntoYbN24gk8kgEAjgu9/9Lr7whS8IJEY4lkmHa2trUpOK+QwAJKs9HA6Lx8dYDimyHo9H4hrqeGls\nBYNBrK+vS7Y5sGOla5pW1M6SLDkiDqxnxXHEYjFsbW0hEAhgfHwcsVgMLpdL4k6nTp1CT08PgsEg\n7ty5g6mpKbn+boF0FX56rFk3dK3UiDlP5kwmI/RFJqIEg0Ekk0nMz8+joaEBx48fh9VqRW1tLRKJ\nhPREra+vx9LSktSvITd+cXFRoBI+IFSOZrMZ6XRayt5qWoFze+bMGczMzCAUChVl4TY1NUmD6HQ6\nLUojFAoJ1rq2tobu7m5cuXIFZ86cAVDYZPF4XBpLUAEycYeuoN1uxyc/+Un09vaivb1dvA56PMvL\ny2htbUU0GhUeOqtkMlBdW1uLzc1N4SbTqlGDU3stZBxRiaplA0wmEwYGBqRPKMs5cz3p/vNeVIaW\n+vBeunRJAvE0HOjWc07D4TAqKythsVjwxhtv4DOf+QxaWlokZjM+Pi6fzWQyGB8fx8mTJwUuCofD\neOONN/DhD39YDhJegwqX+zeTyaC2thaXLl2STkXqHqeBwb3DjF9ahqwZMzw8jGPHjgHYyangoaNC\nJgwC36c8lObgu+0f3he7RM3OzsJiscBiscDr9eLs2bPIZDKyZlNTU7h27RpOnjwpLDt6fcytYNmR\nxsZGVFZWihEXDoextLQkjV6YYaxpmlQ0VSEeKnEad/F4XCiwjKVx/DQS1VafPPRpjLABCYsFjo2N\n4eLFi8hkMtLS1Gw24wMf+AC6urqwtbWFW7duYXZ2Fn6/X5S8mi+z2/w+1sFYKhxOMBVRIpGQbFbi\ntQCkBgS7EanRelIJjx8/LkoQKFiUZLR0dHQgHA5LQI41zKl8yNtlsJaZe11dXfjRj36EjY0NtLa2\n4syZMwgGg6ivr8fg4KBkq6bTaYTDYbS1tYklQGrc5cuXYTabsby8jNXVVaHs2e12eL3eIlYMLYgj\nR46gtbUVLS0tYs2xhy47cZGil0wmsba2JnPB74pEIojH45JazYPk/UTyH2RtgWKc0cho4ebu6+vD\nO++8I42fWbyMDI3V1VU0NTWhtbVVIBA1/yCTyeCjH/0ohoeHRfExd4KlFMiyyecLDUacTidef/11\nfOITn0BzczPGxsbw2muvCaaezWbx2muvQdM0dHV1IRgM4ic/+QmcTqfAeQzakWLH/qEco8vlwrFj\nx4qCqGowMJPJwO/3IxQKCU+fmD/vOxqNoq+vTzxdzp2aOWm0+gzyRU3TPgfgKoA/1XU9il+jTeTd\ngoMqRZoeHOsRkbJotVrR09MjwdYbN26gsrISr776Kl5//XX8xV/8RVEDE3LXq6qqJI7BLGqyWlha\nXM0GdzgcRfuOVFl6tly3lpYWTExMyLNCZc+DuKamBqOjo0X1lFg3f3x8XL774sWLsofdbrcw3QYH\nByUDeGJiAjMzM7h9+7Z44aQH74UcGEW/26ZhViArVbKqXTqdhtPphM/nE6U+Pz+PtbU1tLS0oKmp\nCWazWTJTq6qqxJ1iRujy8rLUrlFxRTX9mr/b2trC0tISNE2D3+8XqCEcDuPtt9/G8ePH4fV6MTo6\nira2Nhw9ehRTU1Po6emRB5/WYENDg8ArhFU8Ho8EYFn8bGNjAy6XC83NzRIkJvWP41X7T9bX1xcl\ndZhMJmEGMAU/kUiIi6gGjoz4314IDy1aaYxfkLWQyWTgdrthsVgQCATQ19cnrrlKcSQrqq2tTR7o\naDSKxcVFWCwWNDc3i/WeTCalPg4DY2q8JhaLYXZ2Fmtra7h69Sqee+45dHR04I/+6I/wy1/+EsvL\ny2hpacHZs2fR2NiIeDyOq1evwu/3C/bLRBhCcWQQcT4ZYG1ubsbi4iLS6TTa29sF181ms/D5fAiH\nwwLDqBABFRobU5MSyL1K2E+d3xISAvA0AB3AVwH8NwD/7gHXr6hN5N32C401NfgMFCrOBgIBVFdX\no7W1FQMDA3A6nXj55ZdhNptx/fp1YSl961vfwuc+9zmBSfnsEEZbWFjA2tpaUZkT1Yuj5V1XVydJ\nThsbGwiFQuKF6bqOU6dOwWw2S+yFuQ8si+1wOBCNRmWfMSmvrq4Ot2/fFqZUMBjErVu3cPHiRdTV\n1cHhcAjR45lnnkFrayvMZjOGhoakvwCzrx8kl+X9PKsHRtGrGWbGYCwj7clkUixecprX19cRCoWw\nvr6OwcFBKQs7PDwsJz9hgSNHjiCZTCIUCgldk9YUNyY3JPE3YqdsXfbjH/8YV69elROejJi5uTnM\nz8/j2WeflexaVuSjS8YHs7e3F2azWepZMPbAtHkeZrQI6+rqRFmxFjtdzZWVFSloxk5WW1tbcLlc\n4sLTI4pGo1haWkI0GkUqlSqib+21Ra+uKaloqnXFjVtZWYmBgQFMTEzgqaeegtPpFEudCo4sJxY8\nY+ZoPB6XUs/19fWwWCxob29HNBqF3+8Xy5HUVga+2Fj6pZdekkCcy+XCiy++iEgkIrx8oFAy46WX\nXsLKygoWFhaEhWO322Wu8/lCFUaPxwO73S7u/sbGBhYXF6XW+pkzZ6SCZiAQEMuPe49WfzQaxcTE\nBAYGBoqKb6nxAgYtS63h9vxnibtrmvZtAP+4/ev31SbSZDKV3CxGnrc6HtJjmcF+584d6LqO/v5+\ndHR04BOf+ARsNht++tOfYmNjA/Pz8/jqV7+Kl19+WUpQ8Dnw+/3SM9lqtUrzIADiRTHjllg+413M\no6GiX1pagtfrhdVqxaFDh3Dt2jUMDg7C7/dL0cDOzk7Mz8/jyJEjuHLlCk6ePCnZrefPn8f6+jrm\n5+elDwJLkbS3t2NgYADt7e3IZDIYHh7G2NgYlpaWJHeCh5jxWdntOSoVmL2XHBhFT1GtAJVJQRiC\n9UaYCNPa2ip1asg0WVtbQ39/v9AP2WZwcnIS7e3t4v6qTBdCB4yoqxF0utcWiwVdXV24fv16ETaX\nzWZRV1eHjo4OXL9+XdyzeDxeFFDigcNsTLJI2L+W+GMwGMTi4iLsdju6u7tRW1srlTRVi588XR6C\n9DJqa2ulPAKzcpPJJILBoEA2fr9fYglut1twTABPaZp2c3s5Hhozw5gqrhZ/o/IiBDEwMIAbN25g\nfHxcqIz8GyaDcf7NZrMoyVQqVVS+l9CZy+XCBz/4QYFTyN2mkZDNZqULEh+iiooKKXLHIBwVanV1\nNRwOh8Rq1tfXEY1G8fTTT4tVWV9fL3kRVqtVymgTc6+pqUEgEEBHR4d4rIRsWHZ6Y2NDmFbxeBwD\nAwNFQWi1RgvxfiPDSZEq5fW/ATC8/fqhNQdX11kVVXnxEAwGg7hx44Zw7Fmy+JVXXkFnZycuXbqE\nK1euQNM0/Pz
nP0c+n8eJEyfwqU99qihAb7PZxIuy2+3CagEg/VZVmCuRSMh8c1z0zmw2G9xuN44d\nO4YLFy7gN37jNwBA8mtIU/b5fFhaWpLcl0gkIk1luG+6urrQ09OD9vZ2ZLNZ3L59Gz6fD36/X9pF\n8juNbMPdRP39Y2vRA+/l06s0PLpmQMEVpvL2eDzw+XxYXFyE3+8HABw6dEisJj7AhGtisZjgqHSP\n1cJndInT6fR7AjY8/elmEXckw8bv9+PjH/84rl+/DqfTiaNHj0qgkPgb2Tm8z9ra2qJuO01NTWhs\nbJQAdCwWE6ueGa4sGBWJRIR2SaXd0NAg90MsNB6PS4CPDVxo5bCfKucfQFDX9ZOGdfm1mBlqsNdo\nufCfWpGQdVEmJibQ29tb1NqNh7FaYnh5eblojTjXS0tLSKVSWF1dRUdHB9rb26FpGubm5qR0NS3w\nYDAo7Al6d7T2WVeISkzlticSCQwNDeHpp5/G3NwcOjs7pbkGoQWr1Yre3t6ie87n81heXhYWDeuv\n0EJnW8NMJoOJiQm43W5JDAJ2Dkr+U5VFIpEQXFtpDu7VNG0IBehmDsC/3/78Q2kTudv67sYUyeVy\niMViUr2SjXkIezY1NUHTNExMTCASicBsNuPKlSsYGRnBZz/7WVgsFrhcLmHGAZAERuYWVFdXi8FF\n2JelVLhn2N2NpVGYn9HQ0IBcLicGJoO458+fF3o3Sw/T06yvr0dbWxtOnToFj8eDmpoa+P1+LCws\nYGRkBOFwWALrasBVhY1LreduSv2xtOjVQasKX7Xe2BxE0wqca1pJfX19WF5exje/+U189atfldT2\nQ4cOFdV0oStMjJiWnwrbkGfPA4DeRVVVFb73ve9hfn5eLCmgsGFdLhfq6+uxsbGBK1euoLe3F+vr\n67h+/bocLM8884wsIv+W9XJogTQ3N8vB4nA4YLVa0dLSUuR68tCLRqMIhUISzKVCUml2W1tbmJ+f\nF/yXvHsm+qh1NO4hD5WZQTyZ96KW9mUtnv7+fvzyl78UT4frxjWhN5RKpaSnAK9Ba49MD13XEQwG\nJRmHFUqDwaCs98LCAoaHh3H8+HFh7AQCAWlEQ872+vo6hoeHpXUhrVPSalnGgAcHvQwju4mVEEmf\ndDqd8Pv9cuBxDVnV8uzZs7IXWdkRQBGzhPuqvr6+iF+/rfRn9RItIrfn7KG0iTQ+w8bfGw8DlQzA\njPdkMonBwUG0tbXhU5/6FIaHhyXRkDWqvv3tb2NwcBAf+chH0NbWhmy20H6S1GmuO1v0MeeGndyS\nySQcDgdWVlbQ3NwsjV7Y6pD4/O3bt+HxeLCysoKZmRmh4lZU7HS+c7lcqK2tRVdXF9rb29HS0iKx\nnLm5OYyMjGBpaQmBQEASJFUlryr3UnNnRDd2g8XuJQdC0Zc6mdSbYMCKgSdOsslUqBTHDlNLS0u4\nc+cOmpubpZBQZ2enZNCxUFI+nxf+OTFqI9VKfY/wj8PhQH9/P27duiXsAdKuAIhyTSQSEgOorq7G\n6uqq0OlUdggTPPx+P9ra2uB2u5HL5YRlwQQiVsFkRU3Vct3a2pJaJ8RAGWSMx+PyP91bNbFGteSV\n+W7SNO02HiIzQxWV9sfrE09Va697PB4Jzg0MDEiAmY1FWMSMnZrIyafCY2KL3W5HMBiEw+GAz+eT\nmkUjIyPSx5SxmAsXLgCAcPhnZ2cRj8fR2NgoHY58Ph8uXLggEArZFyMjI+K2s9bS6uoqmpubizoW\n8Z5ZGCuZTKKqqkqCrPF4XNY1n8/j+vXrMJvN8Hg8YphQgak1jErNb6n530tRr1XKMi31WWaYMn7h\n9XoRCoXQ1taGvr4+vPDCC/jwhz+MO3fuYGRkBOPj4wiFQpiensbo6CgaGhrQ1dWF/v5+PPfcc2KE\nMQ5nsVhQW1uLhYUFmT8GaJPJpHg+ZKPR0/vFL36BYDAo9XMYFGcRvYaGBrjdbjQ1NUlJFPa2uHjx\nIgKBAPx+PwKBgJQzAB58fe4G1zyWFr0qqgJQ6XdMAqKVzgeIdU7W1tbw7W9/G3/5l38pi8xSqHSt\n/H4/crkcvF6v0OzUFHQGClmWIBQKSQEldmwidTKVSqG+vl7aBGraTqNv1TKntUCsWdM0YYHMzMxg\naWkJLpdLIJWtrS3hBLOpdG9vr3DI1To1PPSIHdNtZaCIVikteeC9m4uK3mKxIJVKDaFgre8ZM4OW\nppq+TquOn6+pqYHH48HQ0JAoe00rdNSy2WxF1FHSVtluj1x4uu0ejweXLl1CZ2cnwuEwkskkBgYG\nhLPOOfH7/bhw4QK6u7uhaZp4SmrPTnKeCf+ZTCa0tbXBYrFgenpaytHOz8/j+PHjqKiokODv1taW\nBEytVqtY7GQesTkKrb+xsTEMDQ2hv79fDnlN0yRrlIYJ17YUc2OvmVSU3YLA6muj0uJBwKQj7ol8\nPo9IJILNzU14vV40Nzfjgx/8II4ePYr5+XlcuHAB8/PzWFlZwdraGm7fvo2bN2/ixz/+Mdra2tDS\n0iI1cDo7O8XrAiCNyIeHhxGLxXDp0iVYrVaEQiHMz89jdHRUguI1NTWw2+3SHctut8PtdqOhoQE2\nm008DDLy6NlNTU0JTMNkT6OB8yAHsBHSKeUF3EsOlKJXA0ncAIQjmBVHl4xlQbPZQntAKoKJiQkM\nDw9jcHBQXHan0wmXyyWJRIR8iI2zBR+DkiyYxIbhAwMD2NzcxOTkpDQCYNcbdqIymUyw2+3SiISe\nB2lkVNi0ROPxuNT66OvrQ09PjzAyJicnEQ6HEYvFYDabMTw8jEOHDuHs2bNFGbEOh0PKGqhsjEwm\ng2g0Ko1Y1JIHaoSfwk1I2EDX9fzDYGZomqYbcWSV1UQFxTHp20wmxmJsNhtMJhPeeust2Gw2tLS0\niPXH6pUqnZDwCAObhGWAQl/Xd955R773ypUrUnqAh6yu61IET627Q0YHD0xWHaysrMTKygrOnz8v\nafk+nw9jY2Po6+sTi59Wq8oH51ox0EdoiAfL8vIy3nrrLZhMJjkANE0ram5NT1c9xNWciAdVBr+O\nGA8UI7ZstO6NY2MmOIPVU1NTWFpagtvtRktLC/r6+tDc3Izu7m6cOHEC6+vrCAQCGB4exvLyMkKh\nkARJ5+fn5XqMeVBUSjGFJRfY+MPj8cDhcEgLQ5YmJysuk8lI3+qVlRVR7rFYDMlkUkqfqDkq6jwZ\nPR9Cx7vh8aU8owc9LA6Uogd27zTFcqzqwpFVks/npYrhxsYGvvOd7+ArX/mK8I7NZrPwVTVNkwJm\nXAha0bQOV1dXEQwGJUHj8uXLogRolbHoVC6XkxR1Ztc6nU4kEgnMzMwgGo3CZrNJtH9qakogipqa\nGkm5b25uRjabxejoKKanp4X+yC47zBvIZDLo6+sTdgYzAGkx0kpk381StXBUhcufARgtwofCzFAP\nbhWjJ4TENWU1P36ObCcmlZ07dw6/9Vu/JfAGsINjq4lKZK4wgEYox+fz
SZ+Bubk5qVRJxck+BjQs\nYrGYUCpzuZyUNaDXxOA4obLh4WHU1tZKwTGfzyfNS+iJMm5AqIpZ37wu7ymRSODcuXNYXl6Gy+WS\n+up84HkgqxCPatGrlt+jsuiNUgoWNCo41apX1517dmJiAqFQSOoTNTc3o6WlBV6vV3ITurq6pBG5\nGlNhpjmfI84xn1N64gzqNjQ0iKVOaIzEDE0rlPcmRTkejyMSiQidMhKJiAFC/aQqbvX/B4FidlP8\n6nferxwYRW9UCCp8o1ovxLbJlWWpXvbg3NzcRDwexze+8Q18+ctfloAdXVwGsqLRqGCkPKVzuZws\nHBXjxsYG3G63FE+jkidcA0CyWAkF8P21tTXMzc0BgFiGfNDNZrOkVNfV1YmyUyP9DO6R5z87OytM\nAvLsSRdkAJqxDGaAqpmJ6gO3uroqv19eXpaSzQCObmP0c3iIzAz12lSmGxsbwkDgfareG6mXlZWV\nmJycxLlz5/DRj34UbrdbrG21Dy4ATE5OoqOjQ1gym5ubuHnzJqanp6UAFudQrUXP1oPE7AFIExLV\nIqSFXV1djUQiIcqdtfG5N6enp1FXV4eTJ0/KumxsbGBhYUG8Oh4g9BCy2SzC4TDeeecdTE5Oyr3z\nPlm2Ym1tTQwcHubM4n0/gbqHJfdSPEYGjtHip9AAIFzLrGCXywWXy4VgMCiv7XY7amtr0dbWho6O\nDpmfdDota6t6kyrRgmwuHvhkMnF/0rtgpnksFhODgYmHvI6Rpr1bTKLUe6XiGer8GOftQZU8cIAU\nPfBeTA/YcbVIPaN7R8uGHX6YaHTs2DFJTf7617+OP/uzP0Nzc7OwX5hlaLVaxaonl5U1ZlhYiokw\nd+7cwebmppQpJU7OQKnaBjAcDiMSicj4zWazcHaJN3Kj0SJsaGgQC5wHGS1eKulcLocbN26I8unq\n6pJG1bRsw+GwxAZUC1HNfuVrQj6q6LqO9fX1O6XYGQ+DmUHhw7C5uYn19XWk02mhE66vrxfBdPl8\nXgJdQ0NDCIfD6OnpEWZJTU2NKGlayKFQSDyeSCRSVDNc13VJgCPtlgcN4xw0LPjAM1bENQcgn7da\nrdLjl2tOq39sbEyqb/KgZW2W5eVloRiygXsymcT09LRQ/4jFs5wF54bQJUt5UJkZIQl1XR+F3Eu5\n3Y/yU9/ns8D6TAsLC5LDwKTC1tZW+ZlKn4FT7nFCLqrCp8JUEwqZOb6+vo5kMillvyORiCh7doqj\n3jASOErdy92gG/X93cQI/7wfOVCK3niSUVkBOwlMdMMY8DSZTEVFscxms9RBqampwde+9jW88sor\nePbZZ2XBycqgNcTPshQpGT0mU6FhMSEYwkBcZG4q0iMJNfBva2pqkEqlhCHBz6o8fjYFzmQygstv\nbm4WWeAApIzqxMSEpM57vd4imiiVCC0iYt60+NS5VS2E3TbpwxSj9cb1pPdhMpkECgMg+DvXoqGh\nQeoeDQ0Nvef7yYxg4JMPYX19Pbq7u6VeDj0FfkZlVhH+ImTGfaaOiQcIDyCSAurq6tDQ0CA8bdL5\ngsEgRkdHZb2pWBg3MUo2my1qWM1DhKyNXG6nmTm9N7U8RKn5flA892HJbsrJCGcApUugqJ9R9wtb\nNjJ5ibALMfX6+npJnDKWJgYg1joPElrt6XRakp/W19cRj8cloMpnnntBtdpLQTJGXWaEb1Tv617z\nVirW8dhDN6U2Kzc1/zF4Sn4yXWMuQkNDgzx4jY2NGBsbw7lz5/DCCy/gueeeQ11dXVFfWE0rtP1S\nszXn5+cxMjKC2dlZVFZWSjo7XT0+gKzRQguM1iIPj1QqJWwPHiRUFuTvsnTxzMyMFFnj5uackHOd\ny+Vw8eJFzMzMoK2tTRJLdF0XGEPXdTmEuLlVLrrRvb/XxnuY68t/nD/VmiZ7hthzPB6XOjIAJJ+A\neD6zfklHZUkIwlkMwnu9XqkemE6nRdEyVqJpmngRals+zhv3FQ9QoGAlsl8Aa53woLFYLAID0Tgh\nEYCHLksmezweNDQ0CJSjejTqXm5paREmGb0HHuoqW+xRYvOlgoT3I6X24W7fY/yMrusS16E3yB7O\nc3NzAquyVAibfqjxn3w+L0wqWvGEeVjLX6UiG+GYuyno3eIQu83B/cpuz+39yoFR9BSjJUJlAOyU\nfOXEU1GrpyTd4IqKCnm4BwYGkMlk8Oabb+L8+fN4/vnnpUwB8fCNjQ1x80OhEAKBgDzULpdLLHVu\nHjXjlRY9MT9+hjAPDxNmplLosbC3JN1H9f55bzyAmGrNpJCVlRVh3DC4S6uPdWBIOeSoNzOtAAAN\nM0lEQVSBqX4nr/Mw3MN7SSlLhd4NsXP2BmhoaMDhw4el6xOxb64tee1tbW3wer0YGxuTPrCcQ3Zo\nonJl4JXJZbquS8kEFk8jLsx15bywPjrLURiDx/TiuPZUIul0WqAX0kk57oGBAdTX18sYec36+nq5\nX4vFIodBVVWVGAdcT1ZavNs876Wo+DLninNrtGbV16W8SCMOfa/fa9pOrXtN0xCLxURn0INiLI+e\nmfos8Wcq9FKKdLdx7HZvxjkxxh7ux5spddg9MdCNOlmlJoknMfFrNWGFn1O59SwzwKQlPlSBQABv\nvvmmKBk+sMaoPJUKlTeDaXwA2YmG1gSVLdkVDPpybIQU6C7yM1RMKysrwvlXDzLjQ6RpO2WYSfPS\nNE1wSV3XJVhIz4KwjjET1rh59hrCUa1kwmyapslcA5BWeZxbTdMkkYWYPl1vlsGw2+04cuQITCYT\nfD6f0C9JTwWAYDAocQDOAy1hAGJlsxQBoRVCO6wkSW+OhwSww8phwI7lGlhrhwc4vb62tjb09/fD\nbDYL31rNgKytrZXcDiaHcU7UeeL80JhQ6LF7uo7qeqrGiGoxc96A0pauUXmpe+9ulnCpMRi/k8+y\nukbq96mGYykFz8+UUual7uFuh5L6/O4WLC91WKqHmvHzDzpHwAFS9BR18AyAqYEPulWsIU8hfGOc\nUFIfDx06hI6ODongM8CSz+/0h+T3MOCmBtc4no2NDQmCkn9NmIbcedWKp4IlLEFFznoshA/o2hOL\n5T2oxbSAnUAmv4PMDOKIHBuzddfW1rC6uioWoCp323wPU1QogRYXFVVtba14GlTuVM6ER6i4GQxV\nO2Qx0M1GFtlsVsoyAwXmDOeW1jSvz/gIM4fJhCoFZWmaJl4SDwIG41mCgeNR4SCWZqC173Q64fV6\nBdoh64uxCEINuq7L/ZNjr9a353ryMDcGhB8FfGOEM3ZT7A+itI3KdTdvs1QMQoWt+FyrkDCtd/7P\nzxktd6PXUcoDMVrmpQ4Fjpt/q7K3HrUcOEWvusFqRT41GEKrmMqdCpWvGUzj6a4+AGoyBEv7qtxX\nPmQMzlFh05Kj5UclTRiHXGdeV7XwgZ12eUz11/WdzjvkWRNPV60hdbOpYnxf9SyoLLe2tkSxGefS\naN3spWLg93IuOV+0ktnpCYA
EyaioOXYqfM6nrutigfPB5eHLvcIgLg9iYyOXxsZGiRGQesvmL2Sy\nqPuH1+LnaWE7HA5UVFRIRUIG31XFTa/A5XJJYJgHD69F4fqp68aDjnNhsVgQj8fFI+VBwflVjYO9\nEKNiV2Eb9bC529/vZq3udjCUOkAovK661/gZVfGSjKEqeKOSN16r1HgexJreS3nsLPq74WKEKWj9\nMiuSClFVmnwIuemYOEMlrjIuCHkAO5tVDb7RGqOFqW5OWnS6rkvDD2CHFWEUJoFQ0WiaJoE/UuTU\nqD7vzfiw0NJUrXNadOxWX19fL20NVUvvbht2NzzyYYi6thy3epATLlEPdLIrCJ/Q6udBzL+lV0Al\n7nK5JGmMQTmr1So8aaCwn+hJcI3Z94AHJlsVqvfANWMgl+9XV1fD6XRifX1dDnjux1QqBYvFIg0o\nSAE0Wn5cC3WPEuojA4kxA64Vg9Rq8b1HyaU3Knv1mnvtTfAapa5DA49KnYeAusfVeVQNn0cppfD6\nB/3bx0rRZ7PZleXl5TSAlf0ey32ICwd0nFevXlV/fL/j7Hw4o3mvqNYWDyBVYfOhU2vhqNYhK25S\nqakMJ5X5wro3DGaysJXKPedhwZo4ZLmodE+WsiDVUs1JsFqtknRF44CHDlsWMvuWiVrMxiScp8IB\nagkDtYy1pmlSdoGWqOqhcg5p0avzvNdixKlLQRyl5H4U227WvvH3fK16MEYYR/UySn2nemDt9pn7\nkVJY/n58Ryk5EIpe13W3pmlX9V3KqB4kKY/zwcX4UKoKn3EXWuuExRhYp0VP5U8rV63tQ3YTS0Iw\nU1rXdWnWwjrz/F4qco5lY2ND8H4Aotg1TZN4CsfOFHom6tC6ZrCVkBA7XpEpRWiIDB61bIGm7VTd\n5Pu0/DlewlUAhEOvJscZLdw9VvipXC43vpcXeEhyYA0zg+ypYXYgFH1Z/mUIFRHjIYy38H3We2eA\nuba2Vix4JikRMiEsRZyeSpQURBWuU9Pe2bqPrB/GXVjEjJazWheI1h0Poq2tLQnkqta7mkuRy+Wk\nVWVdXZ1YlsTj6S2QpcU5YS0lWuicB77He1hbW5NDSJ3PRxGE3Zbxg2JI3E0OksFzN9nrcZYVfVn2\nVFRFSauYcA2wk6UIQILb/L1a/oCKmi42KbDATiCaJQ1oxTM4ncsVGoXzvdraWuTzebHCa2tr4XQ6\npVqmyVRoqs6DiBRbKm6VbklLnaWwCbtomiYJdrS+OQ9kzajjV6Ec3itZRur7asMYZlBTVF6/Ot9l\nKctBUvTf2u8B3KeUx/k+xBgMphKlEjSW8FVr09TW1kqNHxXuYaCVFFoeDLR81baKJpMJ8XhcIBEq\nSypSNduZbBg2sa6urobdbhe6nlpGgx6A+tpkMqGxsVGyWHO5nBwcTKYi3VKlC9Pq532yHg8rpQI7\nWD69ARIV1NT8R2jVl+UxEdO9P/JoRC/UMT/wUh7ng8lumDFxZZVho9Jq1Ro+xkJyDHyqlFJN08SS\nJ73VYrEIvKI2lFETetRrEJpRYR5CSmrQk+Mgbq5+H63q3a6vjpNQEe+FBwGvrV5DTRAkBETWljqf\nd5vzhywHYn/dh5THCUA7CFzQsjyZommarsIQxJ2ZCMRmzA6HA06nU0o2MCFK/Z/N4dfX14XzzmQi\nKk5SHQnPMDtZ07Si+jY8XFQlSCWtJpGx+UxdXZ18D3+v1pAHdmiWPARY94bfw6xe1XrXdV3K3DJZ\njoW4WEqBgVj1f7PZLAW42ElM7YRF5hIphtsH2rXHAasuy97IvkM3mqa9DOCvAVQA+F+6rn9tH8fy\ntwD+FYCQruvHtt9zAPg+gC4UarT/rl7oowpN0/4cwB8CyAH4D7quv/WIxtkO4O8BNAPQAXxL1/W/\nPoBjXcnlculcLvc4sB7uJY8Le2M32TPabFkOvuyrRa9pWgWACQAvotBw+gqAz+i6fmefxnMWQArA\n3yuK/q8ArOq6/jVN074CwK7r+pc1TTsK4B9Q6K/aCuDnAPr0B2zI8T7H6QHg0XX9uqZp9QCuAfjX\nAP7tARzrY8F6uJc8KfdRln+Zst8Y/QcBTOm6PqPr+haAVwH89n4NRtf1XwJYNbz92wC+t/36eygo\nVL7/qq7rm7quzwKYQuF+HsU4A7quX99+nQQwCqDtII61LE+WaJr2sqZp45qmTW0bE/s9nr/VNC2k\nadqw8p5D07SfaZo2uf2/Xfndn2+PfVzTtJce4TjbNU17W9O0O5qmjWia9h8f5Vj3W9G3AVhUfl7a\nfu8gSbOu64Ht18sowCXAARm7pmldAE4BuIwDPtayPN6y7YH/TwCvADgK4DPb3uJ+yncBvGx47ysA\nzuu63gvg/PbP2B7r7wEY3P6bv9m+p0chWQB/quv6UQDPAPjC9ngeyVj3W9E/VqIXcK4DE73WNM0K\n4EcAvqTrekL93QEa6+PCeriXPCn38evIgfLAgbIXfr+y34reB6Bd+dm7/d5BkuA2Jk5sPLT9/r6O\nXdO0KhSU/P/Rdf3/HtSxHhSa568rT8p9/JryuHiGB9qz3Q8vfL8V/RUAvZqmdWuaVo2Cq/LGPo/J\nKG8A+P3t178P4P8p7/+epmk1mqZ1A+gF8KtHMSCtwAv8DoBRXdf/+0Eea1nKsp9ygDxbAPvnhe+r\notd1PQvgTwC8hYIr85qu6yP7NR5N0/4BwEUA/ZqmLWma9ocAvgbgRU3TJgG8sP0ztsf5GoA7AN4E\n8IVHwWLZlo8A+CyAj2uadnP73ycO0lgPWtDubvK4BPQOgDwOHjhwAD3b7bHsnxfOUqPlf+V/D+sf\nCjkR0wAOAagGcAvA0f0e113GexbAaQDDynt/BeAr26+/AuDr26+Pbt9PDYDu7fus2O97eETzVAlg\nZvu+ua6DB2BcXYa1+y+Gtfur7deDhrWbeVRrB0BDIfflfxjefyRj3W/opixPphy4oN3dRH9MAnr7\nLfoB88CBshd+v1IugVCWhy6apn0awMu6rn9+++fPAviQrut/sr8j2122A2T/qO8kysV0Xbdtv9YA\nRHVdt2ma9k0Al3Rd/9/bv/sOgJ/quv7D/Rl5Wcpybylb9GUpyz1EL1hDZYuoLI+tlBV9WfZCHpeg\n3d3kQAb0ylKW9yNlRV+WvZDHgTZ7LylTVcvyxMi+V68sy5Mnuq5nNU1j0K4CwN/ud9DubrId0PsY\nAJemaUsA/jMKQbHXtoN78wB+FygEyTRNY5Asi0cb0CtLWd6XlIOxZSlLWcryhEsZuilLWcpSlidc\nyoq+LGUpS1mecCkr+rKUpSxlecKlrOjLUpaylOUJl7KiL0tZylKWJ1zKir4sZSlLWZ5wKSv6spSl\nLGV5wqWs6MtSlrKU5QmX/w8xQ6c6CI0f0AAAAABJRU5ErkJggg==\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAACBCAYAAADOgnH+AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAADkVJREFUeJzt3V2oXNd5xvH/E/njok2xVSdClkWtFCUgl8ZtjVNoMG4T\nN3IoyIVi5Ivggot7YSct9CLyVaBgUN3PQJMLpRFRaGtHtA0RIYmxTUtu0kZ2cGNZrmJFdrCEYpHY\noW4vkh7l7cXskUZzPubMzN57feznB4fZs8+cc96919rvrLX2OrMUEZiZWb3eljoAMzPrlhO9mVnl\nnOjNzCrnRG9mVjknejOzyjnRm5lVzone5iJpr6RTkk5LOpA6HjObTZ5Hb5slaQvwHeAu4CxwHLgv\nIk4mDczMNuQWvc3jduB0RJyJiJ8ATwD7EsdkZjNclToAK8oO4LWJ52eB9633YknuLubjBxHxjtRB\nWBpO9NYqSQ8CD6aOw1b5XuoALB0nepvHOWDnxPObmn2XRMQh4BC4RW+WC4/R2zyOA7sl7ZJ0DbAf\nOJY4JmuBZ1PVrbNE74pTn4hYAR4GngReAo5GxItpo7JlNbOpPgXcDewB7pO0J21U1qZOhm4mKs6l\naXiSjnkaXvki4ivAV1LHYa26NJsKQNJ4NpWv10p0NUbvimNWjk3Nppq60f5rPcRlmxARmvWaroZu\n1qo4Ozr6W2bWg4g4FBG3RcRtqWOx+SSbdePWQbY833p4Zs6msrJ11aLf1DQ8tw6y5PnWw+PZVJXr\nKtG74pgVwrOp6tfJ0E1ErEgaV5wtwGFXHLN8eTZV3Tobo3fFMTPLg/8z1sysck70ZmaVc6I3M6uc\nE72ZWeX8McVmVo1llkaVZn6SQLGc6M2saG2tez35e2pL+k70ZqxOFmtd6Jt5jfWjreQ+z+8vubyd\n6G0QpltrsxJFRFxxYa/1+rUSf82twlx0neQ3+rullqkTvVVtMwl6np+d5/Xj56Umh9ykSvDTMZRY\nnk70tiZJrwJvAReBlYi4TdJW4AvAzcCrwL0R8WaqGGfJITFAuckhF7mU41iJQ3ieXmkb+c2IuHXi\nE0YPAM9ExG7gmeZ5diIiy+SQW0y5K+WcjePMOVYn+g1MF14JBdqxfcCRZvsIcE/CWFYpoWxSxyjp\nVUkvSHpe0rPNvq2SnpL0cvN4fbIAC5e6fNfjRD9hMpGvleA3eu2srwIF8LSk55pFYgC2RcT5Zvv7\nwLY0oa1W2jlOHG/WPbWCr5lLcjsGj9E3+pquVcJ4XuP9EXFO0juBpyT91+Q3IyIkrTppUyuH9SKn\nC2oeGY3d7wPubLaPAP8GfDxFIKWWZe4G26JP1eIupaUfEeeaxwvAFxkt+P66pO0AzeOFNX7OK4fl\nbeGemqQHJT07HvJpPbCMr4dF5XKdDzLR53Dix3KpCJMk/Yykt4+3gd8GTjBaJez+5mX3A19KE+FI\njuduXgmO4f0RcStwN/CQpDum4glGbwardPkmXno5zpK6rnroJhOZDe1sA77YxHIV8I8R8TVJx4Gj\nkh5gtLbsvSmCqzEp9FX+kz01SVf01CLi/Ho9NWtHquG6QSX6EhJEDuO2EXEGeO8a+38IfKD/iK6I\nIeWf71yX5d/0zt4WEW9N9NT+lMs9tYMk6KnVXqY5GEyiL6kyZda6z0ZJZbiMDpN9dj21oZTppBSN\nuUEk+lIrUw6te6tHzj0169Ygb8aWJPVNnFwM7RwM7XiHpu/yrT7R13LB1HIcZjbS5zVddaKvLTnW\ndjyb5eOuU+3Htxl9nYNqE32tlajW4zIbqj6u6SoTfe3JcEjj9kM5zvXUevy1Hleuqkv0Q6pAQzpW\nq4fr7Wpdn5PqEr3VwwlhxOehfl1Po64q0fuCMLMSuUW/SUNN8kMarzezxVST6IfOyd7M1uNEb1ny\nG9eVfD5sGU70FXEysBL485v6V0Wid4K7zOfC1rFlvQXAJT0i6bSkU5I+1EcwTvb9Kj7RO7GtVvo5\nKT3+rix5XrazxgLgkvYA+4FbgL3ApyVtWTLUTXGyvyz59EpJhyVdkHRiYt/WHFoHTgid2J26XK0T\n1zFa+Jvm8Z5mex/wRET8OCJeAU4zWnXKKrKZFv3nGL3TTzpAZq0Du9ISb4JvuVyrdNU6C4DvAF6b\neN3ZZt8qbS8O7obaZcnn0UfE14E3pnbvw62DWv2weXS5VmqjBcBn/Fxri4M7yfdr0TH6bbm1Dqw1\n/9c8LlSulq2VZuFvphYAPwfsnHjdTc2+zjjJ92/pm7GpWgeuLN1atFzbeAP3Tbq1LXlefsRo4W+4\ncgHwY8B+SddK2gXsBr65zB+y/Cy6ZuzrkrZHxPnUrQNb34Jrzl4Ni7f6IuIQcKj5HX43zsd54K7p\nBcAj4kVJR4GTwArwUERcTBemdWHRFv0xErYO3JrfvAXO1c83j2711eViRHwgInZHxAcj4tJ9t4h4\nNCJ+MSLeExFfTRmkdWNmi17S48CdwA2SzgKfAA4CR906qNLPSXoZl6tZb7oerlQOreN5u/g5xFyS\nOSvRc23Mqmj+7kIF5fJd3xIJobVybeJYuJBcvmtbtGwjYuYPFv+fsWZWFt9sXy35f8aambXNyb5f\nTvRmZgn18abnRG9mSbhV3985cKKvXIkXU4kx98HnpS59lqcTvZlZz/p+0y4y0btlszk+T2YGhSZ6\nq5vnWa+txvMyxMZIimMuNtEPsYLMw+fHSuG62r1iEz24gphZWVLlrKITPYxO3PhriCaPf+jnwjZ0\no6Rzkp5vvj48/kYOy0TWXmdTX5eLfkyxJVD7xWCd++uI+IvJHVPLRN4IPC3p3Sk+tE5Sdfchcrlm\ni2/Rj9VWQSalbg30bUjHOo+OzktWy0S67LtRTaIv2XrDL0NL8Na5j0r6tqTDkq5v9mW3TGQN9T63\nY3CiTyi3ymBVuwC8C7iV0WpTfznvL/A6z7Plek070S+ojZZ3zcNNy8rxYkmphfOxEhEXI+KnwGe4\nPDwz1zKRy6zzPK9ck+a0EnrfTvQtqiTZ/3KuMzNsKVdPbP8ucKLZzn6ZyNySaIlDq9XMuunzjv1G\nhVvBzIHXI+LWyR2pZmZUcC5b0VIyuUnSC0AArwJ/CGUtEzk+DynrRCmJfVo1iT4nOVTIll2amQG8\nImk8M+MbacNa3jJlVVg5v7LekEtEPAo82nM8C0vVACg1yUNlQzd9FMQ8f6Okrt2Ed5YwM6MNk2Uz\nbzkt87O2vK6HTmqb/VZVoi9RhhXoBTKamdHlhbzo31rmZ+eVYf3ITlvJuJakvpbqhm667NYtWgHW\n6uLnXJki4qeSPgN8udk118wM4BCApFYKos0ynXXelymXtodycq4jufI5W5tb9D0qrMWQ1cyMNs5Z\nX+e9pFhtGKpr0UM3rfoBXXh7JH2bDGdmLFOufZdfSbFa/ZTDjIG2uvhraeP4BnbhPdfWP8R0Va7L\nzJDpU2Zxtlau0O01a/OJiJmVpsoWv
dVto2QYEdm8Ma8VR07x2XBUP0a/7Ji4L8qy5F5eucdndRpM\ni37eGRG+IM2sFoNJ9GNO4GY2NNUP3ZiZDZ0TvZlZ5Zzozcwql8sY/Q+A/20ec3cDdcf5C20HYmZp\nZZHoI+Idkp7ta+WaZThOMwD+BziVOohNcMOMTBK9mRXnVAkNiVIaPF3H6TF6M7PK5ZToD6UOYJMc\np5kVJZtE33yOefYcpxlQTkPCcZLJp1danfwJh1lp9dMrrSzJb8ZK2gt8EtgC/F1EHEwYy2Hgd4AL\nEfFLzb6twBeAmxl9Rvu9EfFm871HgAeAi8DHIuLJnuLcCXwe2AYEcCgiPplhrCVNm52llNkb6/G0\n2QFL2qKXtAX4DnAXowWnjwP3RcTJRPHcwWja2OcnEv1jwBsRcVDSAeD6iPi4pD3A48DtwI3A08C7\n+1iQQ9J2YHtEfEvS24HngHuA388w1iJmPcxSy3HYMKUeo78dOB0RZyLiJ8ATwL5UwUTE14E3pnbv\nA44020cYJdTx/ici4scR8QpwmtHx9BHn+Yj4VrP9FvASsCPHWK0ukvZKOiXpdNOYSB3PYUkXJJ2Y\n2LdV0lOSXm4er5/43iNN7KckfajHOHdK+ldJJyW9KOmP+ow1daLfAbw28fxssy8n2yLifLP9fUbD\nJZBJ7JJuBn4F+A8yj9XK1vTAPwXcDewB7mt6iyl9Dtg7te8A8ExE7AaeaZ7TxLofuKX5mU83x9SH\nFeBPImIP8OvAQ008vcSaOtEXJUbjXNncYJT0s8A/A38cEf89+b2MYi1l1sMstRzHMrLqgYN74ZuV\nOtGfA3ZOPL+p2ZeT15sx8fHY+IVmf9LYJV3NKMn/Q0T8S66x1jLNs5bjWFIpPcOse7YpeuGpE/1x\nYLekXZKuYdRVOZY4pmnHgPub7fuBL03s3y/pWkm7gN3AN/sISKPVUz4LvBQRf5VzrGYpZdSzBdL1\nwpMm+ohYAR4GnmTUlTkaES+mikfS48A3gPdIOivpAeAgcJekl4EPNs9p4jwKnAS+BjzUxyyWxm8A\nHwF+S9LzzdeHc4o1t5t2Gynlhl4GSuiBQ4Y92yaWdL3wiPCXv1r9YvQ/Ed8F3gVcA/wnsCd1XBvE\newfwq8CJiX2PAQea7QPAnzXbe5rjuRbY1RznltTH0NN5ugo40xz3uFxvySCum6fK7s+nyu6xZvuW\nqbI701fZAWL0vy9/M7W/l1hTD91YnbK7abeRKOSGXmqRWQ8c3AvfLH8EgrVO0u8BeyPiD5rnHwHe\nFxEPp41sfc0Nsi/H5X+U+1FEXNdsC3gzIq6T9LfAv0fE3zff+yzw1Yj4pzSRm83mFr3ZDDFqDblF\nZMVyorculHLTbiNZ3tAzW4QTvXWhhGmzs3iqqlUj+adXWn0iYkXS+KbdFuBw6pt2G2lu6N0J3CDp\nLPAJRjfFjjY3974H3Aujm2SSxjfJVuj3hp7ZQnwz1sysch66MTOrnBO9mVnlnOjNzCrnRG9mVjkn\nejOzyjnRm5lVzonezKxyTvRmZpX7f16WjLRJZhe+AAAAAElFTkSuQmCC\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "import nilearn.datasets as nidata\n",
+ "import nibabel as nib\n",
+ "icbm = nidata.fetch_icbm152_2009()\n",
+ "#print(icbm.keys())\n",
+ "t1 = nib.load(icbm['t1']).get_data()\n",
+ "mask = nib.load(icbm['mask']).get_data()\n",
+ "print('Image Sizes: ' , t1.shape, ',' , mask.shape)\n",
+ "plt.figure(1)\n",
+ "plt.subplot(131)\n",
+ "plt.imshow(t1[120,:,:].T[::-1], cmap='gray')\n",
+ "plt.subplot(132)\n",
+ "plt.imshow(t1[:,120,:].T[::-1],cmap='gray')\n",
+ "plt.subplot(133)\n",
+ "plt.imshow(t1[:,:,100],cmap='gray')\n",
+ "plt.show()\n",
+ "\n",
+ "plt.figure(2)\n",
+ "plt.subplot(131)\n",
+ "plt.imshow(mask[120,:,:].T[::-1], cmap='gray')\n",
+ "plt.subplot(132)\n",
+ "plt.imshow(mask[:,120,:].T[::-1],cmap='gray')\n",
+ "plt.subplot(133)\n",
+ "plt.imshow(mask[:,:,100],cmap='gray')\n",
+ "plt.show()\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# 2. Creating a Pytorch-compatible Dataset\n",
+ "\n",
+ "Now that we have our transforms, we will create a Dataset for them!\n",
+ "\n",
+ "There are three main datasets in *torchsample* : \n",
+ "- `torchsample.TensorDataset`\n",
+ "- `torchsample.FolderDataset`\n",
+ "- `torchsample.FileDataset`\n",
+ "\n",
+ "The first two are extensions of the *pytorch* equivalent classes: \n",
+ "- `torch.utils.data.TensorDataset`\n",
+ "- `torch.utils.data.FolderDataset`\n",
+ "\n",
+ "The last one (`torchsample.FileDataset`) is unique to *torchsample*, and allows you to read data from a CSV file containing a list of arbitrary filepaths to data.\n",
+ "\n",
+ "You should feel free to use the official classes instead if you don't need the extra functionality, but there is really no difference between them internally. Also, you may find that you actually need the *torchsample* versions of these classes to do many of the transforms presented below.\n",
+ "\n",
+ "The extra functionality in the *torchsample* versions includes the following:\n",
+ "- support for target transforms\n",
+ "- support for co-transforms (same transform applied to both input and target)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# 1. Creating Transforms\n",
+ "\n",
+ "Now that we have our tutorial datasets stored in files or in memory, we will create our transforms! Transforms generally are classes and have the following structure:\n",
+ "\n",
+ "```python\n",
+ "class MyTransform(object):\n",
+ "\n",
+ " def __init__(self, some_args):\n",
+ " self.some_args = some_args\n",
+ " \n",
+ " def __call__(self, x):\n",
+ " x_transform = do_something(x, self.some_args)\n",
+ " return x_transform\n",
+ "```\n",
+ "\n",
+ "So you see any arguments for the transform should be passed into the initializer, then the transform should implement the `__call__` function. You simply instantiate the transform class then use the transform exactly as you would use a function, with your array to be transformed as the function argument. Here's some pseudo-code for how to use a transform:\n",
+ "\n",
+ "```python\n",
+ "tform = MyTransform(some_args=some_value)\n",
+ "x_transformed = tform(x)\n",
+ "```\n",
+ "\n",
+ "It's also important to note that **TRANSFORMS ACT ON INDIVIDUAL SAMPLES - NOT BATCHES**. Therefore, if you have a dataset of size (10000, 1, 32, 32) then your transform's `__call__` function should assume it only receives individual samples of size (1, 32, 32). There **will be no sample dimension**.\n",
+ "\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 1a. Creating Transforms for MNIST\n",
+ "\n",
+ "Here, I will create some transforms for MNIST. I will use the following transforms, available in the *torchsample* package:\n",
+ "\n",
+ "- AddChannel\n",
+ "- RangeNormalize\n",
+ "- RandomCrop\n",
+ "- RandomRotate\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "collapsed": true
+ },
+ "source": [
+ "If you remember, the MNIST data was of size (60000, 28, 28). We will need to add a channel dimension, so we will use the `AddChannel` transform to add a channel to the first dimension. "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [],
+ "source": [
+ "from torchsample.transforms import AddChannel\n",
+ "# add channel to 0th dim - remember the transform will only get individual samples\n",
+ "add_channel = AddChannel(axis=0)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Because our MNIST is already in-memory, we can actually test the transform on one of the images. Note, however, that we couldn't do this if we were loading data from file.\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Before Tform: torch.Size([28, 28])\n",
+ "After Tform: torch.Size([1, 28, 28])\n"
+ ]
+ }
+ ],
+ "source": [
+ "print('Before Tform: ' , x_train_mnist[0].size())\n",
+ "x_with_channel = add_channel(x_train_mnist[0])\n",
+ "print('After Tform: ' , x_with_channel.size())"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Now, it would be kind of wasteful to have to add a channel every time we draw a sample. In reality, we would just do this transform once on the entire dataset since it's already in memory:\n",
+ "\n",
+ "```python\n",
+ "x_train_mnist = AddChannel(axis=0)(x_train_mnist)\n",
+ "```"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Next, we know that the MNIST data is valued between 0 and 255, so we will use the `RangeNormalize` transform to normalize the data between 0 and 1. We will pass in the min and max value of the normalized range, along with the values for `fixed_min` and `fixed_max` since we already know that value so the transform doesnt have to calculate the min and max each sample."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "from torchsample.transforms import RangeNormalize\n",
+ "norm_01 = RangeNormalize(0, 1)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Again, we can test this:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Before Tform: 0.0 - 255.0\n",
+ "After Tform: 0.0 - 1.0\n"
+ ]
+ }
+ ],
+ "source": [
+ "print('Before Tform: ' , x_train_mnist[0].min(), ' - ', x_train_mnist[0].max())\n",
+ "x_norm = norm_01(x_train_mnist[0])\n",
+ "print('After Tform: ' , x_norm.min(), ' - ', x_norm.max())"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Next, we will add a transform to randomly crop the MNIST image. Suppose our network takes in images of size (1, 20, 20), then we will randomly crop our (1, 28, 28) images to this size:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "from torchsample.transforms import RandomCrop\n",
+ "# note that we DONT add the channel dim to transform - the same crop will be applied to each channel\n",
+ "rand_crop = RandomCrop((20,20))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 11,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Before TFORM: torch.Size([1, 28, 28])\n",
+ "After TFORM: torch.Size([1, 20, 20])\n"
+ ]
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAQgAAAD8CAYAAACLgjpEAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAETBJREFUeJzt3X2wVPV9x/HPR0AZFatExSeMmICGWL01DD7GwZgYYJwQ\nW6vQplp1BrXimIkZxyYddSZpJ5PUJo34MKZBcSbxoU1Q0hAtUqfqBFGk+AyKSCJXhCoK4hNy+faP\ne3Cul/1xf+zZvWfv+n7NMLt7znfP+Z25ycdzdn97vo4IAUAtu1Q9AACti4AAkERAAEgiIAAkERAA\nkggIAEkEBIAkAgJAEgEBIGlw1QOoZVfvFkO1R7WDGLZ7dumhh63Lqhtq8riZQvmzgp97bf+sOm+t\ndzQ7Nmhz/li9ZScG8e77WWXv6x1tjg/cV11LBsRQ7aHjfFqlY+gad2x27Y2zr8+qGzOk4tBrcx9G\nV3btuB9ellU3ZFNzfoowrHNLdu1ur7+XXRuLn8mqWxQLsupK/SfN9kTby22vsH1VjfW2/dNi/VO2\n8/9fB6BydQeE7UGSbpA0SdJYSdNsj+1VNknS6OLfdEk31bs/AP2vzBnEeEkrImJlRGyWdKekKb1q\npki6Pbo9Kmlv2weW2CeAflQmIA6W9EqP16uLZTtbA6BFtcyHlLanq/syREOV/w0CgOYpcwbRKWlk\nj9eHFMt2tkaSFBG3RMS4iBg3RLuVGBaARikTEI9LGm17lO1dJU2VNLdXzVxJ5xbfZhwvaUNErCmx\nTwD9qO5LjIjYYnuGpPslDZI0KyKetX1xsf5mSfMkTZa0QtK7ks4vP2QA/aXUZxARMU/dIdBz2c09\nnoekS8vsA0B1WuZDylaz2/JXs2uXvH9IVt2YIW/WO5yWd8Wa/DlwL23aL7t29mfuya7dsDV/JuUB\n//r77NqqVXlbaX4cACCJgACQREAASCIgACQREACSCAgASQQEgCQCAkASAQEgiYAAkOTun0u0lr08\nPKq+ae3OWH/+CVl1b0/alL3NXZ4all373N/dmF27M77/+pHZtb8/ZUR2bddbG7Jr44RjsmtXXZ7/\nv+VRU5/Krm1Hi2KBNsb6Pu9qzRkEgCQCAkASAQEgiYAAkERAAEgiIAAklemsNdL2g7afs/2s7ctr\n1EywvcH20uLf1eWGC6A/lbnl3BZJV0TEEtvDJD1he35EPNer7uGIOKPEfgBUpO4ziIhYExFLiudv\nS3pedM0C2kpDPoOwfZikP5O0qMbqE4vO3r+z/flG7A9A/yh9V2vbe0r6laRvRsTGXquXSDo0IjbZ\nnizpHnV3+q61nQHbem/4rQuz6vb7zaeyt9n1xvrs2jFHnZtd+8Ipt2fX/uqWL2XX7v9Wc+4S7YVP\nZteOyvszYCeUOoOwPUTd4fCLiPh17/URsTEiNhXP50kaYnvfWtui9R7Qesp8i2FJP5f0fET8S6Lm\ngKJOtscX+3uj3n0C6F9lLjFOkvQ3kp62vbRY9h1Jh0ofddg6S9IltrdIek/S1GjFn48CqKlMb85H\nJO3w56IRMVPSzHr3AaBazKQEkERAAEgiIAAkERAAkggIAEkEBICk0lOtka/r9ebMEftwY3NmnnZ8\n4+ns2ldvHJS/4a1ddYwGVeAMAkASAQEgiYAAkERAAEgiIAAkERAAkggIAEkEBIAkAgJAEjMp28Dn\nrnwhu/b8o7+YXXvroQ9n1574lxdn1w6769HsWlSLMwgASWXvar3K9tNFW73FNdbb9k9tryh6Yxxb\nZn8A+lcjLjFOjYjXE+smqbsPxmhJx0m6qXgEMAA0+xJjiqTbo9ujkva2fWCT9wmgQcoGREh6wPYT\nRWes3g6W9EqP16tF/05gwCh7iXFyRHTa3l/SfNvLIuKhejY0kFvvAe2q1BlERHQWj+skzZE0vldJ\np6SRPV4fUiyrtS1a7wEtpkzrvT1sD9v2XNLpkp7pVTZX0rnFtxnHS9oQEWvqHi2AflXmEmOEpDlF\n683Bkn4ZEffZvlj6qPXePEmTJa2Q9K6k88sNF0B/KtN6b6WkY2osv7nH85B0ab37AFAtplq3ga63\nNmTXvnbR57Jr//ibTdm1V//jrdm13z7nrOzarf/7J9m1I7+/MLtW9JDOwlRrAEkEBIAkAgJAEgEB\nIImAAJBEQABIIiAAJBEQAJIICABJBASAJEcLTjndy8PjOJ9W9TA+8dZfcEJ27d3X/Ci7dtSQPesZ\nTp/G3H5Jdu3oW/J+VLxl5ao6R9PaFsUCbYz17quOMwgASQQEgCQCAkASAQEgiYAAkERAAEgiIAAk\nlbmr9RFFT85t/zba/mavmgm2N/Soubr8kAH0lzI3rV0uqUOSbA9Sd7+LOTVKH46IM+rdD4DqNOoS\n4zRJL0XEHxq0PQAtoCFTrW3PkrQkImb2Wj5B0q/V3ZOzU9K3I+LZxDZ6tt77wsmeXHpc6D9xUkd2\n7fAf/jG79s5R/13PcPr02QfzWrSMuTb/juFdL66sdzj9rt+mWtveVdLXJP17jdVLJB0aEUdLul7S\nPant0HoPaD2NuMSYpO6zh7W9V0TExojYVDyfJ2mI7X0bsE8A/aARATFN0h21Vtg+wEVvPtvji/29\n0YB9AugHpTprFU17vyLpoh7LevbmPEvSJba3SHpP0tRoxd+XA6ipVEBExDuSPtVrWc/enDMlzez9\nPgADAzMpASQREACSCAgASQQEgCQCAkASd7VGvxs0Yv/s2s6pn82uXXJl/hdmg5z338a/evnU7G2+\ncdKb2bVV467WAEojIAAkERAAkggIAEkEBIAkAgJAEgEBIImAAJBEQABIIiAAJDHVGm1jzurHsmt3\n32XXrLp3t27O3uZXL7ssf/9zFmXXNgNTrQGU1mdA2J5le53tZ3osG257vu0Xi8d9Eu+daHu57RW2\nr2rkwAE0X84ZxG2SJvZadpWkBRExWtKC4vXHFO34blD3bfHHSppme2yp0QLoV30GREQ8JGl9r8VT\nJM0uns+W9PUabx0vaUVErIyIzZLuLN4HYICo9zOIERGxpnj+mqQRNWoOlvRKj9eri2UABojSH1IW\nfS5KfxVie7rtxbYXf6gPym4OQAPUGxBrbR8oScXjuho1nZJG9nh9SLGsJnpzAq2n3oCYK+m84vl5\nku6tUfO4pNG2RxUNfqcW7wMwQOR8zXmHpIWSjrC92vaFkn4g6Su2X5T05eK1bB9ke54kRcQWSTMk\n3S/peUl3R8SzzTkMAM3QZ+u9iJiWWLXdVMeIeFXS5B6v50maV/foAFSqVG9OYJutJ3dk1750dv5n\nTB0dK7Nrc6dP74zr3/x8/v7vXdzw/VeNqdYAkggIAEkEBIAkAgJAEgEBIImAAJBEQABIIiAAJBEQ\nAJIICABJTLX+hPG4o7JrX7x8SHbtrSfdml17ytDs0qb5ID7Mqlu4/vD8jW5dW+doWhdnEACSCAgA\nSQQEgCQCAkASAQEgiYAAkFRv670f2
V5m+ynbc2zvnXjvKttP215qu/1utwO0uXpb782XdFREHC3p\nBUl/v4P3nxoRHRExrr4hAqhKXa33IuK/irtWS9Kj6u55AaDNNOIziAsk/S6xLiQ9YPsJ29MbsC8A\n/ajUVGvb35W0RdIvEiUnR0Sn7f0lzbe9rDgjqbWt6ZKmS9JQ7V5mWG1h8KhPZ9euuOCg7Np/Oif1\np9reX+y5Mbu2FXxn7dHZtQ/85KSsun1mL6x3OG2h7jMI238r6QxJf13059xORHQWj+skzVF3x++a\naL0HtJ66AsL2RElXSvpaRLybqNnD9rBtzyWdLumZWrUAWlO9rfdmShqm7suGpbZvLmo/ar0naYSk\nR2w/KekxSb+NiPuachQAmqLe1ns/T9R+1HovIlZKOqbU6ABUipmUAJIICABJBASAJAICQBIBASCJ\ngACQxF2tG2DwYYdm1W34woHZ2/zG9/4zu/bivTuza1vBFWuOza596IbjsmuH3/ZYdu0+Wz/ZU6hz\ncQYBIImAAJBEQABIIiAAJBEQAJIICABJBASAJAICQBIBASDpEzWTcvCBB2TXbrg1/8a5M0Y9mFU3\nddib2dtsBTM682cxLroxf3bkvv+Rf+fB4W8z47FKnEEASKq39d61tjuL+1EutT058d6JtpfbXmH7\nqkYOHEDz1dt6T5J+XLTU64iIeb1X2h4k6QZJkySNlTTN9tgygwXQv+pqvZdpvKQVEbEyIjZLulPS\nlDq2A6AiZT6DuKzo7j3L9j411h8s6ZUer1cXywAMEPUGxE2SDpfUIWmNpOvKDsT2dNuLbS/+UB+U\n3RyABqgrICJibUR0RcRWST9T7ZZ6nZJG9nh9SLEstU1a7wEtpt7Wez1vjXSmarfUe1zSaNujbO8q\naaqkufXsD0A1+pwoVbTemyBpX9urJV0jaYLtDkkhaZWki4ragyT9W0RMjogttmdIul/SIEmzIuLZ\nphwFgKZoWuu94vU8Sdt9BQpgYGjJqdax1+7afOK4rNqub72Rvd1/+Ez+jWBP3/3D7Nqqret6J7v2\n+Lnfyq498rvLsmuHv5U/JXprdiWqxlRrAEkEBIAkAgJAEgEBIImAAJBEQABIIiAAJBEQAJIICABJ\nBASApJacar15b2nVmXnZ9fKfzmnyaPp2w1sj+y6S9M//Myl7m+5ydu2R33s5u3b02kXZtV3ZlWhX\nnEEASCIgACQREACSCAgASQQEgCQCAkBSzj0pZ0k6Q9K6iDiqWHaXpCOKkr0lvRURHTXeu0rS2+r+\nxmxLROTdJgpAS8iZB3GbpJmSbt+2ICLO2fbc9nWSNuzg/adGxOv1DhBAdXJuWvuQ7cNqrbNtSWdL\n+lJjhwWgFZT9DOKLktZGxIuJ9SHpAdtP2J5ecl8A+lnZqdbTJN2xg/UnR0Sn7f0lzbe9rGgGvJ0i\nQKZL0lDtrjEXP5Y1gK9evN1HHy1rjPKOaWcxJRrNUvcZhO3Bkv5c0l2pmojoLB7XSZqj2i36ttXS\neg9oMWUuMb4saVlErK610vYetodtey7pdNVu0QegRfUZEEXrvYWSjrC92vaFxaqp6nV5Yfsg29s6\naY2Q9IjtJyU9Jum3EXFf44YOoNkcEVWPYTt7eXgc59OqHgbQthbFAm2M9X3eU4CZlACSCAgASQQE\ngCQCAkASAQEgiYAAkERAAEgiIAAkERAAkggIAEkEBIAkAgJAEgEBIImAAJBEQABIIiAAJBEQAJJa\n8o5Stv9P0h96Ld5XUjs24GnX45La99ja4bg+HRH79VXUkgFRi+3F7di6r12PS2rfY2vX46qFSwwA\nSQQEgKSBFBC3VD2AJmnX45La99ja9bi2M2A+gwDQ/wbSGQSAftbyAWF7ou3ltlfYvqrq8TSS7VW2\nn7a91PbiqsdTL9uzbK+z/UyPZcNtz7f9YvG4T5VjrFfi2K613Vn83ZbanlzlGJuppQPC9iBJN0ia\nJGmspGm2x1Y7qoY7NSI6BvjXZrdJmthr2VWSFkTEaEkLitcD0W3a/tgk6cfF360jIubVWN8WWjog\n1N0NfEVErIyIzZLulDSl4jGhl4h4SNL6XounSJpdPJ8t6ev9OqgGSRzbJ0arB8TBkl7p8Xp1saxd\nhKQHbD9he3rVg2mwERGxpnj+mrqbObeTy2w/VVyCDMjLpxytHhDt7uSI6FD3JdSltk+pekDNEN1f\nlbXT12U3STpcUoekNZKuq3Y4zdPqAdEpaWSP14cUy9pCRHQWj+skzVH3JVW7WGv7QEkqHtdVPJ6G\niYi1EdEVEVsl/Uzt9Xf7mFYPiMcljbY9yvaukqZKmlvxmBrC9h62h217Lul0Sc/s+F0DylxJ5xXP\nz5N0b4VjaahtwVc4U+31d/uYwVUPYEciYovtGZLulzRI0qyIeLbiYTXKCElzbEvdf4dfRsR91Q6p\nPrbvkDRB0r62V0u6RtIPJN1t+0J1/zL37OpGWL/EsU2w3aHuy6ZVki6qbIBNxkxKAEmtfokBoEIE\nBIAkAgJAEgEBIImAAJBEQABIIiAAJBEQAJL+H9Qe2a69DNH4AAAAAElFTkSuQmCC\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "x_example = add_channel(x_train_mnist[0])\n",
+ "print('Before TFORM: ' , x_example.size())\n",
+ "x_crop = rand_crop(x_example)\n",
+ "print('After TFORM: ' , x_crop.size())\n",
+ "plt.imshow(x_crop[0].numpy())\n",
+ "plt.show()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Finally, we will add a `RandomRotate` transform from the *torchsample* package to randomly rotate the image some number of degrees:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 12,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAEBtJREFUeJzt3X2QVfV9x/HPd5ddFiFSibiuiA8oisoopFtMRlutRupD\nppA6Y7UdQ1oT0hnr6IydKWPbxBkzE9v6UMfpRLFSsTU+dMRoGyYJEqujkzEuhIAKESUorLgL4gMP\nCuzut3/sobPRPd+73qdzl9/7NcPsved7fnu/Xvlw7r2/e87P3F0A0tNUdAMAikH4gUQRfiBRhB9I\nFOEHEkX4gUQRfiBRhB9IFOEHEjWmng/WamO9TePr+ZBAUj7WHu33fTaSfSsKv5ldLOkuSc2S/s3d\nb432b9N4nW0XVvKQAAIv+soR71v2y34za5b0r5IukXS6pKvM7PRyfx+A+qrkPf8cSa+7+yZ33y/p\nEUnzqtMWgFqrJPxTJG0Zcn9rtu23mNlCM+sys64D2lfBwwGoppp/2u/ui9290907WzS21g8HYIQq\nCX+3pKlD7h+bbQMwClQS/pckTTezE82sVdKVkp6qTlsAaq3sqT537zOzv5b0Ew1O9S1x91eq1hmA\nmqpont/dl0taXqVeANQRX+8FEkX4gUQRfiBRhB9IFOEHEkX4gUQRfiBRhB9IFOEHEkX4gUQRfiBR\nhB9IFOEHElXXS3cD9dQ0c0ZubWBCazx27ethfWDv3rJ6aiQc+YFEEX4gUYQfSBThBxJF+IFEEX4g\nUYQfSBTz/Ag1HXZYvMO048LyBzN/J7c28dX3w7FvXTYprH/nLx4K6xeMeyG39ns/vT4ce9o/doR1\nvfZGXB8FOPIDiSL8QKIIP5Aowg8kivADiSL8QKIIP5Coiub5zWyzpF2S+iX1uXtnNZrCZxPNxdvY\nsfHg1pawvOXqk+Px58Rz9XM6Xs6tPfPaKeHYi2b8MqxfMO7tsP693t/PrR2+Nj6fXx/siuuHgGp8\nyecP3X1HFX4PgDriZT+QqErD75KeNrNVZrawGg0BqI9KX/af6+7dZnaUpBVmtsHdnxu6Q/aPwkJJ\nalOJ74kDqJuKjvzu3p397JX0hKQ5w+yz2N073b2zRSU+fAJQN2WH38zGm9nnDt6WNFdS/ke7ABpK\nJS/72yU9YWYHf88P3P3HVekKQM2VHX533yTprCr2kqwxx08N63vOODqsv31u/v/GGef8Jhz7l8c8\nH9bnj/9pWC/lgQ+Pyq397KPTwrH/+5NZYX1ud1w/ct1HubVjN20Kx/b19Ib1QwFTfUCiCD+QKMIP\nJIrwA4ki/ECiCD+QKC7dPUJN48fn1qwjfzpLkvaeemRYf21e/NgXnLk+rP/D5J/n1i4c1x//8hL6\nfSCsD8jD+i92TcutnXL/x+HYMT3x6cID29+N68Ey2n3hyDRw5AcSRfiBRBF+IFGEH0gU4QcSRfiB\nRBF+IFHM82ea2trC+u65M3Nrc/7+pXDst9t/ENYnNo0L65X4YCD/tFZJunvn7LC+YXd8OvHZE+NT\nht/fH/y3/WJdOJa5+NriyA8kivADiSL8QKIIP5Aowg8kivADiSL8QKKY5894Xzyr3LZjf27toomv\nhGNLzeOv3R+f135ma/wdhMgzH00O689e96Ww3vRsvEz28jPj8d1fnpRbm9K2Ohw78HH8vKAyHPmB\nRBF+IFGEH0gU4QcSRfiBRBF+IFGEH0iUucfXXTezJZK+IqnX3Wdm2yZJelTSCZI2S7rC3d8r9WCH\n2yQ/2y6ssOViROf7b7n+C+HY/bN3h/XWX04I65f/2bNh/YZJ+dcTOPuFvwrHTl8UXxu/b/NbYR2N\n5UVfqQ99p41k35Ec+R+QdPEnti2StNLdp0tamd0HMIqUDL+7Pydp5yc2z5O0NLu9VNL8KvcFoMbK\nfc/f7u7bstvvSGqvUj8A6qTiD/x88EOD3A8OzGyhmXWZWdcB7av04QBUSbnh7zGzDknKfvbm7eju\ni9290907WzS2zIcDUG3lhv8pSQuy2wskPVmddgDUS8nwm9nDkn4u6VQz22pm10i6VdJFZrZR0pez\n+wBGkZLn87v7VTml0TlhX6bo3PLj7v91ONbGxm93+ns3hPXH7bywftLXe3JrnVO3hGO3zD4lrB/G\nPP8hi2/4AYki/ECiCD+QKMIPJIrwA4ki/ECiuHR3FfTveLemv/+o1fmXDZek7866NLf2vS/8MBx7\n05VTw/qJm88I683bdoR1NTfnlvq6347HoqY48gOJIvxAogg/kCjCDySK8AOJIvxAogg/kKiSl+6u\nptF86e5CNeXPlUuSf3Fmbm3jN1rCsf9+3pKw/sP348uSr9h8aljf/8bhubUTn/woHDvm1TfDev97\nJa8Wn5xqX7obwCGI8AOJIvxAogg/kCjCDySK8AOJIvxAopjnPxQE3wNoPu3kcOj6ayeG9Z9ddkdY\nn9wcXxKiP38lN817Ne+q8IPeXXFMWD/ukfiy4n1btob1QxHz/ABKIvxAogg/kCjCDySK8AOJIvxA\nogg/kKiS8/xmtkTSVyT1uvvMbNvNkr4paXu2203uvrzUgzHP33jGHDslrO85K55rf/cbe8L6n560\nOre28IhV4dgVe48L67dtuCisdyzqz631b3g9HKs6fv+lmqo9z/+ApIuH2X6nu8/K/pQMPoDGUjL8\n7v6cpJ116AVAHVXynv86M1trZkvM7IiqdQSgLsoN//clTZM0S9I2Sbfn7WhmC82sy8y6DmhfmQ8H\noNrKCr+797h7v7sPSLpP0pxg38Xu3ununS0aW26fAKqsrPCbWceQu1+V9HJ12gFQLyWX6DazhyWd\nL+lIM9sq6TuSzjezWZJc0mZJ36phjwBqgPP5EbIx8fGhacL4sP7xnOm5tZNuWR+OvW/qC2H9rb7d\nYf2Cx/4mt3bqv2wJx/Zt7Q7rjYrz+QGURPiBRBF+IFGEH0gU4QcSRfiBRJWc50favK8vrA/siZfZ\nHtuTf8rvmu3x6cSaGpd3DcRLl4/Zk39sG61TedXEkR9IFOEHEkX4gUQRfiBRhB9IFOEHEkX4gUQx\nz49Q8+TJYb13frwEeNvlPbm1W07+UVk9HbRroDWsH5g4UNHvP9Rx5AcSRfiBRBF+IFGEH0gU4QcS\nRfiBRBF+IFHM8x/ibGy8StLA784I6+uvaQnrT154Z1g/s7Utt7bPD4RjF39wfFi/fdm8sD7j7k25\ntfgqBWngyA8kivADiSL8QKIIP5Aowg8kivADiSL8QKJKzvOb2VRJD0pql+SSFrv7XWY2SdKjkk6Q\ntFnSFe7+Xu1aRZ7mww/Pre05L57Hf/+aXWH9f2bdE9bPaB0X1rcFy2h/e9sfhWNffOyssH7ysvja\n+33v5F9LACM78vdJutHdT5f0RUnXmtnpkhZJWunu0yWtzO4DGCVKht/dt7n76uz2LknrJU2RNE/S\n0my3pZLm16pJANX3md7zm9kJkmZLe
lFSu7tvy0rvaPBtAYBRYsThN7MJkh6XdIO7fzi05u6uwc8D\nhhu30My6zKzrgPZV1CyA6hlR+M2sRYPBf8jdl2Wbe8ysI6t3SOodbqy7L3b3TnfvbFF8kgmA+ikZ\nfjMzSfdLWu/udwwpPSVpQXZ7gaQnq98egFoZySm950i6WtI6M1uTbbtJ0q2SHjOzayS9KemK2rSI\npsMOC+s7//j03Frr1fF013/N+M+wfkrL+LBeytc2XpVb23vPMeHYKct/Fdb79uQv/43SSobf3Z+X\nZDnlC6vbDoB64Rt+QKIIP5Aowg8kivADiSL8QKIIP5AoLt3dAJrbjwrr7849Kax/6YaXcmvfbX8+\nHDuhKZ7Hf3x3/unCkrRo2Z+H9en353/PYMIbXeHYgYH+sI7KcOQHEkX4gUQRfiBRhB9IFOEHEkX4\ngUQRfiBRzPNXQVNb/jLUkrT96tlh/YSvbQzr9xz/z2H9uDETcmsHPF5i++734mWw733wsrA+/d5X\nwnr/+x+EdRSHIz+QKMIPJIrwA4ki/ECiCD+QKMIPJIrwA4linj9jY+PVhHz2qbm131ySP88uSXMv\nyz/fXpJuOfq5sN5c4n/Tj/bmf8/g3u7zw7FbHp0W1o//77fCeh/z+KMWR34gUYQfSBThBxJF+IFE\nEX4gUYQfSBThBxJVcp7fzKZKelBSuySXtNjd7zKzmyV9U9L2bNeb3H15rRqtteaj42vnvzY///r2\nt1/+QDj2gnE7w/peHwjrCzf9SVjf+OP86/pP2hBf+759+aqw3rdvX1jH6DWSL/n0SbrR3Veb2eck\nrTKzFVntTne/rXbtAaiVkuF3922StmW3d5nZeklTat0YgNr6TO/5zewESbMlvZhtus7M1prZEjM7\nImfMQjPrMrOuA+IlJNAoRhx+M5sg6XFJN7j7h5K+L2mapFkafGVw+3Dj3H2xu3e6e2eL4u/PA6if\nEYXfzFo0GPyH3H2ZJLl7j7v3u/uApPskzaldmwCqrWT4zcwk3S9pvbvfMWR7x5Ddvirp5eq3B6BW\nRvJp/zmSrpa0zszWZNtuknSVmc3S4PTfZknfqkmHdeIfx59HtO2w3NqNyxaEY494NX7sz696L6wP\nvLwhrB+r/GWwS/GyR2K0G8mn/c9LGu5v/qid0wfAN/yAZBF+IFGEH0gU4QcSRfiBRBF+IFFcujvT\n39Mb1o+5La5XIj6hF6gNjvxAogg/kCjCDySK8AOJIvxAogg/kCjCDyTK3Ot3RreZbZf05pBNR0ra\nUbcGPptG7a1R+5LorVzV7O14d588kh3rGv5PPbhZl7t3FtZAoFF7a9S+JHorV1G98bIfSBThBxJV\ndPgXF/z4kUbtrVH7kuitXIX0Vuh7fgDFKfrID6AghYTfzC42s1+b2etmtqiIHvKY2WYzW2dma8ys\nq+BelphZr5m9PGTbJDNbYWYbs5/DLpNWUG83m1l39tytMbNLC+ptqpk9Y2avmtkrZnZ9tr3Q5y7o\nq5Dnre4v+82sWdJrki6StFXSS5KucvcSV7evDzPbLKnT3QufEzazP5C0W9KD7j4z2/ZPkna6+63Z\nP5xHuPvfNkhvN0vaXfTKzdmCMh1DV5aWNF/S11Xgcxf0dYUKeN6KOPLPkfS6u29y9/2SHpE0r4A+\nGp67Pydp5yc2z5O0NLu9VIN/eeoup7eG4O7b3H11dnuXpIMrSxf63AV9FaKI8E+RtGXI/a1qrCW/\nXdLTZrbKzBYW3cww2rNl0yXpHUntRTYzjJIrN9fTJ1aWbpjnrpwVr6uND/w+7Vx3nyXpEknXZi9v\nG5IPvmdrpOmaEa3cXC/DrCz9/4p87spd8braigh/t6SpQ+4fm21rCO7enf3slfSEGm/14Z6Di6Rm\nP2t3ccHPqJFWbh5uZWk1wHPXSCteFxH+lyRNN7MTzaxV0pWSniqgj08xs/HZBzEys/GS5qrxVh9+\nStLBlUEXSHqywF5+S6Os3Jy3srQKfu4absVrd6/7H0mXavAT/zck/V0RPeT0NU3Sr7I/rxTdm6SH\nNfgy8IAGPxu5RtLnJa2UtFHS05ImNVBv/yFpnaS1GgxaR0G9navBl/RrJa3J/lxa9HMX9FXI88Y3\n/IBE8YEfkCjCDySK8AOJIvxAogg/kCjCDySK8AOJIvxAov4PorD0WJXoKpMAAAAASUVORK5CYII=\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "from torchsample.transforms import RandomRotate\n",
+ "x_example = add_channel(x_train_mnist[0])\n",
+ "rotation = RandomRotate(30)\n",
+ "x_rotated = rotation(x_example)\n",
+ "plt.imshow(x_rotated[0].numpy())\n",
+ "plt.show()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Now, we will chain all of these above transforms into a single pipeline using the `Compose` class. This class is necessary for `Datasets` because they only take in a single transform. You can chain multiple `Compose` classes if you want."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 13,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "from torchsample.transforms import Compose\n",
+ "tform_chain = Compose([add_channel, norm_01, rand_crop, rotation])"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Now let's test the entire pipeline:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 14,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAQgAAAD8CAYAAACLgjpEAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAEe5JREFUeJzt3X+QXWV9x/HPZzeJKyEaIMkCSZBgQ2L8wYI0IGINRTDJ\nIMHWSqJTUZwJCGF0RqcTW0U77bTOVNTaUBypKTCj4K8GU43QEDsCrQqBRn4mZI2h2SU/IJCE8DO7\n++0fe0KXzX2SJ/fc3Xv38n7NZPbecz73nOeyyZdz7j77fB0RAoBKWuo9AACNiwIBIIkCASCJAgEg\niQIBIIkCASCJAgEgiQIBIIkCASBpVL0HUMkYvy7aNLbewwCa1ot6Ti/HSz5UriELRJvG6gyfW+9h\nAE3rN7EmK1fqFsP2XNsbbHfaXlphv21/s9j/gO3TypwPwPCqukDYbpV0raR5kmZJWmR71qDYPEnT\niz+LJV1X7fkADL8yVxCzJXVGxKaIeFnSLZIWDMoskHRT9Pu1pPG2jytxTgDDqEyBmCxpy4DnXcW2\nw80AaFAN8yGl7cXqvw1Rm46o82gASOWuILolTR3wfEqx7XAzkqSI+HZEnB4Rp4/W60oMC0CtlCkQ\n90qabnua7TGSFkpaOSizUtLHip9mnClpd0RsLXFOAMOo6luMiOixvUTS7ZJaJS2PiIdtX17s/5ak\nVZLmS+qU9LykT5QfMoDh4kZck/INPjqYKAUMnd/EGu2Jpw85k5LfxQCQRIEAkESBAJBEgQCQRIEA\nkESBAJBEgQCQRIEAkESBAJBEgQCQRIEAkNQw60EAryk+5K9B/H+0tTX/uLnZQy9oLYkrCAAHQYEA\nkESBAJBEgQCQRIEAkESBAJBUprPWVNv/afsR2w/b/nSFzBzbu22vK/5cXW64AIZTmXkQPZI+GxH3\n2x4n6T7bqyPikUG5uyLighLnAVAnVV9BRMTWiLi/ePyspEdF1yygqdTkMwjbJ0o6VdJvKuw+q+js\n/XPbb63F+QAMj9JTrW0fKenHkj4TEXsG7b5f0gkRsdf2fEm3qr/Td6Xj0HoPw6b15Ddn5XafMiH7\nmOPv2pw/gCNenx3dc0p7dvbpmXlTrfct/2VWrtQVhO3R6i8O342Ifxu8PyL2RMTe4vEqSaNtV/wv\nTus9oPGU+SmGJX1H0qMR8bVE5tgiJ9uzi/PtrPacAIZXmVuMd0v6c0kP2l5XbPtLSSdIr7Te+5Ck\nT9nukfSCpIXRiK28AFRUpjfn3ZIO+jujEbFM0rJqzwGgvphJCSCJAgEgiQIBIIkCASCJAgEgiQIB\nIIlVrdHQWtra8rMT86dFb7x0UlbutPdsyD7mIx/NnxL94oujs7PTJnZnZ997zONZuX+9dW9WjisI\nAEkUCABJFAgASRQIAEkUCABJFAgASRQIAEkUCABJFAgAScykRG34oGsHvUrL22dkZ3+38Kjs7Kcv\n+ml29iPjfpCV29XXl33MvinZUe3qG5OdHd/ycnb2jS1534dVo57LynEFASCp7KrWm20/WLTVW1th\nv21/03Zn0RvjtDLnAzC8anGLcU5EPJXYN0/9fTCmSzpD0nXFVwAjwFDfYiyQdFP0+7Wk8baPG+Jz\nAqiRsgUiJN1h+76iM9ZgkyVtGfC8S/TvBEaMsrcYZ0dEt+1JklbbXh8Rd1ZzIFrvAY2n1BVERHQX\nX3dIWiFp9qBIt6SpA55PKbZVOhat94AGU6b13ljb4/Y/lnS+pIcGxVZK+ljx04wzJe2OiK1VjxbA\nsCpzi9EuaUXRenOUpO9FxG22L5deab23StJ8SZ2Snpf0iXLDBTCcyrTe2yTplArbvzXgcUi6stpz\nAKgvplqjJlonTczObvxo/vTpLy74YXZ2/ti8BVsl6XPd78/K/eLR/Gnhnedfn51t9eHc3edPy841\nSk9n5ZhqDSCJAgEgiQIBIIkCASCJAgEgiQIBIIkCASCJAgEgiQIBIIkCASCJqdavNYex+nTrrJOz\ns51faMvO/tfZX83OHtPy+uzsmhfGZ2d//4W8KdQzt+Wt/ixJX+zoyM7+XfsD2dl64goCQBIFAkAS\nBQJAEgUCQBIFAkASBQJAEgUCQFKZVa1nFD059//ZY/szgzJzbO8ekLm6/JABDJcyi9ZukNQhSbZb\n1d/vYkWF6F0RcUG15wFQP7W6xThX0u8iIn/VUAANr1ZTrRdKujmx7yzbD6j/CuNzEfFwpRCt90o4\njOnTvXNOzc52LXkpO3v37H/Ozk5qHZud/d+evdnZy392RXZ25tpH84ITj8k+5o82MNX6ALbHSLpQ\nUqX1ye+XdEJEvEPSP0m6NXUcWu8BjacWtxjzJN0fEdsH74iIPRGxt3i8StJo2xNqcE4Aw6AWBWKR\nErcXto910ZvP9uzifDtrcE4Aw6DUZxBF097zJF02YNvA3pwfkvQp2z2SXpC0sGjHB2AEKFUgIuI5\nSccM2jawN+cyScvKnANA/TCTEkASBQJAEgUCQBIFAkASBQJAEqtaN4HW8YexmvP782ep3nTq9dnZ\nw1l9uuswpk9fsenPsrMn/vu+7Gzvrt1ZuVFHHpl9zL4t7dnZkYIrCABJFAgASRQIAEkUCABJFAgA\nSRQIAEkUCABJFAgASRQIAEkUCABJTLVuBhOPzo562nPZ2TPbWrOze/tezM4u+f2fZmef+P6J2dn2\nX+WvFN2Xmevp6s4+pvtOyM7+fl/+dPPxLfn/H39wX96K8M9mruvGFQSApEMWCNvLbe+w/dCAbUfb\nXm17Y/H1qMRr59reYLvT9tJaDhzA0Mu5grhB0txB25ZKWhMR0yWtKZ6/StGO71r1L4s/S9Ii27NK\njRbAsDpkgYiIOyU9PWjzAkk3Fo9vlHRRhZfOltQZEZsi4mVJtxSvAzBCVPsZRHtEbC0eb5NU6Rfh\nJ0vaMuB5V7ENwAhR+kPKos9F6V4XthfbXmt77T7l94QEMHSqLRDbbR8nScXXHRUy3ZKmDng+pdhW\nEb05gcZTbYFYKemS4vElkn5SIXOvpOm2pxUNfhcWrwMwQuT8mPNmSb+SNMN2l+1PSvqKpPNsb5T0\nvuK5bB9ve5UkRUSPpCWSbpf0qKQfRMTDQ/M2AAyFQ86kjIhFiV3nVsg+IWn+gOerJK2qenQA6oqp\n1k2g97HfZWf3PXVGdvbcRy7Mzj65akp2duqK/OnLEx+/Jzvb19ebnc3VOuMPsrM943uys1t681fL\n/uneN2Vnv3bX+7Ny23b/Y1aOqdYAkigQAJIoEACSKBAAkigQAJIoEACSKBAAkigQAJIoEACSKBAA\nkphq/Roz8+qN2dmeGVMPHSpM3ZQ/3btn2/bs7Kgp+WsM7Xxv/nh3nZz3/8Z9b34h+5gffuu92dnJ\nrfmrWv/11lOys1N/npfbuScvxxUEgCQKBIAkCgSAJAoEgCQKBIAkCgSApGpb7/2D7fW2H7C9wvb4\nxGs3237Q9jrba2s5cABDr9rWe6slv
S0i3iHpMUmfP8jrz4mIjog4vbohAqiXqlrvRcR/FKtWS9Kv\n1d/zAkCTqcVnEJdKSs3fCkl32L7P9uIanAvAMCo11dr2X0nqkfTdROTsiOi2PUnSatvriyuSSsda\nLGmxJLXpiDLDwkH07hzchznN/52fzV/PWfKo/L92O84/ITv77ivzpzpfMeGXWbmTR4/NPubh6I38\nv+N//+YfZ2cvveTjeed/sC8rV/UVhO2PS7pA0keL/pwHiIju4usOSSvU3/G7IlrvAY2nqgJhe66k\nv5B0YUQ8n8iMtT1u/2NJ50t6qFIWQGOqtvXeMknj1H/bsM72t4rsK633JLVLutv2byXdI+lnEXHb\nkLwLAEOi2tZ730lkX2m9FxGbJOX/niqAhsNMSgBJFAgASRQIAEkUCABJFAgASRQIAEmsao2aaBmb\nPyX5pXfNzM6edvm67Oxlx1ScxV/R3z4xLyt38cR7so/53rZd2dkjW9qys+8ckx3VXX94fVbu3LFP\nZeW4ggCQRIEAkESBAJBEgQCQRIEAkESBAJBEgQCQRIEAkESBAJDETEokjTq2PTu7fum07OxPL/pa\ndnbFnlOzsx/44Wezs9NWvpiVu+rit2Qf87YP5L+vX+yZnJ39xkN/nJ09alzFFSAPsPGFG7NyXEEA\nSKq29d6XbXcX61Gusz0/8dq5tjfY7rS9tJYDBzD0qm29J0lfL1rqdUTEqsE7bbdKulbSPEmzJC2y\nPavMYAEMr6pa72WaLakzIjZFxMuSbpG0oIrjAKiTMp9BXFV0915u+6gK+ydL2jLgeVexDcAIUW2B\nuE7SSZI6JG2VdE3ZgdhebHut7bX79FLZwwGogaoKRERsj4jeiOiTdL0qt9TrljR1wPMpxbbUMWm9\nBzSYalvvHTfg6QdVuaXevZKm255me4ykhZJWVnM+APVxyIlSReu9OZIm2O6S9CVJc2x3SApJmyVd\nVmSPl/QvETE/InpsL5F0u6RWScsj4uEheRcAhsSQtd4rnq+SdMCPQAGMDEy1fo1pactfLHXHvJOy\ns1ed9/Ps7A3PnJWdXbPsXdnZGWuSH3EdYO/bj83K/c15P8o+5rbe/IV7r1l5YXb25G9sys5qVN4/\n6VHb+rJyTLUGkESBAJBEgQCQRIEAkESBAJBEgQCQRIEAkESBAJBEgQCQRIEAkMRU62Zg50dPOiE7\n+9KCXdnZj7wh//fw5v7PpdnZY2/dkJ2NCUdnZ7d+JG/NkYVHPpl9zFv2TszOHrU+O6q+Z/dmZz06\n8590H1OtAZREgQCQRIEAkESBAJBEgQCQRIEAkJSzJuVySRdI2hERbyu2fV/SjCIyXtKuiOio8NrN\nkp6V1CupJyJOr9G4AQyDnB+a3iBpmaSb9m+IiIv3P7Z9jaTdB3n9ORHxVLUDBFA/OYvW3mn7xEr7\nbFvShyXl9ycHMGKU/QziPZK2R8TGxP6QdIft+2wvLnkuAMOs7FTrRZJuPsj+syOi2/YkSattry+a\nAR+gKCCLJalNR5Qc1mtLy+vyO5E9umR8dvb+d34jO7vwsYsPHSqMXzYuO9v7TGd2VtOnZEfHHpE3\n1fqxfS9mH3PyqGeys8+8JTuqCcdNys7GE9szg5EVq/oKwvYoSX8i6fvpMUR38XWHpBWq3KJvf5bW\ne0CDKXOL8T5J6yOiq9JO22Ntj9v/WNL5qtyiD0CDOmSBKFrv/UrSDNtdtj9Z7FqoQbcXto+3vb+T\nVruku23/VtI9kn4WEbfVbugAhlq1rfcUER+vsO2V1nsRsUnSKSXHB6COmEkJIIkCASCJAgEgiQIB\nIIkCASCJAgEgiVWtm8Duiw74Tfukz5+zMjvbq7zpuJL0+M6jsrPH9+Uft6Utf1atd+av/rx3Q3tW\nbuOsCdnHnDkmfwXsI2fmT8vuaX9jdrZl85asXASrWgMoiQIBIIkCASCJAgEgiQIBIIkCASCJAgEg\niQIBIIkCASCJAgEgyZG5uu1wsv2kpMcHbZ4gqRkb8DTr+5Ka9701w/t6U0RMPFSoIQtEJbbXNmPr\nvmZ9X1LzvrdmfV+VcIsBIIkCASBpJBWIb9d7AEOkWd+X1LzvrVnf1wFGzGcQAIbfSLqCADDMGr5A\n2J5re4PtTttL6z2eWrK92faDttfZXlvv8VTL9nLbO2w/NGDb0bZX295YfM1fcqqBJN7bl213F9+3\ndbbn13OMQ6mhC4TtVknXSponaZakRbZn1XdUNXdORHSM8B+b3SBp7qBtSyWtiYjpktYUz0eiG3Tg\ne5Okrxfft46IWFVhf1No6AKh/m7gnRGxKSJelnSLpAV1HhMGiYg7JT09aPMCSTcWj2+UdNGwDqpG\nEu/tNaPRC8RkSQNX4ewqtjWLkHSH7ftsL673YGqsPSK2Fo+3qb+ZczO5yvYDxS3IiLx9ytHoBaLZ\nnR0RHeq/hbrS9h/Ve0BDIfp/VNZMPy67TtJJkjokbZV0TX2HM3QavUB0S5o64PmUYltTiIju4usO\nSSvUf0vVLLbbPk6Siq876jyemomI7RHRG/1rx1+v5vq+vUqjF4h7JU23Pc32GEkLJeU3dmhgtsfa\nHrf/saTzJT108FeNKCslXVI8vkTST+o4lpraX/gKH1Rzfd9epaEb50REj+0lkm6X1CppeUQ8XOdh\n1Uq7pBW2pf7vw/ci4rb6Dqk6tm+WNEfSBNtdkr4k6SuSfmD7k+r/zdwP12+E1Uu8tzm2O9R/27RZ\n0mV1G+AQYyYlgKRGv8UAUEcUCABJFAgASRQIAEkUCABJFAgASRQIAEkUCABJ/wf0Fdo/Z6m8xgAA\nAABJRU5ErkJggg==\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "x_example = x_train_mnist[5]\n",
+ "x_tformed = tform_chain(x_example)\n",
+ "plt.imshow(x_tformed[0].numpy())\n",
+ "plt.show()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "collapsed": true
+ },
+ "source": [
+ "There you have it - an MNIST digit for which we 1) added a channel dimension, 2) normalized between 0-1, 3) made a random 20x20 crop, then 4) randomly rotated between -30 and 30 degrees."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 1b. Creating Transforms for CIFAR-10\n",
+ "\n",
+ "Here, I will create some transforms for CIFAR-10. Remember, this data is 2D color images so there will be 3 channel dimensions. Because we have color images, we can use a lot of cool image transforms to mess with the color, saturation, I will use the following transforms, available in the *torchsample* package:\n",
+ "\n",
+ "- ToTensor\n",
+ "- TypeCast\n",
+ "- RangeNormalize\n",
+ "- RandomAdjustGamma\n",
+ "- AdjustBrightness\n",
+ "- RandomAdjustSaturation\n",
+ "\n",
+ "You'll note that one of the transforms `AdjustBrightness` doesn't have \"Random\" in front of it. Just like with the `Affine` transforms, you can either specify a specific value for the transform or simply specific a range from which a uniform random selection will be made."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "First, you'll note that the CIFAR data was in NUMPY format. All of these transforms I'm showing only work on torch tensors. For that reason, we will first use the `ToTensor` transform to convert the data into a torch tensor. Again, it might be best to simply do this on the entire dataset as a pre-processing step instead of during real-time sampling."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 15,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "\n"
+ ]
+ }
+ ],
+ "source": [
+ "from torchsample.transforms import ToTensor\n",
+ "\n",
+ "x_cifar_tensor = ToTensor()(x_train_cifar[0])\n",
+ "print(type(x_cifar_tensor))"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Oh No.. This data is still in `ByteTensor` format! We should be smart and simply cast the entire dataset to `torch.FloatTensor`, but for the sake of demonstration let's use the `TypeCast` transform:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 16,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "\n"
+ ]
+ }
+ ],
+ "source": [
+ "from torchsample.transforms import TypeCast\n",
+ "\n",
+ "x_cifar_tensor = TypeCast('float')(x_cifar_tensor)\n",
+ "print(type(x_cifar_tensor))"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Great! Now, we will perform some actual image transforms. But first, we should `RangeNormalize` because these transforms assume the image is valued between 0 and 1:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 17,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "0.0 - 255.0\n",
+ "0.0 - 1.0\n"
+ ]
+ }
+ ],
+ "source": [
+ "print(x_cifar_tensor.min() , ' - ' , x_cifar_tensor.max())\n",
+ "x_cifar_tensor = RangeNormalize(0,1)(x_cifar_tensor)\n",
+ "print(x_cifar_tensor.min() , ' - ' , x_cifar_tensor.max())"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "For the `RandomAdjustGamma` transform, a value less than 1 will tend to make the image lighter, and a value greater than 1 will tend to make the image lighter. Therefore, we will make our range between 0.5 and 1.5."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 18,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [],
+ "source": [
+ "from torchsample.transforms import RandomAdjustGamma, AdjustGamma\n",
+ "\n",
+ "gamma_tform = RandomAdjustGamma(0.2,1.8)\n",
+ "x_cifar_gamma = gamma_tform(x_cifar_tensor)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Ok, now let's plot the difference:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 19,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAHz5JREFUeJztnVuMXNd1pv9Vt67q7uob2d0km5Qo6jaSbYmSGUGQPRl7\njASKEcT2iyZ+CPRghHnIGGMg8yB4gLHnzRmMHfhhYIAeK1EGHsdGbMdCIExgCw6ExIZHlCXrHomi\nKPPSbDbZ3eyqruq6rnnoEkK1979ZItnVlPb/AQSr96p9zj67zjqnzv5rrWXuDiFEemS2ewBCiO1B\nzi9Eosj5hUgUOb8QiSLnFyJR5PxCJIqcX4hEkfMLkShyfiESJXc1nc3sQQBfB5AF8L/c/Sux9+fz\neR8qFoO2TqdD+2UQ/hVi1vi+Cjl+XctHbLlsltrMwjs0i1xDI2Nst/kxx353mY2Nkfxis+tdvq8u\n35tlIgcQodsNH1ts7NHtRcZvkUlmtkxkHNkM/zzZOQAA3civZT12IrA+0e2FWVqpoFpb72tnV+z8\nZpYF8D8B/A6AUwCeNrPH3f1l1meoWMTBez8ctK2sLNF9DWXCH/xUgU/ODTuGqW16aoTadk6MUlsh\nmw+254ZKtA+yfIqXlleordnmxzY5MU5tmU4r2N5oNGif9fV1aiuWwhdrAOiAX7xq9WqwfXxijPaB\n8+01G01qyyL8uQD8YlMe5Z/zyAg/P/J5Ph/1yBg9doPIhM+R2DG3Pezff/6t7/P9bN5t3+/8Te4D\ncMzdj7t7E8DfAPjUVWxPCDFArsb55wCcvOTvU702IcR7gKt65u8HMzsM4DAADA0NbfXuhBB9cjV3\n/tMA9l3y995e2ztw9yPufsjdD+Xy/NlMCDFYrsb5nwZwq5ndZGYFAH8I4PFrMywhxFZzxV/73b1t\nZv8RwD9gQ+p71N1fivVZX1/HSy+H37Jy/jztN0UWWG0HX3nd2SlTm5VmqG2ty1WHaie8Au9WoH1q\n63zFtlbnK/CtDpe2zkc0zmIuPMZ2m28vS1abgfijWm19jdra3fBx2/oO2icTUQFbEbWilOPnQZWs\nmC912rTP8DBf7bcM//ZqRA0CAETkw9p6WKFpt8LtAJDNhT+X1nqdj2ETV/XM7+5PAHjiarYhhNge\n9As/IRJFzi9Eosj5hUgUOb8QiSLnFyJRtvwXfpeSAVDKEZkq8uO/G4mkt3+WB7jMTE9RWykm5USi\ntuqNcADMeovLUB7ZXqEUCQiKBPZ4l+9vfCoc0NRu8e0V8nwckWBLZAv8Q2s0w3PVavP5GI5sLzfC\nx1iM9GtbWI7MRKIE25EIvFgk6egIDyarrtWordUOS3qxgMrK6sVgezf2gW3eft/vFEK8r5DzC5Eo\ncn4hEkXOL0SiyPmFSJSBrvabOYoWDqgol/lQbpubDLbvKPFIkHyXp6aqLvFgm06XXw/rtfDYMzyu\nB2ORtGC5yCr1ysUK7xf51KbK4RXnyioPwmlGAnTqJOgEiOelGyWpsFpNHniS6fADy0cCjDokdRkA\n5MjyfKPB+xTy/APNdHlAUKO6TG0gQWEAMERO43aXKxIX18KKTyeSj3EzuvMLkShyfiESRc4vRKLI\n+YVIFDm/EIki5xciUQYq9eXMMDkU3mUpIuWMk6CO6TGeM61DykUBiNSZAbK5SCI5koet0Y1ITRFd\nLhcJLuk0uCTmWX7NPncuXAWo0+JHXanxoJNah8uio6VI9Z0GKdcFfswZ4zJVdihSKWeNy7rD+fAY\nc5FSWOuRvIv1Fpf6upEiaytVPsaVWvj8qRJpGQDWW+FzoBnJ1bgZ3fmFSBQ5vxCJIucXIlHk/EIk\nipxfiESR8wuRKFcl9ZnZCQAVbKhnbXc/FN1Z1jA9EZZsynkusRWLYVsmy6WVUiQ/XqvNZa9uJFLN\nPSwBNSP59jpNLgN2PRIxF5HYPMejzirNcIRep8PntxYpDdaO2CprfPynl8LjyGf49saqfO5bZ3k5\nt/pFLlXesPOWYPvMzF7ax8rh/HgA0Fi+QG3VKo+OvFjhUt/5i2FZ98RJPo5ONuy6jSaXBzdzLXT+\nj7s7/2SEENcl+tovRKJcrfM7gJ+Y2TNmdvhaDEgIMRiu9mv/R939tJnNAPixmb3q7k9d+obeReEw\nABQjz/VCiMFyVXd+dz/d+/8cgB8CuC/wniPufsjdDxVyesoQ4nrhir3RzEbMrPz2awC/C+DFazUw\nIcTWcjVf+2cB/LBX3ioH4P+4+/+NdcjnstgzHU7sOFbgEsXocFjasohUhkiElUWi6Rp1LhtliAy4\no8zLho2M8Gi01YtcJBkf4xFzlUhSzbdOh7dZbfBHrkIkEGxuOBKVmOeRhycuhKMLGx5JuhqJ6hsf\nK1PbA3dyhXl1Pizrei2yr508WrRR4/NRrfJ76VCeb3PfrvCxzczM0j4Lq2Hp8MJrZ2mfzVyx87v7\ncQB3X2l/IcT2oodwIRJFzi9Eosj5hUgUOb8QiSLnFyJRBpvAM2uYKoej7XLNsDQEAEP58DCHh8J1\n6QCgUedyWCtSb21iIlwXEACcJH1sdvg1tNWKJJcc5XX8ziyGa7EBwBtv8WivxUr42CK5IHFjpObh\np//tQWrbu5uP/2+fOR5s//kxLkW1uzySMZfh0lxlZZHaatXwPJbLXHpDh0cXFou8X4FEnwLAsPF+\n7U74w7lh3x7ap7wUruX4/Jt8LjajO78QiSLnFyJR5PxCJIqcX4hEkfMLkSiDXe3P5TAztSNoqy/x\nVfGMhYdZJWWOAKAeyWWWs0g+u0hZK3alrLf4KvXEJA/QaXb4CvbxU2eobWmVj5Hl98tGSnyNFfn2\nZnLhVWUAKC5xReLWsV3B9vkpPo6FlXPU1qjxOX72tdeoLUPKV7VGIqXGxnlADTLcZcbHufpU7kbK\ng5E8j95cpX32kwC5oXz/93Pd+YVIFDm/EIki5xciUeT8QiSKnF+IRJHzC5EoA5b68pjcOR20TY7y\n8lqZTDgoYmV1mfZprVX59jqxcl08oZ2TAKPRUZ6nrwVue+U4l6jWGrz0U7E4xG2F8BhLI1yGmsxy\nWfSZYwvU1m7y06cxHpb6pif5fBi4/NZqcym41uS5BNdIrr5mmx+zRaTbSDU35DORUm+ZSO7CXHge\n2w0upTqRiUnsWRDd+YVIFDm/EIki5xciUeT8QiSKnF+IRJHzC5Eol5X6zOxRAL8P4Jy7f7DXNgXg\nuwD2AzgB4CF357rbv24NILKdRcoZMYYi+dSGEY56AoBc5JqXyUTy8REZcKjEy3WdP8uj4mrn+ZQd\nmOKSWIOrXigSSe/2m+don0xkg+0sn+PViNSay4bzDJYL/HPZMXkztd186w3U9uavn6a2V187HWwv\n5CIymnOZuN3mLpMhEZUAkC/weex2w+dVN6IrmoXP04gS+Rv0c+f/KwAPbmp7BMCT7n4rgCd7fwsh\n3kNc1vnd/SkAS5uaPwXgsd7rxwB8+
hqPSwixxVzpM/+su8/3Xp/FRsVeIcR7iKte8PONZPb0R4Vm\ndtjMjprZ0Uot8rAqhBgoV+r8C2a2GwB6/9P8S+5+xN0Pufuh8jBfxBJCDJYrdf7HATzce/0wgB9d\nm+EIIQZFP1LfdwB8DMBOMzsF4EsAvgLge2b2OQBvAXion5113VFfDycrtBaPzALCEVhrazzBYbPF\nr2vtDP8GUq1xaW6V2Ob28Wn0Nt/ejTu5MHPzHi4N1dZ5v7nb7g62F5w/ci1f5IlQSxPhhKsAgAs8\nUm3frt3B9pU1Hq144N/cSm1jkzwqcWzyDmpbXgzP//JFXvIsH5EjM84jKlvdSLQoDxZFpxU+vyNB\ngrR03LsI6ru887v7Z4npE+9iP0KI6wz9wk+IRJHzC5Eocn4hEkXOL0SiyPmFSJSBJvB0ODoWlkO8\nwxMqMlmjVORJP0fLXBo6s8hlxTdPLVJbLh8eR2GB19VbX+Dbu3WGy3mf+BiXvd44vTnU4l8pz4UT\npO7cEU6oCQDnFnmSzomJiOzV5eMvkISV5xbDUXYAkCuuUNviyjy1nZ7nUXj5fPg8mBjj2lu9zgUz\nz/H7pUW0uW5EBsxYuJ9FIkwjZR77Rnd+IRJFzi9Eosj5hUgUOb8QiSLnFyJR5PxCJMpApb5sNoOJ\nidGgrZ3jUl+1Go5I8xaXTy5WeNTWW7/m0la1ymWjUjF8rZx/k0cXzhZ5Use5uRupbWLPTdSWr0RC\nxEhS071338e7nOXyW6nNpcoOeKTg2lrYtns4LEUCQLPDj8tGwucNAOwd2UNt5YmwxFm5cJb2Obdw\ngdpaxuXN9SZPCooM1+ZGhsJRps16RMIkCUGNyIbBIfX9TiHE+wo5vxCJIucXIlHk/EIkipxfiEQZ\n6Gp/t9NGZSW8kppr8lx3eVKaCDyFHHJZbqxVuRIwWeaBLBMj4VXZ+jJf7Z/Zw3Pgzd3176jtxVNN\nanvtGLc9sHsq2L6ywvvM3hzO+wcAGdSordngSsCEh1fuV8/xlfRSk+cS3D0VPi4AWOnwvHr5uyaD\n7fVIoNA/P/E4tZ06yY85GynJFSukxeKIWrGycq3wXLEguOA2+n6nEOJ9hZxfiESR8wuRKHJ+IRJF\nzi9Eosj5hUiUfsp1PQrg9wGcc/cP9tq+DOCPAbyte3zR3Z/oZ4dZonh0IkEMTmSSDCnjBQAd41Lf\nMleUsLoayd/WCMtlu8e5PPhbH/84te29/X5q+8FfPkptuyJBLtlmOD/h6eNv8O0duJPaijtuobYR\n5/JsbSlcu7XUDUtvANCsc1nxfIXbJqZ5ENSOXfuD7fXqGO2T4SZ0CjyYKZbDr9XiUqu1wwFq5jxw\nrd0Ou+61lvr+CsCDgfa/cPeDvX99Ob4Q4vrhss7v7k8B4OlihRDvSa7mmf/zZva8mT1qZvy7nBDi\nuuRKnf8bAA4AOAhgHsBX2RvN7LCZHTWzo9Uaf+4RQgyWK3J+d19w9467dwF8EwBNE+PuR9z9kLsf\nGh3mWW2EEIPlipzfzHZf8udnALx4bYYjhBgU/Uh93wHwMQA7zewUgC8B+JiZHQTgAE4A+JN+dmYA\njCgRHRKlBPCyRZHKSfB6ZHuRFHhTO3iZr13DYWnx3kO30T53PMDlvOVzXN4cavPIwwN791Jblxzc\nrhmeO6+9ziXTWiQasNnm/Vr18KnVAZcp3zh9itpeePEotT1wPx/jjl3hqMrVSliKBABS4QsAsHM/\nl3W7sfJazYhsRyTki4u8fFmjEh5kl0RThris87v7ZwPN3+p7D0KI6xL9wk+IRJHzC5Eocn4hEkXO\nL0SiyPmFSJSBJvB0B7okgqne4BJFgUSx5XI8YWI2w+WfW3bxXyMXS/x6uP/GfcH2uz/KI/d2334X\ntT3387+kthv28THu+sCHqK0wfXOwPTc8TvvU1rnkWF/lkXsLZ05S2/JCWLbrtHh0XqkcTpAKADt3\n8s/65JlnqW1291ywvV2LRJHWedktW1umto6HIyoBwJnGDaA0FD62wi5+zKtDJNL1XXi07vxCJIqc\nX4hEkfMLkShyfiESRc4vRKLI+YVIlIFKfWaGfDa8y+VIgsbOeljWKA2XaJ9shksrM5HIvZPzPJLq\n5ntDqQyBvR8Kt2/AJbtWZY3axstcmpu+7SC1reXCNe1eevZp2qdR5+NYXeXzcf70r6kt2wlLrcUi\nP+XmbgrLcgBw1208kWg7yyPt8tmJcHuBR33m1nmSztpbp6mNydgA0I7cZqukruTwDn5cs6QGZD7f\n//1cd34hEkXOL0SiyPmFSBQ5vxCJIucXIlEGG9jT7aJRD6+kDg/xoVgxvBqaz/Acct7httIoL+X1\nB//hD6jtgd/7RLB9bOcs7bNw/BVqy0bGv1LhOfwWT/wLtZ2phFec//Hv/o72GS3xAJL1Bg+A2TXL\nFYmxcnil+s1TPBioGZmPqT37qe22D32Y2tAZCjYvrfB8gTWiLgHAcp2P0Zyfw+t1HrhWJSW2vMpV\nhzvCIga6/Vfr0p1fiFSR8wuRKHJ+IRJFzi9Eosj5hUgUOb8QidJPua59AP4awCw2ynMdcfevm9kU\ngO8C2I+Nkl0PuTtPcAbA4eg6ya3X5UER1g7LJG2PlOSK5EwrDo1R28EPc9loKB+WxF5+jueQWz7z\nBrU1GlzKqSwvUdvJYy9TW9XDwU75Dt/XaI5Ln2NFHlwyPcmlvvmFs8H2dqQsW63CZcWTb/IgIuAl\naqlWwzkIizl+frSHZqjtQpufO6USz0E4XOZBaKVcWI6s1FZpn3Y3LDm+C6Wvrzt/G8CfufudAO4H\n8KdmdieARwA86e63Aniy97cQ4j3CZZ3f3efd/Ze91xUArwCYA/ApAI/13vYYgE9v1SCFENeed/XM\nb2b7AdwD4BcAZt19vmc6i43HAiHEe4S+nd/MRgF8H8AX3P0dDyPu7iCPG2Z22MyOmtnRtTrPpS+E\nGCx9Ob+Z5bHh+N929x/0mhfMbHfPvhtAsOC5ux9x90PufmikVLgWYxZCXAMu6/xmZgC+BeAVd//a\nJabHATzce/0wgB9d++EJIbaKfqL6PgLgjwC8YGbP9dq+COArAL5nZp8D8BaAhy6/KQcQlu26bf5I\nkMuHc+51IjnTmuDRV7PjPK/ePzz+99Q2NRuWlGZ2h8t4AUCzxqPz8vmwxAMAoyNcUspluDQ3QuTI\nXTPhnG8AUK9whbaU5WO8sHie2lrN8GdTLnLJq1nlUt/rzx6ltvlXX6O2RpuU0MrzOezE5ncvlz4x\nws/hzBCXWotEtpsEn6s7PnBTsL1UPE77bOayzu/u/wSAxTiGY1yFENc9+oWfEIki5xciUeT8QiSK\nnF+IRJHzC5EoA03gCTd0u2HhoBCJLCvmSPLDDE+06JESTt0mjyw7fz4cjQYA1cWwrdTi0Vdd8OOa\nmuTy28SeaWprdxrUdvpMeIweiffKZPhp0GxzyTRrPPHnSDEsz5IAzY3txYyRKM1Ok8upGXK+rd
a4\nvNkcIvIggPIePvdrJV7arNLlMuD6WvgevGPsAO2zk0i3uXz/Lq07vxCJIucXIlHk/EIkipxfiESR\n8wuRKHJ+IRJlsFIfDBkLR4kVh3gEk5MIvZFSWE4CgJHyTmqrtXiE1Y4yzzmQI+NoXlygfboZvr1a\nnktbs7PhqC0A6Da5bHT7XXuD7T/76ZO0T9Nr1JY3LqfWq7zfWDkclVjI8VMua5F6duv8M3tznst2\nKyvhz6xha7TP9G38njg3EYlKdP5ZL5/nc1VYD0umI3ORSMxaOGqyG1FLN6M7vxCJIucXIlHk/EIk\nipxfiESR8wuRKANd7c8YUMiFrze1Bg+YyJKSUd1IfrlaiwdnZPM8SGSowFdz8/nwOArDvGzV+BgP\nMDq7yFWC2lx41R4AZvbdQm2nz4Xz6n3gtz5C+1QXz1Db8dd4Kay1Kg9kyWXD8z8+znMTGsnvCADz\np/kYf/1WJLBnKDz/Y7NcKZqeiowxojrYEv+sJ5e5q83NTAXb907wc+DYy+EArkadB61tRnd+IRJF\nzi9Eosj5hUgUOb8QiSLnFyJR5PxCJMplpT4z2wfgr7FRgtsBHHH3r5vZlwH8MYDF3lu/6O5PRHeW\nM8xOh683rQsXaL96JywBrfHYDHiGl/LKRYJLxsZ4MEWBlMKqr/EcfqVYTrUmtx392c+o7cDtXCI8\ndSosAWUi+Q6Hh3guvmxETi2VuLS1Vg1LffU6l2DbkZJtoyU+jgfuuY3aiiTAqJ3luQk7LR6EUz/J\npb5MpUhtM8Nlarvntg+E+0zwqvfPzL8ZbG+3+HFtph+dvw3gz9z9l2ZWBvCMmf24Z/sLd/8ffe9N\nCHHd0E+tvnkA873XFTN7BcDcVg9MCLG1vKtnfjPbD+AeAL/oNX3ezJ43s0fNjJe+FUJcd/Tt/GY2\nCuD7AL7g7qsAvgHgAICD2Phm8FXS77CZHTWzo6s1/kwnhBgsfTm/meWx4fjfdvcfAIC7L7h7x927\nAL4J4L5QX3c/4u6H3P3Q2DDPdCKEGCyXdX4zMwDfAvCKu3/tkvbdl7ztMwBevPbDE0JsFf2s9n8E\nwB8BeMHMnuu1fRHAZ83sIDbkvxMA/uRyGyoUDDfsC9/9x43LJMdOhqWXhUUendfscGlodJQf9lqN\nR4h1utVgezZyDV1a5BJmpcplmfUWH0fWua08Gl56WTi7RPucWuPyVde5RDg7zWVR64ajy5ZXeL69\noRH+mU2Mc6mskOXz32gSyTfH5c21Bt9esxopUdbl/W7Zt4va9uwKz+PJU1zSvbAY9ol2rOTZJvpZ\n7f8nAKEzIKrpCyGub/QLPyESRc4vRKLI+YVIFDm/EIki5xciUQaawDObM4xNksg4Il0AwORMNmwY\n4UkYzy/whKDrkXJXuQJP3si6dVs8grDV4eO4WOey10gkim29xqW5+no4gWczMsZOxOZO5h5AdTVS\nrmssnAh1bIwnO63X+fbOX+BzNTrKowstE76/WZvLxIUcT+I6xBVpFAp8rvbfsp/a6rXwWJ566mXa\n5/nXzoW3td5/VJ/u/EIkipxfiESR8wuRKHJ+IRJFzi9Eosj5hUiUgUp9ZoZcMbzL4hiP9Z8aDV+j\ncnUuo+VLPLppNVI3DR1+PSwVZ8Jd8nxfnQavZ1cY5uPI5/h8ZLNc4mx4eCzNFpc3PRK5Z1wRgze5\n5Nghpnwkmg4FLm+uLHOpr97k9enGJ8LSbY5IgACQicx9DVxKWzhfobblSARnZS0cpfmTf3yV74uo\noutNSX1CiMsg5xciUeT8QiSKnF+IRJHzC5Eocn4hEmWgUl+3a6iyBIjZUdpvdCSsG+VLXIcaiYRf\njY9zaa66ymvJVVfDCRWrtUhU3zq3lQs8AWaR1AUEgHaDS5y5XPh6Xohc5vNDPBrNjHccjiRCzRBT\nu8OlqEIpUkNxgsubS0tcYqsQ6XNsis99LVIz8PUTPCHrqy+cpLbZKR4tOruXHFuGn6c7SULThQqX\nPX9j832/UwjxvkLOL0SiyPmFSBQ5vxCJIucXIlEuu9pvZkUATwEY6r3/b939S2Y2BeC7APZjo1zX\nQ+7Ooy+wkQPv1FthW2OFr86Xp8MrxMVSJKCDiweYmuKHXV3jeeRWVsK25Qs8EGSZLw4j2+Wr7F3n\nSkanwxUEdMO22FXeMjywJ5vjc1WPBEE5WdTPkzJeANCu8ZJinUh+v04kWGilGu7HqngBwFJE8Tlx\njH+gKxfWqK25xne4azxcyuuOG+doHzbE18+u0j6b6efO3wDw7939bmyU437QzO4H8AiAJ939VgBP\n9v4WQrxHuKzz+wZvV6jM9/45gE8BeKzX/hiAT2/JCIUQW0Jfz/xmlu1V6D0H4Mfu/gsAs+4+33vL\nWQCzWzRGIcQW0Jfzu3vH3Q8C2AvgPjP74Ca7Y+PbwG9gZofN7KiZHb1Y5ckfhBCD5V2t9rv7CoCf\nAngQwIKZ7QaA3v/BKgLufsTdD7n7ofHRSMUDIcRAuazzm9m0mU30XpcA/A6AVwE8DuDh3tseBvCj\nrRqkEOLa009gz24Aj5lZFhsXi++5+9+b2c8BfM/MPgfgLQAPXW5Dbjl08juDtlbhEO3X6IYDWTLt\ncGkqACiOc/lqYpp/A5nM8MCTqVo40GJliZd3WjnP5bz6Gp/+TpvLh3B+ze62w2Ncr/NHrkIhki8w\nx8dfWeeBJ3XyiJd3HjRTzoSDVQCgm+ESVqvF53FoJCyZFvM8X+BEgY/xACao7UN387Jht991N7Xt\nv+WWYPt993N589SZarD9n9/gPrGZyzq/uz8P4J5A+wUAn+h7T0KI6wr9wk+IRJHzC5Eocn4hEkXO\nL0SiyPmFSBTzSPTYNd+Z2SI2ZEEA2Amgf11i69A43onG8U7ea+O40d2n+9ngQJ3/HTs2O+ruXNzX\nODQOjWNLx6Gv/UIkipxfiETZTuc/so37vhSN451oHO/kfTuObXvmF0JsL/raL0SibIvzm9mDZvYv\nZnbMzLYt95+ZnTCzF8zsOTM7OsD9Pmpm58zsxUvapszsx2b2eu//yW0ax5fN7HRvTp4zs08OYBz7\nzOynZvaymb1kZv+p1z7QOYmMY6BzYmZFM/t/Zvar3jj+W6/92s6Huw/0H4AsgDcAHABQAPArAHcO\nehy9sZwAsHMb9vvbAO4F8OIlbf8dwCO9148A+PNtGseXAfznAc/HbgD39l6XAbwG4M5Bz0lkHAOd\nEwAGYLT3Og/gFwDuv9bzsR13/vsAHHP34+7eBPA32EgGmgzu/hSAzXmqB54QlYxj4Lj7vLv/sve6\nAuAVAHMY8JxExjFQfIMtT5q7Hc4/B+DScqansA0T3MMB/MTMnjGzw9s0hre5nhKift7Mnu89Fmz5\n48elmNl+bOSP2NYksZvGAQx4TgaRNDf1Bb+P+kZi0t8D8Kdm9tvbPSAgnhB1AHwDG49kBwHMA/jq\noHZsZqMAvg/gC+7+jtQ9g5yTwDgGPid+FUlz+2U7n
P80gH2X/L231zZw3P107/9zAH6IjUeS7aKv\nhKhbjbsv9E68LoBvYkBzYmZ5bDjct939B73mgc9JaBzbNSe9fb/rpLn9sh3O/zSAW83sJjMrAPhD\nbCQDHShmNmJm5bdfA/hdAC/Ge20p10VC1LdPrh6fwQDmxMwMwLcAvOLuX7vENNA5YeMY9JwMLGnu\noFYwN61mfhIbK6lvAPgv2zSGA9hQGn4F4KVBjgPAd7Dx9bGFjTWPzwHYgY2yZ68D+AmAqW0ax/8G\n8AKA53sn2+4BjOOj2PgK+zyA53r/PjnoOYmMY6BzAuAuAM/29vcigP/aa7+m86Ff+AmRKKkv+AmR\nLHJ+IRJFzi9Eosj5hUgUOb8QiSLnFyJR5PxCJIqcX4hE+f+zWYFHOK31HAAAAABJRU5ErkJggg==\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAHthJREFUeJztnWuMnOd13/9nrnu/kLtaUryIpMQ4ZVRFchhJSWRLtWFD\nVQPYRls1+hDogxHmQ2rUQPpBcIHa/ZYUtQOjQF3QtRClcHxpbMOCoaa1ZTuKUdc2rehmUXfzKu4u\nl7vLnd25z5x+mBFKrZ7/syMud1by8/8BBGffM8/7nnnmPfPOPP/3nGPuDiFEemS22wEhxPag4Bci\nURT8QiSKgl+IRFHwC5EoCn4hEkXBL0SiKPiFSBQFvxCJktvMYDO7F8DnAWQB/Dd3/7PY8zOZjGdz\n4UO2221+HITvQswaP1Yuw425LP/My0dsRnZpFvkMjfjYbPLXHLvvMhvzkdyx2Y7cyeltbrPIPMZo\nkfczNvcxrvZOVCNvQGwOM5mIjZ0EAFpXfbdseJ+x18xMpUoNlVqjpzfNrnpSzbIAXgLwIQDnAPwM\nwAPu/jwbky8UfOfUdNBWqazRY+Us7ONIlvu+a7RAbVPjRWqbHhuktmI2/MGVK/L9IZulpoWlErXV\nW3yXO8ZH+OFazeD2aq1Ox1QitsEBPo8t8A+vtWoluH1ifIiOgfP91WsNastGrmEsyMdHuB+jw9xW\nKPD3eq3K59FjF4hM2P9a5H1peDi+/+aHT2N+ebWn4N/M1/7bAbzi7q+5ex3AVwF8ZBP7E0L0kc0E\n/x4AZ6/4+1x3mxDiXcCmfvP3gpkdA3AMADKRr8BCiP6ymSv/eQD7rvh7b3fbm3D34+5+1N2PxhZS\nhBD9ZTPR+DMAh83soJkVAPwBgEevjVtCiK3mqr/2u3vTzP41gP+FjtT3sLv/Ijam2Wjg4sX5oK3d\nDK9So7vzIHwhGpU2f2lW5Ku5q22uOpSIJOaRaSzX+esq1/iSfjMivy3MUhMGyGQ1W3x/2cg3snKe\n/1Qr1/kKfLMdfm1W43OfifwqbETmcZCoMABQI7LJpQtcWRiKKBwWcTITewERyXSNnAfNJj8/suQ1\n12s17sM6NvWb390fA/DYZvYhhNge9CNciERR8AuRKAp+IRJFwS9Eoij4hUiULb/D70rM4pl4jDGi\nvMyM5OmYqfEBahscjEg5kTS8CpG2qhGZkuRfAAAKRe4/ItKct/nxxsfDr63Z4Psr5LkfrUiCUTYf\nkdgaYR8bkf0NRfaXG+Q+DkbGNS0sfWUiCW3NyDkQO3+Hh3jST6nMJbgGmeRYQmVpLZw4xbIpg/vv\n+ZlCiF8pFPxCJIqCX4hEUfALkSgKfiESpa+r/Z0DhldZCxFP9oyFV3onC5FafJEV8dXLfMm5FVme\nr1TDK6mZyKL92AhXFnKRVerl1SofF8kfmRoOH29lla82x8pFVSL1xDyyKj5CkmMaDZ4MlImsVBcK\nEUWCJBEBvJYjUyMAoEDqTAJAJnKs2toqtUUqnqFITuNmRJFYqYb9iOSDvQVd+YVIFAW/EImi4Bci\nURT8QiSKgl+IRFHwC5EofZX6MgAGSWZEPiIbjRbDbk4Pc80rluAQyS1BNtKOCYPh49U8IjVF2kLl\nIh1qWhEpyiMZH7OXyuH9NbkGVIp0w6m0uI/DxUgRxUZ4XDbSiCxW2D0bkfrKZe7/cD7cgSkf8aMS\nqa1YibRYa0f2uVzh7+dKLbzPSp3vjymwjUhC2Hp05RciURT8QiSKgl+IRFHwC5EoCn4hEkXBL0Si\nbErqM7NTAEroqGdNdz8ae342YxgjctlQpDjaAMney2S4rDEUqY9Xj8hX7Yjk6ETSi3StQitStK4d\nkfo8UjzPI92OSyRrrtWOZCtG5KFY27BVklkGALOlcKZgLvKejYTL0nX8uMQz5qplnpW4d2wmuH1m\nxxQdY8O8ZVtthfuxWuF+lCp8ri6Teby0FjlPydsZUSLfwrXQ+f+Juy9cg/0IIfqIvvYLkSibDX4H\n8D0z+7mZHbsWDgkh+sNmv/bf5e7nzew6AN81sxfc/Ykrn9D9UDgGAJE7XYUQfWZT4eju57v/zwP4\nFoDbA8857u5H3f1o9L55IURfuergN7NhMxt94zGADwN47lo5JoTYWjbztX8GwLesczXPAfhrd//b\n6MGyhunRsAQ3mucaxchAWNqySDYdIhlWFimMWItkuLFPyp1DvDXYyCCf4surXFIaH460fooU1Ty/\nGC7UWWnyb125SCLYTD7SuirL/XidyG/NSIHUiJqH4Yh0e8cNB6nt8qXweeVV/qInJ7mUWq3w6+Vq\nhb+2QuQ37wzpRzc5SodgkUiHcyVeqHU9Vx387v4agN+82vFCiO1FS3BCJIqCX4hEUfALkSgKfiES\nRcEvRKL0tYBnLmOYHApLNrkm701XzIU/o4YLvIBkLZJx1ogU95wc5bIdUwhrkYy5RpMXbhwe4nLe\n+SU+7uwlnj12mRR9JPU0AQBTOe7/AzdfT22/tjtcHBMA/uszc8HtT85yebMVyXKM3R26UlqmtjIp\nnMmySzuO8PkYLPJxhUjvyMGI9MyKze6bHqNjhi+HJb2lSDHT9ejKL0SiKPiFSBQFvxCJouAXIlEU\n/EIkSn9X+7NZzEyEsxXKy3x1O2Phz6hYXbRKpHZejuwPANYiRdDYqFgLp8kxrh5EukLh7CLPcinV\nIpk4pJWXRWokDkcye/bneGG9kUW+snxkeCK4fXaY+7FQ5kpAI5LM9NwsryKXITUIGwORVmOjkYya\nDF/tHx/hyUejbW6rknPVG1wBOzgZ9v/F2d7T5nXlFyJRFPxCJIqCX4hEUfALkSgKfiESRcEvRKL0\nV+rLZbFzZ1gC2jnMpZdsJuzmpVKJjmmUeS2zTCSxpx1JwHCSYDRKkpUAoA7+ul58fZ7aqpF2XflI\nIg6rFVcs8Ld6LMPn4/tnuOTYaPJrR3U4nJQyFZHDDDzRqdHiUnA1YmMdtBqRlm3WjGiwESWtEKlO\n7RGpNU/es2aDvy6aAxVRgdejK78QiaLgFyJRFPxCJIqCX4hEUfALkSgKfiESZUOpz8weBvD7AObd\n/ebuth0AvgbgAIBTAO5396WND2cAke0y+bevOg4U+ZjhyLhcRK9hGYQA0EBYXykO8qNdnOdZcdVV\nXotvzxD3ox4p08Ykvffs4fXgMpEdNiJZbJfX+GvLZcK2sTyXPqfGZqjtyI27qe2Fsyep7eSZleD2\nQqTVmDuXiWPyZjbH5yqf57Y2yTxsR85Ti2mOPdLLlf8vAdy7bttDAB5398MAHu/+LYR4F7Fh8Lv7\nEwAW123+CIBHuo8fAfDRa+yXEGKLudrf/DPufqH7eBadjr1CiHcRm17wc3dH5KZCMztmZifM7ESl\nzm9XFEL0l6sN/jkz2w0A3f/pTeruftzdj7r70cHI/eVCiP5ytcH/KIAHu48fBPDta+OOEKJf9CL1\nfQXAPQCmzOwcgE8D+DMAXzezjwM4D
eD+Xg7WbjvK1fBXf2tweQUIyzKrZS411ZqRFlrGM8tWq3yf\nK9VwQcU9u7iM4y2+vz0j1ITDU9z/tRq33XBob3D7gHNZ8eIKl72GxyPFLBf5teOmmR3B7QtrvCjl\nzb9xgNp2Tg1x28791La4+FJw+9IaP9/yETkyk+MhU49li0bapbVIFmHsyuxvJ32PsGHwu/sDxPTB\nTR9dCLFt6A4/IRJFwS9Eoij4hUgUBb8QiaLgFyJR+nrXjZujaWHNwyMFK52oGkNFLsmMjvBikOcu\ncZnnzEVesDJLZuviJZ7QWFviUt/hCS7Z/at7dlLbU2fK1Da5Pyyx7Z3hd2CfiRQSvW4nl9iykf5z\nAyQb8PTsheB2AMgPXqa2s5fmqO2183yO87nwOTIxxKWycvXtF3EFAIsU8GxHtL4MGZclfRcBoLV5\npU9XfiFSRcEvRKIo+IVIFAW/EImi4BciURT8QiRKf3v1ZTPYORGWjho5XuhjhRS69AaXB5fW1qjt\n7Bzv8Vep8WKWhXZYepmf4xlzU3n++XpgZpzadh24ntqKl89TGwbDb+mRu++mQ0Z++Rq1jda5xNYE\nz9BbXgnbbhzlEma1GZHD2rxI6k2jXHKcmJwKbl9eWKBjXp8PF/0EgIbxDM5KI1JZNcO1uZFC2P96\nNZZ5GH6fY3LjW1zq+ZlCiF8pFPxCJIqCX4hEUfALkSgKfiESpa+r/a1WC5eXl8OO1HmySoG10OIL\nr8hn+OdatcZX50cH+JSMEVus7dZ1Owap7cbb3kttf/cq3+fTr3Bl5L594cSe2Xm+Mn/je++itiwi\nysgaVx12eViJuXiaJxG1Kvw1H949TW2zzQFqK2avC24vzZ+mYx796qPUduo8Tz7KRlpyIdJeq0KE\ngHqsrVwzfA60WRZcaB89P1MI8SuFgl+IRFHwC5EoCn4hEkXBL0SiKPiFSJRe2nU9DOD3Acy7+83d\nbZ8B8EcALnaf9il3f2zDfQHIEvWiRVphAYATySND2ngBQIvJgwBKfBgi3aTgpK3SzCBPLLn7Hi7n\nvec2nmzzX/7zl6ht7xCXtnK1cD27F589Scccvu13qG10/1Fqg1+ipuWzYSlttMUThSolLiueWeS1\nFXftv5na9t10S3D7yuI/0DGZx7jU15rnyTsWqbnXINIcAFgrnNBkEdmu2Qyf35GuYG+hlyv/XwK4\nN7D9L9z91u6/DQNfCPHOYsPgd/cnACz2wRchRB/ZzG/+T5jZM2b2sJlNXjOPhBB94WqD/wsADgG4\nFcAFAJ9lTzSzY2Z2wsxOVOqRH9tCiL5yVcHv7nPu3nL3NoAvArg98tzj7n7U3Y8OFmL3Pgsh+slV\nBb+Z7b7iz48BeO7auCOE6Be9SH1fAXAPgCkzOwfg0wDuMbNbATiAUwD+uNcDGlEvWpF6fExCiZTH\nQ7sa2V8k8WlsiMt21xXDQsodt+yiY373vvdT24UzPENsoMVrEN68j2e4tUg7tEN7uY+1SFZi6wJv\nhVWu83HVlfA8NjFBxzz58qvU9uOf/ZTa/tmHuD6776ZwLcSLC2fpmAIvF4jrbuBt4NoRqa9Z5ydr\nsxY+V5cX+OuqrYVDt+29i30bBr+7PxDYzEVoIcS7At3hJ0SiKPiFSBQFvxCJouAXIlEU/EIkSl8L\neMKBNmnJVK5z/a1AsthyWX7TUC7Ds6j2jXOJqljgcs2h68N3Md/94ffRMb/2u1zqe/x//jm13bRn\nhPvxW0eobXDvrwe3F8Z4m6zLq0vUtrLA0zp++cpL1DZ35pfB7c06lzBHJnix010z/FR94bUT1Hbo\n1UPB7fUVLrN6JZK5t8YLzTadj/NIF62hYvgaXJjh1+bLS+EdZrmC+RZ05RciURT8QiSKgl+IRFHw\nC5EoCn4hEkXBL0Si9FXqMwAFIs8tlrlM0qqHZY3BAZ6Bl81w6XB6rEBt5xZ5FtuR3/5AePvdf0jH\nALuppb7CZa+do1zqO3CUF9xcyoez9/7++4/TMZW1H1Pb4lK4tyIAzJ7lvfqyrfD7OTTA5dlDh3nm\n4ftvO0xtjdwotRXz4V59xUEu9+YjPQPXzvFCom2uLqMRucyWyJQMT/DwvP76oeD2wkl+/q5HV34h\nEkXBL0SiKPiFSBQFvxCJouAXIlH6utrfdkeVrKQORQryWTG82l/I8Hpl7Rav4Tc0xLMs/sX7+Er6\nP//4vwxuv+6GcPIIALxy4kfUlstwHxdW+KryqWefpraXl58Mbv/O//jfdMxoZAW+Uq9R255pnogz\nORpOxnrx/AIdU4vMx66DB6ntd+4OqzAAgGZ4Vfzc3Mt0yGqVnx+LNa4iWSR7p8qnEWtkl17m8/GP\nSCnEVqQ+5Xp05RciURT8QiSKgl+IRFHwC5EoCn4hEkXBL0Si9NKuax+AvwIwg057ruPu/nkz2wHg\nawAOoNOy635358XgAMAdbVbnLNJmyIh+0WjyTAqL9OTKF8IyFAB84J47qW24GG7V9MO/5TLa7Gu8\njWElov8sL3Gp7+Rzz/NxHn5t+TZPnBrN8mvA+ChPgto9yftanZkLnwrNSFu21RJPSnnp5GlqA/4v\ntSwvhxOThvP8/GgUecf5xdZFahuINKIdirSBG8yFw7BU4e26mu1wvLj3rvX1cuVvAvhTdz8C4E4A\nf2JmRwA8BOBxdz8M4PHu30KIdwkbBr+7X3D3J7uPSwBOAtgD4CMAHuk+7REAH90qJ4UQ15639Zvf\nzA4AuA3ATwDMuPuFrmkWnZ8FQoh3CT0Hv5mNAPgGgE+6+8qVNu/80Aj+2DCzY2Z2wsxOVEnNfiFE\n/+kp+M0sj07gf9ndv9ndPGdmu7v23QDmQ2Pd/bi7H3X3owM5iQtCvFPYMBrNzAB8CcBJd//cFaZH\nATzYffwggG9fe/eEEFuFbSQNmNldAP4ewLMA3vje/il0fvd/HcB+AKfRkfp4bycA0yMF/+it4Zpq\nC3OX6LhCMZw91mpy2agB/hPj4EFeK65uYTkPAK7btT+4fe/+G+iYaqwV1oVT1FZbuEBt77mR1wWs\nFcJZbE89xbPYyqu8luDQIFeDLZKJWaqGpUWPXG/q4FlxBi6jjQ6EXzMAVJtETs3z/bUy3Mdzq1zq\nw1Akk5S05AKAgXbYNhR5zbf91vXB7Z99+Jc4e6ESaQ72/9lQ53f3HwH0XflgLwcRQrzz0I9wIRJF\nwS9Eoij4hUgUBb8QiaLgFyJR+lrA0wG0iAJXiGSWDWaJHBkRNDzLs9HadZ4NOL/IpbnSQljJHG6s\nBLcDQDsi1+zauYParts/RW11Jl8BeOVM8F4rePgGTABAJsN9rLE3DEDW+BswOhCWTBuRmzxzsTtA\nI1marTqXKrPtsI/LlTIdUy9EMiB3czlvbYi3+SqRLDwAqK6GfZwe523I9uwNV/AsRDIL16MrvxCJ\nouAXIlEU/EIkioJfiERR8AuRKAp+IRKlr1KfwZC1sAQ3mOeuOMLyysgAl/PGRsaobbXBJZnpYe
5H\nnvhRW+b951oZXrhxJc/lnwN7ef+/VqTw52/fsS+4/TuP/R0dU3M+H4WInlpe4+MmRsKZmLGaDrmI\ndLhS4/Lbaxe5bLeyGp7junG5d0c4eRMAsGeSnx91569t+SJ/r/OkTufovhE6ZrUU3l+kReVb0JVf\niERR8AuRKAp+IRJFwS9Eoij4hUiU/q72G1DIhVd01yJtnLKF8Kp+O8vdX23wFfFspFXTADkWABTy\n4VZYxSGegDE1wevLnZ7jKsHlanjVHgAOvOcItb10Zja4/Y7fu4OOWb5wntqef+FValtd4u21ctmw\nErBjLKwCAIBF6i6+PseTp15f4EpAJhdOdBke5+fO9CSv45ipRlrEXeLn1cQyVzL2TofPq5umwvUu\nAeCZJ8PnTqXM/VuPrvxCJIqCX4hEUfALkSgKfiESRcEvRKIo+IVIlA2lPjPbB+Cv0GnB7QCOu/vn\nzewzAP4IwBv9iz7l7o/F9lXIGfZOhz9vGkskuwFAuR2WUFZ5Pgc8w2WjXKRe4MQol+2KJPlobW2V\njhkpRKa4zuutff+HP6W2I3NhOQ8AXj0TbicV6UCFkSJPPspmuW0okli1Wg7Lb2XSxgsAmpGslJEi\nn6s7buQJMIPDYWmxkeXHakUSv8pnuJSWWePv9Uxkrt53JJxJtH9qmo554ofhdm7NiGS+nl50/iaA\nP3X3J81sFMDPzey7XdtfuPt/6vloQoh3DL306rsA4EL3ccnMTgLYs9WOCSG2lrf1m9/MDgC4DZ0O\nvQDwCTN7xsweNrPJa+ybEGIL6Tn4zWwEwDcAfNLdVwB8AcAhALei883gs2TcMTM7YWYn1mpvo9KA\nEGJL6Sn4zSyPTuB/2d2/CQDuPufuLXdvA/gigNtDY939uLsfdfejw5FFGyFEf9kw+M3MAHwJwEl3\n/9wV23df8bSPAXju2rsnhNgqelnt/z0AfwjgWTN7qrvtUwAeMLNb0ZH/TgH44412NFA0/PrB8CGn\njH8rePZcWB56fZFnUdXb/KWNDvLPvFKkjVOrHc5iy0Y+Q+cXeDbayhr/GVRp8oy5rPP2VBMjYdnr\n/FyJjjlFZDkAaDvPRrt+J5dFrR2WxC6t8PkdiMiiO8fCmW8AUMxyHyt1IvnmIhmhkXqBtTX+Xg+3\nue3IvnFqu+mG8HLZC6+GZVsAWFgMy5HNJo+J9fSy2v8jhLviRTV9IcQ7G93hJ0SiKPiFSBQFvxCJ\nouAXIlEU/EIkSl8LeOZywNR0+JCrszxbamqaSDlDXOKZu8j3V4lkPuXyvHhjLROWI1uR/TXaXDZa\nrnE5b7jI35pKhe+zXAtLi7UGz3JsReQhjyhHKyVeJHViNJzFNk7aeAFAucqz6eaWuEQ4OsQz5iwT\nPkcs8poLOb6/Aa44opjn19LfOHI9tZVKYV8e+8FpOuaF82G5txp5n9ejK78QiaLgFyJRFPxCJIqC\nX4hEUfALkSgKfiESpb+9+jKGwlD4kMMT/HNo12hYrsmXucRWGORSztJS5DOvzeXDoWI4M6uV4/JK\nqc4z8AoD3I8C6TEHANkMl6KqCL/uWpNLnzE5zyI2b3DJsUVUwEI2UtMhktV3foFLfeWI1DpBsgHz\nkYqmmUjG3xr4sWYXufQ5t8LHLZ0PZ1z+n+eX6ZhV8nY2Wr1n9enKL0SiKPiFSBQFvxCJouAXIlEU\n/EIkioJfiETpq9TXahmWLpNDRnrCTYyGZZLCMJc1xge4pLQ8wcddvszlq5WVsPRSqnCpr17jxxrL\nD1PbYJ7736xz2S6fC0uVxcjHfKHAjZ3izWGGh7iPGfI2N1t8rgYi79k4yRIEgEsrXGJbuRzOnBwf\n53Nfa/Jz4FWSTQcAZy9w266JM9S2Zx/JJI3orMNEFm1U+bmxHl35hUgUBb8QiaLgFyJRFPxCJIqC\nX4hE2XC138wGADwBoNh9/t+4+6fNbAeArwE4gE67rvvdfSm2r2oNeOnlsK2yxF2ZnAmveg4N8WSJ\nqTHux65p/pm3XOIrvRcXw7aLC3yVemGR+5GNJBG1I9k2zXakThspgxf7lGd17gAgF0nEKUf8d5bY\nQ9p4AUCjwlftW5EWWq1s5P0k9Q7rLb4yv1jmPs5e5D5WGvw9q5X5e7Z/Ymdw+4HJeTpmlUzHaiTJ\naT29XPlrAD7g7r+JTjvue83sTgAPAXjc3Q8DeLz7txDiXcKGwe8dVrt/5rv/HMBHADzS3f4IgI9u\niYdCiC2hp9/8ZpbtduidB/Bdd/8JgBl3v9B9yiyAmS3yUQixBfQU/O7ecvdbAewFcLuZ3bzO7kC4\nioSZHTOzE2Z2YvVt3H0khNha3tZqv7svA/gBgHsBzJnZbgDo/h9cnXD34+5+1N2Pjgz09W5iIUSE\nDYPfzKbNbKL7eBDAhwC8AOBRAA92n/YggG9vlZNCiGtPL5fi3QAeMbMsOh8WX3f375jZjwF83cw+\nDuA0gPs32pFbDs1CWNaoFQ7TceVWWL/KNC7TMcM7uAw1s5cniezKcKlkdyks18zN8RZfF+f45+ta\niU9/s8kTneARiZC0oSpHZLRigR8rl4vIaFUubZVXw8fLO5fsJjK8XVc7w1ubzTa5j0XSHWwgUqdv\nIs9/nl4PPo83DvDz6pZb+Pl98z8O2+48VaVjTs+G5+P1tfN0zHo2DH53fwbAbYHtlwB8sOcjCSHe\nUegOPyESRcEvRKIo+IVIFAW/EImi4BciUcxjvZqu9cHMLqIjCwLAFICFvh2cIz/ejPx4M+82P25w\n9+ledtjX4H/Tgc1OuPvRbTm4/JAf8kNf+4VIFQW/EImyncF/fBuPfSXy483IjzfzK+vHtv3mF0Js\nL/raL0SibEvwm9m9Zvaimb1iZttW+8/MTpnZs2b2lJmd6ONxHzazeTN77optO8zsu2b2cvf/yW3y\n4zNmdr47J0+Z2X198GOfmf3AzJ43s1+Y2b/pbu/rnET86OucmNmAmf3UzJ7u+vEfutuv7Xy4e1//\nAcgCeBXAIQAFAE8DONJvP7q+nAIwtQ3HfT+A9wJ47opt/xHAQ93HDwH4823y4zMA/m2f52M3gPd2\nH48CeAnAkX7PScSPvs4JAAMw0n2cB/ATAHde6/nYjiv/7QBecffX3L0O4KvoFANNBnd/AsD6ot59\nL4hK/Og77n7B3Z/sPi4BOAlgD/o8JxE/+op32PKiudsR/HsAnL3i73PYhgnu4gC+Z2Y/N7Nj2+TD\nG7yTCqJ+wsye6f4s2PKfH1diZgfQqR+xrUVi1/kB9HlO+lE0N/UFv7u8U5j0nwL4EzN7/3Y7BMQL\novaBL6Dzk+xWABcAfLZfBzazEQDfAPBJd1+50tbPOQn40fc58U0Uze2V7Qj+8wD2XfH33u62vuPu\n57v/zwP4Fjo/SbaLngqibjXuPtc98doAvog+zYmZ5dEJuC+7+ze7m/s+JyE/tmtOusd+20Vze2U7\ngv9nAA6b2UEzKwD4A3SKgfYVMxs2s9E3HgP4MIDn4
qO2lHdEQdQ3Tq4uH0Mf5sTMDMCXAJx0989d\nYerrnDA/+j0nfSua268VzHWrmfehs5L6KoB/t00+HEJHaXgawC/66QeAr6Dz9bGBzprHxwHsRKft\n2csAvgdgxzb58d8BPAvgme7JtrsPftyFzlfYZwA81f13X7/nJOJHX+cEwC0A/qF7vOcA/Pvu9ms6\nH7rDT4hESX3BT4hkUfALkSgKfiESRcEvRKIo+IVIFAW/EImi4BciURT8QiTK/wP10I5rKi4+IAAA\nAABJRU5ErkJggg==\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "plt.imshow(x_train_cifar[0])\n",
+ "plt.show()\n",
+ "plt.imshow(x_cifar_gamma.numpy())\n",
+ "plt.show()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Cool, the sampled Gamma value was greater than 1, so the image became a little darker. It's important to note that the gamma value will be randomly sampled **every time you call the transform**. This means every sample will be different. This is a good transform to make your classifier robust to different inputs. Let's do the other transforms:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 20,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAHo5JREFUeJztnWuMnOd13/9nbrs7s1dyV8u7KEq0U1pVJJeRlFqSVRs2\nFCWAbSBRY6CGPhhhPqRGDaQfBBeoXfSLU9QOjKJ1QNdC5MDxpbUNC4aa1padCEFdW5SqCyXKujC8\ninshd5d7mfvM6YcdFxT9/J8dcndnJT//H0Bw9j3zvO+Z533PvDPPf8455u4QQqRHZqsdEEJsDQp+\nIRJFwS9Eoij4hUgUBb8QiaLgFyJRFPxCJIqCX4hEUfALkSi59Qw2swcAfAlAFsB/dffPx55fKBR8\noFgM2rzdvubjZ43/OjGf5e9ruaxxW4aPMwuPY9s7RmpqtlrUFvvhZS6b5UYyMPZLzrbzuc8Ynw/H\nte8zGzkvkd1F/Qf4HLPpz0bOc8xmkfmIzWPMR2aL7c/JmEsLi1haqcQO9v+57uA3syyA/wzgQwDO\nAXjazB5395fZmIFiEffce1/QVqmscCdJkA9m+QWxY6hAbeMjfdQ2MTxAbX3Z8HTl+vj+EAnUi/NL\n1Fbn7wvYNjLID9dqBrdXa3U6phKxDfTzeWyBX5wr1Upw++hI+M0fABC52Ou1BrVlI5cxe7MZGeR+\nDJW4rVDg53qlyufRI28ayIT9r0XOS8PD8f3v/+Jb/DhXH7brZ/4qdwJ43d1PunsdwDcBfGQd+xNC\n9JD1BP9uAGev+PtcZ5sQ4h3Api/4mdkRMztmZsfqdf4xRgjRW9YT/OcB7L3i7z2dbW/B3Y+6+2F3\nP1wo8O+PQojesp7gfxrAQTO7ycwKAP4QwOMb45YQYrO57tV+d2+a2b8E8D+xKvU96u4vxcY0m03M\nzs4Ebe1meJUanZ0HiXyQqLT5S7M+vpq73Oaqw1KbyGiRaSzX+esq1/iSfpMcCwAuTlET+slkNVt8\nfzFpq5znakW5zlfgm+3wa7Man/tMRMFsROZxgKgwAFAjssmlC1xZKEYUDos4mYm9gAxX31bIddBs\n8usjS15zvVblPlzFunR+d38CwBPr2YcQYmvQL/yESBQFvxCJouAXIlEU/EIkioJfiERZ12r/tWIA\nIgl1lGGivEwO5umY8ZF+ahsYiEg5keyrCpG2qhGZkuRfAAAKfdx/RKQ5b/PjjYyEX1uzwfdXyHM/\nIomHyOYjElsj7GMjsr9iZH+5Ae7jQGRc02rB7ZlIlmAzcg3Ert9SkSf9LJXDfgBAg0xyRB3E0ko4\ncap9DdmxuvMLkSgKfiESRcEvRKIo+IVIFAW/EInS49V+R44UaitEPNk9HF7pHSvw9658ZEV8+TJf\ncm5Flucr1fBKaiayaD88yJWFXGSVemGZJ2jkIvkj46Xw8RaX+WpzrFxUJVJPjNWRA4BBkhzTaPBk\noExkpbpQiCgSJIkIAHJkyZypEQBQyPHzkokcq7ayTG2RimfoI5dxM6JILFbDfrSuoRSm7vxCJIqC\nX4hEUfALkSgKfiESRcEvRKIo+IVIlB5LfYYBkhmRj8hGQ31hNydKXPNqRWSjSG4JsrHWWwPh49U8\nIjXF2oZFOtS0IlKURzI+pi6Vw/trctloKdINpxLRjkp9kSKKDdKuK9KTK6JgIhuR+spl7n8pH+7A\nlI/4UYnUVqw0+Xy0I/tcqPDzuVgL77NS5/tjCmysVuPV6M4vRKIo+IVIFAW/EImi4BciURT8QiSK\ngl+IRFmX1GdmpwAsYVU9a7r74djzsxlgmMhlxUhxtH6SvZfJcFmjGKmPV4/IV+2I5OhE0ot0rUIr\nUrSuHZH6PFI8z7NcFFsiWXOtdiRbMSIPxdqGLZPMMgCYWgpnCuYi52wwXJZu1Y9LPGOuWuZZiXuG\nJ4PbJ7eN0zFW4i3baovcj+UK92OpwufqMpnHSyuR65SczkbkfF3NRuj8/8zdL27AfoQQPUQf+4VI\nlPUGvwP4kZk9Y2ZHNsIhIURvWO/H/nvc/byZ3QDgh2b2irs/deUTOm8KRwBgINL6WAjRW9Z153f3\n853/ZwB8D8CdgeccdffD7n64L9IcQgjRW647+M2sZGZDv3wM4MMAjm+UY0KIzWU9H/snAXzPVrPg\ncgD+2t3/JjYgnzVMDIXv/kN5LmsM9oelLYtk0yGSYWWRwoi1SIYbe6fcXuStwQYH+BRfXuaS0kgp\n0vopUlTz/Fy4UGelyaW+XEQdmsxHWldluR9vEvmtGSmQGlHzUIpIt3fdeBO1Xb4Uvq68yl/02BiX\nUqsVfr9crvDXVohkd06SfnRjQ3QI5oh0mLuGfnjXHfzufhLAb17veCHE1iKpT4hEUfALkSgKfiES\nRcEvRKIo+IVIlJ4W8MxmDGPFsGSTa/LedH258HtUqcB/MViLZJw1IsU9x4a4bMcUwlokY67R5IUb\nS0Uu552f5+POXuLZY5dJ0UdSTxMAMJ7j/n/81l3U9q6d4eKYAPAXL0wHtz87xeXNViTLMaKUYXFp\ngdrKpHAmyy5ddYTPx0AfH1eI9I4ciEjPrNjs3olhOqZ0OSzpFkishNCdX4hEUfALkSgKfiESRcEv\nRKIo+IVIlJ6u9uezWUyOhrMVygt8dTtj4feoWF20SqR2Xo7sDwBWIu2Y2KhYC6exYa4eRLpC4ewc\nz3JZqkUycUgrL4skfJQimT37cryw3uAcT4I6VBoNbp8qcT8ulrkS0IgkMx2f4lXkMqSmXSNWW2Io\nklGT4av9I4M8+WiozW1Vcq16gytgN42F/e+7hsQe3fmFSBQFvxCJouAXIlEU/EIkioJfiERR8AuR\nKD2V+nK5LLZvD0tA20tceslmwm5eWlqiYxrlcOIDAGQiiT3tSAKGk6SJIZKsBAB18Nf1izdnqK0a\nadeVjyTisFpxfQV+qoczfD5+fIZLjo0mv3dUS+GklPGIHGbgiU6NFpeCqxEb66DViLRss2ZEg40o\naQWLtHqLSHB5cs6aDf66IjlQXaM7vxCJouAXIlEU/EIkioJfiERR8AuRKAp+IRJlTanPzB4F8HsA\nZtz91s62bQC+BWA/gFMAHnL3+bUPZwCR7TL5a1cd+/v4mFJkXC6i17AMQgBoIKyv9A3wo83O8Ky4\n6jKvxbe7yP2o82Q6Kum9ezevB5eJ7LARyWK7vMJfWy4Ttg3nufQ5PjxJbYdu3kltr5w9QW0nziwG\ntxcircbcuUwckzezOT5X+Ty3tUnmYTtynVpMc+ySbu78fwnggau2PQLgSXc/CODJzt9CiHcQawa/\nuz8FYO6qzR8B8Fjn8WMAPrrBfgkhNpnr/c4/6e4XOo+nsNqxVwjxDmLdC37u7oj0wzazI2Z2zMyO\nLZZ5ZRIhRG+53uCfNrOdAND5n/5I3d2Puvthdz88HOljL4ToLdcb/I8DeLjz+GEA398Yd4QQvaIb\nqe8bAO4HMG5m5wB8FsDnAXzbzD4J4
DSAh7o5WMsd5Wo4U8kaXF4BwrLMcplLTbVmpIWW8cyy5Srf\n52I1/LVl9w4u43iL72/3IDXh4Dj3f6XGbTce2BPc3u9cVpxd5LJXaSRSzHKO3ztumdwW3H5xhX/1\nu/U9+6lt+3iR27bvo7a5uVeD2+dX+PWWj8iRmRwPmXosWzSShdciWYSxO7NHsk+7Zc3gd/ePE9MH\n1310IcSWoV/4CZEoCn4hEkXBL0SiKPiFSBQFvxCJ0tMCngDQtLDm4ZGClU5UjWIfl2SGBnkxyHOX\nuMxzZpYXrMyS2Zq9xBMaa/Nc6js4yiW7f37/dmp77kyZ2sb2hSW2PZP8F9hnIoVEb9jOJbZspP9c\nP8kGPD11IbgdAPIDl6nt7KVpajt5ns9xPhe+RkaLXCorV6+9iCsAWKSAZzui9WXIuCzpuwgArfUr\nfbrzC5EqCn4hEkXBL0SiKPiFSBQFvxCJouAXIlF626svm8H20bB01MjxvmSLpNClN7g8OL+yQm1n\np3mPv0qNF7MstMPSy8w0z5gbz/P31/2TI9Q2vH8XtfVdPk9tGAif0rH3v58OGfyHk9SWr3OJDeAZ\nesuLYdvNQ1zCrDYjclibF0m9ZYhLjqNj48HtCxcv0jFvzoSLfgJAw3gGZ6URqaya4drcYCHsf70a\nyzwMn+eY3PgrLnX9TCHErxUKfiESRcEvRKIo+IVIFAW/EInS09X+ZquFywsLYUfqPFmlwFpo8YVX\n5DP8fa1a46vzQ/18SoaJLdZ264ZtA9R28x3vpban3+D7fP51row8uDec2IMZvjKff8891AZwZQRN\nrjoMOlFiTvMkomKFv+ZtOyeordHkVaHz2RuC21dmTtMxj3/zcWo7dZ4nH2UjLbkQaa9VIUJAPdZW\nrhm+BtosCy60j66fKYT4tULBL0SiKPiFSBQFvxCJouAXIlEU/EIkSjftuh4F8HsAZtz91s62zwH4\nIwCznad9xt2fWHNfALJEvWiRVlgA4ETyyJA2XgDQYvIggCU+DJFuUnDSVmlygCeWvP9+Lue9+w6e\nbPNf/tNXqW1PpOFprhauZ/fqiyfomHft/21qA97FTbnbIuOIlLYnkii0xGXFlTleW7G071a+z/5b\nwmMmeIJRZphLfa0ZnrxjkZp7DSLNAYC1wglNFpHtms3w9X0NSl9Xd/6/BPBAYPufu/vtnX9rBr4Q\n4u3FmsHv7k8BmOuBL0KIHrKe7/yfMrMXzOxRMxvbMI+EED3heoP/ywAOALgdwAUAX2BPNLMjZnbM\nzI4tlSNfqIUQPeW6gt/dp9295e5tAF8BcGfkuUfd/bC7Hx6KLFQJIXrLdQW/me284s+PATi+Me4I\nIXpFN1LfNwDcD2DczM4B+CyA+83sdgAO4BSAP+72gEakiFakHh+TUCLl8dCuRvYXkUOGi1y2u6Ev\nLMncddsOOua3H7yP2qbO8Ayx/havQXjrXp7h1iLt0A7s4T4CPJsuTmwcm8dROuIXr71BbT99+ufU\n9rsf4l8nJw6RWojls3RMgZcLxA038jZw7YjU16zzi7VZC1+rCxf566qthEO37bwO4tWsGfzu/vHA\nZi5CCyHeEegXfkIkioJfiERR8AuRKAp+IRJFwS9EovS0gCfc0SYtmcp1rr8VyI+DclleMDGX4VlU\ne0e4RNVX4HLNgV3hXzG//8P30jHYzaW+l//Hn1HbLbsHuR//5BC15fb8RtgwzLPYUJvntipP6zj9\n+qvUNn3mH4Lbm3UuYQ6O8mKnOyb5pfrKyWPUNnHzgbBhkcusXolk7q3wQrNN5+M80kWr2Be+Bxcm\n+b358nx4h5GQ+BV05xciURT8QiSKgl+IRFHwC5EoCn4hEkXBL0Si9FTqMzMUiBYxV+YySaseljUG\n+nkGXjbDpcOJ4QK1nZsLF8AEgEO/9YHg9m3/9BN0DDBCLfVFLnttH+JSX+5wpOBmPpy99+yPn6RD\nKis/pba5+XBvRQCYOst79WVb4fNZ7Oda1IGDPPPwvjsOUlsjN0RtyId79WGAy735SM/AlXO8kGib\nq8toRG6zS2RKSqM8PHftKga35wvd38915xciURT8QiSKgl+IRFHwC5EoCn4hEqWnq/3ttqNKVlKL\nkYJ81hde7S9keL2ydovX8CsWeZbF79/LV9Lv/Rd/QCx8RR+LvE1WLsN9vLjIV5WbLz5Pba8tPBvc\n/oP/9r/omKHICnylXqO23RM8EWdsKJyM9YvzF+mYWmQ+dtx0E7XtuyuswqwSXhX36dfoiOUqvz7m\nalxFskj2TpVPI1bILr3M5+MfkVKIpPNXEN35hUgUBb8QiaLgFyJRFPxCJIqCX4hEUfALkSjdtOva\nC+BrACax2p7rqLt/ycy2AfgWgP1Ybdn1kLtHisEBQBttVucs0mbIWmEtpNHkmRQW6cmVL/CGoR+4\n/25qA8KtmuZOPkNHTJ3kbQwrEf1nYZ5LfSeOv8zHefi15ds8cWooy+8BI0M8CWrnGO9rdWY6fCk0\nI23Zlpd4UtWrJ05TG/B/qGVhIZyYVMrz66PRxzvOz7Vmqa2/wCXTYqQN3EAuHIZLFd6uq9kOx4sj\n0ovuKrq58zcB/Km7HwJwN4A/MbNDAB4B8KS7HwTwZOdvIcQ7hDWD390vuPuzncdLAE4A2A3gIwAe\n6zztMQAf3SwnhRAbzzV95zez/QDuAPAzAJPufqFjmsLq1wIhxDuEroPfzAYBfAfAp9198UqbuzsQ\n/rJhZkfM7JiZHVuOFEkQQvSWroLfzPJYDfyvu/t3O5unzWxnx74TwExorLsfdffD7n54cIAvHgkh\nesuawW9mBuCrAE64+xevMD0O4OHO44cBfH/j3RNCbBbdZPW9D8AnALxoZs91tn0GwOcBfNvMPgng\nNICHujtkWIqIZeERJQQt0voLAOrgtl0jvObb1/7qcWq7Ycdzwe179t1Ix1SXufpZKPBPQiMlnjFX\nyHBJaaQQlpR2b+c1AcvLvJZgMcsvkenZRWqr18PzP9zHJa/6Cpe2jj/zErWdfincGgwAqk0ip+b5\nHLYy/J5YnOT+o8iv4Uwfl6X7iWy3DfxYd9weXmIrRSTFq1kz+N397wGwXMUPdn0kIcTbCv3CT4hE\nUfALkSgKfiESRcEvRKIo+IVIlJ4W8HTnBQYLkcyygSzJVOL1EuFZLqO161x2mZnj0tzSxbng9lKD\nS15tcElpx/Zt1HbDvnFqqzP5CsDrZ4K/tYpme2Ui0mEtUhEya/wEDPWHMyAbkQKTuYh0i0iWZqvO\npcpsO+zjQqVMx9QLkQzInVzOWynyX7AuETkPAKrLYR8nIpL07j3hCp75iIR5NbrzC5EoCn4hEkXB\nL0SiKPiFSBQFvxCJouAXIlF6KvWZZZC1sAQ3kOeuOMLyymA/l/OGB4epbbnBJZmJEvcjT/yoLfD+\nc60Mz7JazHP5Z/+eA3yfkcKfv3XX3uD2Hzzxd3RMzfl8FCJ6anmFjxsdDGcl9uf4/SYXkQ4Xa1
x+\nOznLZbvF5fAc143Lvdv2URN2j/Hro+78tS3M8nOdJ8mMQ3t5JubyUnh/EUXxV9CdX4hEUfALkSgK\nfiESRcEvRKIo+IVIlN6u9gMo5MIruiuRNk5ZUuuuHakvt9zgK+LZSKum/khdvUI+3Aqrr8gTMMZH\ni9R2epqrBJer4VV7ANj37kPU9vqZqeD2u953Fx2zcOE8tb38yhvUtjzP22vlsmElYNswr01okbqL\nb07z5Kk3L3IlIJMLJ7qURvi1MzEWTkoCgEw10iLuEr+uRhe4krFnInxd3TJ+Ax3zwrPha6eywv27\nGt35hUgUBb8QiaLgFyJRFPxCJIqCX4hEUfALkShrSn1mthfA17DagtsBHHX3L5nZ5wD8EYDZzlM/\n4+5PxPZVyAN7JsLvN4153qqp3A5LKMs8nwOe4bJRLlIvcHSIy3Z9JPloZWWZjhksRKa4zuut/fhv\nf05th6bDch4AvHFmNrg90oEKg5EWWtkstxUjiVXL5bD8Vq5yWa4Zadk22Mfn6q6beQLMAGl71sjy\nY7UiiV/lM1xKy6zwcz0Zmat7D4UzifaNT9AxT/3theD2RkQyv5pudP4mgD9192fNbAjAM2b2w47t\nz939P3Z9NCHE24ZuevVdAHCh83jJzE4A2L3ZjgkhNpdr+s5vZvsB3AHgZ51NnzKzF8zsUTMb22Df\nhBCbSNfBb2aDAL4D4NPuvgjgywAOALgdq58MvkDGHTGzY2Z27PIK/8mtEKK3dBX8ZpbHauB/3d2/\nCwDuPu3uLXdvA/gKgDtDY939qLsfdvfDIyX+m2khRG9ZM/jNzAB8FcAJd//iFdt3XvG0jwE4vvHu\nCSE2i25W+98H4BMAXjSz5zrbPgPg42Z2O1blv1MA/nitHfUXDL9xU/iQ48alnBfPheWhN+d4FlW9\nzV/a0AB/z1uKtHFqtcNZbNnIe+jMRZ6NtrjCZZlKk2fMZZ23pxodDMte56eX6JhTRJYDgLbzbLRd\n27ksau2wJHZpkc9vf0QW3T4cznwDgL4s97FSJ5JvLpIRGqkXWFvh57rU5rZDe0eo7ZYbw8tlr7wR\nlm0B4OJcWI5stnhMXE03q/1/j3BXvKimL4R4e6Nf+AmRKAp+IRJFwS9Eoij4hUgUBb8QidLTAp65\nvGF8InzI5SmeLTU+QaScIpd4pmf5/iqRzKdcnv8QqZYJy5GtyP4abS4bLdS4nFfq46emUuH7LNfC\n0mKtwbMcW00uD3lEOVpc4r/YHB0KZ7GNkDZeAFCu8my66XkuEQ4VecacZcLXiEVecyHH99fPFUf0\n5fm99D2HdlHb0lLYlyd+cpqOeeV8WO6tMmkzgO78QiSKgl+IRFHwC5EoCn4hEkXBL0SiKPiFSJTe\n9urLGArF8CFLo/x9aMdQWK7Jl7nEVhjgUs78fOQ9r83lw2JfODOrlePyylKdZ+AV+rkfBdJjDgCy\nGS5FVRF+3bUmlz5jcp5FbN7gkmOLqICFLH9diGT1nb/Ipb5yRGodJdmA+UhF00wk428F/FhTc1z6\nnF7k4+bPhzMu//fLC3TMMjmd15LVpzu/EImi4BciURT8QiSKgl+IRFHwC5EoCn4hEqWnUl+rBcxf\nJoeM9IQbHQrLJIUSlzVG+rmktDDKx12+zOWrxcWw9LJU4VJfvcaPNZwvUdtAnvvfrHPZLp8LS5V9\nkbf5QoEbV4s3hykVuY8ZcpqbLT5X/ZFzNkKyBAHg0iKX2BYvhzMnR0b43Nea/Bp4g2TTAcDZC9y2\nY/QMte3eSzJJIzpriciiGZLFGHxu188UQvxaoeAXIlEU/EIkioJfiERR8AuRKGuu9ptZP4CnAPR1\nnv/f3f2zZrYNwLcA7Mdqu66H3H0+tq9qFXj1tbCtMs9dGZsMr3oWizxZYnyY+7Fjgr/nLSzxld7Z\nubBt9iJfpb44x/3IRpKI2pFsm2Y7UqeNlMGLvcuzOncAkIsk4pQj/jtL7CFtvACgUeGr9q1IC61W\nNnI+Sb3DeouvzM+VuY9Ts9zHSoOfs1qZn7N9o9uD2/ePzdAxy2Q6chu82l8D8AF3/02stuN+wMzu\nBvAIgCfd/SCAJzt/CyHeIawZ/L7KcufPfOefA/gIgMc62x8D8NFN8VAIsSl09Z3fzLKdDr0zAH7o\n7j8DMOnuFzpPmQIwuUk+CiE2ga6C391b7n47gD0A7jSzW6+yOxCuImFmR8zsmJkdW1iurtthIcTG\ncE2r/e6+AOAnAB4AMG1mOwGg839wdcLdj7r7YXc/PDoY6XgghOgpawa/mU2Y2Wjn8QCADwF4BcDj\nAB7uPO1hAN/fLCeFEBtPN4k9OwE8ZmZZrL5ZfNvdf2BmPwXwbTP7JIDTAB5aa0duOTQLYVmjVjhI\nx5VbYf0q07hMx5S2ccljcg9PEtmR4fLhzqWwXDM9zVt8zU7z99eVJT79zSZPdIJHJELShqockdH6\nCvxYuVxERqtyaau8HD5e3rlkN5rh7braGd7abKrJfewj3cH6I3X6RvNc6tsFPo839/Pr6rbb+PV9\n6z8O2+4+xb8mn54Kz0f/M5Hr5irWDH53fwHAHYHtlwB8sOsjCSHeVugXfkIkioJfiERR8AuRKAp+\nIRJFwS9EopjHejVt9MHMZrEqCwLAOICLPTs4R368FfnxVt5pftzo7hPd7LCnwf+WA5sdc/fDW3Jw\n+SE/5Ic+9guRKgp+IRJlK4P/6BYe+0rkx1uRH2/l19aPLfvOL4TYWvSxX4hE2ZLgN7MHzOwXZva6\nmW1Z7T8zO2VmL5rZc2Z2rIfHfdTMZszs+BXbtpnZD83stc7/Y1vkx+fM7HxnTp4zswd74MdeM/uJ\nmb1sZi+Z2b/qbO/pnET86OmcmFm/mf3czJ7v+PHvOts3dj7cvaf/AGQBvAHgAIACgOcBHOq1Hx1f\nTgEY34Lj3gfgvQCOX7HtPwB4pPP4EQB/tkV+fA7Av+7xfOwE8N7O4yEArwI41Os5ifjR0zkBYAAG\nO4/zAH4G4O6Nno+tuPPfCeB1dz/p7nUA38RqMdBkcPenAFxd1LvnBVGJHz3H3S+4+7Odx0sATgDY\njR7PScSPnuKrbHrR3K0I/t0Azl7x9zlswQR3cAA/MrNnzOzIFvnwS95OBVE/ZWYvdL4WbPrXjysx\ns/1YrR+xpUVir/ID6PGc9KJobuoLfvf4amHS3wHwJ2Z231Y7BMQLovaAL2P1K9ntAC4A+EKvDmxm\ngwC+A+DT7r54pa2XcxLwo+dz4usomtstWxH85wHsveLvPZ1tPcfdz3f+nwHwPax+JdkquiqIutm4\n+3TnwmsD+Ap6NCdmlsdqwH3d3b/b2dzzOQn5sVVz0jn2NRfN7ZatCP6nARw0s5vMrADgD7FaDLSn\nmFnJzIZ++RjAhwEcj4/aVN4WBVF/eXF1+Bh6MCdmZgC+CuCEu3/xClNP54T50es56VnR3F6tYF61\nmvkgVldS3wDwb7bIhwNYVRqeB/BSL/0A8A2sfnxsY
HXN45MAtmO17dlrAH4EYNsW+fFXAF4E8ELn\nYtvZAz/uwepH2BcAPNf592Cv5yTiR0/nBMBtAP5v53jHAfzbzvYNnQ/9wk+IREl9wU+IZFHwC5Eo\nCn4hEkXBL0SiKPiFSBQFvxCJouAXIlEU/EIkyv8DXuZSIv9XPLwAAAAASUVORK5CYII=\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "from torchsample.transforms import AdjustBrightness\n",
+ "\n",
+ "# make our image a little brighter\n",
+ "bright_tform = AdjustBrightness(0.2)\n",
+ "x_cifar_bright = bright_tform(x_cifar_gamma)\n",
+ "\n",
+ "plt.imshow(x_cifar_bright.numpy())\n",
+ "plt.show()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 21,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAHuRJREFUeJztnWuMZVd15//rvuveeld1t/vlftjGYBpsk8ZxhMeYISCD\nEtloNBZ8iDwzKI40GTSMkg+WRxqYb8xoIOLDCKkJVsyEEKwBhJXxhDQGYjGTGBrHT9pvt3G/qrrr\nXXXf9675ULcz7fb+76p+3Wpn/39Sq2/tdfc56+xz1j3n7v9da5u7QwiRHpmNdkAIsTEo+IVIFAW/\nEImi4BciURT8QiSKgl+IRFHwC5EoCn4hEkXBL0Si5C6ms5ndCeCrALIA/tTdvxR7f6FQ8IFyOWjr\ndru0XwbhXyFmSTsAFLPcls9SE/IZ434wk0U+Q41vr9Xhx+zO++Wy3GbkF5vdyC85u11uy0TGwyPj\nz/aXjfiOyPacDxUMkfEg7dksP2fZyDFb5FzHxtgjPjIvL2R70/MrWKw2Yjv7Ry44+M0sC+C/A/gY\ngKMAfmFmj7j7r1ifgXIZH7r9w0FbrbpM91WwdrB9zJq0z+5Kh9q2DlMTtg3xkzuQCQ9Xtliifbq5\nPLVNzVeprd7hn1Cbhvn+8u3wmNSbLdqn2giPLwAMlPgl0jI+xsuNWrB9bKRA+5jz7TUa3Jbv8m2y\nD8qxCh/D4cEitRXzA9S2GPGxa/x8drNh/xt1fl4aCG/vj//0IO1zLhfz2H8LgFfc/TV3bwL4SwB3\nXcT2hBB95GKCfzuAN8/6+2ivTQjxDuCyT/iZ2X1mdsjMDjWb/DFdCNFfLib4jwHYedbfO3ptb8Hd\nD7j7fnffXyjw72ZCiP5yMcH/CwDXmdkeMysA+DSARy6NW0KIy80Fz/a7e9vM/h2AH2JV6nvQ3Z+P\n9Wm32zg9PRW0dVp8NrpgYcmjlON9Wq06316eSyi1Np9hrXaIJGP8iWYp8k1nOWJrRqStuRPc/0om\nPCYt4jsA5DL8HtDIc9tyk49VC+GZ72KNz3pnsvygW5EBKURm0pfJJbIQkVkHIwoHiOIDAMhym0fk\nw6Vm2NZucx+z2bCK1Grw6/5cLkrnd/dHATx6MdsQQmwM+oWfEImi4BciURT8QiSKgl+IRFHwC5Eo\nFzXbf74YHDki22VIOwBMEknv6gEu9V01xOWfSE4HzHgiTo3ITSsRGa0bya/KFyMZYt2YjR/36AiR\njVp8e0WSWAIA7YgfmTy/fJhkGsl9wWA+IkfyfBoUI2ma9ZWwH7GM0EYkozKSDIiRMh+P+Ro/8GYn\nbIskn2JhpRFs73TWvw6H7vxCJIqCX4hEUfALkSgKfiESRcEvRKL0ebYfyJOEj3yGz4ZeWwlnwFwV\nSewpR2Y9l+e5re18SKqN8Gel5bnvI4PUhFyBKwszLCMFQD7DE2q2hUskYm4pUhKqwcejTpJOAKAb\nqUs3OhAex1aLZzPlc3x7+SKf+253+LEVSC3HWpOfs0KO3xNzkeuqucJL0WUisk+ZJCbFql/MNcLj\nex6T/brzC5EqCn4hEkXBL0SiKPiFSBQFvxCJouAXIlH6KvVl4RiycHJMOcvlmjGSd7KjxOWaVmSp\no1ZEosoS/wBeh60aSRLJ57hEVehGVniJ1ceLZHwcnQn70mnxY56LyF5LfFcYjiT2dEkiERc342Of\nKfLMnlqV9xvNhPdYzPIjq0akz0V+WtCJJKedrvPxnyHnZrnJT3StGx77ZiyT7Bx05xciURT8QiSK\ngl+IRFHwC5EoCn4hEkXBL0SiXJTUZ2ZHsKoGdQC03X1/7P3ZDDBO5LkR43LTQCFsy0QyAUcjy0zF\n6sh1EJH6yPJJ9TaXZLqRZaaazjP3Im6gG8k6myX14NqRU70SkT5bXe7IQoMf9+srxWB7MbK01mg9\nkl04z0XCxgr38ZqBrcH2HeMTtM9AZYbaqotValuscz/m6vyczZBs0RM1fswdC5+z2DJv53IpdP6P\nuPvpS7AdIUQf0WO/EIlyscHvAH5kZr80s/suhUNCiP5wsY/9t7n7MTPbDOCgmb3g7o+f/Ybeh8J9\nAFApxn7cKYToJxd153f3Y73/pwF8H8AtgfcccPf97r6/WOhrKoEQIsIFB7+ZVcxs6MxrAB8H8Nyl\nckwIcXm5mFvxFgDft1XJIQfgL9z9r2MdChnH9sGwFDWR5eUKR8KqESxarZDbspGMv2qDayV5Iolt\nK/FhHB6IZNMtc81xvMy/Is1Flt56dS4spS13+JJcxcg47s2El4UCgFzknL3eDI9JPSL1nVyhJgwV\n+Vh9bOsOaluaClc0zS7zrL4tm/m+GvlIxl9Ezitl+XWwcyjcb3KQ7+skKawaWbnsbVxw8Lv7awBu\nvND+QoiNRVKfEImi4BciURT8QiSKgl+IRFHwC5Eoff3VTS4DbB4IyxflRmTdvVzYzcoA0QAB1Gvc\nj4ZzKWdykH8eMhGwFsmkarT5cVXKXJeZmufH9hKR8wBghmQY1iJrEF4dWRXugWsi2tFmLgN+8/Xw\noBycjay5FymAmYtkfS4uzVFbrbEQ3leHZ+dl2nVqKxe5j4VI4c8KycIDgJaHx3/vBC9aOrQcHt+/\nPQ+pT3d+IRJFwS9Eoij4hUgUBb8QiaLgFyJR+jrbX8hmsGOoFLQ1O8u0XwbhKcw5PimL5Q6fXS2Q\nZbcAAJGadVkymbtE6uYBwESFD3Gtyz97X17gtvkWT/rpktneyGQzRrt8th/d49wWMf2zzOZg++tF\nPh7HG1yiaUWK0/19jV87WZK0tNLix9zsROpOsIsAwPgg3+ZoRBFaIbUhMy2efPTe4XCi1kB2/UX8\ndOcXIlEU/EIkioJfiERR8AuRKAp+IRJFwS9EovQ3sSeXweTkcNCWL3MtKpMJSy+nlnnRt1q9zbdH\npBUAaIHLdq1C+LNyMM+loRrCNeQA4B9OLlLbSqSuXiHLfRzOhPuViO8AMNnl+/rVVIXami2efHSa\naFvbytz3TGTZsGZEgl3JcNsykYkbkWPONfm1g0jyUYmMPQB0IzX8Ctnwuek0+Vix3LRIecq3oTu/\nEImi4BciURT8QiSKgl+IRFHwC5EoCn4hEmVNqc/MHgTwOwCm3X1fr20cwHcA7AZwBMA97s4Lqf0j\nGXQzYXnICzyjy8nSW6Uil08qRB4EgGKX98tGMv5qRMoplMLyJQBMTXHtpbHIJarrCzxDrN7mp61E\nFkO96Sr+OV+scz/mMUJtM1Vewy+bD18O47lB2mfr4AS13XjNKLU9e+IpanvyzfBxFzMROQ/c1mzz\nZc+M1JoEgGJkHa0OkR2bEdkuQ+7bFpFL376NtfkzAHee03Y/gMfc/ToAj/X+FkK8g1gz+N39cQCz\n5zTfBeCh3uuHANx9if0SQlxmLvQ7/xZ3P9F7fRKrK/YKId5BXPSEn7s7Iuthm9l9ZnbIzA7NVyMV\nY4QQfeVCg3/KzLYCQO//afZGdz/g7
vvdff9omU+WCCH6y4UG/yMA7u29vhfADy6NO0KIfrEeqe/b\nAO4AMGlmRwF8AcCXADxsZp8F8AaAe9azs04XWK4TuSxSUBEIL3m1VOMVPJttnnHWzoSLiALAQiQb\ncK4Z9nHnVdwP6/Djun6AF2i8eWye2haa/Nh27d0VbB/q8iKXJ5a4/6VRLvW1ZrgWtW8ynA14vMaX\nL3vfe/nyVNjM+23azKXW0zPhMZ6uckksn+fjixz3sR5JqWNyHgB0SZZpNpJB2CXftM8jqW/t4Hf3\nzxDTR89jP0KIKwz9wk+IRFHwC5EoCn4hEkXBL0SiKPiFSJS+FvB0czSzYSmt2+CZZe7hjKjByLpv\nw4Ncrnl1lmf8vTzDpblcLiwPFU7zQpzdyPY+WOHy2223c/lt4VhEmttNstg2X0v7bDp5burG/6c8\nEbk/tHhxUrY44NjUFO8z9Do1LS3wcXzpOJff8rnwdTA5wMdwuc5/jNbORbL6IoVEu10uIWdJ0dhI\nzU80SGbq+nP6dOcXIlkU/EIkioJfiERR8AuRKAp+IRJFwS9EovR3rb6sYdMokdlIYU8AmF8mGUwt\nLp/MLnNp6LVpniG20uRyzQBZL27qJO+zK6K9vGszlxzxLi6jZSLZjK2hsCxa/N1P0j7lX73A/WhG\nbLbAbfNh+SozFjnmDpfRhsALf+4aGaO2kbGwH0unTtE+b07za6ea4YU4V2JL/EWug0FSdLUZyTAt\nFIjUdx5an+78QiSKgl+IRFHwC5EoCn4hEkXBL0Si9HW2v9PpYGE+XJtusMZnjgcs7GYmy2fZCxl+\naLUGn80dK/DZ3Akyw9po8ynWrRPctu2Dv0ltM69yReKXL/MZ+I9v3xo2vBGp7nbTv+A2nIzYnonY\nyEz1i5HtrUTqOO6+ipomW3yZr8kPk4Smk4dpn+qf/29qmzvGfcwW+bXTBbfNk1p9rcg1nGmH+3TO\no4if7vxCJIqCX4hEUfALkSgKfiESRcEvRKIo+IVIlPUs1/UggN8BMO3u+3ptXwTw+wDOZEc84O6P\nrrktOPIIy3Neq0X6hT+jss7lsFaW13WbAV+uK9uM1GhrhuWr3cVw8ggA3PGRd1Nb8QM82ebRr/45\ntb2nGFnwtBquJ1h9mktb5ev+JbW584WZzGIS4d+Hm/e+zLsQGRgAML3CbXt+m5q8cFuw3bYcpH3a\nf/19amvMRuTIyDJwTq4dAMh4+PrJkVgBgGY3fA2cz3Jd67nz/xmAOwPtf+LuN/X+rRn4QogrizWD\n390fB8DLuwoh3pFczHf+z5nZM2b2oJnxhGohxBXJhQb/1wDsBXATgBMAvszeaGb3mdkhMzu0UI1U\nOxBC9JULCn53n3L3jrt3AXwdwC2R9x5w9/3uvn+k3NdUAiFEhAsKfjM7O3vkUwCeuzTuCCH6xXqk\nvm8DuAPApJkdBfAFAHeY2U1YVRaOAPiD9ezMAGTJMkOdFpc1LBMWMEoRYSNTj9iIDwAwVqEm7MxW\ng+237+PDuPXu3+IbfJ1nMo62+Bzru/dEPrOzYdmovJ1nvgHh41qbWD9Sg9B30h4nXzpObf/n0C+o\n7e6P8ZqG2Rv2hQ3Np2mf3FCD2sav5ZJ0K8v98Ab/ytuphyXrhWnaBU2yQlzHeRydy5rB7+6fCTR/\nY917EEJckegXfkIkioJfiERR8AuRKAp+IRJFwS9EovT1VzfuQLcdliKqTe5KrhLO0Mtl+GdXrs4L\nJr5rkMsuAwVue9dV4SXFbvwE/Y0TsPNfU9Phv/oP1LZvG88UxK08UxB7iLQ4vo33aR2jJls+QW0L\nL/NMwZk3wtl73SaXN8vj/Jxtv4rLioff+Am17bv+urBh9jTtk1uKXB8LS9TmxjP+ukSuBoByMWwb\nILVYAWCeLPGVza1f6tOdX4hEUfALkSgKfiESRcEvRKIo+IVIFAW/EInSV6kvA0MpE5ZzXq/xTLt6\nJ2wrl/hnVz7PpbLtQ9z22hzf5jW/eVfYcMe/pX3cucTms1y+GhomWXEA8JFPcFs+LAMe+eHf0C71\n5Z9T20ykqObpN7hEWGyHM9wqJS557b1+mNpu3c/X6kNhM7dlyVp9ZZ65N7TMbdOv8Wun0ObhFFGe\nsZQLX9+lSS45bt8R9jFfkNQnhFgDBb8QiaLgFyJRFPxCJIqCX4hE6etsf7frqNfC9cqG8nyWslIM\n9ymBL9flHT4rmy3zGefPfPh91Hb9v/l0eF+RGX1b/Cm15bO8HtyxyPJl1zzxM2qbXwrXpvvhw/+X\n9hkliSUAUG3xunS7NvFzNjkYnt5+5gSfwV7O8/M5cQ1J0AFQ+q1/RW3AeLj510/RHvN1vtTb8RZX\nAjJdfi+tVfPUNpsNj1Wrxc/Lvonw2He666/HqDu/EImi4BciURT8QiSKgl+IRFHwC5EoCn4hEmU9\ny3XtBPBNAFuwujzXAXf/qpmNA/gOgN1YXbLrHnefi2+tiw7Cdc6sy2WjHJE82pGlicy4rVTkssuN\nH7mZ2oChYGvz1YO0x/zLPGmmWuc132pzXOrb9cwL1LaYCS/LVSaJNgAwORBZ2qzCx3HXKL98XpkK\nS06tFs9wWVnk4/Hi87yW4HXdn1Lb3Hy4ZuBIno/H0sB2ajsaWSKuzBVCDA7z++xkNnw9LjT4eDR9\nOdjuEf/OZT13/jaAP3L3GwDcCuAPzewGAPcDeMzdrwPwWO9vIcQ7hDWD391PuPuTvddLAA4D2A7g\nLgAP9d72EIC7L5eTQohLz3l95zez3QBuBvAEgC3ufuZZ7CRWvxYIId4hrDv4zWwQwHcBfN7d37JA\nsLs7EP6yYWb3mdkhMzu0UI/UohdC9JV1Bb+Z5bEa+N9y9+/1mqfMbGvPvhVAcDVxdz/g7vvdff9I\nKVLORAjRV9YMfjMzAN8AcNjdv3KW6REA9/Ze3wvgB5fePSHE5WI9WX0fAvB7AJ41szOpUA8A+BKA\nh83sswDeAHDP+nYZfvT3Nv9KkMuF5YtOm8satQyXqLZF6uP9r4d+TG0TW38dbN9+9R7ap70UfCAC\nABQLBWorlUvUFlumbJx02z3OT3V1eYXaBiO3h+PTPEOv2QzXpZso8A1Wl3jG3As/f4napp4+yrfZ\nDUt6nQKvGdnIRZaOI7XzAGBlhGfUWZFf3yPN8Jjs6fAn5d/4QHjpuEqZH9e5rBn87v4zAGyLH133\nnoQQVxT6hZ8QiaLgFyJRFPxCJIqCX4hEUfALkSh9LeDpADrdsHxRykaypTIkE5BIgADQIZlSAOAN\nLrucmpuitur0bLB9S41nnLXA5bxt42PUNrwtkiIWKdL4xrHFYHs3ku2VNS4p1WKFUNv83jFaDAtE\nTXL+ASBHlmUDAJBrAAC6LV74M0+Kas5HMiqXKlz6LO2eobbFMT7G85FVtBZJLuyWyCWwY+fVwfZC\nREo9F935hUgUBb8QiaLgFyJRFPxCJIqCX4hEUfALkSh9lfoMGWQRTjur5LkE1EU4e2ykyN0frl
So\nbbbJZZ4dZS4blTycIdad5VJfJ8uz8xYLw9Q2vGMHtaHBs+neuyOcsXjw0Sdon2qV61DFDJfflla4\nHxOVsMQ5kOPby0eUvtkml9Gem+H3sLnlsORby/JjHr6WH9emzdyPXORWOssVZGRq4QMf3Mq1vqXF\n8HF1YnLpuftd9zuFEP+kUPALkSgKfiESRcEvRKIo+IVIlL7O9mcMGCAJPIvNyNJbpXC9smak1tpc\nl8/o5/N8Rr+S5dss5MKz8/nKCO2zY4QnGL1yKpwoBAA7GtuoDe+5kZoWjoa3+Ru3fZD2qR4L1yYE\ngOdfPEJtSxGVoJANJwRNDvHxyHT5LPuvp/g5e3OW12REPny9lQf5rPhVY/wayC7xfiOnwtcpAAyf\n4se9d1P4uN8/HF56DQCe/nn4vl3jOUlvQ3d+IRJFwS9Eoij4hUgUBb8QiaLgFyJRFPxCJMqaUp+Z\n7QTwTawuwe0ADrj7V83siwB+H8Cp3lsfcPdHY9sq5DrYu2k+aHtj9jTtN98ZDbYv1rjE043U6ctH\nkkvGB3kiTikblnJWVpZpn0ZukNqyNZ648eOfPktt+6ZPUtsLv14K7yvyMT8aWboqm+HyVaXIpblF\nkvSzXOfLXbUi9QJHeSlE/PYuLjmWhogklucD0qhyWW7lRV53cWSRXzsTJT5Wd14fDsPRMS71/fho\n+Dy3uSL6Ntaj87cB/JG7P2lmQwB+aWYHe7Y/cff/tv7dCSGuFNazVt8JACd6r5fM7DCA7ZfbMSHE\n5eW8vvOb2W4ANwM4kxz+OTN7xsweNDP+PCSEuOJYd/Cb2SCA7wL4vLsvAvgagL0AbsLqk8GXSb/7\nzOyQmR2arfLvdEKI/rKu4DezPFYD/1vu/j0AcPcpd++4exfA1wHcEurr7gfcfb+77x8v82o9Qoj+\nsmbwm5kB+AaAw+7+lbPat571tk8BeO7SuyeEuFysZ7b/QwB+D8CzZvZUr+0BAJ8xs5uwKv8dAfAH\na22oWGxjz96wpLcHPAvv746F+xyZvYr2qXa5jDYywOuwLdT4UlhtD0t6eedS2dSpGrXNVvnwr3R5\nelbR+ZJRm8rh2oVHprjUdKQWWfYscn/YNcbHONsNa05Ti1zqGxjg+9pMJDsAKGf418lqM1x3sRNZ\nomxuhZ8XX+H98m1u238NNWH06rC0ePoVLiFPz4R9bPHT/DbWM9v/MwChqzuq6Qshrmz0Cz8hEkXB\nL0SiKPiFSBQFvxCJouAXIlH6WsATuS6whUhfx3m3LVvCWVvNoVPBdgA4MR3OBASAapNnbeU7fEjq\nzbCkV21xiafR5dLWqXY4MwsARgpcfqtW+Wf2Uj0sbdXbvE+nzaVKd95vYYln000MhcdkYpCn5y3X\neUrasTmuYY1WIj8ey4R9tIgkNkCyNwGgVuKyYinPx+Pa9/Kl2bAYPtff/Ql38skT4Wu42tJyXUKI\nNVDwC5EoCn4hEkXBL0SiKPiFSBQFvxCJ0l+pLwtgkEhYvFYhtmwKt3drXBoqVLgMODvLpRy0yM4A\nVPKbg+2RZC40InJewSNr3Rk/NbkMLxRZI0pPvc2lw5icR5QyAIA1eSamE0V3IMsHy/LcdnqJy15L\n7YjkOBzeZi5WWiLHpeC5NrctLHAfb13k19zp4+Fx/JvD/DzPdsPXR7MjqU8IsQYKfiESRcEvRKIo\n+IVIFAW/EImi4BciUfor9bVzwByR0nI8ra8yEm6/mi+Dh4nI2m4zYzzTbm7uBLXNz4czuuZrfGet\nOv98ncwNUdtglks27QaXOAfIGe0al/pKeW7LgNsGK5F1GIgfkSUUUSxxyW5siNumFvlYLSyE+42O\ncultscu39/wJrhG+fpyH09XDvJDrjl2kaKzxTMBhcs4il83b0J1fiERR8AuRKAp+IRJFwS9Eoij4\nhUiUNWf7zawE4HEAxd77/6e7f8HMxgF8B8BurC7XdY+7z8W21aoXcPKFq4M2n+Oz/aPbw+0Dkdn+\nQqRk2tgWbqsv8unoEzNhJeDkNP8MnT5NpAoAuXZ4aS0A6DhXENpdPgMPIgTkIl0yESUgkoeDBecz\n5l1S77DS5slA3SpfngoN7mMzz8dxpR4+n7VZnoQzXePKwvHZMt9Xiyf9tCLLfF0/NB5sf88IH6v5\nVng8CudxO1/PWxsA/rm734jV5bjvNLNbAdwP4DF3vw7AY72/hRDvENYMfl/lzEdyvvfPAdwF4KFe\n+0MA7r4sHgohLgvrekgws2xvhd5pAAfd/QkAW9z9zHPwSQCRh2khxJXGuoLf3TvufhOAHQBuMbN9\n59gdCP8UzMzuM7NDZnZoZoX/Mk0I0V/Oa7bf3ecB/ATAnQCmzGwrAPT+nyZ9Drj7fnffP1HhEyJC\niP6yZvCb2SYzG+29HgDwMQAvAHgEwL29t90L4AeXy0khxKVnPYk9WwE8ZGZZrH5YPOzuf2Vmfwfg\nYTP7LIA3ANyz1oa6mQJWiruCtnqRS1vddvChAmONV2mf8mTkK0ZYbQQAlCIjsmcx3D55fID2mT42\nRm1Li1wGbLciOiap3wYAThSslSofj1IhUi8wzzNFTjX4OasuhSW2XGaF9tkckTdbGS57LWb5OFbK\nYT8KEQ1zJJIUtjPDx+PdJf5k+/738ymxyfeFa0N++PXXaJ+XT4bP2cBTseKEb2XN4Hf3ZwDcHGif\nAfDRde9JCHFFoV/4CZEoCn4hEkXBL0SiKPiFSBQFvxCJYqs/zuvTzsxOYVUWBIBJAKf7tnOO/Hgr\n8uOtvNP82OXufM25s+hr8L9lx2aH3H3/huxcfsgP+aHHfiFSRcEvRKJsZPAf2MB9n438eCvy4638\nk/Vjw77zCyE2Fj32C5EoGxL8Znanmb1oZq+Y2YbV/jOzI2b2rJk9ZWaH+rjfB81s2syeO6tt3MwO\nmtnLvf95OuDl9eOLZnasNyZPmdkn++DHTjP7iZn9ysyeN7N/32vv65hE/OjrmJhZycx+bmZP9/z4\nz732Szse7t7XfwCyAF4FsBdAAcDTAG7otx89X44AmNyA/d4O4AMAnjur7b8CuL/3+n4A/2WD/Pgi\ngD/u83hsBfCB3ushAC8BuKHfYxLxo69jAsAADPZe5wE8AeDWSz0eG3HnvwXAK+7+mrs3AfwlVouB\nJoO7Pw5g9pzmvhdEJX70HXc/4e5P9l4vATgMYDv6PCYRP/qKr3LZi+ZuRPBvB/DmWX8fxQYMcA8H\n8CMz+6WZ3bdBPpzhSiqI+jkze6b3teCyf/04GzPbjdX6ERtaJPYcP4A+j0k/iuamPuF3m68WJv0E\ngD80s9s32iEgXhC1D3wNq1/JbgJwAsCX+7VjMxsE8F0An3f3t9RN6ueYBPzo+5j4RRTNXS8bEfzH\nAOw86+8dvba+4+7Hev9PA/g+Vr+SbBTrKoh6uXH3qd6F1wXwdfRpTMwsj9WA+5a7f6/X3PcxCfmx\nUWPS2/d5F81dLxsR/L8AcJ2Z7TGzAoBPY7UYaF8xs
4qZDZ15DeDjAJ6L97qsXBEFUc9cXD0+hT6M\niZkZgG8AOOzuXznL1NcxYX70e0z6VjS3XzOY58xmfhKrM6mvAviPG+TDXqwqDU8DeL6ffgD4NlYf\nH1tYnfP4LIAJrC579jKAHwEY3yA//geAZwE807vYtvbBj9uw+gj7DICnev8+2e8xifjR1zEB8H4A\n/9Db33MA/lOv/ZKOh37hJ0SipD7hJ0SyKPiFSBQFvxCJouAXIlEU/EIkioJfiERR8AuRKAp+IRLl\n/wGq+lwKJ6ukLwAAAABJRU5ErkJggg==\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "from torchsample.transforms import RandomAdjustSaturation, ChannelsFirst, ChannelsLast\n",
+ "sat_tform = RandomAdjustSaturation(0.5,0.9)\n",
+ "x_cifar_sat = sat_tform(ChannelsFirst()(x_cifar_bright))\n",
+ "\n",
+ "plt.imshow(ChannelsLast()(x_cifar_sat).numpy())\n",
+ "plt.show()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Now the image is a little more saturated. However, you'll notice we had to do a little trick. The *pytorch* and *torchsample* packages assume the tensors are in CHW format - that is, the channels are first. Our CIFAR data was naturally in HWC format, which Matplotlib likes. Therefore, we had to do the `ChannelsFirst` transform then the `ChannelsLast` format to go between the two. We will add the `ChannelsFirst` transform to our pipeline, although it might be best to do that first!\n",
+ "\n",
+ "Let's make our final pipeline for cifar:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 22,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "cifar_compose = Compose([ToTensor(), \n",
+ " TypeCast('float'), \n",
+ " ChannelsFirst(),\n",
+ " RangeNormalize(0,1),\n",
+ " RandomAdjustGamma(0.2,1.8),\n",
+ " AdjustBrightness(0.2),\n",
+ " RandomAdjustSaturation(0.5,0.9)])"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Again, let's test this on a single example to make sure it works:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 23,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAHIRJREFUeJztnVuMXNeVnv9Vl66+kmw2L2peJEoyLUemZUpuyfJY8Sgx\nRlGESWwDgTB+GOjBGM7DxIiByYPgCcbOmxPEHvghMEDHwmgSx2MjtmEhcBLIwiSCM4YsyrqQkmxT\nEkiJF6l5afa9u7qqVh6qGJCt/a8uVndXk97/BxCs3uvsc/bZ56w6VfuvtZa5O4QQ+VHY6AEIITYG\nOb8QmSLnFyJT5PxCZIqcX4hMkfMLkSlyfiEyRc4vRKbI+YXIlNJqOpvZwwC+CaAI4D+5+9ei7QeH\nhnxkZCRpK5crtF+tXku21+v1aGwd2RrhPokh+pFkcCwgGEeDjyPqx+ak0UjPIQC4N6itJ7guxUJw\nbsRUKpV5F+PPoupSldrq9SW+T3Ju0XWuBbZo7ktlfm7hD2nJPVIscvdcWJhPts9Mz2BhYSG66f4/\nHTu/mRUB/EcAfwDgFIDnzewpd3+N9RkZGcFf/OW/Sdq277yVHuvC5KVk+9T0FO1TLPBTq5SL1DY/\nPUlt5WJ6ThvOb9pCeLPzazQ7O0tt0Qe2yanpZPv87DnaZ2kxfSMBwM2791HbUH8vtbEhbt85SrsU\ngzead06fprapS2eprVxLz+P8DL/O5y5yWx091LbjJn5u1Rr3/gI57y1bt9I+r792LNn+1I+fon3e\nd9y2t3w/9wF4w93fcvcqgL8F8JlV7E8I0UVW4/y7Abxzxd+nWm1CiBuAdV/wM7NDZnbEzI7MzKQ/\nkgohus9qnP80gL1X/L2n1XYV7n7Y3cfcfWxwcGgVhxNCrCWrcf7nAew3s1vNrAfAHwFof7VBCLGh\ndLza7+41M/uXAP4XmlLfE+7+atSnUCig0tOftM3NLdB+S9W09FIIVvQ3DW2itqEBvkrtmwepbbC/\nLz2OEl8B9uD9tVjkqsPExAS11Wpcmtu2mJbEZmf4p66ZyYvUtmkTn8dANMH0TFqJmZzkK+nRPG4a\n2kxt6avS5OKZ48n2SoFfl8Fefl+dn+AK0/mz/LpU+gao7abde5LtO7ZuoX3e7E37kQXntZxV6fzu\n/lMAP13NPoQQG4N+4SdEpsj5hcgUOb8QmSLnFyJT5PxCZMqqVvuvldpSDefPp2WlSl8Q5LKYlgEv\nXOTBKgvDPFilMTJMbV5bpDYjoVmVIL6lWOby1czMDLVNXkoHMwHATCCL1htpuakAHvlWJfIgAMzP\n83msBY+Oycm0JLYUBMwNbuLSVjEIkOoLpNv+wbTEdukClxw9iNwr9/BjVZd45OS7509R2/xS+prN\nLPL9zZFr1mi0X4dDT34hMkXOL0SmyPmFyBQ5vxCZIucXIlO6utpfLJUwPJzO4Vfq4Su9g2Txta+f\np33qJ0E4AGAeLDkHqbVYRWO2wg4AXuMrtlGqriiPXFRZuUACO4rGL3V1iSsBc3Nz1LZliAersHFE\nCkcDPFKojii3Ilckegrpc2sEKgzqfBwDZT6PkQpT5IIKZpfS17N3gd87QVawttGTX4hMkfMLkSly\nfiEyRc4vRKbI+YXIFDm/EJnSVamvv38Ad98zlrRVidwBgFZIqgclraJyVwYuzUUVqApEBoxKMUWB\nFsNbeEWW6NzqfPgwEgDjDR6wtHjHndTmwfjLUb44MilR8EtUrqthfBzNmjGEetpWNC7nWYHLgBaU\n0FoKopaCS4ZCIT2WqPwXK2H3y//9f4IjLTtu21sKIX6nkPMLkSlyfiEyRc4vRKbI+YXIFDm/EJmy\nKqnPzE4AmAZQB1Bz97SOd3Uv0soloAKRgAqBLueBlFMocAnFOonqCyLwgopcKAV56QLFER5IbF5M\n2xoNnntuaHOQ0xCBjhmUDSuSMyiV+C0XlpoK9NR6oH3WiSQWXeeIKKKy0/GvJT3BPbWctdD5/5G7\nn1+D/Qghuog+9guRKat1fgfwMzN7wcwOrcWAhBDdYbUf+x9w99NmtgPA02b2a3d/9soNWm8KhwDg\nptFdqzycEGKtWNWT391Pt/4fB/BjAPcltjns7mPuPjY8zH/LLoToLh07v5kNmNnQ5dcAHgJwbK0G\nJoRYX1bzsX8ngB+3JJMSgP/q7v8z7uJokGSXkYTCbJ2KJ7UgqWYES0rJ2oHOzguIz61R41Fs58+n\nhZdtI9tpn4UgUSSLOAOAchThRiS2Tuc+olPZriOiaxZIvp3QiewcSrPL6Nj53f0tAB/ttL8QYmOR\n1CdEpsj5hcgUOb8QmSLnFyJT5PxCZEpXE3jCA4kikFA6kQfXAzaOTqUmtj8gDgI7dfoktf3mt0eT\n7feOfZz2GR+/RG07to9S27Zt6bqLAOgJRAlNOxVvw0i7Dq5N2CewrbXguN73t578QmSKnF+ITJHz\nC5Epcn4hMkXOL0SmdHW13xGscHcY5LLWdDNIJMpBGJWuOnHiLWp78/iraUMQdNLfx0Ot9+zcQ21L\n1SVqKxRJrsZwetc+rx6zRcFY0Q0XBc50qjp0sqq/FvepnvxCZIqcX4hMkfMLkSlyfiEyRc4vRKbI\n+YXIlO4G9iCQKDqIpYgUkvUox7SWfYCVxhgEOgUS244tm9N7q83TPv39vMTTqXfPUNuuvXupbWho\nIG3oMKdhTCSZMkOHz71giI2gbFh0qZl8GEnBBfrcbv++15NfiEyR8wuRKXJ+ITJFzi9Epsj5hcgU\nOb8QmbKi1GdmTwD4QwDj7n6g1bYVwPcB7ANwAsCj7j7R3iGp9hKMIf0eFcon6yC/dSIfRnn6ov0V\ni7xM1p7du6nt9PGXku2Li1zqO/XuOLXd+sF7qO22D91BbTSGMDhnPlMIJcIoL6ARuSwqrRVf5yA6\nL+gVqHZwcryw9FaDjb/9+76dJ/9fA3h4WdvjAJ5x9/0Anmn9LYS4gVjR+d39WQAXlzV/BsCTrddP\nAvjsGo9LCLHOdPqdf6e7n229fhfNir1CiBuIVS/4efPLNf2iYWaHzOyImR25NNHmsoAQYt3p1Pnf\nM7NRAGj9T1eM3P2wu4+5+9iW4eEODyeEWGs6df6nADzWev0YgJ+szXCEEN2iHanvewAeBLDNzE4B\n+AqArwH4gZl9AcBJAI+2czAzQ7GYfr9pNK49weFay3KdEh0rShQZlyjjttFRLvWVKn3J9hdfeZHv\nb++t1HbnHfuprWj89nGiRIXyJrUAkYQVqKIoFtJGK3eWUNOMS4S1epXawog/8gy2NS8AdjUrOr+7\nf56YPr3GYxFCdBH9wk+ITJHzC5Epcn4hMkXOL0SmyPmFyJSuJ/BkRJJYnURgRZJMFBXXzeSe0TiW\nlngiThg/VrVWo7b5pfRcVXrTEiAA9PVWqK0SyGjlYB7rRJorBfMRy6JcKpudnaa28YnlYSlNpqd5\nn8WFBWorlLjUt3s3/5X78PAOamvU0
/NYKARSKo3qUwJPIcQKyPmFyBQ5vxCZIucXIlPk/EJkipxf\niEy5bqS+SH4rldLDjPpEiTMjyS4U84iRSZEAcPEiT445ODhIbUNDm/g+Jy5R29nxC8n23n5SOw/A\n3OwMtf3yF/+X2h56eCvf5/xisv306dO0z7lz56jtbFAz8O133uT7HE/vM5L66nUupSKI6tsdJFZ9\n8Pcforb7P/6pZHulJ3DPKCNom+jJL0SmyPmFyBQ5vxCZIucXIlPk/EJkStdX+9kKfUdlrTos/dQI\nVnPLxUB1ILbjbx2nfU6feZfa7vv471FbdSm9Wg4AL72cLskFcCXg9ttHaZ/+ILDnaJD778zZs9R2\n/mJadThx4gTtMzs7R221Gg+CivLqsZyRvb2919wHiAJqgJPBffDUxBS1je5IqwQHDnyM9pmv8rlq\nFz35hcgUOb8QmSLnFyJT5PxCZIqcX4hMkfMLkSntlOt6AsAfAhh39wOttq8C+BMAl6MmvuzuP135\ncB5KetdKPQjQqQcyYG+Fn3Z9bpLaXvv10WT7ybdP0j533/sAtVV6uNw0vcADTyoDXJr75AP/MNm+\nc0c/7TMeBM1ceC+dAw8AXj3GZcDJmfT46/Ug72KQs663f4j3i8qlFdPHK5d5LsGeIKCmaLxfo8El\n5Eof7zczm77nCqUg/+Nc+ljXkmeynSf/XwN4ONH+V+5+sPWvDccXQlxPrOj87v4sAP72L4S4IVnN\nd/4vmtkrZvaEmQ2v2YiEEF2hU+f/FoDbABwEcBbA19mGZnbIzI6Y2ZGJiYkODyeEWGs6cn53f8/d\n696spPBtAPcF2x529zF3Hxse1gcEIa4XOnJ+M7sySuRzAI6tzXCEEN2iHanvewAeBLDNzE4B+AqA\nB83sIJpZ7U4A+NN2DmYwmo8vkig6ieqLbBcu8lxxrxz5e2qbmUyve3744D20z+jNH6C2WoNLOX3l\nLdT28CP/nNoqlpZSq1UuYT79P7hYY4H8NjzMx1gmcurCQpX28QZ/FlV6eLkxr0VRfen7qieQewcG\n+LEiqbIalF/bdzu/D27ed2uyvRGUKFtcTEd9+jVI6Ss6v7t/PtH8nbaPIIS4LtEv/ITIFDm/EJki\n5xciU+T8QmSKnF+ITLluynUhUO2KxbQkFkYwBTLJZFCqadPIDmo7+LH0b5kGhrfTPtNVHunV18un\nv1Hl8lW5zKP6Cpaek/5ymfa5+2OfpLapqQVqe+cdnrDSLC2XlYKouGowV5sGgoSbwc1TKqWfb/39\nfA6ZTAkAE1O8tFl/Ly+x9tGD9Hdw2Llrb7J9Zp4n6RwYSJdfKxBfSW7b9pZCiN8p5PxCZIqcX4hM\nkfMLkSlyfiEyRc4vRKZ0VepruGNhIS0dLQURUayu2mLQp+ZcKrtpdBe1je5K102LmF/kxzISZQcA\n8/VZais0uHxVB5dzFoj8WXAu9Y1sS0tNAHDr7fupbXKCJ/5cIJKjL/G5Khl/FpULfD42D/LkniwZ\nZ0+Fz2GxxI81V+U1FHv6eb6KXXv4PDaIG3qBR0AGynjb6MkvRKbI+YXIFDm/EJki5xciU+T8QmRK\nV1f7zQylIMCEUaulAz4MPLCnt8IDNzzI71er8dX5QjE9XeUgiAgNHghy5uRvqW3qAs+5d/sHDlBb\nefNIsr0IHhhT8B5q2//BD1Hb22+ly5cBwLnFdFDKQB+/LvPzfHW7WuUBRks1vs8dO9PzsXWE5x+M\nym4tBvfOzEKg7MzNU1uN5CAMU1SuwXq/nvxCZIqcX4hMkfMLkSlyfiEyRc4vRKbI+YXIlHbKde0F\n8DcAdqJZnuuwu3/TzLYC+D6AfWiW7HrU3cMyvI1GHXOz6WCWSiDNsXJd5SKXDT2QASNlrlAI3g9J\nRyvxHS4scImnWuO2gS08WKV3MJ2/DQBKLIebc/mqUefBNsNbeU7DA3fdTW0/P3822V4pRvPL5aup\neR7Etf8jd1HbvfeOJduj61wNgnf6T7xBbS889xy1/eyp/0Jt/+Sf/Ytk+213cEl3huSh9OjmXkY7\nT/4agD939zsB3A/gz8zsTgCPA3jG3fcDeKb1txDiBmFF53f3s+7+q9braQCvA9gN4DMAnmxt9iSA\nz67XIIUQa881fec3s30A7gbwHICd7n75s927aH4tEELcILTt/GY2COCHAL7k7lNX2ryZQD/5JdvM\nDpnZETM7cmni0qoGK4RYO9pyfjMro+n433X3H7Wa3zOz0ZZ9FMB4qq+7H3b3MXcf2xLUcxdCdJcV\nnd+aS+3fAfC6u3/jCtNTAB5rvX4MwE/WfnhCiPWinai+TwL4YwBHzeylVtuXAXwNwA/M7AsATgJ4\ndKUduQN1Iiux3H4AUCqlhxnLNTxCjO1vJVuN5Ay0QL7q7+Ofdu766P3U1qjzfTbqPP9cgeTBi+Zq\ncZHLgGZ8Pv7BnQep7eXn07JXf5lHELrxMmq9m3lU4kOPfI7a+vrS/aKckaHsTCI7AeA3R3mUY3We\n398T599LtvsHudT39tl0n+oSv5bLWdH53f3n4PkCP932kYQQ1xX6hZ8QmSLnFyJT5PxCZIqcX4hM\nkfMLkSldTeBZLBaxefPmpI0l6QS4PLi4yKOvImkrOlbUz4jNnfcpoI/aFma5HEmqbgEAKhV+2djw\nGw0e7VUO5LcoieRijfcrlgaT7SM7eJTg+Usnqe3gwXupbXBoG7Ut1dKSXqmHS4dzC/y+2rKF/4q9\n0p++twGgf4BPZJnIkdPz6SSoALDkabk3um+Woye/EJki5xciU+T8QmSKnF+ITJHzC5Epcn4hMqWr\nUh/Ak3GydqApEV5LOxBLdlFEVxQNSAnksHI5LXkBQKnIZcACPzVYgSfcNEt37DiSMZBFq4HNSulx\nLAZzv3nrVmq7eyydiBMA5oKEm060r3JQM9IL/IIObOJj3LyFy5g9QYnKIhlLqYffBKM3bU+2R+e1\nHD35hcgUOb8QmSLnFyJT5PxCZIqcX4hM6fpqP1t9rQeBJ6xPscPgnUhZKPcEOeY8vcperfIAjMlJ\nvro9NMhXjmdmeJrzuYUpahsZTgeelErtrwJfyWKwkj63MENtN+1Or3yPkMAuACj3p0u5AUBPH79V\nG+AKTYOUbWssBTkegxXzRiOQdgq836ag/BpTQEolfn/3EiEgCsRajp78QmSKnF+ITJHzC5Epcn4h\nMkXOL0SmyPmFyJQVpT4z2wvgb9Aswe0ADrv7N83sqwD+BMC51qZfdvefRvtyAFUi21UDqW9hfj7Z\nHgbvBFJfKQgI6u3lud1YAM9iMPZGUD1p4iKX83795ovUtmMXLwG2bSSdz86dy1CXLk1S21KNy2/e\n4CWodt60O9l+ICjx9dprr1Pb0Zd/RW13fPgj1FYkEVKNINldEFOF8fFz1DYywqXboU08wOvSdFq6\nLYPfVyWS968YBCW9bx9tbFMD8Ofu/iszGwLwgpk93bL9lbv/h7aPJoS4bminVt9ZAGdbr6fN7H
UA\n6bd1IcQNwzV95zezfQDuBnC5BOsXzewVM3vCzIbXeGxCiHWkbec3s0EAPwTwJXefAvAtALcBOIjm\nJ4Ovk36HzOyImR25NHFxDYYshFgL2nJ+Myuj6fjfdfcfAYC7v+fudXdvAPg2gPtSfd39sLuPufvY\nlmG+ICKE6C4rOr81o2C+A+B1d//GFe2jV2z2OQDH1n54Qoj1op3V/k8C+GMAR83spVbblwF83swO\noqngnQDwpyvvyuEsysq5rFEg+eDC6LwgwiqiRkqDAXyM5WI/7VOqcK3vtaNHqG1ogEuOe0Zvoba5\n+bT8Vgze56NyY/39A9S2MD9BbQMD6ei9eoNfl5tvuZ3aXnjxeWr7xc//nto+cf8nku3lIKlefYnf\nA++8zUuK3bRrlNr6+/k9curMmWT74lxa4gaAQjF9f7AI2BTtrPb/HGmFO9T0hRDXN/qFnxCZIucX\nIlPk/EJkipxfiEyR8wuRKV1N4Fmv1zF9Kf0rv0qlQvsZCY2rB7JclBC0FpSMimAqSl+Jy2HH3+AS\n1dSlt6jtrn0PUlsZPBlksZQ+b1bGC4gjGesNnujywsQ0tW3flk4k6kEdsr7BTdT28U/8HrWdPPk2\ntdUb6XtkoIdfs7k5npD13SCq7+Z9+6ht+/Z0eS0AGDlzNtl+7gL/ReyuXTcn2yP5ezl68guRKXJ+\nITJFzi9Epsj5hcgUOb8QmSLnFyJTuir11ZaWcH48LWsMDXGZZ3x8PNleCGSNLcM8sdCFCxeozQOJ\nsH8gnYRxcCeP2Jqd5/XsKhUuN/X18fnwqF4cMZlxya5W57a5OZ6kc2qGS2K79qbnpB5GF0aRnbyG\n4u23f4DamBw8T5LCAsDCAj/nvTfvo7ZKP0/SOTPHax7u3UeiGYMIvXNEclxaCjLGLkNPfiEyRc4v\nRKbI+YXIFDm/EJki5xciU+T8QmRKV6W+ghn6SOLE+Zl0vTIAGOpPR5016lwaWlrgMtRgH48g7Ovt\nozaWfLTUz8cxeguXoWYuBDLgAI8Cq1kg5zTSktLsHI8QO3M6nUASAPbs5uM/8BFeI69cSUt9HkQX\nBkofakHRwwKJ3AN4PcdSid/6Q0M8avJDH/4wtTnTWQE0Agl5kMiYpSJ/Nhca5F4Mzut9+2h7SyHE\n7xRyfiEyRc4vRKbI+YXIFDm/EJmy4tKgmfUCeBZApbX9f3P3r5jZVgDfB7APzXJdj7o7r9/Uwsn7\nTTEI3GArpUtVHoDRCAIcBgZ4QM1SLcj9R1acZ2cnaZ9Cib+/Dm0dobb5Ra5WNMBXtyvl9DxOT/FA\nFoCvwJfLvKxVJSjlxUqA1WuBUhEt9wdBLtEYWfmqaFU8WpmvO1/RN6IsAACK0fHS17NgfH89Pelr\nFgW7vW/bNrZZBPCP3f2jaJbjftjM7gfwOIBn3H0/gGdafwshbhBWdH5vclmQLrf+OYDPAHiy1f4k\ngM+uywiFEOtCW9/5zazYqtA7DuBpd38OwE53vxyc/y6AdK5mIcR1SVvO7+51dz8IYA+A+8zswDK7\nA+mfv5nZITM7YmZHpqb4d2MhRHe5ptV+d78E4O8APAzgPTMbBYDW/8l0O+5+2N3H3H1s06Z0zXYh\nRPdZ0fnNbLuZbWm97gPwBwB+DeApAI+1NnsMwE/Wa5BCiLWnnSiAUQBPWrPeUwHAD9z9v5vZLwD8\nwMy+AOAkgEdX2pEDqNbS0kutxktoseAMFPjwy2UuX0VyzXyQv61cSu/z5Ju8XNTFi+epbe+eW6jt\njeNcNW0EOfw2bdqabN8THGvHNmoKZa+l+SDYhlyzSIgqBlIZk4iBFcq2EVvUJyodx6RUIA40qwUS\nJw3gCeRNdl14j8RxV9rA3V8BcHei/QKAT1/DsYQQ1xH6hZ8QmSLnFyJT5PxCZIqcX4hMkfMLkSnG\nop7W5WBm59CUBQFgGwCug3UPjeNqNI6rudHGcYu78wSQV9BV57/qwGZH3H1sQw6ucWgcGoc+9guR\nK3J+ITJlI53/8AYe+0o0jqvROK7md3YcG/adXwixsehjvxCZsiHOb2YPm9lvzOwNM9uw3H9mdsLM\njprZS2Z2pIvHfcLMxs3s2BVtW83saTM73vp/eIPG8VUzO92ak5fM7JEujGOvmf2dmb1mZq+a2b9q\ntXd1ToJxdHVOzKzXzH5pZi+3xvFvW+1rOx/u3tV/aKaKfRPAbQB6ALwM4M5uj6M1lhMAtm3AcT8F\n4B4Ax65o+/cAHm+9fhzAv9ugcXwVwL/u8nyMArin9XoIwG8B3NntOQnG0dU5QTPyebD1ugzgOQD3\nr/V8bMST/z4Ab7j7W+5eBfC3aCYDzQZ3fxbA8sqZXU+ISsbRddz9rLv/qvV6GsDrAHajy3MSjKOr\neJN1T5q7Ec6/G8A7V/x9ChswwS0cwM/M7AUzO7RBY7jM9ZQQ9Ytm9krra8G6f/24EjPbh2b+iA1N\nErtsHECX56QbSXNzX/B7wJuJSf8pgD8zs09t9ICAOCFqF/gWml/JDgI4C+Dr3TqwmQ0C+CGAL7n7\nVTXbuzkniXF0fU58FUlz22UjnP80gL1X/L2n1dZ13P106/9xAD9G8yvJRtFWQtT1xt3fa914DQDf\nRpfmxMzKaDrcd939R63mrs9JahwbNSetY19z0tx22Qjnfx7AfjO71cx6APwRmslAu4qZDZjZ0OXX\nAB4CcCzuta5cFwlRL99cLT6HLsyJmRmA7wB43d2/cYWpq3PCxtHtOela0txurWAuW818BM2V1DcB\n/MUGjeE2NJWGlwG82s1xAPgemh8fl9Bc8/gCgBE0y54dB/AzAFs3aBz/GcBRAK+0brbRLozjATQ/\nwr4C4KXWv0e6PSfBOLo6JwDuAvBi63jHAPxlq31N50O/8BMiU3Jf8BMiW+T8QmSKnF+ITJHzC5Ep\ncn4hMkXOL0SmyPmFyBQ5vxCZ8v8AQh3Sx4F2jF0AAAAASUVORK5CYII=\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAGh5JREFUeJzt3WtwnNV5B/D/s7u6WxfLkmxhbMs2DmAcYxNBITiEQEgc\nSiHOtJ6QTMpMaZzOJDQp6QeGzjT0W3ohKTPpJGMKjdOhXCYkxaGkLTghHgZCUIzvXG1sI2PLkmxd\nbF1W0j79sOsZ2T3P0erV7rsy5/+b8Vg6z559z767j1Z6nz3niKqCiMKTKPUAiKg0mPxEgWLyEwWK\nyU8UKCY/UaCY/ESBYvITBYrJTxQoJj9RoFIz6Swi6wE8BCAJ4F9V9bu+29fV12vL/BZnrLyizuw3\nOp52to+Pj3mOZv9cS3p+5GU895mA+9OQ6jmWiOdgYofGxyfsoKfj+Pi4sz0zMWLfXcY+VmVFlRkr\n85xINYZYXlFp9pFE0owNj9jjHx8bMmPJjPv5zEy4zxMApMczZsz3XJeVl5uxjHVCAMB4jaQ89zd0\n+rSzfXBwECPDI56DTbr/fG7kIiJJAP8C4BYAnQBeE5Gtqrrf6tMyvwX/+IN/dsYWrbjZPNZ7XUed\n7d09x80+qYT9oq2ttM/NcLf7WABQlXK/KMYzFWYfqai2Y2KPo6/vlBmD2i+K7t5eZ/vwwJtmn/Gh\nATO2cvmVZqylzj7HGeNcLblkpdknWVVvxnbtN19WONW1y4zVjxxztg+dOmH2OXzijBlLS60ZW7hk\nsRkbGrN/sKFijrO56aKLzC4dv33J2f7Mk8/YxznPTH7tvwbAu6p6UFXTAJ4AcMcM7o+IYjST5F8I\n4P1J33fm2ojoAlD0C34isklEOkSkY6C/v9iHI6I8zST5jwJYNOn7i3Nt51DVzararqrtdfX233RE\nFK+ZJP9rAFaIyFIRKQfwRQBbCzMsIiq2yFf7VXVcRL4B4H+QLfU9qqr7fH0SiRRqqpqcsb5T7tIF\nAIyecV85TiTtq80t8xaYsfmNdj+Zb/920tLgvtIrnjLlBOwr82Vl9tX+zk73VWoASI/aC7AMnHGX\nxAZO2iW2ga4jZqypqdmMVZbZ5bKek13O9uPH7QqNVNhX2Vvmtdoxo7wJAL1vu8dR56m0zKuyz29n\nb48Z63rPHkdljf0aWbzCHWtb6M4VANhV7e4jifzfz2dU51fV5wA8N5P7IKLS4Cf8iALF5CcKFJOf\nKFBMfqJAMfmJAjWjq/3TlU6P4cjhD5yx8jkNZr/ekUFn+7FjB8w+QwvsySo6apcBE6N9Ziw14Z79\nVl1rn8ZktT2ho7fnpBk78YE9wain357FNjbhLouWy6jZZ2TInjE3OOg+9wAwkbBLWye63Y9taNw+\nVw0t9qfDk+X2zMOmefbkqfRcd3m25wP36xAAMr6Ze56JWiNpe4yHPRPG+sfcx+seNrtgcMQ9W3Fi\nGvtw8J2fKFBMfqJAMfmJAsXkJwoUk58oULFe7S8rK0dra5szlqiyl0CqNy6Y1zbYE3Tq6u2JFImM\nfRnVNzEik3FfSU171sBLjNpX2U+etK/2j47ZV9LVGAcAJJPuk5VK2EuNjaTdayQCQP+AvQbDnEZ7\nSStrHH19djVlTOznMy32RKejSffkHQBoMCYLDVfZV+3H1H5c1ZV2v9HT9nOtY/b5PznmTsOE52r/\nqPG+rb6FIc+//7xvSUQfKkx+okAx+YkCxeQnChSTnyhQTH6iQMVa6quvrcYf3ry2YPc3jCvMWLSN\nvOBZcc+O+TbWsotowOKFl9n9PHc66jugUS1Tzwk5M/A5OzhulxVrPGXRj2XcE4zOGBNSAEASZWYs\nnbQftKq9u1Eq7S5VVohdekPSXsdRy+1y5PCwvc3XmKcCl0y6x1JeZR9rZMw9GWvfi+6dfFz4zk8U\nKCY/UaCY/ESBYvITBYrJTxQoJj9RoGZU6hORQwAGka12jatqeyEGlS+7EOKPFZq9St8UY/R19MUi\nSNtVNJTPi/NsFYM9C+/Dy739WrVnC7jzFaLO/ylVtTcwI6JZib/2EwVqpsmvAF4Qkd+LyKZCDIiI\n4jHTX/vXqepREWkB8LyIvKmq2yffIPdDYRMALF5sr9ZDRPGa0Tu/qh7N/X8CwM8BXOO4zWZVbVfV\n9uZme693IopX5OQXkRoRqT37NYDPANhbqIERUXHN5Nf++QB+LiJn7+c/VPW/CzKqIvNNivMpcPUt\nMt9Mwf097q28VjbZC0/6SjW+WY72Eqn2GItxDmfL8xL1dWXxPa5CHCty8qvqQQBXFmAMRFQCLPUR\nBYrJTxQoJj9RoJj8RIFi8hMFKtYFPH18pQur5FHo0spUrONFLTX5Sna+x/b6vk4z9vIrW53ttRu+\nZPZ592272Ld82SVmrG6+GTJLhL7HHFWU145P1Oez0CXHYr+++c5PFCgmP1GgmPxEgWLyEwWKyU8U\nqFlztd8nzqv6cU4S8U2a8dm54xUztv9193ZNibS9TVbdHPuK/lWX2DHflXvrsRXj/PpeH1Ys6rmP\nciyg8JN0CnEe+c5PFCgmP1GgmPxEgWLyEwWKyU8UKCY/UaAuiFJflIk9UUshUcouxVgT0Dv+oVEz\n1NboXlkvke41+zTUf8SM7Xj7fTN22ZxFZmxJhB3AilHSjbN06yt9Rin1+cqRLPURUWRMfqJAMfmJ\nAsXkJwoUk58oUEx+okBNWeoTkUcB3AbghKquyrU1AngSQBuAQwA2quqp4g3TbbZs/RR1xlbU8V9x\n2eVm7H93POts7xo+bfZ560iXGbuk3X5/uO56u9Q3bEZs9rxDvygltrjLxL6yndUvE2EMOo3b5vPO\n/2MA689ruw/ANlVdAWBb7nsiuoBMmfyquh3AyfOa7wCwJff1FgCfL/C4iKjIov7NP19Vj+W+Po7s\njr1EdAGZ8QU/VVV4/tQQkU0i0iEiHd3d3TM9HBEVSNTk7xKRVgDI/X/CuqGqblbVdlVtb25ujng4\nIiq0qMm/FcBdua/vAvBMYYZDRHHJp9T3OIAbATSJSCeA7wD4LoCnRORuAIcBbJzpQOKcfRWnYjyu\nyy9fY8a2VdU621/bucvs07xoiRm7ed11ZqzSjETb2izqufKNI+pCnVH4So6+mPUOXOycmDL5VfVO\nI3RzgcdCRDHiJ/yIAsXkJwoUk58oUEx+okAx+YkCdUEs4FmqvcyKLerMstNpu3DUP+b+sGVZRYXZ\np6bKLog1pOy5dnGW0Xz6PCdy/xH3+ejuNj+XhqHBATOWrBw0Y6tWLzVjbbVzzVghyTRuy3d+okAx\n+YkCxeQnChSTnyhQTH6iQDH5iQJ1QZT6ZnvZzjdj62CnHWtusWPzPHW0w50fmLEjx93rqFZUV5t9\nhk7b5av/fGKLGbvn3nvNWE+fu33vnjfNPocPHTRjB957y44dfN0eR5d7AZnhoSGzTyYzbsYgdl2x\neUGTGbv99j81Y1+60z0ptrrIL3y+8
xMFislPFCgmP1GgmPxEgWLyEwXqgrjaX2i+q/NRJqvs2vG2\nGdv/5gEztvFLnzNjvkk/z7/wnBnr63dPSll+SYPZp9Yzsed3v91uxv7qLw+ZseM97okzR9+3yx/p\nUfuZUfVtXmVvUiUJdyyVsi+lJ3xviRl7HCfet6swj2/5kRm7rO1KZ/u6dZd6BjJzfOcnChSTnyhQ\nTH6iQDH5iQLF5CcKFJOfKFD5bNf1KIDbAJxQ1VW5tgcAfBXA2VkT96uqXX8qgWFPrCriff7uhaed\n7bv27jX7fPYLfx5pHMft6hXK59rr8d1w82ed7cva7PvrPPiuGevp7Ddj+/Z0mLHTI+6ynXoel4hd\nfksl7U25kp6F6yThLpqmUnansjL7PTHlmWbmmxBUVmmXCHv73jcidqlvwDiP01nvMp93/h8DWO9o\n/76qrsn9m1WJT0RTmzL5VXU7gJMxjIWIYjSTv/nvEZHdIvKoiMSzLjERFUzU5P8hgGUA1gA4BuBB\n64YisklEOkSko7vbvbACEcUvUvKrapeqTmj2A9cPA7jGc9vNqtququ3Nzc1Rx0lEBRYp+UWkddK3\nGwDYl7uJaFbKp9T3OIAbATSJSCeA7wC4UUTWIDud6hCArxVxjJH4ymiH7CXr8Ouf/psZGzjhLsms\nu/UOs8/ixQs9I7Et8JSv7v36JjNWZ7SfgXttPwD40YPmX22QpL2tVX39HDOWrHCvkZcetYtRmrHL\naOUpzzM6bt9nMumOlZXbJ3hOjT3LcXzC7pces0t9bZeuMGOr268zY5ZB4zWcmUatb8rkV9U7Hc2P\n5H8IIpqN+Ak/okAx+YkCxeQnChSTnyhQTH6iQAW5gOcH3fZMtYbF9kyq9V/4srO9pd4uDbmX08yy\nynKAf5FRXz9LDexPYK+//c/MWG/vZjP23ns7zVgi4X5pDXveb8bG7Cl/dVX2SzWFMjNWZlQPa2rs\n+yurssfY3W/PF62ssp+ZT960wYwtXVDjbPfNTF1gHMp6vC585ycKFJOfKFBMfqJAMfmJAsXkJwoU\nk58oULOm1GctSAgAdcZEqtOe+zvjia1aVm/GVi/7uKen2zFPLOpPV7t45S8BWaVF3x6Ei5ctM2Or\n1ppLNaCv5y0zNgr3DDdJ20XMMc/imBXGnnsA0Fhjzy6sLHc/AxWeSYKpcvtYA57xJ+e0mLEVH3Uv\nrOrjm6A3jYqeie/8RIFi8hMFislPFCgmP1GgmPxEgZo1V/utK/qAfdXTd8Vzvifmu4rqqxJYV+Cj\nTLQBgP37XjRjp96zlzm/5rovmLGqee6zYm92BZR7TuR1n/i0GXtn1y/NWNewe5G52kr7JTc0bD8z\no+kRM5ausGsjixa2OttbL7InOk1MjJqx/mSXGTvpGf/gKU9tapG7WlGIK/o+fOcnChSTnyhQTH6i\nQDH5iQLF5CcKFJOfKFD5bNe1CMBPkK2eKYDNqvqQiDQCeBJAG7Jbdm1UVXtPKGRLbH1GNaTBU9ew\nQr4tuXx8JZQoZTvfOPpg7w02NNZjxmoXzrNjRjkP8E/giWKptVgcgE/cdIsZ2/rYw872moTn/Ubt\n2Em70oeV19nbXW3Y8EfO9lTKPodDQ/ZzVrHjZTP24i9/ZcaeeugvzFjVNx5wtq9de4nZx8qjaezW\nldc7/ziAb6vqSgDXAvi6iKwEcB+Abaq6AsC23PdEdIGYMvlV9Ziq7sh9PQjgDQALAdwBYEvuZlsA\nfL5YgySiwpvW3/wi0gZgLYBXAcxX1bNT2Y/D/6E6Ippl8k5+EZkD4GkA31LVc9aMUFVF9nqAq98m\nEekQkY6ebvsjq0QUr7ySX0TKkE38x1T1Z7nmLhFpzcVbATg3clfVzararqrtTc3NhRgzERXAlMkv\nIgLgEQBvqOr3JoW2Argr9/VdAJ4p/PCIqFjymdV3PYCvANgjImf3Z7ofwHcBPCUidwM4DGDjVHek\nCoyNuWOnPGv4WRPBfCU23zp3UUuEUTSg1ozdtOaPYxxJ4d10k/2Ub3/2F872uSm7GDku9my6VEOD\nGfvGvfebsULPjNMy+9Wzc/srZix9xp7V13VovzvgKfW9+saQs/3McMbsc74pk19VXwJgTbi9Oe8j\nEdGswk/4EQWKyU8UKCY/UaCY/ESBYvITBSrWBTxTAjT7VpI0WDOVfNt1+Wa3zZYy4IdZIuX+tPeC\nxReZfQ73HjJjn7hxgxkr9kKXk1100WozVl5rb9dVU2uvUJtqcJeD7aVCgUGtdrZPTOP9nO/8RIFi\n8hMFislPFCgmP1GgmPxEgWLyEwVq1uzV52OVctw7nE3Nt8ihrwwYxYe5dDjgmYmpxgaAp0fTZp+5\nLfZiULdt/FTe45op3+ujxlOqbmxabsaqK4zprACSVe5XSY1nHFesdLcbd+XEd36iQDH5iQLF5CcK\nFJOfKFBMfqJAXRBX+62rr1EndPj6Rbk6P+CJDdkXtzHPM/vIdyW9d9Be625pXYVnNIXVe9re1mrJ\npa3O9qXNTWaf5FH7cfkelu/qvHX6fa+BqFueZZL29fl5C+za1OCo+3H7qlnWjm3TSWi+8xMFislP\nFCgmP1GgmPxEgWLyEwWKyU8UqCkrAyKyCMBPkN2CWwFsVtWHROQBAF8FcHbr3ftV9bmoAznli424\n21Oees2QXTVCZZkda4xQKTvpC3rGkbYrZdj22otmbMlH683Y0rq1vtE4+cqKvlKlTDj3ZgUALFux\nytl+w7WfNvuUvfy6GfvF878xY+tu+aQZs+bh2NNs/KW+d48bL0YAF19sr0/Y2GxvN3a02z6PFmu1\nwOmU+vK57TiAb6vqDhGpBfB7EXk+F/u+qv7TNI5HRLNEPnv1HQNwLPf1oIi8AWBhsQdGRMU1rb/5\nRaQNwFoAr+aa7hGR3SLyqIjMLfDYiKiI8k5+EZkD4GkA31LVAQA/BLAMwBpkfzN40Oi3SUQ6RKSj\nu7vbdRMiKoG8kl9EypBN/MdU9WcAoKpdqjqhqhkADwO4xtVXVTeraruqtjc3Nxdq3EQ0Q1Mmv4gI\ngEcAvKGq35vUPnnmxgYAews/PCIqlnyu9l8P4CsA9ojIzlzb/QDuFJE1yJb/DgH4Wj4HtGZgeSpK\nSETY4stXzvPxbQE2brT7Zl/N8QR/9axdGZ3fZM8Q+4OFdjnPmmHoO4WZjB1b4Cl9Hui359PNnbvI\nc0S3te324/rpM8+asSe2bDVjX/7y7c72Gs8r3/da3PP6a2Zs+eWXmrGGufYlsX3/9da0xxF15uFk\n+VztfwmAa6OxyDV9Iio9fsKPKFBMfqJAMfmJAsXkJwoUk58oULEu4DmaAY6cccdqPXsTpYwpWGOe\nWkjas6rj2LBVtPPLqPtnZTJp/wzd8fKTZqy/e5sZ+/TV3zRjvjJPlBKQsbPWlN7r7Ddjy9uM/aQ8\nqjyD/8qf3GbGXt7XacZGjad6rueV32eH8M6hI2Zs9dXOz7kBAJY32TXTBcsuc7a/dcSe6/rRxT
P/\nND3f+YkCxeQnChSTnyhQTH6iQDH5iQLF5CcKVKylvvTICI68457B1Nzs3tsNAA4ceMfZnhL7Z9eC\niy82Y0cOHzZjGLfLgHWN7vUIFqxYbvY5ebrHjFVXN5qx+vrFZixOvZ5yalevvQLppVd5arcF9vEr\n7Ofa4ttfsd+uYOLy1Vebscp5djnPt0TnFVdf72wXz2zLd46720d9K5Oeh+/8RIFi8hMFislPFCgm\nP1GgmPxEgWLyEwUq1lJfKiFoNKZunek5avZbUF/tbJ/wTN0bH7BLbC21dkmmoXa+GZuAu/ZSVmt2\nQdtV9j5ygwftUll5lX2nngmLpm41akMA9u/ab8ZWr7rJjH32FjtWl9+wzuF7XL5YlJmMvkJkjb0V\nIlqv/4gZ843RV4FrMSbo+cZoPeaKaSxcy3d+okAx+YkCxeQnChSTnyhQTH6iQE15tV9EKgFsB1CR\nu/1PVfU7ItII4EkAbchu17VRVe1FxwAoEpgwrlMmy+3LlBMT7sk2Q+P21fKJ0VEz1thoT6gZsrth\nJOMOlg8fMvskKu2fr/VLlpkx3zpyvm2c5hnt3R8Me3pVmRHftlZ2r9kv4rKFkfvNxnOVzzv/KICb\nVPVKZLfjXi8i1wK4D8A2VV0BYFvueyK6QEyZ/Jp1dv/Kstw/BXAHgC259i0APl+UERJRUeT1N7+I\nJHM79J4A8Lyqvgpgvqoey93kOAD70zFENOvklfyqOqGqawBcDOAaEVl1XlyR/W3g/xGRTSLSISId\nfad6ZzxgIiqMaV3tV9U+AL8GsB5Al4i0AkDuf+diJaq6WVXbVbW9Ya51OYqI4jZl8otIs4g05L6u\nAnALgDcBbAVwV+5mdwF4pliDJKLCy2diTyuALSKSRPaHxVOq+qyIvALgKRG5G8BhABunuqOMAqfT\n7mJJetQuRSVT7j6ZlHvCDwCUV9mlwxG1Y/1n7PJhlbGv1e6X9pl9jh09ZMauWHmVGXv5NwfN2PiE\nPZWlucVdPvzYqqVmn5UL7ZivrOibyBKlJObrE7XEZo3RV/icE/FYF5opk19VdwNY62jvBXBzMQZF\nRMXHT/gRBYrJTxQoJj9RoJj8RIFi8hMFSrIfzovpYCLdyJYFAaAJgL3QXnw4jnNxHOe60MaxRFXd\n+8qdJ9bkP+fAIh2q2l6Sg3McHAfHwV/7iULF5CcKVCmTf3MJjz0Zx3EujuNcH9pxlOxvfiIqLf7a\nTxSokiS/iKwXkbdE5F0RKdnafyJySET2iMhOEemI8biPisgJEdk7qa1RRJ4XkXdy/xubOBV9HA+I\nyNHcOdkpIrfGMI5FIvJrEdkvIvtE5Ju59ljPiWccsZ4TEakUkd+JyK7cOP4u117Y86Gqsf5Ddnbm\nAQDLkN1ybBeAlXGPIzeWQwCaSnDcGwBcBWDvpLZ/AHBf7uv7APx9icbxAIC/jvl8tAK4Kvd1LYC3\nAayM+5x4xhHrOQEgAObkvi4D8CqAawt9Pkrxzn8NgHdV9aCqpgE8gexioMFQ1e0ATp7XHPuCqMY4\nYqeqx1R1R+7rQQBvAFiImM+JZxyx0qyiL5pbiuRfCOD9Sd93ogQnOEcBvCAivxeRTSUaw1mzaUHU\ne0Rkd+7PgqL/+TGZiLQhu35ESReJPW8cQMznJI5Fc0O/4LdOswuTfg7A10XkhlIPCPAviBqDHyL7\nJ9kaAMcAPBjXgUVkDoCnAXxLVQcmx+I8J45xxH5OdAaL5uarFMl/FMCiSd9fnGuLnaoezf1/AsDP\nkf2TpFTyWhC12FS1K/fCywB4GDGdExEpQzbhHlPVn+WaYz8nrnGU6pzkjj3tRXPzVYrkfw3AChFZ\nKiLlAL6I7GKgsRKRGhGpPfs1gM8A2OvvVVSzYkHUsy+unA2I4ZyIiAB4BMAbqvq9SaFYz4k1jrjP\nSWyL5sZ1BfO8q5m3Insl9QCAvynRGJYhW2nYBWBfnOMA8Diyvz6OIXvN425kt9nbBuAdAC8AaCzR\nOP4dwB4Au3MvttYYxrEO2V9hdwPYmft3a9znxDOOWM8JgNUAXs8dby+Av821F/R88BN+RIEK/YIf\nUbCY/ESBYvITBYrJTxQoJj9RoJj8RIFi8hMFislPFKj/A+O/zNBr1JAeAAAAAElFTkSuQmCC\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "x_cifar_example = x_train_cifar[20]\n",
+ "x_cifar_tformed = cifar_compose(x_cifar_example)\n",
+ "\n",
+ "plt.imshow(x_cifar_example)\n",
+ "plt.show()\n",
+ "plt.imshow(ChannelsLast()(x_cifar_tformed).numpy())\n",
+ "plt.show()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "So Awesome!\n",
+ "\n",
+ "We will skip transforms for the data saved to random image files, because the point of that data is to show how to make a custom `Dataset` which will be in the next section."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Transforms for the Segmentation Data\n",
+ "For the 3D brain images, we had a brain image and its segmentation. I will quickly show now how you can perform the same transform on both input and target images. It's pretty simple."
+ ]
+ },
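+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Before the demo, here is a minimal sketch of the idea in plain PyTorch (this is not the *torchsample* API; `paired_random_flip` is a hypothetical helper): sample the random parameters once and apply the identical operation to both tensors, so the image and its segmentation stay aligned."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "import torch\n",
+ "\n",
+ "def paired_random_flip(img, seg, p=0.5):\n",
+ "    # Hypothetical helper: the flip decision is sampled once, so the image\n",
+ "    # and its segmentation receive exactly the same random transform.\n",
+ "    if torch.rand(1).item() < p:\n",
+ "        img = torch.flip(img, dims=[-1])  # flip along the last spatial axis\n",
+ "        seg = torch.flip(seg, dims=[-1])\n",
+ "    return img, seg"
+ ]
+ },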
+ {
+ "cell_type": "code",
+ "execution_count": 24,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAATgAAAD8CAYAAADjcbh8AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvUtsbNud3vdtFt9k8c1zzj1XR766LbVe1qMRSTHcPXBg\nOEiCIIYnDXeA2IiNyIPEycADtz1IghgGjMCxJwGCKIgRZ+DXIIYNw7BhG3E6baTT6larbyRZfXWf\n533IQ7LIYhXf3Bnw/FZ9+8+1q4rkvTJ1UX+AIFm1H2uvvda3vv9zFWVZaiQjGclIPoky9m+6ASMZ\nyUhG8nHJCOBGMpKRfGJlBHAjGclIPrEyAriRjGQkn1gZAdxIRjKST6yMAG4kIxnJJ1Y+NoAriuLf\nK4ri94qieKcoil/9uO4zkpGMZCR1UnwccXBFUTQkvS3pj0h6LOm7kn6lLMsffeQ3G8lIRjKSGvm4\nGNy3JL1TluV7ZVkeS/o7kv7ox3SvkYxkJCPJyvjHdN3XJT2y/x9L+rfrDi6K4mNLpyiK4qd63m2V\nsiz7PtOg729yX+miP0dZM9cXfz+jfpTKshxqsH5cADdQiqL4tqRv2/+V3/0+98+GOS+em/us7vvc\npK8DgtznPsFzn/e7RgSH+Lvu/Nx38fy6dl5XBgFYboLGc2Jf1V2vHxj7OXXPeV2guClYX/ecYRan\nOun3rLmxWfcOYhv6jcNhxmlsx1XeycHBwcBjkI8L4J5IemD/f+rVZ0nKsvyOpO9I9Qyu30u97neD\nzongd5XrlmWpsbGxgZNs2DbWgeIwEz8HJsP0WRzIDNJ4Tx+8H8UEjG2IE2TYd9oPxGO7rwpU/QC6\n33VpT26hcqkDizimYhuGbXvuHQ5z/9yxg/qZ69Xdo994q2vTdRaWjwvgvivpc0VRfEYXwPbHJf3H\nH9O9hpKbAE3unDoQuwlj63dsHFCD7j9s2+N3Ebjqzr3KwOvHwnPtvioY51hgTga1edh+7Mf+c+2q\nO77f98O8x2GkDlDrGPVVpB+LHjTfhgW267Jl5GMBuLIsT4ui+C8k/VNJDUl/oyzLH34c9xrJSEYy\nkjr5WMJErtyIoigHrfLx75xKmVs1+D53fu76Y2Njlz6L//djcH2eUVL/1X2Yd1HH4Pqprf3amGNB\ndSxi0H28T4dlBYNscDnm0e86wzKgYdpY1ydXuUe8Xt25g56zH2vt1+5Bbek3jj8ObLiqTS53brfb\nVXnbnQxXkX52AJecvWkQuA1Sh+L1GRSD7A+5c2Pb62xbVz1+WPVwWBtL7rMIYHXCAjGM2jiMKhP7\nA3tUVGHqxkY/4B70LP2+v87k76eS1bVt2HE2yHRR991Vrh3lpn3ANYYlDFdVoaWfEYC7qfQbJMMY\nPesGz1VeRN2EzK3aw7CRq9pt6pjSoOsMw6j7AUzumByD7feM8dzcItPPnhbbUXftumNz5+WAddjF\nqp8MAuHc/3XjZhiG1o8ZDlqE6/73tgySQUB7UxZ5qwDuqgg9yFDZ7/O6gV3HHK7TzkFA0a9dg9jd\nsEbYHJgM25Z+zPcqMiwbzbU5946dTeaYpfdhP6ZUFEXFJNGv3VeZ6Fdl2f1kEPPOtTXXX3XXrjvu\nOu+5X/vqpE7l9vc3LOPOya0CuJz0e6ib0Ou67+MEGKSm5I6pm3RcfxjbTz9mEI+texb/PHfPOgaX\n+78fwPX7LNf28/PzS+2PAOL/D1Jx65jHoLETzx20qA1aWPq9135Mqa59ufv3O64O9IYdS4Pe7aCF\n6ioAFN91vE/dNYdV2ZFbD3BI3YscxMLqQCgyACR3/FWYYe7+/ne/ydrvmnXH5Fa5qHpEljKIycXP\ncyxnbGxsqD7Ktb8sS52fn2tsbKwW7Px3PCaC4rCqIt/lwC1+x3lXBYRhgavu3H4Tut/imAPnugU2\nJ4PYdM4cMGw/1V23rh2DFpqryKhc0khGMpJPrNxaBncV1fIqamwdk7uKzSBepx+dHtTGYen9MMwg\n9ywwrZw6lzunjr0NYnG59nOOMzDk/Pxc5+fnGh8fv/T9+fl5auf5+bkajcYlpsU5/Ha256wjx+Zi\nKFAdI/bv+D/HOOO1cveNzGTY8daPreeOGeY6sV1138fxO0hbiufelNnltJGrMrhbA3D9JsogOj/I\nXtYP5Pqpo1cdhLkB4erUsCpDTgXod+86gIrf1z2rfz42NnYJ1Py3gwMA5+c6ANQBnAOAA1Tus7Oz\ns0vPHMEtnnN+fl757ccM06exb7hufMY4gXOmAv9u2Hv2m8g51bqf6uptrwPHQecOow7XPcsgVbSf\nvXOY9g2SWwNww8hVHjC38l3l3EG2jkH3HWSAHrRiXqWtDjA+Ef0+dWwL0OJnfHxcjUajcn3ObTQa\nlwCuzgvpQJCztdHHZ2dnCdwcuAAnAI7n8v/9nGjf8991tr1+Np7cex/WJhSZxzDneHv6jbF+NuXc\ndWI7+oFo7vlycY397pk7Zph+jgy97tirzpFbDXDXQey669QBXhxUdf+7+EvPdXqOtV3l5QwDonUs\nFNCJAwigGhsbSwBWFEUCNMBrfHy8Ao58J6kCcNxjfHy8cv06II9g5H10dnams7OzSwDHZ3zvx+dA\n0M/lb//taqYfl5tc8X15gPGgiR3Z8jDA2o9l+XWHkTotpM5kctV5VqexuNSN/WEXimEBeJDcaoBz\nqVO9rnstp/qDrhUHSb9JnGtrv2fopzLEzwCR3LUAnPh9o9GoAJUD2Pj4eOWH4/kbMOR7/258fDz1\nmwNmDtwd3KTLamlZljo9PU2fn5ycSFICNgc4AO/09DSdc3p6mr6P4Bfv7feNAOmqZ530A4ayLC95\nl71f4rvtBxDDsKNhxMfksIxrmPbx2bDqY+y3fipqv2t8ohicdLWOu8p3DhhXucZ17zdIeHl1A9L/\nj387qMGqnGmNj49rcnIyfT45OSlJmpycrIDY2NiYJiYmNDExIUnp7/HxcU1MTFwCvzhQ6+w1rjJK\nVYBxJoU4c3Mg49yzszMdHR1VvgcU+Z8f79OoJgNsALS3ZxhWzvPFdzRIJcs5XfpJTvuos1HlxtAg\nO1e/z/p9HtuW+7yO8Q7SbD4q7e1WAdwwgDTM58OAkE/OYc/174dZSepod05VqQM2v1b84XPAzNkW\nQARATU5OqtFoaHJy8hKANRoNTU9Pa3JyMv1IqoAeTDD3bBFg49/RFuYqKOzNAdC/c0bGd8fHx+n7\nk5OTBGaSEtgdHR3p+PhYx8fH6Tv6mXNz7ycywNgmf/Z+drY68ffspo7IluoAtu6+UZuI39W1te46\nceHKXa+uff1Ybt2c6CeDmGQ/GcXBjWQkI/nEyq1icNdZET+u+/Zb7epWr9zqlLveICaaU4Pc0xnZ\nE2ooP87CnKFNTExoenpa09PTki5U1KmpqfTb7XTc1+1sOQ9qZJx+TM5zimBXgw04C4M9YVvz66Cu\nwqhyDA4WB5M7OjqSpMqxp6enajQaF
XU0ttGZ4+npqcbGxirqcp2tLX4X1fec+cElp9YN0jxiO3LX\ni8wrZxeL39WN+auw2H7HDNJy+rVhGLlVADcIBAadc937XeUaw9oL6mwPdWCYU03djkawK2CD3Qib\nGjY27GWA2PT0tKampjQzM6PZ2dmkikqqnOPOCB9Q3Dd6WDmubtC5AwG7kz8fAb4+8WLM28nJSQK4\n4+Pj9DmOBcDGnRCAHmqs2+cAO4AOAHRgdcBrNBqVNtHvUW2uk9y77qdC9lto61TYYcbuMIvroPEZ\nP3PQjG0a1MZBi3k/QLvqfL81BS/rchv7fRbtUX5cv9WyzoaVaZeky/ax3Ip6jWfODgJ+5xgTn09M\nTFwCKVjY1NSUpqenNTs7K0manZ3V3Nxc+h5GJ6kCbh5C4m0CyOinXF+cnZ2l9rmdChbGNQESn0zk\npMaBfXZ2ltpZF0JycnJyyVEB+MHg3MMKuAF8AB7gyd/YBDnO2+R/R8eJg2POc9tPhvm+3zERNKSb\nMZ+cE+OjkJu0Bdnb29P5+flQSHerGNywKmo/Cu/SDxxhJf3ofN29+/3v96uj9XXnutoX2+YqKCAm\n9ZwIMzMzmpubS0xtfn5eUo/BAWQcL/XAMqf68j2fuaHe209/RUeBA0COHfjncSJyXYAXEKQfyrJM\ngM09aB9gCLABgjwD3tfDw0PNzMwkEJSk4+PjCugdHh5WQBb2dnZ2ltoRY/C8D3iuGG93HdAYBFbD\nMrlh75tTX+tY11WeZdg2XIWh9pNbBXB19Dz3d2RhN7lXpNb92uHH5NSKq9gm/F4ObM7W+O0MDNCS\nlNja3Nyc5ubmNDs7q8nJSc3MzEiqMjw8qVH19ZAP7wsPw4ixYrAmAMKZFecCTnzmz4X9rU4mJycv\nsUH/e2JiIp3v2RcxADgGCcPQZmdnU7gJDO709DQBG7/dfhftfeTJ8jf3jGwX6ZfH6ran3OJ9VRAZ\n9rh+oOumhX5q7lWZ4pXVzGuCqXTLAO6mMiz7qwvGzP2+ioE0Hs+9/DvOi2AW7wsoSaqEb8zMzCSW\nJklzc3Oan5/X7OysZmdnk/rqNjhALIZ7AECwDia/g5jbqWBEfIfaeXx8XAEyvvdwDGc2btejHd6X\nExMTmpqaqvRFNCnAqLhODFXBWRIdBUVRaGJiQjMzM+mZHeCOjo40PT2t4+Pj9OxSz37namy8tj9r\ndI7QNp+gzlhz465ujOXG0rDMbtD/8R5X0ahyQFQHfoNAa9DcGlauHSZSFMWDoij+z6IoflQUxQ+L\novivXn3+3xZF8aQoiu+/+vkPrnuPkYxkJCO5idyEwZ1K+nNlWX6vKIqmpN8uiuKfvfrur5dl+Vdv\n3rz+ch1E70e1/ZqDrt3P45W7dlzhYFK51Co8oaiheEFRR+fn5zU3Nyepx+DcwTA9PV2xs3F97G+s\nmB5ScXh4mAzwMBoPlHXVTeoZ8535+UrsjK6OjXjbnFliFyyK4lLgMoHK2A+5BsHLbr7wPFvuh3qM\nOo79kjbjiIDB8bzdbjf9f3h4mNRX1NXx8fFK6ImzqpzX1VXYXE4sMgy7GaS2DaPi1tnVBrHDuvbF\n73IaUb829WODV5FrA1xZls8kPXv1d7soin8t6fXrXm8YGQQauc+vC4A5VeKqAyvnnXXVNHpJCcMA\npNzOhmq6tLSk+fn55ERwm9vMzEya9D6ZyFhw76MkHRwcVGxNR0dHafJKPbUM1Y3jJVVi1NzrmBvU\nTG6vUhL709VQz3ONKWKAP7bIRqORFgCpt0Cgxvp1sWV6m7mudAFEACXqKX01NzeXFoHj42N1u10d\nHBykvnJV3j3HXBd7ZnRE8O4BXL7LqarRWZOTOjV4kMSxnSuc4DLouzrVuY4E1IG7mzKuan+TPiIb\nXFEUb0j6BUn/r6RflPRni6L4E5J+Sxcsb+cK16r89r9znVrHtHJ2jUH3rLvGMPa4Qdfk3AhsHrLh\nDgS3s83MzGh+fl4LCwvJmQDAuW1ucnLyko0R+5unLx0cHEhSmqTdblf7+/vqdrvqdrvJ7uTg58xO\n0iXGFmPYhrG5+Ge5MCEPXXEWhp0R8HKbI84YAps5Tup5nGGMsR3eBmygPJez3OPjY83Ozqb+oS99\ngXDHjD8DnxdFUbF1up0yLox1TCgnuX4fNG/qwGUY9jZII6prYw5Q69p/E7lxHFxRFPOS/i9Jf7ks\ny/+jKIq7kl5KKiX9JUmvlWX5pzLnfVvSt1/9+2/FWKhh1D0/Nne8Hxe/rzs3d7+67+rYY782e0iG\nq14E4KKGAmqSErgtLCxc8pIyiUmcl6psCO+fg9nu7q4kqdPpqNvtqt1ua29vLzE0WMvh4WFiZkgd\nO3AHAt8xWc/PzytMie/rFqGc584BzlXQyPDoCwc4Bz/3RnsWCNfGcRLVR8+AAOTOz88TwKHOHx0d\npb5mIXFm5+pqLsDYWZz3cb+4ujqgGoYYXIUN1sl1HAqDgLru+7Is1Wq1ho6DuxHAFUUxIekfSfqn\nZVn+tcz3b0j6R2VZ/v4B1yndHmOfZ3/zdw7IwnUl5dnBsGCau55fo98qFv/2bAGv1CH11FBnaAsL\nC5KkZrOZQkDm5+ezNilsa7EEEMys3W6r1Wppd3dX7XZbktRut9XpdHRwcFCZtB7zxvWi2uuZFZGJ\n+bP7O/C+yIGjBwv7BKzLGKA/Ywko71viBiUlO+XMzExieTGtzZ8pqoXet4CZl3ZiMUGF7XQ6kpSY\nMbZKr3QiVdV9tw9yX+8TZFigiMcMUgv7qaO5Y/sBZR1LzM2bHGDXnbezs/PxB/oWF634XyX9awe3\noihee2Wfk6Q/JukH171Hn3tn//aXHidYHBB1dD3+Hwd57mXGQREniXQ5Z9TZhKSK86DZbCZbm6RK\nfBuT1G1SgA8T5/j4OE2u3d1dbW1taXt7W61WS/v7+4lZ4DTARhTZA+12duZ5qgCfh3tE9Qo2mWNw\nMNo4eT0EJccgc+yKz46OjipMGcCTegCHKWB+fj4tLPH76ASCjdIfMEMv04RqenJyosnJyWRigNHB\n7jqdTiWezgOIY/6tM2FneP2AahjQ8+P82JzNL455/u9nBrqq+tqvzVe5zqVzr8vgiqL4JUn/t6T/\nTxIz4i9K+hVJX9eFivqBpD9jgFd3rSsxuNzvq6qoDJi6a+USquPfOQbj37vtyFVSwI0AXUnJcbC4\nuJhsbR7rhkE9JtPz/lBDUTe3t7clSVtbW2q329rd3dX+/v6lwNbcyunR+65OO5h5BeAck3LVT9Il\ngHOJwBrToxzk3LHh7ef7WAOOttAGLxvljE66sHU2m81kz6QAAdfwoF7+p63O3jwfVqrG0AFw0UHh\n6q+niMVAZc8A8b4bFtSQOvZVRxiuYhurA8lB1xxWtre3P34GV5blr0vK3eQfX/eaIxnJSEbyUcqt\nzWSoo77DnOP/19nl+l07rmx118itSpwX8zudvXl6FWrowsKCFhcXtbi4qPn5+YqKg0qK4dztYb
AF\nbGw7Ozt6+fKlNjc3JVXtbJH1+DPQVmdp09PTiZnBfFw1jixncnKy0jewuzpbHaEj0QPLMzljcUaD\nLcvthvSHp4652if1MjOcXTsbxnlDTq+zaOx1nAfLp6+whU5PT6c2uvqKx5X3juda6nlgybQgzES6\ncF5wrzqHQ7+xOoglDWu7y12733mRRd/E1n8TuVUAV2fgrFMD4zk5G1zdOf2u249W92uTq3jRQwfA\nEf6BVxRHAn8zuaKdDaM4g53J0+l0tLu7q729PW1sbOjly5d6+fKl9vb2JPU8eA4C0S4Y7VVudOez\nmOTPZMcbCfhFU4N7Pfu9C5+0PKN7HB3QAQzUwVw+qYOgOwK4LrF9h4eHFYdNp9NRu91OYAfAYR/1\nGMWYIoZN0lPjGC8nJycpBYxroBrjzaYt3ubx8fH0LHXe7DqJdrB4XhzPEZSGlRwADlKZr3Of66i1\ntwrgcjIMsPX7bBBAXqcNg8CTAR5jsGAFgNni4qKWlpYk9TylsDQ3jlPuCGZzeHiYVv+dnR3t7u5q\nY2NDz5490/b2diWWzSVXoYSMCZilMxoM7vz253FAc5BD3EgPgPm9PQbMjee0k+9ieXEYqJdK8nxS\nBzwHMakXjEv4iztlpAu2ND4+roODg0tlqQA793Lzvnhe2ux9wmeMh7Ozs2T7cxurh/HAuHneiYmJ\nSh07d0K4g2YYAIig5p/V2cxyYNXPCdHvvn6duntc1Z7YT249wCG51WgYcbWR//tdu59nKHeei1fn\nQHUDpDBgz8/Pa3V1VYuLi2o2m4nBwdxclfXwBmKu9vf3kzdUunAivHjxQs+fP9fe3l5SjXy19x2x\nPDSFSebGdk/zco8iQOcGe57VY99iv0vVyrjerw5szuL43mvCcW0vl8Q5sYpJjFfzwGX3aKJKAoAe\nwiH1UrukC5Y1Pj6eiobOz89XPN187qEmHrvn4Te8a9p1cHCg6enp9NvT9AgcbjQaCchRy+mrWOig\nrp9jhZPcnLouSbgpEcm1ua4dV5GfGYC7CQPr9yKHWSXqXlJ88W7XiZV1iWtbWVnR8vJyArtms5nO\n5XhYFG3D49ZqtbS3t6fd3V09f/5ckvTixQttb2+n0IPIgrCFuVqEasRnqFwwOUDZK/56cUyu72pN\nLnyBicdk99AHZ2x1oQ8xaDj2u7/bGFYBoHi9N9iag5x7M2F+7tV07+zY2JiOjo40OTmpVqul7e3t\nSqziwsJCJS+YfgSUsa+dn59XFpoY4D01NZXatL+/r06nUynhBKOTelWPi6K4lPvrYzWyrEELfTxu\nEBj6O6i7fu5+wx57XfmZAbh+ct0O6udkQOrySfntdhhncIAKALa0tKSVlZW06gNwrNSs2qzgRVEk\n1andbidVdHt7W8+fP9ezZ8/Sd8RfIR7cCoDBMNz24/+7KuoJ7DgOeNYoqJ65HMtoFHcQor0eZ5cL\nf6h7NxEMPdUHQKX9HnZCLTiY3eHh4aXUNYAuBvI6aBZFoYODgxQ0jQpLmA/vV+oxYY9ZdNWShQQG\nPT09nQBudnZWe3t7qW2Tk5M6PDys2FFhdIDcMMn7uXp1uWOH1WZyxwxiXL5A9gPPm6iqo121RjKS\nkXxi5dYxuOvo+bnjIn2OK9GgEBCkzj7Hsc7g3Gg/NTWV7G2StLy8rKWlJS0sLCQbjnslZ2dnk8pS\nlmWFHWxtbanVamljY0OPHz/W5uZmYh0Y22kb6m3MgnB1lHvCGPCIEkyMauWhLrC0HBsjHCPa2qJt\nyFVSb3Md88s5J/wd5QqX8jnHuVpdlmV6P16hF3UPNoex3yurkKeLo4f+gB0eHh5qd3dXrVZLzWYz\nhfxIPQ/s3NxcMgN42Ad2Q7eTesHS6enpxOI6nU7FgYHnFaeKq6ruwBmGieX6+bq270H3GlZucn/p\nFgHcdR0IdecPQ4/9uH7G0DqKDMC5pxTQajabyd4mXaionlPq9ds4l8FJ6MfW1pYkaXNzU8+fP9fm\n5mayt0XbEPmXXJ8MCa8jx487LzyX1euw+fN7LFrceAWVzW1ftMuTyLkO4vmnDnb+ThzgvE1eegl1\n1EGtLh+W79xWildTulgM8LQeHBwk+5ekSs6ue2ljxebDw0N1Oh21Wq30DhYXF7W8vFwpmOAVm8kA\nwYbqdlDf1rHb7Wp6elr7+/vpXECPijH+vJ4F4Z/T1tx4vqpx39XMYa8Xj7nJ94Pk1gBcPxnG0zPs\nuXWrWT8DbB1LcHub1LN9NZtNLS8vJ5ubpGSA9kRvSoxzj7OzM3W7XbVaLb18+TLZ2Z4+fZpSrqK9\njVXfgQ3GJqkS0kAKkge2et6lB7BK1aKWMWjWt+WL8WVStchjLABA/yF1yfQ50PLvnMnFEJVYa4/P\nYXXO7nh/p6enlRS6ubm5SsK8l5QC7Ny+R9+RWM+5e3t7arVaWlpaSkyesSCpUscvgpLHJvK+Yjzi\nxMREaldRFJUAY2fe/cZ4zjGR87xeRYY9bxggvK7cOoBz1fE6hk3/vI6VDcPWBp3vwa0AxuzsbAK3\n5eXlFAoiKe2b4Gqi52cyIVqtll68eKEnT54kT+nOzo7a7XaFNbiHjhAUQMwLYsIYYHjRu+dJ3Bi+\n3etIoLDvJSopsTZnapHhuXoU+zOnRjnTc+dD/NtNA1GN8sKh/O/hGp5BwQSO+bV8Pjk5mVgY6ivV\nWGI+aSwM4Oor/bi/v6/d3d2KJ126YHhzc3PpHcHmGG+UwvJMmOic8uopXsLJVelhAoX7mWOiWaeO\nuV1VPi5wk24hwPUDLCTX+XXnuoctXi963/i8372knm2KarIesLmwsFBZpR3g8KzOzs6mwYGndG9v\nT3t7e3r27JkePXqkjY0N7exc1AntdDqV7e/cS+qABlNzYHUVx9OqpCpDIzvCC1t66W5+PN4M1ume\nOwcLZx3Oqjw4Fw+gF6DkOgCG2/K8Ki7vwc+L6qr/76EuDpTOon18xDpz9DVAtbe3l1ial3wnxMTH\nHQvE4eFhKn6A+WJ/fz+l6BHUTZtoi7O4uKkQKis/xEgeHBwk1dXtqLwDH+P89rkUzTc5jefjZF8f\nxfVvHcANI1dVUXMgNazxMr5gZw8Yg1E1qAbSbDZTySMYgOeSEgx6fHycHAkvX77U8+fPkxOh1WpV\nIvAZkAxurovq60Uxm81mdoXPOQo89YmwCFgaAARb876PK3pUnWCUpJw5Czs5OVGn00mGc4/+p80w\nXNgPjgBiwgjUdXWd9xpV3ljhxN/fxMREel5X1XNqLsdT2mppaSmlxO3t7Wl/fz8BXa7eG32ACsu5\nOzs7Wl5e1urqaloYWTRhb26fc+CFadI2j68D0A8ODtLYjSp1XZiGC+Mmp+XEseCf+7H9ru1M8KNw\nTLjceoAb5oHrGJ3/X8fKBnVsjr3B3AA31EGPcyOv1DMZPMf06OhIu7u7evHihSTp8ePHevz4sba2\ntioTX+qpbETSRxWUINMYaCr1JgjqZGRhHu3vjE3qR
fafW/0z1PFo5Eed881weNYIrETmA6ZlebE3\nAs/UbDaTIZ6FAK8xwNhqtRKzdfGy6nUeRGfgXjzUGR6sM+bWMpYwBxDoi3lhb28v2UrpZ2xrDr44\nk2hzp9PR/v6+2u12xTnF+2RhpR3uoPCFkx+ptxcH53rf0IY4d4ZxMgw7d67C8urU4ZvKKA5uJCMZ\nySdWbj2Dq5N+jK3fsfxfZ7fjuxzldrvM1NRU8lC6p3RmZibZU7B/Sb2t/xqNRmIhz58/1wcffCBJ\nevToUQoBOTg4uORxxI7mRTElpYR9VEK3eSGoSNjXYiHGWIHDjePEx0XV120+vvcBzwtD8tXeGQ2J\n7aht9CfPhIrKee5xJLsiZ0/yjV78h/s6k4q2Mg8hgZXynWciuCPCs0KazWayr+3s7FTsc9wzFvBE\nKJCAtxY72tLSUtIKeO7x8fFKOI7Hz7mX3J8FDzDP4ylp/TIfhpF43lXYmqvK8bO6468itxrghrGT\n1XXETXT5ug6ONcTwmmLQn5ubS2BD+o3HnJVlmdSrFy9e6IMPPtCjR48kXdhh9vb2KiqkTy4PP2HQ\nS6rEVhGo65M/Jp77bu2EOnjupXtRAXScJ0tLS2lCY3RHDY02K/ec0n8eQhKP9Y108DL7Ma52cQ3s\ncG6LA6TitCHCAAAgAElEQVS4Huob9wHMvQKvB9x6LBnXkXr17zx9LabpcS/6q9VqSbooG089Pg+d\ncfWa7zgWgAOYeO8eWsSYpA0scN5m1FTGLqDLM+KEyNkuo+Rsb/E7l6jK8lnOG9vvnsMcVye3GuCu\nan/LvYAcQ6s7P3d/BoHUS4iPnksAjvAQ9k/wqhATExPa39/X1taWNjY29P7776f4NknJQE173OhO\nXBb5rB5fR/AugABAxQRz7GxeO80rj8T8SJ7fgdwDiL3YpYdlxPO9/9y7iX0M4zV2Ta7tpdDd243n\n9ujoSLOzs+p2u5X7AkQw3bm5uWRjg7WRoRBtju4ppj+w/Y2NjaVEd0AO+xb3jRvdMC5WVlbUarXU\nbrfTBtIerIwNkgXm+Pg4edAJL+l0Ojo6OqosoIxJbLS0M2bWeAC32yHJaS2KouJI8nmRs6PlvvPf\nUQY5MAYB100cELcK4K76AJFl5c7PAVj0rMZQkqjCMhAZuAAcIRmxppuHZjC5KHW0ubmp999/X0+e\nPNH29nbyoh4dHVWM4o1GI3nSqB23srKitbW1pAJLvUqzY2NjFTYDiDGZfUtAD7nwPQxYwf15UT25\nj9eDo539DNB1RmtCRc7OzmoLbUqXix146Sa8w15WaXx8vMJ0Z2ZmKgyS++HocY8k3m3a6Psq+E73\nnsgfg7w9K4Q2oeKTodJutyubaAOmR0dHCfh4R8TaHRwcqNPpaHV1VWtra8kJgSoPEzs9PU2ebNRZ\nZ9j+/mKqG4sHbfL31W8e+Xv3c+I8i+fmrpO77icmTCTn8fK/BwFgrrMHrSrOJqRqhDxAx0AF3PAW\nEgqCPYxdmggpoDqrpGRz+/DDDyv5pK6mcD8YiNvZ1tfXtba2ljyluRQvgM335HS2EsvpMOCZzLmB\n7B5cj1UDVGBgLBIOEP6ZAygsCptfrhST2xBjnJtPWGfXUnWT7Pn5+Qpo4UkkFzSXNeAeVEBY6jEp\n70vsiLyHk5OTClg7q8LrTnbE/v5+ChMh9Q4W5wHVsGo2DALoCCFaW1urLKoejuOM0st4MdacFcc0\nLw9z6QcwOTV1kErp3/dTd+tIy1XkRgBXFMUHktqSziSdlmX5jaIoViT9XUlv6GJXrV8uh9zZ/qYP\n0w8I+6mqdR3JCuj2H1TC+fl5raysJDVIUlLZ3LmACrWxsaH33ntPH374oTY3N1MslAvAGJnh6uqq\n1tfXU4wdtjZJFWM5YIbdRlLFNuXhEQjno7KcnJxU1E0HEK/pFg37fOeDGnDzkkqSUpkignzduE+/\nx8kRFx1/Ft6T1CssSj9F1cqDjWMxgEajkcJ5AAPOBeAIyPawGknJBOAg6Q6mWCuQdkpKO6HB5l3l\nR53Fdvry5ctKEYBOp6O1tTWtrKwk+5zbUD2X15P0PdZvbGxM3W43qccIIJdjZzmpIxb9zECDQLGf\nbW8Y+SjCRP6dsiy/XpblN179/6uS/kVZlp+T9C9e/T+SkYxkJD91+ThU1D8q6Q+9+vtvSvqXkv78\nR3mD6J27qvSzD/i13UgrqbIbFnYx92qxQhOYic1Nkt555x29//772t7eTh61qBpj0Ie9ra2tSVIq\ncU6ppZgVcHJykkr8YLNxlcMN4O74QOU8PDxM6lI0NMeEecQ3dPGA3pyqL1VLJ8FG6ANXOzk3qrv+\nfpy98TdsCfOBe089et+fB4dL9FYT4uNMl4KWBHfv7u5W9r7wKiOxTSTTuycWlij1SsfPzc2lDBb6\nrtFoVLzP5+fnSV2VekU6j4+PtbKyomazeSk4mXtEh1m0dfk7wybsdrnr2sL62epyn/VjjFe1yd0U\n4EpJ/7woijNJ/3NZlt+RdLfsbfT8XNLd3IlFUXxb0rdrL1zmc0jjMa+ulbt+7ecOLLnvpZ59ikhx\nqVfXi6oQVO6IpadR9168eKF33nlHkvTBBx8k9cJzN7kf5xKOgTFZuvDOut3N7WnYZQA4Qie4PrYf\ngBcQlXohF3gHATy3CxI64fFSfM4k8zxX38+Bd4Bnl+vSXs9icMcC79bVWheAivfo4TiU/fbncfXH\nYwHdaUCbCbPgeTj3/Pw8qXjRtkYf+p4OHm/ITve002MIaTM1Aufn51P1ZkmpfQAd/ej1ACllfnh4\nqJWVlco+H2SMnJ2dXbILup00zpeiKCppev3mTE7qPK3+XU4tHeRxvarcFOB+qSzLJ0VR3JH0z4qi\n+LF/WZZlWRRFtldegeF3JCl3zFUfpk7/r/Pu+HfR4Cr1AM6T6QkJ4YdULCYIeYLHx8fa3NzUu+++\nq5/85CeSLmq6waz8pXvSPMC5urqq1dXV5ClbWFhIx8XQBsrkeH6mVziJOYo+sWZmZlL8Fgzs9PQ0\nxWABJDFejO9OTk5SiR6YDfbIiYmJSgDp6elppXwQwczujfQFLTomELedeQ6pl2hnIueYQQwVYY8F\nro1Tyb3E3IPz6UPiAOlL2s02gIAQDKvRaKT7xYR5roedl34kJc3Bk+tIF+x0d3c3vaNut6t79+6l\nMQxQU/sOmZ2dzSbce1/RJywMOUdAndTZw+tsbHXHXeWeObkRwJVl+eTV742iKP6+pG9JelEUxWtl\nWT4riuI1SRs3uccgyYHZIBU0/o6TwVUMn7R45zD2E/DpA/X4+Fjb29t6//339ZOf/CSVPEIt9SBe\nVBVJCdwwGFMgUepNHlgBbE26YA7usPCsB0kV1oFa5+WS/P+zs7OKeouX0/cgjWEiTKqiKDQ/P5+e\nz8M8UAcxoqPekaEQY8qkfLUL2uhZFzg4PKDa48toK33B3wACJYXiGAHcYviQ//a+83ANiigQy7a/\nv5/UecDZ
...<remainder of base64 PNG data omitted: grayscale rendering of the T1 slice>\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAATgAAAD8CAYAAADjcbh8AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAESxJREFUeJzt3V2sZWV9x/HvryhcKIlQLJkCFiYZTcA0o07wRgl9UyRN\ngV5QSC9oNR1NLNGLpgFNKmnTpC/iVVMNRiIay0tqVWLaWCBGelFfBoPjACIDYpnpOFPBiLRGBf69\n2OvoZjhnzj775ey1nv39JCdn7We/rOfZa+3ffp71sleqCklq0S8tuwKStCgGnKRmGXCSmmXASWqW\nASepWQacpGYtLOCSXJLk4SQHk1y3qPlI0kayiOPgkpwEfBv4HeAQ8DXg6qp6cO4zk6QNLKoHdyFw\nsKoeq6qfArcBly1oXpK0rpcs6HXPAp4Yu30IeONGD07i6RSSJlZVmeRxiwq4TSXZC+xd1vwltW9R\nAXcYOGfs9tld2c9V1U3ATWAPTtJiLGob3NeAXUnOS3IycBVw54LmJUnrWkgPrqqeTfKnwBeAk4Cb\nq+qBRcxLkjaykMNEtlwJh6iStmDSnQyeySCpWQacpGYZcJKaZcBJapYBJ6lZBpykZhlwkpplwElq\nlgEnqVkGnKRmGXCSmmXASWqWASepWQacpGYZcJKaZcBJapYBJ6lZBpykZhlwkpplwElq1tQBl+Sc\nJF9M8mCSB5K8pyu/IcnhJPd3f5fOr7qSNLmpr6qVZAewo6q+nuRU4D7gcuBK4Jmq+uAWXsurakma\n2KRX1Zr6uqhVdQQ40k3/KMlDwFnTvp4kzdtctsElORd4HfCVrujaJPuT3JzktHnMQ5K2auaAS/Jy\n4NPAe6vqaeDDwE5gN6Me3o0bPG9vkn1J9s1aB0laz0xXtk/yUuDzwBeq6kPr3H8u8Pmqeu0mr+M2\nOEkTW/iV7ZME+Bjw0Hi4dTsf1lwBHJh2HpI0i1n2or4J+A/gm8DzXfH7gKsZDU8LeBx4Z7dD4kSv\nZQ9O0sQm7cHNNESdFwNO0lYsfIgqSX1nwElqlgEnqVkGnKRmGXCSmmXASWqWASepWQacpGYZcJKa\nZcBJatbUP3gpbWbGX6qZY020qgw4bcl2nbu80XwMPm2FAaeJ9OFHGeCF9TDstBm3wemEqqo34Xa8\nvtZL/WHASWqWQ1RtaAg9pBl+sHXONVEfGXB6kSEE26zWa6Oh1x4DTj+3CsF2Ise338AbPgNuha16\noG3GwBs+A25FGGazW3sPDbrhmCngkjwO/Ah4Dni2qvYkOR24HTiX0VW1rqyqH8xWTU3LYJs/g244\n5nGYyG9U1e6q2tPdvg64p6p2Afd0tyVp2y3iOLjLgFu66VuAyxcwD22izwfotmLtPfZ97q9ZA66A\nu5Pcl2RvV3bm2IWevwecud4Tk+xNsi/JvhnroOP4gdt+hl0/zXTh5yRnVdXhJL8C3AVcC9xZVa8Y\ne8wPquq0TV7HtWIO/HD1i9voFmdbLvxcVYe7/8eAzwAXAkeT7ADo/h+bZR6ajOHWP/bolm/qgEvy\nsiSnrk0DbwEOAHcC13QPuwb43KyV1Mb8EPWfy2h5ph6iJtnJqNcGo8NN/qmq/jrJLwN3AK8Cvsvo\nMJGnNnktl/4W+YEZLoeus5t0iDrTNrh5MeC2pg/LTLMz6Ka3LdvgJKnPPFVrYOy9tcNfJ148e3AD\nYri1yx0Ri2EPbgBc8VeH57nOlwHXYwbb6nL4Oh8OUXvI4YrGuS5Mz4DrGVdmrcf1YjoGnKRmGXA9\n4re0TsRNF1tnwEkDY8hNzoDrCVdabYW9ucl4mMiSuZJqFh43d2L24KQG+EW5PgNuiVwpNU+uTy9m\nwEkNMeReyICT1CwDbkn8ptWiuIf1Fww4qVGGnAEnNW3VQ86AW4JVX+m0vVZ5fZv6QN8krwFuHyva\nCfwF8ArgT4D/6crfV1X/OnUNJWlKc7mqVpKTgMPAG4E/Bp6pqg9u4fkr8xWzyt+mWq6WznbY7qtq\n/RbwaFV9d06vJ0kzm1fAXQXcOnb72iT7k9yc5LT1npBkb5J9SfbNqQ69Z+9Ny7SK69/MQ9QkJwP/\nDVxQVUeTnAl8Hyjgr4AdVfX2TV6j+Xd+FVcu9dfQh6vbOUR9G/D1qjrazfhoVT1XVc8DHwUunMM8\nBsuDLtVHq7JOziPgrmZseJpkx9h9VwAH5jCPQVqVlUjqq5mGqEleBvwXsLOqftiVfRLYzWiI+jjw\nzqo6ssnrNJcEhpuGYKhD1UmHqHM5TGRWrQVcH95TaVJDDLntPkxE0kC1/IVswElqlgE3Zy1/G0pD\nY8DNkeGmoWp13TXgJDXLgJsDD+ZVC1pchw04Sc0y4CQ1y4CT1CwDbkYtbrfQ6mpte7IBN4OWVgSp\nRQacpBdp5cvbgJPULANuSq18w0kbaWEdN+AkNcuAm0IL32zSJIa+rhtwkpplwElqlgG3RUPvskur\nZNOA6y7efCzJgbGy05PcleSR7v9pY/ddn+RgkoeTvHVRFZe0PYb8pT5JD+7jwCXHlV0H3FNVu4B7\nutskOZ/RVe4v6J7zj0lOmlttl2zIC1paRZsGXFXdCzx1XPFlwC3d9C3A5WPlt1XVT6rqO8BBVvzC\nz5KWZ9ptcGeOXev0e8CZ3fRZwBNjjzvUlQ1aaycgS6viJbO+QFXVNNc1TbIX2Dvr/CUtXlUN8vqp\n0/bgjibZAdD9P9aVHwbOGXvc2V3Zi1TVTVW1p6r2TFkHSTqhaQPuTuCabvoa4HNj5VclOSXJecAu\n4KuzVVGSprPpEDXJrcDFwBlJDgEfAP4GuCPJO4DvAlcCVNUDSe4AHgSeBd5dVc8tqO6SdELpw8bz\nabbhbZc+vD9SH/RpG1xVTVQZz2SQ1CwDTlKzDDhJzTLgJDXLgJPULANO0kSGeMqiASepWQacpGYZ\ncJKaZcBJapYBt4k+nZ4i9cGQdjQYcJKaZcBJ2pIhjWoMOEnNMuAmkGRQ31qSRgw4Sc0y4CQ1y4CT\n1CwDTlKzDDhJzdo04JLcnORYkgNjZX+f5FtJ9if5TJJXdOXnJvlxkvu7v48ssvLbzT2pUntnMnwc\nuOS4sruA11bVrwPfBq4fu+/Rqtrd/b1rPtWUpK3bNOCq6l7gqePK/r2qnu1ufpnRFewlqVfmsQ3u\n7cC/jd0+rxuefinJm+fw+pJ6ZEibaja9sv2JJHk/oyvYf6orOgK8qqqeTPIG4LNJLqiqp9d57l5g\n7yzzl6QTmboHl+SPgN8F/rC6rY5V9ZOqerKbvg94FHj1es+vqpuqak9V7Zm2DsswpG8vadVNFXBJ\nLgH+HPi9qvq/sfJXJjmpm94J7AIem0dFJWmrNh2iJrkVuBg4I8kh4AOM9pqeAtzV9Wi+3O0xvQj4\nyyQ/A54H3lVVT637wpK0YOnDMS1Jll+JLerD+yYtQx8201TVRJWYaSeDpNXRh2DbKk/VmpK/ESf1\nnwE3I0NO6i+HqHNwfMi5fU6tGeoXu
T04Sc2yB7cA49929ub6aaMeicurLQbcghl2izXvoVMSl9Nx\nhjo8BYeo22rIK0ofLer9dDm1w4DTIC06hAy5kaG/DwbcNhv6CtMH2/UervqyaqH9BpykZhlw0gms\n6hkrrbTZgJPULANOg7OMwzha6dFsprUeqwEnTailD/56WmyfB/puMw8iHbZWzztuMdzAgJNmMvQz\nVVoNtjUG3DYa4gdAk1sLiz4v59YD7XgG3Dbp80qv+erTMHbVAu147mSQ1KxNAy7JzUmOJTkwVnZD\nksPdFezvT3Lp2H3XJzmY5OEkb11UxYeiquy9rbi1Qy+2ozc1Pq9V773BBFfVSnIR8Azwiap6bVd2\nA/BMVX3wuMeeD9wKXAj8KnA38Oqqem6TeTSbAIbbYrT24d3qetJa+7dq0qtqbdqDq6p7gUmvbXoZ\ncFt3hfvvAAcZhZ2kEzi+57XZnyYzyza4a5Ps74awp3VlZwFPjD3mUFcmzY0fcE1q2oD7MLAT2A0c\nAW7c6gsk2ZtkX5J9U9ZBkk5oqoCrqqNV9VxVPQ98lF8MQw8D54w99OyubL3XuKmq9lTVnmnqIEmb\nmSrgkuwYu3kFsLaH9U7gqiSnJDkP2AV8dbYqStJ0Nj3QN8mtwMXAGUkOAR8ALk6yGyjgceCdAFX1\nQJI7gAeBZ4F3b7YHVZIWZdPDRLalEg0fJrKmD+9zC9zBIJjjYSKSNFQG3Dax5zEbj//SNAy4beQH\nVNpeBtw2M+Sk7WPALYHDra3xvdK0DDhJzfIHL5do6D93vR3svWkW9uB6wg+yNH8GXI+4be6FfC80\nKwOuh/xg+x5oPtwG11OrvH3OcNO8GHADMITL0c2DwaZ5M+AGpNWgM9i0KG6Dk9QsA26AWunxuNdY\ni+YQdaCGOFw1zLTd7MEN3FBCYyj1VFsMOEnNMuC0cPbetCwGXAMMEGl9mwZcd+X6Y0kOjJXdnuT+\n7u/xJPd35ecm+fHYfR9ZZOUl6UQm2Yv6ceAfgE+sFVTVH6xNJ7kR+OHY4x+tqt3zqqCGzd6llmnT\ngKuqe5Ocu959Ga29VwK/Od9qSdLsZt0G92bgaFU9MlZ2Xjc8/VKSN2/0xCR7k+xLsm/GOoh+9pT6\nWCetllkP9L0auHXs9hHgVVX1ZJI3AJ9NckFVPX38E6vqJuAmWI0LP2+HJIM68FdatKl7cEleAvw+\ncPtaWVX9pKqe7KbvAx4FXj1rJTW5vvSa+lIPrbZZhqi/DXyrqg6tFSR5ZZKTuumdwC7gsdmqqK0y\nXKSRSQ4TuRX4T+A1SQ4leUd311W8cHgKcBGwvzts5J+Bd1XVU/OssCazdiL7dp/Q7gn06pP0YZuN\n2+AWb7uWs+Gm7VBVE61o/prIiljln0DX6vJUrRW0qGGkvTf1jQG3wuYZSIab+siAk9QsA04zs/em\nvjLgVtys4WS4qc8MOEnNMuA0NXtv6jsDTlMFleGmIfBAXwGTXYbQUNPQ9CXgvg/8b/e/ZWfQ8zbO\nKcR63845WIU2Qj/b+WuTPrAX56ICJNlXVXuWXY9FWoU2wmq0cxXaCMNvp9vgJDXLgJPUrD4F3E3L\nrsA2WIU2wmq0cxXaCANvZ2+2wUnSvPWpBydJc7X0gEtySZKHkxxMct2y6zNPSR5P8s3uMor7urLT\nk9yV5JHu/2nLrudWJbk5ybEkB8bKNmxXkuu75ftwkrcup9Zbs0Ebb0hyuFue9ye5dOy+IbbxnCRf\nTPJgkgeSvKcrb2dZVtXS/oCTGF15aydwMvAN4Pxl1mnO7XscOOO4sr8DruumrwP+dtn1nKJdFwGv\nBw5s1i7g/G65ngKc1y3vk5bdhinbeAPwZ+s8dqht3AG8vps+Ffh215ZmluWye3AXAger6rGq+ilw\nG3DZkuu0aJcBt3TTtwCXL7EuU6mqe4HjLya0UbsuA26r0SUlvwMcZLTce22DNm5kqG08UlVf76Z/\nBDwEnEVDy3LZAXcW8MTY7UNdWSsKuDvJfUn2dmVnVtWRbvp7wJnLqdrcbdSu1pbxtUn2d0PYtaHb\n4NuY5FzgdcBXaGhZLjvgWvemqtoNvA14d5KLxu+sUb+/ud3YrbYL+DCjzSm7gSPAjcutznwkeTnw\naeC9VfX0+H1DX5bLDrjDwDljt8/uyppQVYe7/8eAzzDqzh9NsgOg+39seTWcq43a1cwyrqqjVfVc\nVT0PfJRfDM8G28YkL2UUbp+qqn/piptZlssOuK8Bu5Kcl+RkRheTvnPJdZqLJC9LcuraNPAW4ACj\n9l3TPewa4HPLqeHcbdSuO4GrkpyS5DxgF/DVJdRvZmsf+s4VjJYnDLSNGf2ywseAh6rqQ2N3tbMs\nl72XA7iU0d6bR4H3L7s+c2zXTkZ7nL4BPLDWNuCXgXuAR4C7gdOXXdcp2nYroyHazxhth3nHidoF\nvL9bvg8Db1t2/Wdo4yeBbwL7GX3Ydwy8jW9iNPzcD9zf/V3a0rL0TAZJzVr2EFWSFsaAk9QsA05S\nsww4Sc0y4CQ1y4CT1CwDTlKzDDhJzfp/b6+399Ef30IAAAAASUVORK5CYII=\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "# grab a 2D slice from the data\n",
+ "t1_slice = np.expand_dims(t1[100,:,:],0)\n",
+ "mask_slice = np.expand_dims(mask[100,:,:],0)\n",
+ "plt.imshow(t1_slice[0].T[::-1], cmap='gray')\n",
+ "plt.show()\n",
+ "plt.imshow(mask_slice[0].T[::-1],cmap='gray')\n",
+ "plt.show()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Ok, now let's do a random `Affine` transform and show how it correctly performs the same transform on both images:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 27,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [],
+ "source": [
+ "from torchsample.transforms import RandomAffine\n",
+ "\n",
+ "t1_slice, mask_slice = TypeCast('float')(*ToTensor()(t1_slice, mask_slice))\n",
+ "\n",
+ "tform = RandomAffine(rotation_range=30, translation_range=0.2, zoom_range=(0.8,1.2))\n",
+ "t1_slice_tform, mask_slice_tform = tform(t1_slice, mask_slice)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 31,
+ "metadata": {
+ "collapsed": false
+ },
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAATgAAAD8CAYAAADjcbh8AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvV2odN2W3/Ubc861qmrv/Tzvx+n0yUl3QxuIghFs4dC5\n0ItIE1ERQm6a9EUMGjy50GggF93mQkUJBInJjSB2MBjBfIEJaaQxJMEgQqKJIWo+jLRJN+lO55w+\nfd73efbeVbU+5hxejDnXWlW7qnbtj2c/9TxvDdjsvdfHXKtWrfVfY47xH/8hqsrZzna2s32M5t73\nCZztbGc727uyM8Cd7Wxn+2jtDHBnO9vZPlo7A9zZzna2j9bOAHe2s53to7UzwJ3tbGf7aO2dAZyI\n/Ksi8vdF5OdE5Kfe1XHOdraznW2fybvgwYmIB/5f4LcAvwj8deAnVPXvPvvBzna2s51tj70rD+5H\ngZ9T1X+gqi3wp4Df+o6Odbazne1sOy28o3F/APhHk/9/EfhN+zauZaZzLo8fXfb+s3uJ3N3miJ0O\nHWL/jkcc6oEbfmBWZgTbn286U5Ct7bb32bftB2a7TnvXRzxpU/bfq+/nQ6ziNW1aH/UAvSuAu9dE\n5FvAtwDmXPCb5MfKih0bu/xrG0yyA7q1XKZjuC0ndcc62T6mk71jb+y/tc2dcba333dOu2wfKJeQ\nwiHQnoYdUjp8DXbt89yW0uP2c+7x++6yY67dffsevfnd7cv9cVRYKE222b4H0wO/q+n+D9lX03iP\n71v/UFO9e/0feG3/6pd/9uht39UU9ZeAH5r8/4N52WCq+tOq+k1V/WbFzBY+EdxEZD+4iTwd3PYc\n/85xp8fctpcCt5TeD7htX+djAWr7PI+5Tsea6uPBbbrvE+wrD27lOj4R3B5q7wrg/jrwG0TknxKR\nGvjtwM8c3OMAuB1jd4FqC9zu28fJ5s2wC9zK9lvgdrS9JLg5937AbQoIh8Btem77PLXn8N6mn+8x\n4PaoQ27u9yBwK7Z9Pz7VCrjdN66mdwNu2y++svwd2zuZoqpqLyL/HvAXAA/8MVX9O+/iWGc729nO\nts/eWQxOVX8W+NlHDzB5g9w3PT3ove0y5+56b9Oxj4y7HfTeHnpOu/Ypdsybbtt7ex+27zy3PbTp\n/7u8t+eKvb1gzG3c7YleSdL9HtZ9U0wnu6e3U+/tvjHeRcztIcuf2d5bkmGvbV1g2QafYrvAbdeD\nfR/QlPEfkVTYa/ed0337FNu+Ce4DwEPg9r4SC9tgtm/drvVPsRMAt+m9eXTs7TnAbV/c7aHxu6k9\nBdzew9S02GkB3LsEt8n6O57XLq9te/87q/aseyi4HfsAHuPdHQtuz3WD7QLUYz21+/4/ZLuC1dvr\n4cMDt8es297mXSQVnhPcXthOC+AmthPctjKmhwc4AtymGdND+2+B67OB2yG7Lzh+7IP4VQO3p9gj\nrs2T6SCwG5y21x1jj01KvCS4vbCC+OkA3AO5bhv2Ely3vN3RdJBj7CkZ08eM/Zw31yFwm/5/aIq6\nvf4Yu++avKDn8JXguj3W3uO0dGqnA3AcD24HuW5TOxN5d+/zHFYoIeUc9tl03a5kwnPZc9BBpp/p\n3l2+AuD2GDoInAy4wVku6WxnO9tHbCflwd1nTybzwv1k3mHHI+gg77tSYdfxXoLMu+s87rOXyJhO\nz+8x+z7hOj3Ke3tqzO2+cQ+N9a7IvLuWv0c7GYB7EtdtD9C8c67bjmPeay/NdXsXlQrF7qtUOMR1\nu2//Y+09Z0wfnC0t9tiEwHNw3eDdJBWOXf6CdhoAN/2u76ODwIsReQ+f8wfCdXtOO4bIex/Xbd+y\np5zPV5HIe+a6HWWnAXDFnpHrtnfsfeB2DLA+9Jj79in2UHA7dLyXvsH2ZUnvo4NM9z1kZ67b/m0e\nm1Q4ZB8huMEpAdx9XLfHEnmP5brdWfVM4PYUIu+Z67Z//WPtfRXQf5W4bidkJwJw+SI9kcj7Trlu\n2/s81e67Qd4nuO1LYBwDbi/BdXvodk+wd0rkPQU6yIfouT3gMCcCcByeFn4VRSun9tJct12fbR+R\n99DfZayzaOVpgttT7H2B2wOv25kHd7azne2jtdPx4DjCe3sI1+0Rqrx7Bj98TnvHPnPdTq5S4Yl2\nUt7b++K6wQfjvcGJAJyQb56DWdAHct12/T8d50NR5T3Gvipct3J+j933Qbs9MakAj08I7LNjuW4F\nuD4WIu8TMsUnAXCDHRtr2vXgv2uu277j3rdPseci8p4C1237/zPXbdPOopXPZ9uf9YHnfzoAt+uB\nOAAwB6kgT00qbO9zykTed3mTnUUrH8d1OxN5n8eeCG5wKgBXLt6ueNshocqHTEl3HvYEuW5n0cpx\nPXx44PaYddvbnDod5KVteu4PzAifBsDBw4HomETCriqFvP1ZtJLTBben2COuzVm0ctc+B8DtJb23\nJ4AbPIEmIiI/JCL/s4j8XRH5OyLyH+Tl/4mI/JKI/K38868/9hhnO9vZzvYUe4oH1wO/T1X/poi8\nAv4PEfmLed0fUdU/9KhRtzyKg0Xz02V7PbLNhMOzVyk8F5n32LGf8+15rCrv9O+zKu+mncm8z2/b\n3tsTPtujAU5Vfxn45fz3tYj8PeAHHn0mxwIbHDE1PZBw2DzI0efzoP0/BK5bOcZDuW4fkSrvLvvo\nwO1D5bo9A7jBM1UyiMgPA/8C8L/lRb9HRP4vEfljIvLZA8cawW3ahVvc+FPW2Q6bsbbp+hxrm/5M\nDrT/JF6a6/Y+iLyPAbft8/zARSufxHU71Cn+uUQrD72cP7IO9CR9dnCDZwA4EbkC/gfg96rqW+C/\nAn498COYh/df7NnvWyLyN0Tkb7S6vgtAGxtvgpYlGLa+iGPljrb3KwAz/bn/Q+9e/lxct3dpBdiO\nAbfp+b2UaOV7IPJO770XE63c9f821+0QUE5f5ttWwO8hdkpcN3i2KfeTRhGRCgO3/15V/yyAqn5b\nVaOqJuCPAj+6a19V/WlV/aaqfrOW+eSMpiDmxhjaRvztwJS1eG3vCjz2eVfHeCCnwHU7ROSd/n0m\n8u635+S67Sq5OnPdns0eHYMTe+X9N8DfU9U/PFn+jRyfA/htwN++fzA2p6LFHliNsOGxHQKch4Df\ndCr3HETeQ8d/6RvsLFr54XHdDtmHBm4bx3saHWSfPSWL+i8CvwP4v0Xkb+Vlvx/4CRH5EUCBnwd+\n95POsNgx1Qj7HpTHenP3jbtv+6mduW771z/W3ld96VeJ6/YS9hiu20uVaqnq/wrsujI/+9gxz3a2\ns53tOe1EKhkm09NjY20Z/e9VAineyy4P6ZDHdWjcsu45vJRT4bqdVXkP2ymr8j7G3vfU9DEZ0w+2\nFhXugtv2OtickvoD8Tm4+2Af+uIeSlF4zE3wvjrQb5/DMevOqryjfYxct2IfErg90k4H4Ar1Y9v2\n6bbtrE89cKGmXte+dfu23Ze8KOuOu
THOopXPx3V7gp0UuH2VRSvfsedW7DQATtgk607tPlmjh4pC\n3vdgbYPoNgVkunw67n1j3bftc4PbdLyzaGU+7Aty3fbZc4lWPjap8L6L56f2jsENTgXgih1S/XjI\nQ/GYt9O+uNq+G+IxJUEvla06xHU7i1YeZ2fRyuezXVy3FwA3OCWAO4b6MQ1+HyOQed/yqe0r9zmm\nJOulOUMPsbNo5eO4bmfRyuexxxJ5n4nw+x5qhc52trOd7WXsRDy4/Ga5L5GwT8liX81psUPVBvvW\nHaKAHIrBHRr7kD3FEzx1Mu+H5r09Zt32NmdV3q3zeHdk3kN2IgDHYXA7BGRTO2bauCtjur3tFLyO\nAbn3aacKbk+xR1zXsyrvrn0OgNtLTk8fC26HQgVH2skA3FHJhO0v7CEgc4joex9NZNc+9233GHuM\n93cWrfxqcN0eax8KHWQbwJ8B3OBUAK58jkNk3e1p4S5wu2+qum33eSGH1k0zY1Oge64b6BB4T722\n6XZn0cq8+0cGbl8Frtu2PRM95zQADu4Ht4fsO4xxIB1/b4nXjn1lxw083e4YMH1oXG7XONNrcwy4\n3TdNvW//Y+25uG4PuEbPJlr5nHYskfdjbtD8QkTe++x0AG6fHTsl3PWlFjc3TmMA99A+yjbHvoEP\nkjUPrLvvJtw3Xd0HkvuA6qWJvO8poXBvTfI+O3PdnsfeE5H3Pjs9gNvlud1Hvj10c6c94LfP3lXM\n6KHThcdWSOyLyX2FiLxnrtuO5e/S3jPX7ZCdeXBnO9vZPlo7EQ9OjitG37nrkdvdFwB+CZ7Q1BM9\nxL97TtuXMd1ets/OXLf925xVebeO9364bofsRABuR+zp2C9n+uUeo84Au+MlU5tOX56SZTs0lT4U\ng3uuZMTUpi+QD5TrZrs9IakAXy2u20vYCXDdDtmJANzEdt2oD4m5wfEXzO/5IqaLD421a92ht+hD\nAPEYr+nQ+kMvimM9uI+J6waHX2gPsTPXzeyp0kfvGNzgZADuyAd7l009rekNfMw4+zyfY7zIh06h\nRUCewPPaAMo9y/cdf982T0kyvODU9CtB5D1z3d6JnQjATWz6xR3z8JS3wF414C2wOoZTV0DBOXRr\nXNm6qdXJnWXb64f9jp2i7juv4e8dnLGDPTSP9OaO8eyei+v2BDspcHtfopXvi+sGT/fcXtCeBHAi\n8vPANRCBXlW/KSKfA38a+GGsq9aPq+oXRwz2sIMf6t2wizR6AFx0e6wDZWPq746p/sA55ptB/eT/\nY/hKe7a5A5TDudyzb9lv17qHxjvhxcDtLFq5vc97BLeT4bod/1mfIyDwL6vqj6jqN/P/PwX8ZVX9\nDcBfzv+f7WxnO9uL27uYov5W4Dfnv/848FeAnzx670NlV/tsFxF4myQ8eFtu93a7jrPDk1MRZPK2\n1B3nK6rDcnFb2/pxm+H8pp9jl6VxW5XdHtidafL2+L54b3u8Du93Z16n09XHJhPeAZn3KHuXqrzH\njPExVyq8oCrvwfO4x54KcAr8JRGJwH+tqj8NfH3S2f6fAF/ftaOIfAv4FsDcv3p6tcG0RnMbzKZA\nVhIQIgY4bsd4ZR1sZVRdPpSO09o9D9AAcAdAS5TNmzOOf4vqXZBKW8vLuqTj1HkXcDKJHaqymaWA\nAfTKFNb7cdt9iYhjKSTvk+v2LisVnmIfGtftVCoVHnH9nwpw/5Kq/pKIfD/wF0Xk/5muVFUV2eVy\nQAbDnwb4ZPZ12+aQl7aPKjIFs+my/L95TZte2QBszqFixx0AS/LfGdhUpusYt5cd50Ren7dFuBsu\nUAuX3fG4kiLJlouqYVC+eSVp3k+RqBtANvwfdQS/DHrjMSeen+pAj5E02d6xv6xtV/Lh1MHtMeu2\ntzmLVm6dx3si8j7hxfIkgFPVX8q/vyMifw74UeDbIvINVf1lEfkG8J2jBjsG3GQbhPJFLmBW/g8O\n9bIJYDIBqezFaRCSF9RPQMxBCjJsp24ELdvP/lfHBrtluCYybnPnYygGVBnMhmVg/6viIhMwK+sy\nwBUAjOOUVFIGt6hITLa+T+NDsQ18MCY+3LhckoKbgOD0dwHC6TR2CnbPRAg+i1bu2uc9em7w/om8\nT/SaHw1wInIJOFW9zn//K8B/CvwM8DuBP5h///kjB9y/XGTTQ5sCms+/gxuByDtwQqrcAFKpytNL\nL2hgALY0/J33dfnHl79tfzsXhr/LdsDE25tstwfgVEBUJoCV18XJTwY310/WJXAxe3lxApAZ9Fxn\nIOf6DHZ9yvvuAL48FZ7GAbV4faoDeA0enky8wunLZhfYFTtz3Z7PTgXcXlq08hlCAk/x4L4O/Ll8\nEwbgT6jq/yQifx34MyLyu4BfAH78/qF20DKmoOadeWMhg1QGtBQMyDQIqXhtZMAKQqyFVGUQC2Ud\npNr+T9UUyOyw5X8cJK+o100AE2zZ5P9y+uq2vpDt77asVjHPbQpwSYbQmIEYuJjjeH3+vxdcZ8An\nfZ7V9+VHbXkC1xng2VgZ+DrFdQmJCdfZgTSlfA4Z+LZifBrTAHiDh7crxhd3gN1DS+627KMDtzOR\n93HHf6I9GuBU9R8A//yO5b8K/NiDByyk2mGa6UfPLP+kDHCp9hnUMoBVQqwM0ABShYFbncHMg+ZP\nmgJo0AxeDACmIT/UXg2YXH6gy9/ZJP8v5YcRt7yYt6Mpx7gm4UdVGcNxKsN9k2IG7VQAAQOMJMPA\nEg38pBckCi6Ca/O6DHq+EQO6TnGtDN6f6xXfZQCceHkArk1In3B9slheSubdlYcouAEAVRX6iBRt\nvemUtyRzph7eNuAdeDDPopXb2++Z8r90QuEDIPLeZ2e5pLOd7WwfrZ1GqZYTdF4PHhtgHlvt848z\nby17aLF2pAD9TIgziHObbqbKhksVpFptalplCsWGh6YQFPGK+ITzOnhb4hTvcwyqLCv5B5dQFZwo\n3iVEdINR4J3iREkqaA7ClTFUZdhHVYZtYpJhfZ+cLYuOGMdAXkqOFIUUBWLeJ+8nUaCXPG3NU9jW\nfgP4VvBN8ezAdzquy1NZ6RXfZk+uS2MCoy8eXaaKeD96ZDHZ1LVMY7VkULZidLv4dXu8urMqL/u9\ntK9SpcK909Pj75PTADgR0uVsADPIIFY74symngXMAOJMSDOIFaSZGphV9gOglYJXqBLidQPAnE+4\nDGLOJbzoAFxgIOVdwrvdX5ov44hS+YiXtAFmLv8ApK1MQ0yOhAEZjKAG0CdH/ngD+JVzjklIydEn\nR997UpJhapuioAUQewM+iYL0+Zy6PKVtCtDZbwDfGOiFRnGdM8Br705hbVqb8hR2QhlJ+XcfkT6n\nfDcexC2A2H5IC1dwx3U+Ca7bWZX3fntpcHO7uFf77SQAToOj+XxGXDhiBrjBOxt+QxoAzryyuEgG\nZnXCVRHvRxDzGcgKiIWJV+ZdonJpAKmZ74dzSSrUzh5WJ7ZNnyzFWvuekLMCCfPCgiR6dRv7bw
Mb\nQJ/TtB7wkggZQAvAOVFiGsdpk9/YpoBhnxxJhT4DTUyOmBx970jREaNDe0fqChAZQEoGP9cb2MEI\ndq4Vwhr8WvEN+NauY2gSrrHkhO+yd1eys8nZg9fnuGnw5tVFu3ZDrI40cvqGLPgW3SR7ew+qXHjf\nXLdD9qGB28bxTpjrdoxHvWUnAXApCMvvr4hzAzSAfo79P1cDt3lC63xBg+LqSKgiVRUNyDJwAQRv\nHpgTpXKJeegGr8qJEiThJiDjJsmAPrm9Xtg6VsP/fXJEdRseHECvBjjeJfPY8rqoMiyf3sLlyNvv\nJYFh3CloepcI+fcwhgpddKTk6KIfwA5sehtbZ0kMhRiFuCjZWcF1gnSCb8GvxaaxGQDDygAwrBNh\nZZ6da/M1iwnpFely4iHaNFb67J0OU1jBtBh2AEl5eHO1hGwlKPYC3leF6/ZSdupct2OFbHfYaQBc\nBctfK8Q59Jd5WjZX0jyZd1YbkNW1eVrBJYJPw5SweGSVN+9h5ntq1w9ANfWyiseU1LGOfgCOLpqH\n1SaPqtBEn70joe3tMvW9H2JjSWXMhk6+2wIkqOSqqEl2dOrZlVhgmd66/L8wTKllAmIi4FxeLroR\nFxTylDlEgk9DLLJ8rjjPU9voSEnQTD+ZTm1j63AXal7dOl+TNgPfyhPWmn/s+vkm4TrFNzIBukkW\nvE+WfXUJvLMp7ABoDJSYwZvbURJ2sILhVOkgj7UPiQ7y0uD2vioZnstSBbc/mNBZQuYGUr6OVMFA\nbRYis9APHpeIUrs4ANzcd4TssQHUbpxytinQq+NtOwdg3Vc00bNsavOwktB3nljoGr1De4HeId04\nrbOV2euJhk+ulw0uG5SKhJHLNi1UK9sNfLopWTjnFIy6AikoWpIm3mgsGia8PF8yG4qrIuLA+UgI\n49S8WPD2QlCyR1c+qzJOa6MQO/P24iJPm5vs0V0IfSO4RghrO65vHL5RqpXDNYpvPK6NFqsDpIvD\n9NWAbgQwm75G7pTEbsuq73vITxXczly3xx3/0NhPDBucaSJnO9vZPlo7CQ9OK8X/mjX1rKMOOUid\nEwO1jyyqjtkkwB9cHKaeYHGvPjnW0Vyem3ZGGz1t72m6QNd5+tY+qq480jlcY7En14HvhHogzoJv\n7beLalUE5UUbbVkh1CIM9aLARv0oykZgTdKoQKIB+1u3PLhcxK+5HrasSxXEmTPProJYj8TlWCup\nrtCg9LXSlQxy9vAkJEJl8UqXM8RT7654dDHm0raZkHIcLXXOKCiNMyJxY54cYMmItdCtzaurVg6/\ndkOMzjfeSMRdhL7E43JsrY/gxDy5ko2dJiG2p6yq909Xj33Tn1V5zT4gVd6n2EkAnPOJ16+WeKfU\nOY4WXOKiarmqGua+G+gZAE0MJIQv2wVvmzmrLrBcz2jX9nHSMiBrb0HytVA1sFjnY7WWLXRxUt4U\n0wbzX+L446JuSB9J0jwt3c9XKsX828tLyE2DG8u79ujJJS/DgzfUw4rRZ9QxVG0YjQbi3A3Z5jjX\nIQ63AXyzSJhFXM4ohxBzqa9SVQnVZBSUUjEys3hdmntSL0jrcKsSgzOA843Qr4V+pYS14FuLZYZV\nwjcJ3zgkc+zoinqAIN4Z0BUuXUp3KSUlPrc1XRWRh2Vcd9lZlXe0U+S6PcP0FE4F4ERZVD1XdcNV\nZSSteaZuVC7SJc+X7ZxVb0/tl6sF18sZzW2N3ATCrSPcChcrGy8sjeMV1pb5860RWSHHxfq0ocxB\nkSGCwesaaQ6jDTmBbS+gANEAhBjQCXduZFEg6lA3WwreB4kkbF/fy2aMJgfmK8egWAIYEAJx7kfO\n4ELo5zkDWxln0LLSnrhQ+pkdqA2K1AlXJXyIOYkBPuRMqUISh3hFo6D1SOOJveBXzkBuLfQXBnY+\nx+jC2lMtHWHlMtDFgZ4iXbSsa4nPZXrJcM23wW47ATHx6A4C3Vm0ctM+FNHKZwI3OBGAq3zk1129\n4TK0dJlzto6BN+2C7y0X3CzntNc1/o2dbnXtqG7h4loJK6hWmatVKAyZne9iLiaPIzt/EJostoN8\nuk1jK8KZwxQyezgFDEshf/liBlEAYTPKqZsJiQ2vZKIQMixLE/ArlkB0/DwuP+h+HSkSUOblFeBz\nxFqIM6G7gLhw9DmJYB6fkuZKO09GjK4SrgCcS4hTXAJygiNl709ViDNLSriVTWH9SvAZWGMD/QLC\n0lOtMtAN3l+evrb9mIhwYhlXsIyrZK+tXJM9CYgB6PY9p9sPy5nrlo95wkTe+7Z5wOU6CYDzkmhj\n4J/cvuaL5QKA5e0Mvjuj/tIx/wJe3Sj1jX2yatlnAMsKGWXaGAsJVfcDWXloigacdxseUdGaU2/L\nUuUGL4ksn5QqGaSUUs56wujBbWdGp8uKJpyLDDpvkBVBouZ1+XeuKJDI4GWO8UAdfpu+nA0sSZAu\nDQ9RWMkwxR28vMw17BZCnJv31S8ccaHEmRLndh1jpUiVkJCJ0z4xIraiIdn1qRza2NiFQ+fXgptn\n6s8KqqUnZGANy4RfR7yXXB6WM67Fi/ZuLOyXZATijRvm7tR16tHtncJ+lUUrn8p1e67j77MncN0O\n2UkA3LKr+T9/7ofwXwZm37WL/voNzN4kZm97XFs4V5NyIJ14NoOnw2T9FpCNxLGs6FsEL50BVgYx\nzdpwKZhCSQomt2TrGLXivAw0jzuANrlv7hQ1TBMPKmxIJGVqicUB2dCDK6ogJS7o2hHgXK+jKGY/\nAfl8Lcop+C6htzIA9qxypJnQLRzdhRGA+4UQ55kzeGGAp7XS1QmZxbHHRL6kzis6j2iVSAtHarN3\nuDSvLs0y0C3A5xBCNbf4aLV0+HXCNQ7XRgNm7PqJyzG6GA28yhR10KiT/dy5fZy5Yx+es2glp8p1\ne2js9SQAjlvP1/5aRf1WCetM5r2N9uD2aXx449bDu9EMZgIm+eY3T8wZkGXBy0Hosh7By8AsjxMY\nNeQqTF5pKobpMd03p4Pa79A20OmoEZe3H0E3n9r0+UiyuV5HoHO9jBnZ/L9vycX0is9ZX9dNgG8i\niVQ8PMsEZ1DIy0PeV9cRboQqx9bi3NEvhC5XOvQXQn8puaLE0V84Up1PONf5UsjHXnEhopWddAoe\nXXtSLbjarrcr09c5hKXVGYc8fQ0rh8svMNc5tE+IF6QfkxHASBiO2UudenMT2+nFHct/+yoTeV9a\ntPIdghuceXBnO9vZPmI7CQ8uLJXX/7AdPDZgkNoebPIWmXa00tIwZtJXweJCRc13nGqCcchMWikL\nYlaFW2ZvB60wMcztyoFi2WshZxzJ3ovtDNOyre33nqaJ6GXKpVxDkC7/FCkkxVR+YZRDiubVTeWQ\nxoJ5kz/yDUPm2NbnKWw/Ebss11hBUVwTcU0krBzVrWNWZQ+ueHQXFqfrLj3xwsaNM0eaK1qZ4IH4\nZNckXys3j2idiI0ntY60GuNzmqWv4kwIcyUsbLoal
oVi0hu1pBNwaci22nfvRuWSMl2Fu0mIyfV/\nMqWk2LlS4XHHPzT2Ed7bU76/kwA4SUpY9kNyADYpGUPMbDbhhZWmMQXI/Kjoa1lDbBpaAKxMQatC\njrWaTa2SacVlvTgJOXPorBa0UCfA+GLTetCdnwUGTTi3Y5uUlX1LIf1QUJ9MIilNgG+6LkWxOtco\nVkZW5JB6MVJyb0Rc12UNuML7y6AX1pku00zkzPs0JDSMlGtqIWXG7Vee6kaY1Y7u0tEtbMoK0F1a\nXK2fO+JFQmtnL4OcgS2Kx8wiVIlYOdJ6lMLyLTk2J/QroZ8p9YTbF24jvnG4pke8WBKCPPXM94PE\nBLtkmra6gO0s4j/04D5HoPtUuG4fOJH3qS+nkwC40mkKJpQMciA/N45J0zhanb2yLE0e60x4tXJT\nUp1BrDIvLNUWKAcgJON+hUS1JasEbNS7lhfMZi/oIlh59wU01XALMm5b9ptus2s5TF66E4ADUyMp\ny7rOT9RCDBRj56Az4JPWVEKAQc7c+GoZ6DL4+VYJ65S9PDdSa0r8rkvW5WsdCUtHXVscDqC7cHSX\n0F8K/YWnX6hp882KYKkOgOdCQi4SKRh0xlrRtcPlOGcoL6o6f0UzT1UL1TIRlmIeZqm+kDgShbve\nvLQpb26MUnPfAAAgAElEQVQqpe62HupDgDdsc0Rs7j4gOCWuG3ywXLfn8LxPA+AEUuWxRi/5wa1G\nTpeB2Kbg5VQjLs6yRzZMMxUqUyHxVaTySl3l5IWPGDskDcKSNtPZfNua/tpdbbfifY0e2OY+MILW\n9AVeXuhT7694e2WfAqpF+XeqDzcRCCFW4/GLJJOq0PeO2HtSNAEBAHpBOoc0kj270bsrJN2QgS+s\nE9L7oSnNtKpjmMZmL6y6MUHS9pWjuxS6yzyNfZWnsAslzYxWklRGjw5gHkleLfHQOFIFfiKVZd+t\nfe9V7QhLR1gWjT4Gj05KZ7UpSbgfqyXu2BTwhkz7pkzTkBG/D+j2AcJZtPJkwA2e1jbwnwH+9GTR\nrwf+I+BT4N8BfiUv//2q+rOHxlIR+stg8bJyk+cYTb8wSfJ+zih4OVfz0OYJKkVmER/SUMdaVdHU\nRZx5acAdoOqio89ySSnJRGFjAlzFayrnmWNjmtv+FUmkg9SmDTmR/GvSsGaY6mYJpKnacBnWTUDR\nFId18hzF4ZwrL6S6H1RSyufpOwO9rvN0rUOaTLhdjTpwYSmElamHFBALjeaMbZm+6kDlCL3COuJa\nT3XrzKO7sjHAprD9lVjZWJ2sqY8rYQBF6oh6Z6opwTLdQ7Z62kxoplS1vewAwm326CRaPM4LEj3a\nZU6Nd1sy6ls2jduVLGwxVausSOn+h/CQN3du0LzfXhDc4Gldtf4+8CMAIuKBXwL+HPBvAX9EVf/Q\n0WN5Yf25p58bDwugvxjrKo1tPwpeyjziqsS8itRVj3cGbtO4WB89MQmrtqLPYpCQY11ZAjzFDFjR\njYz5WN7mBcQmwf5kyzda/k29tAJgyiY/Li+UlCshJFNMStcu8jiSExoliVEAwWUahkuEkIZY4DC0\n6IaUOm70SqZT4lhFdO4GQOwbT986pJ2WXTGAVFhZpUhYOwO5Zlrylikn64hvEuFWqG/8UCXRXgnt\nK6G7EvpXQr+whASA9liyJiRknlCB6GWoABkoOpWMtba13ap1bYkQvxRcG+16SQKp8mXWkT+3C+hc\n5vHse8ics6+keHMPKeXStBvcXtJ7O2Wu2zHbPbM9F03kx4D/T1V/4ZnGO9vZzna2J9tzxeB+O/An\nJ///HhH5N4G/Afw+Vf1iewcR+RbwLYDq6jNufsCZ1zYvNIRR0VfqRKh7qkwiNQHHOHSxKnbbWJS6\n772JWPYOzWKOJQtHrh6Q3HtUdCTXlvVl2U5yLuPyQguZemrFuxsqFoaX4vTtKGPVw8SDK1JJRdl3\naEbtQL0SndJ5y/hKyVZ68+acHz277QxvWe4c6OStnap+ELzsG0/fOdzaDYq+fiXmwd2WWJ3JIsGo\n6Ct9rgPOFRZ+8P6s2L69FbqleXP9RQ4/LNS8cXU2HQkJcgc0AK2cZcDrMRY79LXNFKDa2XRYuoT4\nSa1xb16UOEHpd4omMM2iTj2KSbULyUIEd2pcj/Hotqd9u6orntM+IlXe55yeAshTBxSRGvjHwG9U\n1W+LyNeB72KP938GfENV/+1DY8x+8If0Gz/5e9GZARqAn0V8iIQw9lwY+iq4NHSZitHRdwZomsuE\n6JwBWByrAYZnPslYFpUwMHJb00vZAVTZ1DNWKsjmvmX/Mm6pSLDjsqEdd4ckNwHEYXpbMqwZCDXk\nU8mAB7nSokoMKr9h7CRm389IdynT2hKfK2bTdmc0vM6hWfKITvC55MovhWoJ4TZniVe5V8Pa+jT4\npnDr8jFyCVxc5PjcpdB8kuNzr6C/TKSZWkLIZdDO5zcoKrdZi24NYZmBc228yfpaqa8TfmVT5MKf\nLN2/pIu2bLvj1/b9vv3/NBsb48b6o7OtsDu29S5A7rFct+eKvb0Hrttfu/kZ3sTvHoXIz+HB/WvA\n31TVbwOU3wAi8keB//HeESpFvtYQQhy8NAu455hN7iJVEgF970mtRzuHtG4gwg79D6a9DiaeWPlf\n4hhX09zhPg19UzO4OB1AZDsGt4GC3mJqwLQO3WJ3nRta9JV+pdJveYxTsCyWQTSVoLsfhy3nT5u9\nLAH1PpeMKRo8WilxImmON4/PBfNKZKDCjN5dIvd2qJVUYmVRiDNP7AVpHN3SEbJ3F5ZQvXUGemtH\nWCUDumlTmiS5H6tmifMcn1sJ7WtHf6X0lwZymqx4f7iOVUKliBmMCYhtAnd9K4TbRFhnEGsF6cq1\nlZE3B0hJRBwqfJ/G3Q6Jbt6Xbd3nyU2P/xQ7c92OsucAuJ9gMj0VkW+o6i/nf38b8LfvG0B84uKy\n2eCHpSS0bTAwy16FZA/NukDJiDXOQKokIay5MwPIuN72gexVxSJ7pFbZcJHQWQbW2hRwnbeAvqoB\nKmDZyM5ZIqIkI4QN9VwXFOdNSDL2nn5t+8rSE24dfiVIOwLcpqKvcfjSTC37mHXbKBnI4pW2bqgK\ncE2uW01gSiiaa3Dz+EN/ByVmQrNkABtIzVtT2pFzJojDSMaV6cjFrPbbrS2BUN0I4dZ+VytHWGYP\nvE0Dr05WxcvLrRMbR1gK3SuhfW3UkniR0PKGyEkWKkVdygBX+HU5AVFqiitHHSZZ1mU0orKTXI9s\nNa2UZX3cPW2Fu8BTCvrthtzYbqh1PRQ8305APJeduW5H25MATkQugd8C/O7J4v9cRH4Eczh+fmvd\n7nEgUzU8MYNJ7By69kjjcI0zrytvr2Dk3fzgls5bYcqiF80EWE9cBtwqA81KSGrZyjhT4lXCXXUs\nFlaBPqt6+/Fx4KK1uePWqq1YNxVdE9DOmceYm74A+Coym/XMq57gI330rBaW3VtWM3qtkN7h2umc\nNn8mb/HH
/kJJVxF/2bOYmSti3bLi0Baw6zxdVi/ub4PFzXIVA2qlW5sKJzLxVBn4gqm2Ko5UpZHC\nIWO5lYiCKC6YF5iCDA96nAnx0tFduRHgbg3sAKqlo1qmoeuW6xV3ax6U6z2+dYRCQG5Gjw4wYHdi\nsblcGlceg+izXFX27ApHsgh81jOhfiv4VUQkmdM0XIvi0WUl4e2Hb4jBwSCjXmzqzU0aVh/9gE5p\nJU/hyH1EopXvGtzgiQCnqrfA17aW/Y6HjpOSsLqdoc3ES2uE0MoQB9MMSIAlH2ZGFSnT2lnVb/Rz\nAKOKNL1n5WdDjkGix2EeXFoo7rLj6mrN1dzmkhe5/0PxavrkBhFO76wzVYzOKgfKLDAD3GLecTFr\nmecOYEmFOvTjZ2w8ae3QJt+XOgJRqrPX9knP/HXDq4s1F5UB3Dx0g8Kxk5RbHtpXt+xq3qzmrFY1\nbevtGjbjublc1SApa7Q50Ez0Ve/NG6p1KFnTkOtKyc9NuW5iL44BCL3F/lLlaOeO/srRLYVwkwHu\nRqivc41pnr6WEjHXJao+4Tqfa2ktsdEVDt3rTCsBiykGRbPLa9p9jkgOJ4QR7CDH/rxQV1bu5drJ\njeacceeCQ7qI9hMqCdwfn9thBwU3D3laG3SOR0xdHwJYX1Fwg1OpZIgO+W6Nz3WVYHEmu4FzJ/tJ\nS8FQWwLC53Z4wUfmVT9wwbyzps59csyCdYh/Ww6V7JWuDjQkqjqyqLsBTAq4OZTgIkHGmtKkwtoH\n60IPNgUOaYgbXszaASALwBWb1x3rWU2qPSnY51Rh6J0Qa4ivIvPXDV97dctl1TLL4Fi7nrnvWfiO\nmetxW2ndVaxZxYo2eW66GU0M3LaWUV62FU1T0S4rZBmsoqFkSVvB9YquBa08qcplbbkkTosckldU\nchJgkvUVpxYrc0bmbeeTUq5XVrdav4Hq1pt3txo5dK5Xwioi0UIIJgeVp92d4K6E/griAjTpUCtM\nUDRnhmJOtqSJmIJlXa0ypg5CdZ35cvm4Iljgsnhz9GMSAgxkpppzU9uert6XHb2P+/XQKexXVLTy\nKXaWSzrb2c720dpJeHASbUojpSs8DN5EvEzIRU8166nrcQoqE2+t8pud7b1YE+jKRercYrB0tL/p\nHZFgahylg3z22GBS0iXj39MgfEq5KqIzNUsRK60CmPnIzPfMfUdSB2JNc+w8zQMyrpva1ErGeFi8\nTPirnk8uV4P35ibpXyeJykVmrnhxOnzW12FNp7nsTIWIY5XlU1axYh0rvmgueLOe8/Z2TnNjNW/9\nrc80EAsJuBZ0NSoYmzeX45xeB6FP+w4sOVTinZbYUFKhr8yEVFv/h+oa6reO+jonAho1Jd9ML7FS\nMI/LCQyXy8e6xtl09TJlrw3z5Ko8ZXWSkw1jprlIaKVg61IlVDe59GzVQ8cGFUQIw1WWrh/XPTBO\ndkdg86HNbKQE/g4cb/CQHkEF+Yi5bofsJACOxCD/o6EEwCFeJcLrlouLhkXdbcTW2n6sISwPdrEp\nIIkos9BzObNgTLpyrJySckYvJbHazTz38nn/EfDGJMO6D7S9J648EsfmNNtTYzuHhANaxvMUyZSS\n0rzG29QUQGeJ+cLid5WPJBXaHPtzyeNEmblI7RwRh0WhyvHLlGm8pBc5+BQr4wt+Vi+5ns/53uKC\nX1lcAvB2fkFf1ei1Nxml3hIVRWtOG8mKLBOwywCmkjuD+XHqKl6H9LAGIbqcEV44UwfORN/qWqlv\nLSlRqCVhGQeKSVg7QuNsCt0JrnWDTFO8SFAl+6zTF0aRnHfOesrm3rPqxwY8KtbPgzZ/286hASSj\no6rekcHf+PJso53acxuZVbsBNvc/5uHflYg4BHTHjv0VBTc4FYDDAu6lBhEgLpIlAC7XfLJYM8tB\ndmBoH9j0ni56+mT1lb6ACzr8Xf6fZe+OxZo69NyuZnSNffwujwPmcTl0COa3ybPOx1s2NetljTQe\ntxbihcWnqpzcCJLyviVB4TcURrTw6BQj65YsMOAWPbMcvyuJjabPzaox1RAjO2fxgPwAVBLv0Dy8\nJKqJhxedI7jEzPUkZPg867aiD4FUO6uTdZp5dtlz7S0ept4SEybzngEuMACe0XI014VmD89ZtYIG\n6Csl1Y5Y6owXQrwW+pnx2HQpOQlhn62KplnnW4fr3JAoAeiixeYYhEjtdwnyR5cywGX+20aXNA9i\njcPtvmCk+5CzoiI5JrcjdpWpOBu2JRkjWz1cbfEDH+oph+6YKog7ul3p7vrH2rFxt3vspYGt2GkA\nXM6SIjqW68wS80XH5axl5s2rKdPMykeiCk3vaZpAip61S/SZCHw5a1lUHV6SySEx6rTNQ8889Czq\njuv1jK4zr+zt2qZtSYWLqjU1kixc2eXjNl0g9c7AuLLAd133Q7PqAqrFm0wTlypmQUuJ5fNm2kbu\nYFXXkcvspaoKTR+4zucUk2MVKno16aFUrZnljjSdi3jylHwCdsWrc7nRg8cA28AzJwLaYLy6/Hmk\n3A2FbxttuihpFNMswXwNObhfjerHOIWBf6cDERqXSM7kkcDKreLCPLr4xlFXQrUS/Grk0Pl1Glop\nus4NPSikFySZ/pzOklFJJtne4ll2wby3Qgy2c8Z6RXgh5E5qro0DOmrFQAyWSR+IwXYV3+9KROwA\nuScJbG7rbu2yaYD/WN2657ITyZjustMBuMzTGtLtQY0C4iM+Z0R1EhOrXGIWIq0LdCtHVM/bQkK9\n9OiFgRlwZ/o68z2XlU0H367nrNqKVa5j1QxoM2/HbvrAstS4dh5U7MFySlj0zOtuiP3tVPDdrsma\nJs6EweOpqjiUoyWMntK09vW064qV1LR9oIuedh6GBtklszpzHVFs6upQYr6QEejUs0w132sv+M7y\nFd99c2XX6bZCGmfnUeWXS+GekTPOvSBrT1jKyLUD6LKScAY99TpwE8v3Z4Td/BkrNRVQIPrs0c2s\n8Ux/IdRvhNnbDES3MvS5FSU31sn0oVyFIj0G+Asbu5CXNZk3o+LoJaHObXZMc5C8Z5bvtSCCuJJl\nnWRXRUyRZOinW1D/wHSsAFFphBNLzDgff19ZWBl3unwfSD2krvWpQPcB0UH22UkA3NCZShlrRkU3\nBCGLRwXmlaQgzENH8JE3Tlkva1Imvy5zWderizW1N2244l2VJEBwidr1VC7yxs+5yd5S0wWaLhBC\nxIsSVVivbUoXe2eB9UqpZj2z2jzB4DZvoJTBZRNYGQUfNYPbHWdACS7R9MGSBTnOqCuPRuF2HYjR\npqp1nrJfhsboLC5R5WLXKaiuU8V1P+e7zSX/6PpTfvXLK7q3Btiy9ua91QnmiWrRWRywGsMBMQmr\npmZ9W9O/rQjXOWC/zITiVgZlXokyxlADVllSvDphjBEGq1DoK9OCK71aY1YDrmfGn6tuI9IrIcaR\nQ9fbtNU3QtsLrUJapCE2WJIgODV5+lwJAQZwyZdOap5ZMImmQk6WP
uHyNZT8pWmmkMg+rty2Tb2t\n7enrNtBNva60tWwY44HF+7tA5zFA9xGAG5xpImc729k+YjsJDw6nJG9B7cGrmXSgCi4RJE4ylDme\n5oW577mqW74Trrh+uwDM41mnGTEKVxcNF3U3JClqN44TXOIiNwktSYg3qzmrpmK1nJlIZJtLsrJJ\nSFQzK6Oa192QQYWczRXZpHfkv4M3YnLv1KZB5WVapMejTcHb6EnIhoovKri1Q3ulmVVcz2s+n4/v\npkosDlfOodOKJnM9vtde8J3VK/7x29fcvFnA22rsbuVAZ4pc9Vy+smTOp/MVYUIkTgirvuL6YsYX\n8wva2jxd9WHw4ogMHb+KrJF4SNGNNBPRYeo7KJ84JQqk4HKCKV+LWkhvTHAyLC35EFb2/UnyuQm2\neXLSO9pPjWZjFyNPi53mqaoMfSJsACzxEDYzrAB+bfWATsTmwr1slAdKNKWSDbcg7fHsptnWlAav\na6dXs02Q3faODhXuT4+3Pd50zGPtBFV5n2KnAXDe6ASiMip3RBlUeK2qIG3EuEq20oWOebCEQsgg\n9aVckpaB7nrGrZh+3KIaA/9tDAQ3xrzmfkwUgIHEbaaD0E7u5qC4RRpKw6o89S1T0dJDITjLpgZJ\nQ9C9cmkMumcTOyHbNwrrPmxkf0OukOjqZBJCUdDO0XQV15112LkIBrKhTE/VcRtr3rS2/jvLV3zn\nyyu6tzP8tUd6GaketeI+bfnk9S2/5vKWT2cratdvgHZUYe4rZt6Uk7+Xp4BrP0NdZQmI3JDaqk9y\nnNRD6sjNZKzpz3DTT1ovmmqIEoNDJwCnQUjeCunDWuA2f+9dorrW3EnMqjEkOtqcDe1fYRqCYWzC\nXcq8orMeDqavl9tMSr4BgcrlMKHIkLcYIib5bwEG2aXpQ7xN65gC0KQ5tUymlwcbU++Kyb3rpMEH\nWKlwn50EwInkt/x6+qXKoFvmJN0J4E/jTA7ldb0e/lcVvkyX6CrQNYFuvsmZK9nRYoU2AHBVN8ZB\n68JICC4hjMrqXi9nI1/NoRvnshl3S0PWxIkSQqLL2VNr96cD/69vLfZX5+RG7SNXi2YYa+1rdGVZ\nz9Wy5rveuGx9ctzUs4E+ctvNeNPM+fLGvNnmZobcmPgkmqk4i5xp/azhs09u+bVX13xaL7kKLUHi\nEEMsNnORIAbkBcRjFLrWERurJ5XeAK7Q89QZxSRFa0KfogykZkrHrQJ0OSYXC1/NGUk6BRO9rG78\n4GkVvlxYWgJAooPkhjKvpvN0rxx6Ecfxy1eSicjqHYglH1IYic2x8sy81bDafcUGUEnS0ZOzG407\ndqgfw9Sj2xWTm9o2iDgxgu8+kNuXad2I6U3errtoJYc8vxPmuh2y0wE4n/sUbLz4EkHSnbpO2Kou\nyFO6kll0r5Quem7iRfZ4wsBzW4RuJ2AWq3OVgBTNt1xUDhBmBjqXdbtRkD/QQrZ6oU6BwjsTBrBa\nypwNVMHnBi/9yrNeVOYZ+shF1bIIlrKsfeRLpyxV0LUn3VR8mUUpb27nfLu26VuMjr71pNsKl5V3\nqybXvTpLBMRXkfozexn8mk9u+LWXb3lVNSx8Z97bRJkz5o5YlbMKjYvQsgyWoKiqSFepAaYHn3Xu\nxhADEMA1Nn1NnXl0QG4qk4nDYQp0eddFossk3BGAisRJvp5NyrWsmj25UgVhnDlLPkSoJskdMj1H\nwKIh2ZsrwJolmGYe9MZKVu/E/LvePqJaEmzIiu0jCG/snAfb7tvKAW9uO9N6XxXDtlrJY8Fm+/jv\nuEHzu7KTADggv8Un/1cGbIXVf8eD2wF4dfHEQssnizXNuqJbVbRNxXWmesxDZ0CzYwyANoWRkpJF\nLyXrzF1cNFzNmg3ScTn29Ddk0J2Qfod1OTssCnQGAADSOro20M0dQdKgHlKsqQPruiIuPX7lkDc5\nw+oqimCGRAgxg2eRTymyUFmp5PLTFV9/fQ3A1xfXvMqcumoCapvX2RFViCr0yW9eszJnkxx7iwwA\nJwrS5RdWzHSS7GWlmTXgSTmkpQFzLsK4swK92DS1AA9YvG7mABF8EwnraFJMuU2ib53p4/WO9jOr\nhhmaFZUm3rVlXaMHXU2081wR0vTMvcmiD9JIPhkuOZCmh5RQ70YaSZ6CjtdmR0wONsFnAnSysekO\nD6xUIxyqYjjU4esQgB7Ksr5A9/kH2TY38R47CYBzTnHzHq38KB3ulVllXlLx4KaeEmwBygQEnajx\n3BYtfROIy8C1sylbHWJOWuy/UAo2PRYgJMI8K41UffYAR89tIy64FSMEBnLyqqvougCtG/hjkhhu\ntNhYQqMfkgzj+VnXsH5QMXGNDBLeEo2QO+3/oG4sAevnSnyVcJ81fHa14vuvbvi+uQW0LkOzUde6\nbSmTpI0g7OnVsc7VFU1TWb/VovqSvbcNWfYyO+wzDhY9y+zRxTkWitBcW1r2dQphVPTtZCJ46cfk\nQHULPksxFWx0vSIpV0D0jrYXuk/y6cySiXyKJXp0AVFMQgkyGLuiN2fAWpep8aonESwBAUgXoY/D\nS7kIju4kA8P9U9cJOfhOXSvsJwpPAWg7TndM6df2/sWm49xTGnaKnluxkwA4Lwnnlb5OY5lQbQRf\niwnJFsAVnpntv02mTSr06qh8RHxCV57ujWX/vvApT2dbfE5cTB/wPjlicuMLNyg+B+ULcbjE8PYB\nQ4nL9clx09px365nrN/OCDfeentOVH3Bus/HxtP1VnrWqx9A0mWvz4dEV0CiK/sxqY5gKHeLFxlg\nP4nUn6/5vk9u+NpiyetqzcJ3+bprvpaJNI3PbFmfS9beNnOul5a8iLcVfmkAN/D6pkRt8jkJWUWZ\nIaFSzll6wc2MDxuTG8rWxvichQcSjq70t61GwcvkPbVYYxyXezLIWqlVc5mZy/FB+2zdpwZqBKtl\nLeVkY5Lc2TFFJrGS8bqE1NvDnF0+A29DZdNWyEC1TXC8L1a37d3dF5+DA2TjeySYdgHdzgY8W1Ph\nfYCXdDcgvwt7oPcGZx7c2c52to/YTsKDUwRx1qSkxEtCiMxDT1JHUs0eXPbcGKeq+6arAJVP+CqS\nXEByb4TmZsabkJALhnrV6b6lxEknwdzSo+CuJ7l72tCrY9VX3LY1b2/nw3Hd22A9GcoLO008sZXg\nVo62qVh1FYuwWSERSpJiFkn1pPzIsdHfIdXQXyrdp+ZZ1J+N3tvVMCXNtAkVYnb9hsbRE++jU0eb\nArd9za+uL/lyNae5zVUQS2/KI71N5VNtnlfJSGoYS+8kq8WUelK/Ms6c682zk8482jgbY3SpTqMX\n5xWdF+9OMGVeq0ooMcCwzN9VbxJMopb1dNENMkySHO3noBcWU0Os/lbL9+usskLF5dKTQiMBFbt/\nwip7Bcl+Nku5yjxZuCN5fshK74cdQpobjW4Ocd12ZUAPUUt2eZAPyJTeHW7zWXhWj27qubmH+WQn\nAXCF0qvlpoahvwKYKsed
mk7GqeL0/2JBEouq49NXK94IdKvc+VyUvnc0fTC+mtd8L47jxORyFhUk\npEHvzYnVeGocu8WDARpkkm30uX61olnXRhQG6IS0SDTzRJubxli/0XwNovUfba8rbhazLArQMbXg\nEn4WiReershLuVwfKhBn0F8o/ec9809t4K+9uuXTufHbwECri+V83TDVLioqML5AmhhY9jXfW1/w\nxXLB7fUcbuyW8WvrZJbqnEktfWxz8x5fJ6pqlGtP0dMubV9ZO2vAszTQ853F5Ur8LiqQnHHnhsQD\n+R5RehIqbpAvtxX2q7q1Xq2+yRnWToeXiJgEMG0CXYjJocuYZdXakhvRGVWlqJHYdcbKu94IlUhu\nbMOY3O2iyS3hMmCkYUo+2CMk0W1w2Zi2bg6xNcauMq37+HPT0q99XLd94+yhltw7xT7WngBucATA\nicgfA/4N4Duq+s/lZZ8Dfxr4YayxzI+X5s4i8h8CvwsLGf/7qvoX7juG6hjzKA2NS3xsF7BNrYDc\ntmeVEIJLfDJfMw89q8tRYqmYJQCCCV5mcIzJjS0K8/29q5g+ZnAo+wA00bNuK5brOu+vVPNcQfGJ\nySH5/DDdrGasrme4L+y8qhuTEq++9NwGS4i8WhhIVS7RZdCt6571laMr92HlBvJuvEz4T1o+u1rx\n2cUKgMuqHagvbfIs+3q4pkXnrsQha2cecwHs267mpplxvZzRXM/Ma1sXbwj6BaR5QhcRf9FzmXtS\ngF0zn7+bPhlVZ1XZZ+0Wga6qrInzMjee6ZiAvSAzSL3c0aED8w7jZcp8OQO54f0kgbCMJr/URFAo\nzcnMqxNc72k+F9IVWVsue0sOO5YkolPzFP34Iil0ElGPZMAZHrnszakk48mJsBGLu7cf6z2VCntM\nRPa3MCwe2THS6NuxucfUtO4AuyfF5waBA3d32ZF2jAf33wL/JfDfTZb9FPCXVfUPishP5f9/UkT+\nWazL/W8Efh3wl0Tkn1bVyAGzwnIHTnGZc1YH8yj6HbyfQXWjKNhmwcrpdNWhA3D5Kg39DZo+0GUa\niOZjozIk8LrkaHuP9m5DKwyyerCLps2WNeeiOtIke5tU8N68vipYvwdg8MhUzft7NWtYXla8fW1T\n2OWvXFJ9z1uG9FcrbnvH+rUBwsVFM0whqxDhoqXPVQ7ptSMla3zzatHy+eWSr81vhyxxcEbcbZNn\nHTKMcOoAACAASURBVCsru2os8bFqq6HXbBXi0LSnyE41XaBtKuJtQFZ+KPEC8xR1EQlXHYuLhlfz\nhquqHV4GkBM26gbxgKm33S6ESMhqH8Bq7Mfh+sydyyThmIAiDBrytNUp6SLRDhnWzfskKPgm4ro0\nFMrXb82Lc62VeDUJ4hXIrNBITByglJvFizRkWEt3MuOJGOCF27HVm8sZVGn60ZMrVvo87Hs4t5fv\n6gdxgNu20cLwPoLuoeTB9DjTloiwe4xDnuHEE3zU9PUZwA2OADhV/V9E5Ie3Fv9W4Dfnv/848FeA\nn8zL/5SqNsA/FJGfA34U+KuHjpHUtNLwOnDU6tAPtZjD+mxTtEzYtDBlrhYYMbZIlRcr069Z6PE5\nUzq1su+6tamlrrxxtWabX872fm4rY+ZdInnzHud1N2ReaxeHz6AqBEm8rhuuavN4vu0T19Ul/guL\n082+bTJQANevAtVlRz3rqUNPPY/IYgRV7xK1jyxCx+t6zaVvh+oML0oTwwAu182ML6/NQ+xua5Ne\nV1iVzOXU8ch8skKeVa/ERQaDq57Lq4ZXizVXtZGSw0SPrk+eJEITjUenWwAn3ho9pxklBTk0w3Ft\nBjm12J3MrHICGJVDJMf85tC6sWIENb0+U6bRXC2Swx6pz9pyHkkeSY4mCv3rrBhyaTQSkgzT33SR\nQV+K559BbtuKN5fKJYzIRoxWx6ngITWQR3o7d/q0Tu0xsbXtc3yKSsnWvvd6ddtA9ghgK/bYGNzX\nJ82d/wnw9fz3DwB/bbLdL+Zl95rLvKcS7xLYCLInxgRAUfEtU58+OVIS5vVIfxA/asFNwbHUtW4/\ncEXz7eZ2TnxbEa5zs2cfuJ2bx+OdeV6lz0IR0xzOMR/LiRJyn4h9VJLp+QB8frGi8pE380u6X62p\nv3RUb3OsrKnoG0983VK9jtRh7AJW+k7UvidI2qiw2LZVX7FqK7plThSsPNKNsa+i6rtRYa5ZELNW\n0iJSvTZAvsxe20XVmiTVRAzBvi+hS542elZtZVp2mYwb+0kDbYx1UYROwbwj1paA8LkSonC4++hI\ns1zbmonYOlfaT/NnsNKEDHA+8+RKcA9cjNR9GtaLCutM++hFkXk06XU1L7HMfdM82dRdYeDkTC9z\n8eRKcoBxtcQE3mVXlMMgdszUdE+h/aM8pUPT110gV+yYnhOaxn22gHebhrpbhMA9CdzgGZIMqqoi\n9zzFO0xEvgV8C6D+/td4n0yqsXg52BRnWmRfLoLPMallW7FeV8Teb3zn86rPYLj5wG1blz25VVvR\nZM23/qYaerOSLMPXvLFp5JfR0SwCF7N2aHLTRT8o5K7birb3OKcbcb19x4cxE+td4tWspfos8WW1\noLmwrCuY9lr1haNLNTdZIt1VBjRzbz1ThxiiCm7iZSS1xMLbbs5tW7Na1QwBPCzmNAgddHbhNwDP\nK7FWdJ6orlquLi1Q9jqDW1F6mYYMyN9dFz3LpmbVVPRdmCRcHNI5hma1YO0hh34cSpyZwKZfmzdX\nLCShiHqkWQYhB5p75raflticeXKVCL5wB3s1leBeqW56RBXXe0s+AGsNdJ8KzKN5cqrj8y2MJWQu\nLxA3Jj+cXTuSYroKOoJD0t2g9sSH9z472COinNfGDruC+DvOcTodnpaPTQHykFd3b1H/87HXHjvS\nt0XkGwD593fy8l8Cfmiy3Q/mZXdMVX9aVb+pqt8Mn1w88jTOdraznW2/PdaD+xngdwJ/MP/+85Pl\nf0JE/jCWZPgNwP9+32BeErNZx3pVDwoiXfREdYPLv00HqVxiXvUI0MdISo6Qp7cxew/4cQo4jZW1\nydNk76LtAn3nh+P6qw6ubPonLuEnUulJheVyxmpVD1JGqjLGh6JHFepZD/UmxWOXisn2ehFlHnq+\n/5Mb2qsVX77OiiBfzAlvAn7piMz4ngrkKVnl41DOBuBUSOIGrlubAjfdjC/WC97cLkitH72Oi0iY\n91S1xTtjdOZprXK8qbdMMHWimvdcXjS8nlvx7FXdWJOdiRjCBldRhSZ3P2vXFboMSGns3MpQsaLe\nPMiUWxPahVdIVrrmbx3VjWxWbmQvDs1eHNaYGrCYnE8UDoc6qPLH8WuTQadTpE9UN7pZWmakO7rP\nJvG4QneIlgRTUaLejcfphpJvHi17aJqSxeO2p6nbjaSfajtiewfpGk+VP5pOPw9la7cpJtv1tYfi\nc9vT1OfmwYnIn8QSCt8nIr8I/McYsP0ZEfldwC8APw6gqn9HRP4M8HcxwYZ/974MK
hhgzUKkcSO3\nLKZN6sc03mVdsvo8rWssi6pjc5iYHLdtTR1Ms01Eh+RAl+N2TReGDOLiohlkui+qSY8FLEA/TEG7\nQNsHazWYwSB1Di39AlpnpT/V2L91Wre6D9zKNSifw4myqDrqT2ycm3nL21cL0hcz/I0jdTN+Jbc9\nXH8eeD1vuKobaj92BOvzQ1iypm+Xc9p1QKPgFvZZF5ctn16seD0bpaaWXT004Fk1NTEKIlDX/aB0\nUs536PA1AbfS6rCJ1synbQzc/LU3cUyMHKyS43oz0FnCXRjQArh8H8Te0V1UxIWnepOzvbfGnZNk\noqG9CmmWxmY3oujMYnKm/TZSSHIi1hTUc/Ih3PYM2qFKTnh4OlFkEYesvnpFeyvhS3Pj4Zll6kgC\nF12uSc1rprWlbWexOCdG9Nvgnj0x1nRE3G5vYP/YWNqu4x1KQuzs+5ptW57JyZ2Y3IY9Ycp6TBb1\nJ/as+rE92/8B4A885CScqBWTO7XGLljv05jcENCH0QsrIb+igFEoEVX2Hpo+0EbPugvc5HHKdxGj\ns+J+p1zMWxY50zk9jn0Ou/hV1mcDy86qWuyriZ4+epres17Zo9OtKhDNWoibyY1t267AKOA2PX4R\nv/xkseZqZqrFjSwIbz3uOxYzvP3yE65f91RXLfN5lz06hr6xMTmatSUp6B1u0XN1ZYD2/a9uuAzt\nkKBICBeh5ao2JLrtam7bmu4Od9A4dMkZGDvNbRZzYqGsXzU1/Srgls4qAJoRDFKNEYTnaaCaXGSv\nt3ijfXKsFhXL+Yw2yzQhuZa3B1lmkEtuojWX44rzlLVKRwrJAD4iOJesfjVBWNn3O2fku6kLBp4X\nBro+pKEuVjGRzl6cJTUwovIAkDmLWyYNHgM7xeJ+G9lU+7L33id37D4wvIdOMq7ais0dlHnaATC7\nZJkOJSHugOR+ArJ4t/d8HsqpO4lKBsiKt0wYCtHt5MDBCD5FkqhYATqXvbCbZsa6qejWYWCr13Pr\nhHU1M85W5eLG1HEKSoVbt8Gvy+c5FbtcznMX+cuK25V5P23vmQW3odC76xjFO0yZl1fWbPR1FSWE\nnq+9vuVtFbm9WCDfs2PWbxx6XZOqilWA27oo2W7d3CropXlJn19aXdMidAO4wZhhntMN17N2kduu\nps0vitvWgKb1fqDjABvtDgGu1zOaVWX8uXVuO5iTBaZNp6RFwl11XOSM7CJnhguHUVWoc1e163K9\ndAbiCHna6hoIKsPML9WQxPhyWie6TxhASJJDv4RKFF+chz4N5VZhFVl8D5AAzrKrfUlwXdk5SUio\nOpvNSqIr36kwIQI7o6hksJeYcCkTgF1OOExLuUqpFtzvyT3E0zsAoHengk+crm7bvi5hMALbFOSO\n6K/6GMLwSQBcIc1ac+Schet91iFzGzd8saEedQvkynhFtFLVOFclZvbqYs1l3VK7OIwZt+geMILR\nrmnlEE/LADj0Rc2ZvHVb0UcjuO7SipuOUwCuj56kDGRbcqNnsMxop0LlEt93dcvrxZpfmVvrv/V8\nTvWltfUzVRHJFId8EDEaRn+h9HPZKIEbZM63ruHYySwyCwXcrUZ23dkts2ory6B6k4qvXKKNnmWb\nm2QvZ6TrinDjDODG9qNWL3uhcNVzcTmCW/Gii+T79DspL7ub3tHFCpKjKp5cprMADMomGMileaJ9\nnQdR8+ZEc4zOgXaCxDwzSIpfJeZfRFQ8KubJgbU69Bc2y6BKNl31Nr2278jRqU2dTb7Kb9BIRBkS\nkjkIMyYon9ODe4DdW051sEWi2wSqabb1kCTTsP2Ocbb3gd3xuSOYNMVOAuCAAZAGwcQMPuXBi7rp\nzW0AT3kbT+6opEIfHSLgQ+Qyy38XcJuOuQ1ipaa0jFa8SyaeXj8BoGLeJRZ1ZxSJLrDuwjCd9i7h\nZVMA084xx9IycHineL9JUt6WZqp95GufmKbbzaznZn5Bf+sHkUvpR26bby2o7xuIa0+zroiXbjiH\nIUa4Q8Bg24yMn6eorafJ03AfYk62mIQ5gNwGwo0jLMcEQYnNx4WSLiOzi5areWPk65ysgBF4UTeU\n7JVKDh8S/SwR5w7X5aRDbwpIMMbmYrKEgwbzFAEaT6Z2OObfM8HK/5+9N4+1JMvz+j5nieVub8ms\nrMzKzKqu7q7q7unuWZieHnrMzDBss4ABi0UC2bIMFmOMZP9jyRa2JVvmD2TZlmUZC2ksIUACg4UQ\nNhhhxjDjAQ89UDP0XtNbTVVl15rLW+67SyznHP/xOyci7n1LvqyqxulRHunpvXeXuHHjxv3Fb/ku\nRvdAYN14lBdZ9LEPksnFILDWFgddkPNxAJEOlVeBNhiRgYrlqgp9aa9CwIQgpWr83QWF1JODizO5\nDxA+sb0uBAo/bF22R5fWRfCUh/BdL+zVnbG+e0fsyXqynqwn6//j9VhkcMN43rE/ImxiWD6dlVmc\nBab1QRyq6tYSvMJmPc9yWFoOlxv0wtat7fiY1vg+e3jY+wgKEyfC3gthPzXodSYQg46gv/X6IYjJ\nTgiqy2SG+5iyuMTASFnlKG9wO2vqUsyqtQ4Y4zvWQL3K4CRDL0XBpD3OuV8I7rCwLbNsfebrpf99\nUNSRkdA4002eiVNOXxnakIFTImAZoSBmLarDOno1eCPS6QDt1KMnYr2YRXXlbS8LcT8zrNuMZZNR\nxQzXtRqCGEy7MYBkpx2MJJaJyimcF2pZ5yJWeKp9ohKJJj8OWBMw0VQ6GIWpvaiQVLFU1VEaXmnW\nykqpWjjp6Qa6lCIo1QkLEFTcB9kn5TXKGVTjUa1HeT/I4tjMeNIA4qxM7r2Wpg/xaEil6kOVhB9V\nqeQyKsKnnnMOpGTj/8tnmY9FgANpTjun8REfZbOtntsF+LFt85d1a1lUOXUlby+zruuTbZeI6XnD\n6WDTCrc1lUdnrbNuT0Y1RnuZ0mlOlajp9ZLSRhomyLl99mtd5EmRzGy0lkl0sjNMq5lpqn3L4eEE\nDsWMZnlfAtzrreHKzoLdYt1BTKC/ULS+17VbR1hNGrLo3KO0oQX82qIahVnpThHErFSv4qsiUyGq\nDIdJy2RSia/sYF/bwUWm8YZ1a1lWOct1TrOWzzKsDXgpPZ2KzX1LJ+Hela2pVPQaH7XkfO4Jhafe\nj7pvRsEhG2yEYDS6CSgPpgoURwkmoglGszYZfhd07tCmZzqki5fz8rpN22P9lBf4iMipBxk4RIcu\niHCS877wDyObp57cZYcUF/mpcrovt/H/WdLlZ/FRT0mnn8bn9ft/RrD7AC0SH4sAVzvD4aqkXub9\nCZMyuKBAnQ5wKfgNg0LKwlZ1xnqV0zYGm7fYc3paG/pxgymuHgSf5H06XBd5GKR9KqzbGBps99/S\n7/SlNsbTxmxvu7+3HcCHt7VexCpD6mGyGRAz7SnKCrUfmGcjmuMcFSWPmvWIt5cZR7OK6aiitOI5\nUQ+gHuvGsqryGEDFDzYdr4AAsk9WBeuT
AhcsuuohGcpJAPIWXBlw4ziBHjWUWUtpWwEJo/DedM5n\n69YKfa62ck6sTQcSVjpi0kovZH2vaGtNu4g4ubnGLmKQCwCKjukVosZcHmj2hKAftCI/iufAWpy0\ngk6ZnBhPA5RKbAV9pqkyg9eBvOzB3D7Ka/kQs3OvBwFORZ8IE60OZYraZXA6NundOUEgBbCzBgzp\n/0fN7s4JONuBbXjbqXWen8NZ6zxIyUVinad27tGD3uMR4FrL/K0ZqlHiTA64wgkWzjj81gQVTmdu\nzveGKKsqp43Zm1J0GmxnPS+tYRBLt9oY3LqAtDWt7YDHg2AUYiAz2pMPsrbtIAl9EE9L6xA/77Oz\ntY3nbv2vBkEzDJ4TkC/cKGsxO0sWecvyWLi16sSi5pb10rKyI9TIybQ5vX4sUUX6qWEWBwLABrxm\nktccFzVH+ZjWy7ZNLST9EKe4fuCaZUyUmorBPcS2wCIKHlRrgfawFv057ehwbiH36FFLXrbk0ZRo\nXWdUSe+vyEAZsrlkc/EsACR+uAAUEiSb2VaJeCwPDRpQGl2HHkKy9Izuq5j5GWoNrfVdRosK4tqV\neUKQIUeakCunolF1LFXbgPG9jFNIf2/j4+TgbGZx2xkbvLfS9Rxw8HsYMZyfhZ27hmyO9/SKl16P\nRYCjFaFHQl/G+FZ3WLhtX9Rh9iWULplGLqt4ki8zQqUhl/ItQVDgdLDYDibJoMrFLMpwGmDcPTew\nAWXQKkA84bcD27AkPouypQCtPUb3hjYXMR+2l4Luudvvq1PtNY5JWXfndjM2NJUVGtXawNrgHN3o\nKVjRfNMTAUQn3ODGMVDCKomydhxEv9amyiGoDaI8XbKhRIvOiohn0xqW65x6KIdeq57xMPKESWRf\nzComZc0oayhsK5SwwrIq5VQ+smMqCvCGbAG6HrgRepluOiCUESc30z2khlT2J+hJ39vTbSBbesoH\nsVy1hkZnHa1PazkTtAn4zBO8ovUpg+vhI12QcxaVjHJaQ0fIH+Lj4HR596jB7FFhJef0zR6a0Z3n\nvHUWwDeVoGeVro/Sn7vEejwCXPwyqqjPD+BrEZ7MrOkAsOltpwa9i2ogrZPHrqMMUFhZKQ+K/mAM\nddwSb3GbSgVS8q0bS+sMtTZMi5omjhcSnKFjGaiwofkVgrh5qQhr2M46t0vUpJOWblMx87NnZHsJ\nCAybV9mzGBCwgUrqgmViVyQRzmlZ4Saa9cxSVVb04U5sp8tGJTxbV4iz2bbV4jCY5toxyRsW48j0\nmBlMZbqgoh2dSku9zPBOs9Ke4DWu1YSlMB5AYC0qSNbmJh6zU7M7E3DyrKgpbbPhGzuyDZmRz957\nzVGraWqFcpFBMaQFB9l2ixbKVe5pY0SvXAqCsZe61c9WPpAtAt6EaEhtOgUUPWow1uGd9OpCDHIg\ng5G2FU09sTMk9vnic6PyiPTi0mRiK9BcVKZelM2d99jL9OrOkWG6sC93ajtbmdw2yPciNsdFPb9L\nrscjwJmAG4WOggPgK0NVZVjjzyhP4++oCde2hrqy+FUsSGolFnZBHlM1lmBP80MVg4AQfx+vCxYn\npWzLw4FCZK0BnYmGfyKnW+uYFDVFAvpqzySrRYDzIcn+Nt5Ma08WNeS627aCb3p0QMp6EMZE6zQm\nHqfu2YPg4+JryQS2L9k7ocyswY8U87JgMSpojyRYmBNhIbQnGYtR3vXoto9jEgow2pNHPmmVe1wu\nxjSqlaBlTyJX2GW0pSHVsKpRUtIm+SQlk1amLdPdFTujNdMoD1XYdkNYU3aAzox7XNSsyoxqYnEr\nhWkUKprd6DaWn6t4MUtBLl4I25mmin07CT7956N8gFZ6c+VRkOCWa9Z57JtaT2E9xvrugp0+C4eU\nqUk4VLcK3Ri0SxmcRbmIj4POijAeaPltEtl2q2R91PV+JrFbmeXwDD9z+nqmP2wMbMFvZmQbhtjD\n/T0nm7vkeoKDe7KerCfrN+16LDI4pRDT3zDQ5V8pXGFZaU+b6Y0LSCq1kqqHrw2h0qg0wWsVgQBO\n0TQGrW1fCmq/kfkuvbAWTg5FmkgtLXauUQbcxEEWGM2EBWGt61RHllWOUYGD+RgXp59JDaPIWnZH\na8Yx6wC6PmLjxCFex8ln6tFtT2w3srvBseqVO+R/cQCLKsZxe2fBa0JQOJ/+7u9Lj9QqyBAhazmO\n2W6lS8xcGv3LgxHea3YnYmYzKyrJOGK2ErayTIw4YgWtQLNhkYiSKWMwRG5mHEQk5Y6Rx04a9naW\nTItqw0Ix0fDOc1RTQJY5qsLhSk1bq47loNskt0SX4bZa40eRslZ6ahVxbE6RzUOHnHdeYZAsTjeB\n/CTgH0CIWkxrG2is7yT3E6ULJJNzY0XT6pi9pVI1VhytTD/0kOXgzsi0zptmDv0LzsruLmJFnJfR\nbZfCD6GTXYihe9gA4iImw3m9vUuuxyLAhVimEGT6BkQ1WUvjFW3pNseGQRGcEkVYf7rEUchn443B\naaiCok1wjeSQ7pVon80FG5a8UnwG6sMLPvzUAZ/ae4sXR+8w0RLgrtljMhzrkFGqhjfbfd6s97nf\nTAD45snTvHawT90a3nywQ/Ca8Viee2WylKms8oxMI8ORqL0LdJPX1OfyauASBhs9vUD/uacprzW+\nAwKfdXydl0lzAMxAct2xOfQobQtRtfcYqGyBWhvUyrBuR53aSzPTTPKm06Nr4xS7Ux6JwdRnAaUF\nmOtTlWXiT7KJNBETFkvnbNQwm6yZFhWTrD53qtz1TYOmchEIHC9+SocoxxQ6vT4VhwbDQBsUnedp\nKKQn1+wgAaim67GqID8mQjyypSgH+8jVdbmhyS1qDNp40L6jaikdCIWPQU4uwLoBE2W2dGPEdtAb\nIeQPFYB9OF0ebq+LStaHlaQXPfccr1Y5WA8PNF2v7qKS88xg5y+1/cusxyLA0YpHqF0N0N8tqCBX\n4GANA1qfPMb3QFIVwZVpBR3QCT+HkUa5HRzIRrI9eyLZjxt7imeF2/npG2/x2b1X+WT5BqVqmKh6\noHsm29hT0vDeM0s+WfSCxQ92p6xvZnx9/Qz/8vhZ7sz3OFpKZvjq69dAgSlbssxR5gK7SJmJUmED\nemEIffhT8iX20GUeKdMrsqGwwOYXH+j6bmkgo7XvoUby7YYBFMYF1clDjQpxAWusJdQGGkU7l0n1\nQTNlMWoYlzW5FaexdWM76XdqydBcAb5whDxANPVWVqbanSdpUHLhjrudZY7M9EKaySWtO12CCJQm\neFDlbAcRWkb5+dDI0fMWVGRQuDjwUE6mq52/QtoumlB4/MhT7yFS5oc9ZCZJIJkalAtkK48/jAGu\nVLjS4mxAlV4gP8k6TwOZx49EREJXYpXYJNZHpdG1RTmPsqaHjcinOcCGXdCQH67tjO6sIHfRoOK8\ndcnXv3DielZvbhskfNFrPSKs5LEIcMn0eAgpUA6oRIMfrfA2bI4PQz9+31RlFaBmCKGj7PhK9VfT\
nJgZSD/WuJ1yv+KEPv85ndl8H4NOjO8z0mhyHVh4j/kjdtofBzm0NEvb0kkw5bkyO+NHJ11nfyHhp\n+REAvnh8m1eOrvLu3R2aN0uqInBoQjfpHe+tuLE7xyrxerDKd/6krddYZNrXTYC35KVSox+kpB2C\njvVgP73vbQ7NhnlOen99NmeNJ8scdkCmHzIKqlrUek0MWM5p/DqlaQIz8VOHnZzWewsIxGdVZ1SV\npXW2ezNNY6gzQ2Vsd4ZuYx7bIBnjurFUTe+p4RotwOBGSxVg+89PTGrE58HUUq4mfbh04FqtCLnH\njT21F8pVPEgMT0BTE0tVOXLugZahg7U447GjpqsWXDDyRc89fqRp64CpZQCS3q9uQjdsUN73JWq0\nIzwFH3mUdV7weq80sHOoX2cZU5+9P4PHpcloCnIPM76BRwLrPRYBDqJ8juoSFNCDqlRzCtmqpHWD\nauULrBItCIEkBCcZnGoFnZ7KV1OLYfH6RsvzL7zD77n+63x6dIenzbzbdgpsAI5ehhugGaSSbmtG\n44A63m/wTFTNj06+DsBvn/w6bz+9y1du3+bby2t88d2bHLy5izmUj2B9NOP1dof2SsN4f8WkrDvn\nrL1iJcHOSdBJVonQ9+SM6qk/nr5vlwKfNe4UOBj64KYHzx2uVAKXeYNWgZMiqv3mQp8KK0MbrJSa\nga40RUGYthSTmv3Zkmleb8ghJXrcOss4UCPmtSVEGEnjMk7ivrlCYZTdCHC1M2IkXWU0cXqukqpy\nEwHGmih8GbqpJl6hV6Jwkp0o7CJOVmOr1BhF0Fqc7aORdu16sK5QrVRfrlYBs46Z9DG4wuBKTV1Y\n2kzoc0AEA2upPHOPGynaasDbrcCMNLo2IsLZ+k4eRdVbn8ipL/sF5es2IHh7XSZruyz165z9eqjT\nV9enOwdOMnyNs17nIeuxCHDBQDuRpnT60JNlXQczaAayxv0kXjK5mM1121PEDKK/Ld1f7Qf8rTU/\n8/GX+dzsW3w0f5ft5VM5CBtZWgpoKeCl+8wZoaPZouYbAlfMCb97+lV+cuZ5Zf9p7nzkCt9aPg3A\nS+88y4O3d6HVVK/OqJ3ibvTrfL10ZKOG/dmSSfRRTQMM50UmPMSAkfak48AqkUIXaEji48bjNBhA\nbMNL5H1GUPVAsmgaPRlGecOizFkuCnxyIhuUmSH3mMIxG1eMs2bDzjANCmwsQ1svogSrOjIsFpZm\nbTiqLcsix5geqN35RlQGKo2qNbZSPTjeCH4ulA49absMFCR7bRpDvchwhaVQQtQ3McAFDdqAzxSh\nEEvCdhaHG42WPq+TwYOykt2ZteyXXQtv1ZUaV1rawpONkreHDA6UAXKHHynaWnVG2u0K7FrjSoOu\nnfTmkqiB1gj6ms0vevd5bUI3zoSRnGWifNn1HnBzp/Zr42HnSKdf1pLwslli2uwjPfrJerKerCfr\n/0frscjgdAPjt7QMFlLrwcc+XADTRDL5IGvbUIBQEIzCx/62K2Qa6vOAj++w2Ykbfqri933iK/zo\nzjd4zj5gHWLv5pJKesPsrRPhjM89K5NLy0WS4wKhpN2wR9zMDvjB0asA/OTeV+Hj8KuL5/na8TO8\nvZjx4Eims+39knpteOeghAB61lBEondmXaeI22U5XndsjyHty+izp6zbUI90AW2cwXtNHp+v6Nsf\nVnumcUgy1wF3nKObAVvExsercEoOabi0CkzzCjdR1JFq55cGe2IIC02T5dRZ/9koL9NyE0tRg1gj\njQAAIABJREFUgZiIeQ1I5qgmLaNxzXQk2ePwuNTOsBjlnNgRlcoo7mtsVEDRjcg8oeNQp+yllppp\nQFfy2soThwBapp5IqyRfeNojRTtSrAtLk6bCRYvSIvyV9tGNFE0syU2laCpBEJjKoBvfZ0x+II45\nnKg+qgrwZdZ7GTwMH3tWlneBPFOXyW0T7oEzVUrew7qMq9ZfAv514N0Qwqfjbf8N8PuBGvg28CdC\nCIdKqeeBl4Gvx6d/PoTwpx/2GrqB8m5gO8akE7j/P34RldoIcN6CJxCKBE1A4AG5NLqDEQUJgI89\nc5cXR+9ywx5xHInhsqHYO4v9N+nDnXFwFRs9OeBUoAM2enjd42JQ9GhxPRi8t4mu0Hh+YvYyPzF7\nmQbDnfpqfJ7mldU1vnnyNA9WY45WJScHInm01oG5HkGAYtwwG6/JjOu4sHk0p24jpWubT5swhWow\niU2qvW0rpa/TegNSEg8DRgXGRY3WnqOg8C5HR5qXahSukdKzDZosDEQOhkOPePs4k0EEwMlxJsFm\nHgNCpvGdpaC0Hoa+Dqp06DwaA+Uto6JmVtSdMfVQBqpqLUaLCsnSKZomE8MYQFdI77dSQhzQEuQA\nMX3eFRZCcvUiBHwW2xRtQNeCj3NHCjfW1GVsUxQtxkiQ8sGgjCcUCjeOQ4a1wqygrTRmbdC17ywH\nlQsyVW3lHZzP01Sbv/sP+OzHn7UetV93mSHFGQyI7k84m+61Ua5e0Ju7xLpMBveXgb8A/NXBbT8P\n/NkQQquU+q+BPwv8J/G+b4cQfuBRdiJoaCeKoYSM8jLKFzu2IPr43YcYn5eGEipOTlPQj+oVwQZC\nFgg6YHak0TLL19zMDlj6YiP4dNlXADMYy25PUYfLb1f4g8/RIcGyk0VHo/GnenPDVcdsMj3+2fw+\nABmOj+bv8lO7Xwbg1eYaSy90qlfXT/GN46d5ez5jXWfcu7sDOlDE/k+ZNxt6eOI4HzMHHU4FPKV6\nYx2tA3WtaZTBml40NL3VFORGWUs7WTNvNL6V96Ccwq8siyJnXNQdxWt79bJOrufgZh4wmNh/DQ0d\n3qwdB1wukudm1jApG6Zl1cFmMu07UYBcO6x2PWwmSqDn8b1UI0c7MZjoA5s3cs5ZORBdZQAysHAT\nT1PriNUMnclMt+3Kky097VyRTRTtJGIGS0teNv13WwGZYPRABCbalaJdy7DBVLobmgSrUa3ebDJf\ntg/1AWHJzl2XJfJfsL/n9uSgD+anAt7l+3CXsQ38pZiZDW/7h4N/Pw/8kUu/4hnLFXD0YpwcxJMx\nO1bkxwq7ArOWLK8vX8NGFic3bm1USemS6qowwMndd1NKXdMESzdO6E4gTR2ih6ZKbXlZPoihskOx\nDhmHbsJ36itdEKuCxQXNU9m8e2wKnFr5wW0ehyZTrssSjfKMdUWuHBpPptpuu+uQ41Ddtm5lD8ji\nnn2ieIuf3pX9++LqOb6xuM7ri33eOhKnlcMHE0zhOl/TImsZRTaGi5CL06IA8jszjpD1Wd72Cgwy\nsKKmmRpWCWx9YtErTaMLDjMxpUl80aHCSqcK44dAx5ilKXoTqtR+GAX8bku5U7E7WTHJ640BRv8e\nwqm/fehhJiDTTZcHXBLEXCl0HcHABrSly9CCDQTraaeKphKoiZj8xGOhJRgqF0vVYy0XbaAaGXze\nCuJDR1cu4wm53C8YOkVbgl0pXCHqvyDnumq9ZHHeJwzQ
qc9CdiJsBpNh9vQwsPDDJq7n3f9eZZvO\ngZec6fTVbf+MbO8h64Powf1J4G8O/v+wUuoLwBHwn4cQ/snDNqALx+7zh2TWcXQiwNh6ktPsWHQj\n1nj5EWQncTJYpUwvnnye6Iw0nMDSjVOVVx0R/42TXe7tzriVPcAFTY3BEPAx0DTQZVomHuxM9QC9\nN5qrfGHxHP/k7Y9y9/6McJR3CigEKWXMrqhfPLtzxPNTycL27ZKxqTAh0ATD2mdU3nLcyvtN+Dqt\nAvt2ye38AWWE3E90xUQ11MHQBCs/8aPT+K6s/i2jV/mRyTeZ+5Kvr28C8MrqGi8fXefOvT3WyzFZ\n0TIdS9Mpt+L5miAnQ9URkC9jbgeMAWe64JTI9inIGRWYjip8hIlUTonZ89yw1CPuAjvxdcdZg1W+\nu3jUUb037QdKyk9XKryNU/ZRzHYmnmzcMB1J37Ew7UYZCn0J3AbdSc/LPllWTcYqGn97L7pIKZNq\nR4rMR2hRLQGrQwXFctWXnmYmEI/CB3wK6FaukaYNmCqQzwNNNPdpJ4a2tNhCLMCUArSgBiC914DP\nFa5AoDGRdhhqJYFT5KHpTrTz4BnnBbGHZXOXwcpd9v7LZHbnBN8Lnb7eg3bc+wpwSqn/DHGw/2vx\npreA50II95VSnwH+jlLqUyGE4zOe+7PAzwIUT++gtefgaCIGxQAmUN46QalAtc5p75RMvpNwQ3Hg\nEGVtVGQzJKCwcv3/KaNNKf87d3f5wuw2HyrusWcWzN2IdTBdz0qCWW/5tqeX3SDi1foaf/mVH+Hg\n61eY3NFcOYoBt+nLW1cYXD5i9dSYL97c45WbVwD45LV3+NzeK1wxJ13gOnQT7jUzAO43E95a7/Kg\nGosFnfsYV0phV3x4cp+PjO6yZ5ZcM3IoU6nbhAiQVR6HofESsH/r+FuA4O++vXuNL1z9EF86usVr\nB/scPBDLQVu0zCZrRll7ZjanFcJyiP/7QNcTUSp0DIh0acmMwEIAXGtoncIsDHpuWfpx5+fQjkU4\ns3MWi4ovKTjq3OH2wE0EBqKclIiy077zuJV92jzpk99G4wyVswytGevWsK4zmsYQohm4KnzvjxGB\nvXa5GeQgZpSZIliPG3maabRDbPqWigryfNGOkyAH0E4164nF24A2Ttgk6A7rFDLpF7cjsCtwhcYV\n8aJTe0LjpW9nNKp1Z7MBhutRcGPvhwZ23hrCUi5rUr0tyTT4+734oab1ngOcUurfQYYPvyvEPQgh\nVEAV//5VpdS3gY8BL20/P4Twc8DPAYxffCYczce4kwwdncQn0zXjvMF5TVNbXNn3PIJWqDYNJXqk\nvqniFdEqORkNKBNL1XScD3J+7fVn0SrwY/vf5FZ2wEytWMQx3NpnlLqhVA07es0b7T4vryQb+utf\n+GFG3yh46vVYLgQpTYbT3WwZyBaQnwRGdw3V6/sA/MqHZtz58B6fvfY6n53+Bi/mb9PYAxaZvO7b\n7S479mleNVd5/WSfeycT3nwgZeZXeIY8d9zaPeL79t/g+fIeN6zobF81J5LZkQDGAY3vgvLCFzxt\n5vyB3X/J79r5Kr+0/wn+6d2PAvDW4Q7zkxGrTIQwc9t2lK7tdYo3HRRaqY0xjFbiCwEwnaw59grv\nlKjyLgyVLuJnr5iMKnLrOll6azxaS5+0zBvYobNfrNeRKgYioFBZFibvhAWSd25abRBP2ioyHeo6\n8lSd7jTalI5euZmjjaBah5TKygsIWGSe4nvLIj7OKEIWaGdBStV0f6MISpgStAHdhK7isCeSzbrM\noydOuKqAT8HTRN5srnC5wmX98MJnGm01wQ16ced94R91wHBRL+8yJetF67KBcbusPmPf3k+we084\nOKXUTwP/MfAHQgjLwe3XlBLmslLqI8CLwCvv5TWerCfryXqy3u+6DEzkfwF+AnhKKfUd4L9ApqYF\n8POxZk5wkB8H/iulVIPkTH86hPDgMjuS5y2jZ5adHFGmPZUzHC9KmuOcfK66sjFo2EJqRBek9J8U\nTUGp2BymuwyoBsLbJf/84EW+dP0mv/X2a/zI7re77dywhxjl+WZ1g1dW1/hHr32M+puSSe3eUZQH\nPkJTVDfBZfiyEJkVcgVPV/H8yHD4+g3+t2ef4qWPPMcfvP1FPlm+wfP2AIBb9pjnsge8UO7zpeJZ\nvqBuc+dgD4DVYYmbW75ppnwjv0lxZcXHrt8F4Ldd+Ta38wfcsIdkytHEPl3aFzMYimTK8ZOzL/Mj\nEylff2H+Pfzfb73Au3d3aFvN7nRNHmXAgU5eKQlkahW6EtV5Rd2abhIrdox9Q7/MG5jBkVN4n2Mq\nRYjtBzcynSqyUeGUsY+O2Dmg45weLqRXuTwuCWvDyhU0taUoG3LbbvhuyL5Z6trQ1paQ8HkCXUPn\nDmOinL3xHWe0VtJvUTGLG9oR6lqhs1imZn0vLqkfmyoI7StO9JUPmDq2PRaBdqFwE43LDUTfCx2h\nL84GQiF9OFdKFtdlcFZJFtcalHEEr6X/POTWvdcS7v2Uph+UCfVZvcTtjG47m3uEVpx6P/XtB7Wm\nH7sRPvMX/y0hX0f/y5NlSX2So+cWs5QTKTuRx+dHQbBIYWuamgKgkalbPVNU+4F2GvBJvlwhfMSF\nAEVdIbe1V5rufmqNXmlG72pMBdm8P0bB9BOz7cGiilSlROchDPqC8f92BOurivUNxzMv3uWPPvtr\nAHxu9G1msR660+7xzeoGvzZ/DoBfe/s28zd2sEey32ihtgE0u479W0f8+K1v89npb3DNHDPRlQS5\nreVQUSFF3us6WH51/Tz/6N4n+Ob9a6xWOUXRMC7k/lSynuUN4bz4YCTKVfK92FBLBo6WIxb3xpi5\nwac+2m7DbHfFtKzIjeuGBEPAcZJu14iP7eFaAtyDxViC3NKI7l8eIPMo2wfI4BU4Ba3uB05p2YAq\nHVnRkkdl5oT7axpDvczQRxn5ocaeCK8Z5DxpJoF2Itg7bEBVmuKeBO3yXQlk2Spgao9uQgdtWe8Z\nVk8rVk8H3JUGOxLgb3pd32rCymCPLPmBojgIlFHFJD92ZPMWs2rQqwbqRvpwAyDwhYFq23t1+/bh\neh+cz4euR5FZP2+f4/rlu3+To/rdS4W5x4LJAHB/MebkaAQnEUfVCFrdTR1+EmhUoL3fY6z0URDw\n4xlvUzcxALagW0XVKBpJwnBTkcNpMpmEmVUMnseCK9Pt5vi/LaGZxoymEGZEsEF0zgIizZQ07CLK\nXbTp+mGH7HO/7eIARnc1D+7e4H949vcA8Hc//H38vhtf4XdPv8YNc8zeaNnh4J4pjvjl8Ud47TtP\noe/kFA+gvBffbDBUr1/l717b5x/c/h5++NnX+CNPvdRp2GXKdfCUsWpoguHQj+J9LZ8bfZsfuP0a\nf3vyQ/zCGy9ydNQPA/ZnYtjjnTkTSpKcsXw42xRbAbPRmvU0w9e6s/7zlaGqLaO8Qdu2C25DK8ih\nRJImsFOsu+1
KIDLYhUbNISizgYGEAeJIh00BhyC4SGcMNYL164RUvRItzsLTjqLE+DKdU8I4CCmL\ni1PQJl5ozEzUcExD76IVMzi7DtiVQJ5crfGFwgyzkrgtV0gfzmcKF2ExPosZXKMH09QL+l/nQUUu\nE9Deb1C7SDzzIk7redzZtF9pPSIPFR6TANe0hsWru6ICkprZey3T/aWAOFVgUeUc1hKlfGZjmbqZ\nRanhHx6yVcBWAbtUVNELtK4U7ViYDW7s8bmi2aOHehCHEkF1Wv1JmdWUjhDAWEdoDMErwtp0gF9l\n5EvgytANIBIUoFteGtNmKSXQ9BtyJr/5xm3+p49e5dc++hw/vv8NPlG8yc1Yvv7E7GU+MXqTfzD5\nXn4lfx5Uyegd2d/i0JPPA+O3Fc2rM/7ZzU/x+eef54efE/mnn7n6ZT6Vv8k6WJa+IFMtZczgHAqH\n4ope84f2X+JGccTff+vTvHFfgHV3H+wwmawZFz2xfxjokoJwd9zC5tAhTaaLomFZZBBhIKrWVCcF\nJzZmaVmD3j5MQeGV6oNcmnIbR1E01LrsZbZc37LwJmXwITIeVGerFQIikmo0bg0hB6VBJ7VTFTC5\nwxEz1EYUeCFi41ppcahGhljovjJoJ1owbJVcYIPth0+mCdhlEDPslSZMFJgBPk8jdoO57wJcohh6\nqwhWybluDEq7WAJvlS39h8KpdV5mNsyUzpumDkvI84xw0rpM2fooj/H+fWeVj0WAwyn8xKFHLVf3\npQ6dFRWZdszrgtoZVlXWmQoPQb/DDK57+ypSj2IGlZ+ETjPOrBT1jorqJWnCqjbVSdKkbR7hIlHx\nVbe5gE9riXnaCb0nPTco8EUsZ6YBN/WEMip4RCd0FdU9mkbKEnMS5c4XiuyVEb/y5vfwhRdv8TPP\nf42f2HkZgFI1PJc94Kevfpm9fMUvli+wsAIvMbUinwfssac4hvKBon51wj//0PcA8MVP3OT3P/8V\nfnj6CjftwQbNrFQtTTA8CCUT1fBT06/yoQ/d46/nnwPgy3ducnIidLbd0RqtQo9VA5leR4pXAvwO\ns7CABLkia6knDW3KZiuNWhmWpujKUjGT2ezD9X8LxSndnko7HZVxdTM4DwpwVmAdrpQLWeKTooM8\nLvqXGuuxmSOLDA0bA13VZKxsTtMWXXaeNwl6JJlacDGLyxOGDpqJgNJtpQiDoKtcwK4D2ULRThX1\n2uDtgJ+bLhpaKgSf98DmYBTeKILRYGKQc54Ogq5VjxC4TAC4KGCcdd9FCiEfVB/uvHVRZnfJ9VgE\nOJ15nn/+XWZ5RR6bVj4ojuoRPigZ9a8yGTQApg7oNsJGEgiL/nfQAv4Nmi7q2WoLrxQUzSz28HSQ\nng3yZUlZQYIIbFwkY3NJTnbptaWGsCslsNX7HmaihZbFoUlhHdb0ir0guKxVMjteZYSjHDvXrF6d\n8XfW38+3bz8FwI9ceYUXi3e4ak/48Z2vc/3FY/6WETbc3O8ye01RHIGpPflJIFuq7lit3t7jf739\n2/il732BP/GhX+ZTAwXiRcjJcRgC62AZq4bPFG/AM58H4G/bz/CFN26xXBZoFZgUdaf267w+BREB\nTnNdkSFFXjS97aPOUGtNWFgWYYRzmp3xupOCkrI6TYWg9baTJD9al9S1ESqYDZLd+L40dYX0J9uZ\nQ41bsrLd9EmI+2hUILOOSV6TD7xePYplI5/ZYm1wiygAsJZAItJd4BspV1Pq6QuPLwztSGHXcm50\np4yPbZNKsji11oSR6iqDpG4cbISLWNVLoWcBk8cszmqUUX3GBWcHorPWWX2ts7K271ZP/v1INqXV\nPffypeoTuaQn68l6sn7TrscigxtnDdfHc2pnWLYxo3EC1AxBsVyU6HcL8qN+JJ9AksMJ6jDjD0bh\nGUgsxaXbgF3E/p1R1Dui+hpMkr1RtGORNTcrKUdS5dQR+7tMUfpt7VQeEK7UTHdX3BivNhy1oEfc\nb7hBFeDG4lJVzSxHs5LFssAvMvxxzhdekSnqvCk5uDLho+W7zMyKT4++w8Fzoiby8/7jnLhd8FAe\nCope14EiIuyzZWD8ruLg4AZ//tM/ze/92Ff53btfBeA5e8B6MG1d+oy1MnwifweAP/PMP+Zv5J/j\nF197gcP7U9yVBTtR8NJoD2dMWIcl6pD2lUdIBkBlPLXNYG1gYVnVmrYxNFO53s6KujPgSQogqyYC\nl9c5rjGETGAa3sZJdfycXQlutyWb1UzG1UbWOczU0ntIhPz0GbVBlFeKrGFV5LhSjo+3ETbSQmgU\nqkGy/tTfywNtGbCFDAiMUX3G6OgqAlODrjSuNh1MpDtFY8nr87ABE0llqk6Zm44lKwjg/DLrIj+F\nYX/tPNjGRZndw+6Hs6em3+0Sl8ckwLmgOFiPOa4LlrFkW60zXGsIRzlmrikOVVcyBtNPOsMwzg1w\naEEhPYvutngyaZl02lUcJChFbRVhlE5UTyigyTTeSuNYDYQwXCnaY+3UoaYt168dcX0sfcPro2N2\n7JqVy2mCZtEW1JFE3notZikD2SCQPhjAfrFkr1yxmOYcrkrmi5K2ivzZg13gQyz3cp4v73HNzvne\n8XfkOD2b8Y+qj7Naj8lWqsPgJdd0U8tEee9bsDwc8/fufobXf0DoY3/m1j9mzyy7vly2ZW5xy5zw\nx65+nspZfvFbL7I4KTvFj2lRQ5ywDldnU+j1QIdNeqJlLBUz46gyx8rm+JMMvTA0ruAgOnatxjVl\n3mCN7+AoiVMagiIvWyhbwh60jaVeGYjaatiAmTbMpqtOMinvpNJ7w+jhBWdjhf42Yz1NlEtypUgl\nKRfLVAfeqX6IpCQwuUJk1sMqbFRSqQ+nI1HfNZpQDgZbQZzAhNVAp0bdKeMY1f2oYWBQCZDzkJWC\n2HmTyPMYBZddj0Ls/1cY7B6LANd6zXFdcLwsWR4JhMEcWMm+tEA7FruB7Ei+AMU9RXG4pR8XOIVL\nSxmXK6HeiQDVQhQpAOyJ3J/NFWGRQKjS6FUefB5Yz1xfyBeOrGwJQVFmjjxrKW3bZQArl3WQDB80\nhW47H4LGG3xoO65kCnhp+aAY25qdbM3VcsHdcsr9hWRpTWO5dzLh19V1Wm/wI82uEfzCpydvcvDc\nmJcOXiA/Nozv+k1X9iDB3S49szc92dLw1fVHAPjzP1jyx2//C36gfK2ThSoH/a8FllvmhD/01Es0\nQfP5157nOIohhKBEC26QkQ6zORmmiLacD0oyuCRpZAJZlCFfeEVoxLoxRKmlVa1pxpa8aNA6RLWS\naFhjHIVxUfPO03jDwXLEyUKGIa7VXaaoVJAp7Ras4jzxzd5zVsRBi7KhGclXpB0ZUbWpI/wnTVNj\nrwzrZTJfJJEA1VUZxoUuixMfB6Gv+VEMspmPMSFOZk3fU+zQAkaJuYjWBKU2vQ7SoGG7H5cCzUVB\n7TLrsv29R90unD1I+ACD3WMR4HxQHC1GrBc56iQFGo8aO566dsw4a/BB
8VaEL7SrMdmiR5krL8Gs\nK1E1nSjiyW2or7c8dVO4m9enc64UC5ZtTu0tb5+Icq6LODhVK/JDjW4UzTQQ9h17VyRDW1U51TIT\naEhdsM4Ci52Cxa4898Z0zl6+ZGQ8mXY9iRs2JoQ+KCpvaYOjjhlQ7S2tD4xtzW62Jteukxe6u5hQ\nt5a7i4kAa7Xj+ZhZzPSaFyZ3efnmddbv7GFXiuJYoZM6g0O+ECGgGigPHPtfk9d85+QWf/GHRvx7\nL/4Tfmz8LdbBiPDAQBtvgeXF7D5/8ul/Su0tL70mZfPR0Rh2EQjJoFQdBg8R+OyPgRncZ7VnUtT4\nqWLZajix3cTSmYgVM55x3pAb1+nJJR/WBB8pTUs2dYyiY9fhYkTTyPDG6CDZWzzLLe5UBt1607mX\nJVNu8bCQKaeOAOLEFdUt8SealA+HjDZEJWlwOdjIcgg6+ji4gG6UYOWagWqLCrgUhEzM4gYtmB4S\nJbAoFQOd3H9GtnQWif0s4cmHeTwMH/sw/usHNZzYxsu9z2D3WAQ45zSrByP0qMU8LT2p6bji2mTR\n9U8qZzurPFeEKHApPba2VFT70M7iyH7sCaWn2Ftzc/+Yj+32xjKZ8jRBs2MrRqbm47N34Jl+X14+\nvsGdwz2O354JUn1SU0fPzfrOhOkbsVRS0E5EFugkOtqvyjVja/FBi8fp4OwX7bMkiSTlUu1tF/jW\nLtD6vge5l68o4kQ5BMW7J1OW64K3/GY5eas44KnshA/tH/DVG1PyI4tdD/xlfei+hEHJ7eVRNMH+\nluYoXOHn1I9Sfqzhk8UbGMJAw06yswbNh+wxf/TaS9xfi4z6t15/mpNFSZG1nXHMcHVZXULrhz7g\naxVELFN7yryhmRiaRuM3vpgyeU6SSEmws9s+aiPI2aS8GxQH8zHVKsN7CWaTaJs1FNVMZtWNN11/\nL6kebxhlDyAcQcfMOAzK1IiflAxLALveCt0qJHBf2oQLaCcXHVODi7Ann20euxAxfAAuU1gdIoMm\n9t62A9PD2AzdMd2CgTyqcOYHHcwuWh9QGftYBDilYHptwaSomRXSxC5M23mDJh2vzvS4VhvAzuMX\nPfntBZ++/jZALEsCE1NTecu8Kbur9HY5pZVc5ZOG2JViwa2bh7y1v0vVWq6Pj3lrKZnj68udKHQo\n8ARXSMbXtlEGyGvWbYZWHq0sduBUn2mHjtlDKltHptnI7JZBsXYZbVNSmJYrmcglMYGTJme5zpkv\nSlxQlEYylrGpGeuavXyF3aupdy3ZwGNWBSmPwqAhlOTt8hPPzm/AkbnK/6h+gj/10f+Hz41e2ehT\nZR2XVfPJ/G3+8DNCLfsr9ed4994Ohycj9mdLYTycIYyptRcVj8F9Q85qbh1F0dKOWkL8ViuvCF6C\njNW9AfTpE4fo0OW7E3ma1xybkqrVVHXBvdZwEv0riqzpuM4K6f2u64y6jX3S1mzQxdrGEKLPq27E\nY7f34yWyVWKAi7HP23gBNj1YN0GRgorWgy56OySOrAooBtxmTU9r04ptxeruduiGDhtYuLMC0VkB\n7azbLhoynAU1eRjT4IMIhqeC3eW3+VgEuMw6npouKEzLTi6UHB8Utbec1IXohbWW5lDkdrJKmufK\nQ7Wv8Hs133/zDY6j7VwV5XHeCaq/Yg90+Zso3ChWeJuGKFZ7lrrlejkn046Vy3j7WEC17SQIbUvJ\nydtMhZeYJ9K017HHJgDh1odB/yfrFGwTmXxYxrZBU3tL4w2awLwp2M0km93LllwdLbl/PMHXhmUo\neaeUfbo5mqLzOTvZmvG4YjEbCRZrlUpUKVe73QiDgQuKfOHZ+Q3FcXiKv2I/x5WPnvCp/O3+WAVN\nFqeZmfL8WNSZWz+X8dfcD3P//pS5LZmNeipVWlrFstScP+lLEktV0dIkLUCvCLWWlkCxqdh7ZqCD\nrg8KEe/mFGppCMeWZS7nzaJwKBs67FpwquespqUCnQ9vlHoCYagkUyTV4SBVFFqVfcZ4QuYJ1ojx\nkU2T0CBlaZDgpqM0evLq9bURLm2XaYfTPTgd+mA36MEFHbP1S1j2nTktvcx61B7bNkPiUZ77sKUf\nTbL8CQ7uyXqynqzftOuxyOCM8lwfzzfKtbWzzOuCxmsy7XmwztEricd2RWcraFeAU7x5sts1otet\n7RRmU4a2UXZFuIBWgTKWiaNY8lXekmnX9b9cUNzcERXdN5+H5bUCv7CoWotF3ajHd7kgmK2RbbpJ\napqUDu37NojknSyQEZu+oPBI9rpoJfO4ki/YydZCDK80rtIsp9KrO2xGXM1PcHGqOR8TfibtAAAg\nAElEQVR73Mj0XgL1JlxBWkiDctVBPvfsvqK4P3qavzr61/j3b/8CALfMETUiJilsB8Mk1re/d/pV\n3rq1x/+++l6WJwVGe2Zl1XtYxEzLGh99FVSXfWXad2ojIGVqWTa0dfJ41NBo6rXlpMgpbNt9tudN\nQNeRnV61lqYR2Eg215jVoJ2R6U4N5swkIMGOdJ/9pnLe1MJu6aBIHmE2xBJVuX6imoZcnc1EgnJ0\n5a3I6Cf+s/QK09Q/xEFD3OfoTZHsMVFxmnpWafkwrNqjrvfac7uo1/evso/HYxLgrBJZnMN6xEld\nbNx3dbQUPmqVkc17LqqQnwOuAXsv4+DqCB2VHcooh52ayqlPA3QwjmG56FEsnASMiZGSqNANhW65\nmp9wuzwE4Pv2DYfNmDuLPe4vJwI6dbpT31g3tnN1KrXD6rZrvrf0XMphuewHAVAN7wuKleub37U3\nOKfRK4MfbU5o79dTVi7DqCCem7np+z9d76bn425/uZUXFeLZq4qvTZ7jb5WfBeCPP/V5bth557lg\nCNQxWuxpz++afZXXbl7hn3/zw6xWOaO86VoCIerDgXwfQxjqy6lu0CD3B8qspYlOYOsY4HxlOFkW\nIsc0kq2Vpu36peli0LQZiyYG/OWIal5gTwx2IeDcFJREmSMFn3BmsFOxNB322uKH1R+7rkSlP6ap\n3NWDYDSoj4JSKIRimPisCQXgnNDNUlmM70Uk1ADxQ5yinlL+uAgLd17ge7+Dg20IyllqJOeVqNu3\nfxeD3WMR4DyKw3pE5WwXEFxQ7OQVx1XJu8dT1HdKChHXwKwDQcPipqaeBdzMs5iX3dV9VlZMIpMg\nfQnSZTyZECcEe6GTxVzkjOoWgzwmU5IBjm3PkXwmP+J2ecD9nQmvL67w5skO86X0/lZVTusMjdfs\nFoq9fNWd5DoMsrdBNtkNPdBoE2h9OAVleNBMqJ0V4HMWwAauRAZEoVuaoFm2OVVrpZltGQS4uJGL\nLu6Rt1scBXa+afjF6ccBePq3zPk39z9Ppjb7mADzoHg+O+TfeOpfcnc15bV3r3C0GLE3FXyeimhr\nrQZ+q8mTYWBeY2I2lxvHKKmWjDXNSQ6tppkXHHjxV02fbRFl0avWihBDnbFaSYBrlhnqxIIXuEZn\nII5gy0IcAvhMIB0h85uNmi5
DU9D202ht6b6Upuq3m4Y5uo3aggAqDhkGWZiK01PRC2QTw+lBRf50\n2ocu67RDmEja/jkQDwb3w9lBZ/sx6XGXee57JepflMF9N/p0g/VYBLg0nk+4r7SO64LDkzHNaxNG\n93smQ7WvqK4IoRoNOAhN//WbZDVFVKZNA4WUWYxtzcTWFFqytBTIUkma4Bemg3RsWv8BXLELdu2S\n/WzJTn6V1zNhBjxYjlhXGU0r5WbjTV9adXvXZ4/DkrwNcgw0Bo9MSdPkd1ln3FtNMNbhRo6dKwtu\nTgTX90x5xMplvON3ZBoYBHfVeXZuk9+H/fSUNShpgJsaygNP+w0JFv/w6if4vvEdfrC4I9nmAJrh\ngqJUnu8v3uCP3vxVfm75oxweTpibqD4yXqGN2xgKDH0d1CCIp98JyxaCYgE0Jzmq1rRtzmF0RTsZ\nlWQRlpOy5+BVbwupgL0atytyR2qlMav+TQcbcCMPpcdEIr4eaDUlE2zvJSC3kV3Rrg1+Ycjm0Vgm\nKZiEwRfW02dwhr5kVVE7UMXM0G0GRxBWRLK4HPjR9GUzcnsKBskj+Mww935KyotuO++1LlDgvVRm\n910sYx+bAPfuyRTndSdZE2JwWh8XWK+ioqo8Phi5+pqFpt116J2G564fsFdIVjO2dUeRmmaiUJJ6\nbEX8O2VpIMEsBbZMt2e60kNf3oKYu+yaFR8e32cvTju/lV3jreMdqsayrHKcV8yiO1Jh2l7I0YeY\nSfqNbafSSyNZXOor3V+NOVqOCF4x2llzZbLk2bGkszOzjqDhmOW0miGlI8QviGIzuBH/T9mb3CBI\n+/Hb8v7vv7LH//HU9/HRZ97t1IYTWNcFgY5MtOd3Tr7Bl555ln84/x6Wc0ljiqxhVtQ00cRmGOgE\nLtIr+G5PRpP+3NxrnMvQa41axMz+xNKWDnKPyT3aOGzuO8WQpNpSGEfjNcerktVS9smtDGgwZUtZ\nNoxykTvP9PBzUJ2QpwuiZAOSna/zgoYM3SjJugZZmPICIwmDYDmkDvaP23wOIGYyOKFqpe922s72\n1DgGOXVWdtQf4M37Ltufe5SAclHWdVawGj7uIsDxB5zZPRYBThHJ2LrpQopWgfm6QFmPG3vsQqgy\n6Rk+D7ibFTeuHfHhnQdY7bpSCuBKvuz6aM2AEsVg+2YYYAbBrMvaOg2y/nFmq9bLlOuC5/XRnMYb\n7i/GNM5Qt5ZFyloyRWZc5wfaYBhKfG+vRZt3VK2jozHhMBd81H7FtdEJ1zMZfBS64aiN5X2rUY1w\nbbuySw1+zqtWUpBTRG8LeeD4Dc1Ld57ll3df5HdOXhZMXMIidoEOZirw03tf4mvXbvDaG1cBkZyf\n5A1GBXxHDo6vF0vWMKSfDPcHCXKt1yycItQZOpa3OoC3GjVpKUc1o7yhsG1XtiZIScqOZ3nFYSH0\nsqN8hGs1RQxu46zpBlHAqfaB87oDGBstWV21NrgyejEMyl8gZmeq+7u7OV1kUra8FRzFXFbJU8Lg\nSfQXoXPXWXi18wLDeWXlB1ESXhYbd57M01nwlfMyu0dYT2AiT9aT9WT9pl2XcdX6S4j/6bshhE/H\n2/5L4E8Bd+PD/tMQwt+P9/1Z4N9FJEf/wxDC//mw18iMY1ZUNM5wtJIeztHhGA5zync1xREoJyRm\ngOOPtcyemfPpa29TmDYyAhzTaKs1pElVsds+zOJM8BF539/WGykbxromU26jHIXNTA5Ax56UiSO2\nXLdMbM0qz6id33BVX8eGeB6J4gBD5YqklNt4LU3zKmd9LKVV/laGXYrU+uTZFZ/ZfZ3vH70GwBvN\nPoftLao2OrX72BjvJJ5UN9VTA45pWmGQ6akQBwNx9/LjwOqNMb986yN8urzD89lhfwwJ3dWxAV7M\n7vE7rn+Dv37wQwC0teGkytkdraW3uNWP61gNg16cH2SHCiizlipvaUrTZY7KS4vCGs8obxhlDSPb\n9MeUTShJblwnpFm3hrXKSFLr4urVtyqs3kqVtCdP79E4rPVUmccbmVJr1yegKkJGUr/tVFk6yKLT\nce6yv6R0rKI/hN98/qkCZFDmqSFN6yxA7kX9seHjzrvvvazzSs6LHvMoE9hHWJcpUf8y8BeAv7p1\n+38fQvhvN/dJfRL4Y8CngJvA/6WU+lgIwXHBWq1zvv2VW2THvRLt/lEgPw4o73G5YnFTsfiEBLDn\nb9/jhZ17tEGT65Zct0xNtSH30wRD5S1NMKxcRuv7syTxQjPlsdpt9OPwsCRnrGuKwYBhe/mgB726\nRNWSAcFTo0UXDBeNBKmTJhdl4lb0xkJQeK9o43TQRzqTbzShEqPkPNa3uhX14Wu/5R3+8O0v8Adn\nX+JOK/4U77S7PKjH1NHhykdWRerRqHjyBE2UZg+bnekw+J1uj881taK4p/nm/Wu8cvVpPpodnHks\nXICJ9vzU7Mu8fPsGAC+99hzzkxFF1kqZujFs6CEk/fHsA13aFaUCo7LBe50ElyHxN73GeemXJQI+\n9NStJAGlCd3wqshaqsbKcGLAO90uk4eqIhutCy3uXcFwuuSPZWdIZWrEu0F/EenKzRQAh+VoklNX\nMoBJS4VBGdwNHvovf+/PMPjyn9eDOy+4fVCB7ax1Xhl8Xul5mQnsI6yHBrgQwi8ppZ6/5Pb+IPA3\nosP9byilvgX8MPDPLtyJueLarwpPr78yyQnSlprjDyvqj6341G2hEF0v52jlGWnHxFR9BjA4GdMJ\nKiodpsvk2sGJn2AihW43RA8BGmXQQZosKZClL41D4YNm6XPmrmTeStY5bwty3bKfr7hZHnLSFry1\n3u1e13lN1ViaxtLUFrcyqFU0YmlEbyyxmtzYU9+SL+b1G4f81K2X+R3Tl/lkPudr9Q5fXj8LwK+f\nPMP99QSrPeWoZl5m+Mz0UzafAh0kms/wi6kG/4StEyhhtuZHI+61Mw59zkz34nh6K/hfNyt+79Uv\nAfCN+9c4fDBlvirZi5CWjW2rXhDTxcml2yLsaxWYlvL5zuPn4toMvAwMVnnOOG/wVm2eA+kz7npr\nkW6V4Cpe0bSG2shFIWXgGwY3Kp4r8fOw0cO1nwLEzGo4SNgIbnDq2rj12P5DCEL1UqG/3Z/xnHT7\ndgb3sPVBA4DfyzqvL/heMrtHWO9nyPAfKKX+beAl4D8KIRwAt4DPDx7znXjbQ5duRSJmeNXzVnP0\nAoQXFnz2uTsdNzOLE0itAk0wXcmZDaaiaMni5Io+UPIYXJV90FRJqCCVo/H4NcGgfYhTq9hoToDh\noGlQNMGwaAuOGwlwtbN4FHvZkkK1rHXWvVZhWpyVrMEFhXMKp80GEl55aEcB+/SKH7z9Bj/51NcA\n+FTxHW6aJeug+RfVVb6yepYvz+WwvrOaUbWWzDh2yopqltFOLe5Id8fxVB+/y9S2glpshqeTKHEp\nQ214u9plMc3ZQ8o9PThWaU204reNXgXgF55+k186eoHVImdc1JS2pelMo3vhg1TGy8CF
7u9kCm21\nZ1zUXQm/qOJFoTJUi5xF3jDNKy6zWi+wkrY2aC1mOMNyNsFg0rlg0xtN++oFktLZSm4PCmI2qN1m\n+dod3pCOefw/Kb44JdCZM4ZBPYc4ee1ubjQJuPY3fAADg38V6zJDiYdNYC+x3muA+4vAn0M+ij8H\n/HfAn3yUDSilfhb4WYB8vB9R4aFzE2pHisOPB0YfPebHbr+CVX0Z0noTaVByAEYRwtAHLsPaZ9Hl\nvb96A/gtmlQKlmklBkShWjLdkinX2ewBNMHyoJ1wv5lQecvd9XSDjjW2daSDtcyrknUUcUzwg8w4\nQfRLTMRl0cSlMYTMsbe75LM3Xuf37X+RW1Z6XobAa+0O992UL62e5YuHtzmKwgKNMzKdjVi/KzsL\n3nnKUq2iMMFJQs2HMzIH+TVEIgRN58npChELVYWorRh8B53RsQeXCn8HuBAYx2399r2v82uz28wP\nx6zqjHHWDLKs3oLQRRiJUqGXFrd9cFMqCAg4YuTWRY5fGfRa4b1lno0o84Zp3ksi9eWqBKXEjllU\nOc0qI9SadRC9OaM9eZT/zrWLIgy6qwCWkSFxUuVU6wzWeoOyNTQ6QoXeD3cgdb9x3FUMfn6IVaQn\nI6QydZDBbfT6Auf34N7L+m6XqJdZjzqBfYT1ngJcCOGd/jXV/wz8vfjvG8Czg4fejredtY2fA34O\nYHrl2QCi67a4KW9i+WLNCx96hx+8cgeIV182Sw8GuLQmmC4zSCVHpoQn2gTT2+WZPhCmxrWAfnsm\nQz+gyDhwk668vbPa5+3FDnePp6yPC9TagIcwli/mtRtHPLdzwI5dkykpn4dLFEQ83jq0gmziMBHv\nlBvHJKt5bnrAh8oHzP2IVyMGa6ZXHZThbj1j7WzXpJ9kdQc3sdozsg3meuBN9gA4CQX+HU1x6EVh\n5KxzJGZuwShcpmgFVcH6imJ9zTHbWXElW7AOGXC63DSxx+OAMp6En8jf4nuuvcOvPPgIdW1pR7rD\nm7VeE1Sg8RJEnNsMcEnpJa2kOAKQFw0rk6Faja0Vjc24p2asJ4Ihyqzrnu+DksFCHZWCTwo4tphK\nE5aGk1qzHucUAzmlLAY9hTAlOhzcKsfNM8xSoyv1/7b3ZrGWZNl53rf23hFnuFNmVmZlVVd3V3Wp\nB6tlQ80B9gNpmrYMW+QLLduQxCcJEEAbEAz7wQ80ZECCAT3YgPRkwAYNCpYNm7QBSTYBP1myYBqQ\noBZJFclukj2xq7vGzMrp3nuGmPbeflh7R8Q5eXOqoe/NW2cBiXvyDHEiYsf5Yw3/+tdmyEjKlQUN\n/00jQ+cCDPL60FNyxp5zP9Ywg1wcAM506QYUR+kGH5UtDU8O4Z4W/C4C0MHTFSiewT4UwInIyzHG\n99J//xzwjfT4N4D/VUT+Nlpk+BLw9SdtLwqsXxCOvxI5/IImsv/Nz3w/NZEb6uBSD2e+YjY/P04K\nA3TkXFm+Ew+5nXGergvq4Z2mEBNUduekmfGgnlF1Tkm7ld7Fm5MJ7p6jfCAcrqBY6AKcvq7bXxxN\nOHqhYuEnfXfEYamAUK0PhvxfCsFm1vfDafaLmrlrqIPl++vr/KC6xo/t/xCAvbKmxPPd+iZ1Ardr\nU9WKGwtB5oT6jcmiD9vePTrkwY0Dpu85ZrcjttKB2Hrih3Ppy0ym1i4RgPp6x/7LC/7kjVvcLE7Y\nk2GQznZ4mkEuOx7XbMWPHb7Fb08/r7pqwHikYA5Je68uSA/2NgG2dnYohmRwzNuQ5NWY2tDVlqXR\nNSqKYUSg94a2cXRJhklOCopTHSaEEfyiwE8KVrPk4ZUBKcMgp9QZSA3xUhvcWmd09LNYGVricuhp\ncgtXy0iTj151emiahzj+9eX8W0SlnkZ8uizRRIhPL24Jj0/Yb9tFALdt+xh4ek9DE/k14GeB6yLy\nNvDXgZ8Vka+hy/Em8B/pfsRvisj/DvwB0AF/9UkV1J3tbGc7+6Tsaaqov3jG07/6mPf/TeBvPstO\n+Aks/o0lf/oz7/GTV9RrWYWSNljaaB+a9uQxfVN+G62GlSPvQHNFBsegBNtXR/t8mcFI5EE9560H\nV1j2zdol1EZDkUY7JrIDWC6F8kRwyzx8GuprQvOC3qp/6jNvM7Edb62v8hZXOSrWI8b/ZstSVh3O\noWblC1XzDQYfDQdFxTfkswDc6Q54vz7iQTtj1RV8du9BT30wROrgWHaleoje4YznxdkpAC/OTjm5\nOuPWq/s61OfeHHdXl92tBzkgP49080i8VnPlqnqHX71yjy/s3eX12Qf8qck7HJiGIucv0fybHd35\nx4+vGbhZHHPlcMWd945YNwXFdJMckkPrEKSXnIKhwprXzUrs11tScj9a9YTC3FPMWvZmKQfnhuHa\nuYDhs6ClUTerb8APqjaSpcOjMX0vKSGlw3LQ0EnvleX8Wtj69eRhNDqzIfbDn7Mo5kZDRy4ooCGq\n5Dg07V/v/XlUKDOkuQ45/zb2zsaV1LO8nefNc9u2H3WI+nFbedDwH37lDY7cmuOUAGqjlvDntsEn\nhKlHV5SRyMR0XDXakjWVbqN3VKejD6FpBjaP0CZ+nIa+gXVXcJqmRRW3CiZ39WLWyVrSq0To1HsS\nnUUT8N0MXnr1LgA3Jyfcqg85aaa9XPmY/9Vu0SBClH5ie262j1FbuprguF3vA3Cn2SNE4aiouDk5\nYWK6Ph952k1ZdiUn7ZR1KmjAMOSmsKqU8i9fe58vf+596lD0N4xTP+VWfcieq5mYjmtuycvFfV5w\nOmTnBbOkEM9cOkoJFDK0vjzc/LZpAbjhTnDWI6Wnbh2SVH+NxFR0jL1MFND3IW9z5sbWNDaFdxG/\nFyiPaq4dLjksh3mtGeBCFKqu4LjQFMSJzGilJJRDhZlR/6eGgbJB1B1mW/BQfmybBiIBjB8BoR9t\nN3/EaHVaFULGCfXYnzgZqZjk75FUmND8G8OcgrF92FDuouTfPgG7EAB3VFRMTKc9lQnEslc2zsHN\n7SBblP9NTNt7bH7knbWMaB3APLkqRiLBCB5DGy3XiwWvz+7wz4rXAHij+xzFyZTJ/UixjrQz6Su7\nEofcSyZt+gl9EttI7Jv8c8P8IolWrttiI3W4/cM1ox+7QadrHTj90TrjOXRV/8N/0M2512if6qKd\nsEjCoE3nOIsWZQTuFPvcqg74ysEtfurgOwC8XtzhQDoKgTrCXLQr4VH9e6XIxmuPa5EEnfg1sZ7Y\n6ii/8byG3OvpgyF4Q1k2bM9zGJ+bPH0sBoN4wc8C9qDl6sGKo0nFdKREMyb65kosqMd4YqDbM4iJ\nGBexzjOewxC80Xmn3hA76eet2rXBVoIsZCgGjLXkJOcEVY5cb5DDYsRcGRUZFXXyDtN/f+5w6IsM\neVBNF5EuQAiI98O2n0SdeJwe3Fn9n5fMLgTAddGwSG5SbpfxGNogfevU3DYc2XV6bSgchGhAEqiN\nAI5olQaCEnVDrsCiF3/ehkcoxPPjV7Rau//
lmn9SvI55Y457VyWE+nJ/roBZ+tmp0zvw4F3tKnhj\n/7PcnJ3y2v49Fl3Jg2beCzHmfH5hPWN12/zjGk+lyo3j2T6o9rkVD6m8w0cNz/txgwnUwij8HdUO\neiej6Syn1YSTesqdRj3Dnzx8k69Nf8Dn3Iq5KNVjTP2wWxe+vjYKSQHfi3duWoHwVvsCVed0fUaD\noEng1nSWtrW4YqC56PoNxN1MAM5imb416vm4SFHmubSb3z723J2EPpzfnzT4YOimRlWEy5aJHea1\n5u9uvbaG1a3rb171uqC9XyLeYJIg5oZFeiFLTV9ERoerlv6GpEs3ANyo6JPC0ywNJok+ZXwceXBb\noPY4tZAn6cFdYu8NLgjARRJdANNfOLn6ObOq3bZvqz4EzWCmpFtLgYag7ajv1ErARM2pBOLw2ZTj\nshLwSaJo3Kf6J+Z3KF4P/D8nX8UtHOVidKFaCAU6udzoheiqyNEfJhrJ7c/x3RsdFFGVWW3k5ita\nFZ4VA5dOJPa5uW2vpfaWO4s9lqdT4iotj41IEZgfVpSuw5rY55fOGsKy3X6U3xGicFJN+Ha4AcD9\nes79q3v89N63edWdsGcyxy29H+W2jYHOPAR6Qogau7UxMjd6Lo+D59RPOVlNMaVnUrQ9jacNhs4P\nIpbGxJ7nli2DXEwE2zy6MXqjU7ck9jy2rMqs+5NyhCMF5zEAugRovZrIeJTgSGm5C4a2tKxKBbgH\nMmNVOcLKEKxWS8cnN+f1bE2vNt2zmbZ4hmOg0y9O9J0gKdcnG0KatomYJgyctxDOpod8mBD1EoMb\nXBSAi7lwMJr0jlAmom0hnjoUDzW716HAR9PnlHqib27XkUAbHPXI62ij7XN6GeRyPg7gyK35/Owe\n1155QP3mdcZUtlAquIWCkZskPV1kch/8D20fyp6+Jtw70FDyX3r5tirQBtvLqJ80E1b14OG1raNa\nlEx+OGFWDTSC9csd7mBoKO/82d4a0OfxsrdkJW6EYDEKi0q95VVdctpO+P7+db528BY/Pvs+XykG\nnpsBEMEnjlspQhuHNRh7cwGYG5vADpbB8Jv3v6wSTonnNXidtu8JBeWfwYgyEmUjtIzQ9+zSKgjE\nArrO6LBmO4DYWJSzX/P0vT4dv7PKR9R2vYBL18+YmmJMShnkc2HiQ/m63AKqO6mCrLaOuHo0xYzh\nXhNNzsHpYOjMf5NC3xw7SZO6hu1KTGDp078ubIalT9Oq1e/ICAQvueeWbSeXtLOd7ezS2sXw4JBe\niSObS54bQB2d5jjSba0QbR2qQqGhrJGHqCTZi/Pj/0NPPWmj7TsegN579NHwYvmAadFxehiZ3Kef\nUOVL+oJDrpKJH2gkoch3XL3rzm7ByZ7KEH+veIHPXDnpW8HaYDlezqjf0nyYqbVEuXdPaSihgOWL\nKXQ6HGgQPpgNr60nv6cmchFVvcgejUthXP6sDwKjuRcfLPa4t5zz5uIa3zp6iR/b/wFfmqiowetu\nwYGxBCLtSB5p24wI++lSei/ofv768b/GH919kXZVUu41vQAoqDemg1ZM30Uwtg3vLQpN53rpcOm0\nAV5aQ7MuuF/MCFE2+lG191hzlbV3LOrssRZapXY+yVZlxZCkBjOipoQotMEOXmfjoDW67uHhvJqt\n9Z9bb4anY5MAwer0rWjR+Rqg/a3Zm/OS5MxTqN3GdJ0F6IsMTyrvPMIe1wZ1Se2CAJxaVvgA+tap\nNo3TgyGEyWCmLVipGiejboWcz0M5cXmSfN5GCAMnLm9zL1Usr7klt9tDVnWJrYTmYEgGZ/WIzIUy\nbTyTDhBFqQDFMnLwA/2uk+KAe19uefXofqq2OtZ35uy/k/ogTyN+IphGq2brfelDGGcDXTBD4n10\n7nKrUw73rA0UNlImCe+cvG9Te1QUzV/puVGQ64LhzmKPr9ef5xv3X+YLh0p7+er+u3x1+g6v2GO+\nWEQ8sQ9LZ1ISiBRiqWPLKrT8brPP37//UwD8v29/kZN7ezrQmAxqiZ4REh2m7FIOjY1wUK8FzVW2\nwbCqC7qUj5QUxnkLce1Yxhl17bhfzPpzlSXHu9TJ0K5yg7PeROpZR+cN69IxLTYly/sBOVFovGWR\nenqbVYFZKzdS0h0mmtHaex2GpOFkqqDmPH7KbYZUnNI8biS6tJ4mxbut9rmOrynbRmztEw8uwFng\n9rhw8+PWenvO7EIAHJAkxwevTae+G9ah6MGqL0DIwwNLxoWCcdtW7vUZg9zMthsgWIjnmlv22/nW\n4iYP3jtkFofCAtATMG2liV/pwGzxlYCNUW+TB/rk/B3Dvb0rFDZwbbbSBvNZh0k8OPEouLXD92U3\nIQSjszNt3Kj4gdIm8oCUTHPw3vTVvxjph6o45/viBpBUTQwhGGKEqi54EGfcOj4A4HeKzzKfNLyy\nf8yL0wU3yxNendwBlAKSz/kPmuv8/ukrfPv+De58oBXl2BowETExUS+GG5CYwGTSUbquB+K8P6PT\nSAiG02rCejFBcrtVhG7fwyRgSo8YfV+1SusvEDvV1JPOYJeGvn4RE7hMtXiwKgNSBEy6kRjrsTZ7\njtC1KmkFICsdQ2gaXav+cks7a2uwTQKnmBrpx6VsUaDTqV7gZxGZ+XQ+IqExCt715khBW4+qp35U\nYNjOvT0vKiI/YrswAAeJ3zZCjJxs3h5aEpANHS+/FTxlbf3x6+NG/BwmmqhzGa66JXVCle+uXuSf\n//GrlHd1BkRwm6xyvZAHcNvQ/dq+oK30PwS3ikxuW26Fa5zenPCl63f40mdv852Fyh7tfd/iVpp4\njkb6UBcgdLphVaEdigZAKigoWz+urVbi/OD94SLYiCk9nTM4Fza7BkxEJChIBrxYOCUAACAASURB\nVAje0qY+yHpVcOL3uGWPiEEwRejBMqKhsG+temSN1WHY+Xtt7MMuUFAry3Tzsp7C+Q0V3rF/4YwS\npJd1yWo5Ja4dJE9QDjvm84b5pMUlocumcz2NpPWWuipofQFVmn6VD9cMKYQYjU7iWtseK7yNZCyU\nqIOZbepFtTXYtYJPTklIZBgMXUdsHTFtHCo/I8zJ/ae+1CJVKEDyOuQKajv0udpmFKK2AWn9wH+L\nkc0RZY+poH5KPbdsFwLghEEBxI84TEYixMDEdBvcqHHubGio32S/FzK07GzMHxXZyOVdc0uO/Yzv\nrZQ68Zvf/SL2nSnlg0QF8eOLeAhLzSZVTS1uPs6cOdC7d3EKpnHUy0Pu7a/4My99C/mT+qFv2VeY\n/8Axua/kTlsLxUnq4HAllYmUh0N5LdNEWm9pKgenBW6p+akwiZtA40J/nRszqHa4RL4dOKMD+Ra0\nSqnAZwht+pebwL1ofjNTUgzEmcfkZnerJFprA86GRM9I65eoHdvE5zH3bVFNWCyn+IWDIlDMFXoO\n9tccTmtKMyLoFoMIQBcNp+WEYzOj8UKz0X4RkVKnV5H5up0hNjmJqiDT58FaIStl2UqwLRs3M9PS\nV9
<remainder of base64 PNG data truncated>\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAATgAAAD8CAYAAADjcbh8AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAGoxJREFUeJzt3Xl4VfWdx/H3N5eEEFD2JQLKIshWQYmgYt2oilYFdepg\nrYOWCnUtrZ1xfUbH0ce2o91s0WLdR6G2qDjquDHu7CgiiMgSVDAShMgWgSzf+eMe2ogJublLTu7J\n5/U8eXLuL3f5nN764ezH3B0RkSjKCTuAiEimqOBEJLJUcCISWSo4EYksFZyIRJYKTkQiK2MFZ2Zj\nzGylma02s+sy9TkiInWxTBwHZ2Yx4CPgFGA9sBC4wN0/SPuHiYjUIVNLcCOA1e6+1t33ADOAsRn6\nLBGRWrXI0Pt2Bz6t8Xg9MLKuJ+dZS8+ndYaiiEiU7GIne3y3JfLcTBVcvcxsEjAJIJ8CRtrosKKI\nSBaZ77MTfm6mVlE3AD1rPO4RjP2du09z9yJ3L8qlZYZiiEhzlqmCWwj0M7PeZpYHjAeeydBniYjU\nKiOrqO5eaWZXAi8CMeABd1+eic8SEalLxrbBufvzwPOZen8RkfroTAYRiSwVnIhElgpORCJLBSci\nkaWCE5HIUsGJSGSp4EQkslRwIhJZKjgRiSwVnIhElgpORCJLBScikaWCE5HIUsGJSGSp4EQkslRw\nIhJZKjgRiSwVnIhElgpORCJLBScikZV0wZlZTzN71cw+MLPlZvaTYPwWM9tgZkuCnzPSF1dEJHGp\n3FWrErjG3d8xswOAxWb2cvC337j7nanHExFJXtIF5+4lQEkwvd3MVgDd0xVMRCRVadkGZ2a9gCOA\n+cHQVWa21MweMLP26fgMEZGGSrngzKwNMBOY4u7bgHuAPsAw4kt4d9XxuklmtsjMFlWwO9UYIiLf\nkFLBmVku8XJ7zN2fBHD3je5e5e7VwH3AiNpe6+7T3L3I3YtyaZlKDBGRWqWyF9WA+4EV7v7rGuOF\nNZ52DrAs+XgiIslLZS/qKOAi4H0zWxKM3QBcYGbDAAfWAZNTSigikqRU9qK+BVgtf3o++TgiIumj\nMxlEJLJUcCISWSo4EYksFZyIRJYKTkQiSwUnIpGlghORyFLBiUhkqeBEJLJUcCISWSo4EYksFZyI\nRJYKTkQiSwUnIpGlghORyFLBiUhkqeBEJLJUcCISWSo4EYksFZyIRFYqd9XCzNYB24EqoNLdi8ys\nA/AXoBfxu2qd7+5lqcUUEWm4dCzBneTuw9y9KHh8HTDb3fsBs4PHIiKNLhOrqGOBh4Pph4FxGfgM\nEZF6pVpwDrxiZovNbFIw1tXdS4Lpz4Gutb3QzCaZ2SIzW1TB7hRjiIh8U0rb4IDj3H2DmXUBXjaz\nD2v+0d3dzLy2F7r7NGAawIHWodbniIikIqUlOHffEPwuBZ4CRgAbzawQIPhdmmpIEZFkJF1wZtba\nzA7YOw2cCiwDngEmBE+bAMxKNaSISDJSWUXtCjxlZnvf53F3f8HMFgJPmNlE4GPg/NRjiog0XNIF\n5+5rgaG1jG8GRqcSSkQkHXQmg4hElgpORCJLBScikaWCE5HIUsGJSGSp4EQkslRwIhJZKjgRiSwV\nnIhElgpORCJLBScikaWCE5HIUsGJSGSp4EQkslRwIhJZKjgRiSwVnIhElgpORCJLBScikaWCE5HI\nSvqmM2Z2GPCXGkN9gH8H2gGXApuC8Rvc/fmkE4qIJCmVu2qtBIYBmFkM2ED85s+XAL9x9zvTkrAp\niN8aEdy/NmaxGOVnHklsV3w874WFVJ48nNw338cr9oQQVERqStcq6mhgjbt/nKb3ExFJWSo3fq5p\nPDC9xuOrzOxfgEXANe5etu8LzGwSMAkgn4I0xWi4Fj26U7l+A1suOQaA8kKj4NgvaPlge7b2jbGj\nTyU3n/g0AH9cfSI753firHPnAM4vuy4iPotx83ZVcXT+EgCOeud8Wj7YntYz5wPw1bgRtHlrDVVf\nbE4sWE4MqqvSNp8izZF5zdWuZN7ALA/4DBjs7hvNrCvwBeDAfwKF7v7D/b3HgdbBR1pq94qOderI\nnm8dAsCOg/Jo+9g8fNQwcsor+GhiGzrPjy+sdlz0BR+P60x5vz18Z/AKpnR9hQ45VRS2aJPS59dl\nTcUOAPrmxt//lBVnse2hHrR7dC5f/ssxdHrzMwAqiz+mcvRwyvrlUTa0in8auZAlZT0AKHm5JyPH\nLeW1OUPo+J6x+5wvyZ/ZDoB2j87NSG6Rpmq+z2abb7FEnpuOghsLXOHup9byt17As+4+ZH/vkWzB\nxfr3pesjpbSKVTC1+zzKqsoBaB8rYH3lDjrk5FGQk/e115RX7/nGWBjKqsppH/vHkutL5bmcWlDR\n4PdZsaecM16YwoCp26he+mE6I4o0SY1dcDOAF939weBxobuXBNM/BUa6+/j9vUeiBbfx6mMB2NUR\nxpy5gMK8rVzbcVVK+aPk6s+O4vXHjqLbb+aEHUUkYxqt4MysNfAJ0MfdtwZjjxLfu+rAOmDy3sKr\ny/4KLjb4MEpO6MiXQysoPuu+pLM2NwPeugiAwvtbsm5sDgXrW9DzVwvwysqQk4mkplGX4NKhtoLb\nddYIdrWLMeiKZTx48JshJYue4YvPp+DB9gAUPDk/5DQiDZfVBbf9n48G4Jk776JTrHWYsSLvpfJc\nLp81kcPu/ozKdZ+EHUckIQ0pOJ2qJSKR1WSW4LrefhM5h+3gd0fMAEhqj6Ik7/y1o1nzSH86TdNh\nJ9K0Zd0qav6hB3n5W1odbQrKqsoZPvsqAA6dVonNeS/kRCJf15CCS9eZDCkZ0noLoIJrCtrHClh7\n6v3xB6fCbV8M4H//8wTa/FU7JCT7aBuc7Nek9ospLcoh1r592FFEGqxJLMFJ09Ul1ppVF90DF8FJ\ny8fy1UOFALR9bF7IyUTqp4KThL06eBb8V3x66e27+OnEK2gxe3G4oUT2Q6uoIhJZKjhJyqEtclj7\nA/BjhoYdRaROKjhJSkFOHsWn3c9LMx+mdNYAYu3ahh1J5BtUcJKymwc9S/kx/cOOIfINKjhJWVHL\nz1l/cgta9OwRdhSRr1HBScp6tGjD6gvv4bn5z9L6jc60fqMzm390TNixRJrGqVpFQ/N9wYs9w44h\naVZatZORL/+E/tP2wLylYceRiMi6U7UkmrrEWlM85s8wBm7eNJiZ008AoPsvdMVhaRxaRRWRyNIq\nqoRi6IILaPvgAbSatSDsKJJltIoqTd57I6bDCOhzymT6XakrlUhm1LuKamYPmFmpmS2rMdbBzF42\ns1XB7/Y1/na9ma02s5Vmdlqmgks0/PW7d7PqDyOJDdJxdJJ+iWyDewgYs8/YdcBsd+8HzA4eY2aD\niN/lfnDwmqlmFktbWomc4S3zWHvun3j+lSc4eH5rDp7fmt2nHxV2LImIegvO3d8AtuwzPBZ4OJh+\nGBhXY3yGu+9292JgNTAiTVkl4q7r9
iLXdXuRTcNyw44iEZHsXtSuNe51+jnQNZjuDnxa43nrgzGR\nevXNbUPf3DYsv2oqL362hJKnB7LrLP37KMlL+TARj++GbfCuWDObZGaLzGzRps1VqcaQCFo6Yjqn\n3f66TuSXpCVbcBvNrBAg+F0ajG8Aah7v0SMY+wZ3n+buRe5e1LmjNtOJSPolW3DPABOC6QnArBrj\n482spZn1BvoBOtBJknZDp5Xc8u4rrLp7JKvuHklsYL+wI0kWqfc4ODObDpwIdDKz9cDNwC+AJ8xs\nIvAxcD6Auy83syeAD4BK4Ap31/qnpGREy1zWnvcnAHq3vJT+k0IOJFlDZzJI1rlh4+EsnDKcnNff\nDTuKhKAhZzLoXFTJOuPbL2D9ia3Iaa176cr+qeAk6xyel8+KyVMpfrBP2FGkiVPBSdb68LhHyX2t\nkOoTjgg7ijRRKjgRiSwVnGS12w55mi+GtAo7hjRR2osqkVARHI008LUf0X/Keqo2bQo5kWSK9qJK\ns5NrMXItxtgB71E+olfYcaSJUMFJpNxV+A6v3Xcfn085Nuwo0gSo4CSSisYvpUW3rvU/USJNBSeR\ndNtBL/DBrQcTG3xY2FEkRCo4iaTCFm1YcsbvWT+mY9hRJEQqOBGJLBWcRFbbnFYsvWYqh79jHP6O\nEevfN+xI0shUcBJ557VbxHntFrH6ki7EunYJO440IhWcRN7R+TGOzo/x0YR7KJ58aNhxpBGp4KRZ\nWfHjqXSe046cIQPCjiKNQAUnzc4FXeax4bQOYceQRqCCk2bnuwW7WHrNVFb/+uiwo0iGqeBEJLJU\ncNJsrRl/L2XP9SNnyABtk4uoegvOzB4ws1IzW1Zj7L/M7EMzW2pmT5lZu2C8l5l9ZWZLgp97Mxle\nJFU39X+OLUe2Z8uR7cOOIhlQ7/XgzOx4YAfwiLsPCcZOBf7P3SvN7JcA7n6tmfUCnt37vETpenDS\nFPR5aSL9Ll4cdgypR1qvB+fubwBb9hl7yd0rg4fziN/BXiSrLfvOPVQfNyzsGJJG6dgG90Pgf2s8\n7h2snr5uZt9Ow/uLNIqCnDw+PaUALKGFA8kCKRWcmd1I/A72jwVDJcDB7j4M+BnwuJkdWMdrJ5nZ\nIjNbtGlzVSoxRNKm53GfsvPcEWHHkDRJuuDM7GLgTOBCDzbkuftud98cTC8G1gD9a3u9u09z9yJ3\nL+rcMZZsDJG0enng//DzXzxG7DCd0hUFSRWcmY0B/g04293La4x3NrNYMN0H6AesTUdQEZGGSuQw\nkenAXOAwM1tvZhOBPwAHAC/vczjI8cBSM1sC/A34sbtvqfWNRZqoca13sHJyp7BjSBrotoEitXh7\nVzW3jfs+1Us/DDuK7EO3DRRJ0aj8HIpvyqVy9HAqRw8PO44kSQUnUocPj3uU4rNzKT47N+wokqQW\nYQcQacomnzwbgNfbHUTVl1tDTiMNpSU4kf24tuMqru24inVXDA47iiRBBScikaWCE0nApO8/T4te\nB4cdQxpIBSeSgCnt11EypnvYMaSBVHAiCdr1ne206NY17BjSACo4kQR9r9+77B6gpbhsooITSdD4\ntgvZNCw/7BjSACo4kQQNzCvgq67hn9ooiVPBiSRowe4KWq/XxTCziQpORCJLBSeSoOF5MbaPKicn\nX9vhsoUKTiRBMcvhyqGvkdNZ14rLFio4kQaY0n4dqyfp2oXZQgUn0kCnjHmHWOfOYceQBKjgRBqg\nuGIHc+4/kqpNm8KOIglQwYk0QO/cNhSM2xh2DEmQCk5EIiuRu2o9YGalZrasxtgtZrYhuKPWEjM7\no8bfrjez1Wa20sxOy1RwkbC8NGQGX43TzaGzQSJLcA8BY2oZ/427Dwt+ngcws0HAeGBw8Jqpe++T\nKhIVBTl5bL5oZ9gxJAH1Fpy7vwEkem/TscCM4A73xcBqQP/USaQ8tK0LvX7yZdgxJAGpbIO7ysyW\nBquw7YOx7sCnNZ6zPhgTiYyLDyzlo6t0dd9skGzB3QP0AYYBJcBdDX0DM5tkZovMbNGmzVVJxhBp\nfJd+OopDb10adgxJQFIF5+4b3b3K3auB+/jHaugGoOZh3j2CsdreY5q7F7l7UeeO2kwn2ePO7q+w\n9axvhR1DEpBUwZlZYY2H5wB797A+A4w3s5Zm1hvoByxILaJI03L8nddwwIx5YceQBNR742czmw6c\nCHQys/XAzcCJZjYMcGAdMBnA3Zeb2RPAB0AlcIW7a/1TREJRb8G5+wW1DN+/n+ffDtyeSiiRpmpH\n9S72HAjWogVeWRl2HKmHzmQQaYBndxbS85WdKrcsoYITaYDxB5RRWtQ67BiSIBVcEiqCzYqPbe8Y\nchJpbCctH0vX388JO4YkqN5tcAI3lcYPCXh7Ux82LD6IbnOraLVxF8xbyuM9j2Hl1fEjY353zoN8\nt2BXmFElw14dPItR35tMm7/ODzuKJMDcw78NWtHQfF/wYtO8SuqAP1/GIf8+N+HnfzVuBNsv2ca7\nR83IYCoJS+9nL6X/pIVhx2jW5vtstvmWhG5vplVUEYksFdx+DJ57YYOW3gBaPb2ALmM/5LTuRzD4\n7sv5j02D+I9NgzKUUBrbHSf8jZyhA8OOIQlSwe3HU0XTqBw9PLkXu9PjjjnMGZrHnKF5nHreBHq/\nOJEvqnSZnWxVUrmDu286n+r3VoQdRRKknQz7Mf6On9N5dsOW4Opic9+j/1y4kFFsvvQYqvKMwtc2\ns+KKtgDMOuP3HJ6n+202Zb/dfBwFJbvDjiENoIKrw8B7L+fgezNzOEDH++KlWQX0vzw+9q8czdYL\nj6bVxSW8OnhWRj5XUrNlT2tyV32Gzj3MHlpFrUWfpyZz8K2Nf6xT28fm0erqPJ7ZWdDony37d//W\nbnx6ZR+qNpaGHUUaQAW3j4FvX0S/K8I7xqlqxSqueXJCaJ8vX7ejehc7qnfx2wfOxRe+H3YcaSAV\nXODMj07nzI9O55Dvrww7Cv1/W8xD27qEHUOANjn5tMnJJ/fbm8OOIklQwYlIZDX7nQy9X/gRrYrz\n6DW9BACv2BNyIqgs+Zxb55/Jxac8EHaUZu8H604EoNslm7RzIQs164I79NVL6P/DRQBN7v+8XV/I\nY/Zx8Uu5j27V1NI1D3/8siebLz8IgOrNH4ScRpLRLAvu7V3VXL/qPPpe+G7YUer0+SmVKrYQLd/z\nFU/82+nkL9EV97NZs9gGt6Zix9+n+z98Gbf2OZKCM2u9F06T0X5hLsUVOyiukV0az4Tbf0b+syq3\nbBf5Jbjiih1c/NNrKHgyfuhHb+IH2TaFbW370/XtMu4qHQ3AH7rr0jyNaeCfLufg+3TNtyiIZMGV\nVu3k4a2H8/727ix7ZDCdn0zP6VaNyT7dyHPvx69D96vCNynIyQs5UfPxw++9yGsPDaLy40/rf7I0\nafWuogZ3ri81s2U1xv5iZkuCn3VmtiQY72VmX9X4272ZDC8isj+JLME9BPwBeGTvgLv/895pM7
sL\n2Frj+WvcfVi6AiairKqc9rEC+sycDEC/q+YTO/BAqrZtozPZt/QGUFVWRsc5uQDMPaGVdjg0kgW7\nK3juX0+m5ce6qGUUJHRFXzPrBTzr7kP2GTfgE+Bkd19V1/Pq0za3i3e59TpuPO+vjGq1DoAb15/N\njN7/x4LdFXxe2ZazW5ezeHd8u9nwlnkcveSf+KLsAApn5pH3ZSV5Cz+ievv2hnxsVtl53kh2X1LG\nwiOfCDtKpB1142V0eDA7/1FsLhpyRd9UC+544NfuXlTjecuBVcSX6m5y9zfreM9JwCSAfAqGH2dn\nANCisBsQP9jVcvPIOaQ7VauLqT7hCHI3fAlA1dpPoLp5LtGUPD2QpSOmhx0jkgbffTk97tDOhaau\nMQvuHmC1u98VPG4JtHH3zWY2HHgaGOzu2/b3/gdaBx9poxPJK0DJz47lz1f9DoARLXNDTpP9rt34\njy0qy87oRmXJ5yGmkfo0pOCS3otqZi2Ac4G/X/LW3XcDu4PpxWa2BugPLEr2c+Sbus0v57fnnQrA\n471fDTlNdjv0tYvp+/0lNUZUblGSyoG+3wE+dPf1ewfMrLOZxYLpPkA/YG1qEWVf9vYSNo8qY/Oo\nMno//6Ow42StE5eN26fcJGoSOUxkOjAXOMzM1pvZxOBP44F9NwYdDywNDhv5G/Bjd9+SzsDydQOv\n/5j+D12mm1A30JSSIlpf2jy34zYn9a6iuvsFdYxfXMvYTGBm6rFERFLXLM5FjbKqTZvofcNcHj95\nJHeXHRJ2nKwwc8eBfHD1EJ2p0Ayo4CKicsNnPDP5JPrOvoTy6qZ9nm2YdnsFN/33D7C3te2tOVDB\nRUjOW0s49KJ3GXXHlLCjNFktLZcOx2pPaXMRyZPtm7suf5zDiB2XAdBhwie8MOC5kBM1DcUVO/jF\nxlNod0k5lWGHkUaR0IG+maYDfTMr1rULq6f05bbzHuf8Nlvrf0HE9H7uUgD6X7qQWLu2VH3Z/P43\niJK0n8mQaSq4xrHqjyNZe86fwo7RqIYuuIBu41aEHUPSqCEFp21wIhJZWoJrZqq/fQRr41eV4oOT\nptHSonsu6/jik9n23SqtkkaMVlElYaWXH8u3LlrGI4e8EXaUtOnz5GQGTC3Dtu6gcsNnYceRNFPB\nSYPF+vXhwyu78OhZUxmVn31bLlbsKQfgnAWTOeT890NOI5mUdQVnZpuAncAXYWfJsE5Efx6hecxn\nc5hHaJrzeYi7d07kiU2i4ADMbNHeC2dGVXOYR2ge89kc5hGyfz6zb11ERCRBKjgRiaymVHDTwg7Q\nCJrDPELzmM/mMI+Q5fPZZLbBiYikW1NaghMRSavQC87MxpjZSjNbbWbXhZ0nncxsnZm9b2ZLzGxR\nMNbBzF42s1XB7/Zh52woM3vAzErNbFmNsTrny8yuD77flWZ2WjipG6aOebzFzDYE3+cSs+Bel2Tt\nPPY0s1fN7AMzW25mPwnGo/NduntoP0AMWAP0AfKA94BBYWZK8/ytAzrtM/Yr4Lpg+jrgl2HnTGK+\njgeOBJbVN1/AoOB7bQn0Dr7vWNjzkOQ83gL8vJbnZus8FgJHBtMHAB8F8xKZ7zLsJbgRxO+rutbd\n9wAzgLEhZ8q0scDDwfTDwLgQsyTF3d8A9r2ZUF3zNRaY4e673b0YWE38e2/S6pjHumTrPJa4+zvB\n9HZgBdCdCH2XYRdcd6DmhfHXB2NR4cArZrbYzCYFY13dvSSY/hzoGk60tKtrvqL2HV9lZkuDVdi9\nq25ZP4/Bzd2PAOYToe8y7IKLuuPcfRhwOnCFmR1f848eX+6P3G7sqM4XcA/xzSnDgBLgrnDjpIeZ\ntSF+N7wp7r6t5t+y/bsMu+A2AD1rPO4RjEWCu28IfpcCTxFfnN9oZoUAwe/S8BKmVV3zFZnv2N03\nunuVu1cD9/GP1bOsnUczyyVebo+5+5PBcGS+y7ALbiHQz8x6m1ke8ZtJPxNyprQws9ZmdsDeaeBU\nYBnx+ZsQPG0CMCuchGlX13w9A4w3s5Zm1hvoBywIIV/K9v5HHziH+PcJWTqPZmbA/cAKd/91jT9F\n57sMey8HcAbxvTdrgBvDzpPG+epDfI/Te8DyvfMGdARmA6uAV4AOYWdNYt6mE19FqyC+HWbi/uYL\nuDH4flcCp4edP4V5fBR4H1hK/D/2wiyfx+OIr34uBZYEP2dE6bvUmQwiEllhr6KKiGSMCk5EIksF\nJyKRpYITkchSwYlIZKngRCSyVHAiElkqOBGJrP8HWCGGVJcqTowAAAAASUVORK5CYII=\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "plt.imshow(t1_slice_tform[0].numpy().T[::-1])\n",
+ "plt.show()\n",
+ "plt.imshow(mask_slice_tform[0].numpy().T[::-1])\n",
+ "plt.show()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Clearly, the random affine transform was correctly preserved between the two images. This is great news for segmentation datasets.\n",
+ "\n",
+ "In the next tutorial, I will move on to `Datasets` and `DataLoaders`."
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.6.0"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/torchsample/examples/imgs/orig1.png b/torchsample/examples/imgs/orig1.png
new file mode 100755
index 0000000..b355c91
Binary files /dev/null and b/torchsample/examples/imgs/orig1.png differ
diff --git a/torchsample/examples/imgs/orig2.png b/torchsample/examples/imgs/orig2.png
new file mode 100755
index 0000000..d842399
Binary files /dev/null and b/torchsample/examples/imgs/orig2.png differ
diff --git a/torchsample/examples/imgs/orig3.png b/torchsample/examples/imgs/orig3.png
new file mode 100755
index 0000000..845cdaa
Binary files /dev/null and b/torchsample/examples/imgs/orig3.png differ
diff --git a/torchsample/examples/imgs/tform1.png b/torchsample/examples/imgs/tform1.png
new file mode 100755
index 0000000..00df63e
Binary files /dev/null and b/torchsample/examples/imgs/tform1.png differ
diff --git a/torchsample/examples/imgs/tform2.png b/torchsample/examples/imgs/tform2.png
new file mode 100755
index 0000000..a7455d8
Binary files /dev/null and b/torchsample/examples/imgs/tform2.png differ
diff --git a/torchsample/examples/imgs/tform3.png b/torchsample/examples/imgs/tform3.png
new file mode 100755
index 0000000..fecf8bd
Binary files /dev/null and b/torchsample/examples/imgs/tform3.png differ
diff --git a/torchsample/examples/mnist_example.py b/torchsample/examples/mnist_example.py
new file mode 100755
index 0000000..d89d7a3
--- /dev/null
+++ b/torchsample/examples/mnist_example.py
@@ -0,0 +1,84 @@
+
+import torch as th
+import torch.nn as nn
+import torch.nn.functional as F
+
+from torchsample.modules import ModuleTrainer
+from torchsample.callbacks import EarlyStopping, ReduceLROnPlateau
+from torchsample.regularizers import L1Regularizer, L2Regularizer
+from torchsample.constraints import UnitNorm
+from torchsample.initializers import XavierUniform
+from torchsample.metrics import CategoricalAccuracy
+
+import os
+from torchvision import datasets
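+# download MNIST once via torchvision, then load the processed tensors directly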
+ROOT = '/users/ncullen/desktop/data/mnist'
+dataset = datasets.MNIST(ROOT, train=True, download=True)
+x_train, y_train = th.load(os.path.join(dataset.root, 'processed/training.pt'))
+x_test, y_test = th.load(os.path.join(dataset.root, 'processed/test.pt'))
+
+x_train = x_train.float()
+y_train = y_train.long()
+x_test = x_test.float()
+y_test = y_test.long()
+
+x_train = x_train / 255.
+x_test = x_test / 255.
+x_train = x_train.unsqueeze(1)
+x_test = x_test.unsqueeze(1)
+
+# only train on a subset
+x_train = x_train[:10000]
+y_train = y_train[:10000]
+x_test = x_test[:1000]
+y_test = y_test[:1000]
+
+
+# Define your model EXACTLY as if you were using nn.Module
+class Network(nn.Module):
+ def __init__(self):
+ super(Network, self).__init__()
+ self.conv1 = nn.Conv2d(1, 32, kernel_size=3)
+ self.conv2 = nn.Conv2d(32, 64, kernel_size=3)
+ self.fc1 = nn.Linear(1600, 128)
+ self.fc2 = nn.Linear(128, 10)
+
+ def forward(self, x):
+ x = F.relu(F.max_pool2d(self.conv1(x), 2))
+ x = F.relu(F.max_pool2d(self.conv2(x), 2))
+ x = x.view(-1, 1600)
+ x = F.relu(self.fc1(x))
+ x = F.dropout(x, training=self.training)
+ x = self.fc2(x)
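+ # log-probabilities pair with the 'nll_loss' criterion used below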
+ return F.log_softmax(x)
+
+
+model = Network()
+trainer = ModuleTrainer(model)
+
+
+callbacks = [EarlyStopping(patience=10),
+ ReduceLROnPlateau(factor=0.5, patience=5)]
+regularizers = [L1Regularizer(scale=1e-3, module_filter='conv*'),
+ L2Regularizer(scale=1e-5, module_filter='fc*')]
+constraints = [UnitNorm(frequency=3, unit='batch', module_filter='fc*')]
+initializers = [XavierUniform(bias=False, module_filter='fc*')]
+metrics = [CategoricalAccuracy(top_k=3)]
+
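+# compile() wires the pieces together Keras-style: the loss and optimizer are
+# given by name, regularizers/constraints/initializers are applied to the
+# modules matched by their module_filter globs, and metrics are reported
+# alongside the loss during training.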
+trainer.compile(loss='nll_loss',
+ optimizer='adadelta',
+ regularizers=regularizers,
+ constraints=constraints,
+ initializers=initializers,
+ metrics=metrics,
+ callbacks=callbacks)
+
+#summary = trainer.summary([1,28,28])
+#print(summary)
+
+trainer.fit(x_train, y_train,
+ val_data=(x_test, y_test),
+ num_epoch=20,
+ batch_size=128,
+ verbose=1)
+
+
diff --git a/torchsample/examples/mnist_loader_example.py b/torchsample/examples/mnist_loader_example.py
new file mode 100755
index 0000000..ce689da
--- /dev/null
+++ b/torchsample/examples/mnist_loader_example.py
@@ -0,0 +1,85 @@
+
+import torch as th
+import torch.nn as nn
+import torch.nn.functional as F
+
+from torch.utils.data import DataLoader
+
+from torchsample.modules import ModuleTrainer
+from torchsample.callbacks import EarlyStopping, ReduceLROnPlateau
+from torchsample.regularizers import L1Regularizer, L2Regularizer
+from torchsample.constraints import UnitNorm
+from torchsample.initializers import XavierUniform
+from torchsample.metrics import CategoricalAccuracy
+from torchsample import TensorDataset
+
+import os
+from torchvision import datasets
+ROOT = '/users/ncullen/desktop/data/mnist'
+dataset = datasets.MNIST(ROOT, train=True, download=True)
+x_train, y_train = th.load(os.path.join(dataset.root, 'processed/training.pt'))
+x_test, y_test = th.load(os.path.join(dataset.root, 'processed/test.pt'))
+
+x_train = x_train.float()
+y_train = y_train.long()
+x_test = x_test.float()
+y_test = y_test.long()
+
+x_train = x_train / 255.
+x_test = x_test / 255.
+x_train = x_train.unsqueeze(1)
+x_test = x_test.unsqueeze(1)
+
+# only train on a subset
+x_train = x_train[:10000]
+y_train = y_train[:10000]
+x_test = x_test[:1000]
+y_test = y_test[:1000]
+
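+# wrap the tensors in a Dataset so a standard DataLoader can batch them for
+# fit_loader() below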
+train_dataset = TensorDataset(x_train, y_train)
+train_loader = DataLoader(train_dataset, batch_size=32)
+val_dataset = TensorDataset(x_test, y_test)
+val_loader = DataLoader(val_dataset, batch_size=32)
+
+# Define your model EXACTLY as if you were using nn.Module
+class Network(nn.Module):
+ def __init__(self):
+ super(Network, self).__init__()
+ self.conv1 = nn.Conv2d(1, 32, kernel_size=3)
+ self.conv2 = nn.Conv2d(32, 64, kernel_size=3)
+ self.fc1 = nn.Linear(1600, 128)
+ self.fc2 = nn.Linear(128, 10)
+
+ def forward(self, x):
+ x = F.relu(F.max_pool2d(self.conv1(x), 2))
+ x = F.relu(F.max_pool2d(self.conv2(x), 2))
+ x = x.view(-1, 1600)
+ x = F.relu(self.fc1(x))
+ x = F.dropout(x, training=self.training)
+ x = self.fc2(x)
+ return F.log_softmax(x)
+
+
+model = Network()
+trainer = ModuleTrainer(model)
+
+callbacks = [EarlyStopping(patience=10),
+ ReduceLROnPlateau(factor=0.5, patience=5)]
+regularizers = [L1Regularizer(scale=1e-3, module_filter='conv*'),
+ L2Regularizer(scale=1e-5, module_filter='fc*')]
+constraints = [UnitNorm(frequency=3, unit='batch', module_filter='fc*')]
+initializers = [XavierUniform(bias=False, module_filter='fc*')]
+metrics = [CategoricalAccuracy(top_k=3)]
+
+trainer.compile(loss='nll_loss',
+ optimizer='adadelta',
+ regularizers=regularizers,
+ constraints=constraints,
+ initializers=initializers,
+ metrics=metrics,
+ callbacks=callbacks)
+
+trainer.fit_loader(train_loader, val_loader, num_epoch=20, verbose=1)
+
+
+
diff --git a/torchsample/setup.py b/torchsample/setup.py
new file mode 100755
index 0000000..43a5920
--- /dev/null
+++ b/torchsample/setup.py
@@ -0,0 +1,11 @@
+#!/usr/bin/env python
+
+from setuptools import setup, find_packages
+
+setup(name='torchsample',
+ version='0.1.3',
+ description='High-Level Training, Augmentation, and Sampling for PyTorch',
+ author='Zexin Chen',
+ author_email='czxsjtu@gmail.com',
+ packages=find_packages()
+ )
\ No newline at end of file
diff --git a/torchsample/tests/integration/fit_complex/multi_input_multi_target.py b/torchsample/tests/integration/fit_complex/multi_input_multi_target.py
new file mode 100755
index 0000000..80d679d
--- /dev/null
+++ b/torchsample/tests/integration/fit_complex/multi_input_multi_target.py
@@ -0,0 +1,120 @@
+
+import torch as th
+import torch.nn as nn
+import torch.nn.functional as F
+
+from torchsample.modules import ModuleTrainer
+from torchsample import regularizers as regs
+from torchsample import constraints as cons
+from torchsample import initializers as inits
+from torchsample import callbacks as cbks
+from torchsample import metrics
+from torchsample import transforms as tforms
+
+import os
+from torchvision import datasets
+
+ROOT = '/users/ncullen/desktop/data/mnist'
+dataset = datasets.MNIST(ROOT, train=True, download=True)
+x_train, y_train = th.load(os.path.join(dataset.root, 'processed/training.pt'))
+x_test, y_test = th.load(os.path.join(dataset.root, 'processed/test.pt'))
+
+x_train = x_train.float()
+y_train = y_train.long()
+x_test = x_test.float()
+y_test = y_test.long()
+
+x_train = x_train / 255.
+x_test = x_test / 255.
+x_train = x_train.unsqueeze(1)
+x_test = x_test.unsqueeze(1)
+
+# only train on a subset
+x_train = x_train[:1000]
+y_train = y_train[:1000]
+x_test = x_test[:100]
+y_test = y_test[:100]
+
+
+# Define your model EXACTLY as if you were using nn.Module
+class Network(nn.Module):
+ def __init__(self):
+ super(Network, self).__init__()
+ self.conv1 = nn.Conv2d(1, 32, kernel_size=3)
+ self.conv2 = nn.Conv2d(32, 64, kernel_size=3)
+ self.fc1 = nn.Linear(1600, 128)
+ self.fc2 = nn.Linear(128, 10)
+
+ def forward(self, x, y, z):
+ x = F.relu(F.max_pool2d(self.conv1(x), 2))
+ x = F.relu(F.max_pool2d(self.conv2(x), 2))
+ x = x.view(-1, 1600)
+ x = F.relu(self.fc1(x))
+ x = F.dropout(x, training=self.training)
+ x = self.fc2(x)
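+ # the extra inputs y and z are ignored; three identical log-softmax heads
+ # are returned so the trainer sees multiple outputs and targets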
+ return F.log_softmax(x), F.log_softmax(x), F.log_softmax(x)
+
+# with one loss function given
+model = Network()
+trainer = ModuleTrainer(model)
+
+regularizers = [regs.L1Regularizer(1e-4, 'fc*'), regs.L2Regularizer(1e-5, 'conv*')]
+constraints = [cons.UnitNorm(5, 'batch', 'fc*'),
+ cons.MaxNorm(5, 0, 'batch', 'conv*')]
+callbacks = [cbks.ReduceLROnPlateau(monitor='loss', verbose=1)]
+
+trainer.compile(loss='nll_loss',
+ optimizer='adadelta',
+ regularizers=regularizers,
+ constraints=constraints,
+ callbacks=callbacks)
+
+trainer.fit([x_train, x_train, x_train],
+ [y_train, y_train, y_train],
+ num_epoch=3,
+ batch_size=128,
+ verbose=1)
+
+yp1, yp2, yp3 = trainer.predict([x_train, x_train, x_train])
+print(yp1.size(), yp2.size(), yp3.size())
+
+eval_loss = trainer.evaluate([x_train, x_train, x_train],
+ [y_train, y_train, y_train])
+print(eval_loss)
+
+# With multiple loss functions given
+model = Network()
+trainer = ModuleTrainer(model)
+
+trainer.compile(loss=['nll_loss', 'nll_loss', 'nll_loss'],
+ optimizer='adadelta',
+ regularizers=regularizers,
+ constraints=constraints,
+ callbacks=callbacks)
+
+trainer.fit([x_train, x_train, x_train],
+ [y_train, y_train, y_train],
+ num_epoch=3,
+ batch_size=128,
+ verbose=1)
+
+# should raise exception for giving multiple loss functions
+# but not giving a loss function for every output
+try:
+ model = Network()
+ trainer = ModuleTrainer(model)
+
+ trainer.compile(loss=['nll_loss', 'nll_loss'],
+ optimizer='adadelta',
+ regularizers=regularizers,
+ constraints=constraints,
+ callbacks=callbacks)
+
+ trainer.fit([x_train, x_train, x_train],
+ [y_train, y_train, y_train],
+ num_epoch=3,
+ batch_size=128,
+ verbose=1)
+except Exception:
+ print('Exception correctly caught')
+
diff --git a/torchsample/tests/integration/fit_loader_simple/single_input_multi_target.py b/torchsample/tests/integration/fit_loader_simple/single_input_multi_target.py
new file mode 100755
index 0000000..b47fb24
--- /dev/null
+++ b/torchsample/tests/integration/fit_loader_simple/single_input_multi_target.py
@@ -0,0 +1,79 @@
+
+import torch as th
+import torch.nn as nn
+import torch.nn.functional as F
+from torch.utils.data import DataLoader
+
+from torchsample.modules import ModuleTrainer
+from torchsample import TensorDataset
+
+import os
+from torchvision import datasets
+ROOT = '/users/ncullen/desktop/data/mnist'
+dataset = datasets.MNIST(ROOT, train=True, download=True)
+x_train, y_train = th.load(os.path.join(dataset.root, 'processed/training.pt'))
+x_test, y_test = th.load(os.path.join(dataset.root, 'processed/test.pt'))
+
+x_train = x_train.float()
+y_train = y_train.long()
+x_test = x_test.float()
+y_test = y_test.long()
+
+x_train = x_train / 255.
+x_test = x_test / 255.
+x_train = x_train.unsqueeze(1)
+x_test = x_test.unsqueeze(1)
+
+# only train on a subset
+x_train = x_train[:1000]
+y_train = y_train[:1000]
+x_test = x_test[:1000]
+y_test = y_test[:1000]
+
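+# a list of target tensors makes this a single-input, multi-target dataset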
+train_data = TensorDataset(x_train, [y_train, y_train])
+train_loader = DataLoader(train_data, batch_size=128)
+
+# Define your model EXACTLY as if you were using nn.Module
+class Network(nn.Module):
+ def __init__(self):
+ super(Network, self).__init__()
+ self.conv1 = nn.Conv2d(1, 32, kernel_size=3)
+ self.conv2 = nn.Conv2d(32, 64, kernel_size=3)
+ self.fc1 = nn.Linear(1600, 128)
+ self.fc2 = nn.Linear(128, 10)
+
+ def forward(self, x):
+ x = F.relu(F.max_pool2d(self.conv1(x), 2))
+ x = F.relu(F.max_pool2d(self.conv2(x), 2))
+ x = x.view(-1, 1600)
+ x = F.relu(self.fc1(x))
+ x = F.dropout(x, training=self.training)
+ x = self.fc2(x)
+ return F.log_softmax(x), F.log_softmax(x)
+
+
+# one loss function for multiple targets
+model = Network()
+trainer = ModuleTrainer(model)
+trainer.compile(loss='nll_loss',
+ optimizer='adadelta')
+
+trainer.fit_loader(train_loader,
+ num_epoch=3,
+ verbose=1)
+ypred1, ypred2 = trainer.predict(x_train)
+print(ypred1.size(), ypred2.size())
+
+eval_loss = trainer.evaluate(x_train, [y_train, y_train])
+print(eval_loss)
+# multiple loss functions
+model = Network()
+trainer = ModuleTrainer(model)
+trainer.compile(loss=['nll_loss', 'nll_loss'],
+ optimizer='adadelta')
+trainer.fit_loader(train_loader,
+ num_epoch=3,
+ verbose=1)
+
+
+
diff --git a/torchsample/tests/integration/fit_loader_simple/single_input_single_target.py b/torchsample/tests/integration/fit_loader_simple/single_input_single_target.py
new file mode 100755
index 0000000..d568620
--- /dev/null
+++ b/torchsample/tests/integration/fit_loader_simple/single_input_single_target.py
@@ -0,0 +1,73 @@
+
+import torch as th
+import torch.nn as nn
+import torch.nn.functional as F
+from torch.utils.data import DataLoader
+
+from torchsample.modules import ModuleTrainer
+from torchsample import TensorDataset
+
+import os
+from torchvision import datasets
+ROOT = '/users/ncullen/desktop/data/mnist'
+dataset = datasets.MNIST(ROOT, train=True, download=True)
+x_train, y_train = th.load(os.path.join(dataset.root, 'processed/training.pt'))
+x_test, y_test = th.load(os.path.join(dataset.root, 'processed/test.pt'))
+
+x_train = x_train.float()
+y_train = y_train.long()
+x_test = x_test.float()
+y_test = y_test.long()
+
+x_train = x_train / 255.
+x_test = x_test / 255.
+x_train = x_train.unsqueeze(1)
+x_test = x_test.unsqueeze(1)
+
+# only train on a subset
+x_train = x_train[:1000]
+y_train = y_train[:1000]
+x_test = x_test[:1000]
+y_test = y_test[:1000]
+
+train_data = TensorDataset(x_train, y_train)
+train_loader = DataLoader(train_data, batch_size=128)
+
+# Define your model EXACTLY as if you were using nn.Module
+class Network(nn.Module):
+ def __init__(self):
+ super(Network, self).__init__()
+ self.conv1 = nn.Conv2d(1, 32, kernel_size=3)
+ self.conv2 = nn.Conv2d(32, 64, kernel_size=3)
+ self.fc1 = nn.Linear(1600, 128)
+ self.fc2 = nn.Linear(128, 10)
+
+ def forward(self, x):
+ x = F.relu(F.max_pool2d(self.conv1(x), 2))
+ x = F.relu(F.max_pool2d(self.conv2(x), 2))
+ x = x.view(-1, 1600)
+ x = F.relu(self.fc1(x))
+ #x = F.dropout(x, training=self.training)
+ x = self.fc2(x)
+ return F.log_softmax(x)
+
+
+model = Network()
+trainer = ModuleTrainer(model)
+
+trainer.compile(loss='nll_loss',
+ optimizer='adadelta')
+
+trainer.fit_loader(train_loader,
+ num_epoch=3,
+ verbose=1)
+
+ypred = trainer.predict(x_train)
+print(ypred.size())
+
+eval_loss = trainer.evaluate(x_train, y_train)
+print(eval_loss)
+
+print(trainer.history)
+#print(trainer.history['loss'])
+
diff --git a/torchsample/tests/integration/fit_simple/simple_multi_input_multi_target.py b/torchsample/tests/integration/fit_simple/simple_multi_input_multi_target.py
new file mode 100755
index 0000000..53dfa1b
--- /dev/null
+++ b/torchsample/tests/integration/fit_simple/simple_multi_input_multi_target.py
@@ -0,0 +1,99 @@
+
+import torch as th
+import torch.nn as nn
+import torch.nn.functional as F
+
+from torchsample.modules import ModuleTrainer
+
+import os
+from torchvision import datasets
+ROOT = '/users/ncullen/desktop/data/mnist'
+dataset = datasets.MNIST(ROOT, train=True, download=True)
+x_train, y_train = th.load(os.path.join(dataset.root, 'processed/training.pt'))
+x_test, y_test = th.load(os.path.join(dataset.root, 'processed/test.pt'))
+
+x_train = x_train.float()
+y_train = y_train.long()
+x_test = x_test.float()
+y_test = y_test.long()
+
+x_train = x_train / 255.
+x_test = x_test / 255.
+x_train = x_train.unsqueeze(1)
+x_test = x_test.unsqueeze(1)
+
+# only train on a subset
+x_train = x_train[:1000]
+y_train = y_train[:1000]
+x_test = x_test[:100]
+y_test = y_test[:100]
+
+
+# Define your model EXACTLY as if you were using nn.Module
+class Network(nn.Module):
+ def __init__(self):
+ super(Network, self).__init__()
+ self.conv1 = nn.Conv2d(1, 32, kernel_size=3)
+ self.conv2 = nn.Conv2d(32, 64, kernel_size=3)
+ self.fc1 = nn.Linear(1600, 128)
+ self.fc2 = nn.Linear(128, 10)
+
+ def forward(self, x, y, z):
+ x = F.relu(F.max_pool2d(self.conv1(x), 2))
+ x = F.relu(F.max_pool2d(self.conv2(x), 2))
+ x = x.view(-1, 1600)
+ x = F.relu(self.fc1(x))
+ x = F.dropout(x, training=self.training)
+ x = self.fc2(x)
+ return F.log_softmax(x), F.log_softmax(x), F.log_softmax(x)
+
+# with one loss function given
+model = Network()
+trainer = ModuleTrainer(model)
+
+trainer.compile(loss='nll_loss',
+ optimizer='adadelta')
+
+trainer.fit([x_train, x_train, x_train],
+ [y_train, y_train, y_train],
+ num_epoch=3,
+ batch_size=128,
+ verbose=1)
+
+yp1, yp2, yp3 = trainer.predict([x_train, x_train, x_train])
+print(yp1.size(), yp2.size(), yp3.size())
+
+eval_loss = trainer.evaluate([x_train, x_train, x_train],
+ [y_train, y_train, y_train])
+print(eval_loss)
+
+# With multiple loss functions given
+model = Network()
+trainer = ModuleTrainer(model)
+
+trainer.compile(loss=['nll_loss', 'nll_loss', 'nll_loss'],
+ optimizer='adadelta')
+
+trainer.fit([x_train, x_train, x_train],
+ [y_train, y_train, y_train],
+ num_epoch=3,
+ batch_size=128,
+ verbose=1)
+
+# should raise exception for giving multiple loss functions
+# but not giving a loss function for every output
+try:
+ model = Network()
+ trainer = ModuleTrainer(model)
+
+ trainer.compile(loss=['nll_loss', 'nll_loss'],
+ optimizer='adadelta')
+
+ trainer.fit([x_train, x_train, x_train],
+ [y_train, y_train, y_train],
+ num_epoch=3,
+ batch_size=128,
+ verbose=1)
+except Exception:
+ print('Exception correctly caught')
+
diff --git a/torchsample/tests/integration/fit_simple/simple_multi_input_no_target.py b/torchsample/tests/integration/fit_simple/simple_multi_input_no_target.py
new file mode 100755
index 0000000..585b7c2
--- /dev/null
+++ b/torchsample/tests/integration/fit_simple/simple_multi_input_no_target.py
@@ -0,0 +1,67 @@
+
+import torch as th
+import torch.nn as nn
+import torch.nn.functional as F
+
+from torchsample.modules import ModuleTrainer
+
+import os
+from torchvision import datasets
+ROOT = '/users/ncullen/desktop/data/mnist'
+dataset = datasets.MNIST(ROOT, train=True, download=True)
+x_train, y_train = th.load(os.path.join(dataset.root, 'processed/training.pt'))
+x_test, y_test = th.load(os.path.join(dataset.root, 'processed/test.pt'))
+
+x_train = x_train.float()
+y_train = y_train.long()
+x_test = x_test.float()
+y_test = y_test.long()
+
+x_train = x_train / 255.
+x_test = x_test / 255.
+x_train = x_train.unsqueeze(1)
+x_test = x_test.unsqueeze(1)
+
+# only train on a subset
+x_train = x_train[:1000]
+y_train = y_train[:1000]
+x_test = x_test[:1000]
+y_test = y_test[:1000]
+
+
+# Define your model EXACTLY as if you were using nn.Module
+class Network(nn.Module):
+ def __init__(self):
+ super(Network, self).__init__()
+ self.conv1 = nn.Conv2d(1, 32, kernel_size=3)
+ self.conv2 = nn.Conv2d(32, 64, kernel_size=3)
+ self.fc1 = nn.Linear(1600, 128)
+ self.fc2 = nn.Linear(128, 1)
+
+ def forward(self, x, y, z):
+ x = F.relu(F.max_pool2d(self.conv1(x), 2))
+ x = F.relu(F.max_pool2d(self.conv2(x), 2))
+ x = x.view(-1, 1600)
+ x = F.relu(self.fc1(x))
+ x = F.dropout(x, training=self.training)
+ x = self.fc2(x)
+ return th.abs(10 - x)
+
+
+model = Network()
+trainer = ModuleTrainer(model)
+
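+# no targets are supplied here; 'unconstrained_sum' presumably just sums the
+# model output (|10 - x| above) and minimizes it directly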
+trainer.compile(loss='unconstrained_sum',
+ optimizer='adadelta')
+
+trainer.fit([x_train, x_train, x_train],
+ num_epoch=3,
+ batch_size=128,
+ verbose=1)
+
+ypred = trainer.predict([x_train, x_train, x_train])
+print(ypred.size())
+
+eval_loss = trainer.evaluate([x_train, x_train, x_train])
+print(eval_loss)
+
diff --git a/torchsample/tests/integration/fit_simple/simple_multi_input_single_target.py b/torchsample/tests/integration/fit_simple/simple_multi_input_single_target.py
new file mode 100755
index 0000000..be09c23
--- /dev/null
+++ b/torchsample/tests/integration/fit_simple/simple_multi_input_single_target.py
@@ -0,0 +1,67 @@
+
+import torch as th
+import torch.nn as nn
+import torch.nn.functional as F
+
+from torchsample.modules import ModuleTrainer
+
+import os
+from torchvision import datasets
+ROOT = '/users/ncullen/desktop/data/mnist'
+dataset = datasets.MNIST(ROOT, train=True, download=True)
+x_train, y_train = th.load(os.path.join(dataset.root, 'processed/training.pt'))
+x_test, y_test = th.load(os.path.join(dataset.root, 'processed/test.pt'))
+
+x_train = x_train.float()
+y_train = y_train.long()
+x_test = x_test.float()
+y_test = y_test.long()
+
+x_train = x_train / 255.
+x_test = x_test / 255.
+x_train = x_train.unsqueeze(1)
+x_test = x_test.unsqueeze(1)
+
+# only train on a subset
+x_train = x_train[:1000]
+y_train = y_train[:1000]
+x_test = x_test[:100]
+y_test = y_test[:100]
+
+
+# Define your model EXACTLY as if you were using nn.Module
+class Network(nn.Module):
+ def __init__(self):
+ super(Network, self).__init__()
+ self.conv1 = nn.Conv2d(1, 32, kernel_size=3)
+ self.conv2 = nn.Conv2d(32, 64, kernel_size=3)
+ self.fc1 = nn.Linear(1600, 128)
+ self.fc2 = nn.Linear(128, 10)
+
+ def forward(self, x, y, z):
+ x = F.relu(F.max_pool2d(self.conv1(x), 2))
+ x = F.relu(F.max_pool2d(self.conv2(x), 2))
+ x = x.view(-1, 1600)
+ x = F.relu(self.fc1(x))
+ x = F.dropout(x, training=self.training)
+ x = self.fc2(x)
+ return F.log_softmax(x)
+
+
+model = Network()
+trainer = ModuleTrainer(model)
+
+trainer.compile(loss='nll_loss',
+ optimizer='adadelta')
+
+trainer.fit([x_train, x_train, x_train], y_train,
+ val_data=([x_test, x_test, x_test], y_test),
+ num_epoch=3,
+ batch_size=128,
+ verbose=1)
+
+ypred = trainer.predict([x_train, x_train, x_train])
+print(ypred.size())
+
+eval_loss = trainer.evaluate([x_train, x_train, x_train], y_train)
+print(eval_loss)
\ No newline at end of file
diff --git a/torchsample/tests/integration/fit_simple/single_input_multi_target.py b/torchsample/tests/integration/fit_simple/single_input_multi_target.py
new file mode 100755
index 0000000..fcc915c
--- /dev/null
+++ b/torchsample/tests/integration/fit_simple/single_input_multi_target.py
@@ -0,0 +1,79 @@
+
+import torch as th
+import torch.nn as nn
+import torch.nn.functional as F
+
+from torchsample.modules import ModuleTrainer
+
+import os
+from torchvision import datasets
+ROOT = '/users/ncullen/desktop/data/mnist'
+dataset = datasets.MNIST(ROOT, train=True, download=True)
+x_train, y_train = th.load(os.path.join(dataset.root, 'processed/training.pt'))
+x_test, y_test = th.load(os.path.join(dataset.root, 'processed/test.pt'))
+
+x_train = x_train.float()
+y_train = y_train.long()
+x_test = x_test.float()
+y_test = y_test.long()
+
+x_train = x_train / 255.
+x_test = x_test / 255.
+x_train = x_train.unsqueeze(1)
+x_test = x_test.unsqueeze(1)
+
+# only train on a subset
+x_train = x_train[:1000]
+y_train = y_train[:1000]
+x_test = x_test[:1000]
+y_test = y_test[:1000]
+
+
+# Define your model EXACTLY as if you were using nn.Module
+class Network(nn.Module):
+ def __init__(self):
+ super(Network, self).__init__()
+ self.conv1 = nn.Conv2d(1, 32, kernel_size=3)
+ self.conv2 = nn.Conv2d(32, 64, kernel_size=3)
+ self.fc1 = nn.Linear(1600, 128)
+ self.fc2 = nn.Linear(128, 10)
+
+ def forward(self, x):
+ x = F.relu(F.max_pool2d(self.conv1(x), 2))
+ x = F.relu(F.max_pool2d(self.conv2(x), 2))
+ x = x.view(-1, 1600)
+ x = F.relu(self.fc1(x))
+ x = F.dropout(x, training=self.training)
+ x = self.fc2(x)
+ return F.log_softmax(x), F.log_softmax(x)
+
+
+# one loss function for multiple targets
+model = Network()
+trainer = ModuleTrainer(model)
+trainer.compile(loss='nll_loss',
+ optimizer='adadelta')
+
+trainer.fit(x_train,
+ [y_train, y_train],
+ num_epoch=3,
+ batch_size=128,
+ verbose=1)
+ypred1, ypred2 = trainer.predict(x_train)
+print(ypred1.size(), ypred2.size())
+
+eval_loss = trainer.evaluate(x_train, [y_train, y_train])
+print(eval_loss)
+# multiple loss functions
+model = Network()
+trainer = ModuleTrainer(model)
+trainer.compile(loss=['nll_loss', 'nll_loss'],
+ optimizer='adadelta')
+trainer.fit(x_train,
+ [y_train, y_train],
+ num_epoch=3,
+ batch_size=128,
+ verbose=1)
+
+
+
diff --git a/torchsample/tests/integration/fit_simple/single_input_no_target.py b/torchsample/tests/integration/fit_simple/single_input_no_target.py
new file mode 100755
index 0000000..170aab1
--- /dev/null
+++ b/torchsample/tests/integration/fit_simple/single_input_no_target.py
@@ -0,0 +1,66 @@
+
+import torch as th
+import torch.nn as nn
+import torch.nn.functional as F
+
+from torchsample.modules import ModuleTrainer
+
+import os
+from torchvision import datasets
+ROOT = '/users/ncullen/desktop/data/mnist'
+dataset = datasets.MNIST(ROOT, train=True, download=True)
+x_train, y_train = th.load(os.path.join(dataset.root, 'processed/training.pt'))
+x_test, y_test = th.load(os.path.join(dataset.root, 'processed/test.pt'))
+
+x_train = x_train.float()
+y_train = y_train.long()
+x_test = x_test.float()
+y_test = y_test.long()
+
+x_train = x_train / 255.
+x_test = x_test / 255.
+x_train = x_train.unsqueeze(1)
+x_test = x_test.unsqueeze(1)
+
+# only train on a subset
+x_train = x_train[:1000]
+y_train = y_train[:1000]
+x_test = x_test[:1000]
+y_test = y_test[:1000]
+
+
+# Define your model EXACTLY as if you were using nn.Module
+class Network(nn.Module):
+ def __init__(self):
+ super(Network, self).__init__()
+ self.conv1 = nn.Conv2d(1, 32, kernel_size=3)
+ self.conv2 = nn.Conv2d(32, 64, kernel_size=3)
+ self.fc1 = nn.Linear(1600, 128)
+ self.fc2 = nn.Linear(128, 1)
+
+ def forward(self, x):
+ x = F.relu(F.max_pool2d(self.conv1(x), 2))
+ x = F.relu(F.max_pool2d(self.conv2(x), 2))
+ x = x.view(-1, 1600)
+ x = F.relu(self.fc1(x))
+ x = F.dropout(x, training=self.training)
+ x = self.fc2(x)
+ return th.abs(10 - x)
+
+
+model = Network()
+trainer = ModuleTrainer(model)
+
+trainer.compile(loss='unconstrained_sum',
+ optimizer='adadelta')
+
+trainer.fit(x_train,
+ num_epoch=3,
+ batch_size=128,
+ verbose=1)
+
+ypred = trainer.predict(x_train)
+print(ypred.size())
+
+eval_loss = trainer.evaluate(x_train, None)
+print(eval_loss)
\ No newline at end of file
diff --git a/torchsample/tests/integration/fit_simple/single_input_single_target.py b/torchsample/tests/integration/fit_simple/single_input_single_target.py
new file mode 100755
index 0000000..cbcd705
--- /dev/null
+++ b/torchsample/tests/integration/fit_simple/single_input_single_target.py
@@ -0,0 +1,74 @@
+
+import torch as th
+import torch.nn as nn
+import torch.nn.functional as F
+
+from torchsample.modules import ModuleTrainer
+from torchsample import regularizers as reg
+from torchsample import constraints as con
+
+import os
+from torchvision import datasets
+ROOT = '/users/ncullen/desktop/data/mnist'
+dataset = datasets.MNIST(ROOT, train=True, download=True)
+x_train, y_train = th.load(os.path.join(dataset.root, 'processed/training.pt'))
+x_test, y_test = th.load(os.path.join(dataset.root, 'processed/test.pt'))
+
+x_train = x_train.float()
+y_train = y_train.long()
+x_test = x_test.float()
+y_test = y_test.long()
+
+x_train = x_train / 255.
+x_test = x_test / 255.
+x_train = x_train.unsqueeze(1)
+x_test = x_test.unsqueeze(1)
+
+# only train on a subset
+x_train = x_train[:1000]
+y_train = y_train[:1000]
+x_test = x_test[:1000]
+y_test = y_test[:1000]
+
+
+# Define your model EXACTLY as if you were using nn.Module
+class Network(nn.Module):
+ def __init__(self):
+ super(Network, self).__init__()
+ self.conv1 = nn.Conv2d(1, 32, kernel_size=3)
+ self.conv2 = nn.Conv2d(32, 64, kernel_size=3)
+ self.fc1 = nn.Linear(1600, 128)
+ self.fc2 = nn.Linear(128, 10)
+
+ def forward(self, x):
+ x = F.relu(F.max_pool2d(self.conv1(x), 2))
+ x = F.relu(F.max_pool2d(self.conv2(x), 2))
+ x = x.view(-1, 1600)
+ x = F.relu(self.fc1(x))
+ #x = F.dropout(x, training=self.training)
+ x = self.fc2(x)
+ return F.log_softmax(x)
+
+
+model = Network()
+trainer = ModuleTrainer(model)
+
+trainer.compile(loss='nll_loss',
+ optimizer='adadelta',
+ regularizers=[reg.L1Regularizer(1e-4)])
+
+trainer.fit(x_train, y_train,
+ val_data=(x_test, y_test),
+ num_epoch=3,
+ batch_size=128,
+ verbose=1)
+
+ypred = trainer.predict(x_train)
+print(ypred.size())
+
+eval_loss = trainer.evaluate(x_train, y_train)
+print(eval_loss)
+
+print(trainer.history)
+#print(trainer.history['loss'])
+
diff --git a/torchsample/tests/test_metrics.py b/torchsample/tests/test_metrics.py
new file mode 100755
index 0000000..90606ec
--- /dev/null
+++ b/torchsample/tests/test_metrics.py
@@ -0,0 +1,21 @@
+import unittest
+import torch
+from torch.autograd import Variable
+
+from torchsample.metrics import CategoricalAccuracy
+
+class TestMetrics(unittest.TestCase):
+
+ def test_categorical_accuracy(self):
+ metric = CategoricalAccuracy()
+ predicted = Variable(torch.eye(10))
+ expected = Variable(torch.LongTensor(list(range(10))))
+ self.assertEqual(metric(predicted, expected), 100.0)
+
+ # Set 1st column to ones
+ predicted = Variable(torch.zeros(10, 10))
+ predicted.data[:, 0] = torch.ones(10)
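+ # the metric keeps a running average: 10/10 correct from the first call
+ # plus 1/10 here (only class 0 matches), i.e. 11/20 = 55%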
+ self.assertEqual(metric(predicted, expected), 55.0)
+
+if __name__ == '__main__':
+ unittest.main()
\ No newline at end of file
diff --git a/torchsample/tests/transforms/test_affine_transforms.py b/torchsample/tests/transforms/test_affine_transforms.py
new file mode 100755
index 0000000..75d0bc0
--- /dev/null
+++ b/torchsample/tests/transforms/test_affine_transforms.py
@@ -0,0 +1,163 @@
+"""
+Test affine transforms
+
+Transforms:
+ - Affine + RandomAffine
+ - AffineCompose
+ - Rotate + RandomRotate
+ - Translate + RandomTranslate
+ - Shear + RandomShear
+ - Zoom + RandomZoom
+"""
+
+#import pytest
+
+import torch as th
+
+from torchsample.transforms import (RandomAffine, Affine,
+ RandomRotate, RandomChoiceRotate, Rotate,
+ RandomTranslate, RandomChoiceTranslate, Translate,
+ RandomShear, RandomChoiceShear, Shear,
+ RandomZoom, RandomChoiceZoom, Zoom)
+
+# ----------------------------------------------------
+# ----------------------------------------------------
+
+## DATA SET ##
+def gray2d_setup():
+ images = {}
+
+ x = th.zeros(1,30,30)
+ x[:,10:21,10:21] = 1
+ images['gray_01'] = x
+
+ x = th.zeros(1,30,40)
+ x[:,10:21,10:21] = 1
+ images['gray_02'] = x
+
+ return images
+
+def multi_gray2d_setup():
+ old_imgs = gray2d_setup()
+ images = {}
+ for k,v in old_imgs.items():
+ images[k+'_2imgs'] = [v,v]
+ images[k+'_3imgs'] = [v,v,v]
+ images[k+'_4imgs'] = [v,v,v,v]
+ return images
+
+def color2d_setup():
+ images = {}
+
+ x = th.zeros(3,30,30)
+ x[:,10:21,10:21] = 1
+ images['color_01'] = x
+
+ x = th.zeros(3,30,40)
+ x[:,10:21,10:21] = 1
+ images['color_02'] = x
+
+ return images
+
+def multi_color2d_setup():
+ old_imgs = color2d_setup()
+ images = {}
+ for k,v in old_imgs.items():
+ images[k+'_2imgs'] = [v,v]
+ images[k+'_3imgs'] = [v,v,v]
+ images[k+'_4imgs'] = [v,v,v,v]
+ return images
+
+
+# ----------------------------------------------------
+# ----------------------------------------------------
+
+
+def Affine_setup():
+ tforms = {}
+ tforms['random_affine'] = RandomAffine(rotation_range=30,
+ translation_range=0.1)
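+ # fixed 2x3 affine matrix: a uniform 0.9 scaling with no translation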
+ tforms['affine'] = Affine(th.FloatTensor([[0.9,0,0],[0,0.9,0]]))
+ return tforms
+
+def Rotate_setup():
+ tforms = {}
+ tforms['random_rotate'] = RandomRotate(30)
+ tforms['random_choice_rotate'] = RandomChoiceRotate([30,40,50])
+ tforms['rotate'] = Rotate(30)
+ return tforms
+
+def Translate_setup():
+ tforms = {}
+ tforms['random_translate'] = RandomTranslate(0.1)
+ tforms['random_choice_translate'] = RandomChoiceTranslate([0.1,0.2])
+ tforms['translate'] = Translate(0.3)
+ return tforms
+
+def Shear_setup():
+ tforms = {}
+ tforms['random_shear'] = RandomShear(30)
+ tforms['random_choice_shear'] = RandomChoiceShear([20,30,40])
+ tforms['shear'] = Shear(25)
+ return tforms
+
+def Zoom_setup():
+ tforms = {}
+ tforms['random_zoom'] = RandomZoom((0.8,1.2))
+ tforms['random_choice_zoom'] = RandomChoiceZoom([0.8,0.9,1.1,1.2])
+ tforms['zoom'] = Zoom(0.9)
+ return tforms
+
+# ----------------------------------------------------
+# ----------------------------------------------------
+
+def test_affine_transforms_runtime(verbose=1):
+ """
+ Test that there are no runtime errors
+ """
+ ### MAKE TRANSFORMS ###
+ tforms = {}
+ tforms.update(Affine_setup())
+ tforms.update(Rotate_setup())
+ tforms.update(Translate_setup())
+ tforms.update(Shear_setup())
+ tforms.update(Zoom_setup())
+
+ ### MAKE DATA
+ images = {}
+ images.update(gray2d_setup())
+ images.update(multi_gray2d_setup())
+ images.update(color2d_setup())
+ images.update(multi_color2d_setup())
+
+ successes = []
+ failures = []
+ for im_key, im_val in images.items():
+ for tf_key, tf_val in tforms.items():
+ try:
+ if isinstance(im_val, (tuple,list)):
+ tf_val(*im_val)
+ else:
+ tf_val(im_val)
+ successes.append((im_key, tf_key))
+ except Exception:
+ failures.append((im_key, tf_key))
+
+ if verbose > 0:
+ for k, v in failures:
+ print('%s - %s' % (k, v))
+
+ print('# SUCCESSES: ', len(successes))
+ print('# FAILURES: ' , len(failures))
+
+
+if __name__=='__main__':
+ test_affine_transforms_runtime()
+
+
+
+
+
+
+
+
diff --git a/torchsample/tests/transforms/test_image_transforms.py b/torchsample/tests/transforms/test_image_transforms.py
new file mode 100755
index 0000000..f8a34fe
--- /dev/null
+++ b/torchsample/tests/transforms/test_image_transforms.py
@@ -0,0 +1,192 @@
+"""
+Tests for torchsample/transforms/image_transforms.py
+"""
+
+
+import torch as th
+
+from torchsample.transforms import (Grayscale, RandomGrayscale,
+ Gamma, RandomGamma, RandomChoiceGamma,
+ Brightness, RandomBrightness, RandomChoiceBrightness,
+ Saturation, RandomSaturation, RandomChoiceSaturation,
+ Contrast, RandomContrast, RandomChoiceContrast)
+
+# ----------------------------------------------------
+# ----------------------------------------------------
+
+## DATA SET ##
+def gray2d_setup():
+ images = {}
+
+ x = th.zeros(1,30,30)
+ x[:,10:21,10:21] = 1
+ images['gray_01'] = x
+
+ x = th.zeros(1,30,40)
+ x[:,10:21,10:21] = 1
+ images['gray_02'] = x
+
+ return images
+
+def multi_gray2d_setup():
+ old_imgs = gray2d_setup()
+ images = {}
+ for k,v in old_imgs.items():
+ images[k+'_2imgs'] = [v,v]
+ images[k+'_3imgs'] = [v,v,v]
+ images[k+'_4imgs'] = [v,v,v,v]
+ return images
+
+def color2d_setup():
+ images = {}
+
+ x = th.zeros(3,30,30)
+ x[:,10:21,10:21] = 1
+ images['color_01'] = x
+
+ x = th.zeros(3,30,40)
+ x[:,10:21,10:21] = 1
+ images['color_02'] = x
+
+ return images
+
+def multi_color2d_setup():
+ old_imgs = color2d_setup()
+ images = {}
+ for k,v in old_imgs.items():
+ images[k+'_2imgs'] = [v,v]
+ images[k+'_3imgs'] = [v,v,v]
+ images[k+'_4imgs'] = [v,v,v,v]
+ return images
+
+# ----------------------------------------------------
+# ----------------------------------------------------
+
+## TFORMS SETUP ###
+def Grayscale_setup():
+ tforms = {}
+ tforms['grayscale_keepchannels'] = Grayscale(keep_channels=True)
+ tforms['grayscale_dontkeepchannels'] = Grayscale(keep_channels=False)
+
+ tforms['random_grayscale_nop'] = RandomGrayscale()
+ tforms['random_grayscale_p_01'] = RandomGrayscale(0)
+ tforms['random_grayscale_p_02'] = RandomGrayscale(0.5)
+ tforms['random_grayscale_p_03'] = RandomGrayscale(1)
+
+ return tforms
+
+def Gamma_setup():
+ tforms = {}
+ tforms['gamma_<1'] = Gamma(value=0.5)
+ tforms['gamma_=1'] = Gamma(value=1.0)
+ tforms['gamma_>1'] = Gamma(value=1.5)
+ tforms['random_gamma_01'] = RandomGamma(0.5,1.5)
+ tforms['random_gamma_02'] = RandomGamma(0.5,1.0)
+ tforms['random_gamma_03'] = RandomGamma(1.0,1.5)
+ tforms['random_choice_gamma_01'] = RandomChoiceGamma([0.5,1.0])
+ tforms['random_choice_gamma_02'] = RandomChoiceGamma([0.5,1.0],p=[0.5,0.5])
+ tforms['random_choice_gamma_03'] = RandomChoiceGamma([0.5,1.0],p=[0.2,0.8])
+
+ return tforms
+
+def Brightness_setup():
+ tforms = {}
+ tforms['brightness_=-1'] = Brightness(value=-1)
+ tforms['brightness_<0'] = Brightness(value=-0.5)
+ tforms['brightness_=0'] = Brightness(value=0)
+ tforms['brightness_>0'] = Brightness(value=0.5)
+ tforms['brightness_=1'] = Brightness(value=1)
+
+ tforms['random_brightness_01'] = RandomBrightness(-1,-0.5)
+ tforms['random_brightness_02'] = RandomBrightness(-0.5,0)
+ tforms['random_brightness_03'] = RandomBrightness(0,0.5)
+ tforms['random_brightness_04'] = RandomBrightness(0.5,1)
+
+ tforms['random_choice_brightness_01'] = RandomChoiceBrightness([-1,0,1])
+ tforms['random_choice_brightness_02'] = RandomChoiceBrightness([-1,0,1],p=[0.2,0.5,0.3])
+ tforms['random_choice_brightness_03'] = RandomChoiceBrightness([0,0,0,0],p=[0.25,0.5,0.25,0.25])
+
+ return tforms
+
+def Saturation_setup():
+ tforms = {}
+ tforms['saturation_=-1'] = Saturation(-1)
+ tforms['saturation_<0'] = Saturation(-0.5)
+ tforms['saturation_=0'] = Saturation(0)
+ tforms['saturation_>0'] = Saturation(0.5)
+ tforms['saturation_=1'] = Saturation(1)
+
+ tforms['random_saturation_01'] = RandomSaturation(-1,-0.5)
+ tforms['random_saturation_02'] = RandomSaturation(-0.5,0)
+ tforms['random_saturation_03'] = RandomSaturation(0,0.5)
+ tforms['random_saturation_04'] = RandomSaturation(0.5,1)
+
+ tforms['random_choice_saturation_01'] = RandomChoiceSaturation([-1,0,1])
+ tforms['random_choice_saturation_02'] = RandomChoiceSaturation([-1,0,1],p=[0.2,0.5,0.3])
+ tforms['random_choice_saturation_03'] = RandomChoiceSaturation([0,0,0,0],p=[0.25,0.5,0.25,0.25])
+
+ return tforms
+
+def Contrast_setup():
+ tforms = {}
+ tforms['contrast_<<0'] = Contrast(-10)
+ tforms['contrast_<0'] = Contrast(-1)
+ tforms['contrast_=0'] = Contrast(0)
+ tforms['contrast_>0'] = Contrast(1)
+ tforms['contrast_>>0'] = Contrast(10)
+
+ tforms['random_contrast_01'] = RandomContrast(-10,-1)
+ tforms['random_contrast_02'] = RandomContrast(-1,0)
+ tforms['random_contrast_03'] = RandomContrast(0,1)
+ tforms['random_contrast_04'] = RandomContrast(1,10)
+
+    tforms['random_choice_contrast_01'] = RandomChoiceContrast([-1,0,1])
+    tforms['random_choice_contrast_02'] = RandomChoiceContrast([-10,0,10],p=[0.2,0.5,0.3])
+    tforms['random_choice_contrast_03'] = RandomChoiceContrast([1,1],p=[0.5,0.5])
+
+ return tforms
+
+# ----------------------------------------------------
+# ----------------------------------------------------
+
+def test_image_transforms_runtime(verbose=1):
+ """
+ Test that there are no runtime errors
+ """
+ ### MAKE TRANSFORMS ###
+ tforms = {}
+ tforms.update(Gamma_setup())
+ tforms.update(Brightness_setup())
+ tforms.update(Saturation_setup())
+ tforms.update(Contrast_setup())
+
+ ### MAKE DATA ###
+ images = {}
+ images.update(gray2d_setup())
+ images.update(multi_gray2d_setup())
+ images.update(color2d_setup())
+ images.update(multi_color2d_setup())
+
+ successes = []
+ failures = []
+ for im_key, im_val in images.items():
+ for tf_key, tf_val in tforms.items():
+ try:
+ if isinstance(im_val, (tuple,list)):
+ tf_val(*im_val)
+ else:
+ tf_val(im_val)
+ successes.append((im_key, tf_key))
+ except:
+ failures.append((im_key, tf_key))
+
+ if verbose > 0:
+ for k, v in failures:
+ print('%s - %s' % (k, v))
+
+ print('# SUCCESSES: ', len(successes))
+ print('# FAILURES: ' , len(failures))
+
+
+if __name__=='__main__':
+ test_image_transforms_runtime()
diff --git a/torchsample/tests/transforms/test_tensor_transforms.py b/torchsample/tests/transforms/test_tensor_transforms.py
new file mode 100755
index 0000000..fcbbeae
--- /dev/null
+++ b/torchsample/tests/transforms/test_tensor_transforms.py
@@ -0,0 +1,314 @@
+"""
+Tests for torchsample/transforms/tensor_transforms.py
+"""
+
+
+import torch as th
+
+from torchsample.transforms import (ToTensor,
+ ToVariable,
+ ToCuda,
+ ToFile,
+ ChannelsLast, HWC,
+ ChannelsFirst, CHW,
+ TypeCast,
+ AddChannel,
+ Transpose,
+ RangeNormalize,
+ StdNormalize,
+ RandomCrop,
+ SpecialCrop,
+ Pad,
+ RandomFlip,
+ RandomOrder)
+
+# ----------------------------------------------------
+
+## DATA SET ##
+def gray2d_setup():
+ images = {}
+
+ x = th.zeros(1,30,30)
+ x[:,10:21,10:21] = 1
+ images['gray_01'] = x
+
+ x = th.zeros(1,30,40)
+ x[:,10:21,10:21] = 1
+ images['gray_02'] = x
+ return images
+
+def multi_gray2d_setup():
+ old_imgs = gray2d_setup()
+ images = {}
+ for k,v in old_imgs.items():
+ images[k+'_2imgs'] = [v,v]
+ images[k+'_3imgs'] = [v,v,v]
+ images[k+'_4imgs'] = [v,v,v,v]
+ return images
+
+def color2d_setup():
+ images = {}
+
+ x = th.zeros(3,30,30)
+ x[:,10:21,10:21] = 1
+ images['color_01'] = x
+
+ x = th.zeros(3,30,40)
+ x[:,10:21,10:21] = 1
+ images['color_02'] = x
+
+ return images
+
+def multi_color2d_setup():
+ old_imgs = color2d_setup()
+ images = {}
+ for k,v in old_imgs.items():
+ images[k+'_2imgs'] = [v,v]
+ images[k+'_3imgs'] = [v,v,v]
+ images[k+'_4imgs'] = [v,v,v,v]
+ return images
+# ----------------------------------------------------
+# ----------------------------------------------------
+
+## TFORMS SETUP ###
+def ToTensor_setup():
+ tforms = {}
+
+ tforms['totensor'] = ToTensor()
+
+ return tforms
+
+def ToVariable_setup():
+ tforms = {}
+
+ tforms['tovariable'] = ToVariable()
+
+ return tforms
+
+def ToCuda_setup():
+ tforms = {}
+
+ tforms['tocuda'] = ToCuda()
+
+ return tforms
+
+def ToFile_setup():
+ tforms = {}
+
+ ROOT = '~/desktop/data/'
+ tforms['tofile_npy'] = ToFile(root=ROOT, fmt='npy')
+ tforms['tofile_pth'] = ToFile(root=ROOT, fmt='pth')
+ tforms['tofile_jpg'] = ToFile(root=ROOT, fmt='jpg')
+ tforms['tofile_png'] = ToFile(root=ROOT, fmt='png')
+
+ return tforms
+
+def ChannelsLast_setup():
+ tforms = {}
+
+ tforms['channels_last'] = ChannelsLast()
+ tforms['hwc'] = HWC()
+
+ return tforms
+
+def ChannelsFirst_setup():
+ tforms = {}
+
+ tforms['channels_first'] = ChannelsFirst()
+ tforms['chw'] = CHW()
+
+ return tforms
+
+def TypeCast_setup():
+ tforms = {}
+
+ tforms['byte'] = TypeCast('byte')
+ tforms['double'] = TypeCast('double')
+ tforms['float'] = TypeCast('float')
+ tforms['int'] = TypeCast('int')
+ tforms['long'] = TypeCast('long')
+ tforms['short'] = TypeCast('short')
+
+ return tforms
+
+def AddChannel_setup():
+ tforms = {}
+
+ tforms['addchannel_axis0'] = AddChannel(axis=0)
+ tforms['addchannel_axis1'] = AddChannel(axis=1)
+ tforms['addchannel_axis2'] = AddChannel(axis=2)
+
+ return tforms
+
+def Transpose_setup():
+ tforms = {}
+
+ tforms['transpose_01'] = Transpose(0, 1)
+ tforms['transpose_02'] = Transpose(0, 2)
+ tforms['transpose_10'] = Transpose(1, 0)
+ tforms['transpose_12'] = Transpose(1, 2)
+ tforms['transpose_20'] = Transpose(2, 0)
+ tforms['transpose_21'] = Transpose(2, 1)
+
+ return tforms
+
+def RangeNormalize_setup():
+ tforms = {}
+
+ tforms['rangenorm_01'] = RangeNormalize(0, 1)
+ tforms['rangenorm_-11'] = RangeNormalize(-1, 1)
+ tforms['rangenorm_-33'] = RangeNormalize(-3, 3)
+ tforms['rangenorm_02'] = RangeNormalize(0, 2)
+
+ return tforms
+
+def StdNormalize_setup():
+ tforms = {}
+
+ tforms['stdnorm'] = StdNormalize()
+
+ return tforms
+
+def RandomCrop_setup():
+ tforms = {}
+
+ tforms['randomcrop_1010'] = RandomCrop((10,10))
+ tforms['randomcrop_510'] = RandomCrop((5,10))
+ tforms['randomcrop_105'] = RandomCrop((10,5))
+ tforms['randomcrop_99'] = RandomCrop((9,9))
+ tforms['randomcrop_79'] = RandomCrop((7,9))
+ tforms['randomcrop_97'] = RandomCrop((9,7))
+
+ return tforms
+
+def SpecialCrop_setup():
+ tforms = {}
+
+ tforms['specialcrop_0_1010'] = SpecialCrop((10,10),0)
+ tforms['specialcrop_0_510'] = SpecialCrop((5,10),0)
+ tforms['specialcrop_0_105'] = SpecialCrop((10,5),0)
+ tforms['specialcrop_0_99'] = SpecialCrop((9,9),0)
+ tforms['specialcrop_0_79'] = SpecialCrop((7,9),0)
+ tforms['specialcrop_0_97'] = SpecialCrop((9,7),0)
+
+ tforms['specialcrop_1_1010'] = SpecialCrop((10,10),1)
+ tforms['specialcrop_1_510'] = SpecialCrop((5,10),1)
+ tforms['specialcrop_1_105'] = SpecialCrop((10,5),1)
+ tforms['specialcrop_1_99'] = SpecialCrop((9,9),1)
+ tforms['specialcrop_1_79'] = SpecialCrop((7,9),1)
+ tforms['specialcrop_1_97'] = SpecialCrop((9,7),1)
+
+ tforms['specialcrop_2_1010'] = SpecialCrop((10,10),2)
+ tforms['specialcrop_2_510'] = SpecialCrop((5,10),2)
+ tforms['specialcrop_2_105'] = SpecialCrop((10,5),2)
+ tforms['specialcrop_2_99'] = SpecialCrop((9,9),2)
+ tforms['specialcrop_2_79'] = SpecialCrop((7,9),2)
+ tforms['specialcrop_2_97'] = SpecialCrop((9,7),2)
+
+ tforms['specialcrop_3_1010'] = SpecialCrop((10,10),3)
+ tforms['specialcrop_3_510'] = SpecialCrop((5,10),3)
+ tforms['specialcrop_3_105'] = SpecialCrop((10,5),3)
+ tforms['specialcrop_3_99'] = SpecialCrop((9,9),3)
+ tforms['specialcrop_3_79'] = SpecialCrop((7,9),3)
+ tforms['specialcrop_3_97'] = SpecialCrop((9,7),3)
+
+ tforms['specialcrop_4_1010'] = SpecialCrop((10,10),4)
+ tforms['specialcrop_4_510'] = SpecialCrop((5,10),4)
+ tforms['specialcrop_4_105'] = SpecialCrop((10,5),4)
+ tforms['specialcrop_4_99'] = SpecialCrop((9,9),4)
+ tforms['specialcrop_4_79'] = SpecialCrop((7,9),4)
+ tforms['specialcrop_4_97'] = SpecialCrop((9,7),4)
+ return tforms
+
+def Pad_setup():
+ tforms = {}
+
+ tforms['pad_4040'] = Pad((40,40))
+ tforms['pad_3040'] = Pad((30,40))
+ tforms['pad_4030'] = Pad((40,30))
+ tforms['pad_3939'] = Pad((39,39))
+ tforms['pad_3941'] = Pad((39,41))
+ tforms['pad_4139'] = Pad((41,39))
+ tforms['pad_4138'] = Pad((41,38))
+ tforms['pad_3841'] = Pad((38,41))
+
+ return tforms
+
+def RandomFlip_setup():
+ tforms = {}
+
+ tforms['randomflip_h_01'] = RandomFlip(h=True, v=False)
+ tforms['randomflip_h_02'] = RandomFlip(h=True, v=False, p=0)
+ tforms['randomflip_h_03'] = RandomFlip(h=True, v=False, p=1)
+ tforms['randomflip_h_04'] = RandomFlip(h=True, v=False, p=0.3)
+ tforms['randomflip_v_01'] = RandomFlip(h=False, v=True)
+ tforms['randomflip_v_02'] = RandomFlip(h=False, v=True, p=0)
+ tforms['randomflip_v_03'] = RandomFlip(h=False, v=True, p=1)
+ tforms['randomflip_v_04'] = RandomFlip(h=False, v=True, p=0.3)
+ tforms['randomflip_hv_01'] = RandomFlip(h=True, v=True)
+ tforms['randomflip_hv_02'] = RandomFlip(h=True, v=True, p=0)
+ tforms['randomflip_hv_03'] = RandomFlip(h=True, v=True, p=1)
+ tforms['randomflip_hv_04'] = RandomFlip(h=True, v=True, p=0.3)
+ return tforms
+
+def RandomOrder_setup():
+ tforms = {}
+
+ tforms['randomorder'] = RandomOrder()
+
+ return tforms
+
+# ----------------------------------------------------
+# ----------------------------------------------------
+
+def test_tensor_transforms_runtime(verbose=1):
+ ### MAKE TRANSFORMS ###
+ tforms = {}
+ tforms.update(ToTensor_setup())
+ tforms.update(ToVariable_setup())
+ tforms.update(ToCuda_setup())
+ #tforms.update(ToFile_setup())
+ tforms.update(ChannelsLast_setup())
+ tforms.update(ChannelsFirst_setup())
+ tforms.update(TypeCast_setup())
+ tforms.update(AddChannel_setup())
+ tforms.update(Transpose_setup())
+ tforms.update(RangeNormalize_setup())
+ tforms.update(StdNormalize_setup())
+ tforms.update(RandomCrop_setup())
+ tforms.update(SpecialCrop_setup())
+ tforms.update(Pad_setup())
+ tforms.update(RandomFlip_setup())
+ tforms.update(RandomOrder_setup())
+
+
+ ### MAKE DATA
+ images = {}
+ images.update(gray2d_setup())
+ images.update(multi_gray2d_setup())
+ images.update(color2d_setup())
+ images.update(multi_color2d_setup())
+
+ successes =[]
+ failures = []
+ for im_key, im_val in images.items():
+ for tf_key, tf_val in tforms.items():
+ try:
+ if isinstance(im_val, (tuple,list)):
+ tf_val(*im_val)
+ else:
+ tf_val(im_val)
+ successes.append((im_key, tf_key))
+ except:
+ failures.append((im_key, tf_key))
+
+ if verbose > 0:
+ for k, v in failures:
+ print('%s - %s' % (k, v))
+
+ print('# SUCCESSES: ', len(successes))
+ print('# FAILURES: ' , len(failures))
+
+
+if __name__=='__main__':
+    test_tensor_transforms_runtime()
diff --git a/torchsample/tests/utils.py b/torchsample/tests/utils.py
new file mode 100755
index 0000000..354eea2
--- /dev/null
+++ b/torchsample/tests/utils.py
@@ -0,0 +1,31 @@
+
+import numpy as np
+import torch as th
+
+def get_test_data(num_train=1000, num_test=500,
+ input_shape=(10,), output_shape=(2,),
+ classification=True, num_classes=2):
+ """Generates test data to train a model on.
+
+ classification=True overrides output_shape
+    (i.e. output_shape is set to (1,)) and the output
+    consists of integers in [0, num_classes - 1].
+
+ Otherwise: float output with shape output_shape.
+ """
+ samples = num_train + num_test
+ if classification:
+ y = np.random.randint(0, num_classes, size=(samples,))
+ X = np.zeros((samples,) + input_shape)
+ for i in range(samples):
+ X[i] = np.random.normal(loc=y[i], scale=0.7, size=input_shape)
+ else:
+ y_loc = np.random.random((samples,))
+ X = np.zeros((samples,) + input_shape)
+ y = np.zeros((samples,) + output_shape)
+ for i in range(samples):
+ X[i] = np.random.normal(loc=y_loc[i], scale=0.7, size=input_shape)
+ y[i] = np.random.normal(loc=y_loc[i], scale=0.7, size=output_shape)
+
+ return (th.from_numpy(X[:num_train]), th.from_numpy(y[:num_train])), \
+ (th.from_numpy(X[num_train:]), th.from_numpy(y[num_train:]))
\ No newline at end of file
diff --git a/torchsample/torchsample/__init__.py b/torchsample/torchsample/__init__.py
new file mode 100755
index 0000000..2caa5f9
--- /dev/null
+++ b/torchsample/torchsample/__init__.py
@@ -0,0 +1,15 @@
+
+from __future__ import absolute_import
+
+from .version import __version__
+
+from .datasets import *
+from .samplers import *
+
+#from .callbacks import *
+#from .constraints import *
+#from .regularizers import *
+
+#from . import functions
+#from . import transforms
+from . import modules
diff --git a/torchsample/torchsample/callbacks.py b/torchsample/torchsample/callbacks.py
new file mode 100755
index 0000000..35540d2
--- /dev/null
+++ b/torchsample/torchsample/callbacks.py
@@ -0,0 +1,744 @@
+"""
+SuperModule Callbacks
+"""
+
+from __future__ import absolute_import
+from __future__ import print_function
+
+from collections import OrderedDict
+from collections.abc import Iterable
+import warnings
+
+import os
+import csv
+import time
+from tempfile import NamedTemporaryFile
+import shutil
+import datetime
+import numpy as np
+
+from tqdm import tqdm
+
+import torch as th
+
+
+def _get_current_time():
+ return datetime.datetime.now().strftime("%B %d, %Y - %I:%M%p")
+
+class CallbackContainer(object):
+ """
+ Container holding a list of callbacks.
+ """
+ def __init__(self, callbacks=None, queue_length=10):
+ callbacks = callbacks or []
+ self.callbacks = [c for c in callbacks]
+ self.queue_length = queue_length
+
+ def append(self, callback):
+ self.callbacks.append(callback)
+
+ def set_params(self, params):
+ for callback in self.callbacks:
+ callback.set_params(params)
+
+ def set_trainer(self, trainer):
+ self.trainer = trainer
+ for callback in self.callbacks:
+ callback.set_trainer(trainer)
+
+ def on_epoch_begin(self, epoch, logs=None):
+ logs = logs or {}
+ for callback in self.callbacks:
+ callback.on_epoch_begin(epoch, logs)
+
+ def on_epoch_end(self, epoch, logs=None):
+ logs = logs or {}
+ for callback in self.callbacks:
+ callback.on_epoch_end(epoch, logs)
+
+ def on_batch_begin(self, batch, logs=None):
+ logs = logs or {}
+ for callback in self.callbacks:
+ callback.on_batch_begin(batch, logs)
+
+ def on_batch_end(self, batch, logs=None):
+ logs = logs or {}
+ for callback in self.callbacks:
+ callback.on_batch_end(batch, logs)
+
+ def on_train_begin(self, logs=None):
+ logs = logs or {}
+ logs['start_time'] = _get_current_time()
+ for callback in self.callbacks:
+ callback.on_train_begin(logs)
+
+ def on_train_end(self, logs=None):
+ logs = logs or {}
+        logs['final_loss'] = self.trainer.history.epoch_losses[-1]
+        logs['best_loss'] = min(self.trainer.history.epoch_losses)
+ logs['stop_time'] = _get_current_time()
+ for callback in self.callbacks:
+ callback.on_train_end(logs)
+
+
+class Callback(object):
+ """
+ Abstract base class used to build new callbacks.
+ """
+
+ def __init__(self):
+ pass
+
+ def set_params(self, params):
+ self.params = params
+
+ def set_trainer(self, model):
+ self.trainer = model
+
+ def on_epoch_begin(self, epoch, logs=None):
+ pass
+
+ def on_epoch_end(self, epoch, logs=None):
+ pass
+
+ def on_batch_begin(self, batch, logs=None):
+ pass
+
+ def on_batch_end(self, batch, logs=None):
+ pass
+
+ def on_train_begin(self, logs=None):
+ pass
+
+ def on_train_end(self, logs=None):
+ pass
+
+
+class TQDM(Callback):
+
+ def __init__(self):
+ """
+ TQDM Progress Bar callback
+
+ This callback is automatically applied to
+ every SuperModule if verbose > 0
+ """
+ self.progbar = None
+ super(TQDM, self).__init__()
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ # make sure the dbconnection gets closed
+ if self.progbar is not None:
+ self.progbar.close()
+
+ def on_train_begin(self, logs):
+ self.train_logs = logs
+
+ def on_epoch_begin(self, epoch, logs=None):
+ try:
+ self.progbar = tqdm(total=self.train_logs['num_batches'],
+ unit=' batches')
+ self.progbar.set_description('Epoch %i/%i' %
+ (epoch+1, self.train_logs['num_epoch']))
+ except:
+ pass
+
+ def on_epoch_end(self, epoch, logs=None):
+ log_data = {key: '%.04f' % value for key, value in self.trainer.history.batch_metrics.items()}
+ for k, v in logs.items():
+ if k.endswith('metric'):
+ log_data[k.split('_metric')[0]] = '%.02f' % v
+ else:
+ log_data[k] = v
+ self.progbar.set_postfix(log_data)
+ self.progbar.update()
+ self.progbar.close()
+
+ def on_batch_begin(self, batch, logs=None):
+ self.progbar.update(1)
+
+ def on_batch_end(self, batch, logs=None):
+ log_data = {key: '%.04f' % value for key, value in self.trainer.history.batch_metrics.items()}
+ for k, v in logs.items():
+ if k.endswith('metric'):
+ log_data[k.split('_metric')[0]] = '%.02f' % v
+ self.progbar.set_postfix(log_data)
+
+
+class History(Callback):
+ """
+ Callback that records events into a `History` object.
+
+ This callback is automatically applied to
+ every SuperModule.
+ """
+ def __init__(self, model):
+ super(History, self).__init__()
+ self.samples_seen = 0.
+ self.trainer = model
+
+ def on_train_begin(self, logs=None):
+ self.epoch_metrics = {
+ 'loss': []
+ }
+ self.batch_size = logs['batch_size']
+ self.has_val_data = logs['has_val_data']
+ self.has_regularizers = logs['has_regularizers']
+ if self.has_val_data:
+ self.epoch_metrics['val_loss'] = []
+ if self.has_regularizers:
+ self.epoch_metrics['reg_loss'] = []
+
+ def on_epoch_begin(self, epoch, logs=None):
+ self.batch_metrics = {
+ 'loss': 0.
+ }
+ if self.has_regularizers:
+ self.batch_metrics['reg_loss'] = 0.
+ self.samples_seen = 0.
+
+ def on_epoch_end(self, epoch, logs=None):
+ #for k in self.batch_metrics:
+ # k_log = k.split('_metric')[0]
+ # self.epoch_metrics.update(self.batch_metrics)
+ # TODO
+ pass
+
+ def on_batch_end(self, batch, logs=None):
+ for k in self.batch_metrics:
+ self.batch_metrics[k] = (self.samples_seen*self.batch_metrics[k] + logs[k]*self.batch_size) / (self.samples_seen+self.batch_size)
+ self.samples_seen += self.batch_size
+
+ def __getitem__(self, name):
+ return self.epoch_metrics[name]
+
+ def __repr__(self):
+ return str(self.epoch_metrics)
+
+ def __str__(self):
+ return str(self.epoch_metrics)
+
+
+class ModelCheckpoint(Callback):
+ """
+ Model Checkpoint to save model weights during training
+
+ save_checkpoint({
+ 'epoch': epoch + 1,
+ 'arch': args.arch,
+ 'state_dict': model.state_dict(),
+ 'best_prec1': best_prec1,
+ 'optimizer' : optimizer.state_dict(),
+ }
+ def save_checkpoint(state, is_best, filename='checkpoint.pth.tar'):
+ th.save(state, filename)
+ if is_best:
+ shutil.copyfile(filename, 'model_best.pth.tar')
+
+ """
+
+ def __init__(self,
+ directory,
+ filename='ckpt.pth.tar',
+ monitor='val_loss',
+ save_best_only=False,
+ save_weights_only=True,
+ max_save=-1,
+ verbose=0):
+ """
+ Model Checkpoint to save model weights during training
+
+ Arguments
+ ---------
+        directory : string
+            directory in which the checkpoint files will be saved
+        filename : string
+            filename of the checkpoint. It can be written as
+            'filename_{epoch}_{loss}' and those values will be
+            filled in before saving.
+        monitor : string in {'val_loss', 'loss'}
+            whether to monitor train or val loss
+        save_best_only : boolean
+            whether to only save if monitored value has improved
+        save_weights_only : boolean
+            whether to save entire model or just weights
+            NOTE: only `True` is supported at the moment
+ max_save : integer > 0 or -1
+ the max number of models to save. Older model checkpoints
+ will be overwritten if necessary. Set equal to -1 to have
+ no limit
+ verbose : integer in {0, 1}
+ verbosity
+ """
+ if directory.startswith('~'):
+ directory = os.path.expanduser(directory)
+ self.directory = directory
+ self.filename = filename
+ self.file = os.path.join(self.directory, self.filename)
+ self.monitor = monitor
+ self.save_best_only = save_best_only
+ self.save_weights_only = save_weights_only
+ self.max_save = max_save
+ self.verbose = verbose
+
+ if self.max_save > 0:
+ self.old_files = []
+
+ # mode = 'min' only supported
+ self.best_loss = float('inf')
+ super(ModelCheckpoint, self).__init__()
+
+ def save_checkpoint(self, epoch, file, is_best=False):
+ th.save({
+ 'epoch': epoch + 1,
+ #'arch': args.arch,
+ 'state_dict': self.trainer.model.state_dict(),
+ #'best_prec1': best_prec1,
+ 'optimizer' : self.trainer._optimizer.state_dict(),
+ #'loss':{},
+ # #'regularizers':{},
+ # #'constraints':{},
+ # #'initializers':{},
+ # #'metrics':{},
+ # #'val_loss':{}
+ }, file)
+ if is_best:
+ shutil.copyfile(file, 'model_best.pth.tar')
+
+ def on_epoch_end(self, epoch, logs=None):
+
+ file = self.file.format(epoch='%03i'%(epoch+1),
+ loss='%0.4f'%logs[self.monitor])
+ if self.save_best_only:
+ current_loss = logs.get(self.monitor)
+ if current_loss is None:
+ pass
+ else:
+ if current_loss < self.best_loss:
+ if self.verbose > 0:
+ print('\nEpoch %i: improved from %0.4f to %0.4f saving model to %s' %
+ (epoch+1, self.best_loss, current_loss, file))
+ self.best_loss = current_loss
+ #if self.save_weights_only:
+ #else:
+ self.save_checkpoint(epoch, file)
+ if self.max_save > 0:
+ if len(self.old_files) == self.max_save:
+ try:
+ os.remove(self.old_files[0])
+ except:
+ pass
+ self.old_files = self.old_files[1:]
+ self.old_files.append(file)
+ else:
+ if self.verbose > 0:
+ print('\nEpoch %i: saving model to %s' % (epoch+1, file))
+ self.save_checkpoint(epoch, file)
+ if self.max_save > 0:
+ if len(self.old_files) == self.max_save:
+ try:
+ os.remove(self.old_files[0])
+ except:
+ pass
+ self.old_files = self.old_files[1:]
+ self.old_files.append(file)
+
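+
+# A minimal usage sketch (illustrative, not part of the original API): build a
+# checkpoint callback and attach it through the CallbackContainer defined
+# above. `trainer` is a hypothetical object exposing `.model` and
+# `._optimizer`, as assumed by save_checkpoint().
+def _example_checkpoint_callbacks(trainer):
+    checkpoint = ModelCheckpoint(directory='~/ckpts',
+                                 filename='ckpt_{epoch}_{loss}.pth.tar',
+                                 monitor='val_loss',
+                                 save_best_only=True,
+                                 max_save=3,
+                                 verbose=1)
+    container = CallbackContainer([checkpoint])
+    container.set_trainer(trainer)
+    return container
+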
+
+class EarlyStopping(Callback):
+ """
+ Early Stopping to terminate training early under certain conditions
+ """
+
+ def __init__(self,
+ monitor='val_loss',
+ min_delta=0,
+ patience=5):
+ """
+ EarlyStopping callback to exit the training loop if training or
+ validation loss does not improve by a certain amount for a certain
+ number of epochs
+
+ Arguments
+ ---------
+ monitor : string in {'val_loss', 'loss'}
+ whether to monitor train or val loss
+ min_delta : float
+ minimum change in monitored value to qualify as improvement.
+ This number should be positive.
+ patience : integer
+            number of epochs to wait for improvement before terminating.
+            the counter is reset after each improvement
+ """
+ self.monitor = monitor
+ self.min_delta = min_delta
+ self.patience = patience
+ self.wait = 0
+        self.best_loss = 1e15
+ self.stopped_epoch = 0
+ super(EarlyStopping, self).__init__()
+
+ def on_train_begin(self, logs=None):
+ self.wait = 0
+ self.best_loss = 1e15
+
+ def on_epoch_end(self, epoch, logs=None):
+ current_loss = logs.get(self.monitor)
+ if current_loss is None:
+ pass
+ else:
+ if (current_loss - self.best_loss) < -self.min_delta:
+ self.best_loss = current_loss
+ self.wait = 1
+ else:
+ if self.wait >= self.patience:
+ self.stopped_epoch = epoch + 1
+ self.trainer._stop_training = True
+ self.wait += 1
+
+ def on_train_end(self, logs):
+ if self.stopped_epoch > 0:
+ print('\nTerminated Training for Early Stopping at Epoch %04i' %
+ (self.stopped_epoch))
+
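+
+# Illustrative configuration only: stop training if val_loss has not improved
+# by at least 1e-3 for 10 consecutive epochs.
+def _example_early_stopping():
+    return EarlyStopping(monitor='val_loss', min_delta=1e-3, patience=10)
+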
+
+class LRScheduler(Callback):
+ """
+ Schedule the learning rate according to some function of the
+ current epoch index, current learning rate, and current train/val loss.
+ """
+
+ def __init__(self, schedule):
+ """
+ LearningRateScheduler callback to adapt the learning rate
+ according to some function
+
+ Arguments
+ ---------
+ schedule : callable
+ should return a number of learning rates equal to the number
+ of optimizer.param_groups. It should take the epoch index and
+ **kwargs (or logs) as argument. **kwargs (or logs) will return
+ the epoch logs such as mean training and validation loss from
+ the epoch
+ """
+        if isinstance(schedule, dict):
+            self.schedule_dict = schedule
+            # keys below 1.0 are fractions of the total number of epochs;
+            # otherwise they are absolute epoch indices
+            if any([k < 1.0 for k in schedule.keys()]):
+                self.fractional_bounds = True
+            else:
+                self.fractional_bounds = False
+            schedule = self.schedule_from_dict
+        self.schedule = schedule
+ super(LRScheduler, self).__init__()
+
+    def schedule_from_dict(self, epoch, current_lrs=None, **logs):
+ for epoch_bound, learn_rate in self.schedule_dict.items():
+ # epoch_bound is in units of "epochs"
+ if not self.fractional_bounds:
+ if epoch_bound < epoch:
+ return learn_rate
+ # epoch_bound is in units of "cumulative percent of epochs"
+ else:
+ if epoch <= epoch_bound*logs['num_epoch']:
+ return learn_rate
+        warnings.warn('Check the keys in the schedule dict. Returning last value')
+ return learn_rate
+
+ def on_epoch_begin(self, epoch, logs=None):
+ current_lrs = [p['lr'] for p in self.trainer._optimizer.param_groups]
+ lr_list = self.schedule(epoch, current_lrs, **logs)
+ if not isinstance(lr_list, list):
+ lr_list = [lr_list]
+
+ for param_group, lr_change in zip(self.trainer._optimizer.param_groups, lr_list):
+ param_group['lr'] = lr_change
+
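+
+# A sketch of the callable form of `schedule` described in the docstring
+# (purely illustrative): halve every learning rate every 10 epochs. The
+# callable receives the epoch index, the current per-group lrs, and the
+# epoch logs as keyword arguments.
+def _example_lr_scheduler():
+    def halve_every_10(epoch, current_lrs, **logs):
+        if epoch > 0 and epoch % 10 == 0:
+            return [lr * 0.5 for lr in current_lrs]
+        return current_lrs
+    return LRScheduler(schedule=halve_every_10)
+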
+
+class ReduceLROnPlateau(Callback):
+ """
+ Reduce the learning rate if the train or validation loss plateaus
+ """
+
+ def __init__(self,
+ monitor='val_loss',
+ factor=0.1,
+ patience=10,
+ epsilon=0,
+ cooldown=0,
+ min_lr=0,
+ verbose=0):
+ """
+ Reduce the learning rate if the train or validation loss plateaus
+
+ Arguments
+ ---------
+ monitor : string in {'loss', 'val_loss'}
+ which metric to monitor
+        factor : float
+ factor to decrease learning rate by
+ patience : integer
+ number of epochs to wait for loss improvement before reducing lr
+ epsilon : float
+ how much improvement must be made to reset patience
+ cooldown : integer
+ number of epochs to cooldown after a lr reduction
+ min_lr : float
+ minimum value to ever let the learning rate decrease to
+ verbose : integer
+ whether to print reduction to console
+ """
+ self.monitor = monitor
+ if factor >= 1.0:
+ raise ValueError('ReduceLROnPlateau does not support a factor >= 1.0.')
+ self.factor = factor
+ self.min_lr = min_lr
+ self.epsilon = epsilon
+ self.patience = patience
+ self.verbose = verbose
+ self.cooldown = cooldown
+ self.cooldown_counter = 0
+ self.wait = 0
+ self.best_loss = 1e15
+ self._reset()
+ super(ReduceLROnPlateau, self).__init__()
+
+ def _reset(self):
+ """
+ Reset the wait and cooldown counters
+ """
+ self.monitor_op = lambda a, b: (a - b) < -self.epsilon
+ self.best_loss = 1e15
+ self.cooldown_counter = 0
+ self.wait = 0
+
+ def on_train_begin(self, logs=None):
+ self._reset()
+
+ def on_epoch_end(self, epoch, logs=None):
+ logs = logs or {}
+ logs['lr'] = [p['lr'] for p in self.trainer._optimizer.param_groups]
+ current_loss = logs.get(self.monitor)
+ if current_loss is None:
+ pass
+ else:
+ # if in cooldown phase
+ if self.cooldown_counter > 0:
+ self.cooldown_counter -= 1
+ self.wait = 0
+ # if loss improved, grab new loss and reset wait counter
+ if self.monitor_op(current_loss, self.best_loss):
+ self.best_loss = current_loss
+ self.wait = 0
+            # loss didn't improve, and not in cooldown phase
+ elif not (self.cooldown_counter > 0):
+ if self.wait >= self.patience:
+ for p in self.trainer._optimizer.param_groups:
+ old_lr = p['lr']
+ if old_lr > self.min_lr + 1e-4:
+ new_lr = old_lr * self.factor
+ new_lr = max(new_lr, self.min_lr)
+ if self.verbose > 0:
+ print('\nEpoch %05d: reducing lr from %0.3f to %0.3f' %
+ (epoch, old_lr, new_lr))
+ p['lr'] = new_lr
+ self.cooldown_counter = self.cooldown
+ self.wait = 0
+ self.wait += 1
+
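+
+# Illustrative configuration only: cut every lr by 10x if val_loss has not
+# improved by at least 1e-4 for 5 epochs, with a 2-epoch cooldown and a
+# floor of 1e-6.
+def _example_reduce_lr_on_plateau():
+    return ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=5,
+                             epsilon=1e-4, cooldown=2, min_lr=1e-6, verbose=1)
+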
+
+class CSVLogger(Callback):
+ """
+ Logs epoch-level metrics to a CSV file
+ """
+
+ def __init__(self,
+ file,
+ separator=',',
+ append=False):
+ """
+ Logs epoch-level metrics to a CSV file
+
+ Arguments
+ ---------
+ file : string
+ path to csv file
+ separator : string
+ delimiter for file
+        append : boolean
+ whether to append result to existing file or make new file
+ """
+ self.file = file
+ self.sep = separator
+ self.append = append
+ self.writer = None
+ self.keys = None
+ self.append_header = True
+ super(CSVLogger, self).__init__()
+
+ def on_train_begin(self, logs=None):
+ if self.append:
+ if os.path.exists(self.file):
+ with open(self.file) as f:
+ self.append_header = not bool(len(f.readline()))
+ self.csv_file = open(self.file, 'a')
+ else:
+ self.csv_file = open(self.file, 'w')
+
+ def on_epoch_end(self, epoch, logs=None):
+ logs = logs or {}
+ RK = {'num_batches', 'num_epoch'}
+
+ def handle_value(k):
+ is_zero_dim_tensor = isinstance(k, th.Tensor) and k.dim() == 0
+ if isinstance(k, Iterable) and not is_zero_dim_tensor:
+ return '"[%s]"' % (', '.join(map(str, k)))
+ else:
+ return k
+
+ if not self.writer:
+ self.keys = sorted(logs.keys())
+
+ class CustomDialect(csv.excel):
+ delimiter = self.sep
+
+ self.writer = csv.DictWriter(self.csv_file,
+ fieldnames=['epoch'] + [k for k in self.keys if k not in RK],
+ dialect=CustomDialect)
+ if self.append_header:
+ self.writer.writeheader()
+
+ row_dict = OrderedDict({'epoch': epoch})
+ row_dict.update((key, handle_value(logs[key])) for key in self.keys if key not in RK)
+ self.writer.writerow(row_dict)
+ self.csv_file.flush()
+
+ def on_train_end(self, logs=None):
+ self.csv_file.close()
+ self.writer = None
+
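+
+# Illustrative only: append the epoch-end logs to a local CSV file; the path
+# is a placeholder.
+def _example_csv_logger():
+    return CSVLogger(file='training_log.csv', separator=',', append=True)
+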
+
+class ExperimentLogger(Callback):
+
+ def __init__(self,
+ directory,
+ filename='Experiment_Logger.csv',
+ save_prefix='Model_',
+ separator=',',
+ append=True):
+
+ self.directory = directory
+ self.filename = filename
+ self.file = os.path.join(self.directory, self.filename)
+ self.save_prefix = save_prefix
+ self.sep = separator
+ self.append = append
+ self.writer = None
+ self.keys = None
+ self.append_header = True
+ super(ExperimentLogger, self).__init__()
+
+ def on_train_begin(self, logs=None):
+ if self.append:
+ open_type = 'a'
+ else:
+ open_type = 'w'
+
+ # if append is True, find whether the file already has header
+ num_lines = 0
+ if self.append:
+ if os.path.exists(self.file):
+ with open(self.file) as f:
+ for num_lines, l in enumerate(f):
+ pass
+ # if header exists, DONT append header again
+ with open(self.file) as f:
+ self.append_header = not bool(len(f.readline()))
+
+ model_idx = num_lines
+ REJECT_KEYS={'has_validation_data'}
+ MODEL_NAME = self.save_prefix + str(model_idx) # figure out how to get model name
+ self.row_dict = OrderedDict({'model': MODEL_NAME})
+ self.keys = sorted(logs.keys())
+ for k in self.keys:
+ if k not in REJECT_KEYS:
+ self.row_dict[k] = logs[k]
+
+ class CustomDialect(csv.excel):
+ delimiter = self.sep
+
+ with open(self.file, open_type) as csv_file:
+ writer = csv.DictWriter(csv_file,
+ fieldnames=['model'] + [k for k in self.keys if k not in REJECT_KEYS],
+ dialect=CustomDialect)
+ if self.append_header:
+ writer.writeheader()
+
+ writer.writerow(self.row_dict)
+ csv_file.flush()
+
+ def on_train_end(self, logs=None):
+ REJECT_KEYS={'has_validation_data'}
+ row_dict = self.row_dict
+
+        class CustomDialect(csv.excel):
+            delimiter = self.sep
+
+        temp_file = NamedTemporaryFile(delete=False, mode='w')
+ with open(self.file, 'r') as csv_file, temp_file:
+ reader = csv.DictReader(csv_file,
+ fieldnames=['model'] + [k for k in self.keys if k not in REJECT_KEYS],
+ dialect=CustomDialect)
+ writer = csv.DictWriter(temp_file,
+ fieldnames=['model'] + [k for k in self.keys if k not in REJECT_KEYS],
+ dialect=CustomDialect)
+ for row_idx, row in enumerate(reader):
+ if row_idx == 0:
+ # re-write header with on_train_end's metrics
+ pass
+ if row['model'] == self.row_dict['model']:
+ writer.writerow(row_dict)
+ else:
+ writer.writerow(row)
+ shutil.move(temp_file.name, self.file)
+
+
+class LambdaCallback(Callback):
+ """
+ Callback for creating simple, custom callbacks on-the-fly.
+ """
+ def __init__(self,
+ on_epoch_begin=None,
+ on_epoch_end=None,
+ on_batch_begin=None,
+ on_batch_end=None,
+ on_train_begin=None,
+ on_train_end=None,
+ **kwargs):
+ super(LambdaCallback, self).__init__()
+ self.__dict__.update(kwargs)
+ if on_epoch_begin is not None:
+ self.on_epoch_begin = on_epoch_begin
+ else:
+ self.on_epoch_begin = lambda epoch, logs: None
+ if on_epoch_end is not None:
+ self.on_epoch_end = on_epoch_end
+ else:
+ self.on_epoch_end = lambda epoch, logs: None
+ if on_batch_begin is not None:
+ self.on_batch_begin = on_batch_begin
+ else:
+ self.on_batch_begin = lambda batch, logs: None
+ if on_batch_end is not None:
+ self.on_batch_end = on_batch_end
+ else:
+ self.on_batch_end = lambda batch, logs: None
+ if on_train_begin is not None:
+ self.on_train_begin = on_train_begin
+ else:
+ self.on_train_begin = lambda logs: None
+ if on_train_end is not None:
+ self.on_train_end = on_train_end
+ else:
+ self.on_train_end = lambda logs: None
+
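+
+# A small sketch of LambdaCallback (illustrative): print a timestamp when
+# training starts and the final logs when it ends.
+def _example_lambda_callback():
+    return LambdaCallback(
+        on_train_begin=lambda logs: print('training started at', _get_current_time()),
+        on_train_end=lambda logs: print('training finished with', logs))
+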
diff --git a/torchsample/torchsample/constraints.py b/torchsample/torchsample/constraints.py
new file mode 100755
index 0000000..c487275
--- /dev/null
+++ b/torchsample/torchsample/constraints.py
@@ -0,0 +1,140 @@
+
+from __future__ import print_function
+from __future__ import absolute_import
+
+from fnmatch import fnmatch
+
+import torch as th
+from .callbacks import Callback
+
+
+class ConstraintContainer(object):
+
+ def __init__(self, constraints):
+ self.constraints = constraints
+ self.batch_constraints = [c for c in self.constraints if c.unit.upper() == 'BATCH']
+ self.epoch_constraints = [c for c in self.constraints if c.unit.upper() == 'EPOCH']
+
+ def register_constraints(self, model):
+ """
+ Grab pointers to the weights which will be modified by constraints so
+ that we dont have to search through the entire network using `apply`
+ each time
+ """
+ # get batch constraint pointers
+ self._batch_c_ptrs = {}
+ for c_idx, constraint in enumerate(self.batch_constraints):
+ self._batch_c_ptrs[c_idx] = []
+ for name, module in model.named_modules():
+ if fnmatch(name, constraint.module_filter) and hasattr(module, 'weight'):
+ self._batch_c_ptrs[c_idx].append(module)
+
+ # get epoch constraint pointers
+ self._epoch_c_ptrs = {}
+ for c_idx, constraint in enumerate(self.epoch_constraints):
+ self._epoch_c_ptrs[c_idx] = []
+ for name, module in model.named_modules():
+ if fnmatch(name, constraint.module_filter) and hasattr(module, 'weight'):
+ self._epoch_c_ptrs[c_idx].append(module)
+
+ def apply_batch_constraints(self, batch_idx):
+ for c_idx, modules in self._batch_c_ptrs.items():
+ if (batch_idx+1) % self.constraints[c_idx].frequency == 0:
+ for module in modules:
+ self.constraints[c_idx](module)
+
+ def apply_epoch_constraints(self, epoch_idx):
+ for c_idx, modules in self._epoch_c_ptrs.items():
+ if (epoch_idx+1) % self.constraints[c_idx].frequency == 0:
+ for module in modules:
+ self.constraints[c_idx](module)
+
+
+class ConstraintCallback(Callback):
+
+ def __init__(self, container):
+ self.container = container
+
+ def on_batch_end(self, batch_idx, logs):
+ self.container.apply_batch_constraints(batch_idx)
+
+ def on_epoch_end(self, epoch_idx, logs):
+ self.container.apply_epoch_constraints(epoch_idx)
+
+
+class Constraint(object):
+
+ def __call__(self):
+        raise NotImplementedError('Subclasses must implement this method')
+
+
+class UnitNorm(Constraint):
+ """
+ UnitNorm constraint.
+
+    Constrains the weights to have column-wise unit norm
+ """
+ def __init__(self,
+ frequency=1,
+ unit='batch',
+ module_filter='*'):
+
+ self.frequency = frequency
+ self.unit = unit
+ self.module_filter = module_filter
+
+ def __call__(self, module):
+ w = module.weight.data
+ module.weight.data = w.div(th.norm(w,2,0))
+
+
+class MaxNorm(Constraint):
+ """
+ MaxNorm weight constraint.
+
+ Constrains the weights incident to each hidden unit
+ to have a norm less than or equal to a desired value.
+
+ Any hidden unit vector with a norm less than the max norm
+    constraint will not be altered.
+ """
+
+ def __init__(self,
+ value,
+ axis=0,
+ frequency=1,
+ unit='batch',
+ module_filter='*'):
+ self.value = float(value)
+ self.axis = axis
+
+ self.frequency = frequency
+ self.unit = unit
+ self.module_filter = module_filter
+
+ def __call__(self, module):
+ w = module.weight.data
+ module.weight.data = th.renorm(w, 2, self.axis, self.value)
+
+
+class NonNeg(Constraint):
+ """
+ Constrains the weights to be non-negative.
+ """
+ def __init__(self,
+ frequency=1,
+ unit='batch',
+ module_filter='*'):
+ self.frequency = frequency
+ self.unit = unit
+ self.module_filter = module_filter
+
+ def __call__(self, module):
+ w = module.weight.data
+ module.weight.data = w.gt(0).float().mul(w)
+
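+
+# A brief sketch of combining these constraints through the container above;
+# `model` is a hypothetical torch.nn.Module and the module_filter strings are
+# fnmatch patterns against the names returned by model.named_modules().
+def _example_constraints(model):
+    container = ConstraintContainer([
+        UnitNorm(frequency=1, unit='batch', module_filter='*fc*'),
+        MaxNorm(value=3.0, axis=0, frequency=1, unit='epoch', module_filter='*conv*'),
+    ])
+    container.register_constraints(model)
+    return container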
+
+
+
+
+
diff --git a/torchsample/torchsample/datasets.py b/torchsample/torchsample/datasets.py
new file mode 100755
index 0000000..411f9a5
--- /dev/null
+++ b/torchsample/torchsample/datasets.py
@@ -0,0 +1,639 @@
+
+from __future__ import absolute_import
+from __future__ import print_function
+from __future__ import division
+
+import os
+import fnmatch
+
+import numpy as np
+import pandas as pd
+import PIL.Image as Image
+import nibabel
+
+import torch as th
+
+from . import transforms
+
+
+class BaseDataset(object):
+ """An abstract class representing a Dataset.
+
+ All other datasets should subclass it. All subclasses should override
+ ``__len__``, that provides the size of the dataset, and ``__getitem__``,
+ supporting integer indexing in range from 0 to len(self) exclusive.
+ """
+
+ def __len__(self):
+ return len(self.inputs) if not isinstance(self.inputs, (tuple,list)) else len(self.inputs[0])
+
+ def add_input_transform(self, transform, add_to_front=True, idx=None):
+ if idx is None:
+            idx = np.arange(self.num_inputs)
+ elif not is_tuple_or_list(idx):
+ idx = [idx]
+
+ if add_to_front:
+ for i in idx:
+ self.input_transform[i] = transforms.Compose([transform, self.input_transform[i]])
+ else:
+ for i in idx:
+ self.input_transform[i] = transforms.Compose([self.input_transform[i], transform])
+
+ def add_target_transform(self, transform, add_to_front=True, idx=None):
+ if idx is None:
+            idx = np.arange(self.num_targets)
+ elif not is_tuple_or_list(idx):
+ idx = [idx]
+
+ if add_to_front:
+ for i in idx:
+ self.target_transform[i] = transforms.Compose([transform, self.target_transform[i]])
+ else:
+ for i in idx:
+ self.target_transform[i] = transforms.Compose([self.target_transform[i], transform])
+
+ def add_co_transform(self, transform, add_to_front=True, idx=None):
+ if idx is None:
+            idx = np.arange(self.min_inputs_or_targets)
+ elif not is_tuple_or_list(idx):
+ idx = [idx]
+
+ if add_to_front:
+ for i in idx:
+ self.co_transform[i] = transforms.Compose([transform, self.co_transform[i]])
+ else:
+ for i in idx:
+ self.co_transform[i] = transforms.Compose([self.co_transform[i], transform])
+
+ def load(self, num_samples=None, load_range=None):
+ """
+ Load all data or a subset of the data into actual memory.
+ For instance, if the inputs are paths to image files, then this
+ function will actually load those images.
+
+ Arguments
+ ---------
+ num_samples : integer (optional)
+ number of samples to load. if None, will load all
+ load_range : numpy array of integers (optional)
+ the index range of images to load
+ e.g. np.arange(4) loads the first 4 inputs+targets
+ """
+ def _parse_shape(x):
+ if isinstance(x, (list,tuple)):
+ return (len(x),)
+ elif isinstance(x, th.Tensor):
+ return x.size()
+ else:
+ return (1,)
+
+ if num_samples is None and load_range is None:
+ num_samples = len(self)
+ load_range = np.arange(num_samples)
+ elif num_samples is None and load_range is not None:
+ num_samples = len(load_range)
+ elif num_samples is not None and load_range is None:
+ load_range = np.arange(num_samples)
+
+
+ if self.has_target:
+ for enum_idx, sample_idx in enumerate(load_range):
+ input_sample, target_sample = self.__getitem__(sample_idx)
+
+ if enum_idx == 0:
+ if self.num_inputs == 1:
+ _shape = [len(load_range)] + list(_parse_shape(input_sample))
+ inputs = np.empty(_shape)
+ else:
+ inputs = []
+ for i in range(self.num_inputs):
+ _shape = [len(load_range)] + list(_parse_shape(input_sample[i]))
+ inputs.append(np.empty(_shape))
+ #inputs = [np.empty((len(load_range), *_parse_shape(input_sample[i]))) for i in range(self.num_inputs)]
+
+ if self.num_targets == 1:
+ _shape = [len(load_range)] + list(_parse_shape(target_sample))
+ targets = np.empty(_shape)
+ #targets = np.empty((len(load_range), *_parse_shape(target_sample)))
+ else:
+ targets = []
+ for i in range(self.num_targets):
+ _shape = [len(load_range)] + list(_parse_shape(target_sample[i]))
+ targets.append(np.empty(_shape))
+ #targets = [np.empty((len(load_range), *_parse_shape(target_sample[i]))) for i in range(self.num_targets)]
+
+ if self.num_inputs == 1:
+ inputs[enum_idx] = input_sample
+ else:
+ for i in range(self.num_inputs):
+ inputs[i][enum_idx] = input_sample[i]
+
+ if self.num_targets == 1:
+ targets[enum_idx] = target_sample
+ else:
+ for i in range(self.num_targets):
+ targets[i][enum_idx] = target_sample[i]
+
+ return inputs, targets
+ else:
+ for enum_idx, sample_idx in enumerate(load_range):
+ input_sample = self.__getitem__(sample_idx)
+
+ if enum_idx == 0:
+ if self.num_inputs == 1:
+ _shape = [len(load_range)] + list(_parse_shape(input_sample))
+ inputs = np.empty(_shape)
+ #inputs = np.empty((len(load_range), *_parse_shape(input_sample)))
+ else:
+ inputs = []
+ for i in range(self.num_inputs):
+ _shape = [len(load_range)] + list(_parse_shape(input_sample[i]))
+ inputs.append(np.empty(_shape))
+ #inputs = [np.empty((len(load_range), *_parse_shape(input_sample[i]))) for i in range(self.num_inputs)]
+
+ if self.num_inputs == 1:
+ inputs[enum_idx] = input_sample
+ else:
+ for i in range(self.num_inputs):
+ inputs[i][enum_idx] = input_sample[i]
+
+ return inputs
+
+ def fit_transforms(self):
+ """
+ Make a single pass through the entire dataset in order to fit
+ any parameters of the transforms which require the entire dataset.
+ e.g. StandardScaler() requires mean and std for the entire dataset.
+
+        If you don't call this fit function, then transforms which require properties
+ of the entire dataset will just work at the batch level.
+ e.g. StandardScaler() will normalize each batch by the specific batch mean/std
+ """
+ it_fit = hasattr(self.input_transform, 'update_fit')
+ tt_fit = hasattr(self.target_transform, 'update_fit')
+ ct_fit = hasattr(self.co_transform, 'update_fit')
+ if it_fit or tt_fit or ct_fit:
+ for sample_idx in range(len(self)):
+ if hasattr(self, 'input_loader'):
+ x = self.input_loader(self.inputs[sample_idx])
+ else:
+ x = self.inputs[sample_idx]
+ if it_fit:
+ self.input_transform.update_fit(x)
+ if self.has_target:
+ if hasattr(self, 'target_loader'):
+ y = self.target_loader(self.targets[sample_idx])
+ else:
+ y = self.targets[sample_idx]
+ if tt_fit:
+ self.target_transform.update_fit(y)
+ if ct_fit:
+ self.co_transform.update_fit(x,y)
+
+
+def _process_array_argument(x):
+ if not is_tuple_or_list(x):
+ x = [x]
+ return x
+
+
+class TensorDataset(BaseDataset):
+
+ def __init__(self,
+ inputs,
+ targets=None,
+ input_transform=None,
+ target_transform=None,
+ co_transform=None):
+ """
+ Dataset class for loading in-memory data.
+
+ Arguments
+ ---------
+ inputs: numpy array
+
+ targets : numpy array
+
+ input_transform : class with __call__ function implemented
+ transform to apply to input sample individually
+
+ target_transform : class with __call__ function implemented
+ transform to apply to target sample individually
+
+ co_transform : class with __call__ function implemented
+ transform to apply to both input and target sample simultaneously
+
+ """
+ self.inputs = _process_array_argument(inputs)
+ self.num_inputs = len(self.inputs)
+ self.input_return_processor = _return_first_element_of_list if self.num_inputs==1 else _pass_through
+
+ if targets is None:
+ self.has_target = False
+ else:
+ self.targets = _process_array_argument(targets)
+ self.num_targets = len(self.targets)
+ self.target_return_processor = _return_first_element_of_list if self.num_targets==1 else _pass_through
+ self.min_inputs_or_targets = min(self.num_inputs, self.num_targets)
+ self.has_target = True
+
+ self.input_transform = _process_transform_argument(input_transform, self.num_inputs)
+ if self.has_target:
+ self.target_transform = _process_transform_argument(target_transform, self.num_targets)
+ self.co_transform = _process_co_transform_argument(co_transform, self.num_inputs, self.num_targets)
+
+ def __getitem__(self, index):
+ """
+ Index the dataset and return the input + target
+ """
+ input_sample = [self.input_transform[i](self.inputs[i][index]) for i in range(self.num_inputs)]
+
+ if self.has_target:
+ target_sample = [self.target_transform[i](self.targets[i][index]) for i in range(self.num_targets)]
+ #for i in range(self.min_inputs_or_targets):
+ # input_sample[i], target_sample[i] = self.co_transform[i](input_sample[i], target_sample[i])
+
+ return self.input_return_processor(input_sample), self.target_return_processor(target_sample)
+ else:
+ return self.input_return_processor(input_sample)
+
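+
+# A minimal sketch of TensorDataset on in-memory tensors; shapes and the
+# input_transform are illustrative only.
+def _example_tensor_dataset():
+    x = th.randn(100, 3, 32, 32)
+    y = (th.rand(100) * 10).long()
+    dataset = TensorDataset(x, y, input_transform=lambda t: t.float())
+    first_input, first_target = dataset[0]
+    return dataset, first_input, first_target
+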
+
+def default_file_reader(x):
+ def pil_loader(path):
+ return Image.open(path).convert('RGB')
+ def npy_loader(path):
+ return np.load(path)
+ def nifti_loader(path):
+ return nibabel.load(path).get_data()
+ if isinstance(x, str):
+ if x.endswith('.npy'):
+ x = npy_loader(x)
+        elif x.endswith('.nii.gz'):
+ x = nifti_loader(x)
+ else:
+ try:
+ x = pil_loader(x)
+ except:
+ raise ValueError('File Format is not supported')
+ #else:
+ #raise ValueError('x should be string, but got %s' % type(x))
+ return x
+
+def is_tuple_or_list(x):
+ return isinstance(x, (tuple,list))
+
+def _process_transform_argument(tform, num_inputs):
+ tform = tform if tform is not None else _pass_through
+ if is_tuple_or_list(tform):
+ if len(tform) != num_inputs:
+ raise Exception('If transform is list, must provide one transform for each input')
+ tform = [t if t is not None else _pass_through for t in tform]
+ else:
+ tform = [tform] * num_inputs
+ return tform
+
+def _process_co_transform_argument(tform, num_inputs, num_targets):
+ tform = tform if tform is not None else _multi_arg_pass_through
+ if is_tuple_or_list(tform):
+ if len(tform) != num_inputs:
+ raise Exception('If transform is list, must provide one transform for each input')
+ tform = [t if t is not None else _multi_arg_pass_through for t in tform]
+ else:
+ tform = [tform] * min(num_inputs, num_targets)
+ return tform
+
+def _process_csv_argument(csv):
+ if isinstance(csv, str):
+ df = pd.read_csv(csv)
+ elif isinstance(csv, pd.DataFrame):
+ df = csv
+ else:
+ raise ValueError('csv argument must be string or dataframe')
+ return df
+
+def _select_dataframe_columns(df, cols):
+ if isinstance(cols[0], str):
+ inputs = df.loc[:,cols].values
+ elif isinstance(cols[0], int):
+ inputs = df.iloc[:,cols].values
+ else:
+ raise ValueError('Provided columns should be string column names or integer column indices')
+ return inputs
+
+def _process_cols_argument(cols):
+ if isinstance(cols, tuple):
+ cols = list(cols)
+ return cols
+
+def _return_first_element_of_list(x):
+ return x[0]
+
+def _pass_through(x):
+ return x
+
+def _multi_arg_pass_through(*x):
+ return x
+
+
+class CSVDataset(BaseDataset):
+
+ def __init__(self,
+ csv,
+ input_cols=[0],
+ target_cols=[1],
+ input_transform=None,
+ target_transform=None,
+ co_transform=None):
+ """
+ Initialize a Dataset from a CSV file/dataframe. This does NOT
+ actually load the data into memory if the CSV contains filepaths.
+
+ Arguments
+ ---------
+ csv : string or pandas.DataFrame
+ if string, should be a path to a .csv file which
+ can be loaded as a pandas dataframe
+
+        input_cols : int/list of ints, or string/list of strings
+            which columns to use as input arrays.
+            If int(s), should be column indices
+            If str(s), should be column names
+
+        target_cols : int/list of ints, or string/list of strings
+            which columns to use as target arrays.
+            If int(s), should be column indices
+            If str(s), should be column names
+
+ input_transform : class which implements a __call__ method
+            transform(s) to apply to inputs during runtime loading
+
+        target_transform : class which implements a __call__ method
+ transform(s) to apply to targets during runtime loading
+
+ co_transform : class which implements a __call__ method
+ transform(s) to apply to both inputs and targets simultaneously
+ during runtime loading
+ """
+ self.input_cols = _process_cols_argument(input_cols)
+ self.target_cols = _process_cols_argument(target_cols)
+
+ self.df = _process_csv_argument(csv)
+
+ self.inputs = _select_dataframe_columns(self.df, input_cols)
+ self.num_inputs = self.inputs.shape[1]
+ self.input_return_processor = _return_first_element_of_list if self.num_inputs==1 else _pass_through
+
+ if target_cols is None:
+ self.num_targets = 0
+ self.has_target = False
+ else:
+ self.targets = _select_dataframe_columns(self.df, target_cols)
+ self.num_targets = self.targets.shape[1]
+ self.target_return_processor = _return_first_element_of_list if self.num_targets==1 else _pass_through
+ self.has_target = True
+ self.min_inputs_or_targets = min(self.num_inputs, self.num_targets)
+
+ self.input_loader = default_file_reader
+ self.target_loader = default_file_reader
+
+ self.input_transform = _process_transform_argument(input_transform, self.num_inputs)
+ if self.has_target:
+ self.target_transform = _process_transform_argument(target_transform, self.num_targets)
+ self.co_transform = _process_co_transform_argument(co_transform, self.num_inputs, self.num_targets)
+
+ def __getitem__(self, index):
+ """
+ Index the dataset and return the input + target
+ """
+ input_sample = [self.input_transform[i](self.input_loader(self.inputs[index, i])) for i in range(self.num_inputs)]
+
+ if self.has_target:
+ target_sample = [self.target_transform[i](self.target_loader(self.targets[index, i])) for i in range(self.num_targets)]
+ for i in range(self.min_inputs_or_targets):
+                input_sample[i], target_sample[i] = self.co_transform[i](input_sample[i], target_sample[i])
+
+ return self.input_return_processor(input_sample), self.target_return_processor(target_sample)
+ else:
+ return self.input_return_processor(input_sample)
+
+ def split_by_column(self, col):
+ """
+ Split this dataset object into multiple dataset objects based on
+ the unique factors of the given column. The number of returned
+ datasets will be equal to the number of unique values in the given
+ column. The transforms and original dataframe will all be transferred
+ to the new datasets
+
+ Useful for splitting a dataset into train/val/test datasets.
+
+ Arguments
+ ---------
+ col : integer or string
+ which column to split the data on.
+ if int, should be column index
+ if str, should be column name
+
+ Returns
+ -------
+ - list of new datasets with transforms copied
+ """
+ if isinstance(col, int):
+ split_vals = self.df.iloc[:,col].values.flatten()
+
+ new_df_list = []
+ for unique_split_val in np.unique(split_vals):
+ new_df = self.df[:][self.df.iloc[:,col]==unique_split_val]
+ new_df_list.append(new_df)
+ elif isinstance(col, str):
+ split_vals = self.df.loc[:,col].values.flatten()
+
+ new_df_list = []
+ for unique_split_val in np.unique(split_vals):
+ new_df = self.df[:][self.df.loc[:,col]==unique_split_val]
+ new_df_list.append(new_df)
+ else:
+ raise ValueError('col argument not valid - must be column name or index')
+
+ new_datasets = []
+ for new_df in new_df_list:
+ new_dataset = self.copy(new_df)
+ new_datasets.append(new_dataset)
+
+ return new_datasets
+
+ def train_test_split(self, train_size):
+ if train_size < 1:
+ train_size = int(train_size * len(self))
+
+ train_indices = np.random.choice(len(self), train_size, replace=False)
+ test_indices = np.array([i for i in range(len(self)) if i not in train_indices])
+
+ train_df = self.df.iloc[train_indices,:]
+ test_df = self.df.iloc[test_indices,:]
+
+ train_dataset = self.copy(train_df)
+ test_dataset = self.copy(test_df)
+
+ return train_dataset, test_dataset
+
+ def copy(self, df=None):
+ if df is None:
+ df = self.df
+
+ return CSVDataset(df,
+ input_cols=self.input_cols,
+ target_cols=self.target_cols,
+ input_transform=self.input_transform,
+ target_transform=self.target_transform,
+ co_transform=self.co_transform)
+
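+
+# A sketch of CSVDataset on an in-memory dataframe of (hypothetical) image
+# paths plus an integer label column; nothing is loaded from disk until a
+# sample is indexed.
+def _example_csv_dataset():
+    df = pd.DataFrame({'image_path': ['img_0.png', 'img_1.png'],
+                       'label': [0, 1]})
+    dataset = CSVDataset(df, input_cols=['image_path'], target_cols=['label'])
+    per_label = dataset.split_by_column('label')
+    train, test = dataset.train_test_split(train_size=0.5)
+    return per_label, train, test
+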
+
+class FolderDataset(BaseDataset):
+
+ def __init__(self,
+ root,
+ class_mode='label',
+ input_regex='*',
+ target_regex=None,
+ input_transform=None,
+ target_transform=None,
+ co_transform=None,
+ input_loader='npy'):
+ """
+ Dataset class for loading out-of-memory data.
+
+ Arguments
+ ---------
+ root : string
+ path to main directory
+
+ class_mode : string in `{'label', 'image'}`
+ type of target sample to look for and return
+ `label` = return class folder as target
+ `image` = return another image as target as found by 'target_regex'
+ NOTE: if class_mode == 'image', you must give an
+ input and target regex and the input/target images should
+ be in a folder together with no other images in that folder
+
+ input_regex : string (default is any valid image file)
+ regular expression to find input images
+ e.g. if all your inputs have the word 'input',
+ you'd enter something like input_regex='*input*'
+
+        target_regex : string (default is None)
+            regular expression to find target images if class_mode == 'image'
+            e.g. if all your targets have the word 'segment',
+            you'd enter something like target_regex='*segment*'
+
+ transform : transform class
+ transform to apply to input sample individually
+
+ target_transform : transform class
+ transform to apply to target sample individually
+
+        input_loader : string in `{'npy', 'pil', 'nifti'}` or callable
+ defines how to load samples from file
+ if a function is provided, it should take in a file path
+ as input and return the loaded sample.
+
+ """
+ self.input_loader = default_file_reader
+ self.target_loader = default_file_reader if class_mode == 'image' else lambda x: x
+
+ root = os.path.expanduser(root)
+
+ classes, class_to_idx = _find_classes(root)
+ inputs, targets = _finds_inputs_and_targets(root, class_mode,
+ class_to_idx, input_regex, target_regex)
+
+ if len(inputs) == 0:
+ raise(RuntimeError('Found 0 images in subfolders of: %s' % root))
+ else:
+ print('Found %i images' % len(inputs))
+
+ self.root = os.path.expanduser(root)
+ self.inputs = inputs
+ self.targets = targets
+ self.classes = classes
+ self.class_to_idx = class_to_idx
+
+ self.input_transform = input_transform if input_transform is not None else lambda x: x
+ if isinstance(input_transform, (tuple,list)):
+ self.input_transform = transforms.Compose(self.input_transform)
+ self.target_transform = target_transform if target_transform is not None else lambda x: x
+ if isinstance(target_transform, (tuple,list)):
+ self.target_transform = transforms.Compose(self.target_transform)
+ self.co_transform = co_transform if co_transform is not None else lambda x,y: (x,y)
+ if isinstance(co_transform, (tuple,list)):
+ self.co_transform = transforms.Compose(self.co_transform)
+
+ self.class_mode = class_mode
+
+ def get_full_paths(self):
+ return [os.path.join(self.root, i) for i in self.inputs]
+
+ def __getitem__(self, index):
+ input_sample = self.inputs[index]
+ input_sample = self.input_loader(input_sample)
+ input_sample = self.input_transform(input_sample)
+
+ target_sample = self.targets[index]
+ target_sample = self.target_loader(target_sample)
+ target_sample = self.target_transform(target_sample)
+
+ input_sample, target_sample = self.co_transform(input_sample, target_sample)
+
+ return input_sample, target_sample
+
+ def __len__(self):
+ return len(self.inputs)
+
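+
+# A sketch of the two class_mode variants described in the docstring; the
+# directory layout ('~/images/<class_name>/...') is hypothetical.
+def _example_folder_datasets():
+    # one sub-folder per class; the folder name becomes the integer label
+    labelled = FolderDataset(root='~/images', class_mode='label',
+                             input_regex='*.png')
+    # paired input/target images living together in each sub-folder
+    segmentation = FolderDataset(root='~/images', class_mode='image',
+                                 input_regex='*input*',
+                                 target_regex='*segment*')
+    return labelled, segmentation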
+
+
+def _find_classes(dir):
+ classes = [d for d in os.listdir(dir) if os.path.isdir(os.path.join(dir, d))]
+ classes.sort()
+ class_to_idx = {classes[i]: i for i in range(len(classes))}
+ return classes, class_to_idx
+
+def _is_image_file(filename):
+ IMG_EXTENSIONS = [
+ '.jpg', '.JPG', '.jpeg', '.JPEG',
+ '.png', '.PNG', '.ppm', '.PPM', '.bmp', '.BMP',
+ '.nii.gz', '.npy'
+ ]
+ return any(filename.endswith(extension) for extension in IMG_EXTENSIONS)
+
+def _finds_inputs_and_targets(directory, class_mode, class_to_idx=None,
+ input_regex=None, target_regex=None, ):
+ """
+ Map a dataset from a root folder
+ """
+ if class_mode == 'image':
+ if not input_regex and not target_regex:
+ raise ValueError('must give input_regex and target_regex if'+
+ ' class_mode==image')
+ inputs = []
+ targets = []
+ for subdir in sorted(os.listdir(directory)):
+ d = os.path.join(directory, subdir)
+ if not os.path.isdir(d):
+ continue
+
+ for root, _, fnames in sorted(os.walk(d)):
+ for fname in fnames:
+ if _is_image_file(fname):
+ if fnmatch.fnmatch(fname, input_regex):
+ path = os.path.join(root, fname)
+ inputs.append(path)
+ if class_mode == 'label':
+ targets.append(class_to_idx[subdir])
+ if class_mode == 'image' and \
+ fnmatch.fnmatch(fname, target_regex):
+ path = os.path.join(root, fname)
+ targets.append(path)
+ if class_mode is None:
+ return inputs
+ else:
+ return inputs, targets
diff --git a/torchsample/torchsample/functions/__init__.py b/torchsample/torchsample/functions/__init__.py
new file mode 100755
index 0000000..a1b5da8
--- /dev/null
+++ b/torchsample/torchsample/functions/__init__.py
@@ -0,0 +1,2 @@
+
+from .affine import *
\ No newline at end of file
diff --git a/torchsample/torchsample/functions/affine.py b/torchsample/torchsample/functions/affine.py
new file mode 100755
index 0000000..dcc0a10
--- /dev/null
+++ b/torchsample/torchsample/functions/affine.py
@@ -0,0 +1,360 @@
+
+import torch
+import torch.nn.functional as F
+from torch.autograd import Variable
+
+from ..utils import th_iterproduct, th_flatten
+
+
+def F_affine2d(x, matrix, center=True):
+ """
+ 2D Affine image transform on torch.autograd.Variable
+ """
+ if matrix.dim() == 2:
+ matrix = matrix.view(-1,2,3)
+
+ A_batch = matrix[:,:,:2]
+ if A_batch.size(0) != x.size(0):
+ A_batch = A_batch.repeat(x.size(0),1,1)
+ b_batch = matrix[:,:,2].unsqueeze(1)
+
+ # make a meshgrid of normal coordinates
+ _coords = th_iterproduct(x.size(1),x.size(2))
+ coords = Variable(_coords.unsqueeze(0).repeat(x.size(0),1,1).float(),
+ requires_grad=False)
+ if center:
+ # shift the coordinates so center is the origin
+ coords[:,:,0] = coords[:,:,0] - (x.size(1) / 2. + 0.5)
+ coords[:,:,1] = coords[:,:,1] - (x.size(2) / 2. + 0.5)
+
+ # apply the coordinate transformation
+ new_coords = coords.bmm(A_batch.transpose(1,2)) + b_batch.expand_as(coords)
+
+ if center:
+ # shift the coordinates back so origin is origin
+ new_coords[:,:,0] = new_coords[:,:,0] + (x.size(1) / 2. + 0.5)
+ new_coords[:,:,1] = new_coords[:,:,1] + (x.size(2) / 2. + 0.5)
+
+ # map new coordinates using bilinear interpolation
+ x_transformed = F_bilinear_interp2d(x, new_coords)
+
+ return x_transformed
+
+
+def F_bilinear_interp2d(input, coords):
+ """
+ bilinear interpolation of 2d torch.autograd.Variable
+ """
+ x = torch.clamp(coords[:,:,0], 0, input.size(1)-2)
+ x0 = x.floor()
+ x1 = x0 + 1
+ y = torch.clamp(coords[:,:,1], 0, input.size(2)-2)
+ y0 = y.floor()
+ y1 = y0 + 1
+
+ stride = torch.LongTensor(input.stride())
+ x0_ix = x0.mul(stride[1]).long()
+ x1_ix = x1.mul(stride[1]).long()
+ y0_ix = y0.mul(stride[2]).long()
+ y1_ix = y1.mul(stride[2]).long()
+
+    input_flat = input.contiguous().view(input.size(0),-1)
+
+ vals_00 = input_flat.gather(1, x0_ix.add(y0_ix).detach())
+ vals_10 = input_flat.gather(1, x1_ix.add(y0_ix).detach())
+ vals_01 = input_flat.gather(1, x0_ix.add(y1_ix).detach())
+ vals_11 = input_flat.gather(1, x1_ix.add(y1_ix).detach())
+
+ xd = x - x0
+ yd = y - y0
+ xm = 1 - xd
+ ym = 1 - yd
+
+ x_mapped = (vals_00.mul(xm).mul(ym) +
+ vals_10.mul(xd).mul(ym) +
+ vals_01.mul(xm).mul(yd) +
+ vals_11.mul(xd).mul(yd))
+
+ return x_mapped.view_as(input)
+
+
+def F_batch_affine2d(x, matrix, center=True):
+ """
+
+ x : torch.Tensor
+ shape = (Samples, C, H, W)
+ NOTE: Assume C is always equal to 1!
+ matrix : torch.Tensor
+ shape = (Samples, 6) or (Samples, 2, 3)
+
+ Example
+ -------
+ >>> x = Variable(torch.zeros(3,1,10,10))
+ >>> x[:,:,3:7,3:7] = 1
+ >>> m1 = torch.FloatTensor([[1.2,0,0],[0,1.2,0]])
+ >>> m2 = torch.FloatTensor([[0.8,0,0],[0,0.8,0]])
+ >>> m3 = torch.FloatTensor([[1.0,0,3],[0,1.0,3]])
+ >>> matrix = Variable(torch.stack([m1,m2,m3]))
+ >>> xx = F_batch_affine2d(x,matrix)
+ """
+ if matrix.dim() == 2:
+ matrix = matrix.view(-1,2,3)
+
+ A_batch = matrix[:,:,:2]
+ b_batch = matrix[:,:,2].unsqueeze(1)
+
+ # make a meshgrid of normal coordinates
+ _coords = th_iterproduct(x.size(2),x.size(3))
+ coords = Variable(_coords.unsqueeze(0).repeat(x.size(0),1,1).float(),
+ requires_grad=False)
+
+ if center:
+ # shift the coordinates so center is the origin
+ coords[:,:,0] = coords[:,:,0] - (x.size(2) / 2. + 0.5)
+ coords[:,:,1] = coords[:,:,1] - (x.size(3) / 2. + 0.5)
+
+ # apply the coordinate transformation
+ new_coords = coords.bmm(A_batch.transpose(1,2)) + b_batch.expand_as(coords)
+
+ if center:
+ # shift the coordinates back so origin is origin
+ new_coords[:,:,0] = new_coords[:,:,0] + (x.size(2) / 2. + 0.5)
+ new_coords[:,:,1] = new_coords[:,:,1] + (x.size(3) / 2. + 0.5)
+
+ # map new coordinates using bilinear interpolation
+ x_transformed = F_batch_bilinear_interp2d(x, new_coords)
+
+ return x_transformed
+
+
+def F_batch_bilinear_interp2d(input, coords):
+ """
+    input : torch.Tensor
+        size = (N,C,H,W)
+    coords : torch.Tensor
+        size = (N,H*W,2)
+ """
+ x = torch.clamp(coords[:,:,0], 0, input.size(2)-2)
+ x0 = x.floor()
+ x1 = x0 + 1
+ y = torch.clamp(coords[:,:,1], 0, input.size(3)-2)
+ y0 = y.floor()
+ y1 = y0 + 1
+
+ stride = torch.LongTensor(input.stride())
+ x0_ix = x0.mul(stride[2]).long()
+ x1_ix = x1.mul(stride[2]).long()
+ y0_ix = y0.mul(stride[3]).long()
+ y1_ix = y1.mul(stride[3]).long()
+
+    input_flat = input.contiguous().view(input.size(0),-1)
+
+ vals_00 = input_flat.gather(1, x0_ix.add(y0_ix).detach())
+ vals_10 = input_flat.gather(1, x1_ix.add(y0_ix).detach())
+ vals_01 = input_flat.gather(1, x0_ix.add(y1_ix).detach())
+ vals_11 = input_flat.gather(1, x1_ix.add(y1_ix).detach())
+
+ xd = x - x0
+ yd = y - y0
+ xm = 1 - xd
+ ym = 1 - yd
+
+ x_mapped = (vals_00.mul(xm).mul(ym) +
+ vals_10.mul(xd).mul(ym) +
+ vals_01.mul(xm).mul(yd) +
+ vals_11.mul(xd).mul(yd))
+
+ return x_mapped.view_as(input)
+
+
+def F_affine3d(x, matrix, center=True):
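+    """
+    3D Affine image transform on torch.autograd.Variable
+
+    Example
+    -------
+    A minimal sketch (the shapes are assumptions inferred from the code:
+    ``x`` is a single volume of shape (1, D, H, W) and ``matrix`` is a
+    3x4 affine):
+
+    >>> x = Variable(torch.zeros(1,8,8,8))
+    >>> x[:,2:6,2:6,2:6] = 1
+    >>> matrix = Variable(torch.FloatTensor([[1.,0,0,2.],[0,1.,0,2.],[0,0,1.,2.]]))
+    >>> xx = F_affine3d(x, matrix)
+    """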
+ A = matrix[:3,:3]
+ b = matrix[:3,3]
+
+ # make a meshgrid of normal coordinates
+ coords = Variable(th_iterproduct(x.size(1),x.size(2),x.size(3)).float(),
+ requires_grad=False)
+
+ if center:
+ # shift the coordinates so center is the origin
+ coords[:,0] = coords[:,0] - (x.size(1) / 2. + 0.5)
+ coords[:,1] = coords[:,1] - (x.size(2) / 2. + 0.5)
+ coords[:,2] = coords[:,2] - (x.size(3) / 2. + 0.5)
+
+
+ # apply the coordinate transformation
+ new_coords = F.linear(coords, A, b)
+
+ if center:
+ # shift the coordinates back so origin is origin
+ new_coords[:,0] = new_coords[:,0] + (x.size(1) / 2. + 0.5)
+ new_coords[:,1] = new_coords[:,1] + (x.size(2) / 2. + 0.5)
+ new_coords[:,2] = new_coords[:,2] + (x.size(3) / 2. + 0.5)
+
+ # map new coordinates using bilinear interpolation
+ x_transformed = F_trilinear_interp3d(x, new_coords)
+
+ return x_transformed
+
+
+def F_trilinear_interp3d(input, coords):
+ """
+ trilinear interpolation of 3D image
+ """
+ # take clamp then floor/ceil of x coords
+ x = torch.clamp(coords[:,0], 0, input.size(1)-2)
+ x0 = x.floor()
+ x1 = x0 + 1
+ # take clamp then floor/ceil of y coords
+ y = torch.clamp(coords[:,1], 0, input.size(2)-2)
+ y0 = y.floor()
+ y1 = y0 + 1
+ # take clamp then floor/ceil of z coords
+ z = torch.clamp(coords[:,2], 0, input.size(3)-2)
+ z0 = z.floor()
+ z1 = z0 + 1
+
+ stride = torch.LongTensor(input.stride())[1:]
+ x0_ix = x0.mul(stride[0]).long()
+ x1_ix = x1.mul(stride[0]).long()
+ y0_ix = y0.mul(stride[1]).long()
+ y1_ix = y1.mul(stride[1]).long()
+ z0_ix = z0.mul(stride[2]).long()
+ z1_ix = z1.mul(stride[2]).long()
+
+ input_flat = th_flatten(input)
+
+ vals_000 = input_flat[x0_ix.add(y0_ix).add(z0_ix).detach()]
+ vals_100 = input_flat[x1_ix.add(y0_ix).add(z0_ix).detach()]
+ vals_010 = input_flat[x0_ix.add(y1_ix).add(z0_ix).detach()]
+ vals_001 = input_flat[x0_ix.add(y0_ix).add(z1_ix).detach()]
+ vals_101 = input_flat[x1_ix.add(y0_ix).add(z1_ix).detach()]
+ vals_011 = input_flat[x0_ix.add(y1_ix).add(z1_ix).detach()]
+ vals_110 = input_flat[x1_ix.add(y1_ix).add(z0_ix).detach()]
+ vals_111 = input_flat[x1_ix.add(y1_ix).add(z1_ix).detach()]
+
+ xd = x - x0
+ yd = y - y0
+ zd = z - z0
+ xm = 1 - xd
+ ym = 1 - yd
+ zm = 1 - zd
+
+ x_mapped = (vals_000.mul(xm).mul(ym).mul(zm) +
+ vals_100.mul(xd).mul(ym).mul(zm) +
+ vals_010.mul(xm).mul(yd).mul(zm) +
+ vals_001.mul(xm).mul(ym).mul(zd) +
+ vals_101.mul(xd).mul(ym).mul(zd) +
+ vals_011.mul(xm).mul(yd).mul(zd) +
+ vals_110.mul(xd).mul(yd).mul(zm) +
+ vals_111.mul(xd).mul(yd).mul(zd))
+
+ return x_mapped.view_as(input)
+
+
+def F_batch_affine3d(x, matrix, center=True):
+ """
+
+    x : torch.Tensor
+        shape = (Samples, C, D, H, W)
+        NOTE: Assume C is always equal to 1!
+    matrix : torch.Tensor
+        shape = (Samples, 12) or (Samples, 3, 4)
+
+ Example
+ -------
+ >>> x = Variable(torch.zeros(3,1,10,10,10))
+ >>> x[:,:,3:7,3:7,3:7] = 1
+ >>> m1 = torch.FloatTensor([[1.2,0,0,0],[0,1.2,0,0],[0,0,1.2,0]])
+ >>> m2 = torch.FloatTensor([[0.8,0,0,0],[0,0.8,0,0],[0,0,0.8,0]])
+ >>> m3 = torch.FloatTensor([[1.0,0,0,3],[0,1.0,0,3],[0,0,1.0,3]])
+ >>> matrix = Variable(torch.stack([m1,m2,m3]))
+ >>> xx = F_batch_affine3d(x,matrix)
+ """
+ if matrix.dim() == 2:
+ matrix = matrix.view(-1,3,4)
+
+ A_batch = matrix[:,:3,:3]
+ b_batch = matrix[:,:3,3].unsqueeze(1)
+
+ # make a meshgrid of normal coordinates
+ _coords = th_iterproduct(x.size(2),x.size(3),x.size(4))
+ coords = Variable(_coords.unsqueeze(0).repeat(x.size(0),1,1).float(),
+ requires_grad=False)
+
+ if center:
+ # shift the coordinates so center is the origin
+ coords[:,:,0] = coords[:,:,0] - (x.size(2) / 2. + 0.5)
+ coords[:,:,1] = coords[:,:,1] - (x.size(3) / 2. + 0.5)
+ coords[:,:,2] = coords[:,:,2] - (x.size(4) / 2. + 0.5)
+
+ # apply the coordinate transformation
+ new_coords = coords.bmm(A_batch.transpose(1,2)) + b_batch.expand_as(coords)
+
+ if center:
+ # shift the coordinates back so origin is origin
+ new_coords[:,:,0] = new_coords[:,:,0] + (x.size(2) / 2. + 0.5)
+ new_coords[:,:,1] = new_coords[:,:,1] + (x.size(3) / 2. + 0.5)
+ new_coords[:,:,2] = new_coords[:,:,2] + (x.size(4) / 2. + 0.5)
+
+ # map new coordinates using bilinear interpolation
+ x_transformed = F_batch_trilinear_interp3d(x, new_coords)
+
+ return x_transformed
+
+
+def F_batch_trilinear_interp3d(input, coords):
+ """
+    input : torch.Tensor
+        size = (N,C,D,H,W)
+    coords : torch.Tensor
+        size = (N,D*H*W,3)
+ """
+ x = torch.clamp(coords[:,:,0], 0, input.size(2)-2)
+ x0 = x.floor()
+ x1 = x0 + 1
+ y = torch.clamp(coords[:,:,1], 0, input.size(3)-2)
+ y0 = y.floor()
+ y1 = y0 + 1
+ z = torch.clamp(coords[:,:,2], 0, input.size(4)-2)
+ z0 = z.floor()
+ z1 = z0 + 1
+
+ stride = torch.LongTensor(input.stride())
+ x0_ix = x0.mul(stride[2]).long()
+ x1_ix = x1.mul(stride[2]).long()
+ y0_ix = y0.mul(stride[3]).long()
+ y1_ix = y1.mul(stride[3]).long()
+ z0_ix = z0.mul(stride[4]).long()
+ z1_ix = z1.mul(stride[4]).long()
+
+ input_flat = input.contiguous().view(input.size(0),-1)
+
+ vals_000 = input_flat.gather(1,x0_ix.add(y0_ix).add(z0_ix).detach())
+ vals_100 = input_flat.gather(1,x1_ix.add(y0_ix).add(z0_ix).detach())
+ vals_010 = input_flat.gather(1,x0_ix.add(y1_ix).add(z0_ix).detach())
+ vals_001 = input_flat.gather(1,x0_ix.add(y0_ix).add(z1_ix).detach())
+ vals_101 = input_flat.gather(1,x1_ix.add(y0_ix).add(z1_ix).detach())
+ vals_011 = input_flat.gather(1,x0_ix.add(y1_ix).add(z1_ix).detach())
+ vals_110 = input_flat.gather(1,x1_ix.add(y1_ix).add(z0_ix).detach())
+ vals_111 = input_flat.gather(1,x1_ix.add(y1_ix).add(z1_ix).detach())
+
+ xd = x - x0
+ yd = y - y0
+ zd = z - z0
+ xm = 1 - xd
+ ym = 1 - yd
+ zm = 1 - zd
+
+ x_mapped = (vals_000.mul(xm).mul(ym).mul(zm) +
+ vals_100.mul(xd).mul(ym).mul(zm) +
+ vals_010.mul(xm).mul(yd).mul(zm) +
+ vals_001.mul(xm).mul(ym).mul(zd) +
+ vals_101.mul(xd).mul(ym).mul(zd) +
+ vals_011.mul(xm).mul(yd).mul(zd) +
+ vals_110.mul(xd).mul(yd).mul(zm) +
+ vals_111.mul(xd).mul(yd).mul(zd))
+
+ return x_mapped.view_as(input)
+
+
diff --git a/torchsample/torchsample/initializers.py b/torchsample/torchsample/initializers.py
new file mode 100755
index 0000000..e066569
--- /dev/null
+++ b/torchsample/torchsample/initializers.py
@@ -0,0 +1,262 @@
+"""
+Classes to initialize module weights
+"""
+
+from fnmatch import fnmatch
+
+import torch.nn.init
+
+
+def _validate_initializer_string(init):
+ dir_f = dir(torch.nn.init)
+ loss_fns = [d.lower() for d in dir_f]
+ if isinstance(init, str):
+ try:
+ str_idx = loss_fns.index(init.lower())
+        except ValueError:
+            raise ValueError('Invalid initializer string input - must match a torch.nn.init function.')
+        return getattr(torch.nn.init, dir_f[str_idx])
+ elif callable(init):
+ return init
+ else:
+        raise ValueError('Invalid initializer input')
+
+
+class InitializerContainer(object):
+
+ def __init__(self, initializers):
+ self._initializers = initializers
+
+ def apply(self, model):
+ for initializer in self._initializers:
+ model.apply(initializer)
+
+
+class Initializer(object):
+
+ def __call__(self, module):
+ raise NotImplementedError('Initializer must implement this method')
+
+
+class GeneralInitializer(Initializer):
+
+    def __init__(self, initializer, bias=False, bias_only=False, module_filter='*', **kwargs):
+        self._initializer = _validate_initializer_string(initializer)
+        self.bias = bias
+        self.bias_only = bias_only
+        self.module_filter = module_filter
+        self.kwargs = kwargs
+
+ def __call__(self, module):
+ classname = module.__class__.__name__
+ if fnmatch(classname, self.module_filter) and hasattr(module, 'weight'):
+ if self.bias_only:
+ self._initializer(module.bias.data, **self.kwargs)
+ else:
+ self._initializer(module.weight.data, **self.kwargs)
+ if self.bias:
+ self._initializer(module.bias.data, **self.kwargs)
+
+
+class Normal(Initializer):
+
+ def __init__(self, mean=0.0, std=0.02, bias=False,
+ bias_only=False, module_filter='*'):
+ self.mean = mean
+ self.std = std
+
+ self.bias = bias
+ self.bias_only = bias_only
+ self.module_filter = module_filter
+
+ super(Normal, self).__init__()
+
+ def __call__(self, module):
+ classname = module.__class__.__name__
+ if fnmatch(classname, self.module_filter) and hasattr(module, 'weight'):
+ if self.bias_only:
+ torch.nn.init.normal(module.bias.data, mean=self.mean, std=self.std)
+ else:
+ torch.nn.init.normal(module.weight.data, mean=self.mean, std=self.std)
+ if self.bias:
+ torch.nn.init.normal(module.bias.data, mean=self.mean, std=self.std)
+
+
+class Uniform(Initializer):
+
+ def __init__(self, a=0, b=1, bias=False, bias_only=False, module_filter='*'):
+ self.a = a
+ self.b = b
+
+ self.bias = bias
+ self.bias_only = bias_only
+ self.module_filter = module_filter
+
+ super(Uniform, self).__init__()
+
+ def __call__(self, module):
+ classname = module.__class__.__name__
+ if fnmatch(classname, self.module_filter) and hasattr(module, 'weight'):
+ if self.bias_only:
+ torch.nn.init.uniform(module.bias.data, a=self.a, b=self.b)
+ else:
+ torch.nn.init.uniform(module.weight.data, a=self.a, b=self.b)
+ if self.bias:
+ torch.nn.init.uniform(module.bias.data, a=self.a, b=self.b)
+
+
+class ConstantInitializer(Initializer):
+
+ def __init__(self, value, bias=False, bias_only=False, module_filter='*'):
+ self.value = value
+
+ self.bias = bias
+ self.bias_only = bias_only
+ self.module_filter = module_filter
+
+ super(ConstantInitializer, self).__init__()
+
+    def __call__(self, module):
+ classname = module.__class__.__name__
+ if fnmatch(classname, self.module_filter) and hasattr(module, 'weight'):
+ if self.bias_only:
+ torch.nn.init.constant(module.bias.data, val=self.value)
+ else:
+ torch.nn.init.constant(module.weight.data, val=self.value)
+ if self.bias:
+ torch.nn.init.constant(module.bias.data, val=self.value)
+
+
+class XavierUniform(Initializer):
+
+ def __init__(self, gain=1, bias=False, bias_only=False, module_filter='*'):
+ self.gain = gain
+
+ self.bias = bias
+ self.bias_only = bias_only
+ self.module_filter = module_filter
+
+ super(XavierUniform, self).__init__()
+
+ def __call__(self, module):
+ classname = module.__class__.__name__
+ if fnmatch(classname, self.module_filter) and hasattr(module, 'weight'):
+ if self.bias_only:
+ torch.nn.init.xavier_uniform(module.bias.data, gain=self.gain)
+ else:
+ torch.nn.init.xavier_uniform(module.weight.data, gain=self.gain)
+ if self.bias:
+ torch.nn.init.xavier_uniform(module.bias.data, gain=self.gain)
+
+
+class XavierNormal(Initializer):
+
+ def __init__(self, gain=1, bias=False, bias_only=False, module_filter='*'):
+ self.gain = gain
+
+ self.bias = bias
+ self.bias_only = bias_only
+ self.module_filter = module_filter
+
+ super(XavierNormal, self).__init__()
+
+ def __call__(self, module):
+ classname = module.__class__.__name__
+ if fnmatch(classname, self.module_filter) and hasattr(module, 'weight'):
+ if self.bias_only:
+ torch.nn.init.xavier_normal(module.bias.data, gain=self.gain)
+ else:
+ torch.nn.init.xavier_normal(module.weight.data, gain=self.gain)
+ if self.bias:
+ torch.nn.init.xavier_normal(module.bias.data, gain=self.gain)
+
+
+class KaimingUniform(Initializer):
+
+ def __init__(self, a=0, mode='fan_in', bias=False, bias_only=False, module_filter='*'):
+ self.a = a
+ self.mode = mode
+
+ self.bias = bias
+ self.bias_only = bias_only
+ self.module_filter = module_filter
+
+ super(KaimingUniform, self).__init__()
+
+ def __call__(self, module):
+ classname = module.__class__.__name__
+ if fnmatch(classname, self.module_filter) and hasattr(module, 'weight'):
+ if self.bias_only:
+ torch.nn.init.kaiming_uniform(module.bias.data, a=self.a, mode=self.mode)
+ else:
+ torch.nn.init.kaiming_uniform(module.weight.data, a=self.a, mode=self.mode)
+ if self.bias:
+ torch.nn.init.kaiming_uniform(module.bias.data, a=self.a, mode=self.mode)
+
+
+class KaimingNormal(Initializer):
+
+ def __init__(self, a=0, mode='fan_in', bias=False, bias_only=False, module_filter='*'):
+ self.a = a
+ self.mode = mode
+
+ self.bias = bias
+ self.bias_only = bias_only
+ self.module_filter = module_filter
+
+ super(KaimingNormal, self).__init__()
+
+ def __call__(self, module):
+ classname = module.__class__.__name__
+ if fnmatch(classname, self.module_filter) and hasattr(module, 'weight'):
+ if self.bias_only:
+ torch.nn.init.kaiming_normal(module.bias.data, a=self.a, mode=self.mode)
+ else:
+ torch.nn.init.kaiming_normal(module.weight.data, a=self.a, mode=self.mode)
+ if self.bias:
+ torch.nn.init.kaiming_normal(module.bias.data, a=self.a, mode=self.mode)
+
+
+class Orthogonal(Initializer):
+
+ def __init__(self, gain=1, bias=False, bias_only=False, module_filter='*'):
+ self.gain = gain
+
+ self.bias = bias
+ self.bias_only = bias_only
+ self.module_filter = module_filter
+
+ super(Orthogonal, self).__init__()
+
+ def __call__(self, module):
+ classname = module.__class__.__name__
+ if fnmatch(classname, self.module_filter) and hasattr(module, 'weight'):
+ if self.bias_only:
+ torch.nn.init.orthogonal(module.bias.data, gain=self.gain)
+ else:
+ torch.nn.init.orthogonal(module.weight.data, gain=self.gain)
+ if self.bias:
+ torch.nn.init.orthogonal(module.bias.data, gain=self.gain)
+
+
+class Sparse(Initializer):
+
+ def __init__(self, sparsity, std=0.01, bias=False, bias_only=False, module_filter='*'):
+ self.sparsity = sparsity
+ self.std = std
+
+ self.bias = bias
+ self.bias_only = bias_only
+ self.module_filter = module_filter
+
+ super(Sparse, self).__init__()
+
+ def __call__(self, module):
+ classname = module.__class__.__name__
+ if fnmatch(classname, self.module_filter) and hasattr(module, 'weight'):
+ if self.bias_only:
+ torch.nn.init.sparse(module.bias.data, sparsity=self.sparsity, std=self.std)
+ else:
+ torch.nn.init.sparse(module.weight.data, sparsity=self.sparsity, std=self.std)
+ if self.bias:
+ torch.nn.init.sparse(module.bias.data, sparsity=self.sparsity, std=self.std)
+
+
+
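+# Usage sketch (an assumption, not part of the library): initialize the weights
+# of every Conv* module of an existing model `net` from N(0, 0.02):
+#
+#   init_container = InitializerContainer([Normal(std=0.02, module_filter='Conv*')])
+#   init_container.apply(net)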
diff --git a/torchsample/torchsample/metrics.py b/torchsample/torchsample/metrics.py
new file mode 100755
index 0000000..376b24a
--- /dev/null
+++ b/torchsample/torchsample/metrics.py
@@ -0,0 +1,139 @@
+
+from __future__ import absolute_import
+from __future__ import print_function
+
+import torch as th
+
+from .utils import th_matrixcorr
+
+from .callbacks import Callback
+
+class MetricContainer(object):
+
+
+ def __init__(self, metrics, prefix=''):
+ self.metrics = metrics
+ self.helper = None
+ self.prefix = prefix
+
+ def set_helper(self, helper):
+ self.helper = helper
+
+ def reset(self):
+ for metric in self.metrics:
+ metric.reset()
+
+ def __call__(self, output_batch, target_batch):
+ logs = {}
+ for metric in self.metrics:
+ logs[self.prefix+metric._name] = self.helper.calculate_loss(output_batch,
+ target_batch,
+ metric)
+ return logs
+
+class Metric(object):
+
+ def __call__(self, y_pred, y_true):
+ raise NotImplementedError('Custom Metrics must implement this function')
+
+ def reset(self):
+ raise NotImplementedError('Custom Metrics must implement this function')
+
+
+class MetricCallback(Callback):
+
+ def __init__(self, container):
+ self.container = container
+ def on_epoch_begin(self, epoch_idx, logs):
+ self.container.reset()
+
+class CategoricalAccuracy(Metric):
+
+ def __init__(self, top_k=1):
+ self.top_k = top_k
+ self.correct_count = 0
+ self.total_count = 0
+
+ self._name = 'acc_metric'
+
+ def reset(self):
+ self.correct_count = 0
+ self.total_count = 0
+
+ def __call__(self, y_pred, y_true):
+ top_k = y_pred.topk(self.top_k,1)[1]
+ true_k = y_true.view(len(y_true),1).expand_as(top_k)
+ self.correct_count += top_k.eq(true_k).float().sum().data[0]
+ self.total_count += len(y_pred)
+ accuracy = 100. * float(self.correct_count) / float(self.total_count)
+ return accuracy
+
+
+class BinaryAccuracy(Metric):
+
+ def __init__(self):
+ self.correct_count = 0
+ self.total_count = 0
+
+ self._name = 'acc_metric'
+
+ def reset(self):
+ self.correct_count = 0
+ self.total_count = 0
+
+ def __call__(self, y_pred, y_true):
+ y_pred_round = y_pred.round().long()
+ self.correct_count += y_pred_round.eq(y_true).float().sum().data[0]
+ self.total_count += len(y_pred)
+ accuracy = 100. * float(self.correct_count) / float(self.total_count)
+ return accuracy
+
+
+class ProjectionCorrelation(Metric):
+
+ def __init__(self):
+ self.corr_sum = 0.
+ self.total_count = 0.
+
+ self._name = 'corr_metric'
+
+ def reset(self):
+ self.corr_sum = 0.
+ self.total_count = 0.
+
+ def __call__(self, y_pred, y_true=None):
+ """
+ y_pred should be two projections
+ """
+ covar_mat = th.abs(th_matrixcorr(y_pred[0].data, y_pred[1].data))
+ self.corr_sum += th.trace(covar_mat)
+ self.total_count += covar_mat.size(0)
+ return self.corr_sum / self.total_count
+
+
+class ProjectionAntiCorrelation(Metric):
+
+ def __init__(self):
+ self.anticorr_sum = 0.
+ self.total_count = 0.
+
+ self._name = 'anticorr_metric'
+
+ def reset(self):
+ self.anticorr_sum = 0.
+ self.total_count = 0.
+
+ def __call__(self, y_pred, y_true=None):
+ """
+ y_pred should be two projections
+ """
+ covar_mat = th.abs(th_matrixcorr(y_pred[0].data, y_pred[1].data))
+ upper_sum = th.sum(th.triu(covar_mat,1))
+ lower_sum = th.sum(th.tril(covar_mat,-1))
+ self.anticorr_sum += upper_sum
+ self.anticorr_sum += lower_sum
+ self.total_count += covar_mat.size(0)*(covar_mat.size(1) - 1)
+ return self.anticorr_sum / self.total_count
+
+
+
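+# Usage sketch (an assumption, not part of the library): track running top-1
+# accuracy over batches of class scores and integer labels (both Variables):
+#
+#   metric = CategoricalAccuracy(top_k=1)
+#   running_acc = metric(scores_batch, labels_batch)   # percentage seen so far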
diff --git a/torchsample/torchsample/modules/__init__.py b/torchsample/torchsample/modules/__init__.py
new file mode 100755
index 0000000..368b07b
--- /dev/null
+++ b/torchsample/torchsample/modules/__init__.py
@@ -0,0 +1,3 @@
+from __future__ import absolute_import
+
+from .module_trainer import ModuleTrainer
diff --git a/torchsample/torchsample/modules/_utils.py b/torchsample/torchsample/modules/_utils.py
new file mode 100755
index 0000000..3821672
--- /dev/null
+++ b/torchsample/torchsample/modules/_utils.py
@@ -0,0 +1,123 @@
+
+import datetime
+import warnings
+
+try:
+ from inspect import signature
+except:
+ warnings.warn('inspect.signature not available... '
+ 'you should upgrade to Python 3.x')
+
+import torch.nn.functional as F
+import torch.optim as optim
+
+from ..metrics import Metric, CategoricalAccuracy, BinaryAccuracy
+from ..initializers import GeneralInitializer
+
+def _add_regularizer_to_loss_fn(loss_fn,
+ regularizer_container):
+ def new_loss_fn(output_batch, target_batch):
+ return loss_fn(output_batch, target_batch) + regularizer_container.get_value()
+ return new_loss_fn
+
+def _is_iterable(x):
+ return isinstance(x, (tuple, list))
+def _is_tuple_or_list(x):
+ return isinstance(x, (tuple, list))
+
+def _parse_num_inputs_and_targets_from_loader(loader):
+ """ NOT IMPLEMENTED """
+ #batch = next(iter(loader))
+ num_inputs = loader.dataset.num_inputs
+ num_targets = loader.dataset.num_targets
+ return num_inputs, num_targets
+
+def _parse_num_inputs_and_targets(inputs, targets=None):
+ if isinstance(inputs, (list, tuple)):
+ num_inputs = len(inputs)
+ else:
+ num_inputs = 1
+ if targets is not None:
+ if isinstance(targets, (list, tuple)):
+ num_targets = len(targets)
+ else:
+ num_targets = 1
+ else:
+ num_targets = 0
+ return num_inputs, num_targets
+
+def _standardize_user_data(inputs, targets=None):
+ if not isinstance(inputs, (list,tuple)):
+ inputs = [inputs]
+ if targets is not None:
+ if not isinstance(targets, (list,tuple)):
+ targets = [targets]
+ return inputs, targets
+ else:
+ return inputs
+
+def _validate_metric_input(metric):
+ if isinstance(metric, str):
+ if metric.upper() == 'CATEGORICAL_ACCURACY' or metric.upper() == 'ACCURACY':
+ return CategoricalAccuracy()
+ elif metric.upper() == 'BINARY_ACCURACY':
+ return BinaryAccuracy()
+ else:
+            raise ValueError("Invalid metric string input - must be 'categorical_accuracy'/'accuracy' or 'binary_accuracy'.")
+ elif isinstance(metric, Metric):
+ return metric
+ else:
+ raise ValueError('Invalid metric input')
+
+def _validate_loss_input(loss):
+ dir_f = dir(F)
+ loss_fns = [d.lower() for d in dir_f]
+ if isinstance(loss, str):
+ if loss.lower() == 'unconstrained':
+ return lambda x: x
+ elif loss.lower() == 'unconstrained_sum':
+ return lambda x: x.sum()
+ elif loss.lower() == 'unconstrained_mean':
+ return lambda x: x.mean()
+ else:
+ try:
+ str_idx = loss_fns.index(loss.lower())
+ except:
+ raise ValueError('Invalid loss string input - must match pytorch function.')
+ return getattr(F, dir(F)[str_idx])
+ elif callable(loss):
+ return loss
+ else:
+ raise ValueError('Invalid loss input')
+
+def _validate_optimizer_input(optimizer):
+ dir_optim = dir(optim)
+ opts = [o.lower() for o in dir_optim]
+ if isinstance(optimizer, str):
+ try:
+ str_idx = opts.index(optimizer.lower())
+ except:
+ raise ValueError('Invalid optimizer string input - must match pytorch function.')
+ return getattr(optim, dir_optim[str_idx])
+ elif hasattr(optimizer, 'step') and hasattr(optimizer, 'zero_grad'):
+ return optimizer
+ else:
+ raise ValueError('Invalid optimizer input')
+
+def _validate_initializer_input(initializer):
+ if isinstance(initializer, str):
+ try:
+ initializer = GeneralInitializer(initializer)
+ except:
+ raise ValueError('Invalid initializer string input - must match pytorch function.')
+ return initializer
+ elif callable(initializer):
+ return initializer
+ else:
+        raise ValueError('Invalid initializer input')
+
+def _get_current_time():
+ return datetime.datetime.now().strftime("%B %d, %Y - %I:%M%p")
+
+def _nb_function_args(fn):
+ return len(signature(fn).parameters)
\ No newline at end of file
diff --git a/torchsample/torchsample/modules/module_trainer.py b/torchsample/torchsample/modules/module_trainer.py
new file mode 100755
index 0000000..a6aa91b
--- /dev/null
+++ b/torchsample/torchsample/modules/module_trainer.py
@@ -0,0 +1,824 @@
+"""
+ModuleTrainer for high-level training of PyTorch models
+"""
+from __future__ import print_function
+from __future__ import absolute_import
+
+import warnings
+import functools
+import math
+from collections import OrderedDict
+
+import torch as th
+import torch.nn as nn
+from torch.autograd import Variable
+
+# local imports
+from ._utils import (_validate_loss_input, _validate_metric_input,
+ _validate_optimizer_input, _validate_initializer_input,
+ _standardize_user_data, _parse_num_inputs_and_targets,
+ _is_tuple_or_list, _parse_num_inputs_and_targets_from_loader,
+ _add_regularizer_to_loss_fn)
+
+from ..callbacks import CallbackContainer, History, TQDM
+from ..regularizers import RegularizerContainer, RegularizerCallback
+from ..initializers import InitializerContainer
+from ..constraints import ConstraintContainer, ConstraintCallback
+from ..metrics import MetricContainer, MetricCallback
+
+from tqdm import tqdm
+
+
+class ModuleTrainer(object):
+
+ def __init__(self, model):
+ """
+        ModuleTrainer for high-level training of PyTorch models
+
+ Major Parts
+ -----------
+ - optimizer(s)
+ - loss(es)
+ - regularizers
+ - initializers
+ - constraints
+ - metrics
+ - callbacks
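+
+        Example
+        -------
+        A minimal sketch (assumes an existing ``nn.Module`` called ``net``
+        and in-memory tensors ``x_train`` / ``y_train``):
+
+        >>> trainer = ModuleTrainer(net)
+        >>> trainer.compile(optimizer='adam', loss='nll_loss', metrics='accuracy')
+        >>> trainer.fit(x_train, y_train, num_epoch=10, batch_size=32)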
+ """
+ if not isinstance(model, nn.Module):
+ raise ValueError('model argument must inherit from torch.nn.Module')
+ self.model = model
+
+ # callbacks
+ self._callbacks = []
+
+ # regularizers
+ self._regularizers = []
+ self._has_regularizers = False
+
+ # initializers
+ self._initializers = []
+
+ # constraints
+ self._constraints = []
+ self._has_constraints = False
+
+ # metrics
+ self._metrics = []
+ self._has_metrics = False
+
+ # transforms
+ self._transforms = []
+ self._has_transforms = False
+
+ # losses
+ self._loss = None
+ self._loss_fn = None
+
+ # other properties
+ self._stop_training = False
+
+ def set_loss(self, loss):
+ self._loss = loss
+ if _is_tuple_or_list(loss):
+ self._loss_fn = [_validate_loss_input(l) for l in loss]
+ else:
+ self._loss_fn = _validate_loss_input(loss)
+
+ def set_optimizer(self, optimizer, **kwargs):
+ if type(optimizer) is type or isinstance(optimizer, str):
+            if 'parameters' in kwargs:
+                parameters = kwargs.pop('parameters')
+            else:
+                parameters = self.model.parameters()
+
+ optimizer = _validate_optimizer_input(optimizer)
+ self._optimizer = optimizer(parameters, **kwargs)
+ else:
+ self._optimizer = optimizer
+
+ def set_callbacks(self, callbacks):
+ if not _is_tuple_or_list(callbacks):
+ callbacks = [callbacks]
+ self._callbacks = [self.history] + callbacks
+
+ def set_regularizers(self, regularizers):
+ regularizers = [regularizers] if not _is_tuple_or_list(regularizers) else regularizers
+ self._regularizers = regularizers
+ self._has_regularizers = True
+
+ def set_initializers(self, initializers):
+ initializers = [initializers] if not _is_tuple_or_list(initializers) else initializers
+ initializers = [_validate_initializer_input(it) for it in initializers]
+ self._initializers = initializers
+
+ def set_constraints(self, constraints):
+ constraints = [constraints] if not _is_tuple_or_list(constraints) else constraints
+ self._has_constraints = True
+ self._constraints = constraints
+
+ def set_metrics(self, metrics):
+ metrics = [metrics] if not _is_tuple_or_list(metrics) else metrics
+ metrics = [_validate_metric_input(m) for m in metrics]
+ self._has_metrics = True
+ self._metrics = metrics
+
+ def set_transforms(self, transforms):
+        if not _is_tuple_or_list(transforms):
+            transforms = (transforms, lambda x: x, lambda x,y: (x,y))
+        if len(transforms) == 1:
+            transforms = (transforms[0], lambda x: x, lambda x,y: (x,y))
+        elif len(transforms) == 2:
+            transforms = (transforms[0], transforms[1], lambda x,y: (x,y))
+
+ self._has_input_transform = transforms[0] is not None
+ self._has_target_transform = transforms[1] is not None
+ self._has_co_transform = transforms[2] is not None
+
+ self._has_transforms = True
+ self._transforms = transforms
+
+ def compile(self,
+ optimizer,
+ loss,
+ callbacks=None,
+ regularizers=None,
+ initializers=None,
+ constraints=None,
+ metrics=None,
+ transforms=None):
+ self.set_optimizer(optimizer)
+ self.set_loss(loss)
+
+ if regularizers is not None:
+ self.set_regularizers(regularizers)
+ self.regularizer_container = RegularizerContainer(self._regularizers)
+ self.regularizer_container.register_forward_hooks(self.model)
+ else:
+ self._has_regularizers = False
+
+ self.history = History(self)
+ self._callbacks = [self.history]
+ if callbacks is not None:
+ self.set_callbacks(callbacks)
+
+
+ if initializers is not None:
+ self.set_initializers(initializers)
+ self.initializer_container = InitializerContainer(self._initializers)
+ # actually initialize the model
+ self.initializer_container.apply(self.model)
+
+ if constraints is not None:
+ self.set_constraints(constraints)
+ self.constraint_container = ConstraintContainer(self._constraints)
+ self.constraint_container.register_constraints(self.model)
+ else:
+ self._has_constraints = False
+
+ if metrics is not None:
+ self.set_metrics(metrics)
+ self.metric_container = MetricContainer(self._metrics)
+ else:
+ self._has_metrics = False
+
+ if transforms is not None:
+ self.set_transforms(transforms)
+ else:
+ self._has_transforms = False
+
+ def fit(self,
+ inputs,
+ targets=None,
+ val_data=None,
+ initial_epoch=0,
+ num_epoch=100,
+ batch_size=32,
+ shuffle=False,
+ cuda_device=-1,
+ verbose=1):
+ """
+ Fit a model on in-memory tensors using ModuleTrainer
+ """
+ self.model.train(True)
+ # ----------------------------------------------------------------------
+ num_inputs, num_targets = _parse_num_inputs_and_targets(inputs, targets)
+ len_inputs = len(inputs) if not _is_tuple_or_list(inputs) else len(inputs[0])
+
+ if val_data is not None:
+ if num_targets == 0:
+ val_data = (val_data, None)
+ if len(val_data) != 2:
+ raise Exception('val_data must be a 2-tuple')
+ num_val_inputs, num_val_targets = _parse_num_inputs_and_targets(val_data[0], val_data[1])
+ if (num_inputs != num_val_inputs) or (num_targets != num_val_targets):
+ raise Exception('The number of input/target tensors must be the same for training and validation data\n'
+ 'Num Input tensors: (%i train, %i val), Num Target tensors: (%i train, %i val)' % (num_inputs, num_val_inputs, num_targets, num_val_targets) )
+ val_inputs, val_targets = val_data
+ has_val_data = val_data is not None
+ num_batches = int(math.ceil(len_inputs / batch_size))
+ # ----------------------------------------------------------------------
+
+ fit_helper = _get_helper(self, num_inputs, num_targets)
+ fit_loss_fn = fit_helper.get_partial_loss_fn(self._loss_fn)
+ fit_forward_fn = fit_helper.get_partial_forward_fn(self.model)
+
+ with TQDM() as pbar:
+ tmp_callbacks = []
+ if verbose > 0:
+ tmp_callbacks.append(pbar)
+ if self._has_regularizers:
+ tmp_callbacks.append(RegularizerCallback(self.regularizer_container))
+ fit_loss_fn = _add_regularizer_to_loss_fn(fit_loss_fn,
+ self.regularizer_container)
+ if self._has_constraints:
+ tmp_callbacks.append(ConstraintCallback(self.constraint_container))
+ if self._has_metrics:
+ self.metric_container.set_helper(fit_helper)
+ tmp_callbacks.append(MetricCallback(self.metric_container))
+
+ callback_container = CallbackContainer(self._callbacks+tmp_callbacks)
+ callback_container.set_trainer(self)
+ callback_container.on_train_begin({'batch_size': batch_size,
+ 'num_batches': num_batches,
+ 'num_epoch': num_epoch,
+ 'has_val_data': has_val_data,
+ 'has_regularizers': self._has_regularizers,
+ 'has_metrics': self._has_metrics})
+
+ for epoch_idx in range(initial_epoch,num_epoch):
+ epoch_logs = {}
+ callback_container.on_epoch_begin(epoch_idx, epoch_logs)
+
+ if shuffle:
+ inputs, targets = fit_helper.shuffle_arrays(inputs, targets)
+
+ batch_logs = {}
+ for batch_idx in range(num_batches):
+ callback_container.on_batch_begin(batch_idx, batch_logs)
+
+ input_batch, target_batch = fit_helper.grab_batch(batch_idx, batch_size, inputs, targets)
+ if cuda_device >= 0:
+ input_batch, target_batch = fit_helper.move_to_cuda(cuda_device, input_batch, target_batch)
+ if self._has_transforms:
+ input_batch, target_batch = fit_helper.apply_transforms(self._transforms, input_batch, target_batch)
+
+ # ---------------------------------------------
+ self._optimizer.zero_grad()
+ output_batch = fit_forward_fn(input_batch)
+ loss = fit_loss_fn(output_batch, target_batch)
+ loss.backward()
+ self._optimizer.step()
+ # ---------------------------------------------
+
+ if self._has_regularizers:
+ batch_logs['reg_loss'] = self.regularizer_container.current_value
+ if self._has_metrics:
+ metrics_logs = self.metric_container(output_batch, target_batch)
+ batch_logs.update(metrics_logs)
+
+ batch_logs['loss'] = loss.data[0]
+ callback_container.on_batch_end(batch_idx, batch_logs)
+
+ if has_val_data:
+ val_epoch_logs = self.evaluate(val_inputs,
+ val_targets,
+ batch_size=batch_size,
+ cuda_device=cuda_device,
+ verbose=verbose)
+ epoch_logs.update(val_epoch_logs)
+ epoch_logs.update(batch_logs)
+ # TODO how to fix this?
+ # self.history.batch_metrics.update(val_epoch_logs)
+
+ callback_container.on_epoch_end(epoch_idx, self.history.epoch_metrics)
+
+ if self._stop_training:
+ break
+ self.model.train(mode=False)
+
+ def fit_loader(self,
+ loader,
+ val_loader=None,
+ initial_epoch=0,
+ num_epoch=100,
+ cuda_device=-1,
+ verbose=1):
+ """
+        Fit a model from a torch.utils.data.DataLoader using ModuleTrainer
+ """
+ self.model.train(mode=True)
+ # ----------------------------------------------------------------------
+ num_inputs = loader.dataset.num_inputs
+ num_targets = loader.dataset.num_targets
+ len_inputs = len(loader.sampler) if loader.sampler else len(loader.dataset)
+ batch_size = loader.batch_size
+
+ if val_loader is not None:
+ num_val_inputs = val_loader.dataset.num_inputs
+ num_val_targets = val_loader.dataset.num_targets
+ if (num_inputs != num_val_inputs) or (num_targets != num_val_targets):
+ raise ValueError('num_inputs != num_val_inputs or num_targets != num_val_targets')
+ has_val_data = val_loader is not None
+ num_batches = int(math.ceil(len_inputs / batch_size))
+ # ----------------------------------------------------------------------
+
+ fit_helper = _get_helper(self, num_inputs, num_targets)
+ fit_loss_fn = fit_helper.get_partial_loss_fn(self._loss_fn)
+ fit_forward_fn = fit_helper.get_partial_forward_fn(self.model)
+
+ with TQDM() as pbar:
+ tmp_callbacks = []
+ if verbose > 0:
+ tmp_callbacks.append(pbar)
+ if self._has_regularizers:
+ tmp_callbacks.append(RegularizerCallback(self.regularizer_container))
+ fit_loss_fn = _add_regularizer_to_loss_fn(fit_loss_fn,
+ self.regularizer_container)
+ if self._has_constraints:
+ tmp_callbacks.append(ConstraintCallback(self.constraint_container))
+ if self._has_metrics:
+ self.metric_container.set_helper(fit_helper)
+ tmp_callbacks.append(MetricCallback(self.metric_container))
+
+ callback_container = CallbackContainer(self._callbacks+tmp_callbacks)
+ callback_container.set_trainer(self)
+ callback_container.on_train_begin({'batch_size': loader.batch_size,
+ 'num_batches': num_batches,
+ 'num_epoch': num_epoch,
+ 'has_val_data': has_val_data,
+ 'has_regularizers': self._has_regularizers,
+ 'has_metrics': self._has_metrics})
+
+ for epoch_idx in range(initial_epoch,num_epoch):
+ epoch_logs = {}
+ callback_container.on_epoch_begin(epoch_idx, epoch_logs)
+
+ batch_logs = {}
+ loader_iter = iter(loader)
+ for batch_idx in range(num_batches):
+
+ callback_container.on_batch_begin(batch_idx, batch_logs)
+
+ input_batch, target_batch = fit_helper.grab_batch_from_loader(loader_iter)
+ if cuda_device >= 0:
+ input_batch, target_batch = fit_helper.move_to_cuda(cuda_device, input_batch, target_batch)
+
+ # ---------------------------------------------
+ self._optimizer.zero_grad()
+ output_batch = fit_forward_fn(input_batch)
+ loss = fit_loss_fn(output_batch, target_batch)
+ loss.backward()
+ self._optimizer.step()
+ # ---------------------------------------------
+
+ if self._has_regularizers:
+ batch_logs['reg_loss'] = self.regularizer_container.current_value
+ if self._has_metrics:
+ metrics_logs = self.metric_container(output_batch, target_batch)
+ batch_logs.update(metrics_logs)
+
+ batch_logs['loss'] = loss.data[0]
+ callback_container.on_batch_end(batch_idx, batch_logs)
+
+ epoch_logs.update(self.history.batch_metrics)
+ if has_val_data:
+ val_epoch_logs = self.evaluate_loader(val_loader,
+ cuda_device=cuda_device,
+ verbose=verbose)
+ self._in_train_loop = False
+ #self.history.batch_metrics.update(val_epoch_logs)
+ #epoch_logs.update(val_epoch_logs)
+ epoch_logs.update(val_epoch_logs)
+ epoch_logs.update(batch_logs)
+ # TODO how to fix this?
+ # self.history.batch_metrics.update(val_epoch_logs)
+
+ callback_container.on_epoch_end(epoch_idx, epoch_logs)
+
+ if self._stop_training:
+ break
+ self.model.train(mode=False)
+
+ def predict(self,
+ inputs,
+ batch_size=32,
+ cuda_device=-1,
+ verbose=1):
+ self.model.train(mode=False)
+ # --------------------------------------------------------
+ num_inputs, _ = _parse_num_inputs_and_targets(inputs, None)
+ len_inputs = len(inputs) if not _is_tuple_or_list(inputs) else len(inputs[0])
+ num_batches = int(math.ceil(len_inputs / batch_size))
+ # --------------------------------------------------------
+
+ predict_helper = _get_helper(self, num_inputs, num_targets=0)
+ pred_forward_fn = predict_helper.get_partial_forward_fn(self.model)
+
+ for batch_idx in range(num_batches):
+ input_batch, _ = predict_helper.grab_batch(batch_idx, batch_size, inputs, None, volatile=True)
+ if cuda_device >= 0:
+                input_batch, _ = predict_helper.move_to_cuda(cuda_device, input_batch)
+ output_batch = pred_forward_fn(input_batch)
+
+ if batch_idx == 0:
+ len_outputs = 1 if not _is_tuple_or_list(output_batch) else len(output_batch)
+ prediction_lists = [[] for _ in range(len_outputs)]
+
+ if len_outputs == 1:
+ prediction_lists[0].append(output_batch)
+ else:
+ for out_idx in range(len_outputs):
+ prediction_lists[out_idx].append(output_batch[out_idx])
+
+ final_pred_list = [th.cat(pred_list,0) for pred_list in prediction_lists]
+ self.model.train(mode=True)
+ return final_pred_list if len_outputs > 1 else final_pred_list[0]
+
+ def predict_loader(self,
+ loader,
+ cuda_device=-1,
+ verbose=1):
+ self.model.train(mode=False)
+ # --------------------------------------------------------
+ num_inputs, num_targets = _parse_num_inputs_and_targets_from_loader(loader)
+ batch_size = loader.batch_size
+ len_inputs = len(loader.sampler) if loader.sampler else len(loader.dataset)
+ num_batches = int(math.ceil(len_inputs / batch_size))
+ # --------------------------------------------------------
+
+ predict_helper = _get_helper(self, num_inputs, num_targets=0)
+ pred_forward_fn = predict_helper.get_partial_forward_fn(self.model)
+
+ loader_iter = iter(loader)
+
+ _range = tqdm(range(num_batches)) if verbose > 0 else range(num_batches)
+
+ for batch_idx in _range:
+ input_batch, _ = predict_helper.grab_batch_from_loader(loader_iter, volatile=True)
+ if cuda_device >= 0:
+ input_batch, _ = predict_helper.move_to_cuda(cuda_device, input_batch)
+
+ output_batch = pred_forward_fn(input_batch)
+
+ if batch_idx == 0:
+ len_outputs = 1 if not _is_tuple_or_list(output_batch) else len(output_batch)
+ prediction_lists = [[] for _ in range(len_outputs)]
+
+ if len_outputs == 1:
+ prediction_lists[0].append(output_batch)
+ else:
+ for out_idx in range(len_outputs):
+ prediction_lists[out_idx].append(output_batch[out_idx])
+
+ final_pred_list = [th.cat(pred_list,0) for pred_list in prediction_lists]
+ self.model.train(mode=True)
+ return final_pred_list if len_outputs > 1 else final_pred_list[0]
+
+ def evaluate(self,
+ inputs,
+ targets=None,
+ batch_size=32,
+ cuda_device=-1,
+ verbose=1):
+ self.model.train(mode=False)
+ num_inputs, num_targets = _parse_num_inputs_and_targets(inputs, targets)
+ len_inputs = len(inputs) if not _is_tuple_or_list(inputs) else len(inputs[0])
+ num_batches = int(math.ceil(len_inputs / batch_size))
+
+ evaluate_helper = _get_helper(self, num_inputs, num_targets)
+ eval_loss_fn = evaluate_helper.get_partial_loss_fn(self._loss_fn)
+ eval_forward_fn = evaluate_helper.get_partial_forward_fn(self.model)
+ eval_logs= {'val_loss': 0.}
+
+ if self._has_metrics:
+ metric_container = MetricContainer(self._metrics, prefix='val_')
+ metric_container.set_helper(evaluate_helper)
+ metric_container.reset()
+
+ samples_seen = 0
+ for batch_idx in range(num_batches):
+ input_batch, target_batch = evaluate_helper.grab_batch(batch_idx, batch_size, inputs, targets, volatile=True)
+ if cuda_device >= 0:
+ input_batch, target_batch = evaluate_helper.move_to_cuda(cuda_device, input_batch, target_batch)
+
+ self._optimizer.zero_grad()
+ output_batch = eval_forward_fn(input_batch)
+ loss = eval_loss_fn(output_batch, target_batch)
+
+            eval_logs['val_loss'] = (samples_seen*eval_logs['val_loss'] + loss.data[0]*batch_size) / (samples_seen+batch_size)
+            samples_seen += batch_size
+
+ if self._has_metrics:
+ metrics_logs = metric_container(output_batch, target_batch)
+ eval_logs.update(metrics_logs)
+
+ self.model.train(mode=True)
+ return eval_logs
+
+ def evaluate_loader(self,
+ loader,
+ cuda_device=-1,
+ verbose=1):
+ self.model.train(mode=False)
+ num_inputs, num_targets = _parse_num_inputs_and_targets_from_loader(loader)
+ batch_size = loader.batch_size
+ len_inputs = len(loader.sampler) if loader.sampler else len(loader.dataset)
+ num_batches = int(math.ceil(len_inputs / batch_size))
+
+ evaluate_helper = _get_helper(self, num_inputs, num_targets)
+ eval_loss_fn = evaluate_helper.get_partial_loss_fn(self._loss_fn)
+ eval_forward_fn = evaluate_helper.get_partial_forward_fn(self.model)
+ eval_logs= {'val_loss': 0.}
+ loader_iter = iter(loader)
+
+ if self._has_metrics:
+ metric_container = MetricContainer(self._metrics, prefix='val_')
+ metric_container.set_helper(evaluate_helper)
+ metric_container.reset()
+
+ samples_seen = 0
+ for batch_idx in range(num_batches):
+ input_batch, target_batch = evaluate_helper.grab_batch_from_loader(loader_iter, volatile=True)
+ if cuda_device >= 0:
+ input_batch, target_batch = evaluate_helper.move_to_cuda(cuda_device, input_batch, target_batch)
+
+ self._optimizer.zero_grad()
+ output_batch = eval_forward_fn(input_batch)
+ loss = eval_loss_fn(output_batch, target_batch)
+
+            eval_logs['val_loss'] = (samples_seen*eval_logs['val_loss'] + loss.data[0]*batch_size) / (samples_seen+batch_size)
+            samples_seen += batch_size
+
+ if self._has_metrics:
+ metrics_logs = metric_container(output_batch, target_batch)
+ eval_logs.update(metrics_logs)
+
+ self.model.train(mode=True)
+ return eval_logs
+
+ def summary(self, input_size):
+ def register_hook(module):
+ def hook(module, input, output):
+ class_name = str(module.__class__).split('.')[-1].split("'")[0]
+ module_idx = len(summary)
+
+ m_key = '%s-%i' % (class_name, module_idx+1)
+ summary[m_key] = OrderedDict()
+ summary[m_key]['input_shape'] = list(input[0].size())
+ summary[m_key]['input_shape'][0] = -1
+ summary[m_key]['output_shape'] = list(output.size())
+ summary[m_key]['output_shape'][0] = -1
+
+ params = 0
+ if hasattr(module, 'weight'):
+ params += th.prod(th.LongTensor(list(module.weight.size())))
+ if module.weight.requires_grad:
+ summary[m_key]['trainable'] = True
+ else:
+ summary[m_key]['trainable'] = False
+ if hasattr(module, 'bias'):
+ params += th.prod(th.LongTensor(list(module.bias.size())))
+ summary[m_key]['nb_params'] = params
+
+ if not isinstance(module, nn.Sequential) and \
+ not isinstance(module, nn.ModuleList) and \
+ not (module == self.model):
+ hooks.append(module.register_forward_hook(hook))
+
+ # create properties
+ summary = OrderedDict()
+ hooks = []
+ # register forward hooks
+ self.model.apply(register_hook)
+
+ if isinstance(input_size[0], (list, tuple)):
+ x = [Variable(th.rand(1,*in_size)) for in_size in input_size]
+ self.model(*x)
+ else:
+ x = Variable(th.rand(1,*input_size))
+ self.model(x)
+
+ # remove these hooks
+ for h in hooks:
+ h.remove()
+
+ return summary
+
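+# Usage sketch (an assumption, not part of the library): summarize a model
+# that takes a single (3, 32, 32) input:
+#
+#   layer_info = trainer.summary((3, 32, 32))
+#   for name, props in layer_info.items():
+#       print(name, props['output_shape'], props['nb_params'])
+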
+def _get_helper(trainer, num_inputs, num_targets):
+ if (num_inputs == 1) and (num_targets == 1):
+ helper = SingleInput_SingleTarget_Helper()
+
+ elif (num_inputs == 1) and (num_targets > 1):
+ # use same loss function for all targets if multiple loss fns not explicitly given
+ if not _is_tuple_or_list(trainer._loss_fn):
+ trainer._loss_fn = [trainer._loss_fn] * num_targets
+ else:
+ if len(trainer._loss_fn) != num_targets:
+                raise ValueError('must give one loss function for every target if you give multiple')
+ helper = SingleInput_MultiTarget_Helper()
+
+ elif (num_inputs == 1) and (num_targets == 0):
+ helper = SingleInput_NoTarget_Helper()
+
+ elif (num_inputs > 1) and (num_targets == 1):
+ helper = MultiInput_SingleTarget_Helper()
+
+ elif (num_inputs > 1) and (num_targets > 1):
+ # use same loss function for all targets if multiple loss fns not explicitly given
+ if not _is_tuple_or_list(trainer._loss_fn):
+ trainer._loss_fn = [trainer._loss_fn] * num_targets
+ else:
+ if len(trainer._loss_fn) != num_targets:
+                raise ValueError('must give one loss function for every target if you give multiple')
+ helper = MultiInput_MultiTarget_Helper()
+
+ elif (num_inputs > 1) and (num_targets == 0):
+ helper = MultiInput_NoTarget_Helper()
+
+ return helper
+
+
+class SingleInput_SingleTarget_Helper(object):
+ def move_to_cuda(self, cuda_device, inputs, targets):
+ inputs = inputs.cuda(cuda_device)
+ targets = targets.cuda(cuda_device)
+ return inputs, targets
+ def shuffle_arrays(self, inputs, targets):
+ rand_indices = th.randperm(len(inputs))
+ inputs = inputs[rand_indices]
+ targets = targets[rand_indices]
+ return inputs, targets
+ def grab_batch(self, batch_idx, batch_size, inputs, targets, volatile=False):
+ input_batch = Variable(inputs[batch_idx*batch_size:(batch_idx+1)*batch_size], volatile=volatile)
+ target_batch = Variable(targets[batch_idx*batch_size:(batch_idx+1)*batch_size], volatile=volatile, requires_grad=False)
+ return input_batch, target_batch
+ def grab_batch_from_loader(self, loader_iter, volatile=False):
+ input_batch, target_batch = next(loader_iter)
+ return Variable(input_batch, volatile=volatile), Variable(target_batch, volatile=volatile, requires_grad=False)
+ def apply_transforms(self, tforms, input_batch, target_batch):
+ input_batch = tforms[0](input_batch)
+ target_batch = tforms[1](target_batch)
+ input_batch, target_batch = tforms[2](input_batch, target_batch)
+ return input_batch, target_batch
+ def forward_pass(self, input_batch, model):
+ return model(input_batch)
+ def get_partial_forward_fn(self, model):
+ return functools.partial(self.forward_pass, model=model)
+ def calculate_loss(self, output_batch, target_batch, loss_fn):
+ return loss_fn(output_batch, target_batch)
+ def get_partial_loss_fn(self, loss_fn):
+ return functools.partial(self.calculate_loss, loss_fn=loss_fn)
+ #def new_loss_fn(output_batch, target_batch):
+ # return self.calculate_loss(output_batch, target_batch, loss_fn)
+ #return new_loss_fn
+
+
+class SingleInput_MultiTarget_Helper(object):
+ def move_to_cuda(self, cuda_device, inputs, targets):
+ inputs = inputs.cuda(cuda_device)
+ targets = [target_.cuda(cuda_device) for target_ in targets]
+ return inputs, targets
+ def shuffle_arrays(self, inputs, targets):
+ rand_indices = th.randperm(len(inputs))
+ inputs = inputs[rand_indices]
+ targets = [target_[rand_indices] for target_ in targets]
+ return inputs, targets
+ def grab_batch(self, batch_idx, batch_size, inputs, targets, volatile=False):
+ input_batch = Variable(inputs[batch_idx*batch_size:(batch_idx+1)*batch_size], volatile=volatile)
+ target_batch = [Variable(target_[batch_idx*batch_size:(batch_idx+1)*batch_size], volatile=volatile, requires_grad=False)
+ for target_ in targets]
+ return input_batch, target_batch
+ def grab_batch_from_loader(self, loader_iter, volatile=False):
+ input_batch, target_batch = next(loader_iter)
+ return Variable(input_batch, volatile=volatile), [Variable(target_, volatile=volatile, requires_grad=False) for target_ in target_batch]
+ def apply_transforms(self, tforms, input_batch, target_batch):
+ input_batch = tforms[0](input_batch)
+ target_batch = [tforms[1](target_) for target_ in target_batch]
+ return input_batch, target_batch
+ def forward_pass(self, input_batch, model):
+ return model(input_batch)
+ def get_partial_forward_fn(self, model):
+ return functools.partial(self.forward_pass, model=model)
+ def calculate_loss(self, output_batch, target_batch, loss_fn):
+ return sum([loss_fn[idx](output_batch[idx], target_batch[idx])
+ for idx in range(len(output_batch))])
+ def get_partial_loss_fn(self, loss_fn):
+ return functools.partial(self.calculate_loss, loss_fn=loss_fn)
+
+class MultiInput_SingleTarget_Helper(object):
+ def move_to_cuda(self, cuda_device, inputs, targets):
+ inputs = [input_.cuda(cuda_device) for input_ in inputs]
+ targets = targets.cuda(cuda_device)
+ return inputs, targets
+ def shuffle_arrays(self, inputs, targets):
+ rand_indices = th.randperm(len(inputs))
+ inputs = [input_[rand_indices] for input_ in inputs]
+ targets = targets[rand_indices]
+ return inputs, targets
+ def grab_batch(self, batch_idx, batch_size, inputs, targets, volatile=False):
+ input_batch = [Variable(input_[batch_idx*batch_size:(batch_idx+1)*batch_size], volatile=volatile)
+ for input_ in inputs]
+ target_batch = Variable(targets[batch_idx*batch_size:(batch_idx+1)*batch_size], volatile=volatile, requires_grad=False)
+ return input_batch, target_batch
+ def grab_batch_from_loader(self, loader_iter, volatile=False):
+ input_batch, target_batch = next(loader_iter)
+ return [Variable(input_, volatile=volatile) for input_ in input_batch], Variable(target_batch, volatile=volatile, requires_grad=False)
+ def apply_transforms(self, tforms, input_batch, target_batch):
+ input_batch = [tforms[0](input_) for input_ in input_batch]
+ target_batch = tforms[1](target_batch)
+ return input_batch, target_batch
+ def forward_pass(self, input_batch, model):
+ return model(*input_batch)
+ def get_partial_forward_fn(self, model):
+ return functools.partial(self.forward_pass, model=model)
+ def calculate_loss(self, output_batch, target_batch, loss_fn):
+ return loss_fn(output_batch, target_batch)
+ def get_partial_loss_fn(self, loss_fn):
+ return functools.partial(self.calculate_loss, loss_fn=loss_fn)
+
+class MultiInput_MultiTarget_Helper(object):
+ def move_to_cuda(self, cuda_device, inputs, targets):
+ inputs = [input_.cuda(cuda_device) for input_ in inputs]
+ targets = [target_.cuda(cuda_device) for target_ in targets]
+ return inputs, targets
+ def shuffle_arrays(self, inputs, targets):
+ rand_indices = th.randperm(len(inputs))
+ inputs = [input_[rand_indices] for input_ in inputs]
+        targets = [target_[rand_indices] for target_ in targets]
+ return inputs, targets
+ def grab_batch(self, batch_idx, batch_size, inputs, targets, volatile=False):
+ input_batch = [Variable(input_[batch_idx*batch_size:(batch_idx+1)*batch_size], volatile=volatile)
+ for input_ in inputs]
+ target_batch = [Variable(target_[batch_idx*batch_size:(batch_idx+1)*batch_size], volatile=volatile, requires_grad=False)
+ for target_ in targets]
+ return input_batch, target_batch
+ def grab_batch_from_loader(self, loader_iter, volatile=False):
+ input_batch, target_batch = next(loader_iter)
+ return [Variable(input_, volatile=volatile) for input_ in input_batch], [Variable(target_, volatile=volatile, requires_grad=False) for target_ in target_batch]
+ def apply_transforms(self, tforms, input_batch, target_batch):
+ input_batch = [tforms[0](input_) for input_ in input_batch]
+ target_batch = [tforms[1](target_) for target_ in target_batch]
+ return input_batch, target_batch
+ def forward_pass(self, input_batch, model):
+ return model(*input_batch)
+ def get_partial_forward_fn(self, model):
+ return functools.partial(self.forward_pass, model=model)
+ def calculate_loss(self, output_batch, target_batch, loss_fn):
+ return sum([loss_fn[idx](output_batch[idx], target_batch[idx])
+ for idx in range(len(output_batch))])
+ def get_partial_loss_fn(self, loss_fn):
+ return functools.partial(self.calculate_loss, loss_fn=loss_fn)
+
+class SingleInput_NoTarget_Helper(object):
+ def move_to_cuda(self, cuda_device, inputs, targets=None):
+ inputs = inputs.cuda(cuda_device)
+ return inputs, None
+ def shuffle_arrays(self, inputs, targets=None):
+ rand_indices = th.randperm(len(inputs))
+ inputs = inputs[rand_indices]
+ return inputs, None
+ def grab_batch(self, batch_idx, batch_size, inputs, targets=None, volatile=False):
+ input_batch = Variable(inputs[batch_idx*batch_size:(batch_idx+1)*batch_size], volatile=volatile)
+ return input_batch, None
+ def grab_batch_from_loader(self, loader_iter, volatile=False):
+ input_batch = next(loader_iter)
+ return Variable(input_batch, volatile=volatile), None
+ def apply_transforms(self, tforms, input_batch, target_batch=None):
+ input_batch = tforms[0](input_batch)
+ return input_batch, None
+ def forward_pass(self, input_batch, model):
+ return model(input_batch)
+ def get_partial_forward_fn(self, model):
+ return functools.partial(self.forward_pass, model=model)
+ def calculate_loss(self, output_batch, target_batch, loss_fn):
+ return loss_fn(output_batch)
+ def get_partial_loss_fn(self, loss_fn):
+ return functools.partial(self.calculate_loss, loss_fn=loss_fn)
+
+class MultiInput_NoTarget_Helper(object):
+ def move_to_cuda(self, cuda_device, inputs, targets=None):
+ inputs = [input_.cuda(cuda_device) for input_ in inputs]
+ return inputs, None
+ def shuffle_arrays(self, inputs, targets=None):
+ rand_indices = th.randperm(len(inputs))
+ inputs = [input_[rand_indices] for input_ in inputs]
+ return inputs, None
+ def grab_batch(self, batch_idx, batch_size, inputs, targets=None, volatile=False):
+ input_batch = [Variable(input_[batch_idx*batch_size:(batch_idx+1)*batch_size], volatile=volatile)
+ for input_ in inputs]
+ return input_batch, None
+ def grab_batch_from_loader(self, loader_iter, volatile=False):
+ input_batch = next(loader_iter)
+ return [Variable(input_, volatile=volatile) for input_ in input_batch], None
+ def apply_transforms(self, tforms, input_batch, target_batch=None):
+ input_batch = [tforms[0](input_) for input_ in input_batch]
+ return input_batch, None
+ def forward_pass(self, input_batch, model):
+ return model(*input_batch)
+ def get_partial_forward_fn(self, model):
+ return functools.partial(self.forward_pass, model=model)
+ def calculate_loss(self, output_batch, target_batch, loss_fn):
+ return loss_fn(output_batch)
+ def get_partial_loss_fn(self, loss_fn):
+ return functools.partial(self.calculate_loss, loss_fn=loss_fn)
diff --git a/torchsample/torchsample/regularizers.py b/torchsample/torchsample/regularizers.py
new file mode 100755
index 0000000..19ab5a0
--- /dev/null
+++ b/torchsample/torchsample/regularizers.py
@@ -0,0 +1,179 @@
+
+import torch as th
+from fnmatch import fnmatch
+
+from .callbacks import Callback
+
+class RegularizerContainer(object):
+
+ def __init__(self, regularizers):
+ self.regularizers = regularizers
+ self._forward_hooks = []
+
+ def register_forward_hooks(self, model):
+ for regularizer in self.regularizers:
+ for module_name, module in model.named_modules():
+ if fnmatch(module_name, regularizer.module_filter) and hasattr(module, 'weight'):
+ hook = module.register_forward_hook(regularizer)
+ self._forward_hooks.append(hook)
+
+ if len(self._forward_hooks) == 0:
+ raise Exception('Tried to register regularizers but no modules '
+ 'were found that matched any module_filter argument.')
+
+ def unregister_forward_hooks(self):
+ for hook in self._forward_hooks:
+ hook.remove()
+
+ def reset(self):
+ for r in self.regularizers:
+ r.reset()
+
+ def get_value(self):
+ value = sum([r.value for r in self.regularizers])
+ self.current_value = value.data[0]
+ return value
+
+ def __len__(self):
+ return len(self.regularizers)
+
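+# Usage sketch (illustrative only; `model` is any nn.Module and `x` an input tensor
+# you supply):
+# >>> container = RegularizerContainer([L1Regularizer(scale=1e-4, module_filter='*')])
+# >>> container.register_forward_hooks(model)   # hooks accumulate penalties on forward
+# >>> output = model(x)                          # each regularizer __call__ runs via the hooks
+# >>> penalty = container.get_value()            # add this to the loss before backward()
+# >>> container.reset()                          # typically once per batch (see RegularizerCallback)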
+
+class RegularizerCallback(Callback):
+
+ def __init__(self, container):
+ self.container = container
+
+ def on_batch_end(self, batch, logs=None):
+ self.container.reset()
+
+
+class Regularizer(object):
+
+ def reset(self):
+ raise NotImplementedError('subclass must implement this method')
+
+ def __call__(self, module, input=None, output=None):
+ raise NotImplementedError('subclass must implement this method')
+
+
+class L1Regularizer(Regularizer):
+
+ def __init__(self, scale=1e-3, module_filter='*'):
+ self.scale = float(scale)
+ self.module_filter = module_filter
+ self.value = 0.
+
+ def reset(self):
+ self.value = 0.
+
+ def __call__(self, module, input=None, output=None):
+ value = th.sum(th.abs(module.weight)) * self.scale
+ self.value += value
+
+
+class L2Regularizer(Regularizer):
+
+ def __init__(self, scale=1e-3, module_filter='*'):
+ self.scale = float(scale)
+ self.module_filter = module_filter
+ self.value = 0.
+
+ def reset(self):
+ self.value = 0.
+
+ def __call__(self, module, input=None, output=None):
+ value = th.sum(th.pow(module.weight,2)) * self.scale
+ self.value += value
+
+
+class L1L2Regularizer(Regularizer):
+
+ def __init__(self, l1_scale=1e-3, l2_scale=1e-3, module_filter='*'):
+ self.l1 = L1Regularizer(l1_scale)
+ self.l2 = L2Regularizer(l2_scale)
+ self.module_filter = module_filter
+ self.value = 0.
+
+ def reset(self):
+ self.value = 0.
+
+ def __call__(self, module, input=None, output=None):
+ self.l1(module, input, output)
+ self.l2(module, input, output)
+ self.value += (self.l1.value + self.l2.value)
+
+
+# ------------------------------------------------------------------
+# ------------------------------------------------------------------
+# ------------------------------------------------------------------
+
+class UnitNormRegularizer(Regularizer):
+ """
+ UnitNorm constraint on Weights
+
+ Constrains the weights to have column-wise unit norm
+ """
+ def __init__(self,
+ scale=1e-3,
+ module_filter='*'):
+
+ self.scale = scale
+ self.module_filter = module_filter
+ self.value = 0.
+
+ def reset(self):
+ self.value = 0.
+
+ def __call__(self, module, input=None, output=None):
+ w = module.weight
+ norm_diff = th.norm(w, 2, 1).sub(1.)
+ value = self.scale * th.sum(norm_diff.gt(0).float().mul(norm_diff))
+ self.value += value
+
+
+class MaxNormRegularizer(Regularizer):
+ """
+ MaxNorm regularizer on Weights
+
+ Penalizes column-wise weight norms that exceed a maximum value
+ """
+ def __init__(self,
+ scale=1e-3,
+ module_filter='*'):
+
+ self.scale = scale
+ self.module_filter = module_filter
+ self.value = 0.
+
+ def reset(self):
+ self.value = 0.
+
+ def __call__(self, module, input=None, output=None):
+ w = module.weight
+ # assume a fixed maximum column norm of 1 along dim 1 (mirroring UnitNormRegularizer);
+ # this class exposes no configurable max-norm value or axis
+ norm_diff = th.norm(w, 2, 1).sub(1.)
+ value = self.scale * th.sum(norm_diff.gt(0).float().mul(norm_diff))
+ self.value += value
+
+
+class NonNegRegularizer(Regularizer):
+ """
+ Non-Negativity regularizer on Weights
+
+ Penalizes negative weights, pushing them toward non-negativity
+ """
+ def __init__(self,
+ scale=1e-3,
+ module_filter='*'):
+
+ self.scale = scale
+ self.module_filter = module_filter
+ self.value = 0.
+
+ def reset(self):
+ self.value = 0.
+
+ def __call__(self, module, input=None, output=None):
+ w = module.weight
+ # penalize the total magnitude of negative weights
+ value = -1 * self.scale * th.sum(w.lt(0).float().mul(w))
+ self.value += value
+
diff --git a/torchsample/torchsample/samplers.py b/torchsample/torchsample/samplers.py
new file mode 100755
index 0000000..21dd732
--- /dev/null
+++ b/torchsample/torchsample/samplers.py
@@ -0,0 +1,146 @@
+
+import torch as th
+import math
+
+class Sampler(object):
+ """Base class for all Samplers.
+
+ Every Sampler subclass has to provide an __iter__ method, providing a way
+ to iterate over indices of dataset elements, and a __len__ method that
+ returns the length of the returned iterators.
+ """
+
+ def __init__(self, data_source):
+ pass
+
+ def __iter__(self):
+ raise NotImplementedError
+
+ def __len__(self):
+ raise NotImplementedError
+
+class StratifiedSampler(Sampler):
+ """Stratified Sampling
+
+ Provides equal representation of target classes in each batch
+ """
+ def __init__(self, class_vector, batch_size):
+ """
+ Arguments
+ ---------
+ class_vector : torch tensor
+ a vector of class labels
+ batch_size : integer
+ batch_size
+ """
+ self.n_splits = int(class_vector.size(0) / batch_size)
+ self.class_vector = class_vector
+
+ def gen_sample_array(self):
+ try:
+ from sklearn.model_selection import StratifiedShuffleSplit
+ except ImportError:
+ raise ImportError('Need scikit-learn for this functionality')
+ import numpy as np
+
+ s = StratifiedShuffleSplit(n_splits=self.n_splits, test_size=0.5)
+ X = th.randn(self.class_vector.size(0),2).numpy()
+ y = self.class_vector.numpy()
+ s.get_n_splits(X, y)
+
+ train_index, test_index = next(s.split(X, y))
+ return np.hstack([train_index, test_index])
+
+ def __iter__(self):
+ return iter(self.gen_sample_array())
+
+ def __len__(self):
+ return len(self.class_vector)
+
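+# Usage sketch (illustrative only; `dataset` and `labels` are assumed): StratifiedSampler
+# can be passed to a torch DataLoader via its `sampler` argument, with `labels` a
+# LongTensor of class labels aligned with `dataset`:
+# >>> sampler = StratifiedSampler(class_vector=labels, batch_size=64)
+# >>> loader = torch.utils.data.DataLoader(dataset, batch_size=64, sampler=sampler)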
+class MultiSampler(Sampler):
+ """Samples elements more than once in a single pass through the data.
+
+ This allows the number of samples per epoch to be larger than the number
+ of samples itself, which can be useful when training on 2D slices taken
+ from 3D images, for instance.
+ """
+ def __init__(self, nb_samples, desired_samples, shuffle=False):
+ """Initialize MultiSampler
+
+ Arguments
+ ---------
+ nb_samples : the number of samples in the underlying dataset
+
+ desired_samples : number of samples you want per pass through the data.
+ Whatever the difference is from an even multiple of `nb_samples`
+ will be randomly selected from the samples.
+ e.g. if nb_samples = 3 and desired_samples = 4, then
+ all 3 samples will be included and the 4th sample will be
+ randomly chosen from the 3 original samples.
+
+ shuffle : boolean
+ whether to shuffle the indices or not
+
+ Example:
+ >>> m = MultiSampler(2, 6)
+ >>> x = m.gen_sample_array()
+ >>> print(x) # [0,1,0,1,0,1]
+ """
+ self.data_samples = nb_samples
+ self.desired_samples = desired_samples
+ self.shuffle = shuffle
+
+ def gen_sample_array(self):
+ from torchsample.utils import th_random_choice
+ n_repeats = self.desired_samples / self.data_samples
+ cat_list = []
+ for i in range(math.floor(n_repeats)):
+ cat_list.append(th.arange(0,self.data_samples))
+ # add the left over samples
+ left_over = self.desired_samples % self.data_samples
+ if left_over > 0:
+ cat_list.append(th_random_choice(self.data_samples, left_over))
+ self.sample_idx_array = th.cat(cat_list).long()
+ if self.shuffle:
+ self.sample_idx_array = self.sample_idx_array[th.randperm(len(self.sample_idx_array))]
+ return self.sample_idx_array
+
+ def __iter__(self):
+ return iter(self.gen_sample_array())
+
+ def __len__(self):
+ return self.desired_samples
+
+
+class SequentialSampler(Sampler):
+ """Samples elements sequentially, always in the same order.
+
+ Arguments:
+ data_source (Dataset): dataset to sample from
+ """
+
+ def __init__(self, nb_samples):
+ self.num_samples = nb_samples
+
+ def __iter__(self):
+ return iter(range(self.num_samples))
+
+ def __len__(self):
+ return self.num_samples
+
+
+class RandomSampler(Sampler):
+ """Samples elements randomly, without replacement.
+
+ Arguments:
+ data_source (Dataset): dataset to sample from
+ """
+
+ def __init__(self, nb_samples):
+ self.num_samples = nb_samples
+
+ def __iter__(self):
+ return iter(th.randperm(self.num_samples).long())
+
+ def __len__(self):
+ return self.num_samples
+
+
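+# Usage sketch (illustrative only; `dataset` is assumed): any of these samplers can
+# drive a DataLoader; e.g. to see each of 100 samples roughly 2.5x per epoch:
+# >>> sampler = MultiSampler(nb_samples=100, desired_samples=250)
+# >>> loader = torch.utils.data.DataLoader(dataset, batch_size=32, sampler=sampler)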
diff --git a/torchsample/torchsample/transforms/__init__.py b/torchsample/torchsample/transforms/__init__.py
new file mode 100755
index 0000000..967d5d4
--- /dev/null
+++ b/torchsample/torchsample/transforms/__init__.py
@@ -0,0 +1,6 @@
+
+from __future__ import absolute_import
+
+from .affine_transforms import *
+from .image_transforms import *
+from .tensor_transforms import *
\ No newline at end of file
diff --git a/torchsample/torchsample/transforms/affine_transforms.py b/torchsample/torchsample/transforms/affine_transforms.py
new file mode 100755
index 0000000..8a1f9c2
--- /dev/null
+++ b/torchsample/torchsample/transforms/affine_transforms.py
@@ -0,0 +1,767 @@
+"""
+Affine transforms implemented on torch tensors, and
+requiring only one interpolation
+"""
+
+import math
+import random
+import torch as th
+
+from ..utils import th_affine2d, th_random_choice
+
+
+class RandomAffine(object):
+
+ def __init__(self,
+ rotation_range=None,
+ translation_range=None,
+ shear_range=None,
+ zoom_range=None,
+ interp='bilinear',
+ lazy=False):
+ """
+ Perform an affine transform with various sub-transforms, using
+ only one interpolation and without having to instantiate each
+ sub-transform individually.
+
+ Arguments
+ ---------
+ rotation_range : one integer or float
+ image will be rotated randomly between (-degrees, degrees)
+
+ translation_range : a float or a tuple/list with 2 floats between [0, 1)
+ first value:
+ image will be horizontally shifted between
+ (-height_range * height_dimension, height_range * height_dimension)
+ second value:
+ Image will be vertically shifted between
+ (-width_range * width_dimension, width_range * width_dimension)
+
+ shear_range : float
+ image will be sheared randomly between (-degrees, degrees)
+
+ zoom_range : list/tuple with two floats between [0, infinity).
+ first float should be less than the second
+ lower and upper bounds on percent zoom.
+ Anything less than 1.0 will zoom in on the image,
+ anything greater than 1.0 will zoom out on the image.
+ e.g. (0.7, 1.0) will only zoom in,
+ (1.0, 1.4) will only zoom out,
+ (0.7, 1.4) will randomly zoom in or out
+
+ interp : string in {'bilinear', 'nearest'} or list of strings
+ type of interpolation to use. You can provide a different
+ type of interpolation for each input, e.g. if you have two
+ inputs then you can say `interp=['bilinear','nearest']
+
+ """
+ self.transforms = []
+ if rotation_range is not None:
+ rotation_tform = RandomRotate(rotation_range, lazy=True)
+ self.transforms.append(rotation_tform)
+
+ if translation_range is not None:
+ translation_tform = RandomTranslate(translation_range, lazy=True)
+ self.transforms.append(translation_tform)
+
+ if shear_range is not None:
+ shear_tform = RandomShear(shear_range, lazy=True)
+ self.transforms.append(shear_tform)
+
+ if zoom_range is not None:
+ zoom_tform = RandomZoom(zoom_range, lazy=True)
+ self.transforms.append(zoom_tform)
+
+ self.interp = interp
+ self.lazy = lazy
+
+ if len(self.transforms) == 0:
+ raise Exception('Must give at least one transform parameter')
+
+ def __call__(self, *inputs):
+ # collect all of the lazily returned tform matrices
+ tform_matrix = self.transforms[0](inputs[0])
+ for tform in self.transforms[1:]:
+ tform_matrix = tform_matrix.mm(tform(inputs[0]))
+ self.tform_matrix = tform_matrix
+
+ if self.lazy:
+ return tform_matrix
+ else:
+ outputs = Affine(tform_matrix,
+ interp=self.interp)(*inputs)
+ return outputs
+
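+# Usage sketch (illustrative only; `x` is a CHW float tensor and `y` an aligned
+# target/mask tensor): each sub-transform is created with lazy=True, so only one
+# interpolation is performed for the whole composed transform:
+# >>> tform = RandomAffine(rotation_range=30, zoom_range=(0.8, 1.2), interp='bilinear')
+# >>> x_t = tform(x)
+# >>> x_t, y_t = tform(x, y)      # the same random matrix is applied to both inputs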
+
+class Affine(object):
+
+ def __init__(self,
+ tform_matrix,
+ interp='bilinear'):
+ """
+ Perform an affine transform with various sub-transforms, using
+ only one interpolation and without having to instantiate each
+ sub-transform individually.
+
+ Arguments
+ ---------
+ tform_matrix : a 2x3 or 3x3 matrix
+ affine transformation matrix to apply
+
+ interp : string in {'bilinear', 'nearest'} or list of strings
+ type of interpolation to use. You can provide a different
+ type of interpolation for each input, e.g. if you have two
+ inputs then you can say `interp=['bilinear','nearest']
+
+ """
+ self.tform_matrix = tform_matrix
+ self.interp = interp
+
+ def __call__(self, *inputs):
+ if not isinstance(self.interp, (tuple,list)):
+ interp = [self.interp]*len(inputs)
+ else:
+ interp = self.interp
+
+ outputs = []
+ for idx, _input in enumerate(inputs):
+ input_tf = th_affine2d(_input,
+ self.tform_matrix,
+ mode=interp[idx])
+ outputs.append(input_tf)
+ return outputs if len(outputs) > 1 else outputs[0]
+
+
+class AffineCompose(object):
+
+ def __init__(self,
+ transforms,
+ interp='bilinear'):
+ """
+ Apply a collection of explicit affine transforms to an input image,
+ and to a target image if necessary
+
+ Arguments
+ ---------
+ transforms : list or tuple
+ each element in the list/tuple should be an affine transform.
+ currently supported transforms:
+ - Rotate()
+ - Translate()
+ - Shear()
+ - Zoom()
+
+ interp : string in {'bilinear', 'nearest'} or list of strings
+ type of interpolation to use. You can provide a different
+ type of interpolation for each input, e.g. if you have two
+ inputs then you can say `interp=['bilinear','nearest']
+
+ """
+ self.transforms = transforms
+ self.interp = interp
+ # set transforms to lazy so they only return the tform matrix
+ for t in self.transforms:
+ t.lazy = True
+
+ def __call__(self, *inputs):
+ # collect all of the lazily returned tform matrices
+ tform_matrix = self.transforms[0](inputs[0])
+ for tform in self.transforms[1:]:
+ tform_matrix = tform_matrix.mm(tform(inputs[0]))
+
+ if not isinstance(self.interp, (tuple,list)):
+ interp = [self.interp]*len(inputs)
+ else:
+ interp = self.interp
+
+ outputs = []
+ for idx, _input in enumerate(inputs):
+ input_tf = th_affine2d(_input,
+ tform_matrix,
+ mode=interp[idx])
+ outputs.append(input_tf)
+ return outputs if len(outputs) > 1 else outputs[0]
+
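+# Usage sketch (illustrative only; `x` is a CHW float tensor): explicit (non-random)
+# transforms can be chained so their 3x3 matrices are multiplied and applied in a
+# single resampling:
+# >>> tform = AffineCompose([Rotate(30), Zoom(1.2)], interp='bilinear')
+# >>> x_t = tform(x)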
+
+class RandomRotate(object):
+
+ def __init__(self,
+ rotation_range,
+ interp='bilinear',
+ lazy=False):
+ """
+ Randomly rotate an image between (-degrees, degrees). If the image
+ has multiple channels, the same rotation will be applied to each channel.
+
+ Arguments
+ ---------
+ rotation_range : integer or float
+ image will be rotated between (-degrees, degrees) degrees
+
+ interp : string in {'bilinear', 'nearest'} or list of strings
+ type of interpolation to use. You can provide a different
+ type of interpolation for each input, e.g. if you have two
+ inputs then you can say `interp=['bilinear','nearest']
+
+ lazy : boolean
+ if true, only create the affine transform matrix and return that
+ if false, perform the transform on the tensor and return the tensor
+ """
+ self.rotation_range = rotation_range
+ self.interp = interp
+ self.lazy = lazy
+
+ def __call__(self, *inputs):
+ degree = random.uniform(-self.rotation_range, self.rotation_range)
+
+ if self.lazy:
+ return Rotate(degree, lazy=True)(inputs[0])
+ else:
+ outputs = Rotate(degree,
+ interp=self.interp)(*inputs)
+ return outputs
+
+
+class RandomChoiceRotate(object):
+
+ def __init__(self,
+ values,
+ p=None,
+ interp='bilinear',
+ lazy=False):
+ """
+ Randomly rotate an image from a list of values. If the image
+ has multiple channels, the same rotation will be applied to each channel.
+
+ Arguments
+ ---------
+ values : a list or tuple
+ the values from which the rotation value will be sampled
+
+ p : a list or tuple the same length as `values`
+ the probabilities of sampling any given value. Must sum to 1.
+
+ interp : string in {'bilinear', 'nearest'} or list of strings
+ type of interpolation to use. You can provide a different
+ type of interpolation for each input, e.g. if you have two
+ inputs then you can say `interp=['bilinear','nearest']
+
+ lazy : boolean
+ if true, only create the affine transform matrix and return that
+ if false, perform the transform on the tensor and return the tensor
+ """
+ if isinstance(values, (list, tuple)):
+ values = th.FloatTensor(values)
+ self.values = values
+ if p is None:
+ p = th.ones(len(values)) / len(values)
+ else:
+ if abs(1.0-sum(p)) > 1e-3:
+ raise ValueError('Probs must sum to 1')
+ self.p = p
+ self.interp = interp
+ self.lazy = lazy
+
+ def __call__(self, *inputs):
+ degree = th_random_choice(self.values, p=self.p)
+
+ if self.lazy:
+ return Rotate(degree, lazy=True)(inputs[0])
+ else:
+ outputs = Rotate(degree,
+ interp=self.interp)(*inputs)
+ return outputs
+
+
+class Rotate(object):
+
+ def __init__(self,
+ value,
+ interp='bilinear',
+ lazy=False):
+ """
+ Randomly rotate an image between (-degrees, degrees). If the image
+ has multiple channels, the same rotation will be applied to each channel.
+
+ Arguments
+ ---------
+ rotation_range : integer or float
+ image will be rotated between (-degrees, degrees) degrees
+
+ interp : string in {'bilinear', 'nearest'} or list of strings
+ type of interpolation to use. You can provide a different
+ type of interpolation for each input, e.g. if you have two
+ inputs then you can say `interp=['bilinear','nearest']
+
+ lazy : boolean
+ if true, only create the affine transform matrix and return that
+ if false, perform the transform on the tensor and return the tensor
+ """
+ self.value = value
+ self.interp = interp
+ self.lazy = lazy
+
+ def __call__(self, *inputs):
+ if not isinstance(self.interp, (tuple,list)):
+ interp = [self.interp]*len(inputs)
+ else:
+ interp = self.interp
+
+ theta = math.pi / 180 * self.value
+ rotation_matrix = th.FloatTensor([[math.cos(theta), -math.sin(theta), 0],
+ [math.sin(theta), math.cos(theta), 0],
+ [0, 0, 1]])
+ if self.lazy:
+ return rotation_matrix
+ else:
+ outputs = []
+ for idx, _input in enumerate(inputs):
+ input_tf = th_affine2d(_input,
+ rotation_matrix,
+ mode=interp[idx],
+ center=True)
+ outputs.append(input_tf)
+ return outputs if len(outputs) > 1 else outputs[0]
+
+
+class RandomTranslate(object):
+
+ def __init__(self,
+ translation_range,
+ interp='bilinear',
+ lazy=False):
+ """
+ Randomly translate an image some fraction of total height and/or
+ some fraction of total width. If the image has multiple channels,
+ the same translation will be applied to each channel.
+
+ Arguments
+ ---------
+ translation_range : two floats between [0, 1)
+ first value:
+ fractional bounds of total height to shift image
+ image will be horizontally shifted between
+ (-height_range * height_dimension, height_range * height_dimension)
+ second value:
+ fractional bounds of total width to shift image
+ Image will be vertically shifted between
+ (-width_range * width_dimension, width_range * width_dimension)
+
+ interp : string in {'bilinear', 'nearest'} or list of strings
+ type of interpolation to use. You can provide a different
+ type of interpolation for each input, e.g. if you have two
+ inputs then you can say `interp=['bilinear','nearest']
+
+ lazy : boolean
+ if true, only create the affine transform matrix and return that
+ if false, perform the transform on the tensor and return the tensor
+ """
+ if isinstance(translation_range, float):
+ translation_range = (translation_range, translation_range)
+ self.height_range = translation_range[0]
+ self.width_range = translation_range[1]
+ self.interp = interp
+ self.lazy = lazy
+
+ def __call__(self, *inputs):
+ # height shift
+ random_height = random.uniform(-self.height_range, self.height_range)
+ # width shift
+ random_width = random.uniform(-self.width_range, self.width_range)
+
+ if self.lazy:
+ return Translate([random_height, random_width],
+ lazy=True)(inputs[0])
+ else:
+ outputs = Translate([random_height, random_width],
+ interp=self.interp)(*inputs)
+ return outputs
+
+
+class RandomChoiceTranslate(object):
+
+ def __init__(self,
+ values,
+ p=None,
+ interp='bilinear',
+ lazy=False):
+ """
+ Randomly translate an image some fraction of total height and/or
+ some fraction of total width from a list of potential values.
+ If the image has multiple channels,
+ the same translation will be applied to each channel.
+
+ Arguments
+ ---------
+ values : a list or tuple
+ the values from which the translation value will be sampled
+
+ p : a list or tuple the same length as `values`
+ the probabilities of sampling any given value. Must sum to 1.
+
+ interp : string in {'bilinear', 'nearest'} or list of strings
+ type of interpolation to use. You can provide a different
+ type of interpolation for each input, e.g. if you have two
+ inputs then you can say `interp=['bilinear','nearest']
+
+ lazy : boolean
+ if true, only create the affine transform matrix and return that
+ if false, perform the transform on the tensor and return the tensor
+ """
+ if isinstance(values, (list, tuple)):
+ values = th.FloatTensor(values)
+ self.values = values
+ if p is None:
+ p = th.ones(len(values)) / len(values)
+ else:
+ if abs(1.0-sum(p)) > 1e-3:
+ raise ValueError('Probs must sum to 1')
+ self.p = p
+ self.interp = interp
+ self.lazy = lazy
+
+ def __call__(self, *inputs):
+ random_height = th_random_choice(self.values, p=self.p)
+ random_width = th_random_choice(self.values, p=self.p)
+
+ if self.lazy:
+ return Translate([random_height, random_width],
+ lazy=True)(inputs[0])
+ else:
+ outputs = Translate([random_height, random_width],
+ interp=self.interp)(*inputs)
+ return outputs
+
+
+class Translate(object):
+
+ def __init__(self,
+ value,
+ interp='bilinear',
+ lazy=False):
+ """
+ Arguments
+ ---------
+ value : float or 2-tuple of float
+ if single value, both horizontal and vertical translation
+ will be this value * total height/width. Thus, value should
+ be a fraction of total height/width with range (-1, 1)
+
+ interp : string in {'bilinear', 'nearest'} or list of strings
+ type of interpolation to use. You can provide a different
+ type of interpolation for each input, e.g. if you have two
+ inputs then you can say `interp=['bilinear','nearest']
+
+ """
+ if not isinstance(value, (tuple,list)):
+ value = (value, value)
+
+ if value[0] > 1 or value[0] < -1:
+ raise ValueError('Translation must be between -1 and 1')
+ if value[1] > 1 or value[1] < -1:
+ raise ValueError('Translation must be between -1 and 1')
+
+ self.height_range = value[0]
+ self.width_range = value[1]
+ self.interp = interp
+ self.lazy = lazy
+
+ def __call__(self, *inputs):
+ if not isinstance(self.interp, (tuple,list)):
+ interp = [self.interp]*len(inputs)
+ else:
+ interp = self.interp
+
+ tx = self.height_range * inputs[0].size(1)
+ ty = self.width_range * inputs[0].size(2)
+
+ translation_matrix = th.FloatTensor([[1, 0, tx],
+ [0, 1, ty],
+ [0, 0, 1]])
+ if self.lazy:
+ return translation_matrix
+ else:
+ outputs = []
+ for idx, _input in enumerate(inputs):
+ input_tf = th_affine2d(_input,
+ translation_matrix,
+ mode=interp[idx],
+ center=True)
+ outputs.append(input_tf)
+ return outputs if len(outputs) > 1 else outputs[0]
+
+
+class RandomShear(object):
+
+ def __init__(self,
+ shear_range,
+ interp='bilinear',
+ lazy=False):
+ """
+ Randomly shear an image with radians (-shear_range, shear_range)
+
+ Arguments
+ ---------
+ shear_range : float
+ radian bounds on the shear transform
+
+ interp : string in {'bilinear', 'nearest'} or list of strings
+ type of interpolation to use. You can provide a different
+ type of interpolation for each input, e.g. if you have two
+ inputs then you can say `interp=['bilinear','nearest']
+
+ lazy : boolean
+ if false, perform the transform on the tensor and return the tensor
+ if true, only create the affine transform matrix and return that
+ """
+ self.shear_range = shear_range
+ self.interp = interp
+ self.lazy = lazy
+
+ def __call__(self, *inputs):
+ shear = random.uniform(-self.shear_range, self.shear_range)
+ if self.lazy:
+ return Shear(shear,
+ lazy=True)(inputs[0])
+ else:
+ outputs = Shear(shear,
+ interp=self.interp)(*inputs)
+ return outputs
+
+
+class RandomChoiceShear(object):
+
+ def __init__(self,
+ values,
+ p=None,
+ interp='bilinear',
+ lazy=False):
+ """
+ Randomly shear an image with a value sampled from a list of values.
+
+ Arguments
+ ---------
+ values : a list or tuple
+ the values from which the rotation value will be sampled
+
+ p : a list or tuple the same length as `values`
+ the probabilities of sampling any given value. Must sum to 1.
+
+ interp : string in {'bilinear', 'nearest'} or list of strings
+ type of interpolation to use. You can provide a different
+ type of interpolation for each input, e.g. if you have two
+ inputs then you can say `interp=['bilinear','nearest']
+
+ lazy : boolean
+ if false, perform the transform on the tensor and return the tensor
+ if true, only create the affine transform matrix and return that
+ """
+ if isinstance(values, (list, tuple)):
+ values = th.FloatTensor(values)
+ self.values = values
+ if p is None:
+ p = th.ones(len(values)) / len(values)
+ else:
+ if abs(1.0-sum(p)) > 1e-3:
+ raise ValueError('Probs must sum to 1')
+ self.p = p
+ self.interp = interp
+ self.lazy = lazy
+
+ def __call__(self, *inputs):
+ shear = th_random_choice(self.values, p=self.p)
+
+ if self.lazy:
+ return Shear(shear,
+ lazy=True)(inputs[0])
+ else:
+ outputs = Shear(shear,
+ interp=self.interp)(*inputs)
+ return outputs
+
+
+class Shear(object):
+
+ def __init__(self,
+ value,
+ interp='bilinear',
+ lazy=False):
+ self.value = value
+ self.interp = interp
+ self.lazy = lazy
+
+ def __call__(self, *inputs):
+ if not isinstance(self.interp, (tuple,list)):
+ interp = [self.interp]*len(inputs)
+ else:
+ interp = self.interp
+
+ theta = (math.pi * self.value) / 180
+ shear_matrix = th.FloatTensor([[1, -math.sin(theta), 0],
+ [0, math.cos(theta), 0],
+ [0, 0, 1]])
+ if self.lazy:
+ return shear_matrix
+ else:
+ outputs = []
+ for idx, _input in enumerate(inputs):
+ input_tf = th_affine2d(_input,
+ shear_matrix,
+ mode=interp[idx],
+ center=True)
+ outputs.append(input_tf)
+ return outputs if len(outputs) > 1 else outputs[0]
+
+
+class RandomZoom(object):
+
+ def __init__(self,
+ zoom_range,
+ interp='bilinear',
+ lazy=False):
+ """
+ Randomly zoom in and/or out on an image
+
+ Arguments
+ ---------
+ zoom_range : tuple or list with 2 values, both between (0, infinity)
+ lower and upper bounds on percent zoom.
+ Anything less than 1.0 will zoom in on the image,
+ anything greater than 1.0 will zoom out on the image.
+ e.g. (0.7, 1.0) will only zoom in,
+ (1.0, 1.4) will only zoom out,
+ (0.7, 1.4) will randomly zoom in or out
+
+ interp : string in {'bilinear', 'nearest'} or list of strings
+ type of interpolation to use. You can provide a different
+ type of interpolation for each input, e.g. if you have two
+ inputs then you can say `interp=['bilinear','nearest']
+
+ lazy : boolean
+ if false, perform the transform on the tensor and return the tensor
+ if true, only create the affine transform matrix and return that
+ """
+ if not isinstance(zoom_range, list) and not isinstance(zoom_range, tuple):
+ raise ValueError('zoom_range must be tuple or list with 2 values')
+ self.zoom_range = zoom_range
+ self.interp = interp
+ self.lazy = lazy
+
+ def __call__(self, *inputs):
+ zx = random.uniform(self.zoom_range[0], self.zoom_range[1])
+ zy = random.uniform(self.zoom_range[0], self.zoom_range[1])
+
+ if self.lazy:
+ return Zoom([zx, zy], lazy=True)(inputs[0])
+ else:
+ outputs = Zoom([zx, zy],
+ interp=self.interp)(*inputs)
+ return outputs
+
+
+class RandomChoiceZoom(object):
+
+ def __init__(self,
+ values,
+ p=None,
+ interp='bilinear',
+ lazy=False):
+ """
+ Randomly zoom in and/or out on an image with a value sampled from
+ a list of values
+
+ Arguments
+ ---------
+ values : a list or tuple
+ the values from which the applied zoom value will be sampled
+
+ p : a list or tuple the same length as `values`
+ the probabilities of sampling any given value. Must sum to 1.
+
+ interp : string in {'bilinear', 'nearest'} or list of strings
+ type of interpolation to use. You can provide a different
+ type of interpolation for each input, e.g. if you have two
+ inputs then you can say `interp=['bilinear','nearest']
+
+ lazy : boolean
+ if false, perform the transform on the tensor and return the tensor
+ if true, only create the affine transform matrix and return that
+ """
+ if isinstance(values, (list, tuple)):
+ values = th.FloatTensor(values)
+ self.values = values
+ if p is None:
+ p = th.ones(len(values)) / len(values)
+ else:
+ if abs(1.0-sum(p)) > 1e-3:
+ raise ValueError('Probs must sum to 1')
+ self.p = p
+ self.interp = interp
+ self.lazy = lazy
+
+ def __call__(self, *inputs):
+ zx = th_random_choice(self.values, p=self.p)
+ zy = th_random_choice(self.values, p=self.p)
+
+ if self.lazy:
+ return Zoom([zx, zy], lazy=True)(inputs[0])
+ else:
+ outputs = Zoom([zx, zy],
+ interp=self.interp)(*inputs)
+ return outputs
+
+
+class Zoom(object):
+
+ def __init__(self,
+ value,
+ interp='bilinear',
+ lazy=False):
+ """
+ Arguments
+ ---------
+ value : float
+ Fractional zoom.
+ =1 : no zoom
+ >1 : zoom-in (value-1)%
+ <1 : zoom-out (1-value)%
+
+ interp : string in {'bilinear', 'nearest'} or list of strings
+ type of interpolation to use. You can provide a different
+ type of interpolation for each input, e.g. if you have two
+ inputs then you can say `interp=['bilinear','nearest']
+
+ lazy : boolean
+ if true, only create the affine transform matrix and return that
+ if false, perform the transform on the tensor and return the tensor
+ """
+
+ if not isinstance(value, (tuple,list)):
+ value = (value, value)
+ self.value = value
+ self.interp = interp
+ self.lazy = lazy
+
+ def __call__(self, *inputs):
+ if not isinstance(self.interp, (tuple,list)):
+ interp = [self.interp]*len(inputs)
+ else:
+ interp = self.interp
+
+ zx, zy = self.value
+ zoom_matrix = th.FloatTensor([[zx, 0, 0],
+ [0, zy, 0],
+ [0, 0, 1]])
+
+ if self.lazy:
+ return zoom_matrix
+ else:
+ outputs = []
+ for idx, _input in enumerate(inputs):
+ input_tf = th_affine2d(_input,
+ zoom_matrix,
+ mode=interp[idx],
+ center=True)
+ outputs.append(input_tf)
+ return outputs if len(outputs) > 1 else outputs[0]
+
+
diff --git a/torchsample/torchsample/transforms/distortion_transforms.py b/torchsample/torchsample/transforms/distortion_transforms.py
new file mode 100755
index 0000000..311d36a
--- /dev/null
+++ b/torchsample/torchsample/transforms/distortion_transforms.py
@@ -0,0 +1,158 @@
+"""
+Transforms to distort local or global information of an image
+"""
+
+
+import torch as th
+import numpy as np
+import random
+
+
+class Scramble(object):
+ """
+ Create blocks of an image and scramble them
+ """
+ def __init__(self, blocksize):
+ self.blocksize = blocksize
+
+ def __call__(self, *inputs):
+ outputs = []
+ for idx, _input in enumerate(inputs):
+ size = _input.size()
+ img_height = size[1]
+ img_width = size[2]
+
+ x_blocks = int(img_height/self.blocksize) # number of x blocks
+ y_blocks = int(img_width/self.blocksize)
+ ind = th.randperm(x_blocks*y_blocks)
+
+ new = th.zeros(_input.size())
+ count = 0
+ for i in range(x_blocks):
+ for j in range (y_blocks):
+ # map the flat shuffled index back to (row, column); dividing by the number of
+ # width blocks keeps indices in range for non-square images
+ row = int(ind[count]) // y_blocks
+ column = int(ind[count]) % y_blocks
+ new[:, i*self.blocksize:(i+1)*self.blocksize, j*self.blocksize:(j+1)*self.blocksize] = \
+ _input[:, row*self.blocksize:(row+1)*self.blocksize, column*self.blocksize:(column+1)*self.blocksize]
+ count += 1
+ outputs.append(new)
+ return outputs if len(outputs) > 1 else outputs[0]
+
+
+class RandomChoiceScramble(object):
+
+ def __init__(self, blocksizes):
+ self.blocksizes = blocksizes
+
+ def __call__(self, *inputs):
+ blocksize = random.choice(self.blocksizes)
+ outputs = Scramble(blocksize=blocksize)(*inputs)
+ return outputs
+
+
+def _blur_image(image, H):
+ # break image up into its color components
+ size = image.shape
+ imr = image[0,:,:]
+ img = image[1,:,:]
+ imb = image[2,:,:]
+
+ # compute Fourier transform and frequency spectrum
+ Fim1r = np.fft.fftshift(np.fft.fft2(imr))
+ Fim1g = np.fft.fftshift(np.fft.fft2(img))
+ Fim1b = np.fft.fftshift(np.fft.fft2(imb))
+
+ # Apply the lowpass filter to the Fourier spectrum of the image
+ filtered_imager = np.multiply(H, Fim1r)
+ filtered_imageg = np.multiply(H, Fim1g)
+ filtered_imageb = np.multiply(H, Fim1b)
+
+ newim = np.zeros(size)
+
+ # convert the result to the spatial domain.
+ newim[0,:,:] = np.absolute(np.real(np.fft.ifft2(filtered_imager)))
+ newim[1,:,:] = np.absolute(np.real(np.fft.ifft2(filtered_imageg)))
+ newim[2,:,:] = np.absolute(np.real(np.fft.ifft2(filtered_imageb)))
+
+ return newim.astype('uint8')
+
+def _butterworth_filter(rows, cols, thresh, order):
+ # X and Y matrices with ranges normalised to +/- 0.5
+ array1 = np.ones(rows)
+ array2 = np.ones(cols)
+ array3 = np.arange(1,rows+1)
+ array4 = np.arange(1,cols+1)
+
+ x = np.outer(array1, array4)
+ y = np.outer(array3, array2)
+
+ x = x - float(cols/2) - 1
+ y = y - float(rows/2) - 1
+
+ x = x / cols
+ y = y / rows
+
+ radius = np.sqrt(np.square(x) + np.square(y))
+
+ matrix1 = radius/thresh
+ matrix2 = np.power(matrix1, 2*order)
+ f = np.reciprocal(1 + matrix2)
+
+ return f
+
+
+class Blur(object):
+ """
+ Blur an image with a Butterworth filter with a frequency
+ cutoff matching local block size
+ """
+ def __init__(self, threshold, order=5):
+ """
+ scramble blocksize of 128 => filter threshold of 64
+ scramble blocksize of 64 => filter threshold of 32
+ scramble blocksize of 32 => filter threshold of 16
+ scramble blocksize of 16 => filter threshold of 8
+ scramble blocksize of 8 => filter threshold of 4
+ """
+ self.threshold = threshold
+ self.order = order
+
+ def __call__(self, *inputs):
+ """
+ inputs should have values between 0 and 255
+ """
+ outputs = []
+ for idx, _input in enumerate(inputs):
+ rows = _input.size(1)
+ cols = _input.size(2)
+ fc = self.threshold # threshold
+ fs = 128.0 # max frequency
+ n = self.order # filter order
+ fc_rad = (fc/fs)*0.5
+ H = _butterworth_filter(rows, cols, fc_rad, n)
+ _input_blurred = _blur_image(_input.numpy().astype('uint8'), H)
+ _input_blurred = th.from_numpy(_input_blurred).float()
+ outputs.append(_input_blurred)
+
+ return outputs if len(outputs) > 1 else outputs[0]
+
+
+class RandomChoiceBlur(object):
+
+ def __init__(self, thresholds, order=5):
+ """
+ thresholds = [64.0, 32.0, 16.0, 8.0, 4.0]
+ """
+ self.thresholds = thresholds
+ self.order = order
+
+ def __call__(self, *inputs):
+ threshold = random.choice(self.thresholds)
+ outputs = Blur(threshold=threshold, order=self.order)(*inputs)
+ return outputs
+
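+# Usage sketch (illustrative only; `x` is a CHW tensor with values in 0-255):
+# thresholds follow the blocksize/2 rule documented on Blur, so the counterpart
+# of Scramble(64) would be Blur(32):
+# >>> blur = RandomChoiceBlur(thresholds=[64.0, 32.0, 16.0], order=5)
+# >>> x_blurred = blur(x)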
+
+
+
+
+
diff --git a/torchsample/torchsample/transforms/image_transforms.py b/torchsample/torchsample/transforms/image_transforms.py
new file mode 100755
index 0000000..332bad3
--- /dev/null
+++ b/torchsample/torchsample/transforms/image_transforms.py
@@ -0,0 +1,421 @@
+"""
+Transforms very specific to images such as
+color, lighting, contrast, brightness, etc transforms
+
+NOTE: Most of these transforms assume image intensities are
+between 0 and 1 and that inputs are torch tensors (NOT numpy arrays or PIL images)
+"""
+
+import random
+
+import torch as th
+
+from ..utils import th_random_choice
+
+
+def _blend(img1, img2, alpha):
+ """
+ Weighted sum of two images
+
+ Arguments
+ ---------
+ img1 : torch tensor
+ img2 : torch tensor
+ alpha : float between 0 and 1
+ how much weight to put on img1 and 1-alpha weight
+ to put on img2
+ """
+ return img1.mul(alpha).add(1 - alpha, img2)
+
+
+class Grayscale(object):
+
+ def __init__(self, keep_channels=False):
+ """
+ Convert RGB image to grayscale
+
+ Arguments
+ ---------
+ keep_channels : boolean
+ If true, will keep all 3 channels and they will be the same
+ If false, will just return 1 grayscale channel
+ """
+ self.keep_channels = keep_channels
+ if keep_channels:
+ self.channels = 3
+ else:
+ self.channels = 1
+
+ def __call__(self, *inputs):
+ outputs = []
+ for idx, _input in enumerate(inputs):
+ _input_dst = _input[0]*0.299 + _input[1]*0.587 + _input[2]*0.114
+ _input_gs = _input_dst.repeat(self.channels,1,1)
+ outputs.append(_input_gs)
+ return outputs if len(outputs) > 1 else outputs[0]
+
+class RandomGrayscale(object):
+
+ def __init__(self, p=0.5):
+ """
+ Randomly convert RGB image(s) to Grayscale w/ some probability,
+ NOTE: Always retains the 3 channels if image is grayscaled
+
+ p : a float
+ probability that image will be grayscaled
+ """
+ self.p = p
+
+ def __call__(self, *inputs):
+ pval = random.random()
+ if pval < self.p:
+ outputs = Grayscale(keep_channels=True)(*inputs)
+ else:
+ outputs = inputs
+ return outputs
+
+# ----------------------------------------------------
+# ----------------------------------------------------
+
+class Gamma(object):
+
+ def __init__(self, value):
+ """
+ Performs Gamma Correction on the input image. Also known as
+ Power Law Transform. This function transforms the input image
+ pixelwise according
+ to the equation Out = In**gamma after scaling each
+ pixel to the range 0 to 1.
+
+ Arguments
+ ---------
+ value : float
+ <1 : image will tend to be lighter
+ =1 : image will stay the same
+ >1 : image will tend to be darker
+ """
+ self.value = value
+
+ def __call__(self, *inputs):
+ outputs = []
+ for idx, _input in enumerate(inputs):
+ _input = th.pow(_input, self.value)
+ outputs.append(_input)
+ return outputs if len(outputs) > 1 else outputs[0]
+
+class RandomGamma(object):
+
+ def __init__(self, min_val, max_val):
+ """
+ Performs Gamma Correction on the input image with some
+ randomly selected gamma value between min_val and max_val.
+ Also known as Power Law Transform. This function transforms
+ the input image pixelwise according to the equation
+ Out = In**gamma after scaling each pixel to the range 0 to 1.
+
+ Arguments
+ ---------
+ min_val : float
+ min range
+ max_val : float
+ max range
+
+ NOTE:
+ for values:
+ <1 : image will tend to be lighter
+ =1 : image will stay the same
+ >1 : image will tend to be darker
+ """
+ self.values = (min_val, max_val)
+
+ def __call__(self, *inputs):
+ value = random.uniform(self.values[0], self.values[1])
+ outputs = Gamma(value)(*inputs)
+ return outputs
+
+class RandomChoiceGamma(object):
+
+ def __init__(self, values, p=None):
+ """
+ Performs Gamma Correction on the input image with some
+ gamma value selected in the list of given values.
+ Also known as Power Law Transform. This function transforms
+ the input image pixelwise according to the equation
+ Out = In**gamma after scaling each pixel to the range 0 to 1.
+
+ Arguments
+ ---------
+ values : list of floats
+ gamma values to sampled from
+ p : list of floats - same length as `values`
+ if None, values will be sampled uniformly.
+ Must sum to 1.
+
+ NOTE:
+ for values:
+ <1 : image will tend to be lighter
+ =1 : image will stay the same
+ >1 : image will tend to be darker
+ """
+ self.values = values
+ self.p = p
+
+ def __call__(self, *inputs):
+ value = th_random_choice(self.values, p=self.p)
+ outputs = Gamma(value)(*inputs)
+ return outputs
+
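+# Usage sketch (illustrative only; `x` is an image tensor scaled to [0, 1]):
+# Out = In**gamma, so gamma < 1 brightens and gamma > 1 darkens:
+# >>> x_light = Gamma(0.5)(x)
+# >>> x_dark = RandomGamma(1.5, 2.5)(x)      # gamma drawn uniformly from [1.5, 2.5]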
+# ----------------------------------------------------
+# ----------------------------------------------------
+
+class Brightness(object):
+ def __init__(self, value):
+ """
+ Alter the Brightness of an image
+
+ Arguments
+ ---------
+ value : brightness factor
+ =-1 = completely black
+ <0 = darker
+ 0 = no change
+ >0 = brighter
+ =1 = completely white
+ """
+ self.value = max(min(value,1.0),-1.0)
+
+ def __call__(self, *inputs):
+ outputs = []
+ for idx, _input in enumerate(inputs):
+ _input = th.clamp(_input.float().add(self.value).type(_input.type()), 0, 1)
+ outputs.append(_input)
+ return outputs if len(outputs) > 1 else outputs[0]
+
+class RandomBrightness(object):
+
+ def __init__(self, min_val, max_val):
+ """
+ Alter the Brightness of an image with a value randomly selected
+ between `min_val` and `max_val`
+
+ Arguments
+ ---------
+ min_val : float
+ min range
+ max_val : float
+ max range
+ """
+ self.values = (min_val, max_val)
+
+ def __call__(self, *inputs):
+ value = random.uniform(self.values[0], self.values[1])
+ outputs = Brightness(value)(*inputs)
+ return outputs
+
+class RandomChoiceBrightness(object):
+
+ def __init__(self, values, p=None):
+ """
+ Alter the Brightness of an image with a value randomly selected
+ from the list of given values with given probabilities
+
+ Arguments
+ ---------
+ values : list of floats
+ brightness values to sampled from
+ p : list of floats - same length as `values`
+ if None, values will be sampled uniformly.
+ Must sum to 1.
+ """
+ self.values = values
+ self.p = p
+
+ def __call__(self, *inputs):
+ value = th_random_choice(self.values, p=self.p)
+ outputs = Brightness(value)(*inputs)
+ return outputs
+
+# ----------------------------------------------------
+# ----------------------------------------------------
+
+class Saturation(object):
+
+ def __init__(self, value):
+ """
+ Alter the Saturation of image
+
+ Arguments
+ ---------
+ value : float
+ =-1 : gray
+ <0 : colors are more muted
+ =0 : image stays the same
+ >0 : colors are more pure
+ =1 : most saturated
+ """
+ self.value = max(min(value,1.0),-1.0)
+
+ def __call__(self, *inputs):
+ outputs = []
+ for idx, _input in enumerate(inputs):
+ _in_gs = Grayscale(keep_channels=True)(_input)
+ alpha = 1.0 + self.value
+ _in = th.clamp(_blend(_input, _in_gs, alpha), 0, 1)
+ outputs.append(_in)
+ return outputs if len(outputs) > 1 else outputs[0]
+
+class RandomSaturation(object):
+
+ def __init__(self, min_val, max_val):
+ """
+ Alter the Saturation of an image with a value randomly selected
+ between `min_val` and `max_val`
+
+ Arguments
+ ---------
+ min_val : float
+ min range
+ max_val : float
+ max range
+ """
+ self.values = (min_val, max_val)
+
+ def __call__(self, *inputs):
+ value = random.uniform(self.values[0], self.values[1])
+ outputs = Saturation(value)(*inputs)
+ return outputs
+
+class RandomChoiceSaturation(object):
+
+ def __init__(self, values, p=None):
+ """
+ Alter the Saturation of an image with a value randomly selected
+ from the list of given values with given probabilities
+
+ Arguments
+ ---------
+ values : list of floats
+ saturation values to sampled from
+ p : list of floats - same length as `values`
+ if None, values will be sampled uniformly.
+ Must sum to 1.
+
+ """
+ self.values = values
+ self.p = p
+
+ def __call__(self, *inputs):
+ value = th_random_choice(self.values, p=self.p)
+ outputs = Saturation(value)(*inputs)
+ return outputs
+
+# ----------------------------------------------------
+# ----------------------------------------------------
+
+class Contrast(object):
+ """
+
+ """
+ def __init__(self, value):
+ """
+ Adjust Contrast of image.
+
+ Contrast is adjusted independently for each channel of each image.
+
+ For each channel, this Op computes the mean of the image pixels
+ in the channel and then adjusts each component x of each pixel to
+ (x - mean) * contrast_factor + mean.
+
+ Arguments
+ ---------
+ value : float
+ smaller value: less contrast
+ ZERO: channel means
+ larger positive value: greater contrast
+ larger negative value: greater inverse contrast
+ """
+ self.value = value
+
+ def __call__(self, *inputs):
+ outputs = []
+ for idx, _input in enumerate(inputs):
+ channel_means = _input.mean(1).mean(2)
+ channel_means = channel_means.expand_as(_input)
+ _input = th.clamp((_input - channel_means) * self.value + channel_means,0,1)
+ outputs.append(_input)
+ return outputs if len(outputs) > 1 else outputs[0]
+
+class RandomContrast(object):
+
+ def __init__(self, min_val, max_val):
+ """
+ Alter the Contrast of an image with a value randomly selected
+ between `min_val` and `max_val`
+
+ Arguments
+ ---------
+ min_val : float
+ min range
+ max_val : float
+ max range
+ """
+ self.values = (min_val, max_val)
+
+ def __call__(self, *inputs):
+ value = random.uniform(self.values[0], self.values[1])
+ outputs = Contrast(value)(*inputs)
+ return outputs
+
+class RandomChoiceContrast(object):
+
+ def __init__(self, values, p=None):
+ """
+ Alter the Contrast of an image with a value randomly selected
+ from the list of given values with given probabilities
+
+ Arguments
+ ---------
+ values : list of floats
+ contrast values to sampled from
+ p : list of floats - same length as `values`
+ if None, values will be sampled uniformly.
+ Must sum to 1.
+
+ """
+ self.values = values
+ self.p = p
+
+ def __call__(self, *inputs):
+ value = th_random_choice(self.values, p=None)
+ outputs = Contrast(value)(*inputs)
+ return outputs
+
+# ----------------------------------------------------
+# ----------------------------------------------------
+
+def rgb_to_hsv(x):
+ """
+ Convert from RGB to HSV
+ """
+ hsv = th.zeros(*x.size())
+ c_min = x.min(0)
+ c_max = x.max(0)
+
+ delta = c_max[0] - c_min[0]
+
+ # set H
+ r_idx = c_max[1].eq(0)
+ hsv[0][r_idx] = ((x[1][r_idx] - x[2][r_idx]) / delta[r_idx]) % 6
+ g_idx = c_max[1].eq(1)
+ hsv[0][g_idx] = 2 + ((x[2][g_idx] - x[0][g_idx]) / delta[g_idx])
+ b_idx = c_max[1].eq(2)
+ hsv[0][b_idx] = 4 + ((x[0][b_idx] - x[1][b_idx]) / delta[b_idx])
+ hsv[0] = hsv[0].mul(60)
+
+ # set S
+ hsv[1] = delta / c_max[0]
+
+ # set V - good
+ hsv[2] = c_max[0]
+
+ return hsv
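+
+# Usage sketch (illustrative only): for a 3xHxW RGB tensor in [0, 1], rgb_to_hsv
+# returns H in degrees [0, 360) plus S and V in [0, 1]; e.g. a pure-red pixel maps
+# to roughly (0, 1, 1):
+# >>> hsv = rgb_to_hsv(th.FloatTensor([[[1.]], [[0.]], [[0.]]]))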
diff --git a/torchsample/torchsample/transforms/tensor_transforms.py b/torchsample/torchsample/transforms/tensor_transforms.py
new file mode 100755
index 0000000..bc03ea1
--- /dev/null
+++ b/torchsample/torchsample/transforms/tensor_transforms.py
@@ -0,0 +1,617 @@
+
+import os
+import random
+import math
+import numpy as np
+
+import torch as th
+from torch.autograd import Variable
+
+from ..utils import th_random_choice
+
+class Compose(object):
+ """
+ Composes several transforms together.
+ """
+ def __init__(self, transforms):
+ """
+ Composes (chains) several transforms together into
+ a single transform
+
+ Arguments
+ ---------
+ transforms : a list of transforms
+ transforms will be applied sequentially
+ """
+ self.transforms = transforms
+
+ def __call__(self, *inputs):
+ for transform in self.transforms:
+ if not isinstance(inputs, (list,tuple)):
+ inputs = [inputs]
+ inputs = transform(*inputs)
+ return inputs
+
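+# Usage sketch (illustrative only; `x` is e.g. a HxW ByteTensor image): chain
+# tensor transforms into a single callable:
+# >>> tform = Compose([TypeCast('float'), AddChannel(axis=0), RangeNormalize(0, 1)])
+# >>> x_t = tform(x)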
+
+class RandomChoiceCompose(object):
+ """
+ Randomly choose to apply one transform from a collection of transforms
+
+ e.g. to randomly apply EITHER 0-1 or -1-1 normalization to an input:
+ >>> transform = RandomChoiceCompose([RangeNormalize(0,1),
+ RangeNormalize(-1,1)])
+ >>> x_norm = transform(x) # only one of the two normalizations is applied
+ """
+ def __init__(self, transforms):
+ self.transforms = transforms
+
+ def __call__(self, *inputs):
+ tform = random.choice(self.transforms)
+ outputs = tform(*inputs)
+ return outputs
+
+
+class ToTensor(object):
+ """
+ Converts a numpy array to torch.Tensor
+ """
+ def __call__(self, *inputs):
+ outputs = []
+ for idx, _input in enumerate(inputs):
+ _input = th.from_numpy(_input)
+ outputs.append(_input)
+ return outputs if len(outputs) > 1 else outputs[0]
+
+
+class ToVariable(object):
+ """
+ Converts a torch.Tensor to autograd.Variable
+ """
+ def __call__(self, *inputs):
+ outputs = []
+ for idx, _input in enumerate(inputs):
+ _input = Variable(_input)
+ outputs.append(_input)
+ return outputs if len(outputs) > 1 else outputs[0]
+
+
+class ToCuda(object):
+ """
+ Moves an autograd.Variable to the GPU
+ """
+ def __init__(self, device=0):
+ """
+ Moves an autograd.Variable to the GPU
+
+ Arguments
+ ---------
+ device : integer
+ which GPU device to put the input(s) on
+ """
+ self.device = device
+
+ def __call__(self, *inputs):
+ outputs = []
+ for idx, _input in enumerate(inputs):
+ _input = _input.cuda(self.device)
+ outputs.append(_input)
+ return outputs if len(outputs) > 1 else outputs[0]
+
+
+class ToFile(object):
+ """
+ Saves an image to file. Useful as a pass-through transform
+ when wanting to observe how augmentation affects the data
+
+ NOTE: Only supports saving to Numpy currently
+ """
+ def __init__(self, root):
+ """
+ Saves an image to file. Useful as a pass-through transform
+ when wanting to observe how augmentation affects the data
+
+ NOTE: Only supports saving to Numpy currently
+
+ Arguments
+ ---------
+ root : string
+ path to main directory in which images will be saved
+ """
+ if root.startswith('~'):
+ root = os.path.expanduser(root)
+ self.root = root
+ self.counter = 0
+
+ def __call__(self, *inputs):
+ for idx, _input in enumerate(inputs):
+ fpath = os.path.join(self.root, 'img_%i_%i.npy'%(self.counter, idx))
+ np.save(fpath, _input.numpy())
+ self.counter += 1
+ return inputs
+
+
+class ChannelsLast(object):
+ """
+ Transposes a tensor so that the channel dim is last
+ `HWC` and `DHWC` are aliases for this transform.
+ """
+ def __init__(self, safe_check=False):
+ """
+ Transposes a tensor so that the channel dim is last
+ `HWC` and `DHWC` are aliases for this transform.
+
+ Arguments
+ ---------
+ safe_check : boolean
+ if true, will check if channels are already last and, if so,
+ will just return the inputs
+ """
+ self.safe_check = safe_check
+
+ def __call__(self, *inputs):
+ ndim = inputs[0].dim()
+ if self.safe_check:
+ # check if channels are already last
+ if inputs[0].size(-1) < inputs[0].size(0):
+ return inputs
+ plist = list(range(1,ndim))+[0]
+
+ outputs = []
+ for idx, _input in enumerate(inputs):
+ _input = _input.permute(*plist)
+ outputs.append(_input)
+ return outputs if len(outputs) > 1 else outputs[0]
+
+HWC = ChannelsLast
+DHWC = ChannelsLast
+
+class ChannelsFirst(object):
+ """
+ Transposes a tensor so that the channel dim is first.
+ `CHW` and `CDHW` are aliases for this transform.
+ """
+ def __init__(self, safe_check=False):
+ """
+ Transposes a tensor so that the channel dim is first.
+ `CHW` and `CDHW` are aliases for this transform.
+
+ Arguments
+ ---------
+ safe_check : boolean
+ if true, will check if channels are already last and, if so,
+ will just return the inputs
+ """
+ self.safe_check = safe_check
+
+ def __call__(self, *inputs):
+ ndim = inputs[0].dim()
+ if self.safe_check:
+ # check if channels are already first
+ if inputs[0].size(0) < inputs[0].size(-1):
+ return inputs
+ plist = [ndim-1] + list(range(0,ndim-1))
+
+ outputs = []
+ for idx, _input in enumerate(inputs):
+ _input = _input.permute(*plist)
+ outputs.append(_input)
+ return outputs if len(outputs) > 1 else outputs[0]
+
+CHW = ChannelsFirst
+CDHW = ChannelsFirst
+
+class TypeCast(object):
+ """
+ Cast a torch.Tensor to a different type
+ """
+ def __init__(self, dtype='float'):
+ """
+ Cast a torch.Tensor to a different type
+
+ Arguments
+ ---------
+ dtype : string or torch.*Tensor literal or list of such
+ data type to which input(s) will be cast.
+ If list, it should be the same length as inputs.
+ """
+ if isinstance(dtype, (list,tuple)):
+ dtypes = []
+ for dt in dtype:
+ if isinstance(dt, str):
+ if dt == 'byte':
+ dt = th.ByteTensor
+ elif dt == 'double':
+ dt = th.DoubleTensor
+ elif dt == 'float':
+ dt = th.FloatTensor
+ elif dt == 'int':
+ dt = th.IntTensor
+ elif dt == 'long':
+ dt = th.LongTensor
+ elif dt == 'short':
+ dt = th.ShortTensor
+ dtypes.append(dt)
+ self.dtype = dtypes
+ else:
+ if isinstance(dtype, str):
+ if dtype == 'byte':
+ dtype = th.ByteTensor
+ elif dtype == 'double':
+ dtype = th.DoubleTensor
+ elif dtype == 'float':
+ dtype = th.FloatTensor
+ elif dtype == 'int':
+ dtype = th.IntTensor
+ elif dtype == 'long':
+ dtype = th.LongTensor
+ elif dtype == 'short':
+ dtype = th.ShortTensor
+ self.dtype = dtype
+
+ def __call__(self, *inputs):
+ if not isinstance(self.dtype, (tuple,list)):
+ dtypes = [self.dtype]*len(inputs)
+ else:
+ dtypes = self.dtype
+
+ outputs = []
+ for idx, _input in enumerate(inputs):
+ _input = _input.type(dtypes[idx])
+ outputs.append(_input)
+ return outputs if len(outputs) > 1 else outputs[0]
+
+
+class AddChannel(object):
+ """
+ Adds a dummy channel to an image.
+ This will make an image of size (28, 28) to now be
+ of size (1, 28, 28), for example.
+ """
+ def __init__(self, axis=0):
+ """
+ Adds a dummy channel to an image, also known as
+ expanding an axis or unsqueezing a dim
+
+ Arguments
+ ---------
+ axis : integer
+ dimension to be expanded to singleton
+ """
+ self.axis = axis
+
+ def __call__(self, *inputs):
+ outputs = []
+ for idx, _input in enumerate(inputs):
+ _input = _input.unsqueeze(self.axis)
+ outputs.append(_input)
+ return outputs if len(outputs) > 1 else outputs[0]
+
+ExpandAxis = AddChannel
+Unsqueeze = AddChannel
+
+class Transpose(object):
+
+ def __init__(self, dim1, dim2):
+ """
+ Swaps two dimensions of a tensor
+
+ Arguments
+ ---------
+ dim1 : integer
+ first dim to switch
+ dim2 : integer
+ second dim to switch
+ """
+ self.dim1 = dim1
+ self.dim2 = dim2
+
+ def __call__(self, *inputs):
+ outputs = []
+ for idx, _input in enumerate(inputs):
+ _input = th.transpose(_input, self.dim1, self.dim2)
+ outputs.append(_input)
+ return outputs if len(outputs) > 1 else outputs[0]
+
+
+class RangeNormalize(object):
+ """
+ Given min_val and max_val, linearly rescales the values of the
+ th.*Tensor so that its observed min/max map onto the provided
+ min and max values.
+
+ Works by calculating :
+ a = (max'-min')/(max-min)
+ b = max' - a * max
+ new_value = a * value + b
+ where min' & max' are the given values,
+ and min & max are the observed min/max of the whole tensor
+
+ Arguments
+ ---------
+ min_val : float or integer
+ Min value to which tensors will be normalized
+ max_val : float or integer
+ Max value to which tensors will be normalized
+
+ NOTE: the observed min/max are recomputed for every sample; this
+ implementation does not accept fixed (pre-known) min/max values
+ or per-channel tuples, so normalization cost scales with input size.
+
+ Example:
+ >>> x = th.rand(3,5,5)
+ >>> rn = RangeNormalize(0,1)
+ >>> x_norm = rn(x)
+ """
+ def __init__(self,
+ min_val,
+ max_val):
+ """
+ Normalize a tensor between a min and max value
+
+ Arguments
+ ---------
+ min_val : float
+ lower bound of normalized tensor
+ max_val : float
+ upper bound of normalized tensor
+ """
+ self.min_val = min_val
+ self.max_val = max_val
+
+ def __call__(self, *inputs):
+ outputs = []
+ for idx, _input in enumerate(inputs):
+ _min_val = _input.min()
+ _max_val = _input.max()
+ a = (self.max_val - self.min_val) / (_max_val - _min_val)
+ b = self.max_val- a * _max_val
+ _input = _input.mul(a).add(b)
+ outputs.append(_input)
+ return outputs if len(outputs) > 1 else outputs[0]
+
+
+class StdNormalize(object):
+ """
+ Normalize torch tensor to have zero mean and unit std deviation
+ """
+ def __call__(self, *inputs):
+ outputs = []
+ for idx, _input in enumerate(inputs):
+ _input = _input.sub(_input.mean()).div(_input.std())
+ outputs.append(_input)
+ return outputs if len(outputs) > 1 else outputs[0]
+
+
+class Slice2D(object):
+
+ def __init__(self, axis=0, reject_zeros=False):
+ """
+ Take a random 2D slice from a 3D image along
+ a given axis. This image should not have a 4th channel dim.
+
+ Arguments
+ ---------
+ axis : integer in {0, 1, 2}
+ the axis on which to take slices
+
+ reject_zeros : boolean
+ whether to reject slices that are all zeros
+ """
+ self.axis = axis
+ self.reject_zeros = reject_zeros
+
+ def __call__(self, x, y=None):
+ while True:
+ keep_slice = random.randint(0,x.size(self.axis)-1)
+ if self.axis == 0:
+ slice_x = x[keep_slice,:,:]
+ if y is not None:
+ slice_y = y[keep_slice,:,:]
+ elif self.axis == 1:
+ slice_x = x[:,keep_slice,:]
+ if y is not None:
+ slice_y = y[:,keep_slice,:]
+ elif self.axis == 2:
+ slice_x = x[:,:,keep_slice]
+ if y is not None:
+ slice_y = y[:,:,keep_slice]
+
+ if not self.reject_zeros:
+ break
+ else:
+ if y is not None and th.sum(slice_y) > 0:
+ break
+ elif th.sum(slice_x) > 0:
+ break
+ if y is not None:
+ return slice_x, slice_y
+ else:
+ return slice_x
+
+
+class RandomCrop(object):
+
+ def __init__(self, size):
+ """
+ Randomly crop a torch tensor
+
+ Arguments
+ --------
+ size : tuple or list
+ dimensions of the crop
+ """
+ self.size = size
+
+ def __call__(self, *inputs):
+ h_idx = random.randint(0,inputs[0].size(1)-self.size[0])
+ w_idx = random.randint(0,inputs[0].size(2)-self.size[1])
+ outputs = []
+ for idx, _input in enumerate(inputs):
+ _input = _input[:, h_idx:(h_idx+self.size[0]),w_idx:(w_idx+self.size[1])]
+ outputs.append(_input)
+ return outputs if len(outputs) > 1 else outputs[0]
+
+
+class SpecialCrop(object):
+
+ def __init__(self, size, crop_type=0):
+ """
+ Perform a special crop - one of the four corners or center crop
+
+ Arguments
+ ---------
+ size : tuple or list
+ dimensions of the crop
+
+ crop_type : integer in {0,1,2,3,4}
+ 0 = center crop
+ 1 = top left crop
+ 2 = top right crop
+ 3 = bottom right crop
+ 4 = bottom left crop
+ """
+ if crop_type not in {0, 1, 2, 3, 4}:
+ raise ValueError('crop_type must be in {0, 1, 2, 3, 4}')
+ self.size = size
+ self.crop_type = crop_type
+
+ def __call__(self, x, y=None):
+ if self.crop_type == 0:
+ # center crop
+ x_diff = (x.size(1)-self.size[0])/2.
+ y_diff = (x.size(2)-self.size[1])/2.
+ ct_x = [int(math.ceil(x_diff)),x.size(1)-int(math.floor(x_diff))]
+ ct_y = [int(math.ceil(y_diff)),x.size(2)-int(math.floor(y_diff))]
+ indices = [ct_x,ct_y]
+ elif self.crop_type == 1:
+ # top left crop
+ tl_x = [0, self.size[0]]
+ tl_y = [0, self.size[1]]
+ indices = [tl_x,tl_y]
+ elif self.crop_type == 2:
+ # top right crop
+ tr_x = [0, self.size[0]]
+ tr_y = [x.size(2)-self.size[1], x.size(2)]
+ indices = [tr_x,tr_y]
+ elif self.crop_type == 3:
+ # bottom right crop
+ br_x = [x.size(1)-self.size[0],x.size(1)]
+ br_y = [x.size(2)-self.size[1],x.size(2)]
+ indices = [br_x,br_y]
+ elif self.crop_type == 4:
+ # bottom left crop
+ bl_x = [x.size(1)-self.size[0], x.size(1)]
+ bl_y = [0, self.size[1]]
+ indices = [bl_x,bl_y]
+
+ x = x[:,indices[0][0]:indices[0][1],indices[1][0]:indices[1][1]]
+
+ if y is not None:
+ y = y[:,indices[0][0]:indices[0][1],indices[1][0]:indices[1][1]]
+ return x, y
+ else:
+ return x
+
+
+class Pad(object):
+
+ def __init__(self, size):
+ """
+ Pads an image to the given size
+
+ Arguments
+ ---------
+ size : tuple or list
+ target size after padding (one entry per dim, including the channel dim)
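+
+ Example (an illustrative sketch, assuming the legacy th.*Tensor API):
+ >>> x = th.rand(3,28,28)
+ >>> x_pad = Pad((3,32,32))(x)        # zero-padded to (3,32,32)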
+ """
+ self.size = size
+
+ def __call__(self, x, y=None):
+ x = x.numpy()
+ shape_diffs = [int(np.ceil((i_s - d_s))) for d_s,i_s in zip(x.shape,self.size)]
+ shape_diffs = np.maximum(shape_diffs,0)
+ pad_sizes = [(int(np.ceil(s/2.)),int(np.floor(s/2.))) for s in shape_diffs]
+ x = np.pad(x, pad_sizes, mode='constant')
+ if y is not None:
+ y = y.numpy()
+ y = np.pad(y, pad_sizes, mode='constant')
+ return th.from_numpy(x), th.from_numpy(y)
+ else:
+ return th.from_numpy(x)
+
+
+class RandomFlip(object):
+
+ def __init__(self, h=True, v=False, p=0.5):
+ """
+ Randomly flip an image horizontally and/or vertically with
+ some probability.
+
+ Arguments
+ ---------
+ h : boolean
+ whether to horizontally flip w/ probability p
+
+ v : boolean
+ whether to vertically flip w/ probability p
+
+ p : float between [0,1]
+ probability with which to apply allowed flipping operations
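+
+ Example (an illustrative sketch, assuming the legacy th.*Tensor API):
+ >>> x = th.rand(3,64,64)
+ >>> x_flip = RandomFlip(h=True, v=False, p=0.5)(x)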
+ """
+ self.horizontal = h
+ self.vertical = v
+ self.p = p
+
+ def __call__(self, x, y=None):
+ x = x.numpy()
+ if y is not None:
+ y = y.numpy()
+ # horizontal flip with p = self.p
+ if self.horizontal:
+ if random.random() < self.p:
+ x = x.swapaxes(2, 0)
+ x = x[::-1, ...]
+ x = x.swapaxes(0, 2)
+ if y is not None:
+ y = y.swapaxes(2, 0)
+ y = y[::-1, ...]
+ y = y.swapaxes(0, 2)
+ # vertical flip with p = self.p
+ if self.vertical:
+ if random.random() < self.p:
+ x = x.swapaxes(1, 0)
+ x = x[::-1, ...]
+ x = x.swapaxes(0, 1)
+ if y is not None:
+ y = y.swapaxes(1, 0)
+ y = y[::-1, ...]
+ y = y.swapaxes(0, 1)
+ if y is None:
+ # must copy because torch doesn't currently support negative strides
+ return th.from_numpy(x.copy())
+ else:
+ return th.from_numpy(x.copy()),th.from_numpy(y.copy())
+
+
+class RandomOrder(object):
+ """
+ Randomly permute the channels of an image
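+
+ Example (an illustrative sketch, assuming the legacy th.*Tensor API):
+ >>> x = th.rand(3,32,32)
+ >>> x_shuffled = RandomOrder()(x)    # same image, channels permuted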
+ """
+ def __call__(self, *inputs):
+ order = th.randperm(inputs[0].size(0))   # permutation of the channel indices
+ outputs = []
+ for idx, _input in enumerate(inputs):
+ _input = _input.index_select(0, order)
+ outputs.append(_input)
+ return outputs if idx >= 1 else outputs[0]
+
diff --git a/torchsample/torchsample/utils.py b/torchsample/torchsample/utils.py
new file mode 100755
index 0000000..fc02357
--- /dev/null
+++ b/torchsample/torchsample/utils.py
@@ -0,0 +1,425 @@
+"""
+Utility functions for th.Tensors
+"""
+
+import pickle
+import random
+import numpy as np
+
+import torch as th
+
+
+def th_allclose(x, y):
+ """
+ Determine whether two torch tensors have approximately equal values.
+ Rough analogue of np.allclose: compares the summed absolute difference
+ against a fixed tolerance rather than checking elementwise.
+ """
+ return th.sum(th.abs(x-y)) < 1e-5
+
+
+def th_flatten(x):
+ """Flatten tensor"""
+ return x.contiguous().view(-1)
+
+def th_c_flatten(x):
+ """
+ Flatten tensor, leaving channel intact.
+ Assumes CHW format.
+ """
+ return x.contiguous().view(x.size(0), -1)
+
+def th_bc_flatten(x):
+ """
+ Flatten tensor, leaving batch and channel dims intact.
+ Assumes BCHW format
+ """
+ return x.contiguous().view(x.size(0), x.size(1), -1)
+
+
+def th_zeros_like(x):
+ return x.new().resize_as_(x).zero_()
+
+def th_ones_like(x):
+ return x.new().resize_as_(x).fill_(1)
+
+def th_constant_like(x, val):
+ return x.new().resize_as_(x).fill_(val)
+
+
+def th_iterproduct(*args):
+ return th.from_numpy(np.indices(args).reshape((len(args),-1)).T)
+
+def th_iterproduct_like(x):
+ return th_iterproduct(*x.size())
+
+
+def th_uniform(lower, upper):
+ return random.uniform(lower, upper)
+
+
+def th_gather_nd(x, coords):
+ """
+ Gather values of x at the integer coordinates given by the rows of coords.
+ """
+ x = x.contiguous()
+ # convert n-d coordinates to flat indices using the tensor strides
+ inds = coords.mv(th.LongTensor(x.stride()))
+ x_gather = th.index_select(th_flatten(x), 0, inds)
+ return x_gather
+
+
+def th_affine2d(x, matrix, mode='bilinear', center=True):
+ """
+ 2D Affine image transform on th.Tensor
+
+ Arguments
+ ---------
+ x : th.Tensor of size (C, H, W)
+ image tensor to be transformed
+
+ matrix : th.Tensor of size (3, 3) or (2, 3)
+ transformation matrix
+
+ mode : string in {'nearest', 'bilinear'}
+ interpolation scheme to use
+
+ center : boolean
+ whether to alter the bias of the transform
+ so the transform is applied about the center
+ of the image rather than the origin
+
+ Example
+ -------
+ >>> import torch
+ >>> from torchsample.utils import *
+ >>> x = th.zeros(2,1000,1000)
+ >>> x[:,100:1500,100:500] = 10
+ >>> matrix = th.FloatTensor([[1.,0,-50],
+ ... [0,1.,-50]])
+ >>> xn = th_affine2d(x, matrix, mode='nearest')
+ >>> xb = th_affine2d(x, matrix, mode='bilinear')
+ """
+
+ if matrix.dim() == 2:
+ matrix = matrix[:2,:]
+ matrix = matrix.unsqueeze(0)
+ elif matrix.dim() == 3:
+ if matrix.size()[1:] == (3,3):
+ matrix = matrix[:,:2,:]
+
+ A_batch = matrix[:,:,:2]
+ if A_batch.size(0) != x.size(0):
+ A_batch = A_batch.repeat(x.size(0),1,1)
+ b_batch = matrix[:,:,2].unsqueeze(1)
+
+ # make a meshgrid of normal coordinates
+ _coords = th_iterproduct(x.size(1),x.size(2))
+ coords = _coords.unsqueeze(0).repeat(x.size(0),1,1).float()
+
+ if center:
+ # shift the coordinates so center is the origin
+ coords[:,:,0] = coords[:,:,0] - (x.size(1) / 2. - 0.5)
+ coords[:,:,1] = coords[:,:,1] - (x.size(2) / 2. - 0.5)
+ # apply the coordinate transformation
+ new_coords = coords.bmm(A_batch.transpose(1,2)) + b_batch.expand_as(coords)
+
+ if center:
+ # shift the coordinates back so origin is origin
+ new_coords[:,:,0] = new_coords[:,:,0] + (x.size(1) / 2. - 0.5)
+ new_coords[:,:,1] = new_coords[:,:,1] + (x.size(2) / 2. - 0.5)
+
+ # map new coordinates using the chosen interpolation mode
+ if mode == 'nearest':
+ x_transformed = th_nearest_interp2d(x.contiguous(), new_coords)
+ elif mode == 'bilinear':
+ x_transformed = th_bilinear_interp2d(x.contiguous(), new_coords)
+
+ return x_transformed
+
+
+def th_nearest_interp2d(input, coords):
+ """
+ 2d nearest neighbor interpolation on a th.Tensor
+ """
+ # take clamp of coords so they're in the image bounds
+ x = th.clamp(coords[:,:,0], 0, input.size(1)-1).round()
+ y = th.clamp(coords[:,:,1], 0, input.size(2)-1).round()
+
+ stride = th.LongTensor(input.stride())
+ x_ix = x.mul(stride[1]).long()
+ y_ix = y.mul(stride[2]).long()
+
+ input_flat = input.view(input.size(0),-1)
+
+ mapped_vals = input_flat.gather(1, x_ix.add(y_ix))
+
+ return mapped_vals.view_as(input)
+
+
+def th_bilinear_interp2d(input, coords):
+ """
+ bilinear interpolation in 2d
+ """
+ x = th.clamp(coords[:,:,0], 0, input.size(1)-2)
+ x0 = x.floor()
+ x1 = x0 + 1
+ y = th.clamp(coords[:,:,1], 0, input.size(2)-2)
+ y0 = y.floor()
+ y1 = y0 + 1
+
+ stride = th.LongTensor(input.stride())
+ x0_ix = x0.mul(stride[1].float()).long()
+ x1_ix = x1.mul(stride[1].float()).long()
+ y0_ix = y0.mul(stride[2].float()).long()
+ y1_ix = y1.mul(stride[2].float()).long()
+
+ input_flat = input.view(input.size(0),-1)
+
+ vals_00 = input_flat.gather(1, x0_ix.add(y0_ix))
+ vals_10 = input_flat.gather(1, x1_ix.add(y0_ix))
+ vals_01 = input_flat.gather(1, x0_ix.add(y1_ix))
+ vals_11 = input_flat.gather(1, x1_ix.add(y1_ix))
+
+ xd = x - x0
+ yd = y - y0
+ xm = 1 - xd
+ ym = 1 - yd
+
+ x_mapped = (vals_00.mul(xm).mul(ym) +
+ vals_10.mul(xd).mul(ym) +
+ vals_01.mul(xm).mul(yd) +
+ vals_11.mul(xd).mul(yd))
+
+ return x_mapped.view_as(input)
+
+
+def th_affine3d(x, matrix, mode='trilinear', center=True):
+ """
+ 3D Affine image transform on th.Tensor
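+
+ Example (an illustrative sketch, assuming the legacy th.*Tensor API;
+ the matrix below is a hypothetical 3x4 affine with a -2 voxel shift):
+ >>> x = th.zeros(1,20,20,20)
+ >>> x[:,5:15,5:15,5:15] = 10
+ >>> matrix = th.FloatTensor([[1.,0,0,-2],
+ ... [0,1.,0,-2],
+ ... [0,0,1.,-2]])
+ >>> xt = th_affine3d(x, matrix, mode='trilinear')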
+ """
+ A = matrix[:3,:3]
+ b = matrix[:3,3]
+
+ # make a meshgrid of normal coordinates
+ coords = th_iterproduct(x.size(1),x.size(2),x.size(3)).float()
+
+
+ if center:
+ # shift the coordinates so center is the origin
+ coords[:,0] = coords[:,0] - (x.size(1) / 2. - 0.5)
+ coords[:,1] = coords[:,1] - (x.size(2) / 2. - 0.5)
+ coords[:,2] = coords[:,2] - (x.size(3) / 2. - 0.5)
+
+
+ # apply the coordinate transformation
+ new_coords = coords.mm(A.t().contiguous()) + b.expand_as(coords)
+
+ if center:
+ # shift the coordinates back so origin is origin
+ new_coords[:,0] = new_coords[:,0] + (x.size(1) / 2. - 0.5)
+ new_coords[:,1] = new_coords[:,1] + (x.size(2) / 2. - 0.5)
+ new_coords[:,2] = new_coords[:,2] + (x.size(3) / 2. - 0.5)
+
+ # map new coordinates using the chosen interpolation mode
+ if mode == 'nearest':
+ x_transformed = th_nearest_interp3d(x, new_coords)
+ else:
+ # 'trilinear' and any unrecognized mode fall back to trilinear
+ x_transformed = th_trilinear_interp3d(x, new_coords)
+
+ return x_transformed
+
+
+def th_nearest_interp3d(input, coords):
+ """
+ 3d nearest neighbor interpolation on a th.Tensor
+ """
+ # take clamp of coords so they're in the image bounds
+ coords[:,0] = th.clamp(coords[:,0], 0, input.size(1)-1).round()
+ coords[:,1] = th.clamp(coords[:,1], 0, input.size(2)-1).round()
+ coords[:,2] = th.clamp(coords[:,2], 0, input.size(3)-1).round()
+
+ stride = th.LongTensor(input.stride())[1:].float()
+ idx = coords.mv(stride).long()
+
+ input_flat = th_flatten(input)
+
+ mapped_vals = input_flat[idx]
+
+ return mapped_vals.view_as(input)
+
+
+def th_trilinear_interp3d(input, coords):
+ """
+ trilinear interpolation of 3D th.Tensor image
+ """
+ # take clamp then floor/ceil of x coords
+ x = th.clamp(coords[:,0], 0, input.size(1)-2)
+ x0 = x.floor()
+ x1 = x0 + 1
+ # take clamp then floor/ceil of y coords
+ y = th.clamp(coords[:,1], 0, input.size(2)-2)
+ y0 = y.floor()
+ y1 = y0 + 1
+ # take clamp then floor/ceil of z coords
+ z = th.clamp(coords[:,2], 0, input.size(3)-2)
+ z0 = z.floor()
+ z1 = z0 + 1
+
+ stride = th.LongTensor(input.stride())[1:]
+ x0_ix = x0.mul(stride[0]).long()
+ x1_ix = x1.mul(stride[0]).long()
+ y0_ix = y0.mul(stride[1]).long()
+ y1_ix = y1.mul(stride[1]).long()
+ z0_ix = z0.mul(stride[2]).long()
+ z1_ix = z1.mul(stride[2]).long()
+
+ input_flat = th_flatten(input)
+
+ vals_000 = input_flat[x0_ix+y0_ix+z0_ix]
+ vals_100 = input_flat[x1_ix+y0_ix+z0_ix]
+ vals_010 = input_flat[x0_ix+y1_ix+z0_ix]
+ vals_001 = input_flat[x0_ix+y0_ix+z1_ix]
+ vals_101 = input_flat[x1_ix+y0_ix+z1_ix]
+ vals_011 = input_flat[x0_ix+y1_ix+z1_ix]
+ vals_110 = input_flat[x1_ix+y1_ix+z0_ix]
+ vals_111 = input_flat[x1_ix+y1_ix+z1_ix]
+
+ xd = x - x0
+ yd = y - y0
+ zd = z - z0
+ xm1 = 1 - xd
+ ym1 = 1 - yd
+ zm1 = 1 - zd
+
+ x_mapped = (vals_000.mul(xm1).mul(ym1).mul(zm1) +
+ vals_100.mul(xd).mul(ym1).mul(zm1) +
+ vals_010.mul(xm1).mul(yd).mul(zm1) +
+ vals_001.mul(xm1).mul(ym1).mul(zd) +
+ vals_101.mul(xd).mul(ym1).mul(zd) +
+ vals_011.mul(xm1).mul(yd).mul(zd) +
+ vals_110.mul(xd).mul(yd).mul(zm1) +
+ vals_111.mul(xd).mul(yd).mul(zd))
+
+ return x_mapped.view_as(input)
+
+
+def th_pearsonr(x, y):
+ """
+ mimics scipy.stats.pearsonr
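+
+ Example (an illustrative sketch, assuming the legacy th.*Tensor API):
+ >>> x = th.randn(100)
+ >>> y = x.mul(2).add(th.randn(100).mul(0.1))
+ >>> r = th_pearsonr(x, y)    # close to 1.0 for this nearly-linear y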
+ """
+ mean_x = th.mean(x)
+ mean_y = th.mean(y)
+ xm = x.sub(mean_x)
+ ym = y.sub(mean_y)
+ r_num = xm.dot(ym)
+ r_den = th.norm(xm, 2) * th.norm(ym, 2)
+ r_val = r_num / r_den
+ return r_val
+
+
+def th_corrcoef(x):
+ """
+ mimics np.corrcoef
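+
+ Example (an illustrative sketch, assuming the legacy th.*Tensor API;
+ rows are variables and columns are observations, as in np.corrcoef):
+ >>> x = th.randn(4,100)
+ >>> c = th_corrcoef(x)       # (4,4) correlation matrix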
+ """
+ # calculate covariance matrix of rows
+ mean_x = th.mean(x, 1)
+ xm = x.sub(mean_x.expand_as(x))
+ c = xm.mm(xm.t())
+ c = c / (x.size(1) - 1)
+
+ # normalize covariance matrix
+ d = th.diag(c)
+ stddev = th.pow(d, 0.5)
+ c = c.div(stddev.expand_as(c))
+ c = c.div(stddev.expand_as(c).t())
+
+ # clamp between -1 and 1
+ c = th.clamp(c, -1.0, 1.0)
+
+ return c
+
+
+def th_matrixcorr(x, y):
+ """
+ return a correlation matrix between
+ columns of x and columns of y.
+
+ So, if x.size() == (1000,4) and y.size() == (1000,5),
+ then the result will be of size (4,5) with the
+ (i,j) value equal to the Pearson correlation coefficient
+ between column i of x and column j of y.
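+
+ Example (an illustrative sketch, assuming the legacy th.*Tensor API):
+ >>> x = th.randn(1000,4)
+ >>> y = th.randn(1000,5)
+ >>> r = th_matrixcorr(x, y)  # (4,5) matrix of column-wise correlations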
+ """
+ mean_x = th.mean(x, 0)
+ mean_y = th.mean(y, 0)
+ xm = x.sub(mean_x.expand_as(x))
+ ym = y.sub(mean_y.expand_as(y))
+ r_num = xm.t().mm(ym)
+ r_den1 = th.norm(xm,2,0)
+ r_den2 = th.norm(ym,2,0)
+ r_den = r_den1.t().mm(r_den2)
+ r_mat = r_num.div(r_den)
+ return r_mat
+
+
+def th_random_choice(a, n_samples=1, replace=True, p=None):
+ """
+ Parameters
+ -----------
+ a : 1-D array-like
+ If a th.Tensor, a random sample is generated from its elements.
+ If an int, the random sample is generated as if a were th.arange(0, a)
+ n_samples : int, optional
+ Number of samples to draw. Default is 1, in which case a
+ single value is returned.
+ replace : boolean, optional
+ Whether the sample is with or without replacement
+ p : 1-D array-like, optional
+ The probabilities associated with each entry in a.
+ If not given, the sample assumes a uniform distribution over all
+ entries in a.
+
+ Returns
+ --------
+ samples : 1-D th.Tensor of length n_samples
+ The generated random samples (a single value when n_samples == 1)
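+
+ Example (an illustrative sketch, assuming the legacy th.*Tensor API):
+ >>> a = th.FloatTensor([1., 2., 3., 4.])
+ >>> s = th_random_choice(a, n_samples=2)                       # uniform draws
+ >>> s = th_random_choice(a, n_samples=2, p=[0.1,0.2,0.3,0.4])  # weighted draws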
+ """
+ if isinstance(a, int):
+ a = th.arange(0, a)
+
+ if p is None:
+ if replace:
+ idx = th.floor(th.rand(n_samples)*a.size(0)).long()
+ else:
+ idx = th.randperm(len(a))[:n_samples]
+ else:
+ if abs(1.0-sum(p)) > 1e-3:
+ raise ValueError('p must sum to 1.0')
+ if not replace:
+ raise ValueError('replace must be True when probabilities are given')
+ idx_vec = th.cat([th.zeros(round(p[i]*1000))+i for i in range(len(p))])
+ idx = (th.floor(th.rand(n_samples)*999)).long()
+ idx = idx_vec[idx].long()
+ selection = a[idx]
+ if n_samples == 1:
+ selection = selection[0]
+ return selection
+
+
+def save_transform(file, transform):
+ """
+ Save a transform object
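+
+ Example (an illustrative sketch; 'transform.pkl' and my_transform are
+ hypothetical placeholders):
+ >>> save_transform('transform.pkl', my_transform)
+ >>> my_transform = load_transform('transform.pkl')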
+ """
+ with open(file, 'wb') as output_file:
+ pickler = pickle.Pickler(output_file, -1)
+ pickler.dump(transform)
+
+
+def load_transform(file):
+ """
+ Load a transform object
+ """
+ with open(file, 'rb') as input_file:
+ transform = pickle.load(input_file)
+ return transform
+
+
+
+
diff --git a/torchsample/torchsample/version.py b/torchsample/torchsample/version.py
new file mode 100755
index 0000000..8ce9b36
--- /dev/null
+++ b/torchsample/torchsample/version.py
@@ -0,0 +1 @@
+__version__ = '0.1.3'
diff --git a/version.py b/version.py
index 7d110e7..4f3af1e 100644
--- a/version.py
+++ b/version.py
@@ -1,4 +1,4 @@
# GENERATED VERSION FILE
-# TIME: Fri Jan 28 00:02:09 2022
-__version__ = "0.1.0+7718366"
-short_version = "0.1.0"
+# TIME: Wed Apr 13 19:39:12 2022
+__version__ = '1.0.0+1a6e3a7'
+short_version = '1.0.0'